Merge remote-tracking branch 'upstream/v4' into v4
@@ -162,7 +162,6 @@ yargs(hideBin(process.argv))
label: "Symlink an existing folder",
hint: "don't select this unless you know what you are doing!",
},
{ value: "keep", label: "Keep the existing files" },
],
}),
)
@@ -176,6 +175,7 @@ yargs(hideBin(process.argv))
}
}

await fs.promises.unlink(path.join(contentFolder, ".gitkeep"))
if (setupStrategy === "copy" || setupStrategy === "symlink") {
const originalFolder = escapePath(
exitIfCancel(
@@ -205,8 +205,6 @@ yargs(hideBin(process.argv))
await fs.promises.symlink(originalFolder, contentFolder, "dir")
}
} else if (setupStrategy === "new") {
await rmContentFolder()
await fs.promises.mkdir(contentFolder)
await fs.promises.writeFile(
path.join(contentFolder, "index.md"),
`---
@@ -219,7 +217,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
)
}

// get a prefered link resolution strategy
// get a preferred link resolution strategy
const linkResolutionStrategy = exitIfCancel(
await select({
message: `Choose how Quartz should resolve links in your content. You can change this later in \`quartz.config.ts\`.`,
@@ -393,13 +391,19 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
})

const buildMutex = new Mutex()
const timeoutIds = new Set()
let firstBuild = true
let lastBuildMs = 0
let cleanupBuild = null
const build = async (clientRefresh) => {
const buildStart = new Date().getTime()
lastBuildMs = buildStart
const release = await buildMutex.acquire()
if (firstBuild) {
firstBuild = false
} else {
if (lastBuildMs > buildStart) {
release()
return
}

if (cleanupBuild) {
await cleanupBuild()
console.log(chalk.yellow("Detected a source code change, doing a hard rebuild..."))
}

@@ -408,6 +412,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
console.log(`Reason: ${chalk.grey(err)}`)
process.exit(1)
})
release()

if (argv.bundleInfo) {
const outputFileName = "quartz/.quartz-cache/transpiled-build.mjs"
@@ -423,15 +428,8 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
// bypass module cache
// https://github.com/nodejs/modules/issues/307
const { default: buildQuartz } = await import(cacheFile + `?update=${randomUUID()}`)
await buildQuartz(argv, clientRefresh)
cleanupBuild = await buildQuartz(argv, buildMutex, clientRefresh)
clientRefresh()
release()
}

const rebuild = (clientRefresh) => {
timeoutIds.forEach((id) => clearTimeout(id))
timeoutIds.clear()
timeoutIds.add(setTimeout(() => build(clientRefresh), 250))
}

if (argv.serve) {
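
The hunk above reworks the CLI's hot-reload path: builds are serialized through an `async-mutex` `Mutex`, a `lastBuildMs` timestamp lets a build that waited on the lock bail out once a newer one has been scheduled, and `rebuild` debounces watcher events by 250ms. A minimal sketch of that pattern, assuming only the `async-mutex` API seen in the diff; `doBuild` is an illustrative stand-in for the dynamically imported `buildQuartz` call:

```ts
import { Mutex } from "async-mutex"

const buildMutex = new Mutex()
const timeoutIds = new Set<ReturnType<typeof setTimeout>>()
let lastBuildMs = 0

// stand-in for the real transpiled buildQuartz call
const doBuild = async () => console.log("building...")

const build = async () => {
  const buildStart = new Date().getTime()
  lastBuildMs = buildStart
  const release = await buildMutex.acquire()

  // a newer build was scheduled while this one waited on the mutex; drop it
  if (lastBuildMs > buildStart) {
    release()
    return
  }

  await doBuild()
  release()
}

// debounce: only the last watcher event within 250ms actually triggers a build
const rebuild = () => {
  timeoutIds.forEach((id) => clearTimeout(id))
  timeoutIds.clear()
  timeoutIds.add(setTimeout(build, 250))
}

rebuild() // e.g. called from a file-watcher callback
```

The same timestamp-and-mutex guard reappears inside `startServing` further down, where it protects the incremental rebuild.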

@@ -459,6 +457,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
req.url = req.url?.slice(argv.baseDir.length)

const serve = async () => {
const release = await buildMutex.acquire()
await serveHandler(req, res, {
public: argv.output,
directoryListing: false,
@@ -473,6 +472,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
const statusString =
status >= 200 && status < 300 ? chalk.green(`[${status}]`) : chalk.red(`[${status}]`)
console.log(statusString + chalk.grey(` ${argv.baseDir}${req.url}`))
release()
}

const redirect = (newFp) => {
@@ -539,7 +539,7 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
ignoreInitial: true,
})
.on("all", async () => {
rebuild(clientRefresh)
build(clientRefresh)
})
} else {
await build(() => {})

@@ -18,7 +18,7 @@ import { trace } from "./util/trace"
import { options } from "./util/sourcemap"
import { Mutex } from "async-mutex"

async function buildQuartz(argv: Argv, clientRefresh: () => void) {
async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
const ctx: BuildCtx = {
argv,
cfg,
@@ -38,13 +38,14 @@ async function buildQuartz(argv: Argv, clientRefresh: () => void) {
console.log(`  Emitters: ${pluginNames("emitters").join(", ")}`)
}

const release = await mut.acquire()
perf.addEvent("clean")
await rimraf(output)
console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)

perf.addEvent("glob")
const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
const fps = allFiles.filter((fp) => fp.endsWith(".md"))
const fps = allFiles.filter((fp) => fp.endsWith(".md")).sort()
console.log(
`Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
)
@@ -56,15 +57,17 @@ async function buildQuartz(argv: Argv, clientRefresh: () => void) {
const filteredContent = filterContent(ctx, parsedFiles)
await emitContent(ctx, filteredContent)
console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))
release()

if (argv.serve) {
return startServing(ctx, parsedFiles, clientRefresh)
return startServing(ctx, mut, parsedFiles, clientRefresh)
}
}

// setup watcher for rebuilds
async function startServing(
ctx: BuildCtx,
mut: Mutex,
initialContent: ProcessedContent[],
clientRefresh: () => void,
) {
@@ -78,8 +81,7 @@ async function startServing(
}

const initialSlugs = ctx.allSlugs
const buildMutex = new Mutex()
const timeoutIds: Set<ReturnType<typeof setTimeout>> = new Set()
let lastBuildMs = 0
const toRebuild: Set<FilePath> = new Set()
const toRemove: Set<FilePath> = new Set()
const trackedAssets: Set<FilePath> = new Set()
@@ -109,49 +111,50 @@ async function startServing(
}

// debounce rebuilds every 250ms
timeoutIds.add(
setTimeout(async () => {
const release = await buildMutex.acquire()
timeoutIds.forEach((id) => clearTimeout(id))
timeoutIds.clear()

const perf = new PerfTimer()
console.log(chalk.yellow("Detected change, rebuilding..."))
try {
const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
const buildStart = new Date().getTime()
lastBuildMs = buildStart
const release = await mut.acquire()
if (lastBuildMs > buildStart) {
release()
return
}

const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
.filter((fp) => !toRemove.has(fp))
.map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
const perf = new PerfTimer()
console.log(chalk.yellow("Detected change, rebuilding..."))
try {
const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))

ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
const parsedContent = await parseMarkdown(ctx, filesToRebuild)
for (const content of parsedContent) {
const [_tree, vfile] = content
contentMap.set(vfile.data.filePath!, content)
}
const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
.filter((fp) => !toRemove.has(fp))
.map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))

for (const fp of toRemove) {
contentMap.delete(fp)
}
ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
const parsedContent = await parseMarkdown(ctx, filesToRebuild)
for (const content of parsedContent) {
const [_tree, vfile] = content
contentMap.set(vfile.data.filePath!, content)
}

// TODO: we can probably traverse the link graph to figure out what's safe to delete here
// instead of just deleting everything
await rimraf(argv.output)
const parsedFiles = [...contentMap.values()]
const filteredContent = filterContent(ctx, parsedFiles)
await emitContent(ctx, filteredContent)
console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
} catch {
console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
}
for (const fp of toRemove) {
contentMap.delete(fp)
}

clientRefresh()
toRebuild.clear()
toRemove.clear()
release()
}, 250),
)
const parsedFiles = [...contentMap.values()]
const filteredContent = filterContent(ctx, parsedFiles)
// TODO: we can probably traverse the link graph to figure out what's safe to delete here
// instead of just deleting everything
await rimraf(argv.output)
await emitContent(ctx, filteredContent)
console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
} catch {
console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
}

clientRefresh()
toRebuild.clear()
toRemove.clear()
release()
}

const watcher = chokidar.watch(".", {
@@ -164,11 +167,15 @@ async function startServing(
.on("add", (fp) => rebuild(fp, "add"))
.on("change", (fp) => rebuild(fp, "change"))
.on("unlink", (fp) => rebuild(fp, "delete"))

return async () => {
await watcher.close()
}
}

export default async (argv: Argv, clientRefresh: () => void) => {
export default async (argv: Argv, mut: Mutex, clientRefresh: () => void) => {
try {
return await buildQuartz(argv, clientRefresh)
return await buildQuartz(argv, mut, clientRefresh)
} catch (err) {
trace("\nExiting Quartz due to a fatal error", err as Error)
}
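
With the watcher now owned by `startServing`, `buildQuartz` resolves to a cleanup function in serve mode (the `return async () => { await watcher.close() }` above), which the CLI keeps as `cleanupBuild` and awaits before a hard rebuild. A rough sketch of that contract, assuming only the chokidar calls shown in the diff; `setupWatcher` and `hardRebuild` are illustrative names, not Quartz APIs:

```ts
import chokidar from "chokidar"

// returns a cleanup function, the same shape startServing returns above
async function setupWatcher(onChange: (fp: string) => void): Promise<() => Promise<void>> {
  const watcher = chokidar.watch(".", { ignoreInitial: true })
  watcher.on("all", (_event, fp) => onChange(fp))
  return async () => {
    await watcher.close()
  }
}

let cleanupBuild: (() => Promise<void>) | null = null

async function hardRebuild() {
  // tear down the previous watcher before setting up a fresh build
  if (cleanupBuild) {
    await cleanupBuild()
  }
  cleanupBuild = await setupWatcher((fp) => console.log(`changed: ${fp}`))
}

void hardRebuild()
```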

@@ -1,3 +1,4 @@
import { ValidDateType } from "./components/Date"
import { QuartzComponent } from "./components/types"
import { PluginTypes } from "./plugins/types"
import { Theme } from "./util/theme"
@@ -22,6 +23,8 @@ export interface GlobalConfiguration {
analytics: Analytics
/** Glob patterns to not search */
ignorePatterns: string[]
/** Whether to use created, modified, or published as the default type of date */
defaultDateType: ValidDateType
/** Base URL to use for CNAME files, sitemaps, and RSS feeds that require an absolute URL.
* Quartz will avoid using this as much as possible and use relative URLs most of the time
*/

@@ -1,15 +1,16 @@
import { formatDate } from "./Date"
import { formatDate, getDate } from "./Date"
import { QuartzComponentConstructor, QuartzComponentProps } from "./types"
import readingTime from "reading-time"

export default (() => {
function ContentMetadata({ fileData }: QuartzComponentProps) {
function ContentMetadata({ cfg, fileData }: QuartzComponentProps) {
const text = fileData.text
if (text) {
const segments: string[] = []
const { text: timeTaken, words: _words } = readingTime(text)
if (fileData.dates?.modified) {
segments.push(formatDate(fileData.dates.modified))

if (fileData.dates) {
segments.push(formatDate(getDate(cfg, fileData)!))
}

segments.push(timeTaken)

@@ -1,7 +1,21 @@
import { GlobalConfiguration } from "../cfg"
import { QuartzPluginData } from "../plugins/vfile"

interface Props {
date: Date
}

export type ValidDateType = keyof Required<QuartzPluginData>["dates"]

export function getDate(cfg: GlobalConfiguration, data: QuartzPluginData): Date | undefined {
if (!cfg.defaultDateType) {
throw new Error(
`Field 'defaultDateType' was not set in the configuration object of quartz.config.ts. See https://quartz.jzhao.xyz/configuration#general-configuration for more details.`,
)
}
return data.dates?.[cfg.defaultDateType]
}

export function formatDate(d: Date): string {
return d.toLocaleDateString("en-US", {
year: "numeric",
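
`getDate` is the helper the rest of this commit keys off: it returns whichever of `created`, `modified`, or `published` the new `defaultDateType` config field selects, and throws a descriptive error when the field is missing. A hedged usage sketch; the import paths assume the standard `quartz/` source layout, and the `cfg` and `fileData` literals are made up for illustration:

```ts
import { formatDate, getDate } from "./quartz/components/Date"
import { GlobalConfiguration } from "./quartz/cfg"
import { QuartzPluginData } from "./quartz/plugins/vfile"

// only the field getDate actually reads is filled in here
const cfg = { defaultDateType: "modified" } as GlobalConfiguration

// illustrative file data with all three date kinds populated
const fileData = {
  dates: {
    created: new Date("2023-01-01"),
    modified: new Date("2023-06-15"),
    published: new Date("2023-02-01"),
  },
} as QuartzPluginData

const d = getDate(cfg, fileData) // picks dates.modified because of defaultDateType
console.log(d && formatDate(d))
```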

@@ -1,31 +1,36 @@
import { FullSlug, resolveRelative } from "../util/path"
import { QuartzPluginData } from "../plugins/vfile"
import { Date } from "./Date"
import { Date, getDate } from "./Date"
import { QuartzComponentProps } from "./types"
import { GlobalConfiguration } from "../cfg"

export function byDateAndAlphabetical(f1: QuartzPluginData, f2: QuartzPluginData): number {
if (f1.dates && f2.dates) {
// sort descending by last modified
return f2.dates.modified.getTime() - f1.dates.modified.getTime()
} else if (f1.dates && !f2.dates) {
// prioritize files with dates
return -1
} else if (!f1.dates && f2.dates) {
return 1
export function byDateAndAlphabetical(
cfg: GlobalConfiguration,
): (f1: QuartzPluginData, f2: QuartzPluginData) => number {
return (f1, f2) => {
if (f1.dates && f2.dates) {
// sort descending
return getDate(cfg, f2)!.getTime() - getDate(cfg, f1)!.getTime()
} else if (f1.dates && !f2.dates) {
// prioritize files with dates
return -1
} else if (!f1.dates && f2.dates) {
return 1
}

// otherwise, sort lexographically by title
const f1Title = f1.frontmatter?.title.toLowerCase() ?? ""
const f2Title = f2.frontmatter?.title.toLowerCase() ?? ""
return f1Title.localeCompare(f2Title)
}

// otherwise, sort lexographically by title
const f1Title = f1.frontmatter?.title.toLowerCase() ?? ""
const f2Title = f2.frontmatter?.title.toLowerCase() ?? ""
return f1Title.localeCompare(f2Title)
}

type Props = {
limit?: number
} & QuartzComponentProps

export function PageList({ fileData, allFiles, limit }: Props) {
let list = allFiles.sort(byDateAndAlphabetical)
export function PageList({ cfg, fileData, allFiles, limit }: Props) {
let list = allFiles.sort(byDateAndAlphabetical(cfg))
if (limit) {
list = list.slice(0, limit)
}
@@ -41,7 +46,7 @@ export function PageList({ fileData, allFiles, limit }: Props) {
<div class="section">
{page.dates && (
<p class="meta">
<Date date={page.dates.modified} />
<Date date={getDate(cfg, page)!} />
</p>
)}
<div class="desc">
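
`byDateAndAlphabetical` becomes a factory so the comparator can consult `defaultDateType` through `getDate` instead of hard-coding `dates.modified`; call sites only gain one argument. A small sketch with made-up file data (import paths again assume the `quartz/` layout):

```ts
import { byDateAndAlphabetical } from "./quartz/components/PageList"
import { GlobalConfiguration } from "./quartz/cfg"
import { QuartzPluginData } from "./quartz/plugins/vfile"

const cfg = { defaultDateType: "modified" } as GlobalConfiguration

// one undated entry and one dated entry, for illustration
const files = [
  { frontmatter: { title: "B note" } },
  {
    frontmatter: { title: "A note" },
    dates: { created: new Date(), modified: new Date(), published: new Date() },
  },
] as QuartzPluginData[]

// dated files come first (newest first); undated ones fall back to title order
const sorted = [...files].sort(byDateAndAlphabetical(cfg))
console.log(sorted.map((f) => f.frontmatter?.title)) // ["A note", "B note"]
```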

@@ -3,7 +3,8 @@ import { FullSlug, SimpleSlug, resolveRelative } from "../util/path"
import { QuartzPluginData } from "../plugins/vfile"
import { byDateAndAlphabetical } from "./PageList"
import style from "./styles/recentNotes.scss"
import { Date } from "./Date"
import { Date, getDate } from "./Date"
import { GlobalConfiguration } from "../cfg"

interface Options {
title: string
@@ -13,18 +14,18 @@ interface Options {
sort: (f1: QuartzPluginData, f2: QuartzPluginData) => number
}

const defaultOptions: Options = {
const defaultOptions = (cfg: GlobalConfiguration): Options => ({
title: "Recent Notes",
limit: 3,
linkToMore: false,
filter: () => true,
sort: byDateAndAlphabetical,
}
sort: byDateAndAlphabetical(cfg),
})

export default ((userOpts?: Partial<Options>) => {
const opts = { ...defaultOptions, ...userOpts }
function RecentNotes(props: QuartzComponentProps) {
const { allFiles, fileData, displayClass } = props
const { allFiles, fileData, displayClass, cfg } = props
const opts = { ...defaultOptions(cfg), ...userOpts }
const pages = allFiles.filter(opts.filter).sort(opts.sort)
const remaining = Math.max(0, pages.length - opts.limit)
return (
@@ -47,7 +48,7 @@ export default ((userOpts?: Partial<Options>) => {
</div>
{page.dates && (
<p class="meta">
<Date date={page.dates.modified} />
<Date date={getDate(cfg, page)!} />
</p>
)}
<ul class="tags">

@@ -44,7 +44,8 @@ TagList.css = `
a.tag-link {
border-radius: 8px;
background-color: var(--highlight);
padding: 0.2rem 0.5rem;
padding: 0.2rem 0.4rem;
margin: 0 0.1rem;
}
`

@@ -33,7 +33,9 @@ function FolderContent(props: QuartzComponentProps) {

return (
<div class="popover-hint">
<article>{content}</article>
<article>
<p>{content}</p>
</article>
<p>{allPagesInFolder.length} items under this folder.</p>
<div>
<PageList {...listProps} />

@@ -37,7 +37,9 @@ function TagContent(props: QuartzComponentProps) {

return (
<div class="popover-hint">
<article>{content}</article>
<article>
<p>{content}</p>
</article>
<p>Found {tags.length} total tags.</p>
<div>
{tags.map((tag) => {

@@ -64,7 +64,7 @@ async function navigate(url: URL, isBack: boolean = false) {
// scroll into place and add history
if (!isBack) {
if (url.hash) {
const el = document.getElementById(url.hash.substring(1))
const el = document.getElementById(decodeURIComponent(url.hash.substring(1)))
el?.scrollIntoView()
} else {
window.scrollTo({ top: 0 })
@@ -79,7 +79,9 @@ async function navigate(url: URL, isBack: boolean = false) {

// delay setting the url until now
// at this point everything is loaded so changing the url should resolve to the correct addresses
history.pushState({}, "", url)
if (!isBack) {
history.pushState({}, "", url)
}
notifyNav(getFullSlug(window))
delete announcer.dataset.persist
}
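
The SPA router change decodes the hash before looking up the target element, and only pushes a history entry on forward navigation. The decoding half in isolation, with a made-up URL (the `getElementById` call in the diff assumes a browser context):

```ts
// a heading slugged as "日本語" arrives percent-encoded in url.hash
const url = new URL("https://example.com/notes/page#%E6%97%A5%E6%9C%AC%E8%AA%9E")

console.log(url.hash.substring(1)) // "%E6%97%A5%E6%9C%AC%E8%AA%9E", which matches no element id
console.log(decodeURIComponent(url.hash.substring(1))) // "日本語", which matches the heading's id
```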

@@ -34,6 +34,7 @@
border-radius: 5px;
box-shadow: 6px 6px 36px 0 rgba(0, 0, 0, 0.25);
overflow: auto;
white-space: normal;
}

h1 {

@@ -1,4 +1,5 @@
import { GlobalConfiguration } from "../../cfg"
import { getDate } from "../../components/Date"
import { FilePath, FullSlug, SimpleSlug, simplifySlug } from "../../util/path"
import { QuartzEmitterPlugin } from "../types"
import path from "path"
@@ -22,7 +23,7 @@ interface Options {
const defaultOptions: Options = {
enableSiteMap: true,
enableRSS: true,
includeEmptyFiles: false,
includeEmptyFiles: true,
}

function generateSiteMap(cfg: GlobalConfiguration, idx: ContentIndex): string {
@@ -41,26 +42,26 @@ function generateRSSFeed(cfg: GlobalConfiguration, idx: ContentIndex): string {
const base = cfg.baseUrl ?? ""
const root = `https://${base}`

const createURLEntry = (slug: SimpleSlug, content: ContentDetails): string => `<items>
const createURLEntry = (slug: SimpleSlug, content: ContentDetails): string => `<item>
<title>${content.title}</title>
<link>${root}/${slug}</link>
<guid>${root}/${slug}</guid>
<description>${content.description}</description>
<pubDate>${content.date?.toUTCString()}</pubDate>
</items>`
</item>`

const items = Array.from(idx)
.map(([slug, content]) => createURLEntry(simplifySlug(slug), content))
.join("")
return `<rss xmlns:atom="http://www.w3.org/2005/atom" version="2.0">
return `<?xml version="1.0" encoding="UTF-8" ?>
<rss version="2.0">
<channel>
<title>${cfg.pageTitle}</title>
<link>${root}</link>
<description>Recent content on ${cfg.pageTitle}</description>
<generator>Quartz -- quartz.jzhao.xyz</generator>
<atom:link href="${root}/index.xml" rel="self" type="application/rss+xml"/>
${items}
</channel>
${items}
</rss>`
}
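
The feed hunk fixes the generated XML: entries are emitted as standard `<item>` elements (the old `<items>` tag is not valid RSS), an XML declaration is prepended, and the item list sits inside `<channel>` where RSS 2.0 expects it. A minimal sketch of the corrected entry shape, with illustrative values in place of the real `ContentDetails`:

```ts
// illustrative values mirroring the fields createURLEntry interpolates
const root = "https://example.com"
const slug = "notes/my-note"
const content = {
  title: "My note",
  description: "A short description",
  date: new Date("2023-06-15"),
}

// one well-formed RSS <item>, matching the corrected template
const item = `<item>
  <title>${content.title}</title>
  <link>${root}/${slug}</link>
  <guid>${root}/${slug}</guid>
  <description>${content.description}</description>
  <pubDate>${content.date.toUTCString()}</pubDate>
</item>`

console.log(item)
```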

@@ -74,7 +75,7 @@ export const ContentIndex: QuartzEmitterPlugin<Partial<Options>> = (opts) => {
const linkIndex: ContentIndex = new Map()
for (const [_tree, file] of content) {
const slug = file.data.slug!
const date = file.data.dates?.modified ?? new Date()
const date = getDate(ctx.cfg.configuration, file.data) ?? new Date()
if (opts?.includeEmptyFiles || (file.data.text && file.data.text !== "")) {
linkIndex.set(slug, {
title: file.data.frontmatter?.title!,

@@ -41,7 +41,7 @@ export const FrontMatter: QuartzTransformerPlugin<Partial<Options> | undefined>
}

// slug them all!!
data.tags = data.tags?.map((tag: string) => slugTag(tag)) ?? []
data.tags = [...new Set(data.tags?.map((tag: string) => slugTag(tag)))] ?? []

// fill in frontmatter
file.data.frontmatter = {

@@ -60,11 +60,17 @@ export const CrawlLinks: QuartzTransformerPlugin<Partial<Options> | undefined> =
dest,
transformOptions,
)

// url.resolve is considered legacy
// WHATWG equivalent https://nodejs.dev/en/api/v18/url/#urlresolvefrom-to
const url = new URL(dest, `https://base.com/${curSlug}`)
const canonicalDest = url.pathname
const [destCanonical, _destAnchor] = splitAnchor(canonicalDest)
const simple = decodeURI(simplifySlug(destCanonical as FullSlug)) as SimpleSlug

// need to decodeURIComponent here as WHATWG URL percent-encodes everything
const simple = decodeURIComponent(
simplifySlug(destCanonical as FullSlug),
) as SimpleSlug
outgoing.add(simple)
}
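
The crawler resolves wiki-link destinations with the WHATWG `URL` API, which percent-encodes the resulting pathname, so the hunk decodes the simplified slug with `decodeURIComponent` (which, unlike the previous `decodeURI`, also decodes reserved characters such as `%26`). A tiny illustration of the round-trip with a made-up link target:

```ts
// WHATWG URL percent-encodes non-ASCII characters in the resolved pathname
const url = new URL("notes/café", "https://base.com/")
console.log(url.pathname) // "/notes/caf%C3%A9"

// decoding restores the slug so it can be compared against unencoded slugs
console.log(decodeURIComponent(url.pathname)) // "/notes/café"
```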

@@ -116,7 +116,7 @@ const calloutRegex = new RegExp(/^\[\!(\w+)\]([+-]?)/)
const calloutLineRegex = new RegExp(/^> *\[\!\w+\][+-]?.*$/, "gm")
// (?:^| ) -> non-capturing group, tag should start be separated by a space or be the start of the line
// #(\w+) -> tag itself is # followed by a string of alpha-numeric characters
const tagRegex = new RegExp(/(?:^| )#([\w-_\/]+)/, "g")
const tagRegex = new RegExp(/(?:^| )#(\p{L}+)/, "gu")

export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options> | undefined> = (
userOpts,
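
The new `tagRegex` uses a Unicode property escape with the `u` flag, so inline tags written in non-Latin scripts are recognized; note the capture is now `\p{L}+` (letters only) rather than the old `[\w-_\/]+` class. A quick check of what it matches:

```ts
const tagRegex = new RegExp(/(?:^| )#(\p{L}+)/, "gu")

const text = "some #plaintext and a #日本語 tag"
for (const match of text.matchAll(tagRegex)) {
  console.log(match[1])
}
// plaintext
// 日本語
```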

@@ -382,8 +382,8 @@ export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options>
plugins.push(() => {
return (tree: Root, file) => {
const base = pathToRoot(file.data.slug!)
findAndReplace(tree, tagRegex, (value: string, tag: string) => {
if (file.data.frontmatter) {
findAndReplace(tree, tagRegex, (_value: string, tag: string) => {
if (file.data.frontmatter && !file.data.frontmatter.tags.includes(tag)) {
file.data.frontmatter.tags.push(tag)
}

@@ -398,7 +398,7 @@ export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options>
children: [
{
type: "text",
value,
value: `#${tag}`,
},
],
}

@@ -2,7 +2,7 @@ import { QuartzTransformerPlugin } from "../types"
import { Root } from "mdast"
import { visit } from "unist-util-visit"
import { toString } from "mdast-util-to-string"
import { slug as slugAnchor } from "github-slugger"
import Slugger from "github-slugger"

export interface Options {
maxDepth: 1 | 2 | 3 | 4 | 5 | 6
@@ -34,6 +34,7 @@ export const TableOfContents: QuartzTransformerPlugin<Partial<Options> | undefin
return async (tree: Root, file) => {
const display = file.data.frontmatter?.enableToc ?? opts.showByDefault
if (display) {
const slugAnchor = new Slugger()
const toc: TocEntry[] = []
let highestDepth: number = opts.maxDepth
visit(tree, "heading", (node) => {
@@ -43,7 +44,7 @@ export const TableOfContents: QuartzTransformerPlugin<Partial<Options> | undefin
toc.push({
depth: node.depth,
text,
slug: slugAnchor(text),
slug: slugAnchor.slug(text),
})
}
})
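
Switching from the module-level `slug` helper to one `Slugger` instance per file keeps duplicate headings within a document unique while resetting the counter between files. A short github-slugger sketch of that behaviour:

```ts
import Slugger from "github-slugger"

const slugAnchor = new Slugger()
console.log(slugAnchor.slug("Introduction")) // "introduction"
console.log(slugAnchor.slug("Introduction")) // "introduction-1", deduplicated within this file

// a fresh instance (one per processed file) starts counting from scratch
const nextFile = new Slugger()
console.log(nextFile.slug("Introduction")) // "introduction"
```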

@@ -27,6 +27,11 @@ section {
border-radius: 5px;
}

::selection {
background: color-mix(in srgb, var(--tertiary) 75%, transparent);
color: var(--darkgray);
}

p,
ul,
text,

@@ -52,7 +52,7 @@ export function slugifyFilePath(fp: FilePath, excludeExt?: boolean): FullSlug {

let slug = withoutFileExt
.split("/")
.map((segment) => segment.replace(/\s/g, "-")) // slugify all segments
.map((segment) => segment.replace(/\s/g, "-").replace(/%/g, "-percent")) // slugify all segments
.join("/") // always use / as sep
.replace(/\/$/, "") // remove trailing slash
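
`slugifyFilePath` now also rewrites literal `%` characters in each path segment, since a raw `%` in a slug would later be misread as a percent-encoding. The per-segment transform in isolation, applied to an illustrative path:

```ts
const withoutFileExt = "some folder/100% done"
const slug = withoutFileExt
  .split("/")
  .map((segment) => segment.replace(/\s/g, "-").replace(/%/g, "-percent")) // slugify all segments
  .join("/") // always use / as sep
  .replace(/\/$/, "") // remove trailing slash

console.log(slug) // "some-folder/100-percent-done"
```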