@@ -4,18 +4,20 @@ import remarkRehype from "remark-rehype"
 import { Processor, unified } from "unified"
 import { Root as MDRoot } from "remark-parse/lib"
 import { Root as HTMLRoot } from "hast"
-import { ProcessedContent } from "../plugins/vfile"
+import { MarkdownContent, ProcessedContent } from "../plugins/vfile"
 import { PerfTimer } from "../util/perf"
 import { read } from "to-vfile"
-import { FilePath, QUARTZ, slugifyFilePath } from "../util/path"
+import { FilePath, FullSlug, QUARTZ, slugifyFilePath } from "../util/path"
 import path from "path"
 import workerpool, { Promise as WorkerPromise } from "workerpool"
 import { QuartzLogger } from "../util/log"
 import { trace } from "../util/trace"
 import { BuildCtx } from "../util/ctx"
 
-export type QuartzProcessor = Processor<MDRoot, MDRoot, HTMLRoot>
-export function createProcessor(ctx: BuildCtx): QuartzProcessor {
+export type QuartzMdProcessor = Processor<MDRoot, MDRoot, MDRoot>
+export type QuartzHtmlProcessor = Processor<undefined, MDRoot, HTMLRoot>
+
+export function createMdProcessor(ctx: BuildCtx): QuartzMdProcessor {
   const transformers = ctx.cfg.plugins.transformers
 
   return (
@@ -24,14 +26,20 @@ export function createProcessor(ctx: BuildCtx): QuartzProcessor {
       .use(remarkParse)
       // MD AST -> MD AST transforms
       .use(
-        transformers
-          .filter((p) => p.markdownPlugins)
-          .flatMap((plugin) => plugin.markdownPlugins!(ctx)),
+        transformers.flatMap((plugin) => plugin.markdownPlugins?.(ctx) ?? []),
+      ) as unknown as QuartzMdProcessor
+    // ^ sadly the typing of `use` is not smart enough to infer the correct type from our plugin list
   )
+}
+
+export function createHtmlProcessor(ctx: BuildCtx): QuartzHtmlProcessor {
+  const transformers = ctx.cfg.plugins.transformers
+  return (
+    unified()
       // MD AST -> HTML AST
       .use(remarkRehype, { allowDangerousHtml: true })
       // HTML AST -> HTML AST transforms
-      .use(transformers.filter((p) => p.htmlPlugins).flatMap((plugin) => plugin.htmlPlugins!(ctx)))
+      .use(transformers.flatMap((plugin) => plugin.htmlPlugins?.(ctx) ?? []))
   )
 }
 
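The two factories above replace the old single `createProcessor`: `createMdProcessor` stops at the Markdown AST, and `createHtmlProcessor` picks up from that AST and runs the remaining rehype transforms. A minimal sketch of how the two compose, mirroring the `concurrency === 1` path later in this diff (the `parseAll` helper and its import paths are illustrative, not part of the change):

```ts
import { BuildCtx } from "../util/ctx"
import { FilePath } from "../util/path"
import { ProcessedContent } from "../plugins/vfile"
import {
  createFileParser,
  createHtmlProcessor,
  createMarkdownParser,
  createMdProcessor,
} from "./parse"

// sketch: run both phases back to back for a list of files
async function parseAll(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> {
  // phase 1: file -> Markdown AST (remark transformers)
  const mdContent = await createFileParser(ctx, fps)(createMdProcessor(ctx))
  // phase 2: Markdown AST -> HTML AST (rehype transformers)
  return createMarkdownParser(ctx, mdContent)(createHtmlProcessor(ctx))
}
```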
@@ -75,8 +83,8 @@ async function transpileWorkerScript() {
 
 export function createFileParser(ctx: BuildCtx, fps: FilePath[]) {
   const { argv, cfg } = ctx
-  return async (processor: QuartzProcessor) => {
-    const res: ProcessedContent[] = []
+  return async (processor: QuartzMdProcessor) => {
+    const res: MarkdownContent[] = []
     for (const fp of fps) {
       try {
         const perf = new PerfTimer()
@@ -100,10 +108,32 @@ export function createFileParser(ctx: BuildCtx, fps: FilePath[]) {
         res.push([newAst, file])
 
         if (argv.verbose) {
-          console.log(`[process] ${fp} -> ${file.data.slug} (${perf.timeSince()})`)
+          console.log(`[markdown] ${fp} -> ${file.data.slug} (${perf.timeSince()})`)
         }
       } catch (err) {
-        trace(`\nFailed to process \`${fp}\``, err as Error)
+        trace(`\nFailed to process markdown \`${fp}\``, err as Error)
       }
     }
 
+    return res
+  }
+}
+
+export function createMarkdownParser(ctx: BuildCtx, mdContent: MarkdownContent[]) {
+  return async (processor: QuartzHtmlProcessor) => {
+    const res: ProcessedContent[] = []
+    for (const [ast, file] of mdContent) {
+      try {
+        const perf = new PerfTimer()
+
+        const newAst = await processor.run(ast as MDRoot, file)
+        res.push([newAst, file])
+
+        if (ctx.argv.verbose) {
+          console.log(`[html] ${file.data.slug} (${perf.timeSince()})`)
+        }
+      } catch (err) {
+        trace(`\nFailed to process html \`${file.data.filePath}\``, err as Error)
+      }
+    }
+
@@ -113,6 +143,7 @@ export function createFileParser(ctx: BuildCtx, fps: FilePath[]) {
 
 const clamp = (num: number, min: number, max: number) =>
   Math.min(Math.max(Math.round(num), min), max)
+
 export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> {
   const { argv } = ctx
   const perf = new PerfTimer()
@@ -126,9 +157,8 @@ export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<Pro
   log.start(`Parsing input files using ${concurrency} threads`)
   if (concurrency === 1) {
     try {
-      const processor = createProcessor(ctx)
-      const parse = createFileParser(ctx, fps)
-      res = await parse(processor)
+      const mdRes = await createFileParser(ctx, fps)(createMdProcessor(ctx))
+      res = await createMarkdownParser(ctx, mdRes)(createHtmlProcessor(ctx))
     } catch (error) {
       log.end()
       throw error
@@ -140,17 +170,27 @@ export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<Pro
       maxWorkers: concurrency,
       workerType: "thread",
     })
 
-    const childPromises: WorkerPromise<ProcessedContent[]>[] = []
-    for (const chunk of chunks(fps, CHUNK_SIZE)) {
-      childPromises.push(pool.exec("parseFiles", [ctx.buildId, argv, chunk, ctx.allSlugs]))
+    const errorHandler = (err: any) => {
+      console.error(`${err}`.replace(/^error:\s*/i, ""))
+      process.exit(1)
     }
 
-    const results: ProcessedContent[][] = await WorkerPromise.all(childPromises).catch((err) => {
-      const errString = err.toString().slice("Error:".length)
-      console.error(errString)
-      process.exit(1)
-    })
+    const mdPromises: WorkerPromise<[MarkdownContent[], FullSlug[]]>[] = []
+    for (const chunk of chunks(fps, CHUNK_SIZE)) {
+      mdPromises.push(pool.exec("parseMarkdown", [ctx.buildId, argv, chunk]))
+    }
+    const mdResults: [MarkdownContent[], FullSlug[]][] =
+      await WorkerPromise.all(mdPromises).catch(errorHandler)
 
+    const childPromises: WorkerPromise<ProcessedContent[]>[] = []
+    for (const [_, extraSlugs] of mdResults) {
+      ctx.allSlugs.push(...extraSlugs)
+    }
+    for (const [mdChunk, _] of mdResults) {
+      childPromises.push(pool.exec("processHtml", [ctx.buildId, argv, mdChunk, ctx.allSlugs]))
+    }
+    const results: ProcessedContent[][] = await WorkerPromise.all(childPromises).catch(errorHandler)
+
     res = results.flat()
     await pool.terminate()
   }
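In the pooled branch, the old single `parseFiles` round trip becomes two: the `parseMarkdown` tasks return the parsed Markdown ASTs together with any extra slugs they produced, those slugs are merged into `ctx.allSlugs`, and only then are the `processHtml` tasks queued, so the HTML transforms see the complete slug list. A condensed sketch of that ordering (the `runPooled` helper is hypothetical, error handling is omitted, and import paths follow the imports above):

```ts
import workerpool, { Promise as WorkerPromise } from "workerpool"
import { MarkdownContent, ProcessedContent } from "../plugins/vfile"
import { FilePath, FullSlug } from "../util/path"
import { BuildCtx } from "../util/ctx"

// hypothetical helper mirroring the worker-pool branch above, with files pre-chunked
async function runPooled(
  ctx: BuildCtx,
  fileChunks: FilePath[][],
  pool: ReturnType<typeof workerpool.pool>,
): Promise<ProcessedContent[]> {
  const { argv } = ctx

  // stage 1: markdown parsing in workers; each task also reports the extra slugs it found
  const mdPromises: WorkerPromise<[MarkdownContent[], FullSlug[]]>[] = fileChunks.map((chunk) =>
    pool.exec("parseMarkdown", [ctx.buildId, argv, chunk]),
  )
  const mdResults: [MarkdownContent[], FullSlug[]][] = await WorkerPromise.all(mdPromises)

  // merge the extra slugs before any HTML work is dispatched
  ctx.allSlugs.push(...mdResults.flatMap(([_, extraSlugs]) => extraSlugs))

  // stage 2: HTML processing, now with the full slug list
  const htmlPromises: WorkerPromise<ProcessedContent[]>[] = mdResults.map(([mdChunk, _]) =>
    pool.exec("processHtml", [ctx.buildId, argv, mdChunk, ctx.allSlugs]),
  )
  return (await WorkerPromise.all(htmlPromises)).flat()
}
```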