update spinners
@@ -7,6 +7,7 @@ import { EmitCallback } from "../plugins/types"
 import { ProcessedContent } from "../plugins/vfile"
 import { QUARTZ, slugify } from "../path"
 import { globbyStream } from "globby"
+import chalk from "chalk"
 
 export async function emitContent(contentFolder: string, output: string, cfg: QuartzConfig, content: ProcessedContent[], verbose: boolean) {
   const perf = new PerfTimer()
@@ -23,13 +24,18 @@ export async function emitContent(contentFolder: string, output: string, cfg: Qu
 
   let emittedFiles = 0
   for (const emitter of cfg.plugins.emitters) {
-    const emitted = await emitter.emit(cfg.configuration, content, staticResources, emit)
-    emittedFiles += emitted.length
+    try {
+      const emitted = await emitter.emit(cfg.configuration, content, staticResources, emit)
+      emittedFiles += emitted.length
 
-    if (verbose) {
-      for (const file of emitted) {
-        console.log(`[emit:${emitter.name}] ${file}`)
+      if (verbose) {
+        for (const file of emitted) {
+          console.log(`[emit:${emitter.name}] ${file}`)
+        }
       }
+    } catch (err) {
+      console.log(chalk.red(`Failed to emit from plugin \`${emitter.name}\`: `) + err)
+      process.exit(1)
     }
   }
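
Note on the emitContent change above: each emitter plugin now runs inside its own try/catch, so a plugin that throws aborts the build with a chalk-highlighted message naming the plugin instead of surfacing as an unhandled rejection. A minimal, self-contained sketch of that guard pattern (the plugin shape below is illustrative only; Quartz's real emitter call also receives the configuration, processed content, static resources, and an emit callback, as seen in the hunk):

import chalk from "chalk"

// Illustrative plugin shape, not Quartz's actual emitter type.
interface NamedEmitter {
  name: string
  emit: () => Promise<string[]> // resolves to the list of emitted file paths
}

async function runEmitters(emitters: NamedEmitter[]): Promise<number> {
  let emittedFiles = 0
  for (const emitter of emitters) {
    try {
      const emitted = await emitter.emit()
      emittedFiles += emitted.length
    } catch (err) {
      // Attribute the failure to the plugin by name, then abort the build.
      console.log(chalk.red(`Failed to emit from plugin \`${emitter.name}\`: `) + err)
      process.exit(1)
    }
  }
  return emittedFiles
}
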
@@ -12,9 +12,11 @@ import path from 'path'
 import os from 'os'
 import workerpool, { Promise as WorkerPromise } from 'workerpool'
 import { QuartzTransformerPlugin } from '../plugins/types'
+import { QuartzLogger } from '../log'
+import chalk from 'chalk'
 
 export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void>
-export function createProcessor(transformers: QuartzTransformerPlugin[]): any {
+export function createProcessor(transformers: QuartzTransformerPlugin[]): QuartzProcessor {
   // base Markdown -> MD AST
   let processor = unified().use(remarkParse)
 
@@ -41,15 +43,11 @@ function* chunks<T>(arr: T[], n: number) {
   }
 }
 
-async function transpileWorkerScript(verbose: boolean) {
+async function transpileWorkerScript() {
   // transpile worker script
   const cacheFile = "./.quartz-cache/transpiled-worker.mjs"
   const fp = "./quartz/worker.ts"
-  if (verbose) {
-    console.log("Transpiling worker script")
-  }
-
-  await esbuild.build({
+  return esbuild.build({
     entryPoints: [fp],
     outfile: path.join("quartz", cacheFile),
     bundle: true,
@@ -75,31 +73,50 @@ async function transpileWorkerScript(verbose: boolean) {
   })
 }
 
+export function createFileParser(baseDir: string, fps: string[], verbose: boolean) {
+  return async (processor: QuartzProcessor) => {
+    const res: ProcessedContent[] = []
+    for (const fp of fps) {
+      try {
+        const file = await read(fp)
+
+        // base data properties that plugins may use
+        file.data.slug = slugify(path.relative(baseDir, file.path))
+        file.data.filePath = fp
+
+        const ast = processor.parse(file)
+        const newAst = await processor.run(ast, file)
+        res.push([newAst, file])
+
+        if (verbose) {
+          console.log(`[process] ${fp} -> ${file.data.slug}`)
+        }
+      } catch (err) {
+        console.log(chalk.red(`Failed to process \`${fp}\`: `) + err)
+        process.exit(1)
+      }
+    }
+
+    return res
+  }
+}
+
 export async function parseMarkdown(transformers: QuartzTransformerPlugin[], baseDir: string, fps: string[], verbose: boolean): Promise<ProcessedContent[]> {
   const perf = new PerfTimer()
+  const log = new QuartzLogger(verbose)
 
   const CHUNK_SIZE = 128
   let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()
-  const res: ProcessedContent[] = []
+  let res: ProcessedContent[] = []
 
+  log.start(`Parsing input files using ${concurrency} threads`)
   if (concurrency === 1) {
     // single-thread
     const processor = createProcessor(transformers)
-    for (const fp of fps) {
-      const file = await read(fp)
-
-      // base data properties that plugins may use
-      file.data.slug = slugify(path.relative(baseDir, file.path))
-      file.data.filePath = fp
-
-      const ast = processor.parse(file)
-      res.push([await processor.run(ast, file), file])
-
-      if (verbose) {
-        console.log(`[process] ${fp} -> ${file.data.slug}`)
-      }
-    }
+    const parse = createFileParser(baseDir, fps, verbose)
+    res = await parse(processor)
   } else {
-    await transpileWorkerScript(verbose)
+    await transpileWorkerScript()
     const pool = workerpool.pool(
       './quartz/bootstrap-worker.mjs',
       {
@@ -113,11 +130,12 @@ export async function parseMarkdown(transformers: QuartzTransformerPlugin[], bas
     for (const chunk of chunks(fps, CHUNK_SIZE)) {
       childPromises.push(pool.exec('parseFiles', [baseDir, chunk, verbose]))
     }
 
     const results: ProcessedContent[][] = await WorkerPromise.all(childPromises)
-    res.push(...results.flat())
+    res = results.flat()
     await pool.terminate()
   }
 
-  console.log(`Parsed and transformed ${res.length} Markdown files with ${concurrency} cores in ${perf.timeSince()}`)
+  log.success(`Parsed ${res.length} Markdown files in ${perf.timeSince()}`)
   return res
 }
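
The progress output in parseMarkdown now goes through a QuartzLogger imported from ../log, with log.start(...) while parsing runs and log.success(...) when it finishes; the logger itself (the "spinners" of the commit title) is not part of these hunks. Below is a minimal sketch of a spinner-style logger exposing that start/success interface, assuming it only animates a status line when verbose per-file logging is off; everything beyond the constructor and the two method names used above is an assumption.

// Sketch only: quartz/log.ts is not shown in this commit, so all details here are assumed.
export class QuartzLogger {
  private verbose: boolean
  private interval: ReturnType<typeof setInterval> | undefined
  private spinnerFrames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]

  constructor(verbose: boolean) {
    this.verbose = verbose
  }

  start(text: string) {
    if (this.verbose) {
      // In verbose mode, per-file logs would garble an animated line, so print the message once.
      console.log(text)
      return
    }
    let i = 0
    this.interval = setInterval(() => {
      process.stdout.write(`\r${this.spinnerFrames[i]} ${text}`)
      i = (i + 1) % this.spinnerFrames.length
    }, 80)
  }

  success(text: string) {
    if (this.interval !== undefined) {
      clearInterval(this.interval)
      process.stdout.write("\r")
    }
    console.log(text)
  }
}
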
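In the multi-threaded branch, transpileWorkerScript bundles ./quartz/worker.ts and the pool runs pool.exec('parseFiles', [baseDir, chunk, verbose]) for each chunk of 128 files. The worker module is not shown in this commit; a plausible sketch of how it could reuse createProcessor and the new createFileParser follows (the import paths and the way the transformer list is obtained from the config are assumptions):

// Sketch of the worker entry point -- not part of this diff; paths and config access are assumed.
import workerpool from 'workerpool'
import cfg from '../quartz.config'
import { createProcessor, createFileParser } from './processors/parse'
import { ProcessedContent } from './plugins/vfile'

// Runs in a worker thread: parse one chunk of files and return the results to the pool.
async function parseFiles(baseDir: string, fps: string[], verbose: boolean): Promise<ProcessedContent[]> {
  const processor = createProcessor(cfg.plugins.transformers)
  const parse = createFileParser(baseDir, fps, verbose)
  return parse(processor)
}

// workerpool dispatches pool.exec('parseFiles', ...) calls to this registered function.
workerpool.worker({ parseFiles })

With this shape, the same createFileParser code path serves both the single-threaded branch and each worker chunk, which is presumably why it was factored out in this commit.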