Compare commits

..

17 Commits

Author SHA1 Message Date
Jacky Zhao
7681a86815 incremental all the things 2025-03-15 22:00:37 -07:00
Jacky Zhao
f528d6139e checkpoint 2025-03-14 09:18:21 -07:00
Jacky Zhao
e26658f4ed fix(description): calculate description properly when first sentence is longer than max
Some checks are pending
Build and Test / build-and-test (macos-latest) (push) Waiting to run
Build and Test / build-and-test (windows-latest) (push) Waiting to run
Build and Test / build-and-test (ubuntu-latest) (push) Has been skipped
Build and Test / publish-tag (push) Has been skipped
2025-03-13 14:22:11 -07:00
Jacky Zhao
b579950ae5 fix(og): adjust line clamp 2025-03-13 14:05:26 -07:00
Jacky Zhao
0babbdf780 fix(contentPage): dont emit folders 2025-03-13 13:57:00 -07:00
Jacky Zhao
1efe2e20a4 fix(graph): avoid keeping stroke in gfx 2025-03-13 12:29:30 -07:00
Jacky Zhao
5928d82a56 fix(og): search for font family properly 2025-03-13 12:11:27 -07:00
Jacky Zhao
696403d3fa chore: bump version to 4.4.1 2025-03-13 10:55:37 -07:00
Jacky Zhao
2c30abe457 fix(analytics): always use defer over async to account for document nav event 2025-03-13 10:48:03 -07:00
Jacky Zhao
80c3196fee feat(og): add reading time to default, improve logging 2025-03-13 10:41:50 -07:00
Jacky Zhao
d9159e0ac9 feat: make og images an emitter to properly await image generation (#1826)
* checkpoint

* make emitters async generators

* fix

* custom font spec

* replace spinner, use disk cache for fonts

* use readline instead

* make og images look nice
2025-03-13 10:27:46 -07:00
Jacky Zhao
c005fe4408 fix(explorer): properly respect folderDefaultState (closes #1827)
Some checks are pending
Build and Test / build-and-test (macos-latest) (push) Waiting to run
Build and Test / build-and-test (windows-latest) (push) Waiting to run
Build and Test / build-and-test (ubuntu-latest) (push) Has been skipped
Build and Test / publish-tag (push) Has been skipped
2025-03-12 22:31:44 -07:00
Emile Bangma
580c1bd608 fix(typography): properly pass Google font options (#1825)
Some checks are pending
Build and Test / build-and-test (macos-latest) (push) Waiting to run
Build and Test / build-and-test (windows-latest) (push) Waiting to run
Build and Test / build-and-test (ubuntu-latest) (push) Has been skipped
Build and Test / publish-tag (push) Has been skipped
2025-03-12 11:27:41 -07:00
Jacky Zhao
270a5dc14a fix(explorer): show file name instead of slug if no file data (closes #1822) 2025-03-12 11:24:28 -07:00
Jacky Zhao
bfa938cc62 fix(explorer): allow setting displayName (closes #1824) 2025-03-12 10:42:07 -07:00
Jacky Zhao
e3c50caf13 fix(explorer): dont invert mobile css, properly toggle .collapsed 2025-03-12 10:15:54 -07:00
Emile Bangma
ca08ec1ae7 fix(explorer): mobile explorer toggle (#1823)
Some checks are pending
Build and Test / build-and-test (macos-latest) (push) Waiting to run
Build and Test / build-and-test (windows-latest) (push) Waiting to run
Build and Test / build-and-test (ubuntu-latest) (push) Has been skipped
Build and Test / publish-tag (push) Has been skipped
2025-03-12 14:15:16 +01:00
43 changed files with 971 additions and 1236 deletions

View File

@@ -108,3 +108,25 @@ Some plugins are included by default in the [`quartz.config.ts`](https://github.
You can see a list of all plugins and their configuration options [[tags/plugin|here]]. You can see a list of all plugins and their configuration options [[tags/plugin|here]].
If you'd like to make your own plugins, see the [[making plugins|making custom plugins]] guide. If you'd like to make your own plugins, see the [[making plugins|making custom plugins]] guide.
## Fonts
Fonts can be specified as a `string` or a `FontSpecification`:
```ts
// string
typography: {
header: "Schibsted Grotesk",
...
}
// FontSpecification
typography: {
header: {
name: "Schibsted Grotesk",
weights: [400, 700],
includeItalic: true,
},
...
}
```

20
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{ {
"name": "@jackyzha0/quartz", "name": "@jackyzha0/quartz",
"version": "4.4.0", "version": "4.5.0",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "@jackyzha0/quartz", "name": "@jackyzha0/quartz",
"version": "4.4.0", "version": "4.5.0",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@clack/prompts": "^0.10.0", "@clack/prompts": "^0.10.0",
@@ -14,6 +14,7 @@
"@myriaddreamin/rehype-typst": "^0.5.4", "@myriaddreamin/rehype-typst": "^0.5.4",
"@napi-rs/simple-git": "0.1.19", "@napi-rs/simple-git": "0.1.19",
"@tweenjs/tween.js": "^25.0.0", "@tweenjs/tween.js": "^25.0.0",
"ansi-truncate": "^1.2.0",
"async-mutex": "^0.5.0", "async-mutex": "^0.5.0",
"chalk": "^5.4.1", "chalk": "^5.4.1",
"chokidar": "^4.0.3", "chokidar": "^4.0.3",
@@ -2032,6 +2033,15 @@
"url": "https://github.com/chalk/ansi-styles?sponsor=1" "url": "https://github.com/chalk/ansi-styles?sponsor=1"
} }
}, },
"node_modules/ansi-truncate": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/ansi-truncate/-/ansi-truncate-1.2.0.tgz",
"integrity": "sha512-/SLVrxNIP8o8iRHjdK3K9s2hDqdvb86NEjZOAB6ecWFsOo+9obaby97prnvAPn6j7ExXCpbvtlJFYPkkspg4BQ==",
"license": "MIT",
"dependencies": {
"fast-string-truncated-width": "^1.2.0"
}
},
"node_modules/argparse": { "node_modules/argparse": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
@@ -3058,6 +3068,12 @@
"node": ">=8.6.0" "node": ">=8.6.0"
} }
}, },
"node_modules/fast-string-truncated-width": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/fast-string-truncated-width/-/fast-string-truncated-width-1.2.1.tgz",
"integrity": "sha512-Q9acT/+Uu3GwGj+5w/zsGuQjh9O1TyywhIwAxHudtWrgF09nHOPrvTLhQevPbttcxjr/SNN7mJmfOw/B1bXgow==",
"license": "MIT"
},
"node_modules/fastq": { "node_modules/fastq": {
"version": "1.19.0", "version": "1.19.0",
"resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.0.tgz", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.0.tgz",

View File

@@ -2,7 +2,7 @@
"name": "@jackyzha0/quartz", "name": "@jackyzha0/quartz",
"description": "🌱 publish your digital garden and notes as a website", "description": "🌱 publish your digital garden and notes as a website",
"private": true, "private": true,
"version": "4.4.0", "version": "4.5.0",
"type": "module", "type": "module",
"author": "jackyzha0 <j.zhao2k19@gmail.com>", "author": "jackyzha0 <j.zhao2k19@gmail.com>",
"license": "MIT", "license": "MIT",
@@ -40,6 +40,7 @@
"@myriaddreamin/rehype-typst": "^0.5.4", "@myriaddreamin/rehype-typst": "^0.5.4",
"@napi-rs/simple-git": "0.1.19", "@napi-rs/simple-git": "0.1.19",
"@tweenjs/tween.js": "^25.0.0", "@tweenjs/tween.js": "^25.0.0",
"ansi-truncate": "^1.2.0",
"async-mutex": "^0.5.0", "async-mutex": "^0.5.0",
"chalk": "^5.4.1", "chalk": "^5.4.1",
"chokidar": "^4.0.3", "chokidar": "^4.0.3",

View File

@@ -57,7 +57,7 @@ const config: QuartzConfig = {
transformers: [ transformers: [
Plugin.FrontMatter(), Plugin.FrontMatter(),
Plugin.CreatedModifiedDate({ Plugin.CreatedModifiedDate({
priority: ["frontmatter", "filesystem"], priority: ["git", "frontmatter", "filesystem"],
}), }),
Plugin.SyntaxHighlighting({ Plugin.SyntaxHighlighting({
theme: { theme: {
@@ -87,6 +87,7 @@ const config: QuartzConfig = {
Plugin.Assets(), Plugin.Assets(),
Plugin.Static(), Plugin.Static(),
Plugin.NotFoundPage(), Plugin.NotFoundPage(),
// Comment out CustomOgImages to speed up build time
Plugin.CustomOgImages(), Plugin.CustomOgImages(),
], ],
}, },

View File

@@ -49,8 +49,15 @@ export const defaultListPageLayout: PageLayout = {
left: [ left: [
Component.PageTitle(), Component.PageTitle(),
Component.MobileOnly(Component.Spacer()), Component.MobileOnly(Component.Spacer()),
Component.Search(), Component.Flex({
Component.Darkmode(), components: [
{
Component: Component.Search(),
grow: true,
},
{ Component: Component.Darkmode() },
],
}),
Component.Explorer(), Component.Explorer(),
], ],
right: [], right: [],

View File

@@ -9,7 +9,7 @@ import { parseMarkdown } from "./processors/parse"
import { filterContent } from "./processors/filter" import { filterContent } from "./processors/filter"
import { emitContent } from "./processors/emit" import { emitContent } from "./processors/emit"
import cfg from "../quartz.config" import cfg from "../quartz.config"
import { FilePath, FullSlug, joinSegments, slugifyFilePath } from "./util/path" import { FilePath, joinSegments, slugifyFilePath } from "./util/path"
import chokidar from "chokidar" import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile" import { ProcessedContent } from "./plugins/vfile"
import { Argv, BuildCtx } from "./util/ctx" import { Argv, BuildCtx } from "./util/ctx"
@@ -17,34 +17,38 @@ import { glob, toPosixPath } from "./util/glob"
import { trace } from "./util/trace" import { trace } from "./util/trace"
import { options } from "./util/sourcemap" import { options } from "./util/sourcemap"
import { Mutex } from "async-mutex" import { Mutex } from "async-mutex"
import DepGraph from "./depgraph"
import { getStaticResourcesFromPlugins } from "./plugins" import { getStaticResourcesFromPlugins } from "./plugins"
import { randomIdNonSecure } from "./util/random" import { randomIdNonSecure } from "./util/random"
import { ChangeEvent } from "./plugins/types"
type Dependencies = Record<string, DepGraph<FilePath> | null> type ContentMap = Map<
FilePath,
| {
type: "markdown"
content: ProcessedContent
}
| {
type: "other"
}
>
type BuildData = { type BuildData = {
ctx: BuildCtx ctx: BuildCtx
ignored: GlobbyFilterFunction ignored: GlobbyFilterFunction
mut: Mutex mut: Mutex
initialSlugs: FullSlug[] contentMap: ContentMap
// TODO merge contentMap and trackedAssets changesSinceLastBuild: Record<FilePath, ChangeEvent["type"]>
contentMap: Map<FilePath, ProcessedContent>
trackedAssets: Set<FilePath>
toRebuild: Set<FilePath>
toRemove: Set<FilePath>
lastBuildMs: number lastBuildMs: number
dependencies: Dependencies
} }
type FileEvent = "add" | "change" | "delete"
async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) { async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
const ctx: BuildCtx = { const ctx: BuildCtx = {
buildId: randomIdNonSecure(), buildId: randomIdNonSecure(),
argv, argv,
cfg, cfg,
allSlugs: [], allSlugs: [],
allFiles: [],
incremental: false,
} }
const perf = new PerfTimer() const perf = new PerfTimer()
@@ -67,64 +71,58 @@ async function buildQuartz(argv: Argv, mut: Mutex, clientRefresh: () => void) {
perf.addEvent("glob") perf.addEvent("glob")
const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns) const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
const fps = allFiles.filter((fp) => fp.endsWith(".md")).sort() const markdownPaths = allFiles.filter((fp) => fp.endsWith(".md")).sort()
console.log( console.log(
`Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`, `Found ${markdownPaths.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
) )
const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath) const filePaths = markdownPaths.map((fp) => joinSegments(argv.directory, fp) as FilePath)
ctx.allFiles = allFiles
ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath)) ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))
const parsedFiles = await parseMarkdown(ctx, filePaths) const parsedFiles = await parseMarkdown(ctx, filePaths)
const filteredContent = filterContent(ctx, parsedFiles) const filteredContent = filterContent(ctx, parsedFiles)
const dependencies: Record<string, DepGraph<FilePath> | null> = {}
// Only build dependency graphs if we're doing a fast rebuild
if (argv.fastRebuild) {
const staticResources = getStaticResourcesFromPlugins(ctx)
for (const emitter of cfg.plugins.emitters) {
dependencies[emitter.name] =
(await emitter.getDependencyGraph?.(ctx, filteredContent, staticResources)) ?? null
}
}
await emitContent(ctx, filteredContent) await emitContent(ctx, filteredContent)
console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`)) console.log(chalk.green(`Done processing ${markdownPaths.length} files in ${perf.timeSince()}`))
release() release()
if (argv.serve) { if (argv.watch) {
return startServing(ctx, mut, parsedFiles, clientRefresh, dependencies) ctx.incremental = true
return startWatching(ctx, mut, parsedFiles, clientRefresh)
} }
} }
// setup watcher for rebuilds // setup watcher for rebuilds
async function startServing( async function startWatching(
ctx: BuildCtx, ctx: BuildCtx,
mut: Mutex, mut: Mutex,
initialContent: ProcessedContent[], initialContent: ProcessedContent[],
clientRefresh: () => void, clientRefresh: () => void,
dependencies: Dependencies, // emitter name: dep graph
) { ) {
const { argv } = ctx const { argv, allFiles } = ctx
const contentMap: ContentMap = new Map()
for (const filePath of allFiles) {
contentMap.set(filePath, {
type: "other",
})
}
// cache file parse results
const contentMap = new Map<FilePath, ProcessedContent>()
for (const content of initialContent) { for (const content of initialContent) {
const [_tree, vfile] = content const [_tree, vfile] = content
contentMap.set(vfile.data.filePath!, content) contentMap.set(vfile.data.relativePath!, {
type: "markdown",
content,
})
} }
const buildData: BuildData = { const buildData: BuildData = {
ctx, ctx,
mut, mut,
dependencies,
contentMap, contentMap,
ignored: await isGitIgnored(), ignored: await isGitIgnored(),
initialSlugs: ctx.allSlugs, changesSinceLastBuild: {},
toRebuild: new Set<FilePath>(),
toRemove: new Set<FilePath>(),
trackedAssets: new Set<FilePath>(),
lastBuildMs: 0, lastBuildMs: 0,
} }
@@ -134,31 +132,33 @@ async function startServing(
ignoreInitial: true, ignoreInitial: true,
}) })
const buildFromEntry = argv.fastRebuild ? partialRebuildFromEntrypoint : rebuildFromEntrypoint const changes: ChangeEvent[] = []
watcher watcher
.on("add", (fp) => buildFromEntry(fp as string, "add", clientRefresh, buildData)) .on("add", (fp) => {
.on("change", (fp) => buildFromEntry(fp as string, "change", clientRefresh, buildData)) if (buildData.ignored(fp)) return
.on("unlink", (fp) => buildFromEntry(fp as string, "delete", clientRefresh, buildData)) changes.push({ path: fp as FilePath, type: "add" })
rebuild(changes, clientRefresh, buildData)
})
.on("change", (fp) => {
if (buildData.ignored(fp)) return
changes.push({ path: fp as FilePath, type: "change" })
rebuild(changes, clientRefresh, buildData)
})
.on("unlink", (fp) => {
if (buildData.ignored(fp)) return
changes.push({ path: fp as FilePath, type: "delete" })
rebuild(changes, clientRefresh, buildData)
})
return async () => { return async () => {
await watcher.close() await watcher.close()
} }
} }
async function partialRebuildFromEntrypoint( async function rebuild(changes: ChangeEvent[], clientRefresh: () => void, buildData: BuildData) {
filepath: string, const { ctx, contentMap, mut, changesSinceLastBuild } = buildData
action: FileEvent,
clientRefresh: () => void,
buildData: BuildData, // note: this function mutates buildData
) {
const { ctx, ignored, dependencies, contentMap, mut, toRemove } = buildData
const { argv, cfg } = ctx const { argv, cfg } = ctx
// don't do anything for gitignored files
if (ignored(filepath)) {
return
}
const buildId = randomIdNonSecure() const buildId = randomIdNonSecure()
ctx.buildId = buildId ctx.buildId = buildId
buildData.lastBuildMs = new Date().getTime() buildData.lastBuildMs = new Date().getTime()
@@ -171,261 +171,102 @@ async function partialRebuildFromEntrypoint(
} }
const perf = new PerfTimer() const perf = new PerfTimer()
perf.addEvent("rebuild")
console.log(chalk.yellow("Detected change, rebuilding...")) console.log(chalk.yellow("Detected change, rebuilding..."))
// UPDATE DEP GRAPH // update changesSinceLastBuild
const fp = joinSegments(argv.directory, toPosixPath(filepath)) as FilePath for (const change of changes) {
changesSinceLastBuild[change.path] = change.type
}
const staticResources = getStaticResourcesFromPlugins(ctx) const staticResources = getStaticResourcesFromPlugins(ctx)
let processedFiles: ProcessedContent[] = [] for (const [fp, type] of Object.entries(changesSinceLastBuild)) {
if (type === "delete" || path.extname(fp) !== ".md") continue
switch (action) { const fullPath = joinSegments(argv.directory, toPosixPath(fp)) as FilePath
case "add": const parsed = await parseMarkdown(ctx, [fullPath])
// add to cache when new file is added for (const content of parsed) {
processedFiles = await parseMarkdown(ctx, [fp]) contentMap.set(content[1].data.relativePath!, {
processedFiles.forEach(([tree, vfile]) => contentMap.set(vfile.data.filePath!, [tree, vfile])) type: "markdown",
content,
// update the dep graph by asking all emitters whether they depend on this file })
for (const emitter of cfg.plugins.emitters) { }
const emitterGraph =
(await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null
if (emitterGraph) {
const existingGraph = dependencies[emitter.name]
if (existingGraph !== null) {
existingGraph.mergeGraph(emitterGraph)
} else {
// might be the first time we're adding a mardown file
dependencies[emitter.name] = emitterGraph
}
}
}
break
case "change":
// invalidate cache when file is changed
processedFiles = await parseMarkdown(ctx, [fp])
processedFiles.forEach(([tree, vfile]) => contentMap.set(vfile.data.filePath!, [tree, vfile]))
// only content files can have added/removed dependencies because of transclusions
if (path.extname(fp) === ".md") {
for (const emitter of cfg.plugins.emitters) {
// get new dependencies from all emitters for this file
const emitterGraph =
(await emitter.getDependencyGraph?.(ctx, processedFiles, staticResources)) ?? null
// only update the graph if the emitter plugin uses the changed file
// eg. Assets plugin ignores md files, so we skip updating the graph
if (emitterGraph?.hasNode(fp)) {
// merge the new dependencies into the dep graph
dependencies[emitter.name]?.updateIncomingEdgesForNode(emitterGraph, fp)
}
}
}
break
case "delete":
toRemove.add(fp)
break
} }
if (argv.verbose) { // update state using changesSinceLastBuild
console.log(`Updated dependency graphs in ${perf.timeSince()}`) // we do this weird play of add => compute change events => remove
// so that partialEmitters can do appropriate cleanup based on the content of deleted files
for (const [file, change] of Object.entries(changesSinceLastBuild)) {
if (change === "delete") {
// universal delete case
contentMap.delete(file as FilePath)
}
// manually track non-markdown files as processed files only
// contains markdown files
if (change === "add" && path.extname(file) !== ".md") {
contentMap.set(file as FilePath, {
type: "other",
})
}
} }
// EMIT const changeEvents: ChangeEvent[] = Object.entries(changesSinceLastBuild).map(([fp, type]) => {
perf.addEvent("rebuild") const path = fp as FilePath
const processedContent = contentMap.get(path)
if (processedContent?.type === "markdown") {
const [_tree, file] = processedContent.content
return {
type,
path,
file,
}
}
return {
type,
path,
}
})
// update allFiles and then allSlugs with the consistent view of content map
ctx.allFiles = Array.from(contentMap.keys())
ctx.allSlugs = ctx.allFiles.map((fp) => slugifyFilePath(fp as FilePath))
const processedFiles = Array.from(contentMap.values())
.filter((file) => file.type === "markdown")
.map((file) => file.content)
let emittedFiles = 0 let emittedFiles = 0
for (const emitter of cfg.plugins.emitters) { for (const emitter of cfg.plugins.emitters) {
const depGraph = dependencies[emitter.name] // Try to use partialEmit if available, otherwise assume the output is static
const emitFn = emitter.partialEmit ?? emitter.emit
// emitter hasn't defined a dependency graph. call it with all processed files const emitted = await emitFn(ctx, processedFiles, staticResources, changeEvents)
if (depGraph === null) { if (emitted === null) {
if (argv.verbose) {
console.log(
`Emitter ${emitter.name} doesn't define a dependency graph. Calling it with all files...`,
)
}
const files = [...contentMap.values()].filter(
([_node, vfile]) => !toRemove.has(vfile.data.filePath!),
)
const emitted = await emitter.emit(ctx, files, staticResources)
if (Symbol.asyncIterator in emitted) {
// Async generator case
for await (const file of emitted) {
emittedFiles++
if (ctx.argv.verbose) {
console.log(`[emit:${emitter.name}] ${file}`)
}
}
} else {
// Array case
emittedFiles += emitted.length
if (ctx.argv.verbose) {
for (const file of emitted) {
console.log(`[emit:${emitter.name}] ${file}`)
}
}
}
continue continue
} }
// only call the emitter if it uses this file if (Symbol.asyncIterator in emitted) {
if (depGraph.hasNode(fp)) { // Async generator case
// re-emit using all files that are needed for the downstream of this file for await (const file of emitted) {
// eg. for ContentIndex, the dep graph could be: emittedFiles++
// a.md --> contentIndex.json
// b.md ------^
//
// if a.md changes, we need to re-emit contentIndex.json,
// and supply [a.md, b.md] to the emitter
const upstreams = [...depGraph.getLeafNodeAncestors(fp)] as FilePath[]
const upstreamContent = upstreams
// filter out non-markdown files
.filter((file) => contentMap.has(file))
// if file was deleted, don't give it to the emitter
.filter((file) => !toRemove.has(file))
.map((file) => contentMap.get(file)!)
const emitted = await emitter.emit(ctx, upstreamContent, staticResources)
if (Symbol.asyncIterator in emitted) {
// Async generator case
for await (const file of emitted) {
emittedFiles++
if (ctx.argv.verbose) {
console.log(`[emit:${emitter.name}] ${file}`)
}
}
} else {
// Array case
emittedFiles += emitted.length
if (ctx.argv.verbose) { if (ctx.argv.verbose) {
for (const file of emitted) { console.log(`[emit:${emitter.name}] ${file}`)
console.log(`[emit:${emitter.name}] ${file}`) }
} }
} else {
// Array case
emittedFiles += emitted.length
if (ctx.argv.verbose) {
for (const file of emitted) {
console.log(`[emit:${emitter.name}] ${file}`)
} }
} }
} }
} }
console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`) console.log(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince("rebuild")}`)
// CLEANUP
const destinationsToDelete = new Set<FilePath>()
for (const file of toRemove) {
// remove from cache
contentMap.delete(file)
Object.values(dependencies).forEach((depGraph) => {
// remove the node from dependency graphs
depGraph?.removeNode(file)
// remove any orphan nodes. eg if a.md is deleted, a.html is orphaned and should be removed
const orphanNodes = depGraph?.removeOrphanNodes()
orphanNodes?.forEach((node) => {
// only delete files that are in the output directory
if (node.startsWith(argv.output)) {
destinationsToDelete.add(node)
}
})
})
}
await rimraf([...destinationsToDelete])
console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`)) console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
changes.length = 0
toRemove.clear()
release()
clientRefresh() clientRefresh()
}
async function rebuildFromEntrypoint(
fp: string,
action: FileEvent,
clientRefresh: () => void,
buildData: BuildData, // note: this function mutates buildData
) {
const { ctx, ignored, mut, initialSlugs, contentMap, toRebuild, toRemove, trackedAssets } =
buildData
const { argv } = ctx
// don't do anything for gitignored files
if (ignored(fp)) {
return
}
// dont bother rebuilding for non-content files, just track and refresh
fp = toPosixPath(fp)
const filePath = joinSegments(argv.directory, fp) as FilePath
if (path.extname(fp) !== ".md") {
if (action === "add" || action === "change") {
trackedAssets.add(filePath)
} else if (action === "delete") {
trackedAssets.delete(filePath)
}
clientRefresh()
return
}
if (action === "add" || action === "change") {
toRebuild.add(filePath)
} else if (action === "delete") {
toRemove.add(filePath)
}
const buildId = randomIdNonSecure()
ctx.buildId = buildId
buildData.lastBuildMs = new Date().getTime()
const release = await mut.acquire()
// there's another build after us, release and let them do it
if (ctx.buildId !== buildId) {
release()
return
}
const perf = new PerfTimer()
console.log(chalk.yellow("Detected change, rebuilding..."))
try {
const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))
const parsedContent = await parseMarkdown(ctx, filesToRebuild)
for (const content of parsedContent) {
const [_tree, vfile] = content
contentMap.set(vfile.data.filePath!, content)
}
for (const fp of toRemove) {
contentMap.delete(fp)
}
const parsedFiles = [...contentMap.values()]
const filteredContent = filterContent(ctx, parsedFiles)
// re-update slugs
const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
.filter((fp) => !toRemove.has(fp))
.map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))
ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
// TODO: we can probably traverse the link graph to figure out what's safe to delete here
// instead of just deleting everything
await rimraf(path.join(argv.output, ".*"), { glob: true })
await emitContent(ctx, filteredContent)
console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
} catch (err) {
console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
if (argv.verbose) {
console.log(chalk.red(err))
}
}
clientRefresh()
toRebuild.clear()
toRemove.clear()
release() release()
} }

View File

@@ -2,7 +2,6 @@ import { ValidDateType } from "./components/Date"
import { QuartzComponent } from "./components/types" import { QuartzComponent } from "./components/types"
import { ValidLocale } from "./i18n" import { ValidLocale } from "./i18n"
import { PluginTypes } from "./plugins/types" import { PluginTypes } from "./plugins/types"
import { SocialImageOptions } from "./util/og"
import { Theme } from "./util/theme" import { Theme } from "./util/theme"
export type Analytics = export type Analytics =

View File

@@ -71,10 +71,10 @@ export const BuildArgv = {
default: false, default: false,
describe: "run a local server to live-preview your Quartz", describe: "run a local server to live-preview your Quartz",
}, },
fastRebuild: { watch: {
boolean: true, boolean: true,
default: false, default: false,
describe: "[experimental] rebuild only the changed files", describe: "watch for changes and rebuild automatically",
}, },
baseDir: { baseDir: {
string: true, string: true,

View File

@@ -225,6 +225,10 @@ See the [documentation](https://quartz.jzhao.xyz) for how to get started.
* @param {*} argv arguments for `build` * @param {*} argv arguments for `build`
*/ */
export async function handleBuild(argv) { export async function handleBuild(argv) {
if (argv.serve) {
argv.watch = true
}
console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`)) console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`))
const ctx = await esbuild.context({ const ctx = await esbuild.context({
entryPoints: [fp], entryPoints: [fp],
@@ -331,9 +335,10 @@ export async function handleBuild(argv) {
clientRefresh() clientRefresh()
} }
let clientRefresh = () => {}
if (argv.serve) { if (argv.serve) {
const connections = [] const connections = []
const clientRefresh = () => connections.forEach((conn) => conn.send("rebuild")) clientRefresh = () => connections.forEach((conn) => conn.send("rebuild"))
if (argv.baseDir !== "" && !argv.baseDir.startsWith("/")) { if (argv.baseDir !== "" && !argv.baseDir.startsWith("/")) {
argv.baseDir = "/" + argv.baseDir argv.baseDir = "/" + argv.baseDir
@@ -433,6 +438,7 @@ export async function handleBuild(argv) {
return serve() return serve()
}) })
server.listen(argv.port) server.listen(argv.port)
const wss = new WebSocketServer({ port: argv.wsPort }) const wss = new WebSocketServer({ port: argv.wsPort })
wss.on("connection", (ws) => connections.push(ws)) wss.on("connection", (ws) => connections.push(ws))
@@ -441,16 +447,27 @@ export async function handleBuild(argv) {
`Started a Quartz server listening at http://localhost:${argv.port}${argv.baseDir}`, `Started a Quartz server listening at http://localhost:${argv.port}${argv.baseDir}`,
), ),
) )
console.log("hint: exit with ctrl+c") } else {
const paths = await globby(["**/*.ts", "**/*.tsx", "**/*.scss", "package.json"]) await build(clientRefresh)
ctx.dispose()
}
if (argv.watch) {
const paths = await globby([
"**/*.ts",
"quartz/cli/*.js",
"quartz/static/**/*",
"**/*.tsx",
"**/*.scss",
"package.json",
])
chokidar chokidar
.watch(paths, { ignoreInitial: true }) .watch(paths, { ignoreInitial: true })
.on("add", () => build(clientRefresh)) .on("add", () => build(clientRefresh))
.on("change", () => build(clientRefresh)) .on("change", () => build(clientRefresh))
.on("unlink", () => build(clientRefresh)) .on("unlink", () => build(clientRefresh))
} else {
await build(() => {}) console.log(chalk.grey("hint: exit with ctrl+c"))
ctx.dispose()
} }
} }

View File

@@ -9,7 +9,6 @@ import { visit } from "unist-util-visit"
import { Root, Element, ElementContent } from "hast" import { Root, Element, ElementContent } from "hast"
import { GlobalConfiguration } from "../cfg" import { GlobalConfiguration } from "../cfg"
import { i18n } from "../i18n" import { i18n } from "../i18n"
import { QuartzPluginData } from "../plugins/vfile"
interface RenderComponents { interface RenderComponents {
head: QuartzComponent head: QuartzComponent
@@ -25,7 +24,6 @@ interface RenderComponents {
const headerRegex = new RegExp(/h[1-6]/) const headerRegex = new RegExp(/h[1-6]/)
export function pageResources( export function pageResources(
baseDir: FullSlug | RelativeURL, baseDir: FullSlug | RelativeURL,
fileData: QuartzPluginData,
staticResources: StaticResources, staticResources: StaticResources,
): StaticResources { ): StaticResources {
const contentIndexPath = joinSegments(baseDir, "static/contentIndex.json") const contentIndexPath = joinSegments(baseDir, "static/contentIndex.json")
@@ -65,17 +63,12 @@ export function pageResources(
return resources return resources
} }
export function renderPage( function renderTranscludes(
root: Root,
cfg: GlobalConfiguration, cfg: GlobalConfiguration,
slug: FullSlug, slug: FullSlug,
componentData: QuartzComponentProps, componentData: QuartzComponentProps,
components: RenderComponents, ) {
pageResources: StaticResources,
): string {
// make a deep copy of the tree so we don't remove the transclusion references
// for the file cached in contentMap in build.ts
const root = clone(componentData.tree) as Root
// process transcludes in componentData // process transcludes in componentData
visit(root, "element", (node, _index, _parent) => { visit(root, "element", (node, _index, _parent) => {
if (node.tagName === "blockquote") { if (node.tagName === "blockquote") {
@@ -191,6 +184,19 @@ export function renderPage(
} }
} }
}) })
}
export function renderPage(
cfg: GlobalConfiguration,
slug: FullSlug,
componentData: QuartzComponentProps,
components: RenderComponents,
pageResources: StaticResources,
): string {
// make a deep copy of the tree so we don't remove the transclusion references
// for the file cached in contentMap in build.ts
const root = clone(componentData.tree) as Root
renderTranscludes(root, cfg, slug, componentData)
// set componentData.tree to the edited html that has transclusions rendered // set componentData.tree to the edited html that has transclusions rendered
componentData.tree = root componentData.tree = root

View File

@@ -10,7 +10,7 @@ const emitThemeChangeEvent = (theme: "light" | "dark") => {
} }
document.addEventListener("nav", () => { document.addEventListener("nav", () => {
const switchTheme = (e: Event) => { const switchTheme = () => {
const newTheme = const newTheme =
document.documentElement.getAttribute("saved-theme") === "dark" ? "light" : "dark" document.documentElement.getAttribute("saved-theme") === "dark" ? "light" : "dark"
document.documentElement.setAttribute("saved-theme", newTheme) document.documentElement.setAttribute("saved-theme", newTheme)

View File

@@ -161,7 +161,7 @@ async function setupExplorer(currentSlug: FullSlug) {
// Get folder state from local storage // Get folder state from local storage
const storageTree = localStorage.getItem("fileTree") const storageTree = localStorage.getItem("fileTree")
const serializedExplorerState = storageTree && opts.useSavedState ? JSON.parse(storageTree) : [] const serializedExplorerState = storageTree && opts.useSavedState ? JSON.parse(storageTree) : []
const oldIndex = new Map( const oldIndex = new Map<string, boolean>(
serializedExplorerState.map((entry: FolderState) => [entry.path, entry.collapsed]), serializedExplorerState.map((entry: FolderState) => [entry.path, entry.collapsed]),
) )
@@ -186,10 +186,14 @@ async function setupExplorer(currentSlug: FullSlug) {
// Get folder paths for state management // Get folder paths for state management
const folderPaths = trie.getFolderPaths() const folderPaths = trie.getFolderPaths()
currentExplorerState = folderPaths.map((path) => ({ currentExplorerState = folderPaths.map((path) => {
path, const previousState = oldIndex.get(path)
collapsed: oldIndex.get(path) === true, return {
})) path,
collapsed:
previousState === undefined ? opts.folderDefaultState === "collapsed" : previousState,
}
})
const explorerUl = explorer.querySelector(".explorer-ul") const explorerUl = explorer.querySelector(".explorer-ul")
if (!explorerUl) continue if (!explorerUl) continue
@@ -259,15 +263,17 @@ document.addEventListener("nav", async (e: CustomEventMap["nav"]) => {
await setupExplorer(currentSlug) await setupExplorer(currentSlug)
// if mobile hamburger is visible, collapse by default // if mobile hamburger is visible, collapse by default
for (const explorer of document.getElementsByClassName("mobile-explorer")) { for (const explorer of document.getElementsByClassName("explorer")) {
if (explorer.checkVisibility()) { const mobileExplorer = explorer.querySelector(".mobile-explorer")
if (!mobileExplorer) return
if (mobileExplorer.checkVisibility()) {
explorer.classList.add("collapsed") explorer.classList.add("collapsed")
explorer.setAttribute("aria-expanded", "false") explorer.setAttribute("aria-expanded", "false")
} }
}
const hiddenUntilDoneLoading = document.querySelector("#mobile-explorer") mobileExplorer.classList.remove("hide-until-loaded")
hiddenUntilDoneLoading?.classList.remove("hide-until-loaded") }
}) })
function setFolderState(folderElement: HTMLElement, collapsed: boolean) { function setFolderState(folderElement: HTMLElement, collapsed: boolean) {

View File

@@ -400,7 +400,6 @@ async function renderGraph(graph: HTMLElement, fullSlug: FullSlug) {
}) })
.circle(0, 0, nodeRadius(n)) .circle(0, 0, nodeRadius(n))
.fill({ color: isTagNode ? computedStyleMap["--light"] : color(n) }) .fill({ color: isTagNode ? computedStyleMap["--light"] : color(n) })
.stroke({ width: isTagNode ? 2 : 0, color: color(n) })
.on("pointerover", (e) => { .on("pointerover", (e) => {
updateHoverInfo(e.target.label) updateHoverInfo(e.target.label)
oldLabelOpacity = label.alpha oldLabelOpacity = label.alpha
@@ -416,6 +415,10 @@ async function renderGraph(graph: HTMLElement, fullSlug: FullSlug) {
} }
}) })
if (isTagNode) {
gfx.stroke({ width: 2, color: computedStyleMap["--tertiary"] })
}
nodesContainer.addChild(gfx) nodesContainer.addChild(gfx)
labelsContainer.addChild(label) labelsContainer.addChild(label)

View File

@@ -1,118 +0,0 @@
import test, { describe } from "node:test"
import DepGraph from "./depgraph"
import assert from "node:assert"
describe("DepGraph", () => {
test("getLeafNodes", () => {
const graph = new DepGraph<string>()
graph.addEdge("A", "B")
graph.addEdge("B", "C")
graph.addEdge("D", "C")
assert.deepStrictEqual(graph.getLeafNodes("A"), new Set(["C"]))
assert.deepStrictEqual(graph.getLeafNodes("B"), new Set(["C"]))
assert.deepStrictEqual(graph.getLeafNodes("C"), new Set(["C"]))
assert.deepStrictEqual(graph.getLeafNodes("D"), new Set(["C"]))
})
describe("getLeafNodeAncestors", () => {
test("gets correct ancestors in a graph without cycles", () => {
const graph = new DepGraph<string>()
graph.addEdge("A", "B")
graph.addEdge("B", "C")
graph.addEdge("D", "B")
assert.deepStrictEqual(graph.getLeafNodeAncestors("A"), new Set(["A", "B", "D"]))
assert.deepStrictEqual(graph.getLeafNodeAncestors("B"), new Set(["A", "B", "D"]))
assert.deepStrictEqual(graph.getLeafNodeAncestors("C"), new Set(["A", "B", "D"]))
assert.deepStrictEqual(graph.getLeafNodeAncestors("D"), new Set(["A", "B", "D"]))
})
test("gets correct ancestors in a graph with cycles", () => {
const graph = new DepGraph<string>()
graph.addEdge("A", "B")
graph.addEdge("B", "C")
graph.addEdge("C", "A")
graph.addEdge("C", "D")
assert.deepStrictEqual(graph.getLeafNodeAncestors("A"), new Set(["A", "B", "C"]))
assert.deepStrictEqual(graph.getLeafNodeAncestors("B"), new Set(["A", "B", "C"]))
assert.deepStrictEqual(graph.getLeafNodeAncestors("C"), new Set(["A", "B", "C"]))
assert.deepStrictEqual(graph.getLeafNodeAncestors("D"), new Set(["A", "B", "C"]))
})
})
describe("mergeGraph", () => {
test("merges two graphs", () => {
const graph = new DepGraph<string>()
graph.addEdge("A.md", "A.html")
const other = new DepGraph<string>()
other.addEdge("B.md", "B.html")
graph.mergeGraph(other)
const expected = {
nodes: ["A.md", "A.html", "B.md", "B.html"],
edges: [
["A.md", "A.html"],
["B.md", "B.html"],
],
}
assert.deepStrictEqual(graph.export(), expected)
})
})
describe("updateIncomingEdgesForNode", () => {
test("merges when node exists", () => {
// A.md -> B.md -> B.html
const graph = new DepGraph<string>()
graph.addEdge("A.md", "B.md")
graph.addEdge("B.md", "B.html")
// B.md is edited so it removes the A.md transclusion
// and adds C.md transclusion
// C.md -> B.md
const other = new DepGraph<string>()
other.addEdge("C.md", "B.md")
other.addEdge("B.md", "B.html")
// A.md -> B.md removed, C.md -> B.md added
// C.md -> B.md -> B.html
graph.updateIncomingEdgesForNode(other, "B.md")
const expected = {
nodes: ["A.md", "B.md", "B.html", "C.md"],
edges: [
["B.md", "B.html"],
["C.md", "B.md"],
],
}
assert.deepStrictEqual(graph.export(), expected)
})
test("adds node if it does not exist", () => {
// A.md -> B.md
const graph = new DepGraph<string>()
graph.addEdge("A.md", "B.md")
// Add a new file C.md that transcludes B.md
// B.md -> C.md
const other = new DepGraph<string>()
other.addEdge("B.md", "C.md")
// B.md -> C.md added
// A.md -> B.md -> C.md
graph.updateIncomingEdgesForNode(other, "C.md")
const expected = {
nodes: ["A.md", "B.md", "C.md"],
edges: [
["A.md", "B.md"],
["B.md", "C.md"],
],
}
assert.deepStrictEqual(graph.export(), expected)
})
})
})

View File

@@ -1,228 +0,0 @@
/**
 * A simple directed dependency graph.
 *
 * Every node stores both its incoming and its outgoing neighbor set, so
 * forward and reverse traversals are O(degree) instead of O(E).
 * Node and edge iteration order follows insertion order (Map/Set semantics).
 */
export default class DepGraph<T> {
  // node -> { incoming, outgoing } neighbor sets
  _graph = new Map<T, { incoming: Set<T>; outgoing: Set<T> }>()

  /** Plain-object snapshot of the graph (insertion-ordered nodes and edges). */
  export(): { nodes: T[]; edges: [T, T][] } {
    return {
      nodes: this.nodes,
      edges: this.edges,
    }
  }

  toString(): string {
    return JSON.stringify(this.export(), null, 2)
  }

  // BASIC GRAPH OPERATIONS

  get nodes(): T[] {
    return Array.from(this._graph.keys())
  }

  get edges(): [T, T][] {
    const edges: [T, T][] = []
    this.forEachEdge((edge) => edges.push(edge))
    return edges
  }

  hasNode(node: T): boolean {
    return this._graph.has(node)
  }

  /** Adds `node` with empty neighbor sets; no-op if it already exists. */
  addNode(node: T): void {
    if (!this._graph.has(node)) {
      this._graph.set(node, { incoming: new Set(), outgoing: new Set() })
    }
  }

  /** Removes `node` and every edge connected to it; no-op if absent. */
  removeNode(node: T): void {
    const entry = this._graph.get(node)
    if (entry === undefined) return
    // detach all edges first so neighbors drop their references to this node;
    // iterate copies because removeEdge mutates these sets
    for (const target of [...entry.outgoing]) {
      this.removeEdge(node, target)
    }
    for (const source of [...entry.incoming]) {
      this.removeEdge(source, node)
    }
    this._graph.delete(node)
  }

  forEachNode(callback: (node: T) => void): void {
    for (const node of this._graph.keys()) {
      callback(node)
    }
  }

  hasEdge(from: T, to: T): boolean {
    return Boolean(this._graph.get(from)?.outgoing.has(to))
  }

  /** Adds the edge from -> to, creating either endpoint if missing. */
  addEdge(from: T, to: T): void {
    this.addNode(from)
    this.addNode(to)
    this._graph.get(from)!.outgoing.add(to)
    this._graph.get(to)!.incoming.add(from)
  }

  /** Removes the edge from -> to if both endpoints exist; nodes are kept. */
  removeEdge(from: T, to: T): void {
    if (this._graph.has(from) && this._graph.has(to)) {
      this._graph.get(from)!.outgoing.delete(to)
      this._graph.get(to)!.incoming.delete(from)
    }
  }

  /** Number of outgoing edges, or -1 if node does not exist. */
  outDegree(node: T): number {
    return this._graph.get(node)?.outgoing.size ?? -1
  }

  /** Number of incoming edges, or -1 if node does not exist. */
  inDegree(node: T): number {
    return this._graph.get(node)?.incoming.size ?? -1
  }

  forEachOutNeighbor(node: T, callback: (neighbor: T) => void): void {
    this._graph.get(node)?.outgoing.forEach(callback)
  }

  forEachInNeighbor(node: T, callback: (neighbor: T) => void): void {
    this._graph.get(node)?.incoming.forEach(callback)
  }

  forEachEdge(callback: (edge: [T, T]) => void): void {
    for (const [source, { outgoing }] of this._graph.entries()) {
      for (const target of outgoing) {
        callback([source, target])
      }
    }
  }

  // DEPENDENCY ALGORITHMS

  /**
   * Adds all edges (and their endpoint nodes) from `other` into this graph.
   * Nodes of `other` that have no edges are not copied (matches forEachEdge).
   */
  mergeGraph(other: DepGraph<T>): void {
    // addEdge creates missing endpoints, so no separate addNode calls needed
    other.forEachEdge(([source, target]) => {
      this.addEdge(source, target)
    })
  }

  /**
   * Synchronizes the incoming edges of `node` with those in `other`:
   * - if `node` does not exist here, it is added
   * - incoming edges present in `other` are added here
   * - incoming edges absent in `other` are removed here
   */
  updateIncomingEdgesForNode(other: DepGraph<T>, node: T): void {
    this.addNode(node)

    // mirror every incoming edge that `other` has for this node
    other.forEachInNeighbor(node, (neighbor) => {
      this.addEdge(neighbor, node)
    })

    // drop incoming edges that are absent in `other`; only this node's
    // incoming set needs scanning, not every edge in the graph
    // (iterate a copy because removeEdge mutates the set)
    for (const source of [...this._graph.get(node)!.incoming]) {
      if (!other.hasEdge(source, node)) {
        this.removeEdge(source, node)
      }
    }
  }

  /**
   * Removes all nodes with neither incoming nor outgoing edges and returns
   * them. A node may become orphaned when its last neighbor was removed.
   */
  removeOrphanNodes(): Set<T> {
    const orphanNodes = new Set<T>()
    this.forEachNode((node) => {
      if (this.inDegree(node) === 0 && this.outDegree(node) === 0) {
        orphanNodes.add(node)
      }
    })
    orphanNodes.forEach((node) => {
      this.removeNode(node)
    })
    return orphanNodes
  }

  /**
   * All leaf nodes (i.e. destination paths) reachable from `node`.
   * Eg. if the graph is A -> B -> C
   *                     D ---^
   * and the node is B, this function returns [C].
   */
  getLeafNodes(node: T): Set<T> {
    const visited = new Set<T>()
    const leafNodes = new Set<T>()
    const stack: T[] = [node]

    // iterative DFS along outgoing edges
    while (stack.length > 0) {
      const current = stack.pop()!
      if (visited.has(current)) {
        continue
      }
      visited.add(current)

      // a node with no outgoing edges is a destination path
      if (this.outDegree(current) === 0) {
        leafNodes.add(current)
      }

      this.forEachOutNeighbor(current, (neighbor) => {
        if (!visited.has(neighbor)) {
          stack.push(neighbor)
        }
      })
    }

    return leafNodes
  }

  /**
   * All ancestors of the leaf nodes reachable from `node`.
   * Eg. if the graph is A -> B -> C
   *                     D ---^
   * and the node is B, this function returns [A, B, D].
   */
  getLeafNodeAncestors(node: T): Set<T> {
    const leafNodes = this.getLeafNodes(node)
    const visited = new Set<T>()
    const upstreamNodes = new Set<T>()

    // backwards DFS from every reachable leaf
    leafNodes.forEach((leafNode) => {
      const stack: T[] = [leafNode]
      while (stack.length > 0) {
        const current = stack.pop()!
        if (visited.has(current)) {
          continue
        }
        visited.add(current)

        // leaves themselves (destination paths) are excluded;
        // assumes a destination file cannot depend on another destination file
        if (this.outDegree(current) !== 0) {
          upstreamNodes.add(current)
        }

        this.forEachInNeighbor(current, (parentNode) => {
          if (!visited.has(parentNode)) {
            stack.push(parentNode)
          }
        })
      }
    })

    return upstreamNodes
  }
}

View File

@@ -3,13 +3,12 @@ import { QuartzComponentProps } from "../../components/types"
import BodyConstructor from "../../components/Body" import BodyConstructor from "../../components/Body"
import { pageResources, renderPage } from "../../components/renderPage" import { pageResources, renderPage } from "../../components/renderPage"
import { FullPageLayout } from "../../cfg" import { FullPageLayout } from "../../cfg"
import { FilePath, FullSlug } from "../../util/path" import { FullSlug } from "../../util/path"
import { sharedPageComponents } from "../../../quartz.layout" import { sharedPageComponents } from "../../../quartz.layout"
import { NotFound } from "../../components" import { NotFound } from "../../components"
import { defaultProcessedContent } from "../vfile" import { defaultProcessedContent } from "../vfile"
import { write } from "./helpers" import { write } from "./helpers"
import { i18n } from "../../i18n" import { i18n } from "../../i18n"
import DepGraph from "../../depgraph"
export const NotFoundPage: QuartzEmitterPlugin = () => { export const NotFoundPage: QuartzEmitterPlugin = () => {
const opts: FullPageLayout = { const opts: FullPageLayout = {
@@ -28,9 +27,6 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
getQuartzComponents() { getQuartzComponents() {
return [Head, Body, pageBody, Footer] return [Head, Body, pageBody, Footer]
}, },
async getDependencyGraph(_ctx, _content, _resources) {
return new DepGraph<FilePath>()
},
async *emit(ctx, _content, resources) { async *emit(ctx, _content, resources) {
const cfg = ctx.cfg.configuration const cfg = ctx.cfg.configuration
const slug = "404" as FullSlug const slug = "404" as FullSlug
@@ -44,7 +40,7 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
description: notFound, description: notFound,
frontmatter: { title: notFound, tags: [] }, frontmatter: { title: notFound, tags: [] },
}) })
const externalResources = pageResources(path, vfile.data, resources) const externalResources = pageResources(path, resources)
const componentData: QuartzComponentProps = { const componentData: QuartzComponentProps = {
ctx, ctx,
fileData: vfile.data, fileData: vfile.data,
@@ -62,5 +58,6 @@ export const NotFoundPage: QuartzEmitterPlugin = () => {
ext: ".html", ext: ".html",
}) })
}, },
async *partialEmit() {},
} }
} }

View File

@@ -1,46 +1,47 @@
import { FilePath, joinSegments, resolveRelative, simplifySlug } from "../../util/path" import { resolveRelative, simplifySlug } from "../../util/path"
import { QuartzEmitterPlugin } from "../types" import { QuartzEmitterPlugin } from "../types"
import { write } from "./helpers" import { write } from "./helpers"
import DepGraph from "../../depgraph" import { BuildCtx } from "../../util/ctx"
import { getAliasSlugs } from "../transformers/frontmatter" import { VFile } from "vfile"
// Emits one HTML redirect stub per alias of `file`. Each stub is written at
// the alias slug and immediately redirects (meta refresh + canonical link) to
// the file's real location; robots are told not to index the stub.
async function* processFile(ctx: BuildCtx, file: VFile) {
  // canonical (simplified) slug of the real page, used as the stub's <title>
  const ogSlug = simplifySlug(file.data.slug!)
  // assumes file.data.aliases was populated by the frontmatter transformer —
  // TODO confirm against the transformer that sets it
  for (const slug of file.data.aliases ?? []) {
    // relative URL from the alias location to the real page
    const redirUrl = resolveRelative(slug, file.data.slug!)
    yield write({
      ctx,
      content: `
 <!DOCTYPE html>
 <html lang="en-us">
 <head>
 <title>${ogSlug}</title>
 <link rel="canonical" href="${redirUrl}">
 <meta name="robots" content="noindex">
 <meta charset="utf-8">
 <meta http-equiv="refresh" content="0; url=${redirUrl}">
 </head>
 </html>
 `,
      slug,
      ext: ".html",
    })
  }
}
export const AliasRedirects: QuartzEmitterPlugin = () => ({ export const AliasRedirects: QuartzEmitterPlugin = () => ({
name: "AliasRedirects", name: "AliasRedirects",
async getDependencyGraph(ctx, content, _resources) { async *emit(ctx, content) {
const graph = new DepGraph<FilePath>()
const { argv } = ctx
for (const [_tree, file] of content) { for (const [_tree, file] of content) {
for (const slug of getAliasSlugs(file.data.frontmatter?.aliases ?? [], argv, file)) { yield* processFile(ctx, file)
graph.addEdge(file.data.filePath!, joinSegments(argv.output, slug + ".html") as FilePath)
}
} }
return graph
}, },
async *emit(ctx, content, _resources) { async *partialEmit(ctx, _content, _resources, changeEvents) {
for (const [_tree, file] of content) { for (const changeEvent of changeEvents) {
const ogSlug = simplifySlug(file.data.slug!) if (!changeEvent.file) continue
if (changeEvent.type === "add" || changeEvent.type === "change") {
for (const slug of file.data.aliases ?? []) { // add new ones if this file still exists
const redirUrl = resolveRelative(slug, file.data.slug!) yield* processFile(ctx, changeEvent.file)
yield write({
ctx,
content: `
<!DOCTYPE html>
<html lang="en-us">
<head>
<title>${ogSlug}</title>
<link rel="canonical" href="${redirUrl}">
<meta name="robots" content="noindex">
<meta charset="utf-8">
<meta http-equiv="refresh" content="0; url=${redirUrl}">
</head>
</html>
`,
slug,
ext: ".html",
})
} }
} }
}, },

View File

@@ -3,7 +3,6 @@ import { QuartzEmitterPlugin } from "../types"
import path from "path" import path from "path"
import fs from "fs" import fs from "fs"
import { glob } from "../../util/glob" import { glob } from "../../util/glob"
import DepGraph from "../../depgraph"
import { Argv } from "../../util/ctx" import { Argv } from "../../util/ctx"
import { QuartzConfig } from "../../cfg" import { QuartzConfig } from "../../cfg"
@@ -12,40 +11,41 @@ const filesToCopy = async (argv: Argv, cfg: QuartzConfig) => {
return await glob("**", argv.directory, ["**/*.md", ...cfg.configuration.ignorePatterns]) return await glob("**", argv.directory, ["**/*.md", ...cfg.configuration.ignorePatterns])
} }
// Copies a single (non-markdown) content file from the content directory into
// the output directory, creating intermediate directories as needed.
// Returns the destination path that was written.
const copyFile = async (argv: Argv, fp: FilePath) => {
  const src = joinSegments(argv.directory, fp) as FilePath
  // NOTE(review): the previous emit logic used `slugifyFilePath(fp, true) + ext`
  // to explicitly keep the file extension — confirm `slugifyFilePath(fp)`
  // preserves extensions for all asset types
  const name = slugifyFilePath(fp)
  const dest = joinSegments(argv.output, name) as FilePath
  // ensure dir exists
  const dir = path.dirname(dest) as FilePath
  await fs.promises.mkdir(dir, { recursive: true })
  await fs.promises.copyFile(src, dest)
  return dest
}
export const Assets: QuartzEmitterPlugin = () => { export const Assets: QuartzEmitterPlugin = () => {
return { return {
name: "Assets", name: "Assets",
async getDependencyGraph(ctx, _content, _resources) { async *emit({ argv, cfg }) {
const { argv, cfg } = ctx
const graph = new DepGraph<FilePath>()
const fps = await filesToCopy(argv, cfg) const fps = await filesToCopy(argv, cfg)
for (const fp of fps) { for (const fp of fps) {
const ext = path.extname(fp) yield copyFile(argv, fp)
const src = joinSegments(argv.directory, fp) as FilePath
const name = (slugifyFilePath(fp as FilePath, true) + ext) as FilePath
const dest = joinSegments(argv.output, name) as FilePath
graph.addEdge(src, dest)
} }
return graph
}, },
async *emit({ argv, cfg }, _content, _resources) { async *partialEmit(ctx, _content, _resources, changeEvents) {
const assetsPath = argv.output for (const changeEvent of changeEvents) {
const fps = await filesToCopy(argv, cfg) const ext = path.extname(changeEvent.path)
for (const fp of fps) { if (ext === ".md") continue
const ext = path.extname(fp)
const src = joinSegments(argv.directory, fp) as FilePath
const name = (slugifyFilePath(fp as FilePath, true) + ext) as FilePath
const dest = joinSegments(assetsPath, name) as FilePath if (changeEvent.type === "add" || changeEvent.type === "change") {
const dir = path.dirname(dest) as FilePath yield copyFile(ctx.argv, changeEvent.path)
await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists } else if (changeEvent.type === "delete") {
await fs.promises.copyFile(src, dest) const name = slugifyFilePath(changeEvent.path)
yield dest const dest = joinSegments(ctx.argv.output, name) as FilePath
await fs.promises.unlink(dest)
}
} }
}, },
} }

View File

@@ -2,7 +2,6 @@ import { FilePath, joinSegments } from "../../util/path"
import { QuartzEmitterPlugin } from "../types" import { QuartzEmitterPlugin } from "../types"
import fs from "fs" import fs from "fs"
import chalk from "chalk" import chalk from "chalk"
import DepGraph from "../../depgraph"
export function extractDomainFromBaseUrl(baseUrl: string) { export function extractDomainFromBaseUrl(baseUrl: string) {
const url = new URL(`https://${baseUrl}`) const url = new URL(`https://${baseUrl}`)
@@ -11,10 +10,7 @@ export function extractDomainFromBaseUrl(baseUrl: string) {
export const CNAME: QuartzEmitterPlugin = () => ({ export const CNAME: QuartzEmitterPlugin = () => ({
name: "CNAME", name: "CNAME",
async getDependencyGraph(_ctx, _content, _resources) { async emit({ argv, cfg }) {
return new DepGraph<FilePath>()
},
async emit({ argv, cfg }, _content, _resources) {
if (!cfg.configuration.baseUrl) { if (!cfg.configuration.baseUrl) {
console.warn(chalk.yellow("CNAME emitter requires `baseUrl` to be set in your configuration")) console.warn(chalk.yellow("CNAME emitter requires `baseUrl` to be set in your configuration"))
return [] return []
@@ -27,4 +23,5 @@ export const CNAME: QuartzEmitterPlugin = () => ({
await fs.promises.writeFile(path, content) await fs.promises.writeFile(path, content)
return [path] as FilePath[] return [path] as FilePath[]
}, },
async *partialEmit() {},
}) })

View File

@@ -1,4 +1,4 @@
import { FilePath, FullSlug, joinSegments } from "../../util/path" import { FullSlug, joinSegments } from "../../util/path"
import { QuartzEmitterPlugin } from "../types" import { QuartzEmitterPlugin } from "../types"
// @ts-ignore // @ts-ignore
@@ -13,7 +13,6 @@ import { googleFontHref, joinStyles, processGoogleFonts } from "../../util/theme
import { Features, transform } from "lightningcss" import { Features, transform } from "lightningcss"
import { transform as transpile } from "esbuild" import { transform as transpile } from "esbuild"
import { write } from "./helpers" import { write } from "./helpers"
import DepGraph from "../../depgraph"
type ComponentResources = { type ComponentResources = {
css: string[] css: string[]
@@ -86,7 +85,7 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
componentResources.afterDOMLoaded.push(` componentResources.afterDOMLoaded.push(`
const gtagScript = document.createElement("script") const gtagScript = document.createElement("script")
gtagScript.src = "https://www.googletagmanager.com/gtag/js?id=${tagId}" gtagScript.src = "https://www.googletagmanager.com/gtag/js?id=${tagId}"
gtagScript.async = true gtagScript.defer = true
document.head.appendChild(gtagScript) document.head.appendChild(gtagScript)
window.dataLayer = window.dataLayer || []; window.dataLayer = window.dataLayer || [];
@@ -121,7 +120,7 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
umamiScript.src = "${cfg.analytics.host ?? "https://analytics.umami.is"}/script.js" umamiScript.src = "${cfg.analytics.host ?? "https://analytics.umami.is"}/script.js"
umamiScript.setAttribute("data-website-id", "${cfg.analytics.websiteId}") umamiScript.setAttribute("data-website-id", "${cfg.analytics.websiteId}")
umamiScript.setAttribute("data-auto-track", "false") umamiScript.setAttribute("data-auto-track", "false")
umamiScript.async = true umamiScript.defer = true
document.head.appendChild(umamiScript) document.head.appendChild(umamiScript)
document.addEventListener("nav", () => { document.addEventListener("nav", () => {
@@ -132,7 +131,7 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
componentResources.afterDOMLoaded.push(` componentResources.afterDOMLoaded.push(`
const goatcounterScript = document.createElement("script") const goatcounterScript = document.createElement("script")
goatcounterScript.src = "${cfg.analytics.scriptSrc ?? "https://gc.zgo.at/count.js"}" goatcounterScript.src = "${cfg.analytics.scriptSrc ?? "https://gc.zgo.at/count.js"}"
goatcounterScript.async = true goatcounterScript.defer = true
goatcounterScript.setAttribute("data-goatcounter", goatcounterScript.setAttribute("data-goatcounter",
"https://${cfg.analytics.websiteId}.${cfg.analytics.host ?? "goatcounter.com"}/count") "https://${cfg.analytics.websiteId}.${cfg.analytics.host ?? "goatcounter.com"}/count")
document.head.appendChild(goatcounterScript) document.head.appendChild(goatcounterScript)
@@ -173,14 +172,13 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
const cabinScript = document.createElement("script") const cabinScript = document.createElement("script")
cabinScript.src = "${cfg.analytics.host ?? "https://scripts.withcabin.com"}/hello.js" cabinScript.src = "${cfg.analytics.host ?? "https://scripts.withcabin.com"}/hello.js"
cabinScript.defer = true cabinScript.defer = true
cabinScript.async = true
document.head.appendChild(cabinScript) document.head.appendChild(cabinScript)
`) `)
} else if (cfg.analytics?.provider === "clarity") { } else if (cfg.analytics?.provider === "clarity") {
componentResources.afterDOMLoaded.push(` componentResources.afterDOMLoaded.push(`
const clarityScript = document.createElement("script") const clarityScript = document.createElement("script")
clarityScript.innerHTML= \`(function(c,l,a,r,i,t,y){c[a]=c[a]||function(){(c[a].q=c[a].q||[]).push(arguments)}; clarityScript.innerHTML= \`(function(c,l,a,r,i,t,y){c[a]=c[a]||function(){(c[a].q=c[a].q||[]).push(arguments)};
t=l.createElement(r);t.async=1;t.src="https://www.clarity.ms/tag/"+i; t=l.createElement(r);t.defer=1;t.src="https://www.clarity.ms/tag/"+i;
y=l.getElementsByTagName(r)[0];y.parentNode.insertBefore(t,y); y=l.getElementsByTagName(r)[0];y.parentNode.insertBefore(t,y);
})(window, document, "clarity", "script", "${cfg.analytics.projectId}");\` })(window, document, "clarity", "script", "${cfg.analytics.projectId}");\`
document.head.appendChild(clarityScript) document.head.appendChild(clarityScript)
@@ -204,9 +202,6 @@ function addGlobalPageResources(ctx: BuildCtx, componentResources: ComponentReso
export const ComponentResources: QuartzEmitterPlugin = () => { export const ComponentResources: QuartzEmitterPlugin = () => {
return { return {
name: "ComponentResources", name: "ComponentResources",
async getDependencyGraph(_ctx, _content, _resources) {
return new DepGraph<FilePath>()
},
async *emit(ctx, _content, _resources) { async *emit(ctx, _content, _resources) {
const cfg = ctx.cfg.configuration const cfg = ctx.cfg.configuration
// component specific scripts and styles // component specific scripts and styles
@@ -235,7 +230,7 @@ export const ComponentResources: QuartzEmitterPlugin = () => {
for (const fontFile of fontFiles) { for (const fontFile of fontFiles) {
const res = await fetch(fontFile.url) const res = await fetch(fontFile.url)
if (!res.ok) { if (!res.ok) {
throw new Error(`failed to fetch font ${fontFile.filename}`) throw new Error(`Failed to fetch font ${fontFile.filename}`)
} }
const buf = await res.arrayBuffer() const buf = await res.arrayBuffer()
@@ -282,19 +277,22 @@ export const ComponentResources: QuartzEmitterPlugin = () => {
}, },
include: Features.MediaQueries, include: Features.MediaQueries,
}).code.toString(), }).code.toString(),
}), })
yield write({
ctx, yield write({
slug: "prescript" as FullSlug, ctx,
ext: ".js", slug: "prescript" as FullSlug,
content: prescript, ext: ".js",
}), content: prescript,
yield write({ })
ctx,
slug: "postscript" as FullSlug, yield write({
ext: ".js", ctx,
content: postscript, slug: "postscript" as FullSlug,
}) ext: ".js",
content: postscript,
})
}, },
async *partialEmit() {},
} }
} }

View File

@@ -7,11 +7,11 @@ import { QuartzEmitterPlugin } from "../types"
import { toHtml } from "hast-util-to-html" import { toHtml } from "hast-util-to-html"
import { write } from "./helpers" import { write } from "./helpers"
import { i18n } from "../../i18n" import { i18n } from "../../i18n"
import DepGraph from "../../depgraph"
export type ContentIndexMap = Map<FullSlug, ContentDetails> export type ContentIndexMap = Map<FullSlug, ContentDetails>
export type ContentDetails = { export type ContentDetails = {
slug: FullSlug slug: FullSlug
filePath: FilePath
title: string title: string
links: SimpleSlug[] links: SimpleSlug[]
tags: string[] tags: string[]
@@ -96,27 +96,7 @@ export const ContentIndex: QuartzEmitterPlugin<Partial<Options>> = (opts) => {
opts = { ...defaultOptions, ...opts } opts = { ...defaultOptions, ...opts }
return { return {
name: "ContentIndex", name: "ContentIndex",
async getDependencyGraph(ctx, content, _resources) { async *emit(ctx, content) {
const graph = new DepGraph<FilePath>()
for (const [_tree, file] of content) {
const sourcePath = file.data.filePath!
graph.addEdge(
sourcePath,
joinSegments(ctx.argv.output, "static/contentIndex.json") as FilePath,
)
if (opts?.enableSiteMap) {
graph.addEdge(sourcePath, joinSegments(ctx.argv.output, "sitemap.xml") as FilePath)
}
if (opts?.enableRSS) {
graph.addEdge(sourcePath, joinSegments(ctx.argv.output, "index.xml") as FilePath)
}
}
return graph
},
async *emit(ctx, content, _resources) {
const cfg = ctx.cfg.configuration const cfg = ctx.cfg.configuration
const linkIndex: ContentIndexMap = new Map() const linkIndex: ContentIndexMap = new Map()
for (const [tree, file] of content) { for (const [tree, file] of content) {
@@ -125,6 +105,7 @@ export const ContentIndex: QuartzEmitterPlugin<Partial<Options>> = (opts) => {
if (opts?.includeEmptyFiles || (file.data.text && file.data.text !== "")) { if (opts?.includeEmptyFiles || (file.data.text && file.data.text !== "")) {
linkIndex.set(slug, { linkIndex.set(slug, {
slug, slug,
filePath: file.data.relativePath!,
title: file.data.frontmatter?.title!, title: file.data.frontmatter?.title!,
links: file.data.links ?? [], links: file.data.links ?? [],
tags: file.data.frontmatter?.tags ?? [], tags: file.data.frontmatter?.tags ?? [],

View File

@@ -1,54 +1,48 @@
import path from "path" import path from "path"
import { visit } from "unist-util-visit"
import { Root } from "hast"
import { VFile } from "vfile"
import { QuartzEmitterPlugin } from "../types" import { QuartzEmitterPlugin } from "../types"
import { QuartzComponentProps } from "../../components/types" import { QuartzComponentProps } from "../../components/types"
import HeaderConstructor from "../../components/Header" import HeaderConstructor from "../../components/Header"
import BodyConstructor from "../../components/Body" import BodyConstructor from "../../components/Body"
import { pageResources, renderPage } from "../../components/renderPage" import { pageResources, renderPage } from "../../components/renderPage"
import { FullPageLayout } from "../../cfg" import { FullPageLayout } from "../../cfg"
import { Argv } from "../../util/ctx" import { pathToRoot } from "../../util/path"
import { FilePath, isRelativeURL, joinSegments, pathToRoot } from "../../util/path"
import { defaultContentPageLayout, sharedPageComponents } from "../../../quartz.layout" import { defaultContentPageLayout, sharedPageComponents } from "../../../quartz.layout"
import { Content } from "../../components" import { Content } from "../../components"
import chalk from "chalk" import chalk from "chalk"
import { write } from "./helpers" import { write } from "./helpers"
import DepGraph from "../../depgraph" import { BuildCtx } from "../../util/ctx"
import { Node } from "unist"
import { StaticResources } from "../../util/resources"
import { QuartzPluginData } from "../vfile"
// get all the dependencies for the markdown file async function processContent(
// eg. images, scripts, stylesheets, transclusions ctx: BuildCtx,
const parseDependencies = (argv: Argv, hast: Root, file: VFile): string[] => { tree: Node,
const dependencies: string[] = [] fileData: QuartzPluginData,
allFiles: QuartzPluginData[],
opts: FullPageLayout,
resources: StaticResources,
) {
const slug = fileData.slug!
const cfg = ctx.cfg.configuration
const externalResources = pageResources(pathToRoot(slug), resources)
const componentData: QuartzComponentProps = {
ctx,
fileData,
externalResources,
cfg,
children: [],
tree,
allFiles,
}
visit(hast, "element", (elem): void => { const content = renderPage(cfg, slug, componentData, opts, externalResources)
let ref: string | null = null return write({
ctx,
if ( content,
["script", "img", "audio", "video", "source", "iframe"].includes(elem.tagName) && slug,
elem?.properties?.src ext: ".html",
) {
ref = elem.properties.src.toString()
} else if (["a", "link"].includes(elem.tagName) && elem?.properties?.href) {
// transclusions will create a tags with relative hrefs
ref = elem.properties.href.toString()
}
// if it is a relative url, its a local file and we need to add
// it to the dependency graph. otherwise, ignore
if (ref === null || !isRelativeURL(ref)) {
return
}
let fp = path.join(file.data.filePath!, path.relative(argv.directory, ref)).replace(/\\/g, "/")
// markdown files have the .md extension stripped in hrefs, add it back here
if (!fp.split("/").pop()?.includes(".")) {
fp += ".md"
}
dependencies.push(fp)
}) })
return dependencies
} }
export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOpts) => { export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOpts) => {
@@ -79,53 +73,22 @@ export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOp
Footer, Footer,
] ]
}, },
// Builds the dependency graph for content pages: each markdown source maps to
// its emitted HTML file, and any asset it references maps back to the source.
async getDependencyGraph(ctx, content, _resources) {
  const graph = new DepGraph<FilePath>()
  for (const [tree, file] of content) {
    const sourcePath = file.data.filePath!
    const slug = file.data.slug!
    // each markdown source produces exactly one emitted HTML page
    graph.addEdge(sourcePath, joinSegments(ctx.argv.output, slug + ".html") as FilePath)
    // referenced local files (images, scripts, transclusions, ...) invalidate
    // this source when they change
    parseDependencies(ctx.argv, tree as Root, file).forEach((dep) => {
      graph.addEdge(dep as FilePath, sourcePath)
    })
  }
  return graph
},
async *emit(ctx, content, resources) { async *emit(ctx, content, resources) {
const cfg = ctx.cfg.configuration
const allFiles = content.map((c) => c[1].data) const allFiles = content.map((c) => c[1].data)
let containsIndex = false let containsIndex = false
for (const [tree, file] of content) { for (const [tree, file] of content) {
const slug = file.data.slug! const slug = file.data.slug!
if (slug === "index") { if (slug === "index") {
containsIndex = true containsIndex = true
} }
const externalResources = pageResources(pathToRoot(slug), file.data, resources) // only process home page, non-tag pages, and non-index pages
const componentData: QuartzComponentProps = { if (slug.endsWith("/index") || slug.startsWith("tags/")) continue
ctx, yield processContent(ctx, tree, file.data, allFiles, opts, resources)
fileData: file.data,
externalResources,
cfg,
children: [],
tree,
allFiles,
}
const content = renderPage(cfg, slug, componentData, opts, externalResources)
yield write({
ctx,
content,
slug,
ext: ".html",
})
} }
if (!containsIndex && !ctx.argv.fastRebuild) { if (!containsIndex) {
console.log( console.log(
chalk.yellow( chalk.yellow(
`\nWarning: you seem to be missing an \`index.md\` home page file at the root of your \`${ctx.argv.directory}\` folder (\`${path.join(ctx.argv.directory, "index.md")} does not exist\`). This may cause errors when deploying.`, `\nWarning: you seem to be missing an \`index.md\` home page file at the root of your \`${ctx.argv.directory}\` folder (\`${path.join(ctx.argv.directory, "index.md")} does not exist\`). This may cause errors when deploying.`,
@@ -133,5 +96,25 @@ export const ContentPage: QuartzEmitterPlugin<Partial<FullPageLayout>> = (userOp
) )
} }
}, },
// Incremental variant of emit: re-render only the pages whose source files
// were added or modified in this batch of change events.
async *partialEmit(ctx, content, resources, changeEvents) {
  const allFiles = content.map(([, vfile]) => vfile.data)

  // collect the slugs of every file that was added or modified
  const dirtySlugs = new Set<string>()
  for (const ev of changeEvents) {
    if (ev.file && (ev.type === "add" || ev.type === "change")) {
      dirtySlugs.add(ev.file.data.slug!)
    }
  }

  for (const [tree, file] of content) {
    const slug = file.data.slug!
    // skip untouched files, folder indexes, and tag pages (other emitters own those)
    if (!dirtySlugs.has(slug) || slug.endsWith("/index") || slug.startsWith("tags/")) {
      continue
    }
    yield processContent(ctx, tree, file.data, allFiles, opts, resources)
  }
},
} }
} }

View File

@@ -7,7 +7,6 @@ import { ProcessedContent, QuartzPluginData, defaultProcessedContent } from "../
import { FullPageLayout } from "../../cfg" import { FullPageLayout } from "../../cfg"
import path from "path" import path from "path"
import { import {
FilePath,
FullSlug, FullSlug,
SimpleSlug, SimpleSlug,
stripSlashes, stripSlashes,
@@ -18,13 +17,89 @@ import {
import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout" import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout"
import { FolderContent } from "../../components" import { FolderContent } from "../../components"
import { write } from "./helpers" import { write } from "./helpers"
import { i18n } from "../../i18n" import { i18n, TRANSLATIONS } from "../../i18n"
import DepGraph from "../../depgraph" import { BuildCtx } from "../../util/ctx"
import { StaticResources } from "../../util/resources"
interface FolderPageOptions extends FullPageLayout { interface FolderPageOptions extends FullPageLayout {
sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number
} }
/**
 * Renders one HTML index page per folder in `folderInfo` and yields each
 * written output path.
 *
 * @param ctx build context (configuration is read from it)
 * @param folderInfo map of folder slug -> the content to render for that folder
 * @param allFiles plugin data of every file, passed through to components
 * @param opts page layout to render with
 * @param resources static resources shared across pages
 */
async function* processFolderInfo(
  ctx: BuildCtx,
  folderInfo: Record<SimpleSlug, ProcessedContent>,
  allFiles: QuartzPluginData[],
  opts: FullPageLayout,
  resources: StaticResources,
) {
  const cfg = ctx.cfg.configuration
  const entries = Object.entries(folderInfo) as [SimpleSlug, ProcessedContent][]
  for (const [folder, [tree, file]] of entries) {
    // folder pages are emitted as <folder>/index.html
    const slug = joinSegments(folder, "index") as FullSlug
    const externalResources = pageResources(pathToRoot(slug), resources)
    const componentData: QuartzComponentProps = {
      ctx,
      fileData: file.data,
      externalResources,
      cfg,
      children: [],
      tree,
      allFiles,
    }
    const html = renderPage(cfg, slug, componentData, opts, externalResources)
    yield write({
      ctx,
      content: html,
      slug,
      ext: ".html",
    })
  }
}
/**
 * Builds the content to render for each folder page: a synthetic placeholder
 * by default, replaced by the user-authored index page when one exists.
 *
 * @param folders set of folder slugs that need a page
 * @param content all processed content, scanned for real folder index files
 * @param locale locale used for the placeholder page titles
 * @returns map of folder slug -> processed content for that folder page
 */
function computeFolderInfo(
  folders: Set<SimpleSlug>,
  content: ProcessedContent[],
  locale: keyof typeof TRANSLATIONS,
): Record<SimpleSlug, ProcessedContent> {
  // seed every folder with a synthetic default page
  const folderInfo: Record<SimpleSlug, ProcessedContent> = {}
  for (const folder of folders) {
    folderInfo[folder] = defaultProcessedContent({
      slug: joinSegments(folder, "index") as FullSlug,
      frontmatter: {
        title: `${i18n(locale).pages.folderContent.folder}: ${folder}`,
        tags: [],
      },
    })
  }

  // overwrite with the real content where the user authored an index page
  for (const [tree, file] of content) {
    const slug = stripSlashes(simplifySlug(file.data.slug!)) as SimpleSlug
    if (folders.has(slug)) {
      folderInfo[slug] = [tree, file]
    }
  }

  return folderInfo
}
/**
 * Returns every ancestor folder of `slug`, from its immediate parent up to and
 * including the root marker "." — e.g. "a/b/index" -> ["a/b", "a", "."].
 *
 * Fixes: replaces `var` with `let` and drops the redundant `?? ""` fallbacks
 * (both `slug` and the loop variable are always non-nullish strings here).
 */
function _getFolders(slug: FullSlug): SimpleSlug[] {
  let folderName = path.dirname(slug) as SimpleSlug
  const parentFolderNames: SimpleSlug[] = [folderName]
  while (folderName !== ".") {
    folderName = path.dirname(folderName) as SimpleSlug
    parentFolderNames.push(folderName)
  }
  return parentFolderNames
}
export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (userOpts) => { export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (userOpts) => {
const opts: FullPageLayout = { const opts: FullPageLayout = {
...sharedPageComponents, ...sharedPageComponents,
@@ -53,22 +128,6 @@ export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (user
Footer, Footer,
] ]
}, },
// Builds the dependency graph for folder pages: every file inside a folder is
// an input to that folder's emitted index.html.
async getDependencyGraph(_ctx, content, _resources) {
  // Example graph:
  // nested/file.md --> nested/index.html
  // nested/file2.md ------^
  const graph = new DepGraph<FilePath>()
  content.map(([_tree, vfile]) => {
    const slug = vfile.data.slug
    const folderName = path.dirname(slug ?? "") as SimpleSlug
    // root-level files and tag pages do not belong to any folder index
    if (slug && folderName !== "." && folderName !== "tags") {
      graph.addEdge(vfile.data.filePath!, joinSegments(folderName, "index.html") as FilePath)
    }
  })
  return graph
},
async *emit(ctx, content, resources) { async *emit(ctx, content, resources) {
const allFiles = content.map((c) => c[1].data) const allFiles = content.map((c) => c[1].data)
const cfg = ctx.cfg.configuration const cfg = ctx.cfg.configuration
@@ -83,59 +142,29 @@ export const FolderPage: QuartzEmitterPlugin<Partial<FolderPageOptions>> = (user
}), }),
) )
const folderDescriptions: Record<string, ProcessedContent> = Object.fromEntries( const folderInfo = computeFolderInfo(folders, content, cfg.locale)
[...folders].map((folder) => [ yield* processFolderInfo(ctx, folderInfo, allFiles, opts, resources)
folder, },
defaultProcessedContent({ async *partialEmit(ctx, content, resources, changeEvents) {
slug: joinSegments(folder, "index") as FullSlug, const allFiles = content.map((c) => c[1].data)
frontmatter: { const cfg = ctx.cfg.configuration
title: `${i18n(cfg.locale).pages.folderContent.folder}: ${folder}`,
tags: [],
},
}),
]),
)
for (const [tree, file] of content) { // Find all folders that need to be updated based on changed files
const slug = stripSlashes(simplifySlug(file.data.slug!)) as SimpleSlug const affectedFolders: Set<SimpleSlug> = new Set()
if (folders.has(slug)) { for (const changeEvent of changeEvents) {
folderDescriptions[slug] = [tree, file] if (!changeEvent.file) continue
} const slug = changeEvent.file.data.slug!
const folders = _getFolders(slug).filter(
(folderName) => folderName !== "." && folderName !== "tags",
)
folders.forEach((folder) => affectedFolders.add(folder))
} }
for (const folder of folders) { // If there are affected folders, rebuild their pages
const slug = joinSegments(folder, "index") as FullSlug if (affectedFolders.size > 0) {
const [tree, file] = folderDescriptions[folder] const folderInfo = computeFolderInfo(affectedFolders, content, cfg.locale)
const externalResources = pageResources(pathToRoot(slug), file.data, resources) yield* processFolderInfo(ctx, folderInfo, allFiles, opts, resources)
const componentData: QuartzComponentProps = {
ctx,
fileData: file.data,
externalResources,
cfg,
children: [],
tree,
allFiles,
}
const content = renderPage(cfg, slug, componentData, opts, externalResources)
yield write({
ctx,
content,
slug,
ext: ".html",
})
} }
}, },
} }
} }
// Returns every ancestor folder of `slug`, from its immediate parent up to and
// including the root marker "." — e.g. "a/b/index" -> ["a/b", "a", "."].
function _getFolders(slug: FullSlug): SimpleSlug[] {
  var folderName = path.dirname(slug ?? "") as SimpleSlug
  const parentFolderNames = [folderName]
  // path.dirname eventually reaches "." for any relative path
  while (folderName !== ".") {
    folderName = path.dirname(folderName ?? "") as SimpleSlug
    parentFolderNames.push(folderName)
  }
  return parentFolderNames
}

View File

@@ -4,10 +4,12 @@ import { unescapeHTML } from "../../util/escape"
import { FullSlug, getFileExtension } from "../../util/path" import { FullSlug, getFileExtension } from "../../util/path"
import { ImageOptions, SocialImageOptions, defaultImage, getSatoriFonts } from "../../util/og" import { ImageOptions, SocialImageOptions, defaultImage, getSatoriFonts } from "../../util/og"
import sharp from "sharp" import sharp from "sharp"
import satori from "satori" import satori, { SatoriOptions } from "satori"
import { loadEmoji, getIconCode } from "../../util/emoji" import { loadEmoji, getIconCode } from "../../util/emoji"
import { Readable } from "stream" import { Readable } from "stream"
import { write } from "./helpers" import { write } from "./helpers"
import { BuildCtx } from "../../util/ctx"
import { QuartzPluginData } from "../vfile"
const defaultOptions: SocialImageOptions = { const defaultOptions: SocialImageOptions = {
colorScheme: "lightMode", colorScheme: "lightMode",
@@ -42,6 +44,41 @@ async function generateSocialImage(
return sharp(Buffer.from(svg)).webp({ quality: 40 }) return sharp(Buffer.from(svg)).webp({ quality: 40 })
} }
/**
 * Generates the social (OG) preview image for one file and writes it to
 * `<slug>-og-image.webp`, returning the written path.
 *
 * Title and description fall back to frontmatter, then the file's extracted
 * description, then the locale's defaults.
 *
 * @param ctx build context (configuration is read from it)
 * @param fileData plugin data of the file to render an image for
 * @param fonts satori font set used for text layout
 * @param fullOptions resolved social-image options
 */
async function processOgImage(
  ctx: BuildCtx,
  fileData: QuartzPluginData,
  fonts: SatoriOptions["fonts"],
  fullOptions: SocialImageOptions,
) {
  const cfg = ctx.cfg.configuration
  const fm = fileData.frontmatter
  const suffix = cfg.pageTitleSuffix ?? ""
  const title = (fm?.title ?? i18n(cfg.locale).propertyDefaults.title) + suffix
  const description =
    fm?.socialDescription ??
    fm?.description ??
    unescapeHTML(fileData.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description)

  const imageStream = await generateSocialImage(
    {
      title,
      description,
      fonts,
      cfg,
      fileData,
    },
    fullOptions,
  )

  return write({
    ctx,
    content: imageStream,
    slug: `${fileData.slug!}-og-image` as FullSlug,
    ext: ".webp",
  })
}
export const CustomOgImagesEmitterName = "CustomOgImages" export const CustomOgImagesEmitterName = "CustomOgImages"
export const CustomOgImages: QuartzEmitterPlugin<Partial<SocialImageOptions>> = (userOpts) => { export const CustomOgImages: QuartzEmitterPlugin<Partial<SocialImageOptions>> = (userOpts) => {
const fullOptions = { ...defaultOptions, ...userOpts } const fullOptions = { ...defaultOptions, ...userOpts }
@@ -58,39 +95,23 @@ export const CustomOgImages: QuartzEmitterPlugin<Partial<SocialImageOptions>> =
const fonts = await getSatoriFonts(headerFont, bodyFont) const fonts = await getSatoriFonts(headerFont, bodyFont)
for (const [_tree, vfile] of content) { for (const [_tree, vfile] of content) {
// if this file defines socialImage, we can skip if (vfile.data.frontmatter?.socialImage !== undefined) continue
if (vfile.data.frontmatter?.socialImage !== undefined) { yield processOgImage(ctx, vfile.data, fonts, fullOptions)
continue }
},
async *partialEmit(ctx, _content, _resources, changeEvents) {
const cfg = ctx.cfg.configuration
const headerFont = cfg.theme.typography.header
const bodyFont = cfg.theme.typography.body
const fonts = await getSatoriFonts(headerFont, bodyFont)
// find all slugs that changed or were added
for (const changeEvent of changeEvents) {
if (!changeEvent.file) continue
if (changeEvent.file.data.frontmatter?.socialImage !== undefined) continue
if (changeEvent.type === "add" || changeEvent.type === "change") {
yield processOgImage(ctx, changeEvent.file.data, fonts, fullOptions)
} }
const slug = vfile.data.slug!
const titleSuffix = cfg.pageTitleSuffix ?? ""
const title =
(vfile.data.frontmatter?.title ?? i18n(cfg.locale).propertyDefaults.title) + titleSuffix
const description =
vfile.data.frontmatter?.socialDescription ??
vfile.data.frontmatter?.description ??
unescapeHTML(
vfile.data.description?.trim() ?? i18n(cfg.locale).propertyDefaults.description,
)
const stream = await generateSocialImage(
{
title,
description,
fonts,
cfg,
fileData: vfile.data,
},
fullOptions,
)
yield write({
ctx,
content: stream,
slug: `${slug}-og-image` as FullSlug,
ext: ".webp",
})
} }
}, },
externalResources: (ctx) => { externalResources: (ctx) => {

View File

@@ -2,26 +2,11 @@ import { FilePath, QUARTZ, joinSegments } from "../../util/path"
import { QuartzEmitterPlugin } from "../types" import { QuartzEmitterPlugin } from "../types"
import fs from "fs" import fs from "fs"
import { glob } from "../../util/glob" import { glob } from "../../util/glob"
import DepGraph from "../../depgraph"
import { dirname } from "path" import { dirname } from "path"
export const Static: QuartzEmitterPlugin = () => ({ export const Static: QuartzEmitterPlugin = () => ({
name: "Static", name: "Static",
async getDependencyGraph({ argv, cfg }, _content, _resources) { async *emit({ argv, cfg }) {
const graph = new DepGraph<FilePath>()
const staticPath = joinSegments(QUARTZ, "static")
const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns)
for (const fp of fps) {
graph.addEdge(
joinSegments("static", fp) as FilePath,
joinSegments(argv.output, "static", fp) as FilePath,
)
}
return graph
},
async *emit({ argv, cfg }, _content) {
const staticPath = joinSegments(QUARTZ, "static") const staticPath = joinSegments(QUARTZ, "static")
const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns) const fps = await glob("**", staticPath, cfg.configuration.ignorePatterns)
const outputStaticPath = joinSegments(argv.output, "static") const outputStaticPath = joinSegments(argv.output, "static")
@@ -34,4 +19,5 @@ export const Static: QuartzEmitterPlugin = () => ({
yield dest yield dest
} }
}, },
async *partialEmit() {}, // intentional no-op: yields nothing on incremental rebuilds
}) })

View File

@@ -5,23 +5,94 @@ import BodyConstructor from "../../components/Body"
import { pageResources, renderPage } from "../../components/renderPage" import { pageResources, renderPage } from "../../components/renderPage"
import { ProcessedContent, QuartzPluginData, defaultProcessedContent } from "../vfile" import { ProcessedContent, QuartzPluginData, defaultProcessedContent } from "../vfile"
import { FullPageLayout } from "../../cfg" import { FullPageLayout } from "../../cfg"
import { import { FullSlug, getAllSegmentPrefixes, joinSegments, pathToRoot } from "../../util/path"
FilePath,
FullSlug,
getAllSegmentPrefixes,
joinSegments,
pathToRoot,
} from "../../util/path"
import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout" import { defaultListPageLayout, sharedPageComponents } from "../../../quartz.layout"
import { TagContent } from "../../components" import { TagContent } from "../../components"
import { write } from "./helpers" import { write } from "./helpers"
import { i18n } from "../../i18n" import { i18n, TRANSLATIONS } from "../../i18n"
import DepGraph from "../../depgraph" import { BuildCtx } from "../../util/ctx"
import { StaticResources } from "../../util/resources"
interface TagPageOptions extends FullPageLayout { interface TagPageOptions extends FullPageLayout {
sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number sort?: (f1: QuartzPluginData, f2: QuartzPluginData) => number
} }
/**
 * Collects every tag used across the site (plus the "index" base tag) and
 * builds the content to render for each tag page: a synthetic default page,
 * replaced by the user-authored tag page when one exists.
 *
 * Also retitles user tag pages whose frontmatter title is just the raw tag.
 *
 * @param allFiles plugin data of every file, scanned for frontmatter tags
 * @param content all processed content, scanned for real `tags/...` pages
 * @param locale locale used for generated titles
 * @returns the tag set and a map of tag -> processed content for its page
 */
function computeTagInfo(
  allFiles: QuartzPluginData[],
  content: ProcessedContent[],
  locale: keyof typeof TRANSLATIONS,
): [Set<string>, Record<string, ProcessedContent>] {
  const tags = new Set<string>(
    allFiles.flatMap((data) => data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes),
  )
  // the base tag page listing all tags
  tags.add("index")

  // seed every tag with a synthetic default page
  const tagDescriptions: Record<string, ProcessedContent> = {}
  for (const tag of tags) {
    const title =
      tag === "index"
        ? i18n(locale).pages.tagContent.tagIndex
        : `${i18n(locale).pages.tagContent.tag}: ${tag}`
    tagDescriptions[tag] = defaultProcessedContent({
      slug: joinSegments("tags", tag) as FullSlug,
      frontmatter: { title, tags: [] },
    })
  }

  // overwrite with the real content where the user authored a tag page
  for (const [tree, file] of content) {
    const slug = file.data.slug!
    if (!slug.startsWith("tags/")) continue
    const tag = slug.slice("tags/".length)
    if (!tags.has(tag)) continue
    tagDescriptions[tag] = [tree, file]
    if (file.data.frontmatter?.title === tag) {
      file.data.frontmatter.title = `${i18n(locale).pages.tagContent.tag}: ${tag}`
    }
  }

  return [tags, tagDescriptions]
}
/**
 * Renders the HTML page for a single tag and returns the written output path.
 *
 * @param ctx build context (configuration is read from it)
 * @param tag tag name without the "tags/" prefix
 * @param tagContent tree + file to render for this tag
 * @param allFiles plugin data of every file, passed through to components
 * @param opts page layout to render with
 * @param resources static resources shared across pages
 */
async function processTagPage(
  ctx: BuildCtx,
  tag: string,
  tagContent: ProcessedContent,
  allFiles: QuartzPluginData[],
  opts: FullPageLayout,
  resources: StaticResources,
) {
  const [tree, file] = tagContent
  const cfg = ctx.cfg.configuration
  const pageSlug = joinSegments("tags", tag) as FullSlug
  const externalResources = pageResources(pathToRoot(pageSlug), resources)
  const componentData: QuartzComponentProps = {
    ctx,
    fileData: file.data,
    externalResources,
    cfg,
    children: [],
    tree,
    allFiles,
  }
  const html = renderPage(cfg, pageSlug, componentData, opts, externalResources)
  // written under the file's own slug (matches "tags/<tag>" for both synthetic
  // and user-authored tag pages)
  return write({
    ctx,
    content: html,
    slug: file.data.slug!,
    ext: ".html",
  })
}
export const TagPage: QuartzEmitterPlugin<Partial<TagPageOptions>> = (userOpts) => { export const TagPage: QuartzEmitterPlugin<Partial<TagPageOptions>> = (userOpts) => {
const opts: FullPageLayout = { const opts: FullPageLayout = {
...sharedPageComponents, ...sharedPageComponents,
@@ -50,88 +121,49 @@ export const TagPage: QuartzEmitterPlugin<Partial<TagPageOptions>> = (userOpts)
Footer, Footer,
] ]
}, },
// Builds the dependency graph for tag pages: each tagged source file is an
// input to the emitted page of every tag (and tag prefix) it carries.
async getDependencyGraph(ctx, content, _resources) {
  const graph = new DepGraph<FilePath>()
  for (const [_tree, file] of content) {
    const sourcePath = file.data.filePath!
    const tags = (file.data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes)
    // if the file has at least one tag, it is used in the tag index page
    if (tags.length > 0) {
      tags.push("index")
    }
    for (const tag of tags) {
      graph.addEdge(
        sourcePath,
        joinSegments(ctx.argv.output, "tags", tag + ".html") as FilePath,
      )
    }
  }
  return graph
},
async *emit(ctx, content, resources) { async *emit(ctx, content, resources) {
const allFiles = content.map((c) => c[1].data) const allFiles = content.map((c) => c[1].data)
const cfg = ctx.cfg.configuration const cfg = ctx.cfg.configuration
const [tags, tagDescriptions] = computeTagInfo(allFiles, content, cfg.locale)
const tags: Set<string> = new Set(
allFiles.flatMap((data) => data.frontmatter?.tags ?? []).flatMap(getAllSegmentPrefixes),
)
// add base tag
tags.add("index")
const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
[...tags].map((tag) => {
const title =
tag === "index"
? i18n(cfg.locale).pages.tagContent.tagIndex
: `${i18n(cfg.locale).pages.tagContent.tag}: ${tag}`
return [
tag,
defaultProcessedContent({
slug: joinSegments("tags", tag) as FullSlug,
frontmatter: { title, tags: [] },
}),
]
}),
)
for (const [tree, file] of content) {
const slug = file.data.slug!
if (slug.startsWith("tags/")) {
const tag = slug.slice("tags/".length)
if (tags.has(tag)) {
tagDescriptions[tag] = [tree, file]
if (file.data.frontmatter?.title === tag) {
file.data.frontmatter.title = `${i18n(cfg.locale).pages.tagContent.tag}: ${tag}`
}
}
}
}
for (const tag of tags) { for (const tag of tags) {
const slug = joinSegments("tags", tag) as FullSlug yield processTagPage(ctx, tag, tagDescriptions[tag], allFiles, opts, resources)
const [tree, file] = tagDescriptions[tag] }
const externalResources = pageResources(pathToRoot(slug), file.data, resources) },
const componentData: QuartzComponentProps = { async *partialEmit(ctx, content, resources, changeEvents) {
ctx, const allFiles = content.map((c) => c[1].data)
fileData: file.data, const cfg = ctx.cfg.configuration
externalResources,
cfg, // Find all tags that need to be updated based on changed files
children: [], const affectedTags: Set<string> = new Set()
tree, for (const changeEvent of changeEvents) {
allFiles, if (!changeEvent.file) continue
const slug = changeEvent.file.data.slug!
// If it's a tag page itself that changed
if (slug.startsWith("tags/")) {
const tag = slug.slice("tags/".length)
affectedTags.add(tag)
} }
const content = renderPage(cfg, slug, componentData, opts, externalResources) // If a file with tags changed, we need to update those tag pages
yield write({ const fileTags = changeEvent.file.data.frontmatter?.tags ?? []
ctx, fileTags.flatMap(getAllSegmentPrefixes).forEach((tag) => affectedTags.add(tag))
content,
slug: file.data.slug!, // Always update the index tag page if any file changes
ext: ".html", affectedTags.add("index")
}) }
// If there are affected tags, rebuild their pages
if (affectedTags.size > 0) {
// We still need to compute all tags because tag pages show all tags
const [_tags, tagDescriptions] = computeTagInfo(allFiles, content, cfg.locale)
for (const tag of affectedTags) {
if (tagDescriptions[tag]) {
yield processTagPage(ctx, tag, tagDescriptions[tag], allFiles, opts, resources)
}
}
} }
}, },
} }

View File

@@ -5,11 +5,13 @@ import { escapeHTML } from "../../util/escape"
export interface Options { export interface Options {
descriptionLength: number descriptionLength: number
maxDescriptionLength: number
replaceExternalLinks: boolean replaceExternalLinks: boolean
} }
const defaultOptions: Options = { const defaultOptions: Options = {
descriptionLength: 150, descriptionLength: 150,
maxDescriptionLength: 300,
replaceExternalLinks: true, replaceExternalLinks: true,
} }
@@ -37,35 +39,41 @@ export const Description: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
text = text.replace(urlRegex, "$<domain>" + "$<path>") text = text.replace(urlRegex, "$<domain>" + "$<path>")
} }
const desc = frontMatterDescription ?? text if (frontMatterDescription) {
const sentences = desc.replace(/\s+/g, " ").split(/\.\s/) file.data.description = frontMatterDescription
const finalDesc: string[] = [] file.data.text = text
const len = opts.descriptionLength return
let sentenceIdx = 0 }
let currentDescriptionLength = 0
if (sentences[0] !== undefined && sentences[0].length >= len) { // otherwise, use the text content
const firstSentence = sentences[0].split(" ") const desc = text
while (currentDescriptionLength < len) { const sentences = desc.replace(/\s+/g, " ").split(/\.\s/)
const sentence = firstSentence[sentenceIdx] let finalDesc = ""
if (!sentence) break let sentenceIdx = 0
finalDesc.push(sentence)
currentDescriptionLength += sentence.length // Add full sentences until we exceed the guideline length
sentenceIdx++ while (sentenceIdx < sentences.length) {
} const sentence = sentences[sentenceIdx]
finalDesc.push("...") if (!sentence) break
} else {
while (currentDescriptionLength < len) { const currentSentence = sentence.endsWith(".") ? sentence : sentence + "."
const sentence = sentences[sentenceIdx] const nextLength = finalDesc.length + currentSentence.length + (finalDesc ? 1 : 0)
if (!sentence) break
const currentSentence = sentence.endsWith(".") ? sentence : sentence + "." // Add the sentence if we're under the guideline length
finalDesc.push(currentSentence) // or if this is the first sentence (always include at least one)
currentDescriptionLength += currentSentence.length if (nextLength <= opts.descriptionLength || sentenceIdx === 0) {
finalDesc += (finalDesc ? " " : "") + currentSentence
sentenceIdx++ sentenceIdx++
} else {
break
} }
} }
file.data.description = finalDesc.join(" ") // truncate to max length if necessary
file.data.description =
finalDesc.length > opts.maxDescriptionLength
? finalDesc.slice(0, opts.maxDescriptionLength) + "..."
: finalDesc
file.data.text = text file.data.text = text
} }
}, },

View File

@@ -3,12 +3,9 @@ import remarkFrontmatter from "remark-frontmatter"
import { QuartzTransformerPlugin } from "../types" import { QuartzTransformerPlugin } from "../types"
import yaml from "js-yaml" import yaml from "js-yaml"
import toml from "toml" import toml from "toml"
import { FilePath, FullSlug, joinSegments, slugifyFilePath, slugTag } from "../../util/path" import { FilePath, FullSlug, getFileExtension, slugifyFilePath, slugTag } from "../../util/path"
import { QuartzPluginData } from "../vfile" import { QuartzPluginData } from "../vfile"
import { i18n } from "../../i18n" import { i18n } from "../../i18n"
import { Argv } from "../../util/ctx"
import { VFile } from "vfile"
import path from "path"
export interface Options { export interface Options {
delimiters: string | [string, string] delimiters: string | [string, string]
@@ -43,26 +40,24 @@ function coerceToArray(input: string | string[]): string[] | undefined {
.map((tag: string | number) => tag.toString()) .map((tag: string | number) => tag.toString())
} }
export function getAliasSlugs(aliases: string[], argv: Argv, file: VFile): FullSlug[] { function getAliasSlugs(aliases: string[]): FullSlug[] {
const dir = path.posix.relative(argv.directory, path.dirname(file.data.filePath!)) const res: FullSlug[] = []
const slugs: FullSlug[] = aliases.map( for (const alias of aliases) {
(alias) => path.posix.join(dir, slugifyFilePath(alias as FilePath)) as FullSlug, const isMd = getFileExtension(alias) === "md"
) const mockFp = isMd ? alias : alias + ".md"
const permalink = file.data.frontmatter?.permalink const slug = slugifyFilePath(mockFp as FilePath)
if (typeof permalink === "string") { res.push(slug)
slugs.push(permalink as FullSlug)
} }
// fix any slugs that have trailing slash
return slugs.map((slug) => return res
slug.endsWith("/") ? (joinSegments(slug, "index") as FullSlug) : slug,
)
} }
export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts) => { export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts) => {
const opts = { ...defaultOptions, ...userOpts } const opts = { ...defaultOptions, ...userOpts }
return { return {
name: "FrontMatter", name: "FrontMatter",
markdownPlugins({ cfg, allSlugs, argv }) { markdownPlugins(ctx) {
const { cfg, allSlugs } = ctx
return [ return [
[remarkFrontmatter, ["yaml", "toml"]], [remarkFrontmatter, ["yaml", "toml"]],
() => { () => {
@@ -88,9 +83,18 @@ export const FrontMatter: QuartzTransformerPlugin<Partial<Options>> = (userOpts)
const aliases = coerceToArray(coalesceAliases(data, ["aliases", "alias"])) const aliases = coerceToArray(coalesceAliases(data, ["aliases", "alias"]))
if (aliases) { if (aliases) {
data.aliases = aliases // frontmatter data.aliases = aliases // frontmatter
const slugs = (file.data.aliases = getAliasSlugs(aliases, argv, file)) file.data.aliases = getAliasSlugs(aliases)
allSlugs.push(...slugs) allSlugs.push(...file.data.aliases)
} }
if (data.permalink != null && data.permalink.toString() !== "") {
data.permalink = data.permalink.toString() as FullSlug
const aliases = file.data.aliases ?? []
aliases.push(data.permalink)
file.data.aliases = aliases
allSlugs.push(data.permalink)
}
const cssclasses = coerceToArray(coalesceAliases(data, ["cssclasses", "cssclass"])) const cssclasses = coerceToArray(coalesceAliases(data, ["cssclasses", "cssclass"]))
if (cssclasses) data.cssclasses = cssclasses if (cssclasses) data.cssclasses = cssclasses

View File

@@ -31,7 +31,7 @@ export const CreatedModifiedDate: QuartzTransformerPlugin<Partial<Options>> = (u
const opts = { ...defaultOptions, ...userOpts } const opts = { ...defaultOptions, ...userOpts }
return { return {
name: "CreatedModifiedDate", name: "CreatedModifiedDate",
markdownPlugins() { markdownPlugins(ctx) {
return [ return [
() => { () => {
let repo: Repository | undefined = undefined let repo: Repository | undefined = undefined
@@ -40,8 +40,8 @@ export const CreatedModifiedDate: QuartzTransformerPlugin<Partial<Options>> = (u
let modified: MaybeDate = undefined let modified: MaybeDate = undefined
let published: MaybeDate = undefined let published: MaybeDate = undefined
const fp = file.data.filePath! const fp = file.data.relativePath!
const fullFp = path.isAbsolute(fp) ? fp : path.posix.join(file.cwd, fp) const fullFp = path.posix.join(ctx.argv.directory, fp)
for (const source of opts.priority) { for (const source of opts.priority) {
if (source === "filesystem") { if (source === "filesystem") {
const st = await fs.promises.stat(fullFp) const st = await fs.promises.stat(fullFp)
@@ -56,11 +56,11 @@ export const CreatedModifiedDate: QuartzTransformerPlugin<Partial<Options>> = (u
// Get a reference to the main git repo. // Get a reference to the main git repo.
// It's either the same as the workdir, // It's either the same as the workdir,
// or 1+ level higher in case of a submodule/subtree setup // or 1+ level higher in case of a submodule/subtree setup
repo = Repository.discover(file.cwd) repo = Repository.discover(ctx.argv.directory)
} }
try { try {
modified ||= await repo.getFileLatestModifiedDateAsync(file.data.filePath!) modified ||= await repo.getFileLatestModifiedDateAsync(fullFp)
} catch { } catch {
console.log( console.log(
chalk.yellow( chalk.yellow(

View File

@@ -54,7 +54,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
textTransform(_ctx, src) { textTransform(_ctx, src) {
if (opts.wikilinks) { if (opts.wikilinks) {
src = src.toString() src = src.toString()
src = src.replaceAll(relrefRegex, (value, ...capture) => { src = src.replaceAll(relrefRegex, (_value, ...capture) => {
const [text, link] = capture const [text, link] = capture
return `[${text}](${link})` return `[${text}](${link})`
}) })
@@ -62,7 +62,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
if (opts.removePredefinedAnchor) { if (opts.removePredefinedAnchor) {
src = src.toString() src = src.toString()
src = src.replaceAll(predefinedHeadingIdRegex, (value, ...capture) => { src = src.replaceAll(predefinedHeadingIdRegex, (_value, ...capture) => {
const [headingText] = capture const [headingText] = capture
return headingText return headingText
}) })
@@ -70,7 +70,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
if (opts.removeHugoShortcode) { if (opts.removeHugoShortcode) {
src = src.toString() src = src.toString()
src = src.replaceAll(hugoShortcodeRegex, (value, ...capture) => { src = src.replaceAll(hugoShortcodeRegex, (_value, ...capture) => {
const [scContent] = capture const [scContent] = capture
return scContent return scContent
}) })
@@ -78,7 +78,7 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
if (opts.replaceFigureWithMdImg) { if (opts.replaceFigureWithMdImg) {
src = src.toString() src = src.toString()
src = src.replaceAll(figureTagRegex, (value, ...capture) => { src = src.replaceAll(figureTagRegex, (_value, ...capture) => {
const [src] = capture const [src] = capture
return `![](${src})` return `![](${src})`
}) })
@@ -86,11 +86,11 @@ export const OxHugoFlavouredMarkdown: QuartzTransformerPlugin<Partial<Options>>
if (opts.replaceOrgLatex) { if (opts.replaceOrgLatex) {
src = src.toString() src = src.toString()
src = src.replaceAll(inlineLatexRegex, (value, ...capture) => { src = src.replaceAll(inlineLatexRegex, (_value, ...capture) => {
const [eqn] = capture const [eqn] = capture
return `$${eqn}$` return `$${eqn}$`
}) })
src = src.replaceAll(blockLatexRegex, (value, ...capture) => { src = src.replaceAll(blockLatexRegex, (_value, ...capture) => {
const [eqn] = capture const [eqn] = capture
return `$$${eqn}$$` return `$$${eqn}$$`
}) })

View File

@@ -1,10 +1,8 @@
import { QuartzTransformerPlugin } from "../types" import { QuartzTransformerPlugin } from "../types"
import { PluggableList } from "unified" import { PluggableList } from "unified"
import { SKIP, visit } from "unist-util-visit" import { visit } from "unist-util-visit"
import { ReplaceFunction, findAndReplace as mdastFindReplace } from "mdast-util-find-and-replace" import { ReplaceFunction, findAndReplace as mdastFindReplace } from "mdast-util-find-and-replace"
import { Root, Html, Paragraph, Text, Link, Parent } from "mdast" import { Root, Html, Paragraph, Text, Link, Parent } from "mdast"
import { Node } from "unist"
import { VFile } from "vfile"
import { BuildVisitor } from "unist-util-visit" import { BuildVisitor } from "unist-util-visit"
export interface Options { export interface Options {
@@ -34,21 +32,10 @@ const defaultOptions: Options = {
const orRegex = new RegExp(/{{or:(.*?)}}/, "g") const orRegex = new RegExp(/{{or:(.*?)}}/, "g")
const TODORegex = new RegExp(/{{.*?\bTODO\b.*?}}/, "g") const TODORegex = new RegExp(/{{.*?\bTODO\b.*?}}/, "g")
const DONERegex = new RegExp(/{{.*?\bDONE\b.*?}}/, "g") const DONERegex = new RegExp(/{{.*?\bDONE\b.*?}}/, "g")
const videoRegex = new RegExp(/{{.*?\[\[video\]\].*?\:(.*?)}}/, "g")
const youtubeRegex = new RegExp(
/{{.*?\[\[video\]\].*?(https?:\/\/(?:www\.)?youtu(?:be\.com\/watch\?v=|\.be\/)([\w\-\_]*)(&(amp;)?[\w\?=]*)?)}}/,
"g",
)
// const multimediaRegex = new RegExp(/{{.*?\b(video|audio)\b.*?\:(.*?)}}/, "g")
const audioRegex = new RegExp(/{{.*?\[\[audio\]\].*?\:(.*?)}}/, "g")
const pdfRegex = new RegExp(/{{.*?\[\[pdf\]\].*?\:(.*?)}}/, "g")
const blockquoteRegex = new RegExp(/(\[\[>\]\])\s*(.*)/, "g") const blockquoteRegex = new RegExp(/(\[\[>\]\])\s*(.*)/, "g")
const roamHighlightRegex = new RegExp(/\^\^(.+)\^\^/, "g") const roamHighlightRegex = new RegExp(/\^\^(.+)\^\^/, "g")
const roamItalicRegex = new RegExp(/__(.+)__/, "g") const roamItalicRegex = new RegExp(/__(.+)__/, "g")
const tableRegex = new RegExp(/- {{.*?\btable\b.*?}}/, "g") /* TODO */
const attributeRegex = new RegExp(/\b\w+(?:\s+\w+)*::/, "g") /* TODO */
function isSpecialEmbed(node: Paragraph): boolean { function isSpecialEmbed(node: Paragraph): boolean {
if (node.children.length !== 2) return false if (node.children.length !== 2) return false
@@ -135,7 +122,7 @@ export const RoamFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options> | un
const plugins: PluggableList = [] const plugins: PluggableList = []
plugins.push(() => { plugins.push(() => {
return (tree: Root, file: VFile) => { return (tree: Root) => {
const replacements: [RegExp, ReplaceFunction][] = [] const replacements: [RegExp, ReplaceFunction][] = []
// Handle special embeds (audio, video, PDF) // Handle special embeds (audio, video, PDF)

View File

@@ -4,7 +4,7 @@ import { ProcessedContent } from "./vfile"
import { QuartzComponent } from "../components/types" import { QuartzComponent } from "../components/types"
import { FilePath } from "../util/path" import { FilePath } from "../util/path"
import { BuildCtx } from "../util/ctx" import { BuildCtx } from "../util/ctx"
import DepGraph from "../depgraph" import { VFile } from "vfile"
export interface PluginTypes { export interface PluginTypes {
transformers: QuartzTransformerPluginInstance[] transformers: QuartzTransformerPluginInstance[]
@@ -33,26 +33,33 @@ export type QuartzFilterPluginInstance = {
shouldPublish(ctx: BuildCtx, content: ProcessedContent): boolean shouldPublish(ctx: BuildCtx, content: ProcessedContent): boolean
} }
export type ChangeEvent = {
type: "add" | "change" | "delete"
path: FilePath
file?: VFile
}
export type QuartzEmitterPlugin<Options extends OptionType = undefined> = ( export type QuartzEmitterPlugin<Options extends OptionType = undefined> = (
opts?: Options, opts?: Options,
) => QuartzEmitterPluginInstance ) => QuartzEmitterPluginInstance
export type QuartzEmitterPluginInstance = { export type QuartzEmitterPluginInstance = {
name: string name: string
emit( emit: (
ctx: BuildCtx, ctx: BuildCtx,
content: ProcessedContent[], content: ProcessedContent[],
resources: StaticResources, resources: StaticResources,
): Promise<FilePath[]> | AsyncGenerator<FilePath> ) => Promise<FilePath[]> | AsyncGenerator<FilePath>
partialEmit?: (
ctx: BuildCtx,
content: ProcessedContent[],
resources: StaticResources,
changeEvents: ChangeEvent[],
) => Promise<FilePath[]> | AsyncGenerator<FilePath> | null
/** /**
* Returns the components (if any) that are used in rendering the page. * Returns the components (if any) that are used in rendering the page.
* This helps Quartz optimize the page by only including necessary resources * This helps Quartz optimize the page by only including necessary resources
* for components that are actually used. * for components that are actually used.
*/ */
getQuartzComponents?: (ctx: BuildCtx) => QuartzComponent[] getQuartzComponents?: (ctx: BuildCtx) => QuartzComponent[]
getDependencyGraph?(
ctx: BuildCtx,
content: ProcessedContent[],
resources: StaticResources,
): Promise<DepGraph<FilePath>>
externalResources?: ExternalResourcesFn externalResources?: ExternalResourcesFn
} }

View File

@@ -11,7 +11,7 @@ export async function emitContent(ctx: BuildCtx, content: ProcessedContent[]) {
const perf = new PerfTimer() const perf = new PerfTimer()
const log = new QuartzLogger(ctx.argv.verbose) const log = new QuartzLogger(ctx.argv.verbose)
log.start(`Emitting output files`) log.start(`Emitting files`)
let emittedFiles = 0 let emittedFiles = 0
const staticResources = getStaticResourcesFromPlugins(ctx) const staticResources = getStaticResourcesFromPlugins(ctx)
@@ -26,7 +26,7 @@ export async function emitContent(ctx: BuildCtx, content: ProcessedContent[]) {
if (ctx.argv.verbose) { if (ctx.argv.verbose) {
console.log(`[emit:${emitter.name}] ${file}`) console.log(`[emit:${emitter.name}] ${file}`)
} else { } else {
log.updateText(`Emitting output files: ${chalk.gray(file)}`) log.updateText(`${emitter.name} -> ${chalk.gray(file)}`)
} }
} }
} else { } else {
@@ -36,7 +36,7 @@ export async function emitContent(ctx: BuildCtx, content: ProcessedContent[]) {
if (ctx.argv.verbose) { if (ctx.argv.verbose) {
console.log(`[emit:${emitter.name}] ${file}`) console.log(`[emit:${emitter.name}] ${file}`)
} else { } else {
log.updateText(`Emitting output files: ${chalk.gray(file)}`) log.updateText(`${emitter.name} -> ${chalk.gray(file)}`)
} }
} }
} }

View File

@@ -7,12 +7,13 @@ import { Root as HTMLRoot } from "hast"
import { MarkdownContent, ProcessedContent } from "../plugins/vfile" import { MarkdownContent, ProcessedContent } from "../plugins/vfile"
import { PerfTimer } from "../util/perf" import { PerfTimer } from "../util/perf"
import { read } from "to-vfile" import { read } from "to-vfile"
import { FilePath, FullSlug, QUARTZ, slugifyFilePath } from "../util/path" import { FilePath, QUARTZ, slugifyFilePath } from "../util/path"
import path from "path" import path from "path"
import workerpool, { Promise as WorkerPromise } from "workerpool" import workerpool, { Promise as WorkerPromise } from "workerpool"
import { QuartzLogger } from "../util/log" import { QuartzLogger } from "../util/log"
import { trace } from "../util/trace" import { trace } from "../util/trace"
import { BuildCtx } from "../util/ctx" import { BuildCtx, WorkerSerializableBuildCtx } from "../util/ctx"
import chalk from "chalk"
export type QuartzMdProcessor = Processor<MDRoot, MDRoot, MDRoot> export type QuartzMdProcessor = Processor<MDRoot, MDRoot, MDRoot>
export type QuartzHtmlProcessor = Processor<undefined, MDRoot, HTMLRoot> export type QuartzHtmlProcessor = Processor<undefined, MDRoot, HTMLRoot>
@@ -175,21 +176,42 @@ export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<Pro
process.exit(1) process.exit(1)
} }
const mdPromises: WorkerPromise<[MarkdownContent[], FullSlug[]]>[] = [] const serializableCtx: WorkerSerializableBuildCtx = {
for (const chunk of chunks(fps, CHUNK_SIZE)) { buildId: ctx.buildId,
mdPromises.push(pool.exec("parseMarkdown", [ctx.buildId, argv, chunk])) argv: ctx.argv,
allSlugs: ctx.allSlugs,
allFiles: ctx.allFiles,
incremental: ctx.incremental,
} }
const mdResults: [MarkdownContent[], FullSlug[]][] =
await WorkerPromise.all(mdPromises).catch(errorHandler)
const childPromises: WorkerPromise<ProcessedContent[]>[] = [] const textToMarkdownPromises: WorkerPromise<MarkdownContent[]>[] = []
for (const [_, extraSlugs] of mdResults) { let processedFiles = 0
ctx.allSlugs.push(...extraSlugs) for (const chunk of chunks(fps, CHUNK_SIZE)) {
textToMarkdownPromises.push(pool.exec("parseMarkdown", [serializableCtx, chunk]))
} }
const mdResults: Array<MarkdownContent[]> = await Promise.all(
textToMarkdownPromises.map(async (promise) => {
const result = await promise
processedFiles += result.length
log.updateText(`text->markdown ${chalk.gray(`${processedFiles}/${fps.length}`)}`)
return result
}),
).catch(errorHandler)
const markdownToHtmlPromises: WorkerPromise<ProcessedContent[]>[] = []
processedFiles = 0
for (const [mdChunk, _] of mdResults) { for (const [mdChunk, _] of mdResults) {
childPromises.push(pool.exec("processHtml", [ctx.buildId, argv, mdChunk, ctx.allSlugs])) markdownToHtmlPromises.push(pool.exec("processHtml", [serializableCtx, mdChunk]))
} }
const results: ProcessedContent[][] = await WorkerPromise.all(childPromises).catch(errorHandler) const results: ProcessedContent[][] = await Promise.all(
markdownToHtmlPromises.map(async (promise) => {
const result = await promise
processedFiles += result.length
log.updateText(`markdown->html ${chalk.gray(`${processedFiles}/${fps.length}`)}`)
return result
}),
).catch(errorHandler)
res = results.flat() res = results.flat()
await pool.terminate() await pool.terminate()

View File

@@ -1,12 +1,12 @@
import { QuartzConfig } from "../cfg" import { QuartzConfig } from "../cfg"
import { FullSlug } from "./path" import { FilePath, FullSlug } from "./path"
export interface Argv { export interface Argv {
directory: string directory: string
verbose: boolean verbose: boolean
output: string output: string
serve: boolean serve: boolean
fastRebuild: boolean watch: boolean
port: number port: number
wsPort: number wsPort: number
remoteDevHost?: string remoteDevHost?: string
@@ -18,4 +18,8 @@ export interface BuildCtx {
argv: Argv argv: Argv
cfg: QuartzConfig cfg: QuartzConfig
allSlugs: FullSlug[] allSlugs: FullSlug[]
allFiles: FilePath[]
incremental: boolean
} }
export type WorkerSerializableBuildCtx = Omit<BuildCtx, "cfg">

View File

@@ -5,6 +5,7 @@ import { FileTrieNode } from "./fileTrie"
interface TestData { interface TestData {
title: string title: string
slug: string slug: string
filePath: string
} }
describe("FileTrie", () => { describe("FileTrie", () => {
@@ -26,11 +27,24 @@ describe("FileTrie", () => {
const data = { const data = {
title: "Test Title", title: "Test Title",
slug: "test", slug: "test",
filePath: "test.md",
} }
trie.add(data) trie.add(data)
assert.strictEqual(trie.children[0].displayName, "Test Title") assert.strictEqual(trie.children[0].displayName, "Test Title")
}) })
test("should be able to set displayName", () => {
const data = {
title: "Test Title",
slug: "test",
filePath: "test.md",
}
trie.add(data)
trie.children[0].displayName = "Modified"
assert.strictEqual(trie.children[0].displayName, "Modified")
})
}) })
describe("add", () => { describe("add", () => {
@@ -38,6 +52,7 @@ describe("FileTrie", () => {
const data = { const data = {
title: "Test", title: "Test",
slug: "test", slug: "test",
filePath: "test.md",
} }
trie.add(data) trie.add(data)
@@ -50,6 +65,7 @@ describe("FileTrie", () => {
const data = { const data = {
title: "Index", title: "Index",
slug: "index", slug: "index",
filePath: "index.md",
} }
trie.add(data) trie.add(data)
@@ -61,11 +77,13 @@ describe("FileTrie", () => {
const data1 = { const data1 = {
title: "Nested", title: "Nested",
slug: "folder/test", slug: "folder/test",
filePath: "folder/test.md",
} }
const data2 = { const data2 = {
title: "Really nested index", title: "Really nested index",
slug: "a/b/c/index", slug: "a/b/c/index",
filePath: "a/b/c/index.md",
} }
trie.add(data1) trie.add(data1)
@@ -92,8 +110,8 @@ describe("FileTrie", () => {
describe("filter", () => { describe("filter", () => {
test("should filter nodes based on condition", () => { test("should filter nodes based on condition", () => {
const data1 = { title: "Test1", slug: "test1" } const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
const data2 = { title: "Test2", slug: "test2" } const data2 = { title: "Test2", slug: "test2", filePath: "test2.md" }
trie.add(data1) trie.add(data1)
trie.add(data2) trie.add(data2)
@@ -106,8 +124,8 @@ describe("FileTrie", () => {
describe("map", () => { describe("map", () => {
test("should apply function to all nodes", () => { test("should apply function to all nodes", () => {
const data1 = { title: "Test1", slug: "test1" } const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
const data2 = { title: "Test2", slug: "test2" } const data2 = { title: "Test2", slug: "test2", filePath: "test2.md" }
trie.add(data1) trie.add(data1)
trie.add(data2) trie.add(data2)
@@ -121,12 +139,41 @@ describe("FileTrie", () => {
assert.strictEqual(trie.children[0].displayName, "Modified") assert.strictEqual(trie.children[0].displayName, "Modified")
assert.strictEqual(trie.children[1].displayName, "Modified") assert.strictEqual(trie.children[1].displayName, "Modified")
}) })
test("map over folders should work", () => {
const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
const data2 = {
title: "Test2",
slug: "a/b-with-space/test2",
filePath: "a/b with space/test2.md",
}
trie.add(data1)
trie.add(data2)
trie.map((node) => {
if (node.isFolder) {
node.displayName = `Folder: ${node.displayName}`
} else {
node.displayName = `File: ${node.displayName}`
}
})
assert.strictEqual(trie.children[0].displayName, "File: Test1")
assert.strictEqual(trie.children[1].displayName, "Folder: a")
assert.strictEqual(trie.children[1].children[0].displayName, "Folder: b with space")
assert.strictEqual(trie.children[1].children[0].children[0].displayName, "File: Test2")
})
}) })
describe("entries", () => { describe("entries", () => {
test("should return all entries", () => { test("should return all entries", () => {
const data1 = { title: "Test1", slug: "test1" } const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
const data2 = { title: "Test2", slug: "a/b/test2" } const data2 = {
title: "Test2",
slug: "a/b-with-space/test2",
filePath: "a/b with space/test2.md",
}
trie.add(data1) trie.add(data1)
trie.add(data2) trie.add(data2)
@@ -138,8 +185,8 @@ describe("FileTrie", () => {
["index", trie.data], ["index", trie.data],
["test1", data1], ["test1", data1],
["a/index", null], ["a/index", null],
["a/b/index", null], ["a/b-with-space/index", null],
["a/b/test2", data2], ["a/b-with-space/test2", data2],
], ],
) )
}) })
@@ -150,14 +197,17 @@ describe("FileTrie", () => {
const data1 = { const data1 = {
title: "Root", title: "Root",
slug: "index", slug: "index",
filePath: "index.md",
} }
const data2 = { const data2 = {
title: "Test", title: "Test",
slug: "folder/subfolder/test", slug: "folder/subfolder/test",
filePath: "folder/subfolder/test.md",
} }
const data3 = { const data3 = {
title: "Folder Index", title: "Folder Index",
slug: "abc/index", slug: "abc/index",
filePath: "abc/index.md",
} }
trie.add(data1) trie.add(data1)
@@ -176,9 +226,9 @@ describe("FileTrie", () => {
describe("sort", () => { describe("sort", () => {
test("should sort nodes according to sort function", () => { test("should sort nodes according to sort function", () => {
const data1 = { title: "A", slug: "a" } const data1 = { title: "A", slug: "a", filePath: "a.md" }
const data2 = { title: "B", slug: "b" } const data2 = { title: "B", slug: "b", filePath: "b.md" }
const data3 = { title: "C", slug: "c" } const data3 = { title: "C", slug: "c", filePath: "c.md" }
trie.add(data3) trie.add(data3)
trie.add(data1) trie.add(data1)

View File

@@ -4,6 +4,7 @@ import { FullSlug, joinSegments } from "./path"
interface FileTrieData { interface FileTrieData {
slug: string slug: string
title: string title: string
filePath: string
} }
export class FileTrieNode<T extends FileTrieData = ContentDetails> { export class FileTrieNode<T extends FileTrieData = ContentDetails> {
@@ -11,6 +12,11 @@ export class FileTrieNode<T extends FileTrieData = ContentDetails> {
children: Array<FileTrieNode<T>> children: Array<FileTrieNode<T>>
private slugSegments: string[] private slugSegments: string[]
// prefer showing the file path segment over the slug segment
// so that folders that dont have index files can be shown as is
// without dashes in the slug
private fileSegmentHint?: string
private displayNameOverride?: string
data: T | null data: T | null
constructor(segments: string[], data?: T) { constructor(segments: string[], data?: T) {
@@ -18,10 +24,18 @@ export class FileTrieNode<T extends FileTrieData = ContentDetails> {
this.slugSegments = segments this.slugSegments = segments
this.data = data ?? null this.data = data ?? null
this.isFolder = false this.isFolder = false
this.displayNameOverride = undefined
} }
get displayName(): string { get displayName(): string {
return this.data?.title ?? this.slugSegment ?? "" const nonIndexTitle = this.data?.title === "index" ? undefined : this.data?.title
return (
this.displayNameOverride ?? nonIndexTitle ?? this.fileSegmentHint ?? this.slugSegment ?? ""
)
}
set displayName(name: string) {
this.displayNameOverride = name
} }
get slug(): FullSlug { get slug(): FullSlug {
@@ -63,6 +77,9 @@ export class FileTrieNode<T extends FileTrieData = ContentDetails> {
// recursive case, we are not at the end of the path // recursive case, we are not at the end of the path
const child = const child =
this.children.find((c) => c.slugSegment === segment) ?? this.makeChild(path, undefined) this.children.find((c) => c.slugSegment === segment) ?? this.makeChild(path, undefined)
const fileParts = file.filePath.split("/")
child.fileSegmentHint = fileParts.at(-path.length)
child.insert(path.slice(1), file) child.insert(path.slice(1), file)
} }
} }

View File

@@ -1,9 +1,11 @@
import truncate from "ansi-truncate"
import readline from "readline" import readline from "readline"
export class QuartzLogger { export class QuartzLogger {
verbose: boolean verbose: boolean
private spinnerInterval: NodeJS.Timeout | undefined private spinnerInterval: NodeJS.Timeout | undefined
private spinnerText: string = "" private spinnerText: string = ""
private updateSuffix: string = ""
private spinnerIndex: number = 0 private spinnerIndex: number = 0
private readonly spinnerChars = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"] private readonly spinnerChars = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]
@@ -13,6 +15,7 @@ export class QuartzLogger {
start(text: string) { start(text: string) {
this.spinnerText = text this.spinnerText = text
if (this.verbose) { if (this.verbose) {
console.log(text) console.log(text)
} else { } else {
@@ -20,14 +23,22 @@ export class QuartzLogger {
this.spinnerInterval = setInterval(() => { this.spinnerInterval = setInterval(() => {
readline.clearLine(process.stdout, 0) readline.clearLine(process.stdout, 0)
readline.cursorTo(process.stdout, 0) readline.cursorTo(process.stdout, 0)
process.stdout.write(`${this.spinnerChars[this.spinnerIndex]} ${this.spinnerText}`)
const columns = process.stdout.columns || 80
let output = `${this.spinnerChars[this.spinnerIndex]} ${this.spinnerText}`
if (this.updateSuffix) {
output += `: ${this.updateSuffix}`
}
const truncated = truncate(output, columns)
process.stdout.write(truncated)
this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerChars.length this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerChars.length
}, 100) }, 20)
} }
} }
updateText(text: string) { updateText(text: string) {
this.spinnerText = text this.updateSuffix = text
} }
end(text?: string) { end(text?: string) {

View File

@@ -3,11 +3,13 @@ import { FontWeight, SatoriOptions } from "satori/wasm"
import { GlobalConfiguration } from "../cfg" import { GlobalConfiguration } from "../cfg"
import { QuartzPluginData } from "../plugins/vfile" import { QuartzPluginData } from "../plugins/vfile"
import { JSXInternal } from "preact/src/jsx" import { JSXInternal } from "preact/src/jsx"
import { FontSpecification, ThemeKey } from "./theme" import { FontSpecification, getFontSpecificationName, ThemeKey } from "./theme"
import path from "path" import path from "path"
import { QUARTZ } from "./path" import { QUARTZ } from "./path"
import { formatDate } from "../components/Date" import { formatDate, getDate } from "../components/Date"
import { getDate } from "../components/Date" import readingTime from "reading-time"
import { i18n } from "../i18n"
import chalk from "chalk"
const defaultHeaderWeight = [700] const defaultHeaderWeight = [700]
const defaultBodyWeight = [400] const defaultBodyWeight = [400]
@@ -25,29 +27,38 @@ export async function getSatoriFonts(headerFont: FontSpecification, bodyFont: Fo
const headerFontName = typeof headerFont === "string" ? headerFont : headerFont.name const headerFontName = typeof headerFont === "string" ? headerFont : headerFont.name
const bodyFontName = typeof bodyFont === "string" ? bodyFont : bodyFont.name const bodyFontName = typeof bodyFont === "string" ? bodyFont : bodyFont.name
// Fetch fonts for all weights // Fetch fonts for all weights and convert to satori format in one go
const headerFontPromises = headerWeights.map((weight) => fetchTtf(headerFontName, weight)) const headerFontPromises = headerWeights.map(async (weight) => {
const bodyFontPromises = bodyWeights.map((weight) => fetchTtf(bodyFontName, weight)) const data = await fetchTtf(headerFontName, weight)
if (!data) return null
return {
name: headerFontName,
data,
weight,
style: "normal" as const,
}
})
const [headerFontData, bodyFontData] = await Promise.all([ const bodyFontPromises = bodyWeights.map(async (weight) => {
const data = await fetchTtf(bodyFontName, weight)
if (!data) return null
return {
name: bodyFontName,
data,
weight,
style: "normal" as const,
}
})
const [headerFonts, bodyFonts] = await Promise.all([
Promise.all(headerFontPromises), Promise.all(headerFontPromises),
Promise.all(bodyFontPromises), Promise.all(bodyFontPromises),
]) ])
// Convert fonts to satori font format and return // Filter out any failed fetches and combine header and body fonts
const fonts: SatoriOptions["fonts"] = [ const fonts: SatoriOptions["fonts"] = [
...headerFontData.map((data, idx) => ({ ...headerFonts.filter((font): font is NonNullable<typeof font> => font !== null),
name: headerFontName, ...bodyFonts.filter((font): font is NonNullable<typeof font> => font !== null),
data,
weight: headerWeights[idx],
style: "normal" as const,
})),
...bodyFontData.map((data, idx) => ({
name: bodyFontName,
data,
weight: bodyWeights[idx],
style: "normal" as const,
})),
] ]
return fonts return fonts
@@ -60,10 +71,11 @@ export async function getSatoriFonts(headerFont: FontSpecification, bodyFont: Fo
* @returns `.ttf` file of google font * @returns `.ttf` file of google font
*/ */
export async function fetchTtf( export async function fetchTtf(
fontName: string, rawFontName: string,
weight: FontWeight, weight: FontWeight,
): Promise<Buffer<ArrayBufferLike>> { ): Promise<Buffer<ArrayBufferLike> | undefined> {
const cacheKey = `${fontName.replaceAll(" ", "-")}-${weight}` const fontName = rawFontName.replaceAll(" ", "+")
const cacheKey = `${fontName}-${weight}`
const cacheDir = path.join(QUARTZ, ".quartz-cache", "fonts") const cacheDir = path.join(QUARTZ, ".quartz-cache", "fonts")
const cachePath = path.join(cacheDir, cacheKey) const cachePath = path.join(cacheDir, cacheKey)
@@ -86,20 +98,19 @@ export async function fetchTtf(
const match = urlRegex.exec(css) const match = urlRegex.exec(css)
if (!match) { if (!match) {
throw new Error("Could not fetch font") console.log(
chalk.yellow(
`\nWarning: Failed to fetch font ${rawFontName} with weight ${weight}, got ${cssResponse.statusText}`,
),
)
return
} }
// fontData is an ArrayBuffer containing the .ttf file data // fontData is an ArrayBuffer containing the .ttf file data
const fontResponse = await fetch(match[1]) const fontResponse = await fetch(match[1])
const fontData = Buffer.from(await fontResponse.arrayBuffer()) const fontData = Buffer.from(await fontResponse.arrayBuffer())
await fs.mkdir(cacheDir, { recursive: true })
try { await fs.writeFile(cachePath, fontData)
await fs.mkdir(cacheDir, { recursive: true })
await fs.writeFile(cachePath, fontData)
} catch (error) {
console.warn(`Failed to cache font: ${error}`)
// Continue even if caching fails
}
return fontData return fontData
} }
@@ -172,7 +183,7 @@ export const defaultImage: SocialImageOptions["imageStructure"] = (
{ colorScheme }: UserOpts, { colorScheme }: UserOpts,
title: string, title: string,
description: string, description: string,
fonts: SatoriOptions["fonts"], _fonts: SatoriOptions["fonts"],
fileData: QuartzPluginData, fileData: QuartzPluginData,
) => { ) => {
const fontBreakPoint = 32 const fontBreakPoint = 32
@@ -183,8 +194,16 @@ export const defaultImage: SocialImageOptions["imageStructure"] = (
const rawDate = getDate(cfg, fileData) const rawDate = getDate(cfg, fileData)
const date = rawDate ? formatDate(rawDate, cfg.locale) : null const date = rawDate ? formatDate(rawDate, cfg.locale) : null
// Calculate reading time
const { minutes } = readingTime(fileData.text ?? "")
const readingTimeText = i18n(cfg.locale).components.contentMeta.readingTime({
minutes: Math.ceil(minutes),
})
// Get tags if available // Get tags if available
const tags = fileData.frontmatter?.tags ?? [] const tags = fileData.frontmatter?.tags ?? []
const bodyFont = getFontSpecificationName(cfg.theme.typography.body)
const headerFont = getFontSpecificationName(cfg.theme.typography.header)
return ( return (
<div <div
@@ -195,7 +214,7 @@ export const defaultImage: SocialImageOptions["imageStructure"] = (
width: "100%", width: "100%",
backgroundColor: cfg.theme.colors[colorScheme].light, backgroundColor: cfg.theme.colors[colorScheme].light,
padding: "2.5rem", padding: "2.5rem",
fontFamily: fonts[1].name, fontFamily: bodyFont,
}} }}
> >
{/* Header Section */} {/* Header Section */}
@@ -220,7 +239,7 @@ export const defaultImage: SocialImageOptions["imageStructure"] = (
display: "flex", display: "flex",
fontSize: 32, fontSize: 32,
color: cfg.theme.colors[colorScheme].gray, color: cfg.theme.colors[colorScheme].gray,
fontFamily: fonts[1].name, fontFamily: bodyFont,
}} }}
> >
{cfg.baseUrl} {cfg.baseUrl}
@@ -239,7 +258,7 @@ export const defaultImage: SocialImageOptions["imageStructure"] = (
style={{ style={{
margin: 0, margin: 0,
fontSize: useSmallerFont ? 64 : 72, fontSize: useSmallerFont ? 64 : 72,
fontFamily: fonts[0].name, fontFamily: headerFont,
fontWeight: 700, fontWeight: 700,
color: cfg.theme.colors[colorScheme].dark, color: cfg.theme.colors[colorScheme].dark,
lineHeight: 1.2, lineHeight: 1.2,
@@ -247,6 +266,7 @@ export const defaultImage: SocialImageOptions["imageStructure"] = (
WebkitBoxOrient: "vertical", WebkitBoxOrient: "vertical",
WebkitLineClamp: 2, WebkitLineClamp: 2,
overflow: "hidden", overflow: "hidden",
textOverflow: "ellipsis",
}} }}
> >
{title} {title}
@@ -268,8 +288,9 @@ export const defaultImage: SocialImageOptions["imageStructure"] = (
margin: 0, margin: 0,
display: "-webkit-box", display: "-webkit-box",
WebkitBoxOrient: "vertical", WebkitBoxOrient: "vertical",
WebkitLineClamp: 4, WebkitLineClamp: 5,
overflow: "hidden", overflow: "hidden",
textOverflow: "ellipsis",
}} }}
> >
{description} {description}
@@ -287,11 +308,12 @@ export const defaultImage: SocialImageOptions["imageStructure"] = (
borderTop: `1px solid ${cfg.theme.colors[colorScheme].lightgray}`, borderTop: `1px solid ${cfg.theme.colors[colorScheme].lightgray}`,
}} }}
> >
{/* Left side - Date */} {/* Left side - Date and Reading Time */}
<div <div
style={{ style={{
display: "flex", display: "flex",
alignItems: "center", alignItems: "center",
gap: "2rem",
color: cfg.theme.colors[colorScheme].gray, color: cfg.theme.colors[colorScheme].gray,
fontSize: 28, fontSize: 28,
}} }}
@@ -314,6 +336,20 @@ export const defaultImage: SocialImageOptions["imageStructure"] = (
{date} {date}
</div> </div>
)} )}
<div style={{ display: "flex", alignItems: "center" }}>
<svg
style={{ marginRight: "0.5rem" }}
width="28"
height="28"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
>
<circle cx="12" cy="12" r="10"></circle>
<polyline points="12 6 12 12 16 14"></polyline>
</svg>
{readingTimeText}
</div>
</div> </div>
{/* Right side - Tags */} {/* Right side - Tags */}

View File

@@ -260,7 +260,7 @@ export function endsWith(s: string, suffix: string): boolean {
return s === suffix || s.endsWith("/" + suffix) return s === suffix || s.endsWith("/" + suffix)
} }
function trimSuffix(s: string, suffix: string): string { export function trimSuffix(s: string, suffix: string): string {
if (endsWith(s, suffix)) { if (endsWith(s, suffix)) {
s = s.slice(0, -suffix.length) s = s.slice(0, -suffix.length)
} }

View File

@@ -135,9 +135,9 @@ ${stylesheet.join("\n\n")}
--highlight: ${theme.colors.lightMode.highlight}; --highlight: ${theme.colors.lightMode.highlight};
--textHighlight: ${theme.colors.lightMode.textHighlight}; --textHighlight: ${theme.colors.lightMode.textHighlight};
--headerFont: "${theme.typography.header}", ${DEFAULT_SANS_SERIF}; --headerFont: "${getFontSpecificationName(theme.typography.header)}", ${DEFAULT_SANS_SERIF};
--bodyFont: "${theme.typography.body}", ${DEFAULT_SANS_SERIF}; --bodyFont: "${getFontSpecificationName(theme.typography.body)}", ${DEFAULT_SANS_SERIF};
--codeFont: "${theme.typography.code}", ${DEFAULT_MONO}; --codeFont: "${getFontSpecificationName(theme.typography.code)}", ${DEFAULT_MONO};
} }
:root[saved-theme="dark"] { :root[saved-theme="dark"] {

View File

@@ -1,8 +1,8 @@
import sourceMapSupport from "source-map-support" import sourceMapSupport from "source-map-support"
sourceMapSupport.install(options) sourceMapSupport.install(options)
import cfg from "../quartz.config" import cfg from "../quartz.config"
import { Argv, BuildCtx } from "./util/ctx" import { BuildCtx, WorkerSerializableBuildCtx } from "./util/ctx"
import { FilePath, FullSlug } from "./util/path" import { FilePath } from "./util/path"
import { import {
createFileParser, createFileParser,
createHtmlProcessor, createHtmlProcessor,
@@ -14,35 +14,24 @@ import { MarkdownContent, ProcessedContent } from "./plugins/vfile"
// only called from worker thread // only called from worker thread
export async function parseMarkdown( export async function parseMarkdown(
buildId: string, partialCtx: WorkerSerializableBuildCtx,
argv: Argv,
fps: FilePath[], fps: FilePath[],
): Promise<[MarkdownContent[], FullSlug[]]> { ): Promise<MarkdownContent[]> {
// this is a hack
// we assume markdown parsers can add to `allSlugs`,
// but don't actually use them
const allSlugs: FullSlug[] = []
const ctx: BuildCtx = { const ctx: BuildCtx = {
buildId, ...partialCtx,
cfg, cfg,
argv,
allSlugs,
} }
return [await createFileParser(ctx, fps)(createMdProcessor(ctx)), allSlugs] return await createFileParser(ctx, fps)(createMdProcessor(ctx))
} }
// only called from worker thread // only called from worker thread
export function processHtml( export function processHtml(
buildId: string, partialCtx: WorkerSerializableBuildCtx,
argv: Argv,
mds: MarkdownContent[], mds: MarkdownContent[],
allSlugs: FullSlug[],
): Promise<ProcessedContent[]> { ): Promise<ProcessedContent[]> {
const ctx: BuildCtx = { const ctx: BuildCtx = {
buildId, ...partialCtx,
cfg, cfg,
argv,
allSlugs,
} }
return createMarkdownParser(ctx, mds)(createHtmlProcessor(ctx)) return createMarkdownParser(ctx, mds)(createHtmlProcessor(ctx))
} }

View File

@@ -11,6 +11,8 @@
"skipLibCheck": true, "skipLibCheck": true,
"allowSyntheticDefaultImports": true, "allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true, "forceConsistentCasingInFileNames": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"esModuleInterop": true, "esModuleInterop": true,
"jsx": "react-jsx", "jsx": "react-jsx",
"jsxImportSource": "preact" "jsxImportSource": "preact"