Initial commit
quartz/util/clone.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
import rfdc from "rfdc"

export const clone = rfdc()
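Note: a minimal usage sketch, not part of the commit. `rfdc` ("really fast deep clone") returns a cloning function, so `clone` copies nested structures without sharing references:

const original = { nested: { value: 1 } }
const copy = clone(original)
copy.nested.value = 2
console.log(original.nested.value) // still 1; the clone is fully independent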
quartz/util/ctx.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
import { QuartzConfig } from "../cfg"
import { QuartzPluginData } from "../plugins/vfile"
import { FileTrieNode } from "./fileTrie"
import { FilePath, FullSlug } from "./path"

export interface Argv {
  directory: string
  verbose: boolean
  output: string
  serve: boolean
  watch: boolean
  port: number
  wsPort: number
  remoteDevHost?: string
  concurrency?: number
}

export type BuildTimeTrieData = QuartzPluginData & {
  slug: string
  title: string
  filePath: string
}

export interface BuildCtx {
  buildId: string
  argv: Argv
  cfg: QuartzConfig
  allSlugs: FullSlug[]
  allFiles: FilePath[]
  trie?: FileTrieNode<BuildTimeTrieData>
  incremental: boolean
}

export function trieFromAllFiles(allFiles: QuartzPluginData[]): FileTrieNode<BuildTimeTrieData> {
  const trie = new FileTrieNode<BuildTimeTrieData>([])
  allFiles.forEach((file) => {
    if (file.frontmatter) {
      trie.add({
        ...file,
        slug: file.slug!,
        title: file.frontmatter.title,
        filePath: file.filePath!,
      })
    }
  })

  return trie
}

export type WorkerSerializableBuildCtx = Omit<BuildCtx, "cfg" | "trie">
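Note: a hedged sketch, not part of the commit, of how `trieFromAllFiles` is meant to be fed; only files that have parsed frontmatter contribute nodes. The object literals are illustrative stand-ins for real QuartzPluginData:

const files = [
  { slug: "index", filePath: "index.md", frontmatter: { title: "Home" } },
  { slug: "notes/a", filePath: "notes/a.md", frontmatter: { title: "A" } },
] as unknown as QuartzPluginData[]
const trie = trieFromAllFiles(files)
trie.findNode(["notes", "a"])?.displayName // "A"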
quartz/util/emoji.ts (new file, 47 lines)
@@ -0,0 +1,47 @@
const U200D = String.fromCharCode(8205)
const UFE0Fg = /\uFE0F/g

export function getIconCode(char: string) {
  return toCodePoint(char.indexOf(U200D) < 0 ? char.replace(UFE0Fg, "") : char)
}

function toCodePoint(unicodeSurrogates: string) {
  const r = []
  let c = 0,
    p = 0,
    i = 0

  while (i < unicodeSurrogates.length) {
    c = unicodeSurrogates.charCodeAt(i++)
    if (p) {
      r.push((65536 + ((p - 55296) << 10) + (c - 56320)).toString(16))
      p = 0
    } else if (55296 <= c && c <= 56319) {
      p = c
    } else {
      r.push(c.toString(16))
    }
  }
  return r.join("-")
}

type EmojiMap = {
  codePointToName: Record<string, string>
  nameToBase64: Record<string, string>
}

let emojimap: EmojiMap | undefined = undefined
export async function loadEmoji(code: string) {
  if (!emojimap) {
    const data = await import("./emojimap.json")
    emojimap = data
  }

  const name = emojimap.codePointToName[`${code.toUpperCase()}`]
  if (!name) throw new Error(`codepoint ${code} not found in map`)

  const b64 = emojimap.nameToBase64[name]
  if (!b64) throw new Error(`name ${name} not found in map`)

  return b64
}
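Note: a brief sketch, not part of the commit, of how these helpers compose. `getIconCode` turns an emoji character into the hyphen-joined codepoint key that `loadEmoji` then looks up in the bundled map (assuming the emoji exists in emojimap.json):

const code = getIconCode("👍") // "1f44d"
const b64 = await loadEmoji(code) // base64 image data for that emoji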
quartz/util/emojimap.json (new file, 7568 lines)
File diff suppressed because one or more lines are too long
quartz/util/escape.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
export const escapeHTML = (unsafe: string) => {
  return unsafe
    .replaceAll("&", "&amp;")
    .replaceAll("<", "&lt;")
    .replaceAll(">", "&gt;")
    .replaceAll('"', "&quot;")
    .replaceAll("'", "&#039;")
}

export const unescapeHTML = (html: string) => {
  return html
    .replaceAll("&amp;", "&")
    .replaceAll("&lt;", "<")
    .replaceAll("&gt;", ">")
    .replaceAll("&quot;", '"')
    .replaceAll("&#039;", "'")
}
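Note: a quick illustration, not part of the commit, of the round-trip behaviour. `escapeHTML` replaces the five HTML-significant characters with entities and `unescapeHTML` reverses it:

const escaped = escapeHTML('<a href="x">Tom & Jerry</a>')
// '&lt;a href=&quot;x&quot;&gt;Tom &amp; Jerry&lt;/a&gt;'
const roundTrip = unescapeHTML(escaped) // back to the original string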
quartz/util/fileTrie.test.ts (new file, 415 lines)
@@ -0,0 +1,415 @@
import test, { describe, beforeEach } from "node:test"
import assert from "node:assert"
import { FileTrieNode } from "./fileTrie"
import { FullSlug } from "./path"

interface TestData {
  title: string
  slug: string
  filePath: string
}

describe("FileTrie", () => {
  let trie: FileTrieNode<TestData>

  beforeEach(() => {
    trie = new FileTrieNode<TestData>([])
  })

  describe("constructor", () => {
    test("should create an empty trie", () => {
      assert.deepStrictEqual(trie.children, [])
      assert.strictEqual(trie.slug, "")
      assert.strictEqual(trie.displayName, "")
      assert.strictEqual(trie.data, null)
    })

    test("should set displayName from data title", () => {
      const data = {
        title: "Test Title",
        slug: "test",
        filePath: "test.md",
      }

      trie.add(data)
      assert.strictEqual(trie.children[0].displayName, "Test Title")
    })

    test("should be able to set displayName", () => {
      const data = {
        title: "Test Title",
        slug: "test",
        filePath: "test.md",
      }

      trie.add(data)
      trie.children[0].displayName = "Modified"
      assert.strictEqual(trie.children[0].displayName, "Modified")
    })
  })

  describe("add", () => {
    test("should add a file at root level", () => {
      const data = {
        title: "Test",
        slug: "test",
        filePath: "test.md",
      }

      trie.add(data)
      assert.strictEqual(trie.children.length, 1)
      assert.strictEqual(trie.children[0].slug, "test")
      assert.strictEqual(trie.children[0].data, data)
    })

    test("should handle index files", () => {
      const data = {
        title: "Index",
        slug: "index",
        filePath: "index.md",
      }

      trie.add(data)
      assert.strictEqual(trie.data, data)
      assert.strictEqual(trie.children.length, 0)
    })

    test("should add nested files", () => {
      const data1 = {
        title: "Nested",
        slug: "folder/test",
        filePath: "folder/test.md",
      }

      const data2 = {
        title: "Really nested index",
        slug: "a/b/c/index",
        filePath: "a/b/c/index.md",
      }

      trie.add(data1)
      trie.add(data2)
      assert.strictEqual(trie.children.length, 2)
      assert.strictEqual(trie.children[0].slug, "folder/index")
      assert.strictEqual(trie.children[0].children.length, 1)
      assert.strictEqual(trie.children[0].children[0].slug, "folder/test")
      assert.strictEqual(trie.children[0].children[0].data, data1)

      assert.strictEqual(trie.children[1].slug, "a/index")
      assert.strictEqual(trie.children[1].children.length, 1)
      assert.strictEqual(trie.children[1].data, null)

      assert.strictEqual(trie.children[1].children[0].slug, "a/b/index")
      assert.strictEqual(trie.children[1].children[0].children.length, 1)
      assert.strictEqual(trie.children[1].children[0].data, null)

      assert.strictEqual(trie.children[1].children[0].children[0].slug, "a/b/c/index")
      assert.strictEqual(trie.children[1].children[0].children[0].data, data2)
      assert.strictEqual(trie.children[1].children[0].children[0].children.length, 0)
    })
  })

  describe("filter", () => {
    test("should filter nodes based on condition", () => {
      const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
      const data2 = { title: "Test2", slug: "test2", filePath: "test2.md" }

      trie.add(data1)
      trie.add(data2)

      trie.filter((node) => node.slug !== "test1")
      assert.strictEqual(trie.children.length, 1)
      assert.strictEqual(trie.children[0].slug, "test2")
    })
  })

  describe("map", () => {
    test("should apply function to all nodes", () => {
      const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
      const data2 = { title: "Test2", slug: "test2", filePath: "test2.md" }

      trie.add(data1)
      trie.add(data2)

      trie.map((node) => {
        if (node.data) {
          node.data.title = "Modified"
        }
      })

      assert.strictEqual(trie.children[0].displayName, "Modified")
      assert.strictEqual(trie.children[1].displayName, "Modified")
    })

    test("map over folders should work", () => {
      const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
      const data2 = {
        title: "Test2",
        slug: "a/b-with-space/test2",
        filePath: "a/b with space/test2.md",
      }

      trie.add(data1)
      trie.add(data2)

      trie.map((node) => {
        if (node.isFolder) {
          node.displayName = `Folder: ${node.displayName}`
        } else {
          node.displayName = `File: ${node.displayName}`
        }
      })

      assert.strictEqual(trie.children[0].displayName, "File: Test1")
      assert.strictEqual(trie.children[1].displayName, "Folder: a")
      assert.strictEqual(trie.children[1].children[0].displayName, "Folder: b with space")
      assert.strictEqual(trie.children[1].children[0].children[0].displayName, "File: Test2")
    })
  })

  describe("entries", () => {
    test("should return all entries", () => {
      const data1 = { title: "Test1", slug: "test1", filePath: "test1.md" }
      const data2 = {
        title: "Test2",
        slug: "a/b-with-space/test2",
        filePath: "a/b with space/test2.md",
      }

      trie.add(data1)
      trie.add(data2)

      const entries = trie.entries()
      assert.deepStrictEqual(
        entries.map(([path, node]) => [path, node.data]),
        [
          ["index", trie.data],
          ["test1", data1],
          ["a/index", null],
          ["a/b-with-space/index", null],
          ["a/b-with-space/test2", data2],
        ],
      )
    })
  })

  describe("fromEntries", () => {
    test("nested", () => {
      const trie = FileTrieNode.fromEntries([
        ["index" as FullSlug, { title: "Root", slug: "index", filePath: "index.md" }],
        [
          "folder/file1" as FullSlug,
          { title: "File 1", slug: "folder/file1", filePath: "folder/file1.md" },
        ],
        [
          "folder/index" as FullSlug,
          { title: "Folder Index", slug: "folder/index", filePath: "folder/index.md" },
        ],
        [
          "folder/file2" as FullSlug,
          { title: "File 2", slug: "folder/file2", filePath: "folder/file2.md" },
        ],
        [
          "folder/folder2/index" as FullSlug,
          {
            title: "Subfolder Index",
            slug: "folder/folder2/index",
            filePath: "folder/folder2/index.md",
          },
        ],
      ])

      assert.strictEqual(trie.children.length, 1)
      assert.strictEqual(trie.children[0].slug, "folder/index")
      assert.strictEqual(trie.children[0].children.length, 3)
      assert.strictEqual(trie.children[0].children[0].slug, "folder/file1")
      assert.strictEqual(trie.children[0].children[1].slug, "folder/file2")
      assert.strictEqual(trie.children[0].children[2].slug, "folder/folder2/index")
      assert.strictEqual(trie.children[0].children[2].children.length, 0)
    })
  })

  describe("findNode", () => {
    test("should find root node with empty path", () => {
      const data = { title: "Root", slug: "index", filePath: "index.md" }
      trie.add(data)
      const found = trie.findNode([])
      assert.strictEqual(found, trie)
    })

    test("should find node at first level", () => {
      const data = { title: "Test", slug: "test", filePath: "test.md" }
      trie.add(data)
      const found = trie.findNode(["test"])
      assert.strictEqual(found?.data, data)
    })

    test("should find nested node", () => {
      const data = {
        title: "Nested",
        slug: "folder/subfolder/test",
        filePath: "folder/subfolder/test.md",
      }
      trie.add(data)
      const found = trie.findNode(["folder", "subfolder", "test"])
      assert.strictEqual(found?.data, data)

      // should find the folder and subfolder indexes too
      assert.strictEqual(
        trie.findNode(["folder", "subfolder", "index"]),
        trie.children[0].children[0],
      )
      assert.strictEqual(trie.findNode(["folder", "index"]), trie.children[0])
    })

    test("should return undefined for non-existent path", () => {
      const data = { title: "Test", slug: "test", filePath: "test.md" }
      trie.add(data)
      const found = trie.findNode(["nonexistent"])
      assert.strictEqual(found, undefined)
    })

    test("should return undefined for partial path", () => {
      const data = {
        title: "Nested",
        slug: "folder/subfolder/test",
        filePath: "folder/subfolder/test.md",
      }
      trie.add(data)
      const found = trie.findNode(["folder"])
      assert.strictEqual(found?.data, null)
    })
  })

  describe("getFolderPaths", () => {
    test("should return all folder paths", () => {
      const data1 = {
        title: "Root",
        slug: "index",
        filePath: "index.md",
      }
      const data2 = {
        title: "Test",
        slug: "folder/subfolder/test",
        filePath: "folder/subfolder/test.md",
      }
      const data3 = {
        title: "Folder Index",
        slug: "abc/index",
        filePath: "abc/index.md",
      }

      trie.add(data1)
      trie.add(data2)
      trie.add(data3)
      const paths = trie.getFolderPaths()

      assert.deepStrictEqual(paths, [
        "index",
        "folder/index",
        "folder/subfolder/index",
        "abc/index",
      ])
    })
  })

  describe("sort", () => {
    test("should sort nodes according to sort function", () => {
      const data1 = { title: "A", slug: "a", filePath: "a.md" }
      const data2 = { title: "B", slug: "b", filePath: "b.md" }
      const data3 = { title: "C", slug: "c", filePath: "c.md" }

      trie.add(data3)
      trie.add(data1)
      trie.add(data2)

      trie.sort((a, b) => a.slug.localeCompare(b.slug))
      assert.deepStrictEqual(
        trie.children.map((n) => n.slug),
        ["a", "b", "c"],
      )
    })
  })

  describe("pathToNode", () => {
    test("should return root node for empty path", () => {
      const data = { title: "Root", slug: "index", filePath: "index.md" }
      trie.add(data)
      const path = trie.ancestryChain([])
      assert.deepStrictEqual(path, [trie])
    })

    test("should return root node for index path", () => {
      const data = { title: "Root", slug: "index", filePath: "index.md" }
      trie.add(data)
      const path = trie.ancestryChain(["index"])
      assert.deepStrictEqual(path, [trie])
    })

    test("should return path to first level node", () => {
      const data = { title: "Test", slug: "test", filePath: "test.md" }
      trie.add(data)
      const path = trie.ancestryChain(["test"])
      assert.deepStrictEqual(path, [trie, trie.children[0]])
    })

    test("should return path to nested node", () => {
      const data = {
        title: "Nested",
        slug: "folder/subfolder/test",
        filePath: "folder/subfolder/test.md",
      }
      trie.add(data)
      const path = trie.ancestryChain(["folder", "subfolder", "test"])
      assert.deepStrictEqual(path, [
        trie,
        trie.children[0],
        trie.children[0].children[0],
        trie.children[0].children[0].children[0],
      ])
    })

    test("should return undefined for non-existent path", () => {
      const data = { title: "Test", slug: "test", filePath: "test.md" }
      trie.add(data)
      const path = trie.ancestryChain(["nonexistent"])
      assert.strictEqual(path, undefined)
    })

    test("should return file data for intermediate folders", () => {
      const data1 = {
        title: "Root",
        slug: "index",
        filePath: "index.md",
      }
      const data2 = {
        title: "Test",
        slug: "folder/subfolder/test",
        filePath: "folder/subfolder/test.md",
      }
      const data3 = {
        title: "Folder Index",
        slug: "folder/index",
        filePath: "folder/index.md",
      }

      trie.add(data1)
      trie.add(data2)
      trie.add(data3)
      const path = trie.ancestryChain(["folder", "subfolder"])
      assert.deepStrictEqual(path, [trie, trie.children[0], trie.children[0].children[0]])
      assert.strictEqual(path[1].data, data3)
    })

    test("should return path for partial path", () => {
      const data = {
        title: "Nested",
        slug: "folder/subfolder/test",
        filePath: "folder/subfolder/test.md",
      }
      trie.add(data)
      const path = trie.ancestryChain(["folder"])
      assert.deepStrictEqual(path, [trie, trie.children[0]])
    })
  })
})
quartz/util/fileTrie.ts (new file, 170 lines)
@@ -0,0 +1,170 @@
import { ContentDetails } from "../plugins/emitters/contentIndex"
import { FullSlug, joinSegments } from "./path"

interface FileTrieData {
  slug: string
  title: string
  filePath: string
}

export class FileTrieNode<T extends FileTrieData = ContentDetails> {
  isFolder: boolean
  children: Array<FileTrieNode<T>>

  private slugSegments: string[]
  // prefer showing the file path segment over the slug segment
  // so that folders that don't have index files can be shown as is
  // without dashes in the slug
  private fileSegmentHint?: string
  private displayNameOverride?: string
  data: T | null

  constructor(segments: string[], data?: T) {
    this.children = []
    this.slugSegments = segments
    this.data = data ?? null
    this.isFolder = false
    this.displayNameOverride = undefined
  }

  get displayName(): string {
    const nonIndexTitle = this.data?.title === "index" ? undefined : this.data?.title
    return (
      this.displayNameOverride ?? nonIndexTitle ?? this.fileSegmentHint ?? this.slugSegment ?? ""
    )
  }

  set displayName(name: string) {
    this.displayNameOverride = name
  }

  get slug(): FullSlug {
    const path = joinSegments(...this.slugSegments) as FullSlug
    if (this.isFolder) {
      return joinSegments(path, "index") as FullSlug
    }

    return path
  }

  get slugSegment(): string {
    return this.slugSegments[this.slugSegments.length - 1]
  }

  private makeChild(path: string[], file?: T) {
    const fullPath = [...this.slugSegments, path[0]]
    const child = new FileTrieNode<T>(fullPath, file)
    this.children.push(child)
    return child
  }

  private insert(path: string[], file: T) {
    if (path.length === 0) {
      throw new Error("path is empty")
    }

    // if we are inserting, we are a folder
    this.isFolder = true
    const segment = path[0]
    if (path.length === 1) {
      // base case, we are at the end of the path
      if (segment === "index") {
        this.data ??= file
      } else {
        this.makeChild(path, file)
      }
    } else if (path.length > 1) {
      // recursive case, we are not at the end of the path
      const child =
        this.children.find((c) => c.slugSegment === segment) ?? this.makeChild(path, undefined)

      const fileParts = file.filePath.split("/")
      child.fileSegmentHint = fileParts.at(-path.length)
      child.insert(path.slice(1), file)
    }
  }

  // Add new file to trie
  add(file: T) {
    this.insert(file.slug.split("/"), file)
  }

  findNode(path: string[]): FileTrieNode<T> | undefined {
    if (path.length === 0 || (path.length === 1 && path[0] === "index")) {
      return this
    }

    return this.children.find((c) => c.slugSegment === path[0])?.findNode(path.slice(1))
  }

  ancestryChain(path: string[]): Array<FileTrieNode<T>> | undefined {
    if (path.length === 0 || (path.length === 1 && path[0] === "index")) {
      return [this]
    }

    const child = this.children.find((c) => c.slugSegment === path[0])
    if (!child) {
      return undefined
    }

    const childPath = child.ancestryChain(path.slice(1))
    if (!childPath) {
      return undefined
    }

    return [this, ...childPath]
  }

  /**
   * Filter trie nodes. Behaves similar to `Array.prototype.filter()`, but modifies tree in place
   */
  filter(filterFn: (node: FileTrieNode<T>) => boolean) {
    this.children = this.children.filter(filterFn)
    this.children.forEach((child) => child.filter(filterFn))
  }

  /**
   * Map over trie nodes. Behaves similar to `Array.prototype.map()`, but modifies tree in place
   */
  map(mapFn: (node: FileTrieNode<T>) => void) {
    mapFn(this)
    this.children.forEach((child) => child.map(mapFn))
  }

  /**
   * Sort trie nodes according to sort/compare function
   */
  sort(sortFn: (a: FileTrieNode<T>, b: FileTrieNode<T>) => number) {
    this.children = this.children.sort(sortFn)
    this.children.forEach((e) => e.sort(sortFn))
  }

  static fromEntries<T extends FileTrieData>(entries: [FullSlug, T][]) {
    const trie = new FileTrieNode<T>([])
    entries.forEach(([, entry]) => trie.add(entry))
    return trie
  }

  /**
   * Get all entries in the trie
   * in a flat array including the full path and the node
   */
  entries(): [FullSlug, FileTrieNode<T>][] {
    const traverse = (node: FileTrieNode<T>): [FullSlug, FileTrieNode<T>][] => {
      const result: [FullSlug, FileTrieNode<T>][] = [[node.slug, node]]
      return result.concat(...node.children.map(traverse))
    }

    return traverse(this)
  }

  /**
   * Get all folder paths in the trie
   * @returns array containing folder state for trie
   */
  getFolderPaths() {
    return this.entries()
      .filter(([_, node]) => node.isFolder)
      .map(([path, _]) => path)
  }
}
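Note: a small usage sketch, not part of the commit, showing how the trie organises slugs into folders; intermediate folder nodes are created on demand, and `index` entries attach data to the folder itself:

const trie = new FileTrieNode<{ title: string; slug: string; filePath: string }>([])
trie.add({ title: "Welcome", slug: "index", filePath: "index.md" })
trie.add({ title: "Post", slug: "blog/post", filePath: "blog/post.md" })
trie.getFolderPaths() // ["index", "blog/index"]
trie.findNode(["blog", "post"])?.displayName // "Post"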
quartz/util/glob.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
import path from "path"
import { FilePath } from "./path"
import { globby } from "globby"

export function toPosixPath(fp: string): string {
  return fp.split(path.sep).join("/")
}

export async function glob(
  pattern: string,
  cwd: string,
  ignorePatterns: string[],
): Promise<FilePath[]> {
  const fps = (
    await globby(pattern, {
      cwd,
      ignore: ignorePatterns,
      gitignore: true,
    })
  ).map(toPosixPath)
  return fps as FilePath[]
}
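Note: a hypothetical call, not part of the commit, showing how the wrapper might be invoked; it lists matching files under a directory while honouring the ignore patterns and .gitignore, and normalises separators:

const fps = await glob("**/*.md", "./content", ["templates/**"])
// e.g. ["index.md", "notes/idea.md"]; always posix-style paths, even on Windows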
quartz/util/jsx.tsx (new file, 27 lines)
@@ -0,0 +1,27 @@
import { Components, Jsx, toJsxRuntime } from "hast-util-to-jsx-runtime"
import { Node, Root } from "hast"
import { Fragment, jsx, jsxs } from "preact/jsx-runtime"
import { trace } from "./trace"
import { type FilePath } from "./path"

const customComponents: Components = {
  table: (props) => (
    <div class="table-container">
      <table {...props} />
    </div>
  ),
}

export function htmlToJsx(fp: FilePath, tree: Node) {
  try {
    return toJsxRuntime(tree as Root, {
      Fragment,
      jsx: jsx as Jsx,
      jsxs: jsxs as Jsx,
      elementAttributeNameCase: "html",
      components: customComponents,
    })
  } catch (e) {
    trace(`Failed to parse Markdown in \`${fp}\` into JSX`, e as Error)
  }
}
quartz/util/lang.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
export function capitalize(s: string): string {
  return s.substring(0, 1).toUpperCase() + s.substring(1)
}

export function classNames(
  displayClass?: "mobile-only" | "desktop-only",
  ...classes: string[]
): string {
  if (displayClass) {
    classes.push(displayClass)
  }
  return classes.join(" ")
}
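Note: a short illustration, not part of the commit, of `classNames`; the optional display class is appended after any explicitly passed classes:

classNames(undefined, "popover-hint") // "popover-hint"
classNames("desktop-only", "graph", "outer") // "graph outer desktop-only"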
quartz/util/log.ts (new file, 58 lines)
@@ -0,0 +1,58 @@
import truncate from "ansi-truncate"
import readline from "readline"

export class QuartzLogger {
  verbose: boolean
  private spinnerInterval: NodeJS.Timeout | undefined
  private spinnerText: string = ""
  private updateSuffix: string = ""
  private spinnerIndex: number = 0
  private readonly spinnerChars = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]

  constructor(verbose: boolean) {
    const isInteractiveTerminal =
      process.stdout.isTTY && process.env.TERM !== "dumb" && !process.env.CI
    this.verbose = verbose || !isInteractiveTerminal
  }

  start(text: string) {
    this.spinnerText = text

    if (this.verbose) {
      console.log(text)
    } else {
      this.spinnerIndex = 0
      this.spinnerInterval = setInterval(() => {
        readline.clearLine(process.stdout, 0)
        readline.cursorTo(process.stdout, 0)

        const columns = process.stdout.columns || 80
        let output = `${this.spinnerChars[this.spinnerIndex]} ${this.spinnerText}`
        if (this.updateSuffix) {
          output += `: ${this.updateSuffix}`
        }

        const truncated = truncate(output, columns)
        process.stdout.write(truncated)
        this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerChars.length
      }, 50)
    }
  }

  updateText(text: string) {
    this.updateSuffix = text
  }

  end(text?: string) {
    if (!this.verbose && this.spinnerInterval) {
      clearInterval(this.spinnerInterval)
      this.spinnerInterval = undefined
      readline.clearLine(process.stdout, 0)
      readline.cursorTo(process.stdout, 0)
    }

    if (text) {
      console.log(text)
    }
  }
}
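Note: a minimal sketch, not part of the commit, of the intended lifecycle; in a non-verbose interactive terminal the logger animates a spinner, otherwise it falls back to plain console.log lines:

const logger = new QuartzLogger(false)
logger.start("Emitting output files")
logger.updateText("42/128 files") // shown as a suffix after the spinner text
logger.end("Done emitting files") // stops the spinner and prints a final line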
quartz/util/og.tsx (new file, 378 lines)
@@ -0,0 +1,378 @@
import { promises as fs } from "fs"
import { FontWeight, SatoriOptions } from "satori/wasm"
import { GlobalConfiguration } from "../cfg"
import { QuartzPluginData } from "../plugins/vfile"
import { JSXInternal } from "preact/src/jsx"
import { FontSpecification, getFontSpecificationName, ThemeKey } from "./theme"
import path from "path"
import { QUARTZ } from "./path"
import { formatDate, getDate } from "../components/Date"
import readingTime from "reading-time"
import { i18n } from "../i18n"
import { styleText } from "util"

const defaultHeaderWeight = [700]
const defaultBodyWeight = [400]

export async function getSatoriFonts(headerFont: FontSpecification, bodyFont: FontSpecification) {
  // Get all weights for header and body fonts
  const headerWeights: FontWeight[] = (
    typeof headerFont === "string"
      ? defaultHeaderWeight
      : (headerFont.weights ?? defaultHeaderWeight)
  ) as FontWeight[]
  const bodyWeights: FontWeight[] = (
    typeof bodyFont === "string" ? defaultBodyWeight : (bodyFont.weights ?? defaultBodyWeight)
  ) as FontWeight[]

  const headerFontName = typeof headerFont === "string" ? headerFont : headerFont.name
  const bodyFontName = typeof bodyFont === "string" ? bodyFont : bodyFont.name

  // Fetch fonts for all weights and convert to satori format in one go
  const headerFontPromises = headerWeights.map(async (weight) => {
    const data = await fetchTtf(headerFontName, weight)
    if (!data) return null
    return {
      name: headerFontName,
      data,
      weight,
      style: "normal" as const,
    }
  })

  const bodyFontPromises = bodyWeights.map(async (weight) => {
    const data = await fetchTtf(bodyFontName, weight)
    if (!data) return null
    return {
      name: bodyFontName,
      data,
      weight,
      style: "normal" as const,
    }
  })

  const [headerFonts, bodyFonts] = await Promise.all([
    Promise.all(headerFontPromises),
    Promise.all(bodyFontPromises),
  ])

  // Filter out any failed fetches and combine header and body fonts
  const fonts: SatoriOptions["fonts"] = [
    ...headerFonts.filter((font): font is NonNullable<typeof font> => font !== null),
    ...bodyFonts.filter((font): font is NonNullable<typeof font> => font !== null),
  ]

  return fonts
}

/**
 * Get the `.ttf` file of a google font
 * @param fontName name of google font
 * @param weight what font weight to fetch font
 * @returns `.ttf` file of google font
 */
export async function fetchTtf(
  rawFontName: string,
  weight: FontWeight,
): Promise<Buffer<ArrayBufferLike> | undefined> {
  const fontName = rawFontName.replaceAll(" ", "+")
  const cacheKey = `${fontName}-${weight}`
  const cacheDir = path.join(QUARTZ, ".quartz-cache", "fonts")
  const cachePath = path.join(cacheDir, cacheKey)

  // Check if font exists in cache
  try {
    await fs.access(cachePath)
    return fs.readFile(cachePath)
  } catch (error) {
    // ignore errors and fetch font
  }

  // Get css file from google fonts
  const cssResponse = await fetch(
    `https://fonts.googleapis.com/css2?family=${fontName}:wght@${weight}`,
  )
  const css = await cssResponse.text()

  // Extract .ttf url from css file
  const urlRegex = /url\((https:\/\/fonts.gstatic.com\/s\/.*?.ttf)\)/g
  const match = urlRegex.exec(css)

  if (!match) {
    console.log(
      styleText(
        "yellow",
        `\nWarning: Failed to fetch font ${rawFontName} with weight ${weight}, got ${cssResponse.statusText}`,
      ),
    )
    return
  }

  // fontData is an ArrayBuffer containing the .ttf file data
  const fontResponse = await fetch(match[1])
  const fontData = Buffer.from(await fontResponse.arrayBuffer())
  await fs.mkdir(cacheDir, { recursive: true })
  await fs.writeFile(cachePath, fontData)

  return fontData
}

export type SocialImageOptions = {
  /**
   * What color scheme to use for image generation (uses colors from config theme)
   */
  colorScheme: ThemeKey
  /**
   * Height to generate image with in pixels (should be around 630px)
   */
  height: number
  /**
   * Width to generate image with in pixels (should be around 1200px)
   */
  width: number
  /**
   * Whether to use the auto generated image for the root path ("/", when set to false) or the default og image (when set to true).
   */
  excludeRoot: boolean
  /**
   * JSX to use for generating image. See satori docs for more info (https://github.com/vercel/satori)
   */
  imageStructure: (
    options: ImageOptions & {
      userOpts: UserOpts
      iconBase64?: string
    },
  ) => JSXInternal.Element
}

export type UserOpts = Omit<SocialImageOptions, "imageStructure">

export type ImageOptions = {
  /**
   * what title to use as header in image
   */
  title: string
  /**
   * what description to use as body in image
   */
  description: string
  /**
   * header + body font to be used when generating satori image (as promise to work around sync in component)
   */
  fonts: SatoriOptions["fonts"]
  /**
   * `GlobalConfiguration` of quartz (used for theme/typography)
   */
  cfg: GlobalConfiguration
  /**
   * full file data of current page
   */
  fileData: QuartzPluginData
}

// This is the default template for generated social image.
export const defaultImage: SocialImageOptions["imageStructure"] = ({
  cfg,
  userOpts,
  title,
  description,
  fileData,
  iconBase64,
}) => {
  const { colorScheme } = userOpts
  const fontBreakPoint = 32
  const useSmallerFont = title.length > fontBreakPoint

  // Format date if available
  const rawDate = getDate(cfg, fileData)
  const date = rawDate ? formatDate(rawDate, cfg.locale) : null

  // Calculate reading time
  const { minutes } = readingTime(fileData.text ?? "")
  const readingTimeText = i18n(cfg.locale).components.contentMeta.readingTime({
    minutes: Math.ceil(minutes),
  })

  // Get tags if available
  const tags = fileData.frontmatter?.tags ?? []
  const bodyFont = getFontSpecificationName(cfg.theme.typography.body)
  const headerFont = getFontSpecificationName(cfg.theme.typography.header)

  return (
    <div
      style={{
        display: "flex",
        flexDirection: "column",
        height: "100%",
        width: "100%",
        backgroundColor: cfg.theme.colors[colorScheme].light,
        padding: "2.5rem",
        fontFamily: bodyFont,
      }}
    >
      {/* Header Section */}
      <div
        style={{
          display: "flex",
          alignItems: "center",
          gap: "1rem",
          marginBottom: "0.5rem",
        }}
      >
        {iconBase64 && (
          <img
            src={iconBase64}
            width={56}
            height={56}
            style={{
              borderRadius: "50%",
            }}
          />
        )}
        <div
          style={{
            display: "flex",
            fontSize: 32,
            color: cfg.theme.colors[colorScheme].gray,
            fontFamily: bodyFont,
          }}
        >
          {cfg.baseUrl}
        </div>
      </div>

      {/* Title Section */}
      <div
        style={{
          display: "flex",
          marginTop: "1rem",
          marginBottom: "1.5rem",
        }}
      >
        <h1
          style={{
            margin: 0,
            fontSize: useSmallerFont ? 64 : 72,
            fontFamily: headerFont,
            fontWeight: 700,
            color: cfg.theme.colors[colorScheme].dark,
            lineHeight: 1.2,
            display: "-webkit-box",
            WebkitBoxOrient: "vertical",
            WebkitLineClamp: 2,
            overflow: "hidden",
            textOverflow: "ellipsis",
          }}
        >
          {title}
        </h1>
      </div>

      {/* Description Section */}
      <div
        style={{
          display: "flex",
          flex: 1,
          fontSize: 36,
          color: cfg.theme.colors[colorScheme].darkgray,
          lineHeight: 1.4,
        }}
      >
        <p
          style={{
            margin: 0,
            display: "-webkit-box",
            WebkitBoxOrient: "vertical",
            WebkitLineClamp: 5,
            overflow: "hidden",
            textOverflow: "ellipsis",
          }}
        >
          {description}
        </p>
      </div>

      {/* Footer with Metadata */}
      <div
        style={{
          display: "flex",
          alignItems: "center",
          justifyContent: "space-between",
          marginTop: "2rem",
          paddingTop: "2rem",
          borderTop: `1px solid ${cfg.theme.colors[colorScheme].lightgray}`,
        }}
      >
        {/* Left side - Date and Reading Time */}
        <div
          style={{
            display: "flex",
            alignItems: "center",
            gap: "2rem",
            color: cfg.theme.colors[colorScheme].gray,
            fontSize: 28,
          }}
        >
          {date && (
            <div style={{ display: "flex", alignItems: "center" }}>
              <svg
                style={{ marginRight: "0.5rem" }}
                width="28"
                height="28"
                viewBox="0 0 24 24"
                fill="none"
                stroke="currentColor"
              >
                <rect x="3" y="4" width="18" height="18" rx="2" ry="2"></rect>
                <line x1="16" y1="2" x2="16" y2="6"></line>
                <line x1="8" y1="2" x2="8" y2="6"></line>
                <line x1="3" y1="10" x2="21" y2="10"></line>
              </svg>
              {date}
            </div>
          )}
          <div style={{ display: "flex", alignItems: "center" }}>
            <svg
              style={{ marginRight: "0.5rem" }}
              width="28"
              height="28"
              viewBox="0 0 24 24"
              fill="none"
              stroke="currentColor"
            >
              <circle cx="12" cy="12" r="10"></circle>
              <polyline points="12 6 12 12 16 14"></polyline>
            </svg>
            {readingTimeText}
          </div>
        </div>

        {/* Right side - Tags */}
        <div
          style={{
            display: "flex",
            gap: "0.5rem",
            flexWrap: "wrap",
            justifyContent: "flex-end",
            maxWidth: "60%",
          }}
        >
          {tags.slice(0, 3).map((tag: string) => (
            <div
              style={{
                display: "flex",
                padding: "0.5rem 1rem",
                backgroundColor: cfg.theme.colors[colorScheme].highlight,
                color: cfg.theme.colors[colorScheme].secondary,
                borderRadius: "10px",
                fontSize: 24,
              }}
            >
              #{tag}
            </div>
          ))}
        </div>
      </div>
    </div>
  )
}
quartz/util/path.test.ts (new file, 363 lines)
@@ -0,0 +1,363 @@
import test, { describe } from "node:test"
import * as path from "./path"
import assert from "node:assert"
import { FullSlug, TransformOptions, SimpleSlug } from "./path"

describe("typeguards", () => {
  test("isSimpleSlug", () => {
    assert(path.isSimpleSlug(""))
    assert(path.isSimpleSlug("abc"))
    assert(path.isSimpleSlug("abc/"))
    assert(path.isSimpleSlug("notindex"))
    assert(path.isSimpleSlug("notindex/def"))

    assert(!path.isSimpleSlug("//"))
    assert(!path.isSimpleSlug("index"))
    assert(!path.isSimpleSlug("https://example.com"))
    assert(!path.isSimpleSlug("/abc"))
    assert(!path.isSimpleSlug("abc/index"))
    assert(!path.isSimpleSlug("abc#anchor"))
    assert(!path.isSimpleSlug("abc?query=1"))
    assert(!path.isSimpleSlug("index.md"))
    assert(!path.isSimpleSlug("index.html"))
  })

  test("isRelativeURL", () => {
    assert(path.isRelativeURL("."))
    assert(path.isRelativeURL(".."))
    assert(path.isRelativeURL("./abc/def"))
    assert(path.isRelativeURL("./abc/def#an-anchor"))
    assert(path.isRelativeURL("./abc/def?query=1#an-anchor"))
    assert(path.isRelativeURL("../abc/def"))
    assert(path.isRelativeURL("./abc/def.pdf"))

    assert(!path.isRelativeURL("abc"))
    assert(!path.isRelativeURL("/abc/def"))
    assert(!path.isRelativeURL(""))
    assert(!path.isRelativeURL("./abc/def.html"))
    assert(!path.isRelativeURL("./abc/def.md"))
  })

  test("isAbsoluteURL", () => {
    assert(path.isAbsoluteURL("https://example.com"))
    assert(path.isAbsoluteURL("http://example.com"))
    assert(path.isAbsoluteURL("ftp://example.com/a/b/c"))
    assert(path.isAbsoluteURL("http://host/%25"))
    assert(path.isAbsoluteURL("file://host/twoslashes?more//slashes"))

    assert(!path.isAbsoluteURL("example.com/abc/def"))
    assert(!path.isAbsoluteURL("abc"))
  })

  test("isFullSlug", () => {
    assert(path.isFullSlug("index"))
    assert(path.isFullSlug("abc/def"))
    assert(path.isFullSlug("html.energy"))
    assert(path.isFullSlug("test.pdf"))

    assert(!path.isFullSlug("."))
    assert(!path.isFullSlug("./abc/def"))
    assert(!path.isFullSlug("../abc/def"))
    assert(!path.isFullSlug("abc/def#anchor"))
    assert(!path.isFullSlug("abc/def?query=1"))
    assert(!path.isFullSlug("note with spaces"))
  })

  test("isFilePath", () => {
    assert(path.isFilePath("content/index.md"))
    assert(path.isFilePath("content/test.png"))
    assert(!path.isFilePath("../test.pdf"))
    assert(!path.isFilePath("content/test"))
    assert(!path.isFilePath("./content/test"))
  })
})

describe("transforms", () => {
  function asserts<Inp, Out>(
    pairs: [string, string][],
    transform: (inp: Inp) => Out,
    checkPre: (x: any) => x is Inp,
    checkPost: (x: any) => x is Out,
  ) {
    for (const [inp, expected] of pairs) {
      assert(checkPre(inp), `${inp} wasn't the expected input type`)
      const actual = transform(inp)
      assert.strictEqual(
        actual,
        expected,
        `after transforming ${inp}, '${actual}' was not '${expected}'`,
      )
      assert(checkPost(actual), `${actual} wasn't the expected output type`)
    }
  }

  test("simplifySlug", () => {
    asserts(
      [
        ["index", "/"],
        ["abc", "abc"],
        ["abc/index", "abc/"],
        ["abc/def", "abc/def"],
      ],
      path.simplifySlug,
      path.isFullSlug,
      path.isSimpleSlug,
    )
  })

  test("slugifyFilePath", () => {
    asserts(
      [
        ["content/index.md", "content/index"],
        ["content/index.html", "content/index"],
        ["content/_index.md", "content/index"],
        ["/content/index.md", "content/index"],
        ["content/cool.png", "content/cool.png"],
        ["index.md", "index"],
        ["test.mp4", "test.mp4"],
        ["note with spaces.md", "note-with-spaces"],
        ["notes.with.dots.md", "notes.with.dots"],
        ["test/special chars?.md", "test/special-chars"],
        ["test/special chars #3.md", "test/special-chars-3"],
        ["cool/what about r&d?.md", "cool/what-about-r-and-d"],
      ],
      path.slugifyFilePath,
      path.isFilePath,
      path.isFullSlug,
    )
  })

  test("transformInternalLink", () => {
    asserts(
      [
        ["", "."],
        [".", "."],
        ["./", "./"],
        ["./index", "./"],
        ["./index#abc", "./#abc"],
        ["./index.html", "./"],
        ["./index.md", "./"],
        ["./index.css", "./index.css"],
        ["content", "./content"],
        ["content/test.md", "./content/test"],
        ["content/test.pdf", "./content/test.pdf"],
        ["./content/test.md", "./content/test"],
        ["../content/test.md", "../content/test"],
        ["tags/", "./tags/"],
        ["/tags/", "./tags/"],
        ["content/with spaces", "./content/with-spaces"],
        ["content/with spaces/index", "./content/with-spaces/"],
        ["content/with spaces#and Anchor!", "./content/with-spaces#and-anchor"],
      ],
      path.transformInternalLink,
      (_x: string): _x is string => true,
      path.isRelativeURL,
    )
  })

  test("pathToRoot", () => {
    asserts(
      [
        ["index", "."],
        ["abc", "."],
        ["abc/def", ".."],
        ["abc/def/ghi", "../.."],
        ["abc/def/index", "../.."],
      ],
      path.pathToRoot,
      path.isFullSlug,
      path.isRelativeURL,
    )
  })

  test("joinSegments", () => {
    assert.strictEqual(path.joinSegments("a", "b"), "a/b")
    assert.strictEqual(path.joinSegments("a/", "b"), "a/b")
    assert.strictEqual(path.joinSegments("a", "b/"), "a/b/")
    assert.strictEqual(path.joinSegments("a/", "b/"), "a/b/")

    // preserve leading and trailing slashes
    assert.strictEqual(path.joinSegments("/a", "b"), "/a/b")
    assert.strictEqual(path.joinSegments("/a/", "b"), "/a/b")
    assert.strictEqual(path.joinSegments("/a", "b/"), "/a/b/")
    assert.strictEqual(path.joinSegments("/a/", "b/"), "/a/b/")

    // lone slash
    assert.strictEqual(path.joinSegments("/a/", "b", "/"), "/a/b/")
    assert.strictEqual(path.joinSegments("a/", "b" + "/"), "a/b/")

    // works with protocol specifiers
    assert.strictEqual(path.joinSegments("https://example.com", "a"), "https://example.com/a")
    assert.strictEqual(path.joinSegments("https://example.com/", "a"), "https://example.com/a")
    assert.strictEqual(path.joinSegments("https://example.com", "a/"), "https://example.com/a/")
    assert.strictEqual(path.joinSegments("https://example.com/", "a/"), "https://example.com/a/")
  })
})

describe("link strategies", () => {
  const allSlugs = [
    "a/b/c",
    "a/b/d",
    "a/b/index",
    "e/f",
    "e/g/h",
    "index",
    "a/test.png",
  ] as FullSlug[]

  describe("absolute", () => {
    const opts: TransformOptions = {
      strategy: "absolute",
      allSlugs,
    }

    test("from a/b/c", () => {
      const cur = "a/b/c" as FullSlug
      assert.strictEqual(path.transformLink(cur, "a/b/d", opts), "../../a/b/d")
      assert.strictEqual(path.transformLink(cur, "a/b/index", opts), "../../a/b/")
      assert.strictEqual(path.transformLink(cur, "e/f", opts), "../../e/f")
      assert.strictEqual(path.transformLink(cur, "e/g/h", opts), "../../e/g/h")
      assert.strictEqual(path.transformLink(cur, "index", opts), "../../")
      assert.strictEqual(path.transformLink(cur, "index.png", opts), "../../index.png")
      assert.strictEqual(path.transformLink(cur, "index#abc", opts), "../../#abc")
      assert.strictEqual(path.transformLink(cur, "tag/test", opts), "../../tag/test")
      assert.strictEqual(path.transformLink(cur, "a/b/c#test", opts), "../../a/b/c#test")
      assert.strictEqual(path.transformLink(cur, "a/test.png", opts), "../../a/test.png")
    })

    test("from a/b/index", () => {
      const cur = "a/b/index" as FullSlug
      assert.strictEqual(path.transformLink(cur, "a/b/d", opts), "../../a/b/d")
      assert.strictEqual(path.transformLink(cur, "a/b", opts), "../../a/b")
      assert.strictEqual(path.transformLink(cur, "index", opts), "../../")
    })

    test("from index", () => {
      const cur = "index" as FullSlug
      assert.strictEqual(path.transformLink(cur, "index", opts), "./")
      assert.strictEqual(path.transformLink(cur, "a/b/c", opts), "./a/b/c")
      assert.strictEqual(path.transformLink(cur, "a/b/index", opts), "./a/b/")
    })
  })

  describe("shortest", () => {
    const opts: TransformOptions = {
      strategy: "shortest",
      allSlugs,
    }

    test("from a/b/c", () => {
      const cur = "a/b/c" as FullSlug
      assert.strictEqual(path.transformLink(cur, "d", opts), "../../a/b/d")
      assert.strictEqual(path.transformLink(cur, "h", opts), "../../e/g/h")
      assert.strictEqual(path.transformLink(cur, "a/b/index", opts), "../../a/b/")
      assert.strictEqual(path.transformLink(cur, "a/b/index.png", opts), "../../a/b/index.png")
      assert.strictEqual(path.transformLink(cur, "a/b/index#abc", opts), "../../a/b/#abc")
      assert.strictEqual(path.transformLink(cur, "index", opts), "../../")
      assert.strictEqual(path.transformLink(cur, "index.png", opts), "../../index.png")
      assert.strictEqual(path.transformLink(cur, "test.png", opts), "../../a/test.png")
      assert.strictEqual(path.transformLink(cur, "index#abc", opts), "../../#abc")
    })

    test("from a/b/index", () => {
      const cur = "a/b/index" as FullSlug
      assert.strictEqual(path.transformLink(cur, "d", opts), "../../a/b/d")
      assert.strictEqual(path.transformLink(cur, "h", opts), "../../e/g/h")
      assert.strictEqual(path.transformLink(cur, "a/b/index", opts), "../../a/b/")
      assert.strictEqual(path.transformLink(cur, "index", opts), "../../")
    })

    test("from index", () => {
      const cur = "index" as FullSlug
      assert.strictEqual(path.transformLink(cur, "d", opts), "./a/b/d")
      assert.strictEqual(path.transformLink(cur, "h", opts), "./e/g/h")
      assert.strictEqual(path.transformLink(cur, "a/b/index", opts), "./a/b/")
      assert.strictEqual(path.transformLink(cur, "index", opts), "./")
    })
  })

  describe("relative", () => {
    const opts: TransformOptions = {
      strategy: "relative",
      allSlugs,
    }

    test("from a/b/c", () => {
      const cur = "a/b/c" as FullSlug
      assert.strictEqual(path.transformLink(cur, "d", opts), "./d")
      assert.strictEqual(path.transformLink(cur, "index", opts), "./")
      assert.strictEqual(path.transformLink(cur, "../../../index", opts), "../../../")
      assert.strictEqual(path.transformLink(cur, "../../../index.png", opts), "../../../index.png")
      assert.strictEqual(path.transformLink(cur, "../../../index#abc", opts), "../../../#abc")
      assert.strictEqual(path.transformLink(cur, "../../../", opts), "../../../")
      assert.strictEqual(
        path.transformLink(cur, "../../../a/test.png", opts),
        "../../../a/test.png",
      )
      assert.strictEqual(path.transformLink(cur, "../../../e/g/h", opts), "../../../e/g/h")
      assert.strictEqual(path.transformLink(cur, "../../../e/g/h", opts), "../../../e/g/h")
      assert.strictEqual(path.transformLink(cur, "../../../e/g/h#abc", opts), "../../../e/g/h#abc")
    })

    test("from a/b/index", () => {
      const cur = "a/b/index" as FullSlug
      assert.strictEqual(path.transformLink(cur, "../../index", opts), "../../")
      assert.strictEqual(path.transformLink(cur, "../../", opts), "../../")
      assert.strictEqual(path.transformLink(cur, "../../e/g/h", opts), "../../e/g/h")
      assert.strictEqual(path.transformLink(cur, "c", opts), "./c")
    })

    test("from index", () => {
      const cur = "index" as FullSlug
      assert.strictEqual(path.transformLink(cur, "e/g/h", opts), "./e/g/h")
      assert.strictEqual(path.transformLink(cur, "a/b/index", opts), "./a/b/")
    })
  })
})

describe("resolveRelative", () => {
  test("from index", () => {
    assert.strictEqual(path.resolveRelative("index" as FullSlug, "index" as FullSlug), "./")
    assert.strictEqual(path.resolveRelative("index" as FullSlug, "abc" as FullSlug), "./abc")
    assert.strictEqual(
      path.resolveRelative("index" as FullSlug, "abc/def" as FullSlug),
      "./abc/def",
    )
    assert.strictEqual(
      path.resolveRelative("index" as FullSlug, "abc/def/ghi" as FullSlug),
      "./abc/def/ghi",
    )
  })

  test("from nested page", () => {
    assert.strictEqual(path.resolveRelative("abc/def" as FullSlug, "index" as FullSlug), "../")
    assert.strictEqual(path.resolveRelative("abc/def" as FullSlug, "abc" as FullSlug), "../abc")
    assert.strictEqual(
      path.resolveRelative("abc/def" as FullSlug, "abc/def" as FullSlug),
      "../abc/def",
    )
    assert.strictEqual(
      path.resolveRelative("abc/def" as FullSlug, "ghi/jkl" as FullSlug),
      "../ghi/jkl",
    )
  })

  test("with index paths", () => {
    assert.strictEqual(path.resolveRelative("abc/index" as FullSlug, "index" as FullSlug), "../")
    assert.strictEqual(
      path.resolveRelative("abc/def/index" as FullSlug, "index" as FullSlug),
      "../../",
    )
    assert.strictEqual(path.resolveRelative("index" as FullSlug, "abc/index" as FullSlug), "./abc/")
    assert.strictEqual(
      path.resolveRelative("abc/def" as FullSlug, "abc/index" as FullSlug),
      "../abc/",
    )
  })

  test("with simple slugs", () => {
    assert.strictEqual(path.resolveRelative("abc/def" as FullSlug, "" as SimpleSlug), "../")
    assert.strictEqual(path.resolveRelative("abc/def" as FullSlug, "ghi" as SimpleSlug), "../ghi")
    assert.strictEqual(path.resolveRelative("abc/def" as FullSlug, "ghi/" as SimpleSlug), "../ghi/")
  })
})
quartz/util/path.ts (new file, 318 lines)
@@ -0,0 +1,318 @@
|
||||
import { slug as slugAnchor } from "github-slugger"
|
||||
import type { Element as HastElement } from "hast"
|
||||
import { clone } from "./clone"
|
||||
|
||||
// this file must be isomorphic so it can't use node libs (e.g. path)
|
||||
|
||||
export const QUARTZ = "quartz"
|
||||
|
||||
/// Utility type to simulate nominal types in TypeScript
|
||||
type SlugLike<T> = string & { __brand: T }
|
||||
|
||||
/** Cannot be relative and must have a file extension. */
|
||||
export type FilePath = SlugLike<"filepath">
|
||||
export function isFilePath(s: string): s is FilePath {
|
||||
const validStart = !s.startsWith(".")
|
||||
return validStart && _hasFileExtension(s)
|
||||
}
|
||||
|
||||
/** Cannot be relative and may not have leading or trailing slashes. It can have `index` as it's last segment. Use this wherever possible is it's the most 'general' interpretation of a slug. */
|
||||
export type FullSlug = SlugLike<"full">
|
||||
export function isFullSlug(s: string): s is FullSlug {
|
||||
const validStart = !(s.startsWith(".") || s.startsWith("/"))
|
||||
const validEnding = !s.endsWith("/")
|
||||
return validStart && validEnding && !containsForbiddenCharacters(s)
|
||||
}
|
||||
|
||||
/** Shouldn't be a relative path and shouldn't have `/index` as an ending or a file extension. It _can_ however have a trailing slash to indicate a folder path. */
|
||||
export type SimpleSlug = SlugLike<"simple">
|
||||
export function isSimpleSlug(s: string): s is SimpleSlug {
|
||||
const validStart = !(s.startsWith(".") || (s.length > 1 && s.startsWith("/")))
|
||||
const validEnding = !endsWith(s, "index")
|
||||
return validStart && !containsForbiddenCharacters(s) && validEnding && !_hasFileExtension(s)
|
||||
}
|
||||
|
||||
/** Can be found on `href`s but can also be constructed for client-side navigation (e.g. search and graph) */
|
||||
export type RelativeURL = SlugLike<"relative">
|
||||
export function isRelativeURL(s: string): s is RelativeURL {
|
||||
const validStart = /^\.{1,2}/.test(s)
|
||||
const validEnding = !endsWith(s, "index")
|
||||
return validStart && validEnding && ![".md", ".html"].includes(getFileExtension(s) ?? "")
|
||||
}
|
||||
|
||||
export function isAbsoluteURL(s: string): boolean {
|
||||
try {
|
||||
new URL(s)
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
export function getFullSlug(window: Window): FullSlug {
|
||||
const res = window.document.body.dataset.slug! as FullSlug
|
||||
return res
|
||||
}
|
||||
|
||||
function sluggify(s: string): string {
|
||||
return s
|
||||
.split("/")
|
||||
.map((segment) =>
|
||||
segment
|
||||
.replace(/\s/g, "-")
|
||||
.replace(/&/g, "-and-")
|
||||
.replace(/%/g, "-percent")
|
||||
.replace(/\?/g, "")
|
||||
.replace(/#/g, ""),
|
||||
)
|
||||
.join("/") // always use / as sep
|
||||
.replace(/\/$/, "")
|
||||
}
|
||||
|
||||
export function slugifyFilePath(fp: FilePath, excludeExt?: boolean): FullSlug {
|
||||
fp = stripSlashes(fp) as FilePath
|
||||
let ext = getFileExtension(fp)
|
||||
const withoutFileExt = fp.replace(new RegExp(ext + "$"), "")
|
||||
if (excludeExt || [".md", ".html", undefined].includes(ext)) {
|
||||
ext = ""
|
||||
}
|
||||
|
||||
let slug = sluggify(withoutFileExt)
|
||||
|
||||
// treat _index as index
|
||||
if (endsWith(slug, "_index")) {
|
||||
slug = slug.replace(/_index$/, "index")
|
||||
}
|
||||
|
||||
return (slug + ext) as FullSlug
|
||||
}
|
||||
|
export function simplifySlug(fp: FullSlug): SimpleSlug {
  const res = stripSlashes(trimSuffix(fp, "index"), true)
  return (res.length === 0 ? "/" : res) as SimpleSlug
}

export function transformInternalLink(link: string): RelativeURL {
  let [fplike, anchor] = splitAnchor(decodeURI(link))

  const folderPath = isFolderPath(fplike)
  let segments = fplike.split("/").filter((x) => x.length > 0)
  let prefix = segments.filter(isRelativeSegment).join("/")
  let fp = segments.filter((seg) => !isRelativeSegment(seg) && seg !== "").join("/")

  // manually add ext here as we don't want to strip 'index' if it has an extension
  const simpleSlug = simplifySlug(slugifyFilePath(fp as FilePath))
  const joined = joinSegments(stripSlashes(prefix), stripSlashes(simpleSlug))
  const trail = folderPath ? "/" : ""
  const res = (_addRelativeToStart(joined) + trail + anchor) as RelativeURL
  return res
}
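// Illustrative only (not part of the original file): assumed internal links and the
// relative URLs they should resolve to under the logic above.
//   transformInternalLink("./notes/My Note.md") // "./notes/My-Note"
//   transformInternalLink("../posts/")          // "../posts/" (trailing slash preserved for folder paths)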
// from micromorph/src/utils.ts
// https://github.com/natemoo-re/micromorph/blob/main/src/utils.ts#L5
const _rebaseHtmlElement = (el: Element, attr: string, newBase: string | URL) => {
  const rebased = new URL(el.getAttribute(attr)!, newBase)
  el.setAttribute(attr, rebased.pathname + rebased.hash)
}
export function normalizeRelativeURLs(el: Element | Document, destination: string | URL) {
  el.querySelectorAll('[href=""], [href^="./"], [href^="../"]').forEach((item) =>
    _rebaseHtmlElement(item, "href", destination),
  )
  el.querySelectorAll('[src=""], [src^="./"], [src^="../"]').forEach((item) =>
    _rebaseHtmlElement(item, "src", destination),
  )
}

const _rebaseHastElement = (
  el: HastElement,
  attr: string,
  curBase: FullSlug,
  newBase: FullSlug,
) => {
  if (el.properties?.[attr]) {
    if (!isRelativeURL(String(el.properties[attr]))) {
      return
    }

    const rel = joinSegments(resolveRelative(curBase, newBase), "..", el.properties[attr] as string)
    el.properties[attr] = rel
  }
}

export function normalizeHastElement(rawEl: HastElement, curBase: FullSlug, newBase: FullSlug) {
  const el = clone(rawEl) // clone so we don't modify the original page
  _rebaseHastElement(el, "src", curBase, newBase)
  _rebaseHastElement(el, "href", curBase, newBase)
  if (el.children) {
    el.children = el.children.map((child) =>
      normalizeHastElement(child as HastElement, curBase, newBase),
    )
  }

  return el
}

// resolve /a/b/c to ../..
export function pathToRoot(slug: FullSlug): RelativeURL {
  let rootPath = slug
    .split("/")
    .filter((x) => x !== "")
    .slice(0, -1)
    .map((_) => "..")
    .join("/")

  if (rootPath.length === 0) {
    rootPath = "."
  }

  return rootPath as RelativeURL
}
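// Illustrative only (not part of the original file): pathToRoot emits one ".." per
// folder above the current slug.
//   pathToRoot("a/b/c" as FullSlug) // "../.."
//   pathToRoot("index" as FullSlug) // "."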
export function resolveRelative(current: FullSlug, target: FullSlug | SimpleSlug): RelativeURL {
  const res = joinSegments(pathToRoot(current), simplifySlug(target as FullSlug)) as RelativeURL
  return res
}

export function splitAnchor(link: string): [string, string] {
  let [fp, anchor] = link.split("#", 2)
  if (fp.endsWith(".pdf")) {
    return [fp, anchor === undefined ? "" : `#${anchor}`]
  }
  anchor = anchor === undefined ? "" : "#" + slugAnchor(anchor)
  return [fp, anchor]
}

export function slugTag(tag: string) {
  return tag
    .split("/")
    .map((tagSegment) => sluggify(tagSegment))
    .join("/")
}

export function joinSegments(...args: string[]): string {
  if (args.length === 0) {
    return ""
  }

  let joined = args
    .filter((segment) => segment !== "" && segment !== "/")
    .map((segment) => stripSlashes(segment))
    .join("/")

  // if the first segment starts with a slash, add it back
  if (args[0].startsWith("/")) {
    joined = "/" + joined
  }

  // if the last segment is a folder, add a trailing slash
  if (args[args.length - 1].endsWith("/")) {
    joined = joined + "/"
  }

  return joined
}
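// Illustrative only (not part of the original file): joinSegments normalizes interior
// slashes but preserves a leading slash on the first segment and a trailing slash on
// the last.
//   joinSegments("a", "b/")  // "a/b/"
//   joinSegments("/a", "b")  // "/a/b"
//   joinSegments("a/", "/b") // "a/b"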
export function getAllSegmentPrefixes(tags: string): string[] {
  const segments = tags.split("/")
  const results: string[] = []
  for (let i = 0; i < segments.length; i++) {
    results.push(segments.slice(0, i + 1).join("/"))
  }
  return results
}

export interface TransformOptions {
  strategy: "absolute" | "relative" | "shortest"
  allSlugs: FullSlug[]
}

export function transformLink(src: FullSlug, target: string, opts: TransformOptions): RelativeURL {
  let targetSlug = transformInternalLink(target)

  if (opts.strategy === "relative") {
    return targetSlug as RelativeURL
  } else {
    const folderTail = isFolderPath(targetSlug) ? "/" : ""
    const canonicalSlug = stripSlashes(targetSlug.slice(".".length))
    let [targetCanonical, targetAnchor] = splitAnchor(canonicalSlug)

    if (opts.strategy === "shortest") {
      // if the file name is unique, then it's just the filename
      const matchingFileNames = opts.allSlugs.filter((slug) => {
        const parts = slug.split("/")
        const fileName = parts.at(-1)
        return targetCanonical === fileName
      })

      // only one match, just use it
      if (matchingFileNames.length === 1) {
        const targetSlug = matchingFileNames[0]
        return (resolveRelative(src, targetSlug) + targetAnchor) as RelativeURL
      }
    }

    // if it's not unique, then it's the absolute path from the vault root
    return (joinSegments(pathToRoot(src), canonicalSlug) + folderTail) as RelativeURL
  }
}
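// Illustrative only (not part of the original file): assumed slugs showing how the
// "shortest" strategy resolves a unique file name relative to the source page and
// falls back to a vault-absolute path when the name is ambiguous or unmatched.
//   const opts = { strategy: "shortest", allSlugs: ["notes/unique-note", "other/page"] as FullSlug[] } as TransformOptions
//   transformLink("notes/a" as FullSlug, "unique-note", opts) // "../notes/unique-note" (single match, resolved relative to src)
//   transformLink("notes/a" as FullSlug, "other/page", opts)  // "../other/page"        (no file-name match, path from the vault root)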
// path helpers
export function isFolderPath(fplike: string): boolean {
  return (
    fplike.endsWith("/") ||
    endsWith(fplike, "index") ||
    endsWith(fplike, "index.md") ||
    endsWith(fplike, "index.html")
  )
}

export function endsWith(s: string, suffix: string): boolean {
  return s === suffix || s.endsWith("/" + suffix)
}

export function trimSuffix(s: string, suffix: string): string {
  if (endsWith(s, suffix)) {
    s = s.slice(0, -suffix.length)
  }
  return s
}

function containsForbiddenCharacters(s: string): boolean {
  return s.includes(" ") || s.includes("#") || s.includes("?") || s.includes("&")
}

function _hasFileExtension(s: string): boolean {
  return getFileExtension(s) !== undefined
}

export function getFileExtension(s: string): string | undefined {
  return s.match(/\.[A-Za-z0-9]+$/)?.[0]
}

function isRelativeSegment(s: string): boolean {
  return /^\.{0,2}$/.test(s)
}

export function stripSlashes(s: string, onlyStripPrefix?: boolean): string {
  if (s.startsWith("/")) {
    s = s.substring(1)
  }

  if (!onlyStripPrefix && s.endsWith("/")) {
    s = s.slice(0, -1)
  }

  return s
}

function _addRelativeToStart(s: string): string {
  if (s === "") {
    s = "."
  }

  if (!s.startsWith(".")) {
    s = joinSegments(".", s)
  }

  return s
}
19
quartz/util/perf.ts
Normal file
19
quartz/util/perf.ts
Normal file
@@ -0,0 +1,19 @@
import pretty from "pretty-time"
import { styleText } from "util"

export class PerfTimer {
  evts: { [key: string]: [number, number] }

  constructor() {
    this.evts = {}
    this.addEvent("start")
  }

  addEvent(evtName: string) {
    this.evts[evtName] = process.hrtime()
  }

  timeSince(evtName?: string): string {
    return styleText("yellow", pretty(process.hrtime(this.evts[evtName ?? "start"])))
  }
}
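// Illustrative only (not part of the original file): a minimal sketch of how PerfTimer
// might be used during a build step.
//   const perf = new PerfTimer()
//   // ... do some work ...
//   perf.addEvent("emit")
//   console.log(`emitted in ${perf.timeSince("emit")}, total ${perf.timeSince()}`)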
3
quartz/util/random.ts
Normal file
3
quartz/util/random.ts
Normal file
@@ -0,0 +1,3 @@
export function randomIdNonSecure() {
  return Math.random().toString(36).substring(2, 8)
}
74
quartz/util/resources.tsx
Normal file
74
quartz/util/resources.tsx
Normal file
@@ -0,0 +1,74 @@
import { randomUUID } from "crypto"
import { JSX } from "preact/jsx-runtime"
import { QuartzPluginData } from "../plugins/vfile"

export type JSResource = {
  loadTime: "beforeDOMReady" | "afterDOMReady"
  moduleType?: "module"
  spaPreserve?: boolean
} & (
  | {
      src: string
      contentType: "external"
    }
  | {
      script: string
      contentType: "inline"
    }
)

export type CSSResource = {
  content: string
  inline?: boolean
  spaPreserve?: boolean
}

export function JSResourceToScriptElement(resource: JSResource, preserve?: boolean): JSX.Element {
  const scriptType = resource.moduleType ?? "application/javascript"
  const spaPreserve = preserve ?? resource.spaPreserve
  if (resource.contentType === "external") {
    return (
      <script key={resource.src} src={resource.src} type={scriptType} spa-preserve={spaPreserve} />
    )
  } else {
    const content = resource.script
    return (
      <script
        key={randomUUID()}
        type={scriptType}
        spa-preserve={spaPreserve}
        dangerouslySetInnerHTML={{ __html: content }}
      ></script>
    )
  }
}

export function CSSResourceToStyleElement(resource: CSSResource, preserve?: boolean): JSX.Element {
  const spaPreserve = preserve ?? resource.spaPreserve
  if (resource.inline ?? false) {
    return <style>{resource.content}</style>
  } else {
    return (
      <link
        key={resource.content}
        href={resource.content}
        rel="stylesheet"
        type="text/css"
        spa-preserve={spaPreserve}
      />
    )
  }
}

export interface StaticResources {
  css: CSSResource[]
  js: JSResource[]
  additionalHead: (JSX.Element | ((pageData: QuartzPluginData) => JSX.Element))[]
}

export type StringResource = string | string[] | undefined
export function concatenateResources(...resources: StringResource[]): StringResource {
  return resources
    .filter((resource): resource is string | string[] => resource !== undefined)
    .flat()
}
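// Illustrative only (not part of the original file): assumed resource declarations
// showing the two JSResource variants and an external stylesheet.
//   const external: JSResource = { loadTime: "afterDOMReady", contentType: "external", src: "/static/script.js" }
//   const inline: JSResource = { loadTime: "beforeDOMReady", contentType: "inline", script: "console.log('hi')" }
//   const css: CSSResource = { content: "/index.css" }
//   // JSResourceToScriptElement(external) renders <script src="/static/script.js" ...>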
18
quartz/util/sourcemap.ts
Normal file
18
quartz/util/sourcemap.ts
Normal file
@@ -0,0 +1,18 @@
import fs from "fs"
import sourceMapSupport from "source-map-support"
import { fileURLToPath } from "url"

export const options: sourceMapSupport.Options = {
  // source map hack to get around query param
  // import cache busting
  retrieveSourceMap(source) {
    if (source.includes(".quartz-cache")) {
      let realSource = fileURLToPath(source.split("?", 2)[0] + ".map")
      return {
        map: fs.readFileSync(realSource, "utf8"),
      }
    } else {
      return null
    }
  },
}
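// Illustrative only (not part of the original file): these options are presumably passed
// to source-map-support's install() at process startup, e.g.
//   import sourceMapSupport from "source-map-support"
//   import { options } from "./util/sourcemap"
//   sourceMapSupport.install(options)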
176
quartz/util/theme.ts
Normal file
176
quartz/util/theme.ts
Normal file
@@ -0,0 +1,176 @@
export interface ColorScheme {
  light: string
  lightgray: string
  gray: string
  darkgray: string
  dark: string
  secondary: string
  tertiary: string
  highlight: string
  textHighlight: string
}

interface Colors {
  lightMode: ColorScheme
  darkMode: ColorScheme
}

export type FontSpecification =
  | string
  | {
      name: string
      weights?: number[]
      includeItalic?: boolean
    }

export interface Theme {
  typography: {
    title?: FontSpecification
    header: FontSpecification
    body: FontSpecification
    code: FontSpecification
  }
  cdnCaching: boolean
  colors: Colors
  fontOrigin: "googleFonts" | "local"
}

export type ThemeKey = keyof Colors

const DEFAULT_SANS_SERIF =
  'system-ui, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"'
const DEFAULT_MONO = "ui-monospace, SFMono-Regular, SF Mono, Menlo, monospace"

export function getFontSpecificationName(spec: FontSpecification): string {
  if (typeof spec === "string") {
    return spec
  }

  return spec.name
}

function formatFontSpecification(
  type: "title" | "header" | "body" | "code",
  spec: FontSpecification,
) {
  if (typeof spec === "string") {
    spec = { name: spec }
  }

  const defaultIncludeWeights = type === "header" ? [400, 700] : [400, 600]
  const defaultIncludeItalic = type === "body"
  const weights = spec.weights ?? defaultIncludeWeights
  const italic = spec.includeItalic ?? defaultIncludeItalic

  const features: string[] = []
  if (italic) {
    features.push("ital")
  }

  if (weights.length > 1) {
    const weightSpec = italic
      ? weights
          .flatMap((w) => [`0,${w}`, `1,${w}`])
          .sort()
          .join(";")
      : weights.join(";")

    features.push(`wght@${weightSpec}`)
  }

  if (features.length > 0) {
    return `${spec.name}:${features.join(",")}`
  }

  return spec.name
}

export function googleFontHref(theme: Theme) {
  const { header, body, code } = theme.typography
  const headerFont = formatFontSpecification("header", header)
  const bodyFont = formatFontSpecification("body", body)
  const codeFont = formatFontSpecification("code", code)

  return `https://fonts.googleapis.com/css2?family=${headerFont}&family=${bodyFont}&family=${codeFont}&display=swap`
}

export function googleFontSubsetHref(theme: Theme, text: string) {
  const title = theme.typography.title || theme.typography.header
  const titleFont = formatFontSpecification("title", title)

  return `https://fonts.googleapis.com/css2?family=${titleFont}&text=${encodeURIComponent(text)}&display=swap`
}

export interface GoogleFontFile {
  url: string
  filename: string
  extension: string
}

const fontMimeMap: Record<string, string> = {
  truetype: "ttf",
  woff: "woff",
  woff2: "woff2",
  opentype: "otf",
}

export async function processGoogleFonts(
  stylesheet: string,
  baseUrl: string,
): Promise<{
  processedStylesheet: string
  fontFiles: GoogleFontFile[]
}> {
  const fontSourceRegex =
    /url\((https:\/\/fonts.gstatic.com\/.+(?:\/|(?:kit=))(.+?)[.&].+?)\)\sformat\('(\w+?)'\);/g
  const fontFiles: GoogleFontFile[] = []
  let processedStylesheet = stylesheet

  let match
  while ((match = fontSourceRegex.exec(stylesheet)) !== null) {
    const url = match[1]
    const filename = match[2]
    const extension = fontMimeMap[match[3].toLowerCase()]
    const staticUrl = `https://${baseUrl}/static/fonts/${filename}.${extension}`

    processedStylesheet = processedStylesheet.replace(url, staticUrl)
    fontFiles.push({ url, filename, extension })
  }

  return { processedStylesheet, fontFiles }
}

export function joinStyles(theme: Theme, ...stylesheet: string[]) {
  return `
${stylesheet.join("\n\n")}

:root {
  --light: ${theme.colors.lightMode.light};
  --lightgray: ${theme.colors.lightMode.lightgray};
  --gray: ${theme.colors.lightMode.gray};
  --darkgray: ${theme.colors.lightMode.darkgray};
  --dark: ${theme.colors.lightMode.dark};
  --secondary: ${theme.colors.lightMode.secondary};
  --tertiary: ${theme.colors.lightMode.tertiary};
  --highlight: ${theme.colors.lightMode.highlight};
  --textHighlight: ${theme.colors.lightMode.textHighlight};

  --titleFont: "${getFontSpecificationName(theme.typography.title || theme.typography.header)}", ${DEFAULT_SANS_SERIF};
  --headerFont: "${getFontSpecificationName(theme.typography.header)}", ${DEFAULT_SANS_SERIF};
  --bodyFont: "${getFontSpecificationName(theme.typography.body)}", ${DEFAULT_SANS_SERIF};
  --codeFont: "${getFontSpecificationName(theme.typography.code)}", ${DEFAULT_MONO};
}

:root[saved-theme="dark"] {
  --light: ${theme.colors.darkMode.light};
  --lightgray: ${theme.colors.darkMode.lightgray};
  --gray: ${theme.colors.darkMode.gray};
  --darkgray: ${theme.colors.darkMode.darkgray};
  --dark: ${theme.colors.darkMode.dark};
  --secondary: ${theme.colors.darkMode.secondary};
  --tertiary: ${theme.colors.darkMode.tertiary};
  --highlight: ${theme.colors.darkMode.highlight};
  --textHighlight: ${theme.colors.darkMode.textHighlight};
}
`
}
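// Illustrative only (not part of the original file): an assumed typography block and
// the kind of Google Fonts URL googleFontHref would produce for it, given the default
// weights above (header 400;700, body/code 400;600, italics only for body).
//   const typography = { header: "Schibsted Grotesk", body: { name: "Source Sans Pro" }, code: "IBM Plex Mono" }
//   // googleFontHref({ ...someTheme, typography }) yields something like:
//   // https://fonts.googleapis.com/css2?family=Schibsted Grotesk:wght@400;700&family=Source Sans Pro:ital,wght@0,400;0,600;1,400;1,600&family=IBM Plex Mono:wght@400;600&display=swap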
43
quartz/util/trace.ts
Normal file
43
quartz/util/trace.ts
Normal file
@@ -0,0 +1,43 @@
import { styleText } from "util"
import process from "process"
import { isMainThread } from "workerpool"

const rootFile = /.*at file:/
export function trace(msg: string, err: Error) {
  let stack = err.stack ?? ""

  const lines: string[] = []

  lines.push("")
  lines.push(
    "\n" +
      styleText(["bgRed", "black", "bold"], " ERROR ") +
      "\n\n" +
      styleText("red", ` ${msg}`) +
      (err.message.length > 0 ? `: ${err.message}` : ""),
  )

  let reachedEndOfLegibleTrace = false
  for (const line of stack.split("\n").slice(1)) {
    if (reachedEndOfLegibleTrace) {
      break
    }

    if (!line.includes("node_modules")) {
      lines.push(` ${line}`)
      if (rootFile.test(line)) {
        reachedEndOfLegibleTrace = true
      }
    }
  }

  const traceMsg = lines.join("\n")
  if (!isMainThread) {
    // gather lines and throw
    throw new Error(traceMsg)
  } else {
    // print and exit
    console.error(traceMsg)
    process.exit(1)
  }
}