refactor: moved plugins to own folders
This commit is contained in:
parent
a212df96dd
commit
63bd438e45
6 changed files with 235 additions and 223 deletions
|
|
@ -1,172 +1,22 @@
|
|||
const markdownIt = require("markdown-it");
|
||||
const markdownItContainer = require("markdown-it-container");
|
||||
const markdownItFootnote = require("markdown-it-footnote");
|
||||
const markdownItMermaid = require('markdown-it-mermaid').default
|
||||
const markdownItTaskLists = require('markdown-it-task-lists');
|
||||
const markdownItMeasurements = require('./lib/measurements/plugin');
|
||||
const markdownItHashtag = require('./lib/hashtags/plugin');
|
||||
const markdownItStripTrailingHashtags = require('./lib/strip-trailing-hashtags/plugin');
|
||||
const markdownItDetails = require('./lib/details/plugin');
|
||||
const { extractTags, expandHierarchicalTags, getPostTags, getRecipeTags } = require('./lib/tags/plugin');
|
||||
const { cacheBustingPlugin } = require('./lib/cache-busting/plugin');
|
||||
const syntaxHighlight = require("@11ty/eleventy-plugin-syntaxhighlight");
|
||||
const fs = require("fs");
|
||||
const crypto = require("crypto");
|
||||
const path = require("path");
|
||||
const { DateTime } = require("luxon");
|
||||
const siteConfig = require("./_data/site.js");
|
||||
|
||||
// Memoized content hashes, keyed by "<dir>/<file>".
const fileHashCache = {};

/**
 * Return an 8-character MD5 content hash for `file` inside `dir`
 * (relative to the project root), caching the result per path.
 * Logs a warning and returns null when the file cannot be read.
 */
const getFileHash = (file, dir = "css") => {
  const key = `${dir}/${file}`;
  if (fileHashCache[key]) {
    return fileHashCache[key];
  }

  const filePath = path.join(__dirname, dir, file);
  try {
    const source = fs.readFileSync(filePath, "utf-8");
    const digest = crypto.createHash("md5").update(source).digest("hex");
    const shortHash = digest.slice(0, 8);
    fileHashCache[key] = shortHash;
    return shortHash;
  } catch (e) {
    console.warn(`Could not hash file: ${file} in ${dir}`);
    return null;
  }
};
|
||||
|
||||
/**
 * Parse markdown `content` with `mdInstance` and return the unique
 * hashtag contents found anywhere in the token tree (first-seen order).
 * Returns [] for empty or missing content.
 */
const extractTags = (content, mdInstance) => {
  if (!content) return [];

  // Depth-first walk: a token's own hashtag is collected before its children,
  // matching document order.
  const found = [];
  const walk = (tokens) => {
    for (const token of tokens) {
      if (token.type === 'hashtag') {
        found.push(token.content);
      }
      if (token.children) {
        walk(token.children);
      }
    }
  };

  walk(mdInstance.parse(content, {}));
  return [...new Set(found)];
}
|
||||
|
||||
/**
 * Expand hierarchical tags so every ancestor is present:
 * "a/b/c" contributes "a/b/c", "a", and "a/b" (in that insertion order).
 * Duplicates are collapsed; input order of tags is preserved.
 */
const expandHierarchicalTags = (tags) => {
  const expanded = new Set();
  for (const tag of tags) {
    // Full tag first, then each strict prefix, shortest to longest.
    expanded.add(tag);
    const segments = tag.split('/');
    for (let depth = 1; depth < segments.length; depth++) {
      expanded.add(segments.slice(0, depth).join('/'));
    }
  }
  return [...expanded];
};
|
||||
|
||||
/**
 * Read a post's source file and return its hashtags, lowercased.
 * Returns [] when the file cannot be read.
 */
const getPostTags = (post, tagMdInstance) => {
  const filePath = post.inputPath;
  try {
    const raw = fs.readFileSync(filePath, 'utf-8');
    return extractTags(raw, tagMdInstance).map(tag => tag.toLowerCase());
  } catch (e) {
    // Skip if file can't be read
    return []
  }
}
|
||||
|
||||
// A post counts as released unless its front matter explicitly
// sets `released: false` (absent/true both mean released).
const isReleased = (post) => post.data.released !== false;
|
||||
|
||||
/**
 * markdown-it plugin: recognize inline "#tag" hashtags and render them
 * as links to the tag's page. Hierarchical tags use "/" (#recipes/bread).
 *
 * Fix: the whitespace char-code constants and the regex character class
 * contained doubled backslashes ('\\t', '[..._\\\\/..]'), so TAB/NEWLINE/CR
 * all resolved to the code for '\' and "/" was not accepted in tags —
 * hashtags after a tab or line break and hierarchical tags never matched.
 */
const markdownItHashtag = (md) => {
  // "#" + letter + letters/digits/_/-//; the negative lookahead stops
  // partial matches of a longer word-like run.
  const hashtagRegex = /^#([a-zA-Z][a-zA-Z0-9_/-]*)(?![a-zA-Z0-9_-])/;

  const HASH_CODE = '#'.charCodeAt(0);
  const SPACE_CODE = ' '.charCodeAt(0);
  const TAB_CODE = '\t'.charCodeAt(0);
  const NEWLINE_CODE = '\n'.charCodeAt(0);
  const CARRIAGE_RETURN_CODE = '\r'.charCodeAt(0);

  // Inline rule: only fire at start-of-input or after whitespace, so
  // mid-word "#" (URLs, anchors) is left alone.
  md.inline.ruler.push('hashtag', function(state, silent) {
    const pos = state.pos;
    const ch = state.src.charCodeAt(pos);

    if (ch !== HASH_CODE) return false;

    if (pos > 0) {
      const prevCh = state.src.charCodeAt(pos - 1);
      if (prevCh !== SPACE_CODE && prevCh !== TAB_CODE && prevCh !== NEWLINE_CODE && prevCh !== CARRIAGE_RETURN_CODE) {
        return false;
      }
    }

    const match = state.src.slice(pos).match(hashtagRegex);
    if (!match) return false;

    // In silent (probe) mode markdown-it only validates; emit no token.
    if (!silent) {
      const token = state.push('hashtag', 'a', 0);
      token.content = match[1];
      token.markup = '#';
    }

    state.pos += match[0].length;
    return true;
  });

  // Render each hashtag as a link to its tag page; slug is lowercased,
  // display text keeps the author's casing (HTML-escaped).
  md.renderer.rules.hashtag = function(tokens, idx) {
    const tagName = tokens[idx].content;
    const slug = tagName.toLowerCase();
    return `<a href="/tags/${slug}/" class="inline-tag">#${md.utils.escapeHtml(tagName)}</a>`;
  };
};
|
||||
|
||||
// Plugin: Strip trailing hashtag-only paragraphs from rendered output
// Must be applied AFTER footnote plugin since footnotes are moved to the end
//
// Fix: the previous backward scan (Array.from + reduce) examined EVERY
// 3-aligned index before the footnote block and kept the smallest match,
// so an interior hashtag-only paragraph could cause real paragraphs
// between it and the end of the document to be deleted. The scan now
// stops at the first non-matching paragraph, stripping only the
// contiguous trailing run.
const markdownItStripTrailingHashtags = (md) => {
  md.core.ruler.push('strip_trailing_hashtags', function(state) {
    const tokens = state.tokens;

    // A paragraph is "hashtag-only" when its inline children are nothing
    // but hashtags, blank text, and soft breaks — with at least one hashtag.
    const isHashtagOnlyParagraph = (inlineToken) =>
      inlineToken?.type === 'inline' &&
      inlineToken.children?.every(child =>
        child.type === 'hashtag' ||
        (child.type === 'text' && child.content.trim() === '') ||
        child.type === 'softbreak'
      ) &&
      inlineToken.children?.some(child => child.type === 'hashtag');

    // A paragraph renders as exactly three tokens: open, inline, close.
    const isHashtagParagraphAt = (idx) =>
      tokens[idx]?.type === 'paragraph_open' &&
      tokens[idx + 1]?.type === 'inline' &&
      tokens[idx + 2]?.type === 'paragraph_close' &&
      isHashtagOnlyParagraph(tokens[idx + 1]);

    // Footnotes sit at the very end; trailing hashtags appear just before them.
    const footnoteIdx = tokens.findIndex(t => t.type === 'footnote_block_open');
    const footnoteSectionStart = footnoteIdx === -1 ? tokens.length : footnoteIdx;

    // Walk backwards over CONTIGUOUS hashtag-only paragraphs only.
    let hashtagSectionStart = footnoteSectionStart;
    while (isHashtagParagraphAt(hashtagSectionStart - 3)) {
      hashtagSectionStart -= 3;
    }

    state.tokens = tokens.filter((_, idx) =>
      idx < hashtagSectionStart || idx >= footnoteSectionStart
    );

    return true;
  });
};
|
||||
|
||||
/**
 * markdown-it plugin: render ::: details containers as native
 * <details>/<summary> disclosure widgets; the container's info line
 * becomes the (HTML-escaped) summary text.
 *
 * Fix: the template literals contained doubled backslashes ('\\n'),
 * which emitted a literal backslash-n into the HTML instead of a
 * newline; restored the intended '\n' escapes.
 */
const markdownItDetails = (md) => {
  md.use(markdownItContainer, 'details', {
    // Accept any info string — the whole line is used as the summary.
    validate: (params) => params.trim().match(/^(.*)$/),
    render: (tokens, idx) => {
      const m = tokens[idx].info.trim().match(/^(.*)$/);
      if (tokens[idx].nesting === 1) {
        // Opening tag: escape the title to keep it HTML-safe.
        const title = md.utils.escapeHtml(m[1]);
        return `<details class="expandable">\n<summary>${title}</summary>\n`;
      }
      return '</details>\n';
    }
  });
};
|
||||
|
||||
const sharedPlugins = [
|
||||
markdownItFootnote,
|
||||
markdownItHashtag,
|
||||
|
|
@ -207,6 +57,7 @@ const tagExtractorMd = createMarkdownInstance();
|
|||
|
||||
module.exports = (eleventyConfig) => {
|
||||
eleventyConfig.addPlugin(syntaxHighlight);
|
||||
eleventyConfig.addPlugin(cacheBustingPlugin, { rootDir: __dirname });
|
||||
|
||||
eleventyConfig.addFilter("extractTags", (content) => extractTags(content, tagExtractorMd));
|
||||
eleventyConfig.addFilter("extractTagsFromFile", (filePath) => {
|
||||
|
|
@ -358,25 +209,13 @@ module.exports = (eleventyConfig) => {
|
|||
}, {});
|
||||
});
|
||||
|
||||
// Get tags from recipes (only from newest non-draft versions)
// Reads the recipe's source file and returns its hashtags, lowercased;
// unreadable files yield an empty list.
const getRecipeTags = (recipe) => {
  const filePath = recipe.inputPath;
  try {
    const source = fs.readFileSync(filePath, 'utf-8');
    const found = extractTags(source, tagExtractorMd);
    return found.map(t => t.toLowerCase());
  } catch (e) {
    return [];
  }
};
|
||||
|
||||
eleventyConfig.addCollection("contentTags", (collectionApi) => {
|
||||
const posts = collectionApi.getFilteredByGlob("posts/**/*.md").filter(isReleased);
|
||||
const recipes = collectionApi.getFilteredByGlob("recipes/**/*.md")
|
||||
.filter(r => r.data.isNewestVersion && r.data.draft !== true);
|
||||
|
||||
const postTags = posts.flatMap(post => getPostTags(post, tagExtractorMd));
|
||||
const recipeTags = recipes.flatMap(recipe => getRecipeTags(recipe));
|
||||
const recipeTags = recipes.flatMap(recipe => getRecipeTags(recipe, tagExtractorMd));
|
||||
|
||||
const allTags = expandHierarchicalTags([...postTags, ...recipeTags]);
|
||||
|
||||
|
|
@ -408,7 +247,7 @@ module.exports = (eleventyConfig) => {
|
|||
}, {});
|
||||
|
||||
const tagMap = recipes.reduce((acc, recipe) => {
|
||||
const rawTags = getRecipeTags(recipe);
|
||||
const rawTags = getRecipeTags(recipe, tagExtractorMd);
|
||||
const tags = expandHierarchicalTags(rawTags);
|
||||
return tags.reduce((innerAcc, tag) => ({
|
||||
...innerAcc,
|
||||
|
|
@ -430,60 +269,6 @@ module.exports = (eleventyConfig) => {
|
|||
}), {});
|
||||
});
|
||||
|
||||
// Cache busting filter: returns the hashed public path for a file.
// Fix: when getFileHash returns null (unreadable file) the old code
// interpolated the literal "null" into the URL ("/css/style.null.css"),
// while the copy step in "eleventy.before" falls back to the unhashed
// filename — link to that unhashed name instead so URLs stay valid.
eleventyConfig.addFilter("fileHash", (file, dir = "css") => {
  const hash = getFileHash(file, dir);
  if (hash == null) return `/${dir}/${file}`;
  const ext = path.extname(file);
  const base = path.basename(file, ext);
  return `/${dir}/${base}.${hash}${ext}`;
});
|
||||
|
||||
// Before each build, copy static files into _site under content-hashed
// names so the URLs produced by the "fileHash" filter resolve.
// The css and assets branches were verbatim duplicates; factored into
// one local helper (behavior unchanged: the css directory is read
// unconditionally, the assets directory only when it exists).
eleventyConfig.on("eleventy.before", async () => {
  // Copy every matching file from <root>/<dirName> to _site/<dirName>,
  // inserting the 8-char content hash before the extension.
  const copyWithHashes = (dirName, filterFn) => {
    const srcDir = path.join(__dirname, dirName);
    const outDir = path.join(__dirname, "_site", dirName);

    if (!fs.existsSync(outDir)) {
      fs.mkdirSync(outDir, { recursive: true });
    }

    const files = fs.readdirSync(srcDir).filter(filterFn);
    for (const file of files) {
      const hash = getFileHash(file, dirName);
      const ext = path.extname(file);
      const base = path.basename(file, ext);
      // Fall back to the unhashed name when hashing failed.
      const hashedName = `${base}${hash == null ? '' : `.${hash}`}${ext}`;

      fs.copyFileSync(
        path.join(srcDir, file),
        path.join(outDir, hashedName)
      );
    }
  };

  // Copy CSS files with hashes
  copyWithHashes("css", (f) => f.endsWith(".css"));

  // Copy assets files with hashes (directory is optional)
  if (fs.existsSync(path.join(__dirname, "assets"))) {
    copyWithHashes("assets", () => true);
  }
});
|
||||
|
||||
// Static files and directories published verbatim (no cache busting).
for (const target of ["robots.txt", "simulations", "js"]) {
  eleventyConfig.addPassthroughCopy(target);
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue