mirror of
https://github.com/samber/lo.git
synced 2026-04-22 15:37:14 +08:00
feat(doc): add documentation validation script (wip)
This commit is contained in:
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env node
// Verifies that every `similarHelpers` reference is reciprocal: if helper A
// lists B as similar, then B must also list A.
const path = require('path');
const { loadHelpers } = require('./utils');

const dataDir = process.argv[2] || path.join(__dirname, '..', 'data');
const { helpers, byFullKey } = loadHelpers(dataDir);

let hasError = false;

for (const helper of helpers) {
  // Key under which other helpers would reference this one.
  const ownKey = `${helper.category}#${helper.subCategory}#${helper.name}`;
  for (const ref of helper.similarHelpers || []) {
    const target = byFullKey.get(ref);
    if (!target) continue; // Existence is checked by another script
    const reciprocated = (target.similarHelpers || []).includes(ownKey);
    if (!reciprocated) {
      hasError = true;
      console.error(`Cross-ref missing: ${helper.fileName} -> ${ref} but not reciprocated.`);
    }
  }
}

if (hasError) process.exit(1);
console.log('OK: all similarHelpers are reciprocal.');
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env node
// Fails when two helper files share the same category + name combination.
const path = require('path');
const { loadHelpers } = require('./utils');

const dataDir = process.argv[2] || path.join(__dirname, '..', 'data');
const { byCategoryName } = loadHelpers(dataDir);

let hasError = false;
byCategoryName.forEach((list, key) => {
  if (list.length > 1) {
    hasError = true;
    const files = list.map((h) => h.fileName).join(', ');
    console.error(`Duplicate helper in category detected for ${key}: ${files}`);
  }
});

if (hasError) process.exit(1);
console.log('OK: no duplicate helpers within categories.');
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env node
// Checks that each helper markdown file is named `<category>-<slug>.md`,
// as derived from its frontmatter.
const path = require('path');
const { loadHelpers, expectedFileName } = require('./utils');

const dataDir = process.argv[2] || path.join(__dirname, '..', 'data');
const { helpers } = loadHelpers(dataDir);

let hasError = false;
for (const h of helpers) {
  const expected = expectedFileName(h);
  if (!expected) {
    hasError = true;
    console.error(`Invalid or missing frontmatter (category/slug) in ${h.fileName}`);
    continue;
  }
  if (h.fileName !== expected) {
    hasError = true;
    console.error(`Filename mismatch for ${h.fileName}, expected ${expected}`);
  }
}

if (hasError) process.exit(1);
console.log('OK: all filenames match category and slug.');
@@ -0,0 +1,202 @@
|
||||
#!/usr/bin/env node
// Validates helper signatures documented in markdown frontmatter against the
// actual `func` declarations found in the repository's Go sources.

const fs = require('fs');
const path = require('path');
const readline = require('readline');
const { listMarkdownFiles, parseFrontmatter } = require('./utils');

// Repository root is two levels above this script's directory.
const repoRoot = path.resolve(__dirname, '..', '..');
// Directory containing one markdown file per documented helper.
const dataDir = path.resolve(__dirname, '..', 'data');
||||
/**
 * Read a file synchronously and return its contents as UTF-8 text.
 * @param {string} filePath - Path of the file to read.
 * @returns {string} File contents.
 */
function readFile(filePath) {
  const contents = fs.readFileSync(filePath, { encoding: 'utf8' });
  return contents;
}
||||
/**
 * Recursively yield absolute paths of Go source files under `dir`.
 * Skips dot-entries, excluded directory names, `_test.go` files, and anything
 * under the repository's top-level `docs/` directory.
 * @param {string} dir - Directory to walk.
 * @param {Set<string>} [excludeDirs] - Directory names to skip entirely.
 */
function* walkGoFiles(dir, excludeDirs = new Set()) {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (entry.name.startsWith('.')) continue;

    const absPath = path.join(dir, entry.name);
    const relPath = path.relative(repoRoot, absPath);

    if (entry.isDirectory()) {
      if (!excludeDirs.has(entry.name)) {
        yield* walkGoFiles(absPath, excludeDirs);
      }
      continue;
    }

    if (!entry.isFile() || !entry.name.endsWith('.go')) continue;
    // skip tests
    if (entry.name.endsWith('_test.go')) continue;
    // skip docs/ directory
    if (relPath.split(path.sep)[0] === 'docs') continue;
    yield absPath;
  }
}
||||
/**
 * Build a regex matching a Go function declaration line for `name`,
 * i.e. `func Name(` or the generic form `func Name[`.
 * @param {string} name - Exact Go function name.
 * @returns {RegExp} Anchored, flagless regex.
 */
function buildFunctionRegex(name) {
  // Escape every regex metacharacter that could appear in the name.
  const escaped = name.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&');
  return new RegExp(`^func\\s+${escaped}(?:\\(|\\[)`, '');
}
/**
 * Find all Go `func <name>` declarations in the repository.
 * The file hinted by `preferredPathHint` (from the frontmatter sourceRef) is
 * scanned first so its hit lands at the front of the result list.
 * @param {string} name - Go function name to search for.
 * @param {string|null} preferredPathHint - Repo-relative path to scan first.
 * @returns {Promise<Array<{file: string, line: number, signature: string}>>}
 */
async function findFunctionDeclarations(name, preferredPathHint) {
  const fnRegex = buildFunctionRegex(name);
  const hits = [];
  const scanned = new Set(); // absolute paths already scanned

  // Prefer hinted file if provided
  if (preferredPathHint) {
    const hintedAbs = path.resolve(repoRoot, preferredPathHint);
    if (fs.existsSync(hintedAbs)) {
      scanned.add(hintedAbs);
      const hit = await scanFileForSignature(hintedAbs, fnRegex);
      if (hit) hits.push(hit);
    }
  }

  for (const abs of walkGoFiles(repoRoot)) {
    // BUG FIX: the walk also visits the hinted file; previously it was
    // scanned twice, yielding a duplicate hit for the same declaration.
    if (scanned.has(abs)) continue;
    const hit = await scanFileForSignature(abs, fnRegex);
    if (hit) hits.push(hit);
  }
  return hits;
}
/**
 * Drop a leading UTF-8 byte-order mark, if present.
 * @param {string} s - Line or file contents.
 * @returns {string} Input without a leading U+FEFF.
 */
function stripBOM(s) {
  if (s.charCodeAt(0) === 0xfeff) {
    return s.slice(1);
  }
  return s;
}
/**
 * Stream a file line by line and return the first line matching `fnRegex`,
 * cleaned up into a bare signature (comments and trailing `{` removed).
 * @param {string} absPath - Absolute path of the Go file to scan.
 * @param {RegExp} fnRegex - Anchored declaration matcher from buildFunctionRegex.
 * @returns {Promise<{file: string, line: number, signature: string}|null>}
 *   Hit with repo-relative (forward-slash) path and 1-based line, or null.
 */
async function scanFileForSignature(absPath, fnRegex) {
  const rl = readline.createInterface({
    input: fs.createReadStream(absPath, { encoding: 'utf8' }),
    crlfDelay: Infinity,
  });
  let lineNo = 0;
  for await (const rawLine of rl) {
    lineNo++;
    // Only the first line could carry a BOM, but stripping is cheap and safe.
    const line = stripBOM(rawLine);
    if (fnRegex.test(line)) {
      // Normalize multiple spaces and tabs minimally: keep original line
      let signature = line.trim();
      // Remove end-of-line comments before processing trailing bracket
      signature = signature
        // remove line comments
        .replace(/\/\/.*$/, '')
        // remove trailing block comment
        .replace(/\/\*.*?\*\/\s*$/, '')
        .trimEnd();

      // Remove spaces before and after trailing opening brace, then drop it
      signature = signature.replace(/\s*\{\s*$/, '');
      // Report the path repo-relative with forward slashes (Windows-safe).
      const rel = path.relative(repoRoot, absPath).replace(/\\/g, '/');
      return { file: rel, line: lineNo, signature };
    }
  }
  return null;
}
/**
 * Extract the file-path portion of a sourceRef ("path/to/file.go#L123").
 * @param {string|null|undefined} sourceRef - Frontmatter sourceRef value.
 * @returns {string|null} Path before the '#', the whole string when there is
 *   no '#', or null for a falsy input.
 */
function parseSourceRefFile(sourceRef) {
  if (!sourceRef) return null;
  const hashAt = sourceRef.indexOf('#');
  return hashAt === -1 ? sourceRef : sourceRef.slice(0, hashAt);
}
/**
 * Normalize a Go signature for comparison: collapse whitespace runs to a
 * single space, remove spaces preceding '(' or '[', and trim the ends.
 * @param {string} signature - Raw signature text.
 * @returns {string} Normalized form suitable for equality checks.
 */
function normalizeSignature(signature) {
  const collapsed = signature.replace(/\s+/g, ' ');
  return collapsed.replace(/\s+(\(|\[)/g, '$1').trim();
}
/**
 * Audit every helper markdown file against the repository's Go declarations:
 * warns about helpers with no matching `func`, duplicate/unknown/missing
 * signatures in frontmatter, and outdated sourceRef values. With `--check`,
 * any warning sets a non-zero exit code.
 */
async function main() {
  const args = new Set(process.argv.slice(2));
  const files = listMarkdownFiles(dataDir);
  let issues = 0;

  for (const absPath of files) {
    const content = readFile(absPath);
    const fm = parseFrontmatter(content) || {};
    const name = fm.name;
    // Files without a `name` in frontmatter are silently skipped.
    if (!name) continue;

    // The sourceRef file (if any) is used as a scan hint.
    const hintFile = parseSourceRefFile(fm.sourceRef);
    const hits = await findFunctionDeclarations(name, hintFile);
    const relMd = path.relative(repoRoot, absPath).replace(/\\/g, '/');

    if (!hits || hits.length === 0) {
      // eslint-disable-next-line no-console
      console.warn(`[missing-helper] ${relMd} -> name="${name}"`);
      issues++;
      continue;
    }

    // Sort hits by file path to ensure consistent order
    hits.sort((a, b) => a.file.localeCompare(b.file));

    // Deduplicate signatures from hits (preserve first encountered formatting)
    const seenFromHits = new Set();
    const uniqueHitSignatures = [];
    for (const hit of hits) {
      const norm = normalizeSignature(hit.signature);
      if (!seenFromHits.has(norm)) {
        seenFromHits.add(norm);
        uniqueHitSignatures.push(hit.signature);
      }
    }

    // Existing frontmatter signatures
    const existing = Array.isArray(fm.signatures) ? fm.signatures : [];
    const existingNorm = existing.map(normalizeSignature);

    // Report duplicate signatures within frontmatter (second and further occurrences)
    const seenExisting = new Set();
    for (let i = 0; i < existing.length; i++) {
      const norm = existingNorm[i];
      if (seenExisting.has(norm)) {
        // eslint-disable-next-line no-console
        console.warn(`[duplicate-signature] ${relMd} -> "${existing[i]}"`);
        issues++;
      } else {
        seenExisting.add(norm);
      }
    }

    // Unknown signatures (exist in frontmatter but not in code)
    const hitsNormalized = new Set(uniqueHitSignatures.map(normalizeSignature));
    for (const sig of existing) {
      const norm = normalizeSignature(sig);
      if (!hitsNormalized.has(norm)) {
        // eslint-disable-next-line no-console
        console.warn(`[unknown-signature] ${relMd} -> "${sig}"`);
        issues++;
      }
    }

    // Missing signatures (found in code but not listed in frontmatter)
    const existingNormalizedSet = new Set(existingNorm);
    for (const sig of uniqueHitSignatures) {
      const norm = normalizeSignature(sig);
      if (!existingNormalizedSet.has(norm)) {
        // eslint-disable-next-line no-console
        console.warn(`[missing-signature] ${relMd} -> "${sig}"`);
        issues++;
      }
    }

    // SourceRef verification
    // NOTE(review): hits are sorted by file path above, so hits[0] may not be
    // the hinted sourceRef file when the function exists in several files —
    // confirm that pointing sourceRef at the lexicographically first file is
    // intended.
    const expectedSourceRef = `${hits[0].file}#L${hits[0].line}`;
    if (fm.sourceRef !== expectedSourceRef) {
      // eslint-disable-next-line no-console
      console.warn(`[sourceRef-outdated] ${relMd} -> expected=${expectedSourceRef} actual=${fm.sourceRef || '""'}`);
      issues++;
    }
  }

  // Only fail the process in --check mode; otherwise warnings are advisory.
  if (args.has('--check') && issues > 0) {
    process.exitCode = 1;
  }
}
// Entry point: surface any unexpected failure and exit non-zero.
main().catch((err) => {
  console.error(err);
  process.exit(1);
});
@@ -0,0 +1,157 @@
|
||||
#!/usr/bin/env node
// Audits docs/data frontmatter: lists every type+category combination found,
// compares categories against the per-type pages under docs/<type>/, and
// exits non-zero when a category has no corresponding page.
//
// FIX: the original duplicated every step four times (core/it/mutable/
// parallel) and carried a misleading "Check for duplicates" comment over the
// final section, which actually re-checks missing pages to set the exit code.
// This version is table-driven but preserves the exact output and exit
// behavior.

const fs = require('fs');
const path = require('path');

// The four documented helper types; order drives report output order.
const TYPES = ['core', 'it', 'mutable', 'parallel'];

// Read all markdown files in docs/data directory
const dataDir = path.join(__dirname, '../data');
const files = fs.readdirSync(dataDir).filter(f => f.endsWith('.md'));

const combinations = new Set();
// type -> Set of its categories (from frontmatter)
const categoriesByType = new Map(TYPES.map(t => [t, new Set()]));

// Extract type+category combinations from each file
files.forEach(file => {
  const content = fs.readFileSync(path.join(dataDir, file), 'utf8');

  const typeMatch = content.match(/^category:\s*(.+)$/m);
  const categoryMatch = content.match(/^subCategory:\s*(.+)$/m);
  if (!typeMatch || !categoryMatch) return;

  const type = typeMatch[1].trim();
  const category = categoryMatch[1].trim();
  combinations.add(`${type}|${category}`);

  const bucket = categoriesByType.get(type);
  if (!bucket) throw new Error(`Unknown type: ${type}`);
  bucket.add(category);
});

console.log('=== TYPE+CATEGORY COMBINATIONS FOUND ===');
Array.from(combinations).sort().forEach(comb => console.log(comb));
for (const type of TYPES) {
  console.log(`\n=== ${type.toUpperCase()} CATEGORIES ===`);
  Array.from(categoriesByType.get(type)).sort().forEach(cat => console.log(cat));
}

// Check existing pages: type -> Set of page basenames under docs/<type>/
const existingPagesByType = new Map();
for (const type of TYPES) {
  const pagesDir = path.join(__dirname, `../docs/${type}`);
  const pages = new Set();
  if (fs.existsSync(pagesDir)) {
    fs.readdirSync(pagesDir)
      .filter(f => f.endsWith('.md'))
      .forEach(f => pages.add(f.replace('.md', '')));
  }
  existingPagesByType.set(type, pages);
}

for (const type of TYPES) {
  console.log(`\n=== EXISTING ${type.toUpperCase()} PAGES ===`);
  Array.from(existingPagesByType.get(type)).sort().forEach(page => console.log(page));
}

// Find missing pages
for (const type of TYPES) {
  console.log(`\n=== MISSING ${type.toUpperCase()} PAGES ===`);
  Array.from(categoriesByType.get(type)).sort().forEach(category => {
    if (!existingPagesByType.get(type).has(category)) {
      console.log(`MISSING: ${type}/${category}.md`);
    }
  });
}

// Re-check missing pages as hard errors and decide the exit code.
console.log('\n=== VALIDATION RESULTS ===');
let hasErrors = false;
for (const type of TYPES) {
  Array.from(categoriesByType.get(type)).sort().forEach(category => {
    if (!existingPagesByType.get(type).has(category)) {
      console.log(`❌ ERROR: Missing ${type} page for category: ${category}`);
      hasErrors = true;
    }
  });
}

if (!hasErrors) {
  console.log('✅ All helper categories have corresponding pages!');
} else {
  console.log('\n❌ Found missing pages. Please create them as shown above.');
  process.exit(1);
}
||||
@@ -0,0 +1,21 @@
|
||||
#!/usr/bin/env node
// Ensures that every `similarHelpers` entry points to a helper that actually
// exists in the data directory.
const path = require('path');
const { loadHelpers, toFullKey } = require('./utils');

const dataDir = process.argv[2] || path.join(__dirname, '..', 'data');
const { helpers, byFullKey } = loadHelpers(dataDir);

let hasError = false;
for (const helper of helpers) {
  for (const ref of helper.similarHelpers || []) {
    const key = toFullKey(ref);
    if (!byFullKey.has(key)) {
      hasError = true;
      console.error(`Missing similar helper reference from ${helper.fileName} -> ${key}`);
    }
  }
}

if (hasError) process.exit(1);
console.log('OK: all similarHelpers references point to existing helpers.');
||||
@@ -0,0 +1,22 @@
|
||||
#!/usr/bin/env node
// Verifies that each `similarHelpers` reference resolves to an entry in the
// helper directory index. (Largely overlaps with the existence-check script;
// kept as a separate executable.)
const path = require('path');
const { loadHelpers, toFullKey } = require('./utils');

const dataDir = process.argv[2] || path.join(__dirname, '..', 'data');
const { helpers, byFullKey } = loadHelpers(dataDir);

let hasError = false;
helpers.forEach((helper) => {
  (helper.similarHelpers || []).forEach((ref) => {
    const fullKey = toFullKey(ref);
    if (byFullKey.has(fullKey)) return;
    hasError = true;
    console.error(`Reference not found in directory for ${helper.fileName} -> ${fullKey}`);
  });
});

if (hasError) process.exit(1);
console.log('OK: every similarHelpers reference exists in the directory.');
||||
@@ -0,0 +1,112 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
/**
 * Synchronously read a file as UTF-8 text.
 * @param {string} filePath - Path of the file to read.
 * @returns {string} File contents.
 */
function readFile(filePath) {
  return fs.readFileSync(filePath, { encoding: 'utf8' });
}
/**
 * List absolute paths of all `.md` files directly inside `dirPath`
 * (non-recursive).
 * @param {string} dirPath - Directory to scan.
 * @returns {string[]} Joined paths of the markdown files found.
 */
function listMarkdownFiles(dirPath) {
  const names = fs.readdirSync(dirPath);
  const markdown = names.filter((name) => name.endsWith('.md'));
  return markdown.map((name) => path.join(dirPath, name));
}
/**
 * Minimal YAML-ish frontmatter parser tailored to the helper markdown files.
 * Supports only top-level `key: value` pairs where value is a bare scalar,
 * a double-quoted string, `null`, or a flat inline array like `[a, "b"]`.
 * (Array items containing commas inside quotes are NOT supported.)
 * @param {string} content - Full markdown file contents.
 * @returns {Object<string, *>|null} Parsed key/value map, or null when the
 *   file has no leading `--- ... ---` block.
 */
function parseFrontmatter(content) {
  // Expects a YAML-like block between leading --- lines.
  const fmMatch = content.match(/^---[\r\n]+([\s\S]*?)[\r\n]+---/);
  if (!fmMatch) return null;
  const fm = fmMatch[1];
  const data = {};

  // Capture simple key: value pairs and array values like: key: [a, b]
  fm.split(/\r?\n/).forEach((line) => {
    const m = line.match(/^([A-Za-z][A-Za-z0-9_-]*):\s*(.*)$/);
    if (!m) return;
    const key = m[1];
    const raw = m[2].trim();
    if (raw.startsWith('[') && raw.endsWith(']')) {
      // Inline array; an empty `[]` yields an empty list.
      const inner = raw.slice(1, -1).trim();
      if (inner.length === 0) {
        data[key] = [];
      } else {
        data[key] = inner
          .split(',')
          .map((s) => s.trim())
          .map((s) => (s.startsWith('"') && s.endsWith('"') ? s.slice(1, -1) : s));
      }
    } else if (raw.startsWith('"') && raw.endsWith('"')) {
      data[key] = raw.slice(1, -1);
    } else if (raw === 'null') {
      data[key] = null;
    } else {
      // FIX: removed an unreachable `raw === '[]'` branch — that value is
      // already handled by the bracket branch above.
      data[key] = raw;
    }
  });
  return data;
}
||||
/**
 * Load every helper markdown file from `dataDir` into a normalized record,
 * plus lookup indexes.
 * @param {string} dataDir - Directory containing the helper .md files.
 * @returns {{helpers: Array<Object>, byCategoryName: Map, byFullKey: Map, byPath: Map}}
 */
function loadHelpers(dataDir) {
  const files = listMarkdownFiles(dataDir);
  const helpers = [];

  files.forEach((absPath) => {
    const filename = path.basename(absPath);
    const content = readFile(absPath);
    // Missing/unparseable frontmatter degrades to an all-null record.
    const fm = parseFrontmatter(content) || {};
    const helper = {
      filePath: absPath,
      fileName: filename,
      name: fm.name || null,
      slug: fm.slug || null,
      category: fm.category || null,
      subCategory: fm.subCategory || null,
      similarHelpers: Array.isArray(fm.similarHelpers) ? fm.similarHelpers : [],
    };
    helpers.push(helper);
  });

  // Build index by keys for quick lookup
  const byCategoryName = new Map(); // key: `${category}#${name}` -> helper[] (list, so callers can detect duplicates)
  const byFullKey = new Map(); // key: `${category}#${subCategory}#${name}` -> helper
  const byPath = new Map(); // key: filename -> helper

  helpers.forEach((h) => {
    if (h.category && h.name) {
      const k = `${h.category}#${h.name}`;
      if (!byCategoryName.has(k)) byCategoryName.set(k, []);
      byCategoryName.get(k).push(h);
    }
    if (h.category && h.subCategory && h.name) {
      const k2 = `${h.category}#${h.subCategory}#${h.name}`;
      byFullKey.set(k2, h);
    }
    byPath.set(h.fileName, h);
  });

  return { helpers, byCategoryName, byFullKey, byPath };
}
/**
 * Compute the canonical markdown filename for a helper: `<category>-<slug>.md`.
 * @param {{category: ?string, slug: ?string}|null} helper - Helper record.
 * @returns {string|null} Expected filename, or null when category/slug missing.
 */
function expectedFileName(helper) {
  if (!helper) return null;
  const { category, slug } = helper;
  if (!category || !slug) return null;
  return `${category}-${slug}.md`;
}
/**
 * Normalize a similar-helper reference of the form `category#subcategory#Name`.
 * Currently only trims surrounding whitespace.
 * @param {string} ref - Raw reference from frontmatter.
 * @returns {string} Trimmed full key.
 */
function toFullKey(ref) {
  return ref.trim();
}
// Public API shared by the docs validation scripts.
module.exports = {
  readFile,
  listMarkdownFiles,
  parseFrontmatter,
  loadHelpers,
  expectedFileName,
  toFullKey,
};
|
||||
|
||||
Reference in New Issue
Block a user