#!/usr/bin/env bun /** * Extract TRACES from source code and generate requirement mapping * * Usage: * bun run scripts/extract-traces.ts * bun run scripts/extract-traces.ts --format json * bun run scripts/extract-traces.ts --format markdown > docs/TRACEABILITY.md */ import * as fs from "fs"; import * as path from "path"; // Use built-in Bun.glob const glob = (pattern: string) => new Bun.Glob(pattern); interface TraceEntry { file: string; line: number; context: string; requirements: string[]; } interface RequirementMapping { [reqId: string]: TraceEntry[]; } interface TracesData { timestamp: string; totalFiles: number; totalTraces: number; requirements: RequirementMapping; byType: { UR: string[]; IR: string[]; DR: string[]; JA: string[]; }; } const TRACES_PATTERN = /TRACES:\s*([^\n]+)/gi; const REQ_ID_PATTERN = /([A-Z]{2})-(\d{3})/g; function extractRequirementIds(tracesString: string): string[] { const matches = [...tracesString.matchAll(REQ_ID_PATTERN)]; return matches.map((m) => `${m[1]}-${m[2]}`); } function getContext(content: string, lineNum: number): string { const lines = content.split("\n"); const contextStart = Math.max(0, lineNum - 3); const contextEnd = Math.min(lines.length, lineNum + 1); const contextLines = lines.slice(contextStart, contextEnd); return contextLines.join("\n").trim(); } async function extractTraces(): Promise { const patterns = [ "src/**/*.ts", "src/**/*.svelte", "src-tauri/src/**/*.rs", ]; const requirementMap: RequirementMapping = {}; const byType: Record> = { UR: new Set(), IR: new Set(), DR: new Set(), JA: new Set(), }; let totalTraces = 0; const processedFiles = new Set(); for (const pattern of patterns) { const globber = glob(pattern); const files = []; for await (const file of globber.scan({ cwd: "/home/dtourolle/Development/JellyTau", })) { files.push(file); } for (const file of files) { if (processedFiles.has(file)) continue; processedFiles.add(file); try { const fullPath = `/home/dtourolle/Development/JellyTau/${file}`; const content = fs.readFileSync(fullPath, "utf-8"); const lines = content.split("\n"); let match; TRACES_PATTERN.lastIndex = 0; while ((match = TRACES_PATTERN.exec(content)) !== null) { const tracesStr = match[1]; const reqIds = extractRequirementIds(tracesStr); if (reqIds.length === 0) continue; // Find line number const beforeMatch = content.substring(0, match.index); const lineNum = beforeMatch.split("\n").length - 1; // Get context (function/class name if available) let context = "Unknown"; for (let i = lineNum; i >= Math.max(0, lineNum - 10); i--) { const line = lines[i]; if ( line.includes("function ") || line.includes("export const ") || line.includes("pub fn ") || line.includes("pub enum ") || line.includes("pub struct ") || line.includes("impl ") || line.includes("async function ") || line.includes("class ") ) { context = line.trim(); break; } } const entry: TraceEntry = { file: file.replace(/^\//, ""), line: lineNum + 1, context, requirements: reqIds, }; for (const reqId of reqIds) { if (!requirementMap[reqId]) { requirementMap[reqId] = []; } requirementMap[reqId].push(entry); // Track by type const type = reqId.substring(0, 2); if (byType[type]) { byType[type].add(reqId); } } totalTraces++; } } catch (error) { console.error(`Error processing ${file}:`, error); } } } return { timestamp: new Date().toISOString(), totalFiles: processedFiles.size, totalTraces, requirements: requirementMap, byType: { UR: Array.from(byType["UR"]).sort(), IR: Array.from(byType["IR"]).sort(), DR: Array.from(byType["DR"]).sort(), JA: Array.from(byType["JA"]).sort(), }, }; } function generateMarkdown(data: TracesData): string { let md = `# Code Traceability Matrix **Generated:** ${new Date(data.timestamp).toLocaleString()} ## Summary - **Total Files Scanned:** ${data.totalFiles} - **Total TRACES Found:** ${data.totalTraces} - **Requirements Covered:** - User Requirements (UR): ${data.byType.UR.length} - Integration Requirements (IR): ${data.byType.IR.length} - Development Requirements (DR): ${data.byType.DR.length} - Jellyfin API Requirements (JA): ${data.byType.JA.length} ## Requirements by Type ### User Requirements (UR) \`\`\` ${data.byType.UR.join(", ")} \`\`\` ### Integration Requirements (IR) \`\`\` ${data.byType.IR.join(", ")} \`\`\` ### Development Requirements (DR) \`\`\` ${data.byType.DR.join(", ")} \`\`\` ### Jellyfin API Requirements (JA) \`\`\` ${data.byType.JA.join(", ")} \`\`\` ## Detailed Mapping `; // Sort requirements by ID const sortedReqs = Object.keys(data.requirements).sort((a, b) => { const typeA = a.substring(0, 2); const typeB = b.substring(0, 2); const typeOrder = { UR: 0, IR: 1, DR: 2, JA: 3 }; if (typeOrder[typeA] !== typeOrder[typeB]) { return (typeOrder[typeA] || 4) - (typeOrder[typeB] || 4); } return a.localeCompare(b); }); for (const reqId of sortedReqs) { const entries = data.requirements[reqId]; md += `### ${reqId}\n\n`; md += `**Locations:** ${entries.length} file(s)\n\n`; for (const entry of entries) { md += `- **File:** [\`${entry.file}\`](${entry.file}#L${entry.line})\n`; md += ` - **Line:** ${entry.line}\n`; md += ` - **Context:** \`${entry.context.substring(0, 80)}...\`\n`; } md += "\n"; } return md; } function generateJson(data: TracesData): string { return JSON.stringify(data, null, 2); } // Main const args = Bun.argv.slice(2); const format = args.includes("--format") ? args[args.indexOf("--format") + 1] : "markdown"; console.error("Extracting TRACES from codebase..."); const data = await extractTraces(); if (format === "json") { console.log(generateJson(data)); } else { console.log(generateMarkdown(data)); } console.error(`\n✅ Complete! Found ${data.totalTraces} TRACES across ${data.totalFiles} files`);