#!/usr/bin/env bun
/**
 * Extract TRACES from source code and generate requirement mapping
 *
 * Usage:
 *   bun run scripts/extract-traces.ts
 *   bun run scripts/extract-traces.ts --format json
 *   bun run scripts/extract-traces.ts --format markdown > docs/TRACEABILITY.md
 */

import * as fs from "fs";
import * as path from "path";

interface TraceEntry {
  file: string;
  line: number;
  context: string;
  requirements: string[];
}

interface RequirementMapping {
  [reqId: string]: TraceEntry[];
}

interface TracesData {
  timestamp: string;
  totalFiles: number;
  totalTraces: number;
  requirements: RequirementMapping;
  byType: {
    UR: string[];
    IR: string[];
    DR: string[];
    JA: string[];
  };
}

const TRACES_PATTERN = /TRACES:\s*([^\n]+)/gi;
const REQ_ID_PATTERN = /([A-Z]{2})-(\d{3})/g;

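// Example of the annotation these patterns look for (illustrative only, not
// a quote from the codebase):
//
//   // TRACES: UR-001, IR-012, JA-003
//   export async function playTrack(itemId: string) { ... }
//
// TRACES_PATTERN captures the remainder of the line after "TRACES:", and
// extractRequirementIds() below extracts each two-letter/three-digit id,
// yielding ["UR-001", "IR-012", "JA-003"] for the line above.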
function extractRequirementIds(tracesString: string): string[] {
  const matches = [...tracesString.matchAll(REQ_ID_PATTERN)];
  return matches.map((m) => `${m[1]}-${m[2]}`);
}

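// Recursively collects .ts, .svelte and .rs files under src/ and
// src-tauri/src/ of the hard-coded project root, skipping node_modules,
// target, build and .git directories. Unreadable directories are ignored.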
function getAllSourceFiles(): string[] {
  const baseDir = "/home/dtourolle/Development/JellyTau";
  const patterns = ["src", "src-tauri/src"];
  const files: string[] = [];

  function walkDir(dir: string) {
    try {
      const entries = fs.readdirSync(dir, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = path.join(dir, entry.name);
        const relativePath = path.relative(baseDir, fullPath);

        // Skip node_modules, target, build
        if (
          relativePath.includes("node_modules") ||
          relativePath.includes("target") ||
          relativePath.includes("build") ||
          relativePath.includes(".git")
        ) {
          continue;
        }

        if (entry.isDirectory()) {
          walkDir(fullPath);
        } else if (
          entry.name.endsWith(".ts") ||
          entry.name.endsWith(".svelte") ||
          entry.name.endsWith(".rs")
        ) {
          files.push(fullPath);
        }
      }
    } catch (error) {
      // Skip directories we can't read
    }
  }

  for (const pattern of patterns) {
    const dir = path.join(baseDir, pattern);
    if (fs.existsSync(dir)) {
      walkDir(dir);
    }
  }

  return files;
}

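// Scans every collected source file for TRACES annotations, records the
// file, line and nearest enclosing declaration (function/struct/impl/etc.)
// for each requirement id, and buckets the ids by prefix (UR/IR/DR/JA).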
function extractTraces(): TracesData {
  const requirementMap: RequirementMapping = {};
  const byType: Record<string, Set<string>> = {
    UR: new Set(),
    IR: new Set(),
    DR: new Set(),
    JA: new Set(),
  };

  let totalTraces = 0;
  const baseDir = "/home/dtourolle/Development/JellyTau";

  const files = getAllSourceFiles();

  for (const fullPath of files) {
    try {
      const content = fs.readFileSync(fullPath, "utf-8");
      const lines = content.split("\n");
      const relativePath = path.relative(baseDir, fullPath);

      let match;
      TRACES_PATTERN.lastIndex = 0;

      while ((match = TRACES_PATTERN.exec(content)) !== null) {
        const tracesStr = match[1];
        const reqIds = extractRequirementIds(tracesStr);

        if (reqIds.length === 0) continue;

        // Find line number
        const beforeMatch = content.substring(0, match.index);
        const lineNum = beforeMatch.split("\n").length - 1;

        // Get context (function/class name if available)
        let context = "Unknown";
        for (let i = lineNum; i >= Math.max(0, lineNum - 10); i--) {
          const line = lines[i];
          if (
            line.includes("function ") ||
            line.includes("export const ") ||
            line.includes("pub fn ") ||
            line.includes("pub enum ") ||
            line.includes("pub struct ") ||
            line.includes("impl ") ||
            line.includes("async function ") ||
            line.includes("class ") ||
            line.includes("export type ")
          ) {
            context = line
              .trim()
              .replace(/^\s*\/\/\s*/, "")
              .replace(/^\s*\/\*\*\s*/, "");
            break;
          }
        }

        const entry: TraceEntry = {
          file: relativePath,
          line: lineNum + 1,
          context,
          requirements: reqIds,
        };

        for (const reqId of reqIds) {
          if (!requirementMap[reqId]) {
            requirementMap[reqId] = [];
          }
          requirementMap[reqId].push(entry);

          // Track by type
          const type = reqId.substring(0, 2);
          if (byType[type]) {
            byType[type].add(reqId);
          }
        }

        totalTraces++;
      }
    } catch (error) {
      // Skip files we can't read
    }
  }

  return {
    timestamp: new Date().toISOString(),
    totalFiles: files.length,
    totalTraces,
    requirements: requirementMap,
    byType: {
      UR: Array.from(byType["UR"]).sort(),
      IR: Array.from(byType["IR"]).sort(),
      DR: Array.from(byType["DR"]).sort(),
      JA: Array.from(byType["JA"]).sort(),
    },
  };
}

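// Renders TracesData as a markdown report: generation timestamp, summary
// counts, the covered ids grouped by type, then one section per requirement
// listing each file/line/context where it is traced.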
function generateMarkdown(data: TracesData): string {
  let md = `# Code Traceability Matrix

**Generated:** ${new Date(data.timestamp).toLocaleString()}

## Summary

- **Total Files Scanned:** ${data.totalFiles}
- **Total TRACES Found:** ${data.totalTraces}
- **Requirements Covered:**
  - User Requirements (UR): ${data.byType.UR.length}
  - Integration Requirements (IR): ${data.byType.IR.length}
  - Development Requirements (DR): ${data.byType.DR.length}
  - Jellyfin API Requirements (JA): ${data.byType.JA.length}

## Requirements by Type

### User Requirements (UR)
\`\`\`
${data.byType.UR.join(", ")}
\`\`\`

### Integration Requirements (IR)
\`\`\`
${data.byType.IR.join(", ")}
\`\`\`

### Development Requirements (DR)
\`\`\`
${data.byType.DR.join(", ")}
\`\`\`

### Jellyfin API Requirements (JA)
\`\`\`
${data.byType.JA.join(", ")}
\`\`\`

## Detailed Mapping

`;

  // Sort requirements by ID
  const sortedReqs = Object.keys(data.requirements).sort((a, b) => {
    const typeA = a.substring(0, 2);
    const typeB = b.substring(0, 2);
    const typeOrder: Record<string, number> = { UR: 0, IR: 1, DR: 2, JA: 3 };
    if (typeOrder[typeA] !== typeOrder[typeB]) {
      return (typeOrder[typeA] || 4) - (typeOrder[typeB] || 4);
    }
    return a.localeCompare(b);
  });

  for (const reqId of sortedReqs) {
    const entries = data.requirements[reqId];
    md += `### ${reqId}\n\n`;
    md += `**Locations:** ${entries.length} file(s)\n\n`;

    for (const entry of entries) {
      md += `- **File:** [\`${entry.file}\`](${entry.file}#L${entry.line})\n`;
      md += `  - **Line:** ${entry.line}\n`;
      const contextPreview = entry.context.substring(0, 70);
      md += `  - **Context:** \`${contextPreview}${entry.context.length > 70 ? "..." : ""}\`\n`;
    }
    md += "\n";
  }

  return md;
}

function generateJson(data: TracesData): string {
  return JSON.stringify(data, null, 2);
}

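// CLI entry point. Progress messages go to stderr (console.error) so that
// stdout carries only the report and can be redirected, as in the
// "--format markdown > docs/TRACEABILITY.md" usage above.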
// Main
const args = Bun.argv.slice(2);
const format = args.includes("--format")
  ? args[args.indexOf("--format") + 1]
  : "markdown";

console.error("🔍 Extracting TRACES from codebase...");
const data = extractTraces();

if (format === "json") {
  console.log(generateJson(data));
} else {
  console.log(generateMarkdown(data));
}

console.error(
  `\n✅ Complete! Found ${data.totalTraces} TRACES across ${data.totalFiles} files`
);