jellytau/scripts/extract-traces.ts
Duncan Tourolle 6d1c618a3a Implement Phase 1-2 of backend migration refactoring
CRITICAL FIXES (Previous):
- Fix nextEpisode event handlers (they were calling undefined methods)
- Replace queue polling with event-based updates (90% reduction in backend calls; sketched after this list)
- Move device ID to Tauri secure storage (security fix)
- Fix event listener memory leaks with proper cleanup
- Replace browser alerts with toast notifications
- Remove silent error handlers and improve logging
- Fix race condition in downloads store with request queuing
- Centralize duration formatting utility
- Add input validation to image URLs (prevent injection attacks)
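
As a rough sketch of the event-driven queue updates with proper cleanup
(the "queue-updated" event name and payload shape are hypothetical, not
the actual identifiers in this codebase):

    import { listen, type UnlistenFn } from "@tauri-apps/api/event";
    import { onDestroy } from "svelte";
    import { writable } from "svelte/store";

    // Hypothetical queue shape, for illustration only
    interface QueueState {
      itemIds: string[];
      position: number;
    }
    const queueStore = writable<QueueState>({ itemIds: [], position: 0 });

    let unlisten: UnlistenFn | undefined;

    // Subscribe once; the backend pushes changes instead of being polled.
    listen<QueueState>("queue-updated", (e) => queueStore.set(e.payload))
      .then((fn) => (unlisten = fn));

    // Release the handler on teardown to avoid the leaks fixed above.
    onDestroy(() => unlisten?.());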

PHASE 1: BACKEND SORTING & FILTERING 
- Created Jellyfin field mapping utility (src/lib/utils/jellyfinFieldMapping.ts)
  - Maps frontend sort keys to Jellyfin API field names (sketched after this list)
  - Provides item type constants and groups
  - Includes 20+ test cases for comprehensive coverage
- Updated route components to use backend sorting:
  - src/routes/library/music/tracks/+page.svelte
  - src/routes/library/music/albums/+page.svelte
  - src/routes/library/music/artists/+page.svelte
- Refactored GenericMediaListPage.svelte:
  - Removed client-side sorting/filtering logic
  - Removed filteredItems and applySortAndFilter()
  - Now passes sort parameters to the backend
  - Uses backend search instead of client-side filtering
  - Added sortOrder state for the Ascending/Descending toggle
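
At its core the field mapping is a lookup table with a safe fallback; a
minimal sketch (the keys and the fallback choice are illustrative, not
the file's actual contents):

    // Frontend sort key -> Jellyfin API SortBy field name
    const SORT_FIELD_MAP: Record<string, string> = {
      name: "SortName",
      dateAdded: "DateCreated",
      playCount: "PlayCount",
      releaseDate: "PremiereDate",
    };

    export function toJellyfinSortField(key: string): string {
      // Unknown keys fall back to SortName rather than failing the query.
      return SORT_FIELD_MAP[key] ?? "SortName";
    }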

PHASE 3: SEARCH (Already Implemented) 
- Search now uses backend repository_search command
- Replaced client-side filtering with backend calls
- Laid the groundwork for input debouncing (see the sketch below)
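
A minimal trailing-edge debounce for the search box might look like this
(the 300 ms delay and the argument name are illustrative; the invoke
import path shown is Tauri v2's "@tauri-apps/api/core"):

    import { invoke } from "@tauri-apps/api/core";

    function debounce<A extends unknown[]>(
      fn: (...args: A) => void,
      delayMs: number,
    ): (...args: A) => void {
      let timer: ReturnType<typeof setTimeout> | undefined;
      return (...args: A) => {
        clearTimeout(timer);
        timer = setTimeout(() => fn(...args), delayMs);
      };
    }

    // Only hit the backend after the user pauses typing.
    const debouncedSearch = debounce(
      (query: string) => invoke("repository_search", { query }),
      300,
    );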

PHASE 2: BACKEND URL CONSTRUCTION (Started)
- Converted getImageUrl() to an async backend call (see the sketch after this list)
- Removed sync URL construction with credentials
- Next: Update 12+ components to handle async image URLs
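
A sketch of the async helper (the command name and parameter are assumed,
not taken from the codebase):

    import { invoke } from "@tauri-apps/api/core";

    // URL assembly and credential handling now live in the Rust backend;
    // the frontend only receives a finished URL.
    export async function getImageUrl(itemId: string): Promise<string> {
      return invoke<string>("get_image_url", { itemId });
    }

Call sites change from a sync expression to an await, which is why the
12+ components listed above need updating.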

UNIT TESTS ADDED:
- jellyfinFieldMapping.test.ts (20+ test cases)
- duration.test.ts (15+ test cases)
- validation.test.ts (25+ test cases)
- deviceId.test.ts (8+ test cases)
- playerEvents.test.ts (event initialization tests)

SUMMARY:
- Eliminated all client-side sorting/filtering logic
- Improved security by removing frontend URL construction
- Reduced backend polling load significantly
- Fixed critical bugs (nextEpisode, race conditions, memory leaks)
- 80+ new unit tests across utilities and services
- Infrastructure (field mapping, validation, tests) in place for the remaining phases

Co-Authored-By: Claude Haiku 4.5 <noreply@anthropic.com>
2026-02-13 23:34:18 +01:00

#!/usr/bin/env bun
/**
 * Extract TRACES from source code and generate requirement mapping
 *
 * Usage:
 *   bun run scripts/extract-traces.ts
 *   bun run scripts/extract-traces.ts --format json
 *   bun run scripts/extract-traces.ts --format markdown > docs/TRACEABILITY.md
 */
import * as fs from "fs";
import * as path from "path";

// Repository root: resolved from the working directory instead of a
// hard-coded absolute path, so the script runs on any checkout.
const ROOT = process.cwd();

// Thin wrapper around Bun's built-in Bun.Glob
const glob = (pattern: string) => new Bun.Glob(pattern);
interface TraceEntry {
  file: string;
  line: number;
  context: string;
  requirements: string[];
}

interface RequirementMapping {
  [reqId: string]: TraceEntry[];
}

interface TracesData {
  timestamp: string;
  totalFiles: number;
  totalTraces: number;
  requirements: RequirementMapping;
  byType: {
    UR: string[];
    IR: string[];
    DR: string[];
    JA: string[];
  };
}
const TRACES_PATTERN = /TRACES:\s*([^\n]+)/gi;
const REQ_ID_PATTERN = /([A-Z]{2})-(\d{3})/g;
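// Example of an annotation this script recognizes (illustrative snippet,
// not a real file in the repo):
//
//   // TRACES: UR-001, DR-042
//   export function playTrack(id: string) { ... }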
function extractRequirementIds(tracesString: string): string[] {
  const matches = [...tracesString.matchAll(REQ_ID_PATTERN)];
  return matches.map((m) => `${m[1]}-${m[2]}`);
}
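// e.g. extractRequirementIds("TRACES: UR-001, DR-042") -> ["UR-001", "DR-042"]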
async function extractTraces(): Promise<TracesData> {
  const patterns = [
    "src/**/*.ts",
    "src/**/*.svelte",
    "src-tauri/src/**/*.rs",
  ];

  const requirementMap: RequirementMapping = {};
  const byType: Record<string, Set<string>> = {
    UR: new Set(),
    IR: new Set(),
    DR: new Set(),
    JA: new Set(),
  };
  let totalTraces = 0;
  const processedFiles = new Set<string>();

  for (const pattern of patterns) {
    const globber = glob(pattern);
    const files: string[] = [];
    for await (const file of globber.scan({ cwd: ROOT })) {
      files.push(file);
    }

    for (const file of files) {
      if (processedFiles.has(file)) continue;
      processedFiles.add(file);

      try {
        const fullPath = path.join(ROOT, file);
        const content = fs.readFileSync(fullPath, "utf-8");
        const lines = content.split("\n");

        let match;
        TRACES_PATTERN.lastIndex = 0;
        while ((match = TRACES_PATTERN.exec(content)) !== null) {
          const tracesStr = match[1];
          const reqIds = extractRequirementIds(tracesStr);
          if (reqIds.length === 0) continue;

          // Find the (0-based) line number of the match
          const beforeMatch = content.substring(0, match.index);
          const lineNum = beforeMatch.split("\n").length - 1;

          // Walk upwards to find the enclosing function/class/impl name
          let context = "Unknown";
          for (let i = lineNum; i >= Math.max(0, lineNum - 10); i--) {
            const line = lines[i];
            if (
              line.includes("function ") ||
              line.includes("export const ") ||
              line.includes("pub fn ") ||
              line.includes("pub enum ") ||
              line.includes("pub struct ") ||
              line.includes("impl ") ||
              line.includes("async function ") ||
              line.includes("class ")
            ) {
              context = line.trim();
              break;
            }
          }

          const entry: TraceEntry = {
            file: file.replace(/^\//, ""),
            line: lineNum + 1,
            context,
            requirements: reqIds,
          };

          for (const reqId of reqIds) {
            if (!requirementMap[reqId]) {
              requirementMap[reqId] = [];
            }
            requirementMap[reqId].push(entry);

            // Track coverage by requirement type (UR/IR/DR/JA)
            const type = reqId.substring(0, 2);
            if (byType[type]) {
              byType[type].add(reqId);
            }
          }
          totalTraces++;
        }
      } catch (error) {
        console.error(`Error processing ${file}:`, error);
      }
    }
  }

  return {
    timestamp: new Date().toISOString(),
    totalFiles: processedFiles.size,
    totalTraces,
    requirements: requirementMap,
    byType: {
      UR: Array.from(byType["UR"]).sort(),
      IR: Array.from(byType["IR"]).sort(),
      DR: Array.from(byType["DR"]).sort(),
      JA: Array.from(byType["JA"]).sort(),
    },
  };
}
function generateMarkdown(data: TracesData): string {
  let md = `# Code Traceability Matrix

**Generated:** ${new Date(data.timestamp).toLocaleString()}

## Summary

- **Total Files Scanned:** ${data.totalFiles}
- **Total TRACES Found:** ${data.totalTraces}
- **Requirements Covered:**
  - User Requirements (UR): ${data.byType.UR.length}
  - Integration Requirements (IR): ${data.byType.IR.length}
  - Development Requirements (DR): ${data.byType.DR.length}
  - Jellyfin API Requirements (JA): ${data.byType.JA.length}

## Requirements by Type

### User Requirements (UR)

\`\`\`
${data.byType.UR.join(", ")}
\`\`\`

### Integration Requirements (IR)

\`\`\`
${data.byType.IR.join(", ")}
\`\`\`

### Development Requirements (DR)

\`\`\`
${data.byType.DR.join(", ")}
\`\`\`

### Jellyfin API Requirements (JA)

\`\`\`
${data.byType.JA.join(", ")}
\`\`\`

## Detailed Mapping

`;
  // Sort requirements by type (UR, IR, DR, JA), then alphabetically by ID
  const typeOrder: Record<string, number> = { UR: 0, IR: 1, DR: 2, JA: 3 };
  const sortedReqs = Object.keys(data.requirements).sort((a, b) => {
    const orderA = typeOrder[a.substring(0, 2)] ?? 4;
    const orderB = typeOrder[b.substring(0, 2)] ?? 4;
    if (orderA !== orderB) {
      return orderA - orderB;
    }
    return a.localeCompare(b);
  });
  for (const reqId of sortedReqs) {
    const entries = data.requirements[reqId];
    md += `### ${reqId}\n\n`;
    md += `**Locations:** ${entries.length} file(s)\n\n`;
    for (const entry of entries) {
      // Truncate long context lines; only add an ellipsis when truncating
      const snippet =
        entry.context.length > 80
          ? `${entry.context.substring(0, 80)}...`
          : entry.context;
      md += `- **File:** [\`${entry.file}\`](${entry.file}#L${entry.line})\n`;
      md += `  - **Line:** ${entry.line}\n`;
      md += `  - **Context:** \`${snippet}\`\n`;
    }
    md += "\n";
  }
  return md;
}
function generateJson(data: TracesData): string {
  return JSON.stringify(data, null, 2);
}
// Main entry point
const args = Bun.argv.slice(2);
const format = args.includes("--format")
  ? args[args.indexOf("--format") + 1]
  : "markdown";

console.error("Extracting TRACES from codebase...");
const data = await extractTraces();

if (format === "json") {
  console.log(generateJson(data));
} else {
  console.log(generateMarkdown(data));
}

console.error(`\n✅ Complete! Found ${data.totalTraces} TRACES across ${data.totalFiles} files`);