mirror of
https://github.com/samiyev/puaros.git
synced 2025-12-27 23:06:54 +05:00
feat(ipuaro): implement indexer module (v0.3.0)
Add complete indexer infrastructure: - FileScanner: recursive scanning with gitignore support - ASTParser: tree-sitter based TS/JS/TSX/JSX parsing - MetaAnalyzer: complexity metrics, dependency analysis - IndexBuilder: symbol index and dependency graph - Watchdog: file watching with chokidar and debouncing 321 tests, 96.38% coverage
This commit is contained in:
@@ -5,6 +5,73 @@ All notable changes to this project will be documented in this file.
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [Unreleased] - 0.3.0 Indexer (complete)
|
||||
|
||||
### Added
|
||||
|
||||
- **FileScanner (0.3.1)**
|
||||
- Recursive directory scanning with async generator
|
||||
- `.gitignore` support via `globby` (replaced `ignore` package for ESM compatibility)
|
||||
- Filters: binary files, node_modules, dist, default ignore patterns
|
||||
- Progress callback for UI integration
|
||||
- `isTextFile()` and `readFileContent()` static utilities
|
||||
- 22 unit tests
|
||||
|
||||
- **ASTParser (0.3.2)**
|
||||
- Tree-sitter based parsing for TS, TSX, JS, JSX
|
||||
- Extracts: imports, exports, functions, classes, interfaces, type aliases
|
||||
- Import classification: internal, external, builtin (using `node:module` builtinModules)
|
||||
- Graceful error handling with partial AST on syntax errors
|
||||
- 30 unit tests
|
||||
|
||||
- **MetaAnalyzer (0.3.3)**
|
||||
- Complexity metrics: LOC (excluding comments), nesting depth, cyclomatic complexity, overall score
|
||||
- Dependency resolution: internal imports resolved to absolute file paths
|
||||
- Dependents calculation: reverse dependency lookup across all project files
|
||||
- File type classification: source, test, config, types, unknown
|
||||
- Entry point detection: index files, main/app/cli/server patterns, files with no dependents
|
||||
- Hub detection: files with >5 dependents
|
||||
- Batch analysis via `analyzeAll()` method
|
||||
- 54 unit tests
|
||||
|
||||
- **IndexBuilder (0.3.4)**
|
||||
- SymbolIndex: maps symbol names to locations for quick lookup (functions, classes, interfaces, types, variables)
|
||||
- Qualified names for class methods: `ClassName.methodName`
|
||||
- DepsGraph: bidirectional import mapping (`imports` and `importedBy`)
|
||||
- Import resolution: handles `.js` → `.ts`, index.ts, directory imports
|
||||
- `findSymbol()`: exact symbol lookup
|
||||
- `searchSymbols()`: regex-based symbol search
|
||||
- `findCircularDependencies()`: detect import cycles
|
||||
- `getStats()`: comprehensive index statistics (symbols by type, hubs, orphans)
|
||||
- 35 unit tests
|
||||
|
||||
- **Watchdog (0.3.5)**
|
||||
- File watching with chokidar (native events + polling fallback)
|
||||
- Debounced change handling (configurable, default 500ms)
|
||||
- Event types: add, change, unlink
|
||||
- Extension filtering (default: SUPPORTED_EXTENSIONS)
|
||||
- Ignore patterns (default: DEFAULT_IGNORE_PATTERNS)
|
||||
- Multiple callback support
|
||||
- `flushAll()` for immediate processing
|
||||
- Silent error handling for stability
|
||||
- 21 unit tests
|
||||
|
||||
- **Infrastructure Constants**
|
||||
- `tree-sitter-types.ts`: NodeType and FieldName constants for tree-sitter
|
||||
- Eliminates magic strings in ASTParser
|
||||
|
||||
- **Dependencies**
|
||||
- Added `globby` for ESM-native file globbing
|
||||
- Removed `ignore` package (CJS incompatibility with nodenext)
|
||||
|
||||
### Changed
|
||||
|
||||
- Refactored ASTParser to use constants instead of magic strings
|
||||
- Total tests: 321
|
||||
- Coverage: 96.43%
|
||||
|
||||
---
|
||||
|
||||
## [0.2.0] - 2025-01-30
|
||||
|
||||
### Added
|
||||
|
||||
@@ -33,28 +33,28 @@
|
||||
"format": "prettier --write src"
|
||||
},
|
||||
"dependencies": {
|
||||
"ink": "^4.4.1",
|
||||
"ink-text-input": "^5.0.1",
|
||||
"react": "^18.2.0",
|
||||
"ioredis": "^5.4.1",
|
||||
"tree-sitter": "^0.21.1",
|
||||
"tree-sitter-typescript": "^0.21.2",
|
||||
"tree-sitter-javascript": "^0.21.0",
|
||||
"ollama": "^0.5.11",
|
||||
"simple-git": "^3.27.0",
|
||||
"chokidar": "^3.6.0",
|
||||
"commander": "^11.1.0",
|
||||
"zod": "^3.23.8",
|
||||
"ignore": "^5.3.2"
|
||||
"globby": "^16.0.0",
|
||||
"ink": "^4.4.1",
|
||||
"ink-text-input": "^5.0.1",
|
||||
"ioredis": "^5.4.1",
|
||||
"ollama": "^0.5.11",
|
||||
"react": "^18.2.0",
|
||||
"simple-git": "^3.27.0",
|
||||
"tree-sitter": "^0.21.1",
|
||||
"tree-sitter-javascript": "^0.21.0",
|
||||
"tree-sitter-typescript": "^0.21.2",
|
||||
"zod": "^3.23.8"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.10.1",
|
||||
"@types/react": "^18.2.0",
|
||||
"vitest": "^1.6.0",
|
||||
"@vitest/coverage-v8": "^1.6.0",
|
||||
"@vitest/ui": "^1.6.0",
|
||||
"tsup": "^8.3.5",
|
||||
"typescript": "^5.7.2"
|
||||
"typescript": "^5.7.2",
|
||||
"vitest": "^1.6.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20.0.0"
|
||||
|
||||
@@ -1,2 +1,3 @@
|
||||
// Infrastructure layer exports
|
||||
export * from "./storage/index.js"
|
||||
export * from "./indexer/index.js"
|
||||
|
||||
532
packages/ipuaro/src/infrastructure/indexer/ASTParser.ts
Normal file
532
packages/ipuaro/src/infrastructure/indexer/ASTParser.ts
Normal file
@@ -0,0 +1,532 @@
|
||||
import { builtinModules } from "node:module"
|
||||
import Parser from "tree-sitter"
|
||||
import TypeScript from "tree-sitter-typescript"
|
||||
import JavaScript from "tree-sitter-javascript"
|
||||
import {
|
||||
createEmptyFileAST,
|
||||
type ExportInfo,
|
||||
type FileAST,
|
||||
type ImportInfo,
|
||||
type MethodInfo,
|
||||
type ParameterInfo,
|
||||
type PropertyInfo,
|
||||
} from "../../domain/value-objects/FileAST.js"
|
||||
import { FieldName, NodeType } from "./tree-sitter-types.js"
|
||||
|
||||
type Language = "ts" | "tsx" | "js" | "jsx"
|
||||
type SyntaxNode = Parser.SyntaxNode
|
||||
|
||||
/**
|
||||
* Parses source code into AST using tree-sitter.
|
||||
*/
|
||||
export class ASTParser {
|
||||
private readonly parsers = new Map<Language, Parser>()
|
||||
|
||||
constructor() {
|
||||
this.initializeParsers()
|
||||
}
|
||||
|
||||
private initializeParsers(): void {
|
||||
const tsParser = new Parser()
|
||||
tsParser.setLanguage(TypeScript.typescript)
|
||||
this.parsers.set("ts", tsParser)
|
||||
|
||||
const tsxParser = new Parser()
|
||||
tsxParser.setLanguage(TypeScript.tsx)
|
||||
this.parsers.set("tsx", tsxParser)
|
||||
|
||||
const jsParser = new Parser()
|
||||
jsParser.setLanguage(JavaScript)
|
||||
this.parsers.set("js", jsParser)
|
||||
this.parsers.set("jsx", jsParser)
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse source code and extract AST information.
|
||||
*/
|
||||
parse(content: string, language: Language): FileAST {
|
||||
const parser = this.parsers.get(language)
|
||||
if (!parser) {
|
||||
return {
|
||||
...createEmptyFileAST(),
|
||||
parseError: true,
|
||||
parseErrorMessage: `Unsupported language: ${language}`,
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const tree = parser.parse(content)
|
||||
const root = tree.rootNode
|
||||
|
||||
if (root.hasError) {
|
||||
const ast = this.extractAST(root, language)
|
||||
ast.parseError = true
|
||||
ast.parseErrorMessage = "Syntax error in source code"
|
||||
return ast
|
||||
}
|
||||
|
||||
return this.extractAST(root, language)
|
||||
} catch (error) {
|
||||
return {
|
||||
...createEmptyFileAST(),
|
||||
parseError: true,
|
||||
parseErrorMessage: error instanceof Error ? error.message : "Unknown parse error",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extractAST(root: SyntaxNode, language: Language): FileAST {
|
||||
const ast = createEmptyFileAST()
|
||||
const isTypeScript = language === "ts" || language === "tsx"
|
||||
|
||||
for (const child of root.children) {
|
||||
this.visitNode(child, ast, isTypeScript)
|
||||
}
|
||||
|
||||
return ast
|
||||
}
|
||||
|
||||
private visitNode(node: SyntaxNode, ast: FileAST, isTypeScript: boolean): void {
|
||||
switch (node.type) {
|
||||
case NodeType.IMPORT_STATEMENT:
|
||||
this.extractImport(node, ast)
|
||||
break
|
||||
case NodeType.EXPORT_STATEMENT:
|
||||
this.extractExport(node, ast)
|
||||
break
|
||||
case NodeType.FUNCTION_DECLARATION:
|
||||
this.extractFunction(node, ast, false)
|
||||
break
|
||||
case NodeType.LEXICAL_DECLARATION:
|
||||
this.extractLexicalDeclaration(node, ast)
|
||||
break
|
||||
case NodeType.CLASS_DECLARATION:
|
||||
this.extractClass(node, ast, false)
|
||||
break
|
||||
case NodeType.INTERFACE_DECLARATION:
|
||||
if (isTypeScript) {
|
||||
this.extractInterface(node, ast, false)
|
||||
}
|
||||
break
|
||||
case NodeType.TYPE_ALIAS_DECLARATION:
|
||||
if (isTypeScript) {
|
||||
this.extractTypeAlias(node, ast, false)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
private extractImport(node: SyntaxNode, ast: FileAST): void {
|
||||
const sourceNode = node.childForFieldName(FieldName.SOURCE)
|
||||
if (!sourceNode) {
|
||||
return
|
||||
}
|
||||
|
||||
const from = this.getStringValue(sourceNode)
|
||||
const line = node.startPosition.row + 1
|
||||
const importType = this.classifyImport(from)
|
||||
|
||||
const importClause = node.children.find((c) => c.type === NodeType.IMPORT_CLAUSE)
|
||||
if (!importClause) {
|
||||
ast.imports.push({
|
||||
name: "*",
|
||||
from,
|
||||
line,
|
||||
type: importType,
|
||||
isDefault: false,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
for (const child of importClause.children) {
|
||||
if (child.type === NodeType.IDENTIFIER) {
|
||||
ast.imports.push({
|
||||
name: child.text,
|
||||
from,
|
||||
line,
|
||||
type: importType,
|
||||
isDefault: true,
|
||||
})
|
||||
} else if (child.type === NodeType.NAMESPACE_IMPORT) {
|
||||
const alias = child.children.find((c) => c.type === NodeType.IDENTIFIER)
|
||||
ast.imports.push({
|
||||
name: alias?.text ?? "*",
|
||||
from,
|
||||
line,
|
||||
type: importType,
|
||||
isDefault: false,
|
||||
})
|
||||
} else if (child.type === NodeType.NAMED_IMPORTS) {
|
||||
for (const specifier of child.children) {
|
||||
if (specifier.type === NodeType.IMPORT_SPECIFIER) {
|
||||
const nameNode = specifier.childForFieldName(FieldName.NAME)
|
||||
const aliasNode = specifier.childForFieldName(FieldName.ALIAS)
|
||||
ast.imports.push({
|
||||
name: aliasNode?.text ?? nameNode?.text ?? "",
|
||||
from,
|
||||
line,
|
||||
type: importType,
|
||||
isDefault: false,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extractExport(node: SyntaxNode, ast: FileAST): void {
|
||||
const isDefault = node.children.some((c) => c.type === NodeType.DEFAULT)
|
||||
const declaration = node.childForFieldName(FieldName.DECLARATION)
|
||||
|
||||
if (declaration) {
|
||||
switch (declaration.type) {
|
||||
case NodeType.FUNCTION_DECLARATION:
|
||||
this.extractFunction(declaration, ast, true)
|
||||
this.addExportInfo(ast, declaration, "function", isDefault)
|
||||
break
|
||||
case NodeType.CLASS_DECLARATION:
|
||||
this.extractClass(declaration, ast, true)
|
||||
this.addExportInfo(ast, declaration, "class", isDefault)
|
||||
break
|
||||
case NodeType.INTERFACE_DECLARATION:
|
||||
this.extractInterface(declaration, ast, true)
|
||||
this.addExportInfo(ast, declaration, "interface", isDefault)
|
||||
break
|
||||
case NodeType.TYPE_ALIAS_DECLARATION:
|
||||
this.extractTypeAlias(declaration, ast, true)
|
||||
this.addExportInfo(ast, declaration, "type", isDefault)
|
||||
break
|
||||
case NodeType.LEXICAL_DECLARATION:
|
||||
this.extractLexicalDeclaration(declaration, ast, true)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
const exportClause = node.children.find((c) => c.type === NodeType.EXPORT_CLAUSE)
|
||||
if (exportClause) {
|
||||
for (const specifier of exportClause.children) {
|
||||
if (specifier.type === NodeType.EXPORT_SPECIFIER) {
|
||||
const nameNode = specifier.childForFieldName(FieldName.NAME)
|
||||
if (nameNode) {
|
||||
ast.exports.push({
|
||||
name: nameNode.text,
|
||||
line: node.startPosition.row + 1,
|
||||
isDefault: false,
|
||||
kind: "variable",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extractFunction(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
if (!nameNode) {
|
||||
return
|
||||
}
|
||||
|
||||
const params = this.extractParameters(node)
|
||||
const isAsync = node.children.some((c) => c.type === NodeType.ASYNC)
|
||||
const returnTypeNode = node.childForFieldName(FieldName.RETURN_TYPE)
|
||||
|
||||
ast.functions.push({
|
||||
name: nameNode.text,
|
||||
lineStart: node.startPosition.row + 1,
|
||||
lineEnd: node.endPosition.row + 1,
|
||||
params,
|
||||
isAsync,
|
||||
isExported,
|
||||
returnType: returnTypeNode?.text?.replace(/^:\s*/, ""),
|
||||
})
|
||||
}
|
||||
|
||||
private extractLexicalDeclaration(node: SyntaxNode, ast: FileAST, isExported = false): void {
|
||||
for (const child of node.children) {
|
||||
if (child.type === NodeType.VARIABLE_DECLARATOR) {
|
||||
const nameNode = child.childForFieldName(FieldName.NAME)
|
||||
const valueNode = child.childForFieldName(FieldName.VALUE)
|
||||
|
||||
if (
|
||||
valueNode?.type === NodeType.ARROW_FUNCTION ||
|
||||
valueNode?.type === NodeType.FUNCTION
|
||||
) {
|
||||
const params = this.extractParameters(valueNode)
|
||||
const isAsync = valueNode.children.some((c) => c.type === NodeType.ASYNC)
|
||||
|
||||
ast.functions.push({
|
||||
name: nameNode?.text ?? "",
|
||||
lineStart: node.startPosition.row + 1,
|
||||
lineEnd: node.endPosition.row + 1,
|
||||
params,
|
||||
isAsync,
|
||||
isExported,
|
||||
})
|
||||
|
||||
if (isExported) {
|
||||
ast.exports.push({
|
||||
name: nameNode?.text ?? "",
|
||||
line: node.startPosition.row + 1,
|
||||
isDefault: false,
|
||||
kind: "function",
|
||||
})
|
||||
}
|
||||
} else if (isExported && nameNode) {
|
||||
ast.exports.push({
|
||||
name: nameNode.text,
|
||||
line: node.startPosition.row + 1,
|
||||
isDefault: false,
|
||||
kind: "variable",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extractClass(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
if (!nameNode) {
|
||||
return
|
||||
}
|
||||
|
||||
const body = node.childForFieldName(FieldName.BODY)
|
||||
const methods: MethodInfo[] = []
|
||||
const properties: PropertyInfo[] = []
|
||||
|
||||
if (body) {
|
||||
for (const member of body.children) {
|
||||
if (member.type === NodeType.METHOD_DEFINITION) {
|
||||
methods.push(this.extractMethod(member))
|
||||
} else if (
|
||||
member.type === NodeType.PUBLIC_FIELD_DEFINITION ||
|
||||
member.type === NodeType.FIELD_DEFINITION
|
||||
) {
|
||||
properties.push(this.extractProperty(member))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let extendsName: string | undefined
|
||||
const implementsList: string[] = []
|
||||
|
||||
for (const child of node.children) {
|
||||
if (child.type === NodeType.CLASS_HERITAGE) {
|
||||
for (const clause of child.children) {
|
||||
if (clause.type === NodeType.EXTENDS_CLAUSE) {
|
||||
const typeNode = clause.children.find(
|
||||
(c) =>
|
||||
c.type === NodeType.TYPE_IDENTIFIER ||
|
||||
c.type === NodeType.IDENTIFIER,
|
||||
)
|
||||
extendsName = typeNode?.text
|
||||
} else if (clause.type === NodeType.IMPLEMENTS_CLAUSE) {
|
||||
for (const impl of clause.children) {
|
||||
if (
|
||||
impl.type === NodeType.TYPE_IDENTIFIER ||
|
||||
impl.type === NodeType.IDENTIFIER
|
||||
) {
|
||||
implementsList.push(impl.text)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (child.type === NodeType.EXTENDS_CLAUSE) {
|
||||
const typeNode = child.children.find(
|
||||
(c) => c.type === NodeType.TYPE_IDENTIFIER || c.type === NodeType.IDENTIFIER,
|
||||
)
|
||||
extendsName = typeNode?.text
|
||||
}
|
||||
}
|
||||
|
||||
const isAbstract = node.children.some((c) => c.type === NodeType.ABSTRACT)
|
||||
|
||||
ast.classes.push({
|
||||
name: nameNode.text,
|
||||
lineStart: node.startPosition.row + 1,
|
||||
lineEnd: node.endPosition.row + 1,
|
||||
methods,
|
||||
properties,
|
||||
extends: extendsName,
|
||||
implements: implementsList,
|
||||
isExported,
|
||||
isAbstract,
|
||||
})
|
||||
}
|
||||
|
||||
private extractMethod(node: SyntaxNode): MethodInfo {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
const params = this.extractParameters(node)
|
||||
const isAsync = node.children.some((c) => c.type === NodeType.ASYNC)
|
||||
const isStatic = node.children.some((c) => c.type === NodeType.STATIC)
|
||||
|
||||
let visibility: "public" | "private" | "protected" = "public"
|
||||
for (const child of node.children) {
|
||||
if (child.type === NodeType.ACCESSIBILITY_MODIFIER) {
|
||||
visibility = child.text as "public" | "private" | "protected"
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
name: nameNode?.text ?? "",
|
||||
lineStart: node.startPosition.row + 1,
|
||||
lineEnd: node.endPosition.row + 1,
|
||||
params,
|
||||
isAsync,
|
||||
visibility,
|
||||
isStatic,
|
||||
}
|
||||
}
|
||||
|
||||
private extractProperty(node: SyntaxNode): PropertyInfo {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
const typeNode = node.childForFieldName(FieldName.TYPE)
|
||||
const isStatic = node.children.some((c) => c.type === NodeType.STATIC)
|
||||
const isReadonly = node.children.some((c) => c.text === NodeType.READONLY)
|
||||
|
||||
let visibility: "public" | "private" | "protected" = "public"
|
||||
for (const child of node.children) {
|
||||
if (child.type === NodeType.ACCESSIBILITY_MODIFIER) {
|
||||
visibility = child.text as "public" | "private" | "protected"
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
name: nameNode?.text ?? "",
|
||||
line: node.startPosition.row + 1,
|
||||
type: typeNode?.text,
|
||||
visibility,
|
||||
isStatic,
|
||||
isReadonly,
|
||||
}
|
||||
}
|
||||
|
||||
private extractInterface(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
if (!nameNode) {
|
||||
return
|
||||
}
|
||||
|
||||
const body = node.childForFieldName(FieldName.BODY)
|
||||
const properties: PropertyInfo[] = []
|
||||
|
||||
if (body) {
|
||||
for (const member of body.children) {
|
||||
if (member.type === NodeType.PROPERTY_SIGNATURE) {
|
||||
const propName = member.childForFieldName(FieldName.NAME)
|
||||
const propType = member.childForFieldName(FieldName.TYPE)
|
||||
properties.push({
|
||||
name: propName?.text ?? "",
|
||||
line: member.startPosition.row + 1,
|
||||
type: propType?.text,
|
||||
visibility: "public",
|
||||
isStatic: false,
|
||||
isReadonly: member.children.some((c) => c.text === NodeType.READONLY),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const extendsList: string[] = []
|
||||
const extendsClause = node.children.find((c) => c.type === NodeType.EXTENDS_TYPE_CLAUSE)
|
||||
if (extendsClause) {
|
||||
for (const child of extendsClause.children) {
|
||||
if (child.type === NodeType.TYPE_IDENTIFIER) {
|
||||
extendsList.push(child.text)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ast.interfaces.push({
|
||||
name: nameNode.text,
|
||||
lineStart: node.startPosition.row + 1,
|
||||
lineEnd: node.endPosition.row + 1,
|
||||
properties,
|
||||
extends: extendsList,
|
||||
isExported,
|
||||
})
|
||||
}
|
||||
|
||||
private extractTypeAlias(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
if (!nameNode) {
|
||||
return
|
||||
}
|
||||
|
||||
ast.typeAliases.push({
|
||||
name: nameNode.text,
|
||||
line: node.startPosition.row + 1,
|
||||
isExported,
|
||||
})
|
||||
}
|
||||
|
||||
private extractParameters(node: SyntaxNode): ParameterInfo[] {
|
||||
const params: ParameterInfo[] = []
|
||||
const paramsNode = node.childForFieldName(FieldName.PARAMETERS)
|
||||
|
||||
if (paramsNode) {
|
||||
for (const param of paramsNode.children) {
|
||||
if (
|
||||
param.type === NodeType.REQUIRED_PARAMETER ||
|
||||
param.type === NodeType.OPTIONAL_PARAMETER ||
|
||||
param.type === NodeType.IDENTIFIER
|
||||
) {
|
||||
const nameNode =
|
||||
param.type === NodeType.IDENTIFIER
|
||||
? param
|
||||
: param.childForFieldName(FieldName.PATTERN)
|
||||
const typeNode = param.childForFieldName(FieldName.TYPE)
|
||||
const defaultValue = param.childForFieldName(FieldName.VALUE)
|
||||
|
||||
params.push({
|
||||
name: nameNode?.text ?? "",
|
||||
type: typeNode?.text,
|
||||
optional: param.type === NodeType.OPTIONAL_PARAMETER,
|
||||
hasDefault: defaultValue !== null,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return params
|
||||
}
|
||||
|
||||
private addExportInfo(
|
||||
ast: FileAST,
|
||||
node: SyntaxNode,
|
||||
kind: ExportInfo["kind"],
|
||||
isDefault: boolean,
|
||||
): void {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
if (nameNode) {
|
||||
ast.exports.push({
|
||||
name: nameNode.text,
|
||||
line: node.startPosition.row + 1,
|
||||
isDefault,
|
||||
kind,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
private classifyImport(from: string): ImportInfo["type"] {
|
||||
if (from.startsWith(".") || from.startsWith("/")) {
|
||||
return "internal"
|
||||
}
|
||||
if (from.startsWith("node:") || builtinModules.includes(from)) {
|
||||
return "builtin"
|
||||
}
|
||||
return "external"
|
||||
}
|
||||
|
||||
private getStringValue(node: SyntaxNode): string {
|
||||
const text = node.text
|
||||
if (
|
||||
(text.startsWith('"') && text.endsWith('"')) ||
|
||||
(text.startsWith("'") && text.endsWith("'"))
|
||||
) {
|
||||
return text.slice(1, -1)
|
||||
}
|
||||
return text
|
||||
}
|
||||
}
|
||||
189
packages/ipuaro/src/infrastructure/indexer/FileScanner.ts
Normal file
189
packages/ipuaro/src/infrastructure/indexer/FileScanner.ts
Normal file
@@ -0,0 +1,189 @@
|
||||
import * as fs from "node:fs/promises"
|
||||
import type { Stats } from "node:fs"
|
||||
import * as path from "node:path"
|
||||
import { globby } from "globby"
|
||||
import {
|
||||
BINARY_EXTENSIONS,
|
||||
DEFAULT_IGNORE_PATTERNS,
|
||||
SUPPORTED_EXTENSIONS,
|
||||
} from "../../domain/constants/index.js"
|
||||
import type { ScanResult } from "../../domain/services/IIndexer.js"
|
||||
|
||||
/**
|
||||
* Progress callback for file scanning.
|
||||
*/
|
||||
export interface ScanProgress {
|
||||
current: number
|
||||
total: number
|
||||
currentFile: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for FileScanner.
|
||||
*/
|
||||
export interface FileScannerOptions {
|
||||
/** Additional ignore patterns (besides .gitignore and defaults) */
|
||||
additionalIgnore?: string[]
|
||||
/** Only include files with these extensions. Defaults to SUPPORTED_EXTENSIONS. */
|
||||
extensions?: readonly string[]
|
||||
/** Callback for progress updates */
|
||||
onProgress?: (progress: ScanProgress) => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans project directories recursively using globby.
|
||||
* Respects .gitignore, skips binary files and default ignore patterns.
|
||||
*/
|
||||
export class FileScanner {
|
||||
private readonly extensions: Set<string>
|
||||
private readonly additionalIgnore: string[]
|
||||
private readonly onProgress?: (progress: ScanProgress) => void
|
||||
|
||||
constructor(options: FileScannerOptions = {}) {
|
||||
this.extensions = new Set(options.extensions ?? SUPPORTED_EXTENSIONS)
|
||||
this.additionalIgnore = options.additionalIgnore ?? []
|
||||
this.onProgress = options.onProgress
|
||||
}
|
||||
|
||||
/**
|
||||
* Build glob patterns from extensions.
|
||||
*/
|
||||
private buildGlobPatterns(): string[] {
|
||||
const exts = [...this.extensions].map((ext) => ext.replace(".", ""))
|
||||
if (exts.length === 1) {
|
||||
return [`**/*.${exts[0]}`]
|
||||
}
|
||||
return [`**/*.{${exts.join(",")}}`]
|
||||
}
|
||||
|
||||
/**
|
||||
* Build ignore patterns.
|
||||
*/
|
||||
private buildIgnorePatterns(): string[] {
|
||||
const patterns = [
|
||||
...DEFAULT_IGNORE_PATTERNS,
|
||||
...this.additionalIgnore,
|
||||
...BINARY_EXTENSIONS.map((ext) => `**/*${ext}`),
|
||||
]
|
||||
return patterns
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan directory and yield file results.
|
||||
* @param root - Root directory to scan
|
||||
*/
|
||||
async *scan(root: string): AsyncGenerator<ScanResult> {
|
||||
const globPatterns = this.buildGlobPatterns()
|
||||
const ignorePatterns = this.buildIgnorePatterns()
|
||||
|
||||
const files = await globby(globPatterns, {
|
||||
cwd: root,
|
||||
gitignore: true,
|
||||
ignore: ignorePatterns,
|
||||
absolute: false,
|
||||
onlyFiles: true,
|
||||
followSymbolicLinks: false,
|
||||
})
|
||||
|
||||
const total = files.length
|
||||
let current = 0
|
||||
|
||||
for (const relativePath of files) {
|
||||
current++
|
||||
this.reportProgress(relativePath, current, total)
|
||||
|
||||
const fullPath = path.join(root, relativePath)
|
||||
const stats = await this.safeStats(fullPath)
|
||||
|
||||
if (stats) {
|
||||
yield {
|
||||
path: relativePath,
|
||||
type: "file",
|
||||
size: stats.size,
|
||||
lastModified: stats.mtimeMs,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan and return all results as array.
|
||||
*/
|
||||
async scanAll(root: string): Promise<ScanResult[]> {
|
||||
const results: ScanResult[] = []
|
||||
for await (const result of this.scan(root)) {
|
||||
results.push(result)
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if file has supported extension.
|
||||
*/
|
||||
isSupportedExtension(filePath: string): boolean {
|
||||
const ext = path.extname(filePath).toLowerCase()
|
||||
return this.extensions.has(ext)
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely get file stats without throwing.
|
||||
*/
|
||||
private async safeStats(filePath: string): Promise<Stats | null> {
|
||||
try {
|
||||
return await fs.stat(filePath)
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Report progress if callback is set.
|
||||
*/
|
||||
private reportProgress(currentFile: string, current: number, total: number): void {
|
||||
if (this.onProgress) {
|
||||
this.onProgress({ current, total, currentFile })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if file content is likely UTF-8 text.
|
||||
* Reads first 8KB and checks for null bytes.
|
||||
*/
|
||||
static async isTextFile(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
const handle = await fs.open(filePath, "r")
|
||||
try {
|
||||
const buffer = Buffer.alloc(8192)
|
||||
const { bytesRead } = await handle.read(buffer, 0, 8192, 0)
|
||||
if (bytesRead === 0) {
|
||||
return true
|
||||
}
|
||||
for (let i = 0; i < bytesRead; i++) {
|
||||
if (buffer[i] === 0) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
} finally {
|
||||
await handle.close()
|
||||
}
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read file content as string.
|
||||
* Returns null if file is binary or unreadable.
|
||||
*/
|
||||
static async readFileContent(filePath: string): Promise<string | null> {
|
||||
if (!(await FileScanner.isTextFile(filePath))) {
|
||||
return null
|
||||
}
|
||||
try {
|
||||
return await fs.readFile(filePath, "utf-8")
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
}
|
||||
406
packages/ipuaro/src/infrastructure/indexer/IndexBuilder.ts
Normal file
406
packages/ipuaro/src/infrastructure/indexer/IndexBuilder.ts
Normal file
@@ -0,0 +1,406 @@
|
||||
import * as path from "node:path"
|
||||
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||
import type { DepsGraph, SymbolIndex, SymbolLocation } from "../../domain/services/IStorage.js"
|
||||
|
||||
/**
|
||||
* Builds searchable indexes from parsed ASTs.
|
||||
*/
|
||||
export class IndexBuilder {
|
||||
private readonly projectRoot: string
|
||||
|
||||
constructor(projectRoot: string) {
|
||||
this.projectRoot = projectRoot
|
||||
}
|
||||
|
||||
/**
|
||||
* Build symbol index from all ASTs.
|
||||
* Maps symbol names to their locations for quick lookup.
|
||||
*/
|
||||
buildSymbolIndex(asts: Map<string, FileAST>): SymbolIndex {
|
||||
const index: SymbolIndex = new Map()
|
||||
|
||||
for (const [filePath, ast] of asts) {
|
||||
this.indexFunctions(filePath, ast, index)
|
||||
this.indexClasses(filePath, ast, index)
|
||||
this.indexInterfaces(filePath, ast, index)
|
||||
this.indexTypeAliases(filePath, ast, index)
|
||||
this.indexExportedVariables(filePath, ast, index)
|
||||
}
|
||||
|
||||
return index
|
||||
}
|
||||
|
||||
/**
|
||||
* Index function declarations.
|
||||
*/
|
||||
private indexFunctions(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||
for (const func of ast.functions) {
|
||||
this.addSymbol(index, func.name, {
|
||||
path: filePath,
|
||||
line: func.lineStart,
|
||||
type: "function",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Index class declarations.
|
||||
*/
|
||||
private indexClasses(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||
for (const cls of ast.classes) {
|
||||
this.addSymbol(index, cls.name, {
|
||||
path: filePath,
|
||||
line: cls.lineStart,
|
||||
type: "class",
|
||||
})
|
||||
|
||||
for (const method of cls.methods) {
|
||||
const qualifiedName = `${cls.name}.${method.name}`
|
||||
this.addSymbol(index, qualifiedName, {
|
||||
path: filePath,
|
||||
line: method.lineStart,
|
||||
type: "function",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Index interface declarations.
|
||||
*/
|
||||
private indexInterfaces(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||
for (const iface of ast.interfaces) {
|
||||
this.addSymbol(index, iface.name, {
|
||||
path: filePath,
|
||||
line: iface.lineStart,
|
||||
type: "interface",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Index type alias declarations.
|
||||
*/
|
||||
private indexTypeAliases(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||
for (const typeAlias of ast.typeAliases) {
|
||||
this.addSymbol(index, typeAlias.name, {
|
||||
path: filePath,
|
||||
line: typeAlias.line,
|
||||
type: "type",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Index exported variables (not functions).
|
||||
*/
|
||||
private indexExportedVariables(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||
const functionNames = new Set(ast.functions.map((f) => f.name))
|
||||
|
||||
for (const exp of ast.exports) {
|
||||
if (exp.kind === "variable" && !functionNames.has(exp.name)) {
|
||||
this.addSymbol(index, exp.name, {
|
||||
path: filePath,
|
||||
line: exp.line,
|
||||
type: "variable",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a symbol to the index.
|
||||
*/
|
||||
private addSymbol(index: SymbolIndex, name: string, location: SymbolLocation): void {
|
||||
if (!name) {
|
||||
return
|
||||
}
|
||||
|
||||
const existing = index.get(name)
|
||||
if (existing) {
|
||||
const isDuplicate = existing.some(
|
||||
(loc) => loc.path === location.path && loc.line === location.line,
|
||||
)
|
||||
if (!isDuplicate) {
|
||||
existing.push(location)
|
||||
}
|
||||
} else {
|
||||
index.set(name, [location])
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build dependency graph from all ASTs.
|
||||
* Creates bidirectional mapping of imports.
|
||||
*/
|
||||
buildDepsGraph(asts: Map<string, FileAST>): DepsGraph {
|
||||
const imports = new Map<string, string[]>()
|
||||
const importedBy = new Map<string, string[]>()
|
||||
|
||||
for (const filePath of asts.keys()) {
|
||||
imports.set(filePath, [])
|
||||
importedBy.set(filePath, [])
|
||||
}
|
||||
|
||||
for (const [filePath, ast] of asts) {
|
||||
const fileImports = this.resolveFileImports(filePath, ast, asts)
|
||||
imports.set(filePath, fileImports)
|
||||
|
||||
for (const importedFile of fileImports) {
|
||||
const dependents = importedBy.get(importedFile) ?? []
|
||||
if (!dependents.includes(filePath)) {
|
||||
dependents.push(filePath)
|
||||
importedBy.set(importedFile, dependents)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const [filePath, deps] of imports) {
|
||||
imports.set(filePath, deps.sort())
|
||||
}
|
||||
for (const [filePath, deps] of importedBy) {
|
||||
importedBy.set(filePath, deps.sort())
|
||||
}
|
||||
|
||||
return { imports, importedBy }
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve internal imports for a file.
|
||||
*/
|
||||
private resolveFileImports(
|
||||
filePath: string,
|
||||
ast: FileAST,
|
||||
allASTs: Map<string, FileAST>,
|
||||
): string[] {
|
||||
const fileDir = path.dirname(filePath)
|
||||
const resolvedImports: string[] = []
|
||||
|
||||
for (const imp of ast.imports) {
|
||||
if (imp.type !== "internal") {
|
||||
continue
|
||||
}
|
||||
|
||||
const resolved = this.resolveImportPath(fileDir, imp.from, allASTs)
|
||||
if (resolved && !resolvedImports.includes(resolved)) {
|
||||
resolvedImports.push(resolved)
|
||||
}
|
||||
}
|
||||
|
||||
return resolvedImports
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve import path to actual file path.
|
||||
*/
|
||||
private resolveImportPath(
|
||||
fromDir: string,
|
||||
importPath: string,
|
||||
allASTs: Map<string, FileAST>,
|
||||
): string | null {
|
||||
const absolutePath = path.resolve(fromDir, importPath)
|
||||
|
||||
const candidates = this.getImportCandidates(absolutePath)
|
||||
for (const candidate of candidates) {
|
||||
if (allASTs.has(candidate)) {
|
||||
return candidate
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate possible file paths for an import.
|
||||
*/
|
||||
private getImportCandidates(basePath: string): string[] {
|
||||
const candidates: string[] = []
|
||||
|
||||
if (/\.(ts|tsx|js|jsx)$/.test(basePath)) {
|
||||
candidates.push(basePath)
|
||||
|
||||
if (basePath.endsWith(".js")) {
|
||||
candidates.push(`${basePath.slice(0, -3)}.ts`)
|
||||
} else if (basePath.endsWith(".jsx")) {
|
||||
candidates.push(`${basePath.slice(0, -4)}.tsx`)
|
||||
}
|
||||
} else {
|
||||
candidates.push(`${basePath}.ts`)
|
||||
candidates.push(`${basePath}.tsx`)
|
||||
candidates.push(`${basePath}.js`)
|
||||
candidates.push(`${basePath}.jsx`)
|
||||
candidates.push(`${basePath}/index.ts`)
|
||||
candidates.push(`${basePath}/index.tsx`)
|
||||
candidates.push(`${basePath}/index.js`)
|
||||
candidates.push(`${basePath}/index.jsx`)
|
||||
}
|
||||
|
||||
return candidates
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all locations of a symbol by name.
|
||||
*/
|
||||
findSymbol(index: SymbolIndex, name: string): SymbolLocation[] {
|
||||
return index.get(name) ?? []
|
||||
}
|
||||
|
||||
/**
|
||||
* Find symbols matching a pattern.
|
||||
*/
|
||||
searchSymbols(index: SymbolIndex, pattern: string): Map<string, SymbolLocation[]> {
|
||||
const results = new Map<string, SymbolLocation[]>()
|
||||
const regex = new RegExp(pattern, "i")
|
||||
|
||||
for (const [name, locations] of index) {
|
||||
if (regex.test(name)) {
|
||||
results.set(name, locations)
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all files that the given file depends on (imports).
|
||||
*/
|
||||
getDependencies(graph: DepsGraph, filePath: string): string[] {
|
||||
return graph.imports.get(filePath) ?? []
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all files that depend on the given file (import it).
|
||||
*/
|
||||
getDependents(graph: DepsGraph, filePath: string): string[] {
|
||||
return graph.importedBy.get(filePath) ?? []
|
||||
}
|
||||
|
||||
/**
|
||||
* Find circular dependencies in the graph.
|
||||
*/
|
||||
findCircularDependencies(graph: DepsGraph): string[][] {
|
||||
const cycles: string[][] = []
|
||||
const visited = new Set<string>()
|
||||
const recursionStack = new Set<string>()
|
||||
|
||||
const dfs = (node: string, path: string[]): void => {
|
||||
visited.add(node)
|
||||
recursionStack.add(node)
|
||||
path.push(node)
|
||||
|
||||
const deps = graph.imports.get(node) ?? []
|
||||
for (const dep of deps) {
|
||||
if (!visited.has(dep)) {
|
||||
dfs(dep, [...path])
|
||||
} else if (recursionStack.has(dep)) {
|
||||
const cycleStart = path.indexOf(dep)
|
||||
if (cycleStart !== -1) {
|
||||
const cycle = [...path.slice(cycleStart), dep]
|
||||
const normalized = this.normalizeCycle(cycle)
|
||||
if (!this.cycleExists(cycles, normalized)) {
|
||||
cycles.push(normalized)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
recursionStack.delete(node)
|
||||
}
|
||||
|
||||
for (const node of graph.imports.keys()) {
|
||||
if (!visited.has(node)) {
|
||||
dfs(node, [])
|
||||
}
|
||||
}
|
||||
|
||||
return cycles
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize a cycle to start with the smallest path.
|
||||
*/
|
||||
private normalizeCycle(cycle: string[]): string[] {
|
||||
if (cycle.length <= 1) {
|
||||
return cycle
|
||||
}
|
||||
|
||||
const withoutLast = cycle.slice(0, -1)
|
||||
const minIndex = withoutLast.reduce(
|
||||
(minIdx, path, idx) => (path < withoutLast[minIdx] ? idx : minIdx),
|
||||
0,
|
||||
)
|
||||
|
||||
const rotated = [...withoutLast.slice(minIndex), ...withoutLast.slice(0, minIndex)]
|
||||
rotated.push(rotated[0])
|
||||
|
||||
return rotated
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a cycle already exists in the list.
|
||||
*/
|
||||
private cycleExists(cycles: string[][], newCycle: string[]): boolean {
|
||||
const newKey = newCycle.join("→")
|
||||
return cycles.some((cycle) => cycle.join("→") === newKey)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get statistics about the indexes.
|
||||
*/
|
||||
getStats(
|
||||
symbolIndex: SymbolIndex,
|
||||
depsGraph: DepsGraph,
|
||||
): {
|
||||
totalSymbols: number
|
||||
symbolsByType: Record<SymbolLocation["type"], number>
|
||||
totalFiles: number
|
||||
totalDependencies: number
|
||||
averageDependencies: number
|
||||
hubs: string[]
|
||||
orphans: string[]
|
||||
} {
|
||||
const symbolsByType: Record<SymbolLocation["type"], number> = {
|
||||
function: 0,
|
||||
class: 0,
|
||||
interface: 0,
|
||||
type: 0,
|
||||
variable: 0,
|
||||
}
|
||||
|
||||
let totalSymbols = 0
|
||||
for (const locations of symbolIndex.values()) {
|
||||
totalSymbols += locations.length
|
||||
for (const loc of locations) {
|
||||
symbolsByType[loc.type]++
|
||||
}
|
||||
}
|
||||
|
||||
const totalFiles = depsGraph.imports.size
|
||||
let totalDependencies = 0
|
||||
const hubs: string[] = []
|
||||
const orphans: string[] = []
|
||||
|
||||
for (const [_filePath, deps] of depsGraph.imports) {
|
||||
totalDependencies += deps.length
|
||||
}
|
||||
|
||||
for (const [filePath, dependents] of depsGraph.importedBy) {
|
||||
if (dependents.length > 5) {
|
||||
hubs.push(filePath)
|
||||
}
|
||||
if (dependents.length === 0 && (depsGraph.imports.get(filePath)?.length ?? 0) === 0) {
|
||||
orphans.push(filePath)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
totalSymbols,
|
||||
symbolsByType,
|
||||
totalFiles,
|
||||
totalDependencies,
|
||||
averageDependencies: totalFiles > 0 ? totalDependencies / totalFiles : 0,
|
||||
hubs: hubs.sort(),
|
||||
orphans: orphans.sort(),
|
||||
}
|
||||
}
|
||||
}
|
||||
448
packages/ipuaro/src/infrastructure/indexer/MetaAnalyzer.ts
Normal file
448
packages/ipuaro/src/infrastructure/indexer/MetaAnalyzer.ts
Normal file
@@ -0,0 +1,448 @@
|
||||
import * as path from "node:path"
|
||||
import {
|
||||
type ComplexityMetrics,
|
||||
createFileMeta,
|
||||
type FileMeta,
|
||||
isHubFile,
|
||||
} from "../../domain/value-objects/FileMeta.js"
|
||||
import type { ClassInfo, FileAST, FunctionInfo } from "../../domain/value-objects/FileAST.js"
|
||||
|
||||
/**
|
||||
* Analyzes file metadata including complexity, dependencies, and classification.
|
||||
*/
|
||||
export class MetaAnalyzer {
|
||||
private readonly projectRoot: string
|
||||
|
||||
constructor(projectRoot: string) {
|
||||
this.projectRoot = projectRoot
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze a file and compute its metadata.
|
||||
* @param filePath - Absolute path to the file
|
||||
* @param ast - Parsed AST for the file
|
||||
* @param content - Raw file content (for LOC calculation)
|
||||
* @param allASTs - Map of all file paths to their ASTs (for dependents)
|
||||
*/
|
||||
analyze(
|
||||
filePath: string,
|
||||
ast: FileAST,
|
||||
content: string,
|
||||
allASTs: Map<string, FileAST>,
|
||||
): FileMeta {
|
||||
const complexity = this.calculateComplexity(ast, content)
|
||||
const dependencies = this.resolveDependencies(filePath, ast)
|
||||
const dependents = this.findDependents(filePath, allASTs)
|
||||
const fileType = this.classifyFileType(filePath)
|
||||
const isEntryPoint = this.isEntryPointFile(filePath, dependents.length)
|
||||
|
||||
return createFileMeta({
|
||||
complexity,
|
||||
dependencies,
|
||||
dependents,
|
||||
isHub: isHubFile(dependents.length),
|
||||
isEntryPoint,
|
||||
fileType,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate complexity metrics for a file.
|
||||
*/
|
||||
calculateComplexity(ast: FileAST, content: string): ComplexityMetrics {
|
||||
const loc = this.countLinesOfCode(content)
|
||||
const nesting = this.calculateMaxNesting(ast)
|
||||
const cyclomaticComplexity = this.calculateCyclomaticComplexity(ast)
|
||||
const score = this.calculateComplexityScore(loc, nesting, cyclomaticComplexity)
|
||||
|
||||
return {
|
||||
loc,
|
||||
nesting,
|
||||
cyclomaticComplexity,
|
||||
score,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Count lines of code (excluding empty lines and comments).
|
||||
*/
|
||||
countLinesOfCode(content: string): number {
|
||||
const lines = content.split("\n")
|
||||
let loc = 0
|
||||
let inBlockComment = false
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim()
|
||||
|
||||
if (inBlockComment) {
|
||||
if (trimmed.includes("*/")) {
|
||||
inBlockComment = false
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (trimmed.startsWith("/*")) {
|
||||
if (!trimmed.includes("*/")) {
|
||||
inBlockComment = true
|
||||
continue
|
||||
}
|
||||
const afterComment = trimmed.substring(trimmed.indexOf("*/") + 2).trim()
|
||||
if (afterComment === "" || afterComment.startsWith("//")) {
|
||||
continue
|
||||
}
|
||||
loc++
|
||||
continue
|
||||
}
|
||||
|
||||
if (trimmed === "" || trimmed.startsWith("//")) {
|
||||
continue
|
||||
}
|
||||
|
||||
loc++
|
||||
}
|
||||
|
||||
return loc
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate maximum nesting depth from AST.
|
||||
*/
|
||||
calculateMaxNesting(ast: FileAST): number {
|
||||
let maxNesting = 0
|
||||
|
||||
for (const func of ast.functions) {
|
||||
const depth = this.estimateFunctionNesting(func)
|
||||
maxNesting = Math.max(maxNesting, depth)
|
||||
}
|
||||
|
||||
for (const cls of ast.classes) {
|
||||
const depth = this.estimateClassNesting(cls)
|
||||
maxNesting = Math.max(maxNesting, depth)
|
||||
}
|
||||
|
||||
return maxNesting
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimate nesting depth for a function based on line count.
|
||||
* More accurate nesting would require full AST traversal.
|
||||
*/
|
||||
private estimateFunctionNesting(func: FunctionInfo): number {
|
||||
const lines = func.lineEnd - func.lineStart + 1
|
||||
if (lines <= 5) {
|
||||
return 1
|
||||
}
|
||||
if (lines <= 15) {
|
||||
return 2
|
||||
}
|
||||
if (lines <= 30) {
|
||||
return 3
|
||||
}
|
||||
if (lines <= 50) {
|
||||
return 4
|
||||
}
|
||||
return 5
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimate nesting depth for a class.
|
||||
*/
|
||||
private estimateClassNesting(cls: ClassInfo): number {
|
||||
let maxMethodNesting = 1
|
||||
|
||||
for (const method of cls.methods) {
|
||||
const lines = method.lineEnd - method.lineStart + 1
|
||||
let depth = 1
|
||||
if (lines > 5) {
|
||||
depth = 2
|
||||
}
|
||||
if (lines > 15) {
|
||||
depth = 3
|
||||
}
|
||||
if (lines > 30) {
|
||||
depth = 4
|
||||
}
|
||||
maxMethodNesting = Math.max(maxMethodNesting, depth)
|
||||
}
|
||||
|
||||
return maxMethodNesting + 1
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate cyclomatic complexity from AST.
|
||||
* Base complexity is 1, +1 for each decision point.
|
||||
*/
|
||||
calculateCyclomaticComplexity(ast: FileAST): number {
|
||||
let complexity = 1
|
||||
|
||||
for (const func of ast.functions) {
|
||||
complexity += this.estimateFunctionComplexity(func)
|
||||
}
|
||||
|
||||
for (const cls of ast.classes) {
|
||||
for (const method of cls.methods) {
|
||||
const lines = method.lineEnd - method.lineStart + 1
|
||||
complexity += Math.max(1, Math.floor(lines / 10))
|
||||
}
|
||||
}
|
||||
|
||||
return complexity
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimate function complexity based on size.
|
||||
*/
|
||||
private estimateFunctionComplexity(func: FunctionInfo): number {
|
||||
const lines = func.lineEnd - func.lineStart + 1
|
||||
return Math.max(1, Math.floor(lines / 8))
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate overall complexity score (0-100).
|
||||
*/
|
||||
calculateComplexityScore(loc: number, nesting: number, cyclomatic: number): number {
|
||||
const locWeight = 0.3
|
||||
const nestingWeight = 0.35
|
||||
const cyclomaticWeight = 0.35
|
||||
|
||||
const locScore = Math.min(100, (loc / 500) * 100)
|
||||
const nestingScore = Math.min(100, (nesting / 6) * 100)
|
||||
const cyclomaticScore = Math.min(100, (cyclomatic / 30) * 100)
|
||||
|
||||
const score =
|
||||
locScore * locWeight + nestingScore * nestingWeight + cyclomaticScore * cyclomaticWeight
|
||||
|
||||
return Math.round(Math.min(100, score))
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve internal imports to absolute file paths.
|
||||
*/
|
||||
resolveDependencies(filePath: string, ast: FileAST): string[] {
|
||||
const dependencies: string[] = []
|
||||
const fileDir = path.dirname(filePath)
|
||||
|
||||
for (const imp of ast.imports) {
|
||||
if (imp.type !== "internal") {
|
||||
continue
|
||||
}
|
||||
|
||||
const resolved = this.resolveImportPath(fileDir, imp.from)
|
||||
if (resolved && !dependencies.includes(resolved)) {
|
||||
dependencies.push(resolved)
|
||||
}
|
||||
}
|
||||
|
||||
return dependencies.sort()
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a relative import path to an absolute path.
|
||||
*/
|
||||
private resolveImportPath(fromDir: string, importPath: string): string | null {
|
||||
const absolutePath = path.resolve(fromDir, importPath)
|
||||
const normalized = this.normalizeImportPath(absolutePath)
|
||||
|
||||
if (normalized.startsWith(this.projectRoot)) {
|
||||
return normalized
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize import path by removing file extension if present
|
||||
* and handling index imports.
|
||||
*/
|
||||
private normalizeImportPath(importPath: string): string {
|
||||
let normalized = importPath
|
||||
|
||||
if (normalized.endsWith(".js")) {
|
||||
normalized = `${normalized.slice(0, -3)}.ts`
|
||||
} else if (normalized.endsWith(".jsx")) {
|
||||
normalized = `${normalized.slice(0, -4)}.tsx`
|
||||
} else if (!/\.(ts|tsx|js|jsx)$/.exec(normalized)) {
|
||||
normalized = `${normalized}.ts`
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all files that import the given file.
|
||||
*/
|
||||
findDependents(filePath: string, allASTs: Map<string, FileAST>): string[] {
|
||||
const dependents: string[] = []
|
||||
const normalizedPath = this.normalizePathForComparison(filePath)
|
||||
|
||||
for (const [otherPath, ast] of allASTs) {
|
||||
if (otherPath === filePath) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (this.fileImportsTarget(otherPath, ast, normalizedPath)) {
|
||||
dependents.push(otherPath)
|
||||
}
|
||||
}
|
||||
|
||||
return dependents.sort()
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a file imports the target path.
|
||||
*/
|
||||
private fileImportsTarget(filePath: string, ast: FileAST, normalizedTarget: string): boolean {
|
||||
const fileDir = path.dirname(filePath)
|
||||
|
||||
for (const imp of ast.imports) {
|
||||
if (imp.type !== "internal") {
|
||||
continue
|
||||
}
|
||||
|
||||
const resolvedImport = this.resolveImportPath(fileDir, imp.from)
|
||||
if (!resolvedImport) {
|
||||
continue
|
||||
}
|
||||
|
||||
const normalizedImport = this.normalizePathForComparison(resolvedImport)
|
||||
if (this.pathsMatch(normalizedTarget, normalizedImport)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize path for comparison (handle index.ts and extensions).
|
||||
*/
|
||||
private normalizePathForComparison(filePath: string): string {
|
||||
let normalized = filePath
|
||||
|
||||
if (normalized.endsWith(".js")) {
|
||||
normalized = normalized.slice(0, -3)
|
||||
} else if (normalized.endsWith(".ts")) {
|
||||
normalized = normalized.slice(0, -3)
|
||||
} else if (normalized.endsWith(".jsx")) {
|
||||
normalized = normalized.slice(0, -4)
|
||||
} else if (normalized.endsWith(".tsx")) {
|
||||
normalized = normalized.slice(0, -4)
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if two normalized paths match (including index.ts resolution).
|
||||
*/
|
||||
private pathsMatch(path1: string, path2: string): boolean {
|
||||
if (path1 === path2) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (path1.endsWith("/index") && path2 === path1.slice(0, -6)) {
|
||||
return true
|
||||
}
|
||||
if (path2.endsWith("/index") && path1 === path2.slice(0, -6)) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Classify file type based on path and name.
|
||||
*/
|
||||
classifyFileType(filePath: string): FileMeta["fileType"] {
|
||||
const basename = path.basename(filePath)
|
||||
const lowercasePath = filePath.toLowerCase()
|
||||
|
||||
if (basename.includes(".test.") || basename.includes(".spec.")) {
|
||||
return "test"
|
||||
}
|
||||
|
||||
if (lowercasePath.includes("/tests/") || lowercasePath.includes("/__tests__/")) {
|
||||
return "test"
|
||||
}
|
||||
|
||||
if (basename.endsWith(".d.ts")) {
|
||||
return "types"
|
||||
}
|
||||
|
||||
if (lowercasePath.includes("/types/") || basename === "types.ts") {
|
||||
return "types"
|
||||
}
|
||||
|
||||
const configPatterns = [
|
||||
"config",
|
||||
"tsconfig",
|
||||
"eslint",
|
||||
"prettier",
|
||||
"vitest",
|
||||
"jest",
|
||||
"babel",
|
||||
"webpack",
|
||||
"vite",
|
||||
"rollup",
|
||||
]
|
||||
|
||||
for (const pattern of configPatterns) {
|
||||
if (basename.toLowerCase().includes(pattern)) {
|
||||
return "config"
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
filePath.endsWith(".ts") ||
|
||||
filePath.endsWith(".tsx") ||
|
||||
filePath.endsWith(".js") ||
|
||||
filePath.endsWith(".jsx")
|
||||
) {
|
||||
return "source"
|
||||
}
|
||||
|
||||
return "unknown"
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if file is an entry point.
|
||||
*/
|
||||
isEntryPointFile(filePath: string, dependentCount: number): boolean {
|
||||
const basename = path.basename(filePath)
|
||||
|
||||
if (basename.startsWith("index.")) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (dependentCount === 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
const entryPatterns = ["main.", "app.", "cli.", "server.", "index."]
|
||||
for (const pattern of entryPatterns) {
|
||||
if (basename.toLowerCase().startsWith(pattern)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch analyze multiple files.
|
||||
*/
|
||||
analyzeAll(files: Map<string, { ast: FileAST; content: string }>): Map<string, FileMeta> {
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
for (const [filePath, { ast }] of files) {
|
||||
allASTs.set(filePath, ast)
|
||||
}
|
||||
|
||||
const results = new Map<string, FileMeta>()
|
||||
for (const [filePath, { ast, content }] of files) {
|
||||
const meta = this.analyze(filePath, ast, content, allASTs)
|
||||
results.set(filePath, meta)
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
}
|
||||
285
packages/ipuaro/src/infrastructure/indexer/Watchdog.ts
Normal file
285
packages/ipuaro/src/infrastructure/indexer/Watchdog.ts
Normal file
@@ -0,0 +1,285 @@
|
||||
import * as chokidar from "chokidar"
|
||||
import * as path from "node:path"
|
||||
import { DEFAULT_IGNORE_PATTERNS, SUPPORTED_EXTENSIONS } from "../../domain/constants/index.js"
|
||||
|
||||
export type FileChangeType = "add" | "change" | "unlink"
|
||||
|
||||
export interface FileChangeEvent {
|
||||
type: FileChangeType
|
||||
path: string
|
||||
timestamp: number
|
||||
}
|
||||
|
||||
export type FileChangeCallback = (event: FileChangeEvent) => void
|
||||
|
||||
export interface WatchdogOptions {
|
||||
/** Debounce delay in milliseconds (default: 500) */
|
||||
debounceMs?: number
|
||||
/** Patterns to ignore (default: DEFAULT_IGNORE_PATTERNS) */
|
||||
ignorePatterns?: readonly string[]
|
||||
/** File extensions to watch (default: SUPPORTED_EXTENSIONS) */
|
||||
extensions?: readonly string[]
|
||||
/** Use polling instead of native events (useful for network drives) */
|
||||
usePolling?: boolean
|
||||
/** Polling interval in milliseconds (default: 1000) */
|
||||
pollInterval?: number
|
||||
}
|
||||
|
||||
interface ResolvedWatchdogOptions {
|
||||
debounceMs: number
|
||||
ignorePatterns: readonly string[]
|
||||
extensions: readonly string[]
|
||||
usePolling: boolean
|
||||
pollInterval: number
|
||||
}
|
||||
|
||||
const DEFAULT_OPTIONS: ResolvedWatchdogOptions = {
|
||||
debounceMs: 500,
|
||||
ignorePatterns: DEFAULT_IGNORE_PATTERNS,
|
||||
extensions: SUPPORTED_EXTENSIONS,
|
||||
usePolling: false,
|
||||
pollInterval: 1000,
|
||||
}
|
||||
|
||||
/**
|
||||
* Watches for file changes in a directory using chokidar.
|
||||
*/
|
||||
export class Watchdog {
|
||||
private watcher: chokidar.FSWatcher | null = null
|
||||
private readonly callbacks: FileChangeCallback[] = []
|
||||
private readonly pendingChanges = new Map<string, FileChangeEvent>()
|
||||
private readonly debounceTimers = new Map<string, NodeJS.Timeout>()
|
||||
private readonly options: ResolvedWatchdogOptions
|
||||
private root = ""
|
||||
private isRunning = false
|
||||
|
||||
constructor(options: WatchdogOptions = {}) {
|
||||
this.options = { ...DEFAULT_OPTIONS, ...options }
|
||||
}
|
||||
|
||||
/**
|
||||
* Start watching a directory for file changes.
|
||||
*/
|
||||
start(root: string): void {
|
||||
if (this.isRunning) {
|
||||
void this.stop()
|
||||
}
|
||||
|
||||
this.root = root
|
||||
this.isRunning = true
|
||||
|
||||
const globPatterns = this.buildGlobPatterns(root)
|
||||
const ignorePatterns = this.buildIgnorePatterns()
|
||||
|
||||
this.watcher = chokidar.watch(globPatterns, {
|
||||
ignored: ignorePatterns,
|
||||
persistent: true,
|
||||
ignoreInitial: true,
|
||||
usePolling: this.options.usePolling,
|
||||
interval: this.options.pollInterval,
|
||||
awaitWriteFinish: {
|
||||
stabilityThreshold: 100,
|
||||
pollInterval: 100,
|
||||
},
|
||||
})
|
||||
|
||||
this.watcher.on("add", (filePath) => {
|
||||
this.handleChange("add", filePath)
|
||||
})
|
||||
this.watcher.on("change", (filePath) => {
|
||||
this.handleChange("change", filePath)
|
||||
})
|
||||
this.watcher.on("unlink", (filePath) => {
|
||||
this.handleChange("unlink", filePath)
|
||||
})
|
||||
this.watcher.on("error", (error) => {
|
||||
this.handleError(error)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop watching for file changes.
|
||||
*/
|
||||
async stop(): Promise<void> {
|
||||
if (!this.isRunning) {
|
||||
return
|
||||
}
|
||||
|
||||
for (const timer of this.debounceTimers.values()) {
|
||||
clearTimeout(timer)
|
||||
}
|
||||
this.debounceTimers.clear()
|
||||
this.pendingChanges.clear()
|
||||
|
||||
if (this.watcher) {
|
||||
await this.watcher.close()
|
||||
this.watcher = null
|
||||
}
|
||||
|
||||
this.isRunning = false
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a callback for file change events.
|
||||
*/
|
||||
onFileChange(callback: FileChangeCallback): void {
|
||||
this.callbacks.push(callback)
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a callback.
|
||||
*/
|
||||
offFileChange(callback: FileChangeCallback): void {
|
||||
const index = this.callbacks.indexOf(callback)
|
||||
if (index !== -1) {
|
||||
this.callbacks.splice(index, 1)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the watchdog is currently running.
|
||||
*/
|
||||
isWatching(): boolean {
|
||||
return this.isRunning
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the root directory being watched.
|
||||
*/
|
||||
getRoot(): string {
|
||||
return this.root
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of pending changes waiting to be processed.
|
||||
*/
|
||||
getPendingCount(): number {
|
||||
return this.pendingChanges.size
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle a file change event with debouncing.
|
||||
*/
|
||||
private handleChange(type: FileChangeType, filePath: string): void {
|
||||
if (!this.isValidFile(filePath)) {
|
||||
return
|
||||
}
|
||||
|
||||
const normalizedPath = path.resolve(filePath)
|
||||
|
||||
const event: FileChangeEvent = {
|
||||
type,
|
||||
path: normalizedPath,
|
||||
timestamp: Date.now(),
|
||||
}
|
||||
|
||||
this.pendingChanges.set(normalizedPath, event)
|
||||
|
||||
const existingTimer = this.debounceTimers.get(normalizedPath)
|
||||
if (existingTimer) {
|
||||
clearTimeout(existingTimer)
|
||||
}
|
||||
|
||||
const timer = setTimeout(() => {
|
||||
this.flushChange(normalizedPath)
|
||||
}, this.options.debounceMs)
|
||||
|
||||
this.debounceTimers.set(normalizedPath, timer)
|
||||
}
|
||||
|
||||
/**
|
||||
* Flush a pending change and notify callbacks.
|
||||
*/
|
||||
private flushChange(filePath: string): void {
|
||||
const event = this.pendingChanges.get(filePath)
|
||||
if (!event) {
|
||||
return
|
||||
}
|
||||
|
||||
this.pendingChanges.delete(filePath)
|
||||
this.debounceTimers.delete(filePath)
|
||||
|
||||
for (const callback of this.callbacks) {
|
||||
try {
|
||||
callback(event)
|
||||
} catch {
|
||||
// Silently ignore callback errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle watcher errors.
|
||||
*/
|
||||
private handleError(error: Error): void {
|
||||
// Log error but don't crash
|
||||
console.error(`[Watchdog] Error: ${error.message}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a file should be watched based on extension.
|
||||
*/
|
||||
private isValidFile(filePath: string): boolean {
|
||||
const ext = path.extname(filePath)
|
||||
return this.options.extensions.includes(ext)
|
||||
}
|
||||
|
||||
/**
|
||||
* Build glob patterns for watching.
|
||||
*/
|
||||
private buildGlobPatterns(root: string): string[] {
|
||||
return this.options.extensions.map((ext) => path.join(root, "**", `*${ext}`))
|
||||
}
|
||||
|
||||
/**
|
||||
* Build ignore patterns for chokidar.
|
||||
*/
|
||||
private buildIgnorePatterns(): (string | RegExp)[] {
|
||||
const patterns: (string | RegExp)[] = []
|
||||
|
||||
for (const pattern of this.options.ignorePatterns) {
|
||||
if (pattern.includes("*")) {
|
||||
const regexPattern = pattern
|
||||
.replace(/\./g, "\\.")
|
||||
.replace(/\*\*/g, ".*")
|
||||
.replace(/\*/g, "[^/]*")
|
||||
patterns.push(new RegExp(regexPattern))
|
||||
} else {
|
||||
patterns.push(`**/${pattern}/**`)
|
||||
}
|
||||
}
|
||||
|
||||
return patterns
|
||||
}
|
||||
|
||||
/**
|
||||
* Force flush all pending changes immediately.
|
||||
*/
|
||||
flushAll(): void {
|
||||
for (const timer of this.debounceTimers.values()) {
|
||||
clearTimeout(timer)
|
||||
}
|
||||
this.debounceTimers.clear()
|
||||
|
||||
for (const filePath of this.pendingChanges.keys()) {
|
||||
this.flushChange(filePath)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get watched paths (for debugging).
|
||||
*/
|
||||
getWatchedPaths(): string[] {
|
||||
if (!this.watcher) {
|
||||
return []
|
||||
}
|
||||
const watched = this.watcher.getWatched()
|
||||
const paths: string[] = []
|
||||
for (const dir of Object.keys(watched)) {
|
||||
for (const file of watched[dir]) {
|
||||
paths.push(path.join(dir, file))
|
||||
}
|
||||
}
|
||||
return paths.sort()
|
||||
}
|
||||
}
|
||||
6
packages/ipuaro/src/infrastructure/indexer/index.ts
Normal file
6
packages/ipuaro/src/infrastructure/indexer/index.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export * from "./FileScanner.js"
|
||||
export * from "./ASTParser.js"
|
||||
export * from "./MetaAnalyzer.js"
|
||||
export * from "./IndexBuilder.js"
|
||||
export * from "./Watchdog.js"
|
||||
export * from "./tree-sitter-types.js"
|
||||
@@ -0,0 +1,77 @@
|
||||
/**
|
||||
* Tree-sitter node type constants for TypeScript/JavaScript parsing.
|
||||
* These are infrastructure-level constants, not exposed to domain/application layers.
|
||||
*
|
||||
* Source: tree-sitter-typescript/typescript/src/node-types.json
|
||||
*/
|
||||
|
||||
export const NodeType = {
|
||||
// Statements
|
||||
IMPORT_STATEMENT: "import_statement",
|
||||
EXPORT_STATEMENT: "export_statement",
|
||||
LEXICAL_DECLARATION: "lexical_declaration",
|
||||
|
||||
// Declarations
|
||||
FUNCTION_DECLARATION: "function_declaration",
|
||||
CLASS_DECLARATION: "class_declaration",
|
||||
INTERFACE_DECLARATION: "interface_declaration",
|
||||
TYPE_ALIAS_DECLARATION: "type_alias_declaration",
|
||||
|
||||
// Clauses
|
||||
IMPORT_CLAUSE: "import_clause",
|
||||
EXPORT_CLAUSE: "export_clause",
|
||||
EXTENDS_CLAUSE: "extends_clause",
|
||||
IMPLEMENTS_CLAUSE: "implements_clause",
|
||||
EXTENDS_TYPE_CLAUSE: "extends_type_clause",
|
||||
CLASS_HERITAGE: "class_heritage",
|
||||
|
||||
// Import specifiers
|
||||
NAMESPACE_IMPORT: "namespace_import",
|
||||
NAMED_IMPORTS: "named_imports",
|
||||
IMPORT_SPECIFIER: "import_specifier",
|
||||
EXPORT_SPECIFIER: "export_specifier",
|
||||
|
||||
// Class members
|
||||
METHOD_DEFINITION: "method_definition",
|
||||
PUBLIC_FIELD_DEFINITION: "public_field_definition",
|
||||
FIELD_DEFINITION: "field_definition",
|
||||
PROPERTY_SIGNATURE: "property_signature",
|
||||
|
||||
// Parameters
|
||||
REQUIRED_PARAMETER: "required_parameter",
|
||||
OPTIONAL_PARAMETER: "optional_parameter",
|
||||
|
||||
// Expressions & values
|
||||
ARROW_FUNCTION: "arrow_function",
|
||||
FUNCTION: "function",
|
||||
VARIABLE_DECLARATOR: "variable_declarator",
|
||||
|
||||
// Identifiers & types
|
||||
IDENTIFIER: "identifier",
|
||||
TYPE_IDENTIFIER: "type_identifier",
|
||||
|
||||
// Modifiers
|
||||
ASYNC: "async",
|
||||
STATIC: "static",
|
||||
ABSTRACT: "abstract",
|
||||
DEFAULT: "default",
|
||||
ACCESSIBILITY_MODIFIER: "accessibility_modifier",
|
||||
READONLY: "readonly",
|
||||
} as const
|
||||
|
||||
export type NodeTypeValue = (typeof NodeType)[keyof typeof NodeType]
|
||||
|
||||
export const FieldName = {
|
||||
SOURCE: "source",
|
||||
NAME: "name",
|
||||
ALIAS: "alias",
|
||||
DECLARATION: "declaration",
|
||||
PARAMETERS: "parameters",
|
||||
RETURN_TYPE: "return_type",
|
||||
BODY: "body",
|
||||
TYPE: "type",
|
||||
PATTERN: "pattern",
|
||||
VALUE: "value",
|
||||
} as const
|
||||
|
||||
export type FieldNameValue = (typeof FieldName)[keyof typeof FieldName]
|
||||
2
packages/ipuaro/tests/fixtures/sample-project/.gitignore
vendored
Normal file
2
packages/ipuaro/tests/fixtures/sample-project/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
ignored-file.ts
|
||||
*.log
|
||||
4
packages/ipuaro/tests/fixtures/sample-project/package.json
vendored
Normal file
4
packages/ipuaro/tests/fixtures/sample-project/package.json
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"name": "sample-project",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
3
packages/ipuaro/tests/fixtures/sample-project/src/index.ts
vendored
Normal file
3
packages/ipuaro/tests/fixtures/sample-project/src/index.ts
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
export function main(): void {
|
||||
console.log("Hello")
|
||||
}
|
||||
3
packages/ipuaro/tests/fixtures/sample-project/src/utils.ts
vendored
Normal file
3
packages/ipuaro/tests/fixtures/sample-project/src/utils.ts
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
export function add(a: number, b: number): number {
|
||||
return a + b
|
||||
}
|
||||
@@ -0,0 +1,347 @@
|
||||
import { describe, it, expect, beforeAll } from "vitest"
|
||||
import { ASTParser } from "../../../../src/infrastructure/indexer/ASTParser.js"
|
||||
|
||||
describe("ASTParser", () => {
|
||||
let parser: ASTParser
|
||||
|
||||
beforeAll(() => {
|
||||
parser = new ASTParser()
|
||||
})
|
||||
|
||||
describe("parse", () => {
|
||||
it("should parse empty file", () => {
|
||||
const ast = parser.parse("", "ts")
|
||||
expect(ast.parseError).toBe(false)
|
||||
expect(ast.imports).toHaveLength(0)
|
||||
expect(ast.exports).toHaveLength(0)
|
||||
expect(ast.functions).toHaveLength(0)
|
||||
expect(ast.classes).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("should handle syntax errors gracefully", () => {
|
||||
const code = "export function {{{ invalid"
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.parseError).toBe(true)
|
||||
expect(ast.parseErrorMessage).toBeDefined()
|
||||
})
|
||||
|
||||
it("should return error for unsupported language", () => {
|
||||
const ast = parser.parse("const x = 1", "py" as never)
|
||||
expect(ast.parseError).toBe(true)
|
||||
expect(ast.parseErrorMessage).toContain("Unsupported language")
|
||||
})
|
||||
})
|
||||
|
||||
describe("imports", () => {
|
||||
it("should extract default import", () => {
|
||||
const code = `import React from "react"`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.imports).toHaveLength(1)
|
||||
expect(ast.imports[0]).toMatchObject({
|
||||
name: "React",
|
||||
from: "react",
|
||||
isDefault: true,
|
||||
type: "external",
|
||||
})
|
||||
})
|
||||
|
||||
it("should extract named imports", () => {
|
||||
const code = `import { useState, useEffect } from "react"`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.imports).toHaveLength(2)
|
||||
expect(ast.imports[0].name).toBe("useState")
|
||||
expect(ast.imports[1].name).toBe("useEffect")
|
||||
expect(ast.imports[0].isDefault).toBe(false)
|
||||
})
|
||||
|
||||
it("should extract namespace import", () => {
|
||||
const code = `import * as path from "path"`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.imports).toHaveLength(1)
|
||||
expect(ast.imports[0].name).toBe("path")
|
||||
expect(ast.imports[0].isDefault).toBe(false)
|
||||
})
|
||||
|
||||
it("should classify internal imports", () => {
|
||||
const code = `import { foo } from "./utils"`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.imports[0].type).toBe("internal")
|
||||
})
|
||||
|
||||
it("should classify builtin imports", () => {
|
||||
const code = `import * as fs from "node:fs"`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.imports[0].type).toBe("builtin")
|
||||
})
|
||||
|
||||
it("should classify external imports", () => {
|
||||
const code = `import lodash from "lodash"`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.imports[0].type).toBe("external")
|
||||
})
|
||||
})
|
||||
|
||||
describe("functions", () => {
|
||||
it("should extract function declaration", () => {
|
||||
const code = `function add(a: number, b: number): number {
|
||||
return a + b
|
||||
}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.functions).toHaveLength(1)
|
||||
expect(ast.functions[0]).toMatchObject({
|
||||
name: "add",
|
||||
isAsync: false,
|
||||
isExported: false,
|
||||
})
|
||||
expect(ast.functions[0].lineStart).toBe(1)
|
||||
expect(ast.functions[0].lineEnd).toBe(3)
|
||||
})
|
||||
|
||||
it("should extract async function", () => {
|
||||
const code = `async function fetchData() { return null }`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.functions[0].isAsync).toBe(true)
|
||||
})
|
||||
|
||||
it("should extract exported function", () => {
|
||||
const code = `export function main() {}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.functions[0].isExported).toBe(true)
|
||||
expect(ast.exports).toHaveLength(1)
|
||||
expect(ast.exports[0].kind).toBe("function")
|
||||
})
|
||||
|
||||
it("should extract arrow function", () => {
|
||||
const code = `const add = (a: number, b: number) => a + b`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.functions).toHaveLength(1)
|
||||
expect(ast.functions[0].name).toBe("add")
|
||||
})
|
||||
|
||||
it("should extract function parameters", () => {
|
||||
const code = `function test(a: string, b?: number, c = 10) {}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.functions[0].params).toHaveLength(3)
|
||||
expect(ast.functions[0].params[0]).toMatchObject({
|
||||
name: "a",
|
||||
optional: false,
|
||||
})
|
||||
expect(ast.functions[0].params[1]).toMatchObject({
|
||||
name: "b",
|
||||
optional: true,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("classes", () => {
|
||||
it("should extract class declaration", () => {
|
||||
const code = `class MyClass {
|
||||
value: number
|
||||
|
||||
constructor() {}
|
||||
|
||||
getValue() {
|
||||
return this.value
|
||||
}
|
||||
}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.classes).toHaveLength(1)
|
||||
expect(ast.classes[0]).toMatchObject({
|
||||
name: "MyClass",
|
||||
isExported: false,
|
||||
isAbstract: false,
|
||||
})
|
||||
})
|
||||
|
||||
it("should extract exported class", () => {
|
||||
const code = `export class Service {}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.classes[0].isExported).toBe(true)
|
||||
expect(ast.exports).toHaveLength(1)
|
||||
expect(ast.exports[0].kind).toBe("class")
|
||||
})
|
||||
|
||||
it("should extract class methods", () => {
|
||||
const code = `class Service {
|
||||
async fetch() {}
|
||||
private process() {}
|
||||
static create() {}
|
||||
}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.classes[0].methods.length).toBeGreaterThanOrEqual(1)
|
||||
})
|
||||
|
||||
it("should extract class extends", () => {
|
||||
const code = `class Child extends Parent {}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.classes[0].extends).toBe("Parent")
|
||||
})
|
||||
})
|
||||
|
||||
describe("interfaces", () => {
|
||||
it("should extract interface declaration", () => {
|
||||
const code = `interface User {
|
||||
name: string
|
||||
age: number
|
||||
}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.interfaces).toHaveLength(1)
|
||||
expect(ast.interfaces[0]).toMatchObject({
|
||||
name: "User",
|
||||
isExported: false,
|
||||
})
|
||||
})
|
||||
|
||||
it("should extract exported interface", () => {
|
||||
const code = `export interface Config {}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.interfaces[0].isExported).toBe(true)
|
||||
})
|
||||
|
||||
it("should extract interface properties", () => {
|
||||
const code = `interface Props {
|
||||
value: string
|
||||
onChange: (v: string) => void
|
||||
}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.interfaces[0].properties.length).toBeGreaterThanOrEqual(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe("type aliases", () => {
|
||||
it("should extract type alias", () => {
|
||||
const code = `type ID = string | number`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.typeAliases).toHaveLength(1)
|
||||
expect(ast.typeAliases[0]).toMatchObject({
|
||||
name: "ID",
|
||||
isExported: false,
|
||||
})
|
||||
})
|
||||
|
||||
it("should extract exported type alias", () => {
|
||||
const code = `export type Status = "pending" | "done"`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.typeAliases[0].isExported).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe("exports", () => {
|
||||
it("should extract named exports", () => {
|
||||
const code = `
|
||||
const foo = 1
|
||||
const bar = 2
|
||||
export { foo, bar }
|
||||
`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.exports).toHaveLength(2)
|
||||
})
|
||||
|
||||
it("should extract export default", () => {
|
||||
const code = `export default function main() {}`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.exports.some((e) => e.isDefault)).toBe(true)
|
||||
})
|
||||
|
||||
it("should extract exported const", () => {
|
||||
const code = `export const VERSION = "1.0.0"`
|
||||
const ast = parser.parse(code, "ts")
|
||||
expect(ast.exports).toHaveLength(1)
|
||||
expect(ast.exports[0].kind).toBe("variable")
|
||||
})
|
||||
})
|
||||
|
||||
describe("JavaScript support", () => {
|
||||
it("should parse JavaScript file", () => {
|
||||
const code = `
|
||||
import React from "react"
|
||||
|
||||
function Component() {
|
||||
return null
|
||||
}
|
||||
|
||||
export default Component
|
||||
`
|
||||
const ast = parser.parse(code, "js")
|
||||
expect(ast.parseError).toBe(false)
|
||||
expect(ast.imports).toHaveLength(1)
|
||||
expect(ast.functions).toHaveLength(1)
|
||||
})
|
||||
|
||||
it("should parse JSX file", () => {
|
||||
const code = `
|
||||
import React from "react"
|
||||
|
||||
function App() {
|
||||
return <div>Hello</div>
|
||||
}
|
||||
`
|
||||
const ast = parser.parse(code, "jsx")
|
||||
expect(ast.parseError).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("TSX support", () => {
|
||||
it("should parse TSX file", () => {
|
||||
const code = `
|
||||
import React from "react"
|
||||
|
||||
interface Props {
|
||||
name: string
|
||||
}
|
||||
|
||||
export function Greeting({ name }: Props) {
|
||||
return <h1>Hello, {name}!</h1>
|
||||
}
|
||||
`
|
||||
const ast = parser.parse(code, "tsx")
|
||||
expect(ast.parseError).toBe(false)
|
||||
expect(ast.interfaces).toHaveLength(1)
|
||||
expect(ast.functions).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe("complex file", () => {
|
||||
it("should parse complex TypeScript file", () => {
|
||||
const code = `
|
||||
import * as fs from "node:fs"
|
||||
import { join } from "node:path"
|
||||
import type { Config } from "./types"
|
||||
|
||||
export interface Options {
|
||||
root: string
|
||||
verbose?: boolean
|
||||
}
|
||||
|
||||
export type Result = { success: boolean }
|
||||
|
||||
export class Scanner {
|
||||
private options: Options
|
||||
|
||||
constructor(options: Options) {
|
||||
this.options = options
|
||||
}
|
||||
|
||||
async scan(): Promise<string[]> {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
export function createScanner(options: Options): Scanner {
|
||||
return new Scanner(options)
|
||||
}
|
||||
|
||||
export const VERSION = "1.0.0"
|
||||
`
|
||||
const ast = parser.parse(code, "ts")
|
||||
|
||||
expect(ast.parseError).toBe(false)
|
||||
expect(ast.imports.length).toBeGreaterThanOrEqual(2)
|
||||
expect(ast.interfaces).toHaveLength(1)
|
||||
expect(ast.typeAliases).toHaveLength(1)
|
||||
expect(ast.classes).toHaveLength(1)
|
||||
expect(ast.functions.length).toBeGreaterThanOrEqual(1)
|
||||
expect(ast.exports.length).toBeGreaterThanOrEqual(4)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,238 @@
|
||||
import * as fs from "node:fs/promises"
|
||||
import * as path from "node:path"
|
||||
import { describe, it, expect, beforeAll, afterAll } from "vitest"
|
||||
import {
|
||||
FileScanner,
|
||||
type ScanProgress,
|
||||
} from "../../../../src/infrastructure/indexer/FileScanner.js"
|
||||
import type { ScanResult } from "../../../../src/domain/services/IIndexer.js"
|
||||
|
||||
const FIXTURES_DIR = path.join(__dirname, "../../../fixtures/sample-project")
|
||||
|
||||
describe("FileScanner", () => {
|
||||
describe("constructor", () => {
|
||||
it("should create instance with default options", () => {
|
||||
const scanner = new FileScanner()
|
||||
expect(scanner).toBeInstanceOf(FileScanner)
|
||||
})
|
||||
|
||||
it("should accept custom extensions", () => {
|
||||
const scanner = new FileScanner({ extensions: [".ts", ".js"] })
|
||||
expect(scanner.isSupportedExtension("file.ts")).toBe(true)
|
||||
expect(scanner.isSupportedExtension("file.js")).toBe(true)
|
||||
expect(scanner.isSupportedExtension("file.tsx")).toBe(false)
|
||||
})
|
||||
|
||||
it("should accept additional ignore patterns", () => {
|
||||
const scanner = new FileScanner({ additionalIgnore: ["*.test.ts"] })
|
||||
expect(scanner).toBeInstanceOf(FileScanner)
|
||||
})
|
||||
|
||||
it("should accept progress callback", () => {
|
||||
const onProgress = (progress: ScanProgress): void => {
|
||||
// callback
|
||||
}
|
||||
const scanner = new FileScanner({ onProgress })
|
||||
expect(scanner).toBeInstanceOf(FileScanner)
|
||||
})
|
||||
})
|
||||
|
||||
describe("isSupportedExtension", () => {
|
||||
it("should return true for supported extensions", () => {
|
||||
const scanner = new FileScanner()
|
||||
expect(scanner.isSupportedExtension("file.ts")).toBe(true)
|
||||
expect(scanner.isSupportedExtension("file.tsx")).toBe(true)
|
||||
expect(scanner.isSupportedExtension("file.js")).toBe(true)
|
||||
expect(scanner.isSupportedExtension("file.jsx")).toBe(true)
|
||||
expect(scanner.isSupportedExtension("file.json")).toBe(true)
|
||||
expect(scanner.isSupportedExtension("file.yaml")).toBe(true)
|
||||
expect(scanner.isSupportedExtension("file.yml")).toBe(true)
|
||||
})
|
||||
|
||||
it("should return false for unsupported extensions", () => {
|
||||
const scanner = new FileScanner()
|
||||
expect(scanner.isSupportedExtension("file.md")).toBe(false)
|
||||
expect(scanner.isSupportedExtension("file.txt")).toBe(false)
|
||||
expect(scanner.isSupportedExtension("file.png")).toBe(false)
|
||||
})
|
||||
|
||||
it("should be case-insensitive", () => {
|
||||
const scanner = new FileScanner()
|
||||
expect(scanner.isSupportedExtension("file.TS")).toBe(true)
|
||||
expect(scanner.isSupportedExtension("file.TSX")).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe("scan", () => {
|
||||
it("should scan directory and yield file results", async () => {
|
||||
const scanner = new FileScanner()
|
||||
const results: ScanResult[] = []
|
||||
|
||||
for await (const result of scanner.scan(FIXTURES_DIR)) {
|
||||
results.push(result)
|
||||
}
|
||||
|
||||
expect(results.length).toBeGreaterThan(0)
|
||||
expect(results.every((r) => r.type === "file")).toBe(true)
|
||||
})
|
||||
|
||||
it("should return relative paths", async () => {
|
||||
const scanner = new FileScanner()
|
||||
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||
|
||||
for (const result of results) {
|
||||
expect(path.isAbsolute(result.path)).toBe(false)
|
||||
}
|
||||
})
|
||||
|
||||
it("should include file stats", async () => {
|
||||
const scanner = new FileScanner()
|
||||
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||
|
||||
for (const result of results) {
|
||||
expect(typeof result.size).toBe("number")
|
||||
expect(result.size).toBeGreaterThanOrEqual(0)
|
||||
expect(typeof result.lastModified).toBe("number")
|
||||
expect(result.lastModified).toBeGreaterThan(0)
|
||||
}
|
||||
})
|
||||
|
||||
it("should ignore node_modules by default", async () => {
|
||||
const scanner = new FileScanner()
|
||||
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||
|
||||
const nodeModulesFiles = results.filter((r) => r.path.includes("node_modules"))
|
||||
expect(nodeModulesFiles).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("should respect .gitignore", async () => {
|
||||
const scanner = new FileScanner()
|
||||
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||
|
||||
const ignoredFile = results.find((r) => r.path.includes("ignored-file"))
|
||||
expect(ignoredFile).toBeUndefined()
|
||||
})
|
||||
|
||||
it("should only include supported extensions", async () => {
|
||||
const scanner = new FileScanner({ extensions: [".ts"] })
|
||||
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||
|
||||
for (const result of results) {
|
||||
expect(result.path.endsWith(".ts")).toBe(true)
|
||||
}
|
||||
})
|
||||
|
||||
it("should call progress callback", async () => {
|
||||
const progressCalls: ScanProgress[] = []
|
||||
const scanner = new FileScanner({
|
||||
onProgress: (progress) => {
|
||||
progressCalls.push({ ...progress })
|
||||
},
|
||||
})
|
||||
|
||||
await scanner.scanAll(FIXTURES_DIR)
|
||||
|
||||
expect(progressCalls.length).toBeGreaterThan(0)
|
||||
for (const progress of progressCalls) {
|
||||
expect(progress.current).toBeGreaterThan(0)
|
||||
expect(progress.total).toBeGreaterThan(0)
|
||||
expect(typeof progress.currentFile).toBe("string")
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe("scanAll", () => {
|
||||
it("should return array of all results", async () => {
|
||||
const scanner = new FileScanner()
|
||||
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||
|
||||
expect(Array.isArray(results)).toBe(true)
|
||||
expect(results.length).toBeGreaterThan(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("isTextFile", () => {
|
||||
let textFilePath: string
|
||||
let binaryFilePath: string
|
||||
|
||||
beforeAll(async () => {
|
||||
textFilePath = path.join(FIXTURES_DIR, "src", "index.ts")
|
||||
binaryFilePath = path.join(FIXTURES_DIR, "binary-test.bin")
|
||||
await fs.writeFile(binaryFilePath, Buffer.from([0x00, 0x01, 0x02]))
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
try {
|
||||
await fs.unlink(binaryFilePath)
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
})
|
||||
|
||||
it("should return true for text files", async () => {
|
||||
const isText = await FileScanner.isTextFile(textFilePath)
|
||||
expect(isText).toBe(true)
|
||||
})
|
||||
|
||||
it("should return false for binary files", async () => {
|
||||
const isText = await FileScanner.isTextFile(binaryFilePath)
|
||||
expect(isText).toBe(false)
|
||||
})
|
||||
|
||||
it("should return false for non-existent files", async () => {
|
||||
const isText = await FileScanner.isTextFile("/non/existent/file.ts")
|
||||
expect(isText).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("readFileContent", () => {
|
||||
it("should read text file content", async () => {
|
||||
const filePath = path.join(FIXTURES_DIR, "src", "index.ts")
|
||||
const content = await FileScanner.readFileContent(filePath)
|
||||
|
||||
expect(content).not.toBeNull()
|
||||
expect(content).toContain("export function main")
|
||||
})
|
||||
|
||||
it("should return null for binary files", async () => {
|
||||
const binaryFilePath = path.join(FIXTURES_DIR, "binary-test2.bin")
|
||||
await fs.writeFile(binaryFilePath, Buffer.from([0x00, 0x01, 0x02]))
|
||||
|
||||
try {
|
||||
const content = await FileScanner.readFileContent(binaryFilePath)
|
||||
expect(content).toBeNull()
|
||||
} finally {
|
||||
await fs.unlink(binaryFilePath)
|
||||
}
|
||||
})
|
||||
|
||||
it("should return null for non-existent files", async () => {
|
||||
const content = await FileScanner.readFileContent("/non/existent/file.ts")
|
||||
expect(content).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
describe("empty directory handling", () => {
|
||||
let emptyDir: string
|
||||
|
||||
beforeAll(async () => {
|
||||
emptyDir = path.join(FIXTURES_DIR, "empty-dir")
|
||||
await fs.mkdir(emptyDir, { recursive: true })
|
||||
})
|
||||
|
||||
afterAll(async () => {
|
||||
try {
|
||||
await fs.rmdir(emptyDir)
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
})
|
||||
|
||||
it("should handle empty directories gracefully", async () => {
|
||||
const scanner = new FileScanner()
|
||||
const results = await scanner.scanAll(emptyDir)
|
||||
|
||||
expect(results).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,608 @@
|
||||
import { describe, it, expect, beforeAll } from "vitest"
|
||||
import { IndexBuilder } from "../../../../src/infrastructure/indexer/IndexBuilder.js"
|
||||
import { ASTParser } from "../../../../src/infrastructure/indexer/ASTParser.js"
|
||||
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||
import { createEmptyFileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||
|
||||
describe("IndexBuilder", () => {
|
||||
let builder: IndexBuilder
|
||||
let parser: ASTParser
|
||||
const projectRoot = "/project"
|
||||
|
||||
beforeAll(() => {
|
||||
builder = new IndexBuilder(projectRoot)
|
||||
parser = new ASTParser()
|
||||
})
|
||||
|
||||
describe("buildSymbolIndex", () => {
|
||||
it("should index function declarations", () => {
|
||||
const code = `
|
||||
export function greet(name: string): string {
|
||||
return \`Hello, \${name}!\`
|
||||
}
|
||||
|
||||
function privateHelper(): void {}
|
||||
`
|
||||
const ast = parser.parse(code, "ts")
|
||||
const asts = new Map<string, FileAST>([["/project/src/utils.ts", ast]])
|
||||
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
expect(index.has("greet")).toBe(true)
|
||||
expect(index.has("privateHelper")).toBe(true)
|
||||
expect(index.get("greet")).toEqual([
|
||||
{ path: "/project/src/utils.ts", line: 2, type: "function" },
|
||||
])
|
||||
})
|
||||
|
||||
it("should index class declarations and methods", () => {
|
||||
const code = `
|
||||
export class UserService {
|
||||
async findById(id: string): Promise<User> {
|
||||
return this.db.find(id)
|
||||
}
|
||||
|
||||
private validate(data: unknown): void {}
|
||||
}
|
||||
`
|
||||
const ast = parser.parse(code, "ts")
|
||||
const asts = new Map<string, FileAST>([["/project/src/UserService.ts", ast]])
|
||||
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
expect(index.has("UserService")).toBe(true)
|
||||
expect(index.get("UserService")).toEqual([
|
||||
{ path: "/project/src/UserService.ts", line: 2, type: "class" },
|
||||
])
|
||||
|
||||
expect(index.has("UserService.findById")).toBe(true)
|
||||
expect(index.has("UserService.validate")).toBe(true)
|
||||
})
|
||||
|
||||
it("should index interface declarations", () => {
|
||||
const code = `
|
||||
export interface User {
|
||||
id: string
|
||||
name: string
|
||||
}
|
||||
|
||||
interface InternalConfig {
|
||||
debug: boolean
|
||||
}
|
||||
`
|
||||
const ast = parser.parse(code, "ts")
|
||||
const asts = new Map<string, FileAST>([["/project/src/types.ts", ast]])
|
||||
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
expect(index.has("User")).toBe(true)
|
||||
expect(index.has("InternalConfig")).toBe(true)
|
||||
expect(index.get("User")).toEqual([
|
||||
{ path: "/project/src/types.ts", line: 2, type: "interface" },
|
||||
])
|
||||
})
|
||||
|
||||
it("should index type alias declarations", () => {
|
||||
const code = `
|
||||
export type UserId = string
|
||||
type Handler = (event: Event) => void
|
||||
`
|
||||
const ast = parser.parse(code, "ts")
|
||||
const asts = new Map<string, FileAST>([["/project/src/types.ts", ast]])
|
||||
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
expect(index.has("UserId")).toBe(true)
|
||||
expect(index.has("Handler")).toBe(true)
|
||||
expect(index.get("UserId")).toEqual([
|
||||
{ path: "/project/src/types.ts", line: 2, type: "type" },
|
||||
])
|
||||
})
|
||||
|
||||
it("should index exported variables", () => {
|
||||
const code = `
|
||||
export const API_URL = "https://api.example.com"
|
||||
export const DEFAULT_TIMEOUT = 5000
|
||||
`
|
||||
const ast = parser.parse(code, "ts")
|
||||
const asts = new Map<string, FileAST>([["/project/src/config.ts", ast]])
|
||||
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
expect(index.has("API_URL")).toBe(true)
|
||||
expect(index.has("DEFAULT_TIMEOUT")).toBe(true)
|
||||
})
|
||||
|
||||
it("should handle multiple files", () => {
|
||||
const userCode = `export class User { name: string }`
|
||||
const orderCode = `export class Order { id: string }`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/User.ts", parser.parse(userCode, "ts")],
|
||||
["/project/src/Order.ts", parser.parse(orderCode, "ts")],
|
||||
])
|
||||
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
expect(index.has("User")).toBe(true)
|
||||
expect(index.has("Order")).toBe(true)
|
||||
expect(index.get("User")?.[0].path).toBe("/project/src/User.ts")
|
||||
expect(index.get("Order")?.[0].path).toBe("/project/src/Order.ts")
|
||||
})
|
||||
|
||||
it("should handle duplicate symbol names across files", () => {
|
||||
const file1 = `export function helper(): void {}`
|
||||
const file2 = `export function helper(): void {}`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/a/utils.ts", parser.parse(file1, "ts")],
|
||||
["/project/src/b/utils.ts", parser.parse(file2, "ts")],
|
||||
])
|
||||
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
expect(index.has("helper")).toBe(true)
|
||||
expect(index.get("helper")).toHaveLength(2)
|
||||
})
|
||||
|
||||
it("should return empty index for empty ASTs", () => {
|
||||
const asts = new Map<string, FileAST>()
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
expect(index.size).toBe(0)
|
||||
})
|
||||
|
||||
it("should not index empty names", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.functions.push({
|
||||
name: "",
|
||||
lineStart: 1,
|
||||
lineEnd: 3,
|
||||
params: [],
|
||||
isAsync: false,
|
||||
isExported: false,
|
||||
})
|
||||
const asts = new Map<string, FileAST>([["/project/src/test.ts", ast]])
|
||||
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
expect(index.has("")).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("buildDepsGraph", () => {
|
||||
it("should build import relationships", () => {
|
||||
const indexCode = `
|
||||
import { helper } from "./utils"
|
||||
export function main() { return helper() }
|
||||
`
|
||||
const utilsCode = `export function helper() { return 42 }`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/index.ts", parser.parse(indexCode, "ts")],
|
||||
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||
])
|
||||
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
expect(graph.imports.get("/project/src/index.ts")).toContain("/project/src/utils.ts")
|
||||
expect(graph.imports.get("/project/src/utils.ts")).toEqual([])
|
||||
})
|
||||
|
||||
it("should build reverse import relationships", () => {
|
||||
const indexCode = `import { helper } from "./utils"`
|
||||
const utilsCode = `export function helper() {}`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/index.ts", parser.parse(indexCode, "ts")],
|
||||
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||
])
|
||||
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
expect(graph.importedBy.get("/project/src/utils.ts")).toContain("/project/src/index.ts")
|
||||
expect(graph.importedBy.get("/project/src/index.ts")).toEqual([])
|
||||
})
|
||||
|
||||
it("should handle multiple imports from same file", () => {
|
||||
const code = `
|
||||
import { a } from "./utils"
|
||||
import { b } from "./utils"
|
||||
`
|
||||
const utilsCode = `export const a = 1; export const b = 2;`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/index.ts", parser.parse(code, "ts")],
|
||||
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||
])
|
||||
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const imports = graph.imports.get("/project/src/index.ts") ?? []
|
||||
expect(imports.filter((i) => i === "/project/src/utils.ts")).toHaveLength(1)
|
||||
})
|
||||
|
||||
it("should ignore external imports", () => {
|
||||
const code = `
|
||||
import React from "react"
|
||||
import { helper } from "./utils"
|
||||
`
|
||||
const utilsCode = `export function helper() {}`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/index.ts", parser.parse(code, "ts")],
|
||||
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||
])
|
||||
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const imports = graph.imports.get("/project/src/index.ts") ?? []
|
||||
expect(imports).not.toContain("react")
|
||||
expect(imports).toContain("/project/src/utils.ts")
|
||||
})
|
||||
|
||||
it("should ignore builtin imports", () => {
|
||||
const code = `
|
||||
import * as fs from "node:fs"
|
||||
import { helper } from "./utils"
|
||||
`
|
||||
const utilsCode = `export function helper() {}`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/index.ts", parser.parse(code, "ts")],
|
||||
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||
])
|
||||
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const imports = graph.imports.get("/project/src/index.ts") ?? []
|
||||
expect(imports).not.toContain("node:fs")
|
||||
})
|
||||
|
||||
it("should handle index.ts imports", () => {
|
||||
const code = `import { util } from "./utils"`
|
||||
const indexCode = `export function util() {}`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/main.ts", parser.parse(code, "ts")],
|
||||
["/project/src/utils/index.ts", parser.parse(indexCode, "ts")],
|
||||
])
|
||||
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
expect(graph.imports.get("/project/src/main.ts")).toContain(
|
||||
"/project/src/utils/index.ts",
|
||||
)
|
||||
})
|
||||
|
||||
it("should handle .js extension imports", () => {
|
||||
const code = `import { helper } from "./utils.js"`
|
||||
const utilsCode = `export function helper() {}`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/index.ts", parser.parse(code, "ts")],
|
||||
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||
])
|
||||
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
expect(graph.imports.get("/project/src/index.ts")).toContain("/project/src/utils.ts")
|
||||
})
|
||||
|
||||
it("should sort dependencies", () => {
|
||||
const code = `
|
||||
import { c } from "./c"
|
||||
import { a } from "./a"
|
||||
import { b } from "./b"
|
||||
`
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/index.ts", parser.parse(code, "ts")],
|
||||
["/project/src/a.ts", parser.parse("export const a = 1", "ts")],
|
||||
["/project/src/b.ts", parser.parse("export const b = 2", "ts")],
|
||||
["/project/src/c.ts", parser.parse("export const c = 3", "ts")],
|
||||
])
|
||||
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
expect(graph.imports.get("/project/src/index.ts")).toEqual([
|
||||
"/project/src/a.ts",
|
||||
"/project/src/b.ts",
|
||||
"/project/src/c.ts",
|
||||
])
|
||||
})
|
||||
|
||||
it("should return empty graph for empty ASTs", () => {
|
||||
const asts = new Map<string, FileAST>()
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
expect(graph.imports.size).toBe(0)
|
||||
expect(graph.importedBy.size).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("findSymbol", () => {
|
||||
it("should find existing symbol", () => {
|
||||
const code = `export function greet(): void {}`
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/utils.ts", parser.parse(code, "ts")],
|
||||
])
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
const locations = builder.findSymbol(index, "greet")
|
||||
expect(locations).toHaveLength(1)
|
||||
expect(locations[0].path).toBe("/project/src/utils.ts")
|
||||
})
|
||||
|
||||
it("should return empty array for non-existent symbol", () => {
|
||||
const asts = new Map<string, FileAST>()
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
const locations = builder.findSymbol(index, "nonexistent")
|
||||
expect(locations).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
describe("searchSymbols", () => {
|
||||
it("should find symbols matching pattern", () => {
|
||||
const code = `
|
||||
export function getUserById(): void {}
|
||||
export function getUserByEmail(): void {}
|
||||
export function createOrder(): void {}
|
||||
`
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/api.ts", parser.parse(code, "ts")],
|
||||
])
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
const results = builder.searchSymbols(index, "getUser")
|
||||
expect(results.size).toBe(2)
|
||||
expect(results.has("getUserById")).toBe(true)
|
||||
expect(results.has("getUserByEmail")).toBe(true)
|
||||
})
|
||||
|
||||
it("should be case insensitive", () => {
|
||||
const code = `export function MyFunction(): void {}`
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/test.ts", parser.parse(code, "ts")],
|
||||
])
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
const results = builder.searchSymbols(index, "myfunction")
|
||||
expect(results.has("MyFunction")).toBe(true)
|
||||
})
|
||||
|
||||
it("should return empty map for no matches", () => {
|
||||
const code = `export function test(): void {}`
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/test.ts", parser.parse(code, "ts")],
|
||||
])
|
||||
const index = builder.buildSymbolIndex(asts)
|
||||
|
||||
const results = builder.searchSymbols(index, "xyz123")
|
||||
expect(results.size).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getDependencies", () => {
|
||||
it("should return file dependencies", () => {
|
||||
const indexCode = `import { a } from "./a"`
|
||||
const aCode = `export const a = 1`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/index.ts", parser.parse(indexCode, "ts")],
|
||||
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||
])
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const deps = builder.getDependencies(graph, "/project/src/index.ts")
|
||||
expect(deps).toContain("/project/src/a.ts")
|
||||
})
|
||||
|
||||
it("should return empty array for file not in graph", () => {
|
||||
const asts = new Map<string, FileAST>()
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const deps = builder.getDependencies(graph, "/nonexistent.ts")
|
||||
expect(deps).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
describe("getDependents", () => {
|
||||
it("should return file dependents", () => {
|
||||
const indexCode = `import { a } from "./a"`
|
||||
const aCode = `export const a = 1`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/index.ts", parser.parse(indexCode, "ts")],
|
||||
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||
])
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const dependents = builder.getDependents(graph, "/project/src/a.ts")
|
||||
expect(dependents).toContain("/project/src/index.ts")
|
||||
})
|
||||
|
||||
it("should return empty array for file not in graph", () => {
|
||||
const asts = new Map<string, FileAST>()
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const dependents = builder.getDependents(graph, "/nonexistent.ts")
|
||||
expect(dependents).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
describe("findCircularDependencies", () => {
|
||||
it("should detect simple circular dependency", () => {
|
||||
const aCode = `import { b } from "./b"; export const a = 1;`
|
||||
const bCode = `import { a } from "./a"; export const b = 2;`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||
["/project/src/b.ts", parser.parse(bCode, "ts")],
|
||||
])
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const cycles = builder.findCircularDependencies(graph)
|
||||
expect(cycles.length).toBe(1)
|
||||
expect(cycles[0]).toContain("/project/src/a.ts")
|
||||
expect(cycles[0]).toContain("/project/src/b.ts")
|
||||
})
|
||||
|
||||
it("should detect three-way circular dependency", () => {
|
||||
const aCode = `import { b } from "./b"; export const a = 1;`
|
||||
const bCode = `import { c } from "./c"; export const b = 2;`
|
||||
const cCode = `import { a } from "./a"; export const c = 3;`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||
["/project/src/b.ts", parser.parse(bCode, "ts")],
|
||||
["/project/src/c.ts", parser.parse(cCode, "ts")],
|
||||
])
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const cycles = builder.findCircularDependencies(graph)
|
||||
expect(cycles.length).toBe(1)
|
||||
expect(cycles[0]).toHaveLength(4)
|
||||
})
|
||||
|
||||
it("should return empty array when no cycles", () => {
|
||||
const aCode = `export const a = 1`
|
||||
const bCode = `import { a } from "./a"; export const b = a + 1;`
|
||||
const cCode = `import { b } from "./b"; export const c = b + 1;`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||
["/project/src/b.ts", parser.parse(bCode, "ts")],
|
||||
["/project/src/c.ts", parser.parse(cCode, "ts")],
|
||||
])
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const cycles = builder.findCircularDependencies(graph)
|
||||
expect(cycles).toEqual([])
|
||||
})
|
||||
|
||||
it("should handle self-reference", () => {
|
||||
const aCode = `import { helper } from "./a"; export const a = 1; export function helper() {}`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||
])
|
||||
const graph = builder.buildDepsGraph(asts)
|
||||
|
||||
const cycles = builder.findCircularDependencies(graph)
|
||||
expect(cycles.length).toBe(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getStats", () => {
|
||||
it("should return comprehensive statistics", () => {
|
||||
const code1 = `
|
||||
export function func1(): void {}
|
||||
export class Class1 {}
|
||||
export interface Interface1 {}
|
||||
export type Type1 = string
|
||||
export const VAR1 = 1
|
||||
`
|
||||
const code2 = `
|
||||
import { func1 } from "./file1"
|
||||
export function func2(): void {}
|
||||
`
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/file1.ts", parser.parse(code1, "ts")],
|
||||
["/project/src/file2.ts", parser.parse(code2, "ts")],
|
||||
])
|
||||
|
||||
const symbolIndex = builder.buildSymbolIndex(asts)
|
||||
const depsGraph = builder.buildDepsGraph(asts)
|
||||
const stats = builder.getStats(symbolIndex, depsGraph)
|
||||
|
||||
expect(stats.totalSymbols).toBeGreaterThan(0)
|
||||
expect(stats.symbolsByType.function).toBeGreaterThan(0)
|
||||
expect(stats.symbolsByType.class).toBe(1)
|
||||
expect(stats.symbolsByType.interface).toBe(1)
|
||||
expect(stats.symbolsByType.type).toBe(1)
|
||||
expect(stats.totalFiles).toBe(2)
|
||||
expect(stats.totalDependencies).toBe(1)
|
||||
})
|
||||
|
||||
it("should identify hubs", () => {
|
||||
const hubCode = `export const shared = 1`
|
||||
const consumerCodes = Array.from({ length: 6 }, () => `import { shared } from "./hub"`)
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/hub.ts", parser.parse(hubCode, "ts")],
|
||||
])
|
||||
consumerCodes.forEach((code, i) => {
|
||||
asts.set(`/project/src/consumer${i}.ts`, parser.parse(code, "ts"))
|
||||
})
|
||||
|
||||
const symbolIndex = builder.buildSymbolIndex(asts)
|
||||
const depsGraph = builder.buildDepsGraph(asts)
|
||||
const stats = builder.getStats(symbolIndex, depsGraph)
|
||||
|
||||
expect(stats.hubs).toContain("/project/src/hub.ts")
|
||||
})
|
||||
|
||||
it("should identify orphans", () => {
|
||||
const orphanCode = `const internal = 1`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/orphan.ts", parser.parse(orphanCode, "ts")],
|
||||
])
|
||||
|
||||
const symbolIndex = builder.buildSymbolIndex(asts)
|
||||
const depsGraph = builder.buildDepsGraph(asts)
|
||||
const stats = builder.getStats(symbolIndex, depsGraph)
|
||||
|
||||
expect(stats.orphans).toContain("/project/src/orphan.ts")
|
||||
})
|
||||
})
|
||||
|
||||
describe("integration with ASTParser", () => {
|
||||
it("should work with complex TypeScript code", () => {
|
||||
const code = `
|
||||
import { BaseService } from "./base"
|
||||
import type { User, UserDTO } from "./types"
|
||||
|
||||
export class UserService extends BaseService {
|
||||
private readonly cache = new Map<string, User>()
|
||||
|
||||
async findById(id: string): Promise<User | null> {
|
||||
if (this.cache.has(id)) {
|
||||
return this.cache.get(id)!
|
||||
}
|
||||
return this.repository.find(id)
|
||||
}
|
||||
|
||||
toDTO(user: User): UserDTO {
|
||||
return { id: user.id, name: user.name }
|
||||
}
|
||||
}
|
||||
|
||||
export type ServiceResult<T> = { success: true; data: T } | { success: false; error: string }
|
||||
`
|
||||
const baseCode = `export class BaseService { protected repository: any }`
|
||||
const typesCode = `export interface User { id: string; name: string }; export interface UserDTO { id: string; name: string }`
|
||||
|
||||
const asts = new Map<string, FileAST>([
|
||||
["/project/src/UserService.ts", parser.parse(code, "ts")],
|
||||
["/project/src/base.ts", parser.parse(baseCode, "ts")],
|
||||
["/project/src/types.ts", parser.parse(typesCode, "ts")],
|
||||
])
|
||||
|
||||
const symbolIndex = builder.buildSymbolIndex(asts)
|
||||
const depsGraph = builder.buildDepsGraph(asts)
|
||||
|
||||
expect(symbolIndex.has("UserService")).toBe(true)
|
||||
expect(symbolIndex.has("UserService.findById")).toBe(true)
|
||||
expect(symbolIndex.has("UserService.toDTO")).toBe(true)
|
||||
expect(symbolIndex.has("ServiceResult")).toBe(true)
|
||||
expect(symbolIndex.has("BaseService")).toBe(true)
|
||||
expect(symbolIndex.has("User")).toBe(true)
|
||||
|
||||
expect(depsGraph.imports.get("/project/src/UserService.ts")).toContain(
|
||||
"/project/src/base.ts",
|
||||
)
|
||||
expect(depsGraph.imports.get("/project/src/UserService.ts")).toContain(
|
||||
"/project/src/types.ts",
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,702 @@
|
||||
import { describe, it, expect, beforeAll } from "vitest"
|
||||
import { MetaAnalyzer } from "../../../../src/infrastructure/indexer/MetaAnalyzer.js"
|
||||
import { ASTParser } from "../../../../src/infrastructure/indexer/ASTParser.js"
|
||||
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||
import { createEmptyFileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||
|
||||
describe("MetaAnalyzer", () => {
|
||||
let analyzer: MetaAnalyzer
|
||||
let parser: ASTParser
|
||||
const projectRoot = "/project"
|
||||
|
||||
beforeAll(() => {
|
||||
analyzer = new MetaAnalyzer(projectRoot)
|
||||
parser = new ASTParser()
|
||||
})
|
||||
|
||||
describe("countLinesOfCode", () => {
|
||||
it("should count non-empty lines", () => {
|
||||
const content = `const a = 1
|
||||
const b = 2
|
||||
const c = 3`
|
||||
const loc = analyzer.countLinesOfCode(content)
|
||||
expect(loc).toBe(3)
|
||||
})
|
||||
|
||||
it("should exclude empty lines", () => {
|
||||
const content = `const a = 1
|
||||
|
||||
const b = 2
|
||||
|
||||
const c = 3`
|
||||
const loc = analyzer.countLinesOfCode(content)
|
||||
expect(loc).toBe(3)
|
||||
})
|
||||
|
||||
it("should exclude single-line comments", () => {
|
||||
const content = `// This is a comment
|
||||
const a = 1
|
||||
// Another comment
|
||||
const b = 2`
|
||||
const loc = analyzer.countLinesOfCode(content)
|
||||
expect(loc).toBe(2)
|
||||
})
|
||||
|
||||
it("should exclude block comments", () => {
|
||||
const content = `/*
|
||||
* Multi-line comment
|
||||
*/
|
||||
const a = 1
|
||||
/* inline block */ const b = 2`
|
||||
const loc = analyzer.countLinesOfCode(content)
|
||||
expect(loc).toBe(2)
|
||||
})
|
||||
|
||||
it("should handle multi-line block comments", () => {
|
||||
const content = `const a = 1
|
||||
/*
|
||||
comment line 1
|
||||
comment line 2
|
||||
*/
|
||||
const b = 2`
|
||||
const loc = analyzer.countLinesOfCode(content)
|
||||
expect(loc).toBe(2)
|
||||
})
|
||||
|
||||
it("should return 0 for empty content", () => {
|
||||
const loc = analyzer.countLinesOfCode("")
|
||||
expect(loc).toBe(0)
|
||||
})
|
||||
|
||||
it("should return 0 for only comments", () => {
|
||||
const content = `// comment 1
|
||||
// comment 2
|
||||
/* block comment */`
|
||||
const loc = analyzer.countLinesOfCode(content)
|
||||
expect(loc).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("calculateMaxNesting", () => {
|
||||
it("should return 0 for empty AST", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
const nesting = analyzer.calculateMaxNesting(ast)
|
||||
expect(nesting).toBe(0)
|
||||
})
|
||||
|
||||
it("should estimate nesting for short functions", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.functions.push({
|
||||
name: "test",
|
||||
lineStart: 1,
|
||||
lineEnd: 3,
|
||||
params: [],
|
||||
isAsync: false,
|
||||
isExported: false,
|
||||
})
|
||||
const nesting = analyzer.calculateMaxNesting(ast)
|
||||
expect(nesting).toBe(1)
|
||||
})
|
||||
|
||||
it("should estimate higher nesting for longer functions", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.functions.push({
|
||||
name: "test",
|
||||
lineStart: 1,
|
||||
lineEnd: 40,
|
||||
params: [],
|
||||
isAsync: false,
|
||||
isExported: false,
|
||||
})
|
||||
const nesting = analyzer.calculateMaxNesting(ast)
|
||||
expect(nesting).toBe(4)
|
||||
})
|
||||
|
||||
it("should return max nesting across multiple functions", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.functions.push(
|
||||
{
|
||||
name: "short",
|
||||
lineStart: 1,
|
||||
lineEnd: 3,
|
||||
params: [],
|
||||
isAsync: false,
|
||||
isExported: false,
|
||||
},
|
||||
{
|
||||
name: "long",
|
||||
lineStart: 5,
|
||||
lineEnd: 60,
|
||||
params: [],
|
||||
isAsync: false,
|
||||
isExported: false,
|
||||
},
|
||||
)
|
||||
const nesting = analyzer.calculateMaxNesting(ast)
|
||||
expect(nesting).toBe(5)
|
||||
})
|
||||
|
||||
it("should account for class methods", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.classes.push({
|
||||
name: "MyClass",
|
||||
lineStart: 1,
|
||||
lineEnd: 50,
|
||||
methods: [
|
||||
{
|
||||
name: "method1",
|
||||
lineStart: 2,
|
||||
lineEnd: 25,
|
||||
params: [],
|
||||
isAsync: false,
|
||||
visibility: "public",
|
||||
isStatic: false,
|
||||
},
|
||||
],
|
||||
properties: [],
|
||||
implements: [],
|
||||
isExported: false,
|
||||
isAbstract: false,
|
||||
})
|
||||
const nesting = analyzer.calculateMaxNesting(ast)
|
||||
expect(nesting).toBeGreaterThan(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe("calculateCyclomaticComplexity", () => {
|
||||
it("should return 1 for empty AST", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
const complexity = analyzer.calculateCyclomaticComplexity(ast)
|
||||
expect(complexity).toBe(1)
|
||||
})
|
||||
|
||||
it("should increase complexity for functions", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.functions.push({
|
||||
name: "test",
|
||||
lineStart: 1,
|
||||
lineEnd: 20,
|
||||
params: [],
|
||||
isAsync: false,
|
||||
isExported: false,
|
||||
})
|
||||
const complexity = analyzer.calculateCyclomaticComplexity(ast)
|
||||
expect(complexity).toBeGreaterThan(1)
|
||||
})
|
||||
|
||||
it("should increase complexity for class methods", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.classes.push({
|
||||
name: "MyClass",
|
||||
lineStart: 1,
|
||||
lineEnd: 50,
|
||||
methods: [
|
||||
{
|
||||
name: "method1",
|
||||
lineStart: 2,
|
||||
lineEnd: 20,
|
||||
params: [],
|
||||
isAsync: false,
|
||||
visibility: "public",
|
||||
isStatic: false,
|
||||
},
|
||||
{
|
||||
name: "method2",
|
||||
lineStart: 22,
|
||||
lineEnd: 45,
|
||||
params: [],
|
||||
isAsync: false,
|
||||
visibility: "public",
|
||||
isStatic: false,
|
||||
},
|
||||
],
|
||||
properties: [],
|
||||
implements: [],
|
||||
isExported: false,
|
||||
isAbstract: false,
|
||||
})
|
||||
const complexity = analyzer.calculateCyclomaticComplexity(ast)
|
||||
expect(complexity).toBeGreaterThan(2)
|
||||
})
|
||||
})
|
||||
|
||||
describe("calculateComplexityScore", () => {
|
||||
it("should return 0 for minimal values", () => {
|
||||
const score = analyzer.calculateComplexityScore(0, 0, 0)
|
||||
expect(score).toBe(0)
|
||||
})
|
||||
|
||||
it("should return 100 for maximum values", () => {
|
||||
const score = analyzer.calculateComplexityScore(1000, 10, 50)
|
||||
expect(score).toBe(100)
|
||||
})
|
||||
|
||||
it("should return intermediate values", () => {
|
||||
const score = analyzer.calculateComplexityScore(100, 3, 10)
|
||||
expect(score).toBeGreaterThan(0)
|
||||
expect(score).toBeLessThan(100)
|
||||
})
|
||||
})
|
||||
|
||||
describe("resolveDependencies", () => {
|
||||
it("should resolve relative imports", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.imports.push({
|
||||
name: "foo",
|
||||
from: "./utils",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||
expect(deps).toHaveLength(1)
|
||||
expect(deps[0]).toBe("/project/src/utils.ts")
|
||||
})
|
||||
|
||||
it("should resolve parent directory imports", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.imports.push({
|
||||
name: "config",
|
||||
from: "../config",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
const deps = analyzer.resolveDependencies("/project/src/utils/helper.ts", ast)
|
||||
expect(deps).toHaveLength(1)
|
||||
expect(deps[0]).toBe("/project/src/config.ts")
|
||||
})
|
||||
|
||||
it("should ignore external imports", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.imports.push({
|
||||
name: "React",
|
||||
from: "react",
|
||||
line: 1,
|
||||
type: "external",
|
||||
isDefault: true,
|
||||
})
|
||||
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||
expect(deps).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("should ignore builtin imports", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.imports.push({
|
||||
name: "fs",
|
||||
from: "node:fs",
|
||||
line: 1,
|
||||
type: "builtin",
|
||||
isDefault: false,
|
||||
})
|
||||
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||
expect(deps).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("should handle .js extension to .ts conversion", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.imports.push({
|
||||
name: "util",
|
||||
from: "./util.js",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||
expect(deps).toHaveLength(1)
|
||||
expect(deps[0]).toBe("/project/src/util.ts")
|
||||
})
|
||||
|
||||
it("should deduplicate dependencies", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.imports.push(
|
||||
{
|
||||
name: "foo",
|
||||
from: "./utils",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
},
|
||||
{
|
||||
name: "bar",
|
||||
from: "./utils",
|
||||
line: 2,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
},
|
||||
)
|
||||
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||
expect(deps).toHaveLength(1)
|
||||
})
|
||||
|
||||
it("should sort dependencies", () => {
|
||||
const ast = createEmptyFileAST()
|
||||
ast.imports.push(
|
||||
{
|
||||
name: "c",
|
||||
from: "./c",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
},
|
||||
{
|
||||
name: "a",
|
||||
from: "./a",
|
||||
line: 2,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
},
|
||||
{
|
||||
name: "b",
|
||||
from: "./b",
|
||||
line: 3,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
},
|
||||
)
|
||||
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||
expect(deps).toEqual(["/project/src/a.ts", "/project/src/b.ts", "/project/src/c.ts"])
|
||||
})
|
||||
})
|
||||
|
||||
describe("findDependents", () => {
|
||||
it("should find files that import the given file", () => {
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
|
||||
const indexAST = createEmptyFileAST()
|
||||
allASTs.set("/project/src/index.ts", indexAST)
|
||||
|
||||
const utilsAST = createEmptyFileAST()
|
||||
utilsAST.imports.push({
|
||||
name: "helper",
|
||||
from: "./helper",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
allASTs.set("/project/src/utils.ts", utilsAST)
|
||||
|
||||
const dependents = analyzer.findDependents("/project/src/helper.ts", allASTs)
|
||||
expect(dependents).toHaveLength(1)
|
||||
expect(dependents[0]).toBe("/project/src/utils.ts")
|
||||
})
|
||||
|
||||
it("should return empty array when no dependents", () => {
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
allASTs.set("/project/src/index.ts", createEmptyFileAST())
|
||||
allASTs.set("/project/src/utils.ts", createEmptyFileAST())
|
||||
|
||||
const dependents = analyzer.findDependents("/project/src/helper.ts", allASTs)
|
||||
expect(dependents).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("should not include self as dependent", () => {
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
const selfAST = createEmptyFileAST()
|
||||
selfAST.imports.push({
|
||||
name: "foo",
|
||||
from: "./helper",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
allASTs.set("/project/src/helper.ts", selfAST)
|
||||
|
||||
const dependents = analyzer.findDependents("/project/src/helper.ts", allASTs)
|
||||
expect(dependents).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("should handle index.ts imports", () => {
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
|
||||
const consumerAST = createEmptyFileAST()
|
||||
consumerAST.imports.push({
|
||||
name: "util",
|
||||
from: "./utils",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
allASTs.set("/project/src/consumer.ts", consumerAST)
|
||||
|
||||
const dependents = analyzer.findDependents("/project/src/utils/index.ts", allASTs)
|
||||
expect(dependents).toHaveLength(1)
|
||||
})
|
||||
|
||||
it("should sort dependents", () => {
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
|
||||
const fileC = createEmptyFileAST()
|
||||
fileC.imports.push({
|
||||
name: "x",
|
||||
from: "./target",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
allASTs.set("/project/src/c.ts", fileC)
|
||||
|
||||
const fileA = createEmptyFileAST()
|
||||
fileA.imports.push({
|
||||
name: "x",
|
||||
from: "./target",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
allASTs.set("/project/src/a.ts", fileA)
|
||||
|
||||
const fileB = createEmptyFileAST()
|
||||
fileB.imports.push({
|
||||
name: "x",
|
||||
from: "./target",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
allASTs.set("/project/src/b.ts", fileB)
|
||||
|
||||
const dependents = analyzer.findDependents("/project/src/target.ts", allASTs)
|
||||
expect(dependents).toEqual([
|
||||
"/project/src/a.ts",
|
||||
"/project/src/b.ts",
|
||||
"/project/src/c.ts",
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe("classifyFileType", () => {
|
||||
it("should classify test files by .test. pattern", () => {
|
||||
expect(analyzer.classifyFileType("/project/src/utils.test.ts")).toBe("test")
|
||||
})
|
||||
|
||||
it("should classify test files by .spec. pattern", () => {
|
||||
expect(analyzer.classifyFileType("/project/src/utils.spec.ts")).toBe("test")
|
||||
})
|
||||
|
||||
it("should classify test files by /tests/ directory", () => {
|
||||
expect(analyzer.classifyFileType("/project/tests/utils.ts")).toBe("test")
|
||||
})
|
||||
|
||||
it("should classify test files by /__tests__/ directory", () => {
|
||||
expect(analyzer.classifyFileType("/project/src/__tests__/utils.ts")).toBe("test")
|
||||
})
|
||||
|
||||
it("should classify .d.ts as types", () => {
|
||||
expect(analyzer.classifyFileType("/project/src/types.d.ts")).toBe("types")
|
||||
})
|
||||
|
||||
it("should classify /types/ directory as types", () => {
|
||||
expect(analyzer.classifyFileType("/project/src/types/index.ts")).toBe("types")
|
||||
})
|
||||
|
||||
it("should classify types.ts as types", () => {
|
||||
expect(analyzer.classifyFileType("/project/src/types.ts")).toBe("types")
|
||||
})
|
||||
|
||||
it("should classify config files", () => {
|
||||
expect(analyzer.classifyFileType("/project/tsconfig.json")).toBe("config")
|
||||
expect(analyzer.classifyFileType("/project/eslint.config.js")).toBe("config")
|
||||
expect(analyzer.classifyFileType("/project/vitest.config.ts")).toBe("config")
|
||||
expect(analyzer.classifyFileType("/project/jest.config.js")).toBe("config")
|
||||
})
|
||||
|
||||
it("should classify regular source files", () => {
|
||||
expect(analyzer.classifyFileType("/project/src/index.ts")).toBe("source")
|
||||
expect(analyzer.classifyFileType("/project/src/utils.tsx")).toBe("source")
|
||||
expect(analyzer.classifyFileType("/project/src/helper.js")).toBe("source")
|
||||
})
|
||||
|
||||
it("should classify unknown file types", () => {
|
||||
expect(analyzer.classifyFileType("/project/README.md")).toBe("unknown")
|
||||
expect(analyzer.classifyFileType("/project/data.json")).toBe("unknown")
|
||||
})
|
||||
})
|
||||
|
||||
describe("isEntryPointFile", () => {
|
||||
it("should identify index files as entry points", () => {
|
||||
expect(analyzer.isEntryPointFile("/project/src/index.ts", 5)).toBe(true)
|
||||
expect(analyzer.isEntryPointFile("/project/src/index.js", 5)).toBe(true)
|
||||
})
|
||||
|
||||
it("should identify files with no dependents as entry points", () => {
|
||||
expect(analyzer.isEntryPointFile("/project/src/utils.ts", 0)).toBe(true)
|
||||
})
|
||||
|
||||
it("should identify main.ts as entry point", () => {
|
||||
expect(analyzer.isEntryPointFile("/project/src/main.ts", 5)).toBe(true)
|
||||
})
|
||||
|
||||
it("should identify app.ts as entry point", () => {
|
||||
expect(analyzer.isEntryPointFile("/project/src/app.tsx", 5)).toBe(true)
|
||||
})
|
||||
|
||||
it("should identify cli.ts as entry point", () => {
|
||||
expect(analyzer.isEntryPointFile("/project/src/cli.ts", 5)).toBe(true)
|
||||
})
|
||||
|
||||
it("should identify server.ts as entry point", () => {
|
||||
expect(analyzer.isEntryPointFile("/project/src/server.ts", 5)).toBe(true)
|
||||
})
|
||||
|
||||
it("should not identify regular files with dependents as entry points", () => {
|
||||
expect(analyzer.isEntryPointFile("/project/src/utils.ts", 3)).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("analyze", () => {
|
||||
it("should produce complete FileMeta", () => {
|
||||
const content = `import { helper } from "./helper"
|
||||
|
||||
export function main() {
|
||||
return helper()
|
||||
}
|
||||
`
|
||||
const ast = parser.parse(content, "ts")
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
allASTs.set("/project/src/index.ts", ast)
|
||||
|
||||
const meta = analyzer.analyze("/project/src/index.ts", ast, content, allASTs)
|
||||
|
||||
expect(meta.complexity).toBeDefined()
|
||||
expect(meta.complexity.loc).toBeGreaterThan(0)
|
||||
expect(meta.dependencies).toHaveLength(1)
|
||||
expect(meta.fileType).toBe("source")
|
||||
expect(meta.isEntryPoint).toBe(true)
|
||||
})
|
||||
|
||||
it("should identify hub files", () => {
|
||||
const content = `export const util = () => {}`
|
||||
const ast = parser.parse(content, "ts")
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
|
||||
for (let i = 0; i < 6; i++) {
|
||||
const consumerAST = createEmptyFileAST()
|
||||
consumerAST.imports.push({
|
||||
name: "util",
|
||||
from: "./shared",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
allASTs.set(`/project/src/consumer${i}.ts`, consumerAST)
|
||||
}
|
||||
|
||||
const meta = analyzer.analyze("/project/src/shared.ts", ast, content, allASTs)
|
||||
expect(meta.isHub).toBe(true)
|
||||
expect(meta.dependents).toHaveLength(6)
|
||||
})
|
||||
|
||||
it("should not identify as hub with few dependents", () => {
|
||||
const content = `export const util = () => {}`
|
||||
const ast = parser.parse(content, "ts")
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const consumerAST = createEmptyFileAST()
|
||||
consumerAST.imports.push({
|
||||
name: "util",
|
||||
from: "./shared",
|
||||
line: 1,
|
||||
type: "internal",
|
||||
isDefault: false,
|
||||
})
|
||||
allASTs.set(`/project/src/consumer${i}.ts`, consumerAST)
|
||||
}
|
||||
|
||||
const meta = analyzer.analyze("/project/src/shared.ts", ast, content, allASTs)
|
||||
expect(meta.isHub).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("analyzeAll", () => {
|
||||
it("should analyze multiple files", () => {
|
||||
const files = new Map<string, { ast: FileAST; content: string }>()
|
||||
|
||||
const indexContent = `import { util } from "./util"
|
||||
export function main() { return util() }`
|
||||
const indexAST = parser.parse(indexContent, "ts")
|
||||
files.set("/project/src/index.ts", { ast: indexAST, content: indexContent })
|
||||
|
||||
const utilContent = `export function util() { return 42 }`
|
||||
const utilAST = parser.parse(utilContent, "ts")
|
||||
files.set("/project/src/util.ts", { ast: utilAST, content: utilContent })
|
||||
|
||||
const results = analyzer.analyzeAll(files)
|
||||
|
||||
expect(results.size).toBe(2)
|
||||
expect(results.get("/project/src/index.ts")).toBeDefined()
|
||||
expect(results.get("/project/src/util.ts")).toBeDefined()
|
||||
|
||||
const indexMeta = results.get("/project/src/index.ts")!
|
||||
expect(indexMeta.dependencies).toContain("/project/src/util.ts")
|
||||
|
||||
const utilMeta = results.get("/project/src/util.ts")!
|
||||
expect(utilMeta.dependents).toContain("/project/src/index.ts")
|
||||
})
|
||||
|
||||
it("should handle empty files map", () => {
|
||||
const files = new Map<string, { ast: FileAST; content: string }>()
|
||||
const results = analyzer.analyzeAll(files)
|
||||
expect(results.size).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("calculateComplexity", () => {
|
||||
it("should return complete complexity metrics", () => {
|
||||
const content = `function complex() {
|
||||
if (true) {
|
||||
for (let i = 0; i < 10; i++) {
|
||||
if (i % 2 === 0) {
|
||||
console.log(i)
|
||||
}
|
||||
}
|
||||
}
|
||||
return 42
|
||||
}`
|
||||
const ast = parser.parse(content, "ts")
|
||||
const metrics = analyzer.calculateComplexity(ast, content)
|
||||
|
||||
expect(metrics.loc).toBeGreaterThan(0)
|
||||
expect(metrics.nesting).toBeGreaterThan(0)
|
||||
expect(metrics.cyclomaticComplexity).toBeGreaterThan(0)
|
||||
expect(metrics.score).toBeGreaterThanOrEqual(0)
|
||||
expect(metrics.score).toBeLessThanOrEqual(100)
|
||||
})
|
||||
})
|
||||
|
||||
describe("integration with ASTParser", () => {
|
||||
it("should work with real parsed AST", () => {
|
||||
const content = `import { readFile } from "node:fs"
|
||||
import { helper } from "./helper"
|
||||
import React from "react"
|
||||
|
||||
export class MyComponent {
|
||||
private data: string[] = []
|
||||
|
||||
async loadData(): Promise<void> {
|
||||
const content = await readFile("file.txt", "utf-8")
|
||||
this.data = content.split("\\n")
|
||||
}
|
||||
|
||||
render() {
|
||||
return this.data.map(line => <div>{line}</div>)
|
||||
}
|
||||
}
|
||||
|
||||
export function createComponent(): MyComponent {
|
||||
return new MyComponent()
|
||||
}
|
||||
`
|
||||
const ast = parser.parse(content, "tsx")
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
allASTs.set("/project/src/Component.tsx", ast)
|
||||
|
||||
const meta = analyzer.analyze("/project/src/Component.tsx", ast, content, allASTs)
|
||||
|
||||
expect(meta.complexity.loc).toBeGreaterThan(10)
|
||||
expect(meta.dependencies).toContain("/project/src/helper.ts")
|
||||
expect(meta.fileType).toBe("source")
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,278 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"
|
||||
import { Watchdog, type FileChangeEvent } from "../../../../src/infrastructure/indexer/Watchdog.js"
|
||||
import * as fs from "node:fs/promises"
|
||||
import * as path from "node:path"
|
||||
import * as os from "node:os"
|
||||
|
||||
describe("Watchdog", () => {
|
||||
let watchdog: Watchdog
|
||||
let tempDir: string
|
||||
|
||||
beforeEach(async () => {
|
||||
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "watchdog-test-"))
|
||||
watchdog = new Watchdog({ debounceMs: 50 })
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await watchdog.stop()
|
||||
await fs.rm(tempDir, { recursive: true, force: true })
|
||||
})
|
||||
|
||||
describe("constructor", () => {
|
||||
it("should create with default options", () => {
|
||||
const wd = new Watchdog()
|
||||
expect(wd.isWatching()).toBe(false)
|
||||
expect(wd.getRoot()).toBe("")
|
||||
})
|
||||
|
||||
it("should accept custom options", () => {
|
||||
const wd = new Watchdog({
|
||||
debounceMs: 100,
|
||||
extensions: [".ts"],
|
||||
usePolling: true,
|
||||
})
|
||||
expect(wd.isWatching()).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("start/stop", () => {
|
||||
it("should start watching", () => {
|
||||
watchdog.start(tempDir)
|
||||
expect(watchdog.isWatching()).toBe(true)
|
||||
expect(watchdog.getRoot()).toBe(tempDir)
|
||||
})
|
||||
|
||||
it("should stop watching", async () => {
|
||||
watchdog.start(tempDir)
|
||||
await watchdog.stop()
|
||||
expect(watchdog.isWatching()).toBe(false)
|
||||
})
|
||||
|
||||
it("should handle stop when not started", async () => {
|
||||
await watchdog.stop()
|
||||
expect(watchdog.isWatching()).toBe(false)
|
||||
})
|
||||
|
||||
it("should restart when start called while running", async () => {
|
||||
watchdog.start(tempDir)
|
||||
const newTempDir = await fs.mkdtemp(path.join(os.tmpdir(), "watchdog-test2-"))
|
||||
|
||||
watchdog.start(newTempDir)
|
||||
expect(watchdog.isWatching()).toBe(true)
|
||||
expect(watchdog.getRoot()).toBe(newTempDir)
|
||||
|
||||
await fs.rm(newTempDir, { recursive: true, force: true })
|
||||
})
|
||||
})
|
||||
|
||||
describe("onFileChange/offFileChange", () => {
|
||||
it("should register callback", () => {
|
||||
const callback = vi.fn()
|
||||
watchdog.onFileChange(callback)
|
||||
expect(callback).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it("should remove callback", () => {
|
||||
const callback = vi.fn()
|
||||
watchdog.onFileChange(callback)
|
||||
watchdog.offFileChange(callback)
|
||||
})
|
||||
|
||||
it("should handle removing non-existent callback", () => {
|
||||
const callback = vi.fn()
|
||||
watchdog.offFileChange(callback)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getPendingCount", () => {
|
||||
it("should return 0 when no pending changes", () => {
|
||||
expect(watchdog.getPendingCount()).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getWatchedPaths", () => {
|
||||
it("should return empty array when not watching", () => {
|
||||
expect(watchdog.getWatchedPaths()).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
describe("flushAll", () => {
|
||||
it("should not throw when no pending changes", () => {
|
||||
expect(() => watchdog.flushAll()).not.toThrow()
|
||||
})
|
||||
})
|
||||
|
||||
describe("file change detection", () => {
|
||||
it("should detect new file creation", async () => {
|
||||
const events: FileChangeEvent[] = []
|
||||
watchdog.onFileChange((event) => events.push(event))
|
||||
watchdog.start(tempDir)
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
const testFile = path.join(tempDir, "test.ts")
|
||||
await fs.writeFile(testFile, "const x = 1")
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||
|
||||
expect(events.length).toBeGreaterThanOrEqual(0)
|
||||
})
|
||||
|
||||
it("should detect file modification", async () => {
|
||||
const testFile = path.join(tempDir, "test.ts")
|
||||
await fs.writeFile(testFile, "const x = 1")
|
||||
|
||||
const events: FileChangeEvent[] = []
|
||||
watchdog.onFileChange((event) => events.push(event))
|
||||
watchdog.start(tempDir)
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
await fs.writeFile(testFile, "const x = 2")
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||
|
||||
expect(events.length).toBeGreaterThanOrEqual(0)
|
||||
})
|
||||
|
||||
it("should detect file deletion", async () => {
|
||||
const testFile = path.join(tempDir, "test.ts")
|
||||
await fs.writeFile(testFile, "const x = 1")
|
||||
|
||||
const events: FileChangeEvent[] = []
|
||||
watchdog.onFileChange((event) => events.push(event))
|
||||
watchdog.start(tempDir)
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
await fs.unlink(testFile)
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||
|
||||
expect(events.length).toBeGreaterThanOrEqual(0)
|
||||
})
|
||||
|
||||
it("should ignore non-watched extensions", async () => {
|
||||
const events: FileChangeEvent[] = []
|
||||
watchdog.onFileChange((event) => events.push(event))
|
||||
watchdog.start(tempDir)
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
const txtFile = path.join(tempDir, "test.txt")
|
||||
await fs.writeFile(txtFile, "hello")
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||
|
||||
const tsEvents = events.filter((e) => e.path.endsWith(".txt"))
|
||||
expect(tsEvents.length).toBe(0)
|
||||
})
|
||||
|
||||
it("should debounce rapid changes", async () => {
|
||||
const testFile = path.join(tempDir, "test.ts")
|
||||
await fs.writeFile(testFile, "const x = 1")
|
||||
|
||||
const events: FileChangeEvent[] = []
|
||||
watchdog.onFileChange((event) => events.push(event))
|
||||
watchdog.start(tempDir)
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
await fs.writeFile(testFile, "const x = 2")
|
||||
await fs.writeFile(testFile, "const x = 3")
|
||||
await fs.writeFile(testFile, "const x = 4")
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||
|
||||
expect(events.length).toBeLessThanOrEqual(3)
|
||||
})
|
||||
})
|
||||
|
||||
describe("callback error handling", () => {
|
||||
it("should continue after callback throws", async () => {
|
||||
const events: FileChangeEvent[] = []
|
||||
watchdog.onFileChange(() => {
|
||||
throw new Error("Test error")
|
||||
})
|
||||
watchdog.onFileChange((event) => events.push(event))
|
||||
watchdog.start(tempDir)
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
const testFile = path.join(tempDir, "test.ts")
|
||||
await fs.writeFile(testFile, "const x = 1")
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||
})
|
||||
})
|
||||
|
||||
describe("custom extensions", () => {
|
||||
it("should watch only specified extensions", async () => {
|
||||
const customWatchdog = new Watchdog({
|
||||
debounceMs: 50,
|
||||
extensions: [".ts"],
|
||||
})
|
||||
|
||||
const events: FileChangeEvent[] = []
|
||||
customWatchdog.onFileChange((event) => events.push(event))
|
||||
customWatchdog.start(tempDir)
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
const tsFile = path.join(tempDir, "test.ts")
|
||||
const jsFile = path.join(tempDir, "test.js")
|
||||
await fs.writeFile(tsFile, "const x = 1")
|
||||
await fs.writeFile(jsFile, "const y = 2")
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||
|
||||
const jsEvents = events.filter((e) => e.path.endsWith(".js"))
|
||||
expect(jsEvents.length).toBe(0)
|
||||
|
||||
await customWatchdog.stop()
|
||||
})
|
||||
})
|
||||
|
||||
describe("multiple callbacks", () => {
|
||||
it("should notify all registered callbacks", async () => {
|
||||
const events1: FileChangeEvent[] = []
|
||||
const events2: FileChangeEvent[] = []
|
||||
|
||||
watchdog.onFileChange((event) => events1.push(event))
|
||||
watchdog.onFileChange((event) => events2.push(event))
|
||||
watchdog.start(tempDir)
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
const testFile = path.join(tempDir, "test.ts")
|
||||
await fs.writeFile(testFile, "const x = 1")
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||
|
||||
expect(events1.length).toBe(events2.length)
|
||||
})
|
||||
})
|
||||
|
||||
describe("event properties", () => {
|
||||
it("should include correct event type and path", async () => {
|
||||
const events: FileChangeEvent[] = []
|
||||
watchdog.onFileChange((event) => events.push(event))
|
||||
watchdog.start(tempDir)
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
const testFile = path.join(tempDir, "test.ts")
|
||||
await fs.writeFile(testFile, "const x = 1")
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||
|
||||
if (events.length > 0) {
|
||||
const event = events[0]
|
||||
expect(event.type).toMatch(/^(add|change)$/)
|
||||
expect(event.path).toContain("test.ts")
|
||||
expect(typeof event.timestamp).toBe("number")
|
||||
expect(event.timestamp).toBeLessThanOrEqual(Date.now())
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
43
pnpm-lock.yaml
generated
43
pnpm-lock.yaml
generated
@@ -141,9 +141,9 @@ importers:
|
||||
commander:
|
||||
specifier: ^11.1.0
|
||||
version: 11.1.0
|
||||
ignore:
|
||||
specifier: ^5.3.2
|
||||
version: 5.3.2
|
||||
globby:
|
||||
specifier: ^16.0.0
|
||||
version: 16.0.0
|
||||
ink:
|
||||
specifier: ^4.4.1
|
||||
version: 4.4.1(@types/react@18.3.27)(react@18.3.1)
|
||||
@@ -1471,6 +1471,10 @@ packages:
|
||||
'@sinclair/typebox@0.34.41':
|
||||
resolution: {integrity: sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==}
|
||||
|
||||
'@sindresorhus/merge-streams@4.0.0':
|
||||
resolution: {integrity: sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
'@sinonjs/commons@3.0.1':
|
||||
resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==}
|
||||
|
||||
@@ -2732,6 +2736,10 @@ packages:
|
||||
resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
globby@16.0.0:
|
||||
resolution: {integrity: sha512-ejy4TJFga99yW6Q0uhM3pFawKWZmtZzZD/v/GwI5+9bCV5Ew+D2pSND6W7fUes5UykqSsJkUfxFVdRh7Q1+P3Q==}
|
||||
engines: {node: '>=20'}
|
||||
|
||||
gopd@1.2.0:
|
||||
resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==}
|
||||
engines: {node: '>= 0.4'}
|
||||
@@ -2879,6 +2887,10 @@ packages:
|
||||
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
|
||||
engines: {node: '>=0.12.0'}
|
||||
|
||||
is-path-inside@4.0.0:
|
||||
resolution: {integrity: sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
is-stream@2.0.1:
|
||||
resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==}
|
||||
engines: {node: '>=8'}
|
||||
@@ -3712,6 +3724,10 @@ packages:
|
||||
resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
|
||||
engines: {node: '>=8'}
|
||||
|
||||
slash@5.1.0:
|
||||
resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==}
|
||||
engines: {node: '>=14.16'}
|
||||
|
||||
slice-ansi@4.0.0:
|
||||
resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==}
|
||||
engines: {node: '>=10'}
|
||||
@@ -4128,6 +4144,10 @@ packages:
|
||||
undici-types@6.21.0:
|
||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
||||
|
||||
unicorn-magic@0.4.0:
|
||||
resolution: {integrity: sha512-wH590V9VNgYH9g3lH9wWjTrUoKsjLF6sGLjhR4sH1LWpLmCOH0Zf7PukhDA8BiS7KHe4oPNkcTHqYkj7SOGUOw==}
|
||||
engines: {node: '>=20'}
|
||||
|
||||
universalify@2.0.1:
|
||||
resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==}
|
||||
engines: {node: '>= 10.0.0'}
|
||||
@@ -5614,6 +5634,8 @@ snapshots:
|
||||
|
||||
'@sinclair/typebox@0.34.41': {}
|
||||
|
||||
'@sindresorhus/merge-streams@4.0.0': {}
|
||||
|
||||
'@sinonjs/commons@3.0.1':
|
||||
dependencies:
|
||||
type-detect: 4.0.8
|
||||
@@ -7072,6 +7094,15 @@ snapshots:
|
||||
|
||||
globals@16.5.0: {}
|
||||
|
||||
globby@16.0.0:
|
||||
dependencies:
|
||||
'@sindresorhus/merge-streams': 4.0.0
|
||||
fast-glob: 3.3.3
|
||||
ignore: 7.0.5
|
||||
is-path-inside: 4.0.0
|
||||
slash: 5.1.0
|
||||
unicorn-magic: 0.4.0
|
||||
|
||||
gopd@1.2.0: {}
|
||||
|
||||
graceful-fs@4.2.11: {}
|
||||
@@ -7221,6 +7252,8 @@ snapshots:
|
||||
|
||||
is-number@7.0.0: {}
|
||||
|
||||
is-path-inside@4.0.0: {}
|
||||
|
||||
is-stream@2.0.1: {}
|
||||
|
||||
is-stream@3.0.0: {}
|
||||
@@ -8203,6 +8236,8 @@ snapshots:
|
||||
|
||||
slash@3.0.0: {}
|
||||
|
||||
slash@5.1.0: {}
|
||||
|
||||
slice-ansi@4.0.0:
|
||||
dependencies:
|
||||
ansi-styles: 4.3.0
|
||||
@@ -8610,6 +8645,8 @@ snapshots:
|
||||
|
||||
undici-types@6.21.0: {}
|
||||
|
||||
unicorn-magic@0.4.0: {}
|
||||
|
||||
universalify@2.0.1: {}
|
||||
|
||||
unrs-resolver@1.11.1:
|
||||
|
||||
Reference in New Issue
Block a user