Compare commits

...

9 Commits

Author SHA1 Message Date
imfozilbek
68f927d906 feat(ipuaro): add LLM integration module
- OllamaClient: ILLMClient implementation with tool support
- System prompt and context builders for project overview
- 18 tool definitions across 6 categories (read, edit, search, analysis, git, run)
- XML response parser for tool call extraction
- 98 new tests (419 total), 96.38% coverage
2025-12-01 00:10:11 +05:00
imfozilbek
b3e04a411c fix: normalize repository URLs in package.json 2025-11-30 01:53:57 +05:00
imfozilbek
294d085ad4 chore(ipuaro): bump version to 0.3.1 2025-11-30 01:50:33 +05:00
imfozilbek
958e4daed5 chore(guardian): bump version to 0.9.4 2025-11-30 01:50:21 +05:00
imfozilbek
6234fbce92 docs: add roadmap workflow instructions 2025-11-30 01:28:44 +05:00
imfozilbek
af9c2377a0 chore(ipuaro): bump version to 0.3.0 2025-11-30 01:25:23 +05:00
imfozilbek
d0c1ddc22e feat(ipuaro): implement indexer module (v0.3.0)
Add complete indexer infrastructure:
- FileScanner: recursive scanning with gitignore support
- ASTParser: tree-sitter based TS/JS/TSX/JSX parsing
- MetaAnalyzer: complexity metrics, dependency analysis
- IndexBuilder: symbol index and dependency graph
- Watchdog: file watching with chokidar and debouncing

321 tests, 96.38% coverage
2025-11-30 01:24:21 +05:00
imfozilbek
225480c806 feat(ipuaro): implement Redis storage module (v0.2.0)
- Add RedisClient with connection management and AOF config
- Add RedisStorage implementing full IStorage interface
- Add Redis key schema for project and session data
- Add generateProjectName() utility
- Add 68 unit tests for Redis module (159 total)
- Update ESLint: no-unnecessary-type-parameters as warn
2025-11-30 00:22:49 +05:00
imfozilbek
fd8e97af0e chore(ipuaro): bump version to 0.1.1 2025-11-29 23:25:49 +05:00
55 changed files with 8281 additions and 160 deletions

View File

@@ -447,6 +447,35 @@ Copy and use for each release:
- [ ] Published to npm (if public release)
```
## Working with Roadmap
When the user points to `ROADMAP.md` or asks about the roadmap/next steps:
1. **Read both files together:**
- `packages/<package>/ROADMAP.md` - to understand the planned features and milestones
- `packages/<package>/CHANGELOG.md` - to see what's already implemented
2. **Determine current position:**
- Check the latest version in CHANGELOG.md
- Cross-reference with ROADMAP.md milestones
- Identify which roadmap items are already completed (present in CHANGELOG)
3. **Suggest next steps:**
- Find the first uncompleted item in the current milestone
- Or identify the next milestone if current one is complete
- Present clear "start here" recommendation
**Example workflow:**
```
User: "Let's work on the roadmap" or points to ROADMAP.md
Claude should:
1. Read ROADMAP.md → See milestones v0.1.0, v0.2.0, v0.3.0...
2. Read CHANGELOG.md → See latest release is v0.1.1
3. Compare → v0.1.0 milestone complete, v0.2.0 in progress
4. Report → "v0.1.0 is complete. For v0.2.0, next item is: <feature>"
```
## Common Workflows
### Adding a new CLI option

View File

@@ -74,6 +74,7 @@ export default tseslint.config(
'@typescript-eslint/require-await': 'warn',
'@typescript-eslint/no-unnecessary-condition': 'off', // Sometimes useful for defensive coding
'@typescript-eslint/no-non-null-assertion': 'warn',
'@typescript-eslint/no-unnecessary-type-parameters': 'warn', // Allow generic JSON parsers
// ========================================
// Code Quality & Best Practices

View File

@@ -5,6 +5,26 @@ All notable changes to @samiyev/guardian will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.9.4] - 2025-11-30
### Added
- **VERSION export** - Package version is now exported from index.ts, automatically read from package.json
### Changed
- 🔄 **Refactored SecretDetector** - Reduced cyclomatic complexity from 24 to <15:
- Extracted helper methods: `extractByRuleId`, `extractAwsType`, `extractGithubType`, `extractSshType`, `extractSlackType`, `extractByMessage`
- Used lookup arrays for SSH and message type mappings
- 🔄 **Refactored AstNamingTraverser** - Reduced cyclomatic complexity from 17 to <15:
- Replaced if-else chain with Map-based node handlers
- Added `buildNodeHandlers()` method for cleaner architecture
### Quality
- **Zero lint warnings** - All ESLint warnings resolved
- **All 616 tests pass**
## [0.9.2] - 2025-11-27
### Changed

View File

@@ -1,6 +1,6 @@
{
"name": "@samiyev/guardian",
"version": "0.9.3",
"version": "0.9.4",
"description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
"keywords": [
"puaros",
@@ -40,7 +40,7 @@
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/samiyev/puaros.git",
"url": "git+https://github.com/samiyev/puaros.git",
"directory": "packages/guardian"
},
"bugs": {

View File

@@ -215,6 +215,7 @@ export class AnalyzeProject extends UseCase<
private readonly detectionPipeline: ExecuteDetection
private readonly resultAggregator: AggregateResults
// eslint-disable-next-line max-params
constructor(
fileScanner: IFileScanner,
codeParser: ICodeParser,

View File

@@ -56,6 +56,7 @@ export interface DetectionResult {
* Pipeline step responsible for running all detectors
*/
export class ExecuteDetection {
// eslint-disable-next-line max-params
constructor(
private readonly hardcodeDetector: IHardcodeDetector,
private readonly namingConventionDetector: INamingConventionDetector,

View File

@@ -171,6 +171,7 @@ export class HardcodedValue extends ValueObject<HardcodedValueProps> {
return `${CONSTANT_NAMES.MAGIC_NUMBER}_${String(value)}`
}
// eslint-disable-next-line complexity, max-lines-per-function
private suggestStringConstantName(): string {
const value = String(this.props.value)
const context = this.props.context.toLowerCase()

View File

@@ -1,3 +1,7 @@
import pkg from "../package.json"
export const VERSION = pkg.version
export * from "./domain"
export * from "./application"
export * from "./infrastructure"

View File

@@ -90,80 +90,98 @@ export class SecretDetector implements ISecretDetector {
}
private extractSecretType(message: string, ruleId: string): string {
const lowerMessage = message.toLowerCase()
const ruleBasedType = this.extractByRuleId(ruleId, lowerMessage)
if (ruleBasedType) {
return ruleBasedType
}
return this.extractByMessage(lowerMessage)
}
private extractByRuleId(ruleId: string, lowerMessage: string): string | null {
if (ruleId.includes(SECRET_KEYWORDS.AWS)) {
if (message.toLowerCase().includes(SECRET_KEYWORDS.ACCESS_KEY)) {
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
}
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
return this.extractAwsType(lowerMessage)
}
if (ruleId.includes(SECRET_KEYWORDS.GITHUB)) {
if (message.toLowerCase().includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.OAUTH)) {
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
}
return SECRET_TYPE_NAMES.GITHUB_TOKEN
return this.extractGithubType(lowerMessage)
}
if (ruleId.includes(SECRET_KEYWORDS.NPM)) {
return SECRET_TYPE_NAMES.NPM_TOKEN
}
if (ruleId.includes(SECRET_KEYWORDS.GCP) || ruleId.includes(SECRET_KEYWORDS.GOOGLE)) {
return SECRET_TYPE_NAMES.GCP_SERVICE_ACCOUNT_KEY
}
if (ruleId.includes(SECRET_KEYWORDS.PRIVATEKEY) || ruleId.includes(SECRET_KEYWORDS.SSH)) {
if (message.toLowerCase().includes(SECRET_KEYWORDS.RSA)) {
return SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.DSA)) {
return SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.ECDSA)) {
return SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.ED25519)) {
return SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY
}
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
return this.extractSshType(lowerMessage)
}
if (ruleId.includes(SECRET_KEYWORDS.SLACK)) {
if (message.toLowerCase().includes(SECRET_KEYWORDS.BOT)) {
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.USER)) {
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
}
return SECRET_TYPE_NAMES.SLACK_TOKEN
return this.extractSlackType(lowerMessage)
}
if (ruleId.includes(SECRET_KEYWORDS.BASICAUTH)) {
return SECRET_TYPE_NAMES.BASIC_AUTH_CREDENTIALS
}
return null
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.API_KEY)) {
return SECRET_TYPE_NAMES.API_KEY
private extractAwsType(lowerMessage: string): string {
if (lowerMessage.includes(SECRET_KEYWORDS.ACCESS_KEY)) {
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.TOKEN)) {
return SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN
if (lowerMessage.includes(SECRET_KEYWORDS.SECRET)) {
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
}
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.PASSWORD)) {
return SECRET_TYPE_NAMES.PASSWORD
private extractGithubType(lowerMessage: string): string {
if (lowerMessage.includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
return SECRET_TYPE_NAMES.SECRET
if (lowerMessage.includes(SECRET_KEYWORDS.OAUTH)) {
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
}
return SECRET_TYPE_NAMES.GITHUB_TOKEN
}
private extractSshType(lowerMessage: string): string {
const sshTypeMap: [string, string][] = [
[SECRET_KEYWORDS.RSA, SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY],
[SECRET_KEYWORDS.DSA, SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY],
[SECRET_KEYWORDS.ECDSA, SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY],
[SECRET_KEYWORDS.ED25519, SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY],
]
for (const [keyword, typeName] of sshTypeMap) {
if (lowerMessage.includes(keyword)) {
return typeName
}
}
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
}
private extractSlackType(lowerMessage: string): string {
if (lowerMessage.includes(SECRET_KEYWORDS.BOT)) {
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
}
if (lowerMessage.includes(SECRET_KEYWORDS.USER)) {
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
}
return SECRET_TYPE_NAMES.SLACK_TOKEN
}
private extractByMessage(lowerMessage: string): string {
const messageTypeMap: [string, string][] = [
[SECRET_KEYWORDS.API_KEY, SECRET_TYPE_NAMES.API_KEY],
[SECRET_KEYWORDS.TOKEN, SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN],
[SECRET_KEYWORDS.PASSWORD, SECRET_TYPE_NAMES.PASSWORD],
[SECRET_KEYWORDS.SECRET, SECRET_TYPE_NAMES.SECRET],
]
for (const [keyword, typeName] of messageTypeMap) {
if (lowerMessage.includes(keyword)) {
return typeName
}
}
return SECRET_TYPE_NAMES.SENSITIVE_DATA
}
}

View File

@@ -6,6 +6,13 @@ import { AstFunctionNameAnalyzer } from "./AstFunctionNameAnalyzer"
import { AstInterfaceNameAnalyzer } from "./AstInterfaceNameAnalyzer"
import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
type NodeAnalyzer = (
node: Parser.SyntaxNode,
layer: string,
filePath: string,
lines: string[],
) => NamingViolation | null
/**
* AST tree traverser for detecting naming convention violations
*
@@ -13,12 +20,16 @@ import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
* to detect naming violations in classes, interfaces, functions, and variables.
*/
export class AstNamingTraverser {
private readonly nodeHandlers: Map<string, NodeAnalyzer>
constructor(
private readonly classAnalyzer: AstClassNameAnalyzer,
private readonly interfaceAnalyzer: AstInterfaceNameAnalyzer,
private readonly functionAnalyzer: AstFunctionNameAnalyzer,
private readonly variableAnalyzer: AstVariableNameAnalyzer,
) {}
) {
this.nodeHandlers = this.buildNodeHandlers()
}
/**
* Traverses the AST tree and collects naming violations
@@ -38,6 +49,33 @@ export class AstNamingTraverser {
return results
}
/**
 * Builds the node-type → analyzer dispatch table used by the traverser.
 *
 * Each entry maps a tree-sitter node type to a closure that delegates to the
 * matching analyzer. Handlers that serve several node types (functions,
 * variables) are created once and registered under every applicable key.
 *
 * @returns Map keyed by AST node type, valued by the analyzer callback.
 */
private buildNodeHandlers(): Map<string, NodeAnalyzer> {
const handlers = new Map<string, NodeAnalyzer>()
handlers.set(AST_CLASS_TYPES.CLASS_DECLARATION, (node, layer, filePath, lines) =>
this.classAnalyzer.analyze(node, layer, filePath, lines),
)
handlers.set(AST_CLASS_TYPES.INTERFACE_DECLARATION, (node, layer, filePath, lines) =>
this.interfaceAnalyzer.analyze(node, layer, filePath, lines),
)
// One shared handler for all function-like node types.
const functionHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
this.functionAnalyzer.analyze(node, layer, filePath, lines)
handlers.set(AST_FUNCTION_TYPES.FUNCTION_DECLARATION, functionHandler)
handlers.set(AST_FUNCTION_TYPES.METHOD_DEFINITION, functionHandler)
handlers.set(AST_FUNCTION_TYPES.FUNCTION_SIGNATURE, functionHandler)
// One shared handler for all variable-like node types (declarators,
// parameters, class fields, interface property signatures).
const variableHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
this.variableAnalyzer.analyze(node, layer, filePath, lines)
handlers.set(AST_VARIABLE_TYPES.VARIABLE_DECLARATOR, variableHandler)
handlers.set(AST_VARIABLE_TYPES.REQUIRED_PARAMETER, variableHandler)
handlers.set(AST_VARIABLE_TYPES.OPTIONAL_PARAMETER, variableHandler)
handlers.set(AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION, variableHandler)
handlers.set(AST_VARIABLE_TYPES.PROPERTY_SIGNATURE, variableHandler)
return handlers
}
/**
* Recursively visits AST nodes
*/
@@ -49,34 +87,10 @@ export class AstNamingTraverser {
results: NamingViolation[],
): void {
const node = cursor.currentNode
const handler = this.nodeHandlers.get(node.type)
if (node.type === AST_CLASS_TYPES.CLASS_DECLARATION) {
const violation = this.classAnalyzer.analyze(node, layer, filePath, lines)
if (violation) {
results.push(violation)
}
} else if (node.type === AST_CLASS_TYPES.INTERFACE_DECLARATION) {
const violation = this.interfaceAnalyzer.analyze(node, layer, filePath, lines)
if (violation) {
results.push(violation)
}
} else if (
node.type === AST_FUNCTION_TYPES.FUNCTION_DECLARATION ||
node.type === AST_FUNCTION_TYPES.METHOD_DEFINITION ||
node.type === AST_FUNCTION_TYPES.FUNCTION_SIGNATURE
) {
const violation = this.functionAnalyzer.analyze(node, layer, filePath, lines)
if (violation) {
results.push(violation)
}
} else if (
node.type === AST_VARIABLE_TYPES.VARIABLE_DECLARATOR ||
node.type === AST_VARIABLE_TYPES.REQUIRED_PARAMETER ||
node.type === AST_VARIABLE_TYPES.OPTIONAL_PARAMETER ||
node.type === AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION ||
node.type === AST_VARIABLE_TYPES.PROPERTY_SIGNATURE
) {
const violation = this.variableAnalyzer.analyze(node, layer, filePath, lines)
if (handler) {
const violation = handler(node, layer, filePath, lines)
if (violation) {
results.push(violation)
}

View File

@@ -5,6 +5,171 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.4.0] - 2025-11-30 - LLM Integration
### Added
- **OllamaClient (0.4.1)**
- Full `ILLMClient` implementation for Ollama SDK
- Chat completion with tool/function calling support
- Token counting via estimation (Ollama has no tokenizer API)
- Model management: `pullModel()`, `hasModel()`, `listModels()`
- Connection status check: `isAvailable()`
- Request abortion support: `abort()`
- Error handling with `IpuaroError` for connection and model errors
- 21 unit tests
- **System Prompt & Context Builder (0.4.2)**
- `SYSTEM_PROMPT`: Comprehensive agent instructions with tool descriptions
- `buildInitialContext()`: Generates compact project overview from structure and ASTs
- `buildFileContext()`: Detailed file context with imports, exports, functions, classes
- `truncateContext()`: Token-aware context truncation
- Hub/entry point/complexity flags in file summaries
- 17 unit tests
- **Tool Definitions (0.4.3)**
- 18 tool definitions across 6 categories:
- Read: `get_lines`, `get_function`, `get_class`, `get_structure`
- Edit: `edit_lines`, `create_file`, `delete_file`
- Search: `find_references`, `find_definition`
- Analysis: `get_dependencies`, `get_dependents`, `get_complexity`, `get_todos`
- Git: `git_status`, `git_diff`, `git_commit`
- Run: `run_command`, `run_tests`
- Category groupings: `READ_TOOLS`, `EDIT_TOOLS`, etc.
- `CONFIRMATION_TOOLS` set for tools requiring user approval
- Helper functions: `requiresConfirmation()`, `getToolDef()`, `getToolsByCategory()`
- 39 unit tests
- **Response Parser (0.4.4)**
- XML tool call parsing: `<tool_call name="...">...</tool_call>`
- Parameter extraction from XML elements
- Type coercion: boolean, number, null, JSON arrays/objects
- `extractThinking()`: Extracts `<thinking>...</thinking>` blocks
- `hasToolCalls()`: Quick check for tool call presence
- `validateToolCallParams()`: Parameter validation against required list
- `formatToolCallsAsXml()`: Tool calls to XML for prompt injection
- 21 unit tests
### Changed
- Total tests: 419 (was 321)
- Coverage: 96.38%
---
## [0.3.1] - 2025-11-30
### Added
- **VERSION export** - Package version is now exported from index.ts, automatically read from package.json via `createRequire`
### Changed
- 🔄 **Refactored ASTParser** - Reduced complexity and nesting depth:
- Extracted `extractClassHeritage()`, `parseHeritageClause()`, `findTypeIdentifier()`, `collectImplements()` helper methods
- Max nesting depth reduced from 5 to 4
- 🔄 **Refactored RedisStorage** - Removed unnecessary type parameter from `parseJSON()` method
### Quality
- **Zero lint warnings** - All ESLint warnings resolved
- **All 321 tests pass**
## [0.3.0] - 2025-11-30 - Indexer
### Added
- **FileScanner (0.3.1)**
- Recursive directory scanning with async generator
- `.gitignore` support via `globby` (replaced `ignore` package for ESM compatibility)
- Filters: binary files, node_modules, dist, default ignore patterns
- Progress callback for UI integration
- `isTextFile()` and `readFileContent()` static utilities
- 22 unit tests
- **ASTParser (0.3.2)**
- Tree-sitter based parsing for TS, TSX, JS, JSX
- Extracts: imports, exports, functions, classes, interfaces, type aliases
- Import classification: internal, external, builtin (using `node:module` builtinModules)
- Graceful error handling with partial AST on syntax errors
- 30 unit tests
- **MetaAnalyzer (0.3.3)**
- Complexity metrics: LOC (excluding comments), nesting depth, cyclomatic complexity, overall score
- Dependency resolution: internal imports resolved to absolute file paths
- Dependents calculation: reverse dependency lookup across all project files
- File type classification: source, test, config, types, unknown
- Entry point detection: index files, main/app/cli/server patterns, files with no dependents
- Hub detection: files with >5 dependents
- Batch analysis via `analyzeAll()` method
- 54 unit tests
- **IndexBuilder (0.3.4)**
- SymbolIndex: maps symbol names to locations for quick lookup (functions, classes, interfaces, types, variables)
- Qualified names for class methods: `ClassName.methodName`
- DepsGraph: bidirectional import mapping (`imports` and `importedBy`)
- Import resolution: handles `.js` → `.ts`, index.ts, directory imports
- `findSymbol()`: exact symbol lookup
- `searchSymbols()`: regex-based symbol search
- `findCircularDependencies()`: detect import cycles
- `getStats()`: comprehensive index statistics (symbols by type, hubs, orphans)
- 35 unit tests
- **Watchdog (0.3.5)**
- File watching with chokidar (native events + polling fallback)
- Debounced change handling (configurable, default 500ms)
- Event types: add, change, unlink
- Extension filtering (default: SUPPORTED_EXTENSIONS)
- Ignore patterns (default: DEFAULT_IGNORE_PATTERNS)
- Multiple callback support
- `flushAll()` for immediate processing
- Silent error handling for stability
- 21 unit tests
- **Infrastructure Constants**
- `tree-sitter-types.ts`: NodeType and FieldName constants for tree-sitter
- Eliminates magic strings in ASTParser
- **Dependencies**
- Added `globby` for ESM-native file globbing
- Removed `ignore` package (CJS incompatibility with nodenext)
### Changed
- Refactored ASTParser to use constants instead of magic strings
- Total tests: 321
- Coverage: 96.43%
---
## [0.2.0] - 2025-11-30
### Added
- **Redis Storage (0.2.x milestone)**
- RedisClient: connection wrapper with AOF persistence configuration
- RedisStorage: full IStorage implementation with Redis hashes
- Redis key schema: project files, AST, meta, indexes, config
- Session keys schema: data, undo stack, sessions list
- `generateProjectName()` utility for consistent project naming
- **Infrastructure Layer**
- `src/infrastructure/storage/` module
- Exports via `src/infrastructure/index.ts`
- **Testing**
- 68 new unit tests for Redis module
- 159 total tests
- 99% code coverage maintained
### Changed
- Updated ESLint config: `@typescript-eslint/no-unnecessary-type-parameters` set to warn
### Notes
Redis Storage milestone complete. Next: 0.3.0 - Indexer (FileScanner, AST Parser, Watchdog)
## [0.1.0] - 2025-11-29
### Added

View File

@@ -1,6 +1,6 @@
{
"name": "@samiyev/ipuaro",
"version": "0.1.0",
"version": "0.4.0",
"description": "Local AI agent for codebase operations with infinite context feeling",
"author": "Fozilbek Samiyev <fozilbek.samiyev@gmail.com>",
"license": "MIT",
@@ -33,28 +33,28 @@
"format": "prettier --write src"
},
"dependencies": {
"ink": "^4.4.1",
"ink-text-input": "^5.0.1",
"react": "^18.2.0",
"ioredis": "^5.4.1",
"tree-sitter": "^0.21.1",
"tree-sitter-typescript": "^0.21.2",
"tree-sitter-javascript": "^0.21.0",
"ollama": "^0.5.11",
"simple-git": "^3.27.0",
"chokidar": "^3.6.0",
"commander": "^11.1.0",
"zod": "^3.23.8",
"ignore": "^5.3.2"
"globby": "^16.0.0",
"ink": "^4.4.1",
"ink-text-input": "^5.0.1",
"ioredis": "^5.4.1",
"ollama": "^0.5.11",
"react": "^18.2.0",
"simple-git": "^3.27.0",
"tree-sitter": "^0.21.1",
"tree-sitter-javascript": "^0.21.0",
"tree-sitter-typescript": "^0.21.2",
"zod": "^3.23.8"
},
"devDependencies": {
"@types/node": "^22.10.1",
"@types/react": "^18.2.0",
"vitest": "^1.6.0",
"@vitest/coverage-v8": "^1.6.0",
"@vitest/ui": "^1.6.0",
"tsup": "^8.3.5",
"typescript": "^5.7.2"
"typescript": "^5.7.2",
"vitest": "^1.6.0"
},
"engines": {
"node": ">=20.0.0"
@@ -70,11 +70,11 @@
],
"repository": {
"type": "git",
"url": "https://github.com/samiyev/puaros.git",
"url": "git+https://github.com/samiyev/puaros.git",
"directory": "packages/ipuaro"
},
"bugs": {
"url": "https://github.com/samiyev/puaros/issues"
},
"homepage": "https://github.com/samiyev/puaros/tree/main/packages/ipuaro#readme"
}
}

View File

@@ -4,6 +4,11 @@
* Main entry point for the library.
*/
import { createRequire } from "node:module"
const require = createRequire(import.meta.url)
const pkg = require("../package.json") as { version: string }
// Domain exports
export * from "./domain/index.js"
@@ -13,5 +18,8 @@ export * from "./application/index.js"
// Shared exports
export * from "./shared/index.js"
// Infrastructure exports
export * from "./infrastructure/index.js"
// Version
export const VERSION = "0.1.0"
export const VERSION = pkg.version

View File

@@ -0,0 +1,4 @@
// Infrastructure layer exports
export * from "./storage/index.js"
export * from "./indexer/index.js"
export * from "./llm/index.js"

View File

@@ -0,0 +1,551 @@
import { builtinModules } from "node:module"
import Parser from "tree-sitter"
import TypeScript from "tree-sitter-typescript"
import JavaScript from "tree-sitter-javascript"
import {
createEmptyFileAST,
type ExportInfo,
type FileAST,
type ImportInfo,
type MethodInfo,
type ParameterInfo,
type PropertyInfo,
} from "../../domain/value-objects/FileAST.js"
import { FieldName, NodeType } from "./tree-sitter-types.js"
type Language = "ts" | "tsx" | "js" | "jsx"
type SyntaxNode = Parser.SyntaxNode
/**
* Parses source code into AST using tree-sitter.
*/
export class ASTParser {
private readonly parsers = new Map<Language, Parser>()
/** Creates the parser and registers a tree-sitter grammar per supported language. */
constructor() {
this.initializeParsers()
}
/**
 * Registers one tree-sitter parser per supported language key.
 * TypeScript and TSX require distinct grammars; a single JavaScript parser
 * instance is shared between the "js" and "jsx" keys.
 */
private initializeParsers(): void {
    const typescriptParser = new Parser()
    typescriptParser.setLanguage(TypeScript.typescript)

    const tsxParser = new Parser()
    tsxParser.setLanguage(TypeScript.tsx)

    const javascriptParser = new Parser()
    javascriptParser.setLanguage(JavaScript)

    this.parsers.set("ts", typescriptParser)
    this.parsers.set("tsx", tsxParser)
    this.parsers.set("js", javascriptParser)
    this.parsers.set("jsx", javascriptParser)
}
/**
* Parse source code and extract AST information.
*/
/**
 * Parses source code and extracts structural information into a FileAST.
 *
 * Never throws: an unsupported language or a parser crash yields an empty
 * AST with parseError set, and a file with syntax errors still yields the
 * partial AST that tree-sitter could recover, flagged as erroneous.
 *
 * @param content - Raw source text.
 * @param language - One of the registered language keys (ts/tsx/js/jsx).
 */
parse(content: string, language: Language): FileAST {
    const parser = this.parsers.get(language)
    if (!parser) {
        return {
            ...createEmptyFileAST(),
            parseError: true,
            parseErrorMessage: `Unsupported language: ${language}`,
        }
    }
    try {
        const root = parser.parse(content).rootNode
        const ast = this.extractAST(root, language)
        if (root.hasError) {
            // Keep whatever was recovered, but flag the syntax problem.
            ast.parseError = true
            ast.parseErrorMessage = "Syntax error in source code"
        }
        return ast
    } catch (error) {
        const parseErrorMessage =
            error instanceof Error ? error.message : "Unknown parse error"
        return { ...createEmptyFileAST(), parseError: true, parseErrorMessage }
    }
}
/**
 * Walks the top-level children of the parse tree and accumulates their
 * declarations into a fresh FileAST. TypeScript-only constructs (interfaces,
 * type aliases) are gated on the language flag passed down to visitNode.
 */
private extractAST(root: SyntaxNode, language: Language): FileAST {
    const result = createEmptyFileAST()
    const typescriptMode = language === "ts" || language === "tsx"
    root.children.forEach((topLevel) => {
        this.visitNode(topLevel, result, typescriptMode)
    })
    return result
}
private visitNode(node: SyntaxNode, ast: FileAST, isTypeScript: boolean): void {
switch (node.type) {
case NodeType.IMPORT_STATEMENT:
this.extractImport(node, ast)
break
case NodeType.EXPORT_STATEMENT:
this.extractExport(node, ast)
break
case NodeType.FUNCTION_DECLARATION:
this.extractFunction(node, ast, false)
break
case NodeType.LEXICAL_DECLARATION:
this.extractLexicalDeclaration(node, ast)
break
case NodeType.CLASS_DECLARATION:
this.extractClass(node, ast, false)
break
case NodeType.INTERFACE_DECLARATION:
if (isTypeScript) {
this.extractInterface(node, ast, false)
}
break
case NodeType.TYPE_ALIAS_DECLARATION:
if (isTypeScript) {
this.extractTypeAlias(node, ast, false)
}
break
}
}
/**
 * Extracts ImportInfo entries from an import statement node.
 *
 * Handles default imports, namespace imports (`* as x`), and named imports
 * (with aliases). A statement without an import clause (e.g. a bare
 * side-effect import) is recorded once under the name "*".
 */
private extractImport(node: SyntaxNode, ast: FileAST): void {
const sourceNode = node.childForFieldName(FieldName.SOURCE)
if (!sourceNode) {
return
}
// Module specifier with quotes stripped; classified as
// internal/external/builtin by classifyImport.
const from = this.getStringValue(sourceNode)
const line = node.startPosition.row + 1
const importType = this.classifyImport(from)
const importClause = node.children.find((c) => c.type === NodeType.IMPORT_CLAUSE)
if (!importClause) {
// No clause: side-effect or bare import — record the whole module.
ast.imports.push({
name: "*",
from,
line,
type: importType,
isDefault: false,
})
return
}
for (const child of importClause.children) {
if (child.type === NodeType.IDENTIFIER) {
// `import Foo from "..."` — a bare identifier is the default import.
ast.imports.push({
name: child.text,
from,
line,
type: importType,
isDefault: true,
})
} else if (child.type === NodeType.NAMESPACE_IMPORT) {
// `import * as ns from "..."` — record the local alias (or "*").
const alias = child.children.find((c) => c.type === NodeType.IDENTIFIER)
ast.imports.push({
name: alias?.text ?? "*",
from,
line,
type: importType,
isDefault: false,
})
} else if (child.type === NodeType.NAMED_IMPORTS) {
// `import { a, b as c } from "..."` — one entry per specifier,
// preferring the local alias over the original name.
for (const specifier of child.children) {
if (specifier.type === NodeType.IMPORT_SPECIFIER) {
const nameNode = specifier.childForFieldName(FieldName.NAME)
const aliasNode = specifier.childForFieldName(FieldName.ALIAS)
ast.imports.push({
name: aliasNode?.text ?? nameNode?.text ?? "",
from,
line,
type: importType,
isDefault: false,
})
}
}
}
}
}
/**
 * Extracts export information from an export statement node.
 *
 * Two shapes are handled:
 * 1. `export [default] <declaration>` — the declaration is forwarded to the
 *    matching extractor (so it also lands in functions/classes/etc.) and an
 *    ExportInfo entry is added with its kind.
 * 2. `export { a, b }` re-export clauses — each specifier is recorded as a
 *    non-default export of kind "variable".
 */
private extractExport(node: SyntaxNode, ast: FileAST): void {
// Presence of the `default` keyword among the children marks a default export.
const isDefault = node.children.some((c) => c.type === NodeType.DEFAULT)
const declaration = node.childForFieldName(FieldName.DECLARATION)
if (declaration) {
switch (declaration.type) {
case NodeType.FUNCTION_DECLARATION:
this.extractFunction(declaration, ast, true)
this.addExportInfo(ast, declaration, "function", isDefault)
break
case NodeType.CLASS_DECLARATION:
this.extractClass(declaration, ast, true)
this.addExportInfo(ast, declaration, "class", isDefault)
break
case NodeType.INTERFACE_DECLARATION:
this.extractInterface(declaration, ast, true)
this.addExportInfo(ast, declaration, "interface", isDefault)
break
case NodeType.TYPE_ALIAS_DECLARATION:
this.extractTypeAlias(declaration, ast, true)
this.addExportInfo(ast, declaration, "type", isDefault)
break
case NodeType.LEXICAL_DECLARATION:
// `export const x = ...` — the declaration extractor records the
// ExportInfo itself, so no addExportInfo call here.
this.extractLexicalDeclaration(declaration, ast, true)
break
}
}
// `export { a, b }` style clauses (no attached declaration).
const exportClause = node.children.find((c) => c.type === NodeType.EXPORT_CLAUSE)
if (exportClause) {
for (const specifier of exportClause.children) {
if (specifier.type === NodeType.EXPORT_SPECIFIER) {
const nameNode = specifier.childForFieldName(FieldName.NAME)
if (nameNode) {
ast.exports.push({
name: nameNode.text,
line: node.startPosition.row + 1,
isDefault: false,
kind: "variable",
})
}
}
}
}
}
/**
 * Records a named function declaration (parameters, async flag, line span,
 * optional return type with the leading ":" stripped). Declarations without
 * a name field are skipped.
 */
private extractFunction(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
    const nameNode = node.childForFieldName(FieldName.NAME)
    if (!nameNode) {
        return
    }
    const returnType = node
        .childForFieldName(FieldName.RETURN_TYPE)
        ?.text?.replace(/^:\s*/, "")
    ast.functions.push({
        name: nameNode.text,
        lineStart: node.startPosition.row + 1,
        lineEnd: node.endPosition.row + 1,
        params: this.extractParameters(node),
        isAsync: node.children.some((child) => child.type === NodeType.ASYNC),
        isExported,
        returnType,
    })
}
/**
 * Extracts declarators from a `const`/`let` declaration.
 *
 * Arrow functions and function expressions assigned to a binding are
 * recorded as functions (and, when exported, also as function exports).
 * Other exported bindings are recorded as variable exports. Non-exported
 * plain variables are ignored.
 */
private extractLexicalDeclaration(node: SyntaxNode, ast: FileAST, isExported = false): void {
for (const child of node.children) {
if (child.type === NodeType.VARIABLE_DECLARATOR) {
const nameNode = child.childForFieldName(FieldName.NAME)
const valueNode = child.childForFieldName(FieldName.VALUE)
if (
valueNode?.type === NodeType.ARROW_FUNCTION ||
valueNode?.type === NodeType.FUNCTION
) {
// `const f = () => ...` / `const f = function ...` — treat as a function.
// Line span covers the whole declaration statement, not just the value.
const params = this.extractParameters(valueNode)
const isAsync = valueNode.children.some((c) => c.type === NodeType.ASYNC)
ast.functions.push({
name: nameNode?.text ?? "",
lineStart: node.startPosition.row + 1,
lineEnd: node.endPosition.row + 1,
params,
isAsync,
isExported,
})
if (isExported) {
ast.exports.push({
name: nameNode?.text ?? "",
line: node.startPosition.row + 1,
isDefault: false,
kind: "function",
})
}
} else if (isExported && nameNode) {
// Exported non-function binding, e.g. `export const VERSION = "..."`.
ast.exports.push({
name: nameNode.text,
line: node.startPosition.row + 1,
isDefault: false,
kind: "variable",
})
}
}
}
}
private extractClass(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
const nameNode = node.childForFieldName(FieldName.NAME)
if (!nameNode) {
return
}
const body = node.childForFieldName(FieldName.BODY)
const methods: MethodInfo[] = []
const properties: PropertyInfo[] = []
if (body) {
for (const member of body.children) {
if (member.type === NodeType.METHOD_DEFINITION) {
methods.push(this.extractMethod(member))
} else if (
member.type === NodeType.PUBLIC_FIELD_DEFINITION ||
member.type === NodeType.FIELD_DEFINITION
) {
properties.push(this.extractProperty(member))
}
}
}
const { extendsName, implementsList } = this.extractClassHeritage(node)
const isAbstract = node.children.some((c) => c.type === NodeType.ABSTRACT)
ast.classes.push({
name: nameNode.text,
lineStart: node.startPosition.row + 1,
lineEnd: node.endPosition.row + 1,
methods,
properties,
extends: extendsName,
implements: implementsList,
isExported,
isAbstract,
})
}
/**
 * Collects the base class name and implemented interface names of a class
 * node. Handles both grammars: a wrapping `class_heritage` node (delegated
 * to parseHeritageClause) and a direct `extends_clause` child.
 */
private extractClassHeritage(node: SyntaxNode): {
    extendsName: string | undefined
    implementsList: string[]
} {
    let extendsName: string | undefined = undefined
    const implementsList: string[] = []
    for (const child of node.children) {
        switch (child.type) {
            case NodeType.CLASS_HERITAGE:
                this.parseHeritageClause(
                    child,
                    (name) => {
                        extendsName = name
                    },
                    implementsList,
                )
                break
            case NodeType.EXTENDS_CLAUSE:
                extendsName = this.findTypeIdentifier(child)
                break
        }
    }
    return { extendsName, implementsList }
}
/**
 * Walks the children of a `class_heritage` node, reporting the extended
 * class via the setExtends callback and appending implemented interface
 * names to implementsList.
 */
private parseHeritageClause(
    heritage: SyntaxNode,
    setExtends: (name: string) => void,
    implementsList: string[],
): void {
    for (const clause of heritage.children) {
        if (clause.type === NodeType.IMPLEMENTS_CLAUSE) {
            this.collectImplements(clause, implementsList)
            continue
        }
        if (clause.type !== NodeType.EXTENDS_CLAUSE) {
            continue
        }
        const baseName = this.findTypeIdentifier(clause)
        if (baseName !== undefined) {
            setExtends(baseName)
        }
    }
}
/**
 * Return the text of the first type_identifier/identifier child, if any.
 */
private findTypeIdentifier(node: SyntaxNode): string | undefined {
    for (const child of node.children) {
        if (child.type === NodeType.TYPE_IDENTIFIER || child.type === NodeType.IDENTIFIER) {
            return child.text
        }
    }
    return undefined
}
private collectImplements(clause: SyntaxNode, list: string[]): void {
for (const impl of clause.children) {
if (impl.type === NodeType.TYPE_IDENTIFIER || impl.type === NodeType.IDENTIFIER) {
list.push(impl.text)
}
}
}
/**
 * Convert a method_definition node into MethodInfo.
 * Visibility defaults to "public" unless an accessibility modifier token
 * is present among the node's children.
 */
private extractMethod(node: SyntaxNode): MethodInfo {
    const modifier = node.children.find(
        (child) => child.type === NodeType.ACCESSIBILITY_MODIFIER,
    )
    const visibility = (modifier?.text ?? "public") as "public" | "private" | "protected"
    return {
        name: node.childForFieldName(FieldName.NAME)?.text ?? "",
        lineStart: node.startPosition.row + 1,
        lineEnd: node.endPosition.row + 1,
        params: this.extractParameters(node),
        isAsync: node.children.some((child) => child.type === NodeType.ASYNC),
        visibility,
        isStatic: node.children.some((child) => child.type === NodeType.STATIC),
    }
}
/**
 * Convert a class field definition node into PropertyInfo.
 * `readonly` is detected by token text (matching the tree-sitter grammar,
 * which exposes the modifier as a plain token rather than a named type).
 */
private extractProperty(node: SyntaxNode): PropertyInfo {
    const modifier = node.children.find(
        (child) => child.type === NodeType.ACCESSIBILITY_MODIFIER,
    )
    return {
        name: node.childForFieldName(FieldName.NAME)?.text ?? "",
        line: node.startPosition.row + 1,
        type: node.childForFieldName(FieldName.TYPE)?.text,
        visibility: (modifier?.text ?? "public") as "public" | "private" | "protected",
        isStatic: node.children.some((child) => child.type === NodeType.STATIC),
        isReadonly: node.children.some((child) => child.text === NodeType.READONLY),
    }
}
private extractInterface(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
const nameNode = node.childForFieldName(FieldName.NAME)
if (!nameNode) {
return
}
const body = node.childForFieldName(FieldName.BODY)
const properties: PropertyInfo[] = []
if (body) {
for (const member of body.children) {
if (member.type === NodeType.PROPERTY_SIGNATURE) {
const propName = member.childForFieldName(FieldName.NAME)
const propType = member.childForFieldName(FieldName.TYPE)
properties.push({
name: propName?.text ?? "",
line: member.startPosition.row + 1,
type: propType?.text,
visibility: "public",
isStatic: false,
isReadonly: member.children.some((c) => c.text === NodeType.READONLY),
})
}
}
}
const extendsList: string[] = []
const extendsClause = node.children.find((c) => c.type === NodeType.EXTENDS_TYPE_CLAUSE)
if (extendsClause) {
for (const child of extendsClause.children) {
if (child.type === NodeType.TYPE_IDENTIFIER) {
extendsList.push(child.text)
}
}
}
ast.interfaces.push({
name: nameNode.text,
lineStart: node.startPosition.row + 1,
lineEnd: node.endPosition.row + 1,
properties,
extends: extendsList,
isExported,
})
}
/**
 * Record a type alias declaration (name, line, export flag).
 * No-op when the node has no name field.
 */
private extractTypeAlias(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
    const nameNode = node.childForFieldName(FieldName.NAME)
    if (nameNode) {
        ast.typeAliases.push({
            name: nameNode.text,
            line: node.startPosition.row + 1,
            isExported,
        })
    }
}
/**
 * Extract parameter info from a function/method node.
 * Handles required, optional and bare-identifier (untyped JS) parameters.
 */
private extractParameters(node: SyntaxNode): ParameterInfo[] {
    const paramsNode = node.childForFieldName(FieldName.PARAMETERS)
    if (!paramsNode) {
        return []
    }
    const relevant = paramsNode.children.filter(
        (p) =>
            p.type === NodeType.REQUIRED_PARAMETER ||
            p.type === NodeType.OPTIONAL_PARAMETER ||
            p.type === NodeType.IDENTIFIER,
    )
    return relevant.map((param) => {
        // A bare identifier IS the name node; otherwise the name lives
        // in the parameter's pattern field.
        const nameNode =
            param.type === NodeType.IDENTIFIER
                ? param
                : param.childForFieldName(FieldName.PATTERN)
        return {
            name: nameNode?.text ?? "",
            type: param.childForFieldName(FieldName.TYPE)?.text,
            optional: param.type === NodeType.OPTIONAL_PARAMETER,
            hasDefault: param.childForFieldName(FieldName.VALUE) !== null,
        }
    })
}
private addExportInfo(
ast: FileAST,
node: SyntaxNode,
kind: ExportInfo["kind"],
isDefault: boolean,
): void {
const nameNode = node.childForFieldName(FieldName.NAME)
if (nameNode) {
ast.exports.push({
name: nameNode.text,
line: node.startPosition.row + 1,
isDefault,
kind,
})
}
}
/**
 * Classify an import specifier: internal (relative/absolute path),
 * builtin (node: prefix or a known core module), or external (package).
 */
private classifyImport(from: string): ImportInfo["type"] {
    const isPath = from.startsWith(".") || from.startsWith("/")
    if (isPath) {
        return "internal"
    }
    const isBuiltin = from.startsWith("node:") || builtinModules.includes(from)
    return isBuiltin ? "builtin" : "external"
}
/**
 * Strip matching single or double quotes from a string-literal node's
 * text. Returns the raw text unchanged when it is not quoted.
 */
private getStringValue(node: SyntaxNode): string {
    const text = node.text
    for (const quote of ['"', "'"]) {
        if (text.startsWith(quote) && text.endsWith(quote)) {
            return text.slice(1, -1)
        }
    }
    return text
}
}

View File

@@ -0,0 +1,189 @@
import * as fs from "node:fs/promises"
import type { Stats } from "node:fs"
import * as path from "node:path"
import { globby } from "globby"
import {
BINARY_EXTENSIONS,
DEFAULT_IGNORE_PATTERNS,
SUPPORTED_EXTENSIONS,
} from "../../domain/constants/index.js"
import type { ScanResult } from "../../domain/services/IIndexer.js"
/**
* Progress callback for file scanning.
*/
export interface ScanProgress {
    /** 1-based index of the file currently being processed. */
    current: number
    /** Total number of files discovered by the glob. */
    total: number
    /** Project-relative path of the file being processed. */
    currentFile: string
}
/**
 * Options for FileScanner.
 */
export interface FileScannerOptions {
    /** Additional ignore patterns (besides .gitignore and defaults) */
    additionalIgnore?: string[]
    /** Only include files with these extensions. Defaults to SUPPORTED_EXTENSIONS. */
    extensions?: readonly string[]
    /** Callback for progress updates */
    onProgress?: (progress: ScanProgress) => void
}
/**
 * Recursively scans a project directory using globby.
 * Honors .gitignore, the default ignore list, and skips binary files.
 */
export class FileScanner {
    private readonly extensions: Set<string>
    private readonly additionalIgnore: string[]
    private readonly onProgress?: (progress: ScanProgress) => void

    constructor(options: FileScannerOptions = {}) {
        const exts = options.extensions ?? SUPPORTED_EXTENSIONS
        this.extensions = new Set(exts)
        this.additionalIgnore = options.additionalIgnore ?? []
        this.onProgress = options.onProgress
    }

    /**
     * Build glob patterns from the configured extensions.
     */
    private buildGlobPatterns(): string[] {
        const bare = [...this.extensions].map((ext) => ext.replace(".", ""))
        return bare.length === 1 ? [`**/*.${bare[0]}`] : [`**/*.{${bare.join(",")}}`]
    }

    /**
     * Combine default, user-supplied and binary-extension ignore patterns.
     */
    private buildIgnorePatterns(): string[] {
        const binaryGlobs = BINARY_EXTENSIONS.map((ext) => `**/*${ext}`)
        return [...DEFAULT_IGNORE_PATTERNS, ...this.additionalIgnore, ...binaryGlobs]
    }

    /**
     * Scan a directory, yielding one ScanResult per readable file.
     * @param root - Root directory to scan
     */
    async *scan(root: string): AsyncGenerator<ScanResult> {
        const files = await globby(this.buildGlobPatterns(), {
            cwd: root,
            gitignore: true,
            ignore: this.buildIgnorePatterns(),
            absolute: false,
            onlyFiles: true,
            followSymbolicLinks: false,
        })
        let processed = 0
        for (const relativePath of files) {
            processed += 1
            this.reportProgress(relativePath, processed, files.length)
            const stats = await this.safeStats(path.join(root, relativePath))
            if (!stats) {
                continue
            }
            yield {
                path: relativePath,
                type: "file",
                size: stats.size,
                lastModified: stats.mtimeMs,
            }
        }
    }

    /**
     * Scan and collect every result into an array.
     */
    async scanAll(root: string): Promise<ScanResult[]> {
        const collected: ScanResult[] = []
        for await (const entry of this.scan(root)) {
            collected.push(entry)
        }
        return collected
    }

    /**
     * Whether the file's extension (case-insensitive) is accepted.
     */
    isSupportedExtension(filePath: string): boolean {
        return this.extensions.has(path.extname(filePath).toLowerCase())
    }

    /**
     * Stat a file, returning null instead of throwing on failure.
     */
    private async safeStats(filePath: string): Promise<Stats | null> {
        try {
            return await fs.stat(filePath)
        } catch {
            return null
        }
    }

    /**
     * Invoke the progress callback when one was provided.
     */
    private reportProgress(currentFile: string, current: number, total: number): void {
        this.onProgress?.({ current, total, currentFile })
    }

    /**
     * Heuristic text check: read up to the first 8KB and reject on any
     * NUL byte. Unreadable files are reported as non-text.
     */
    static async isTextFile(filePath: string): Promise<boolean> {
        try {
            const handle = await fs.open(filePath, "r")
            try {
                const buffer = Buffer.alloc(8192)
                const { bytesRead } = await handle.read(buffer, 0, 8192, 0)
                // Empty files count as text; any NUL in the sample means binary.
                return !buffer.subarray(0, bytesRead).includes(0)
            } finally {
                await handle.close()
            }
        } catch {
            return false
        }
    }

    /**
     * Read file content as UTF-8, or null when binary/unreadable.
     */
    static async readFileContent(filePath: string): Promise<string | null> {
        const looksText = await FileScanner.isTextFile(filePath)
        if (!looksText) {
            return null
        }
        try {
            return await fs.readFile(filePath, "utf-8")
        } catch {
            return null
        }
    }
}

View File

@@ -0,0 +1,406 @@
import * as path from "node:path"
import type { FileAST } from "../../domain/value-objects/FileAST.js"
import type { DepsGraph, SymbolIndex, SymbolLocation } from "../../domain/services/IStorage.js"
/**
 * Builds searchable indexes from parsed ASTs:
 * - a symbol index (name -> locations) for quick lookup, and
 * - a bidirectional file-dependency graph (imports / importedBy).
 */
export class IndexBuilder {
    // NOTE(review): projectRoot is stored but never read by any method in
    // this class — confirm whether it is reserved for future use or dead.
    private readonly projectRoot: string
    constructor(projectRoot: string) {
        this.projectRoot = projectRoot
    }
    /**
     * Build symbol index from all ASTs.
     * Maps symbol names to their locations for quick lookup.
     */
    buildSymbolIndex(asts: Map<string, FileAST>): SymbolIndex {
        const index: SymbolIndex = new Map()
        for (const [filePath, ast] of asts) {
            this.indexFunctions(filePath, ast, index)
            this.indexClasses(filePath, ast, index)
            this.indexInterfaces(filePath, ast, index)
            this.indexTypeAliases(filePath, ast, index)
            this.indexExportedVariables(filePath, ast, index)
        }
        return index
    }
    /**
     * Index function declarations.
     */
    private indexFunctions(filePath: string, ast: FileAST, index: SymbolIndex): void {
        for (const func of ast.functions) {
            this.addSymbol(index, func.name, {
                path: filePath,
                line: func.lineStart,
                type: "function",
            })
        }
    }
    /**
     * Index class declarations.
     * Methods are additionally indexed under the qualified "Class.method"
     * name with type "function".
     */
    private indexClasses(filePath: string, ast: FileAST, index: SymbolIndex): void {
        for (const cls of ast.classes) {
            this.addSymbol(index, cls.name, {
                path: filePath,
                line: cls.lineStart,
                type: "class",
            })
            for (const method of cls.methods) {
                const qualifiedName = `${cls.name}.${method.name}`
                this.addSymbol(index, qualifiedName, {
                    path: filePath,
                    line: method.lineStart,
                    type: "function",
                })
            }
        }
    }
    /**
     * Index interface declarations.
     */
    private indexInterfaces(filePath: string, ast: FileAST, index: SymbolIndex): void {
        for (const iface of ast.interfaces) {
            this.addSymbol(index, iface.name, {
                path: filePath,
                line: iface.lineStart,
                type: "interface",
            })
        }
    }
    /**
     * Index type alias declarations.
     */
    private indexTypeAliases(filePath: string, ast: FileAST, index: SymbolIndex): void {
        for (const typeAlias of ast.typeAliases) {
            this.addSymbol(index, typeAlias.name, {
                path: filePath,
                line: typeAlias.line,
                type: "type",
            })
        }
    }
    /**
     * Index exported variables (not functions).
     * Function-valued exports are skipped because indexFunctions has
     * already recorded them under type "function".
     */
    private indexExportedVariables(filePath: string, ast: FileAST, index: SymbolIndex): void {
        const functionNames = new Set(ast.functions.map((f) => f.name))
        for (const exp of ast.exports) {
            if (exp.kind === "variable" && !functionNames.has(exp.name)) {
                this.addSymbol(index, exp.name, {
                    path: filePath,
                    line: exp.line,
                    type: "variable",
                })
            }
        }
    }
    /**
     * Add a symbol to the index.
     * Empty names are ignored; duplicate (path, line) locations for the
     * same name are de-duplicated.
     */
    private addSymbol(index: SymbolIndex, name: string, location: SymbolLocation): void {
        if (!name) {
            return
        }
        const existing = index.get(name)
        if (existing) {
            const isDuplicate = existing.some(
                (loc) => loc.path === location.path && loc.line === location.line,
            )
            if (!isDuplicate) {
                existing.push(location)
            }
        } else {
            index.set(name, [location])
        }
    }
    /**
     * Build dependency graph from all ASTs.
     * Creates bidirectional mapping of imports.
     */
    buildDepsGraph(asts: Map<string, FileAST>): DepsGraph {
        const imports = new Map<string, string[]>()
        const importedBy = new Map<string, string[]>()
        // Pre-seed both maps so every known file has an entry, even if it
        // has no edges at all.
        for (const filePath of asts.keys()) {
            imports.set(filePath, [])
            importedBy.set(filePath, [])
        }
        for (const [filePath, ast] of asts) {
            const fileImports = this.resolveFileImports(filePath, ast, asts)
            imports.set(filePath, fileImports)
            // Mirror each forward edge into the reverse (importedBy) map.
            for (const importedFile of fileImports) {
                const dependents = importedBy.get(importedFile) ?? []
                if (!dependents.includes(filePath)) {
                    dependents.push(filePath)
                    importedBy.set(importedFile, dependents)
                }
            }
        }
        // sort() mutates in place, so the set() calls below are technically
        // redundant — kept for clarity.
        for (const [filePath, deps] of imports) {
            imports.set(filePath, deps.sort())
        }
        for (const [filePath, deps] of importedBy) {
            importedBy.set(filePath, deps.sort())
        }
        return { imports, importedBy }
    }
    /**
     * Resolve internal imports for a file.
     * Only imports classified as "internal" are resolved; results are
     * de-duplicated.
     */
    private resolveFileImports(
        filePath: string,
        ast: FileAST,
        allASTs: Map<string, FileAST>,
    ): string[] {
        const fileDir = path.dirname(filePath)
        const resolvedImports: string[] = []
        for (const imp of ast.imports) {
            if (imp.type !== "internal") {
                continue
            }
            const resolved = this.resolveImportPath(fileDir, imp.from, allASTs)
            if (resolved && !resolvedImports.includes(resolved)) {
                resolvedImports.push(resolved)
            }
        }
        return resolvedImports
    }
    /**
     * Resolve import path to actual file path.
     * Returns null when no candidate file exists in the AST map.
     */
    private resolveImportPath(
        fromDir: string,
        importPath: string,
        allASTs: Map<string, FileAST>,
    ): string | null {
        const absolutePath = path.resolve(fromDir, importPath)
        const candidates = this.getImportCandidates(absolutePath)
        for (const candidate of candidates) {
            if (allASTs.has(candidate)) {
                return candidate
            }
        }
        return null
    }
    /**
     * Generate possible file paths for an import.
     * Handles ESM-style specifiers where TS sources are imported with a
     * .js/.jsx suffix, plus extensionless and directory-index imports.
     */
    private getImportCandidates(basePath: string): string[] {
        const candidates: string[] = []
        if (/\.(ts|tsx|js|jsx)$/.test(basePath)) {
            candidates.push(basePath)
            if (basePath.endsWith(".js")) {
                candidates.push(`${basePath.slice(0, -3)}.ts`)
            } else if (basePath.endsWith(".jsx")) {
                candidates.push(`${basePath.slice(0, -4)}.tsx`)
            }
        } else {
            candidates.push(`${basePath}.ts`)
            candidates.push(`${basePath}.tsx`)
            candidates.push(`${basePath}.js`)
            candidates.push(`${basePath}.jsx`)
            candidates.push(`${basePath}/index.ts`)
            candidates.push(`${basePath}/index.tsx`)
            candidates.push(`${basePath}/index.js`)
            candidates.push(`${basePath}/index.jsx`)
        }
        return candidates
    }
    /**
     * Find all locations of a symbol by name.
     */
    findSymbol(index: SymbolIndex, name: string): SymbolLocation[] {
        return index.get(name) ?? []
    }
    /**
     * Find symbols matching a pattern (case-insensitive regex).
     * NOTE(review): `pattern` is compiled as a regex verbatim — an invalid
     * pattern will throw; confirm callers pass trusted input.
     */
    searchSymbols(index: SymbolIndex, pattern: string): Map<string, SymbolLocation[]> {
        const results = new Map<string, SymbolLocation[]>()
        const regex = new RegExp(pattern, "i")
        for (const [name, locations] of index) {
            if (regex.test(name)) {
                results.set(name, locations)
            }
        }
        return results
    }
    /**
     * Get all files that the given file depends on (imports).
     */
    getDependencies(graph: DepsGraph, filePath: string): string[] {
        return graph.imports.get(filePath) ?? []
    }
    /**
     * Get all files that depend on the given file (import it).
     */
    getDependents(graph: DepsGraph, filePath: string): string[] {
        return graph.importedBy.get(filePath) ?? []
    }
    /**
     * Find circular dependencies in the graph via DFS.
     * NOTE(review): nodes are marked globally visited, so cycles reachable
     * only through an already-visited node may not be reported — this
     * returns a representative subset, not necessarily every cycle.
     * Recursive; very deep graphs could hit the call-stack limit.
     */
    findCircularDependencies(graph: DepsGraph): string[][] {
        const cycles: string[][] = []
        const visited = new Set<string>()
        const recursionStack = new Set<string>()
        const dfs = (node: string, path: string[]): void => {
            visited.add(node)
            recursionStack.add(node)
            path.push(node)
            const deps = graph.imports.get(node) ?? []
            for (const dep of deps) {
                if (!visited.has(dep)) {
                    dfs(dep, [...path])
                } else if (recursionStack.has(dep)) {
                    // Back-edge to a node on the current DFS path => cycle.
                    const cycleStart = path.indexOf(dep)
                    if (cycleStart !== -1) {
                        const cycle = [...path.slice(cycleStart), dep]
                        const normalized = this.normalizeCycle(cycle)
                        if (!this.cycleExists(cycles, normalized)) {
                            cycles.push(normalized)
                        }
                    }
                }
            }
            recursionStack.delete(node)
        }
        for (const node of graph.imports.keys()) {
            if (!visited.has(node)) {
                dfs(node, [])
            }
        }
        return cycles
    }
    /**
     * Normalize a cycle to start with the smallest path.
     * Rotates the cycle (dropping the duplicated closing element first)
     * so equal cycles discovered from different entry points compare equal.
     */
    private normalizeCycle(cycle: string[]): string[] {
        if (cycle.length <= 1) {
            return cycle
        }
        const withoutLast = cycle.slice(0, -1)
        const minIndex = withoutLast.reduce(
            (minIdx, path, idx) => (path < withoutLast[minIdx] ? idx : minIdx),
            0,
        )
        const rotated = [...withoutLast.slice(minIndex), ...withoutLast.slice(0, minIndex)]
        rotated.push(rotated[0])
        return rotated
    }
    /**
     * Check if a cycle already exists in the list.
     * Compares by a joined string key; assumes cycles are normalized.
     */
    private cycleExists(cycles: string[][], newCycle: string[]): boolean {
        const newKey = newCycle.join("→")
        return cycles.some((cycle) => cycle.join("→") === newKey)
    }
    /**
     * Get statistics about the indexes.
     * "Hubs" are files imported by more than 5 other files; "orphans"
     * have no imports and no dependents.
     */
    getStats(
        symbolIndex: SymbolIndex,
        depsGraph: DepsGraph,
    ): {
        totalSymbols: number
        symbolsByType: Record<SymbolLocation["type"], number>
        totalFiles: number
        totalDependencies: number
        averageDependencies: number
        hubs: string[]
        orphans: string[]
    } {
        const symbolsByType: Record<SymbolLocation["type"], number> = {
            function: 0,
            class: 0,
            interface: 0,
            type: 0,
            variable: 0,
        }
        let totalSymbols = 0
        for (const locations of symbolIndex.values()) {
            totalSymbols += locations.length
            for (const loc of locations) {
                symbolsByType[loc.type]++
            }
        }
        const totalFiles = depsGraph.imports.size
        let totalDependencies = 0
        const hubs: string[] = []
        const orphans: string[] = []
        for (const [_filePath, deps] of depsGraph.imports) {
            totalDependencies += deps.length
        }
        for (const [filePath, dependents] of depsGraph.importedBy) {
            if (dependents.length > 5) {
                hubs.push(filePath)
            }
            if (dependents.length === 0 && (depsGraph.imports.get(filePath)?.length ?? 0) === 0) {
                orphans.push(filePath)
            }
        }
        return {
            totalSymbols,
            symbolsByType,
            totalFiles,
            totalDependencies,
            averageDependencies: totalFiles > 0 ? totalDependencies / totalFiles : 0,
            hubs: hubs.sort(),
            orphans: orphans.sort(),
        }
    }
}

View File

@@ -0,0 +1,448 @@
import * as path from "node:path"
import {
type ComplexityMetrics,
createFileMeta,
type FileMeta,
isHubFile,
} from "../../domain/value-objects/FileMeta.js"
import type { ClassInfo, FileAST, FunctionInfo } from "../../domain/value-objects/FileAST.js"
/**
 * Analyzes file metadata including complexity, dependencies, and classification.
 * Nesting and cyclomatic numbers are size-based estimates derived from the
 * AST's line spans, not full control-flow analysis.
 */
export class MetaAnalyzer {
    private readonly projectRoot: string

    constructor(projectRoot: string) {
        this.projectRoot = projectRoot
    }

    /**
     * Analyze a file and compute its metadata.
     * @param filePath - Absolute path to the file
     * @param ast - Parsed AST for the file
     * @param content - Raw file content (for LOC calculation)
     * @param allASTs - Map of all file paths to their ASTs (for dependents)
     */
    analyze(
        filePath: string,
        ast: FileAST,
        content: string,
        allASTs: Map<string, FileAST>,
    ): FileMeta {
        const complexity = this.calculateComplexity(ast, content)
        const dependencies = this.resolveDependencies(filePath, ast)
        const dependents = this.findDependents(filePath, allASTs)
        const fileType = this.classifyFileType(filePath)
        const isEntryPoint = this.isEntryPointFile(filePath, dependents.length)
        return createFileMeta({
            complexity,
            dependencies,
            dependents,
            isHub: isHubFile(dependents.length),
            isEntryPoint,
            fileType,
        })
    }

    /**
     * Calculate complexity metrics for a file.
     */
    calculateComplexity(ast: FileAST, content: string): ComplexityMetrics {
        const loc = this.countLinesOfCode(content)
        const nesting = this.calculateMaxNesting(ast)
        const cyclomaticComplexity = this.calculateCyclomaticComplexity(ast)
        const score = this.calculateComplexityScore(loc, nesting, cyclomaticComplexity)
        return {
            loc,
            nesting,
            cyclomaticComplexity,
            score,
        }
    }

    /**
     * Count lines of code (excluding empty lines and comments).
     * A line that closes a multi-line block comment and continues with
     * code counts as a code line.
     */
    countLinesOfCode(content: string): number {
        const lines = content.split("\n")
        let loc = 0
        let inBlockComment = false
        for (const line of lines) {
            const trimmed = line.trim()
            if (inBlockComment) {
                const closeIndex = trimmed.indexOf("*/")
                if (closeIndex === -1) {
                    continue
                }
                inBlockComment = false
                // Fix: code trailing the comment terminator used to be
                // skipped entirely; count it like any other code line.
                const afterClose = trimmed.substring(closeIndex + 2).trim()
                if (afterClose === "" || afterClose.startsWith("//")) {
                    continue
                }
                loc++
                continue
            }
            if (trimmed.startsWith("/*")) {
                if (!trimmed.includes("*/")) {
                    inBlockComment = true
                    continue
                }
                const afterComment = trimmed.substring(trimmed.indexOf("*/") + 2).trim()
                if (afterComment === "" || afterComment.startsWith("//")) {
                    continue
                }
                loc++
                continue
            }
            if (trimmed === "" || trimmed.startsWith("//")) {
                continue
            }
            loc++
        }
        return loc
    }

    /**
     * Calculate maximum nesting depth from AST (size-based estimate).
     */
    calculateMaxNesting(ast: FileAST): number {
        let maxNesting = 0
        for (const func of ast.functions) {
            const depth = this.estimateFunctionNesting(func)
            maxNesting = Math.max(maxNesting, depth)
        }
        for (const cls of ast.classes) {
            const depth = this.estimateClassNesting(cls)
            maxNesting = Math.max(maxNesting, depth)
        }
        return maxNesting
    }

    /**
     * Estimate nesting depth for a function based on line count.
     * More accurate nesting would require full AST traversal.
     */
    private estimateFunctionNesting(func: FunctionInfo): number {
        const lines = func.lineEnd - func.lineStart + 1
        if (lines <= 5) {
            return 1
        }
        if (lines <= 15) {
            return 2
        }
        if (lines <= 30) {
            return 3
        }
        if (lines <= 50) {
            return 4
        }
        return 5
    }

    /**
     * Estimate nesting depth for a class.
     * The +1 accounts for the class body itself wrapping its methods.
     */
    private estimateClassNesting(cls: ClassInfo): number {
        let maxMethodNesting = 1
        for (const method of cls.methods) {
            const lines = method.lineEnd - method.lineStart + 1
            let depth = 1
            if (lines > 5) {
                depth = 2
            }
            if (lines > 15) {
                depth = 3
            }
            if (lines > 30) {
                depth = 4
            }
            maxMethodNesting = Math.max(maxMethodNesting, depth)
        }
        return maxMethodNesting + 1
    }

    /**
     * Calculate cyclomatic complexity from AST (size-based estimate).
     * Base complexity is 1, plus an estimate per function/method.
     */
    calculateCyclomaticComplexity(ast: FileAST): number {
        let complexity = 1
        for (const func of ast.functions) {
            complexity += this.estimateFunctionComplexity(func)
        }
        for (const cls of ast.classes) {
            for (const method of cls.methods) {
                const lines = method.lineEnd - method.lineStart + 1
                complexity += Math.max(1, Math.floor(lines / 10))
            }
        }
        return complexity
    }

    /**
     * Estimate function complexity based on size.
     */
    private estimateFunctionComplexity(func: FunctionInfo): number {
        const lines = func.lineEnd - func.lineStart + 1
        return Math.max(1, Math.floor(lines / 8))
    }

    /**
     * Calculate overall complexity score (0-100), a weighted blend of
     * LOC (capped at 500), nesting (capped at 6) and cyclomatic
     * complexity (capped at 30).
     */
    calculateComplexityScore(loc: number, nesting: number, cyclomatic: number): number {
        const locWeight = 0.3
        const nestingWeight = 0.35
        const cyclomaticWeight = 0.35
        const locScore = Math.min(100, (loc / 500) * 100)
        const nestingScore = Math.min(100, (nesting / 6) * 100)
        const cyclomaticScore = Math.min(100, (cyclomatic / 30) * 100)
        const score =
            locScore * locWeight + nestingScore * nestingWeight + cyclomaticScore * cyclomaticWeight
        return Math.round(Math.min(100, score))
    }

    /**
     * Resolve internal imports to absolute file paths (sorted, de-duplicated).
     */
    resolveDependencies(filePath: string, ast: FileAST): string[] {
        const dependencies: string[] = []
        const fileDir = path.dirname(filePath)
        for (const imp of ast.imports) {
            if (imp.type !== "internal") {
                continue
            }
            const resolved = this.resolveImportPath(fileDir, imp.from)
            if (resolved && !dependencies.includes(resolved)) {
                dependencies.push(resolved)
            }
        }
        return dependencies.sort()
    }

    /**
     * Resolve a relative import path to an absolute path.
     * Returns null for paths resolving outside the project root.
     */
    private resolveImportPath(fromDir: string, importPath: string): string | null {
        const absolutePath = path.resolve(fromDir, importPath)
        const normalized = this.normalizeImportPath(absolutePath)
        if (normalized.startsWith(this.projectRoot)) {
            return normalized
        }
        return null
    }

    /**
     * Normalize import path: map ESM-style .js/.jsx specifiers to their
     * .ts/.tsx sources and default extensionless imports to .ts.
     */
    private normalizeImportPath(importPath: string): string {
        let normalized = importPath
        if (normalized.endsWith(".js")) {
            normalized = `${normalized.slice(0, -3)}.ts`
        } else if (normalized.endsWith(".jsx")) {
            normalized = `${normalized.slice(0, -4)}.tsx`
        } else if (!/\.(ts|tsx|js|jsx)$/.test(normalized)) {
            normalized = `${normalized}.ts`
        }
        return normalized
    }

    /**
     * Find all files that import the given file (sorted).
     */
    findDependents(filePath: string, allASTs: Map<string, FileAST>): string[] {
        const dependents: string[] = []
        const normalizedPath = this.normalizePathForComparison(filePath)
        for (const [otherPath, ast] of allASTs) {
            if (otherPath === filePath) {
                continue
            }
            if (this.fileImportsTarget(otherPath, ast, normalizedPath)) {
                dependents.push(otherPath)
            }
        }
        return dependents.sort()
    }

    /**
     * Check if a file imports the target path.
     */
    private fileImportsTarget(filePath: string, ast: FileAST, normalizedTarget: string): boolean {
        const fileDir = path.dirname(filePath)
        for (const imp of ast.imports) {
            if (imp.type !== "internal") {
                continue
            }
            const resolvedImport = this.resolveImportPath(fileDir, imp.from)
            if (!resolvedImport) {
                continue
            }
            const normalizedImport = this.normalizePathForComparison(resolvedImport)
            if (this.pathsMatch(normalizedTarget, normalizedImport)) {
                return true
            }
        }
        return false
    }

    /**
     * Normalize path for comparison by stripping the source extension.
     */
    private normalizePathForComparison(filePath: string): string {
        let normalized = filePath
        if (normalized.endsWith(".js")) {
            normalized = normalized.slice(0, -3)
        } else if (normalized.endsWith(".ts")) {
            normalized = normalized.slice(0, -3)
        } else if (normalized.endsWith(".jsx")) {
            normalized = normalized.slice(0, -4)
        } else if (normalized.endsWith(".tsx")) {
            normalized = normalized.slice(0, -4)
        }
        return normalized
    }

    /**
     * Check if two normalized paths match (including index resolution:
     * "dir/index" matches "dir").
     */
    private pathsMatch(path1: string, path2: string): boolean {
        if (path1 === path2) {
            return true
        }
        if (path1.endsWith("/index") && path2 === path1.slice(0, -6)) {
            return true
        }
        if (path2.endsWith("/index") && path1 === path2.slice(0, -6)) {
            return true
        }
        return false
    }

    /**
     * Classify file type based on path and name.
     * Priority: test > type declarations > config > source.
     */
    classifyFileType(filePath: string): FileMeta["fileType"] {
        const basename = path.basename(filePath)
        const lowercasePath = filePath.toLowerCase()
        if (basename.includes(".test.") || basename.includes(".spec.")) {
            return "test"
        }
        if (lowercasePath.includes("/tests/") || lowercasePath.includes("/__tests__/")) {
            return "test"
        }
        if (basename.endsWith(".d.ts")) {
            return "types"
        }
        if (lowercasePath.includes("/types/") || basename === "types.ts") {
            return "types"
        }
        const configPatterns = [
            "config",
            "tsconfig",
            "eslint",
            "prettier",
            "vitest",
            "jest",
            "babel",
            "webpack",
            "vite",
            "rollup",
        ]
        for (const pattern of configPatterns) {
            if (basename.toLowerCase().includes(pattern)) {
                return "config"
            }
        }
        if (
            filePath.endsWith(".ts") ||
            filePath.endsWith(".tsx") ||
            filePath.endsWith(".js") ||
            filePath.endsWith(".jsx")
        ) {
            return "source"
        }
        return "unknown"
    }

    /**
     * Determine if file is an entry point.
     * NOTE: a file with zero dependents counts as an entry point — so
     * orphan files are also flagged here.
     */
    isEntryPointFile(filePath: string, dependentCount: number): boolean {
        const basename = path.basename(filePath)
        if (basename.startsWith("index.")) {
            return true
        }
        if (dependentCount === 0) {
            return true
        }
        const entryPatterns = ["main.", "app.", "cli.", "server.", "index."]
        for (const pattern of entryPatterns) {
            if (basename.toLowerCase().startsWith(pattern)) {
                return true
            }
        }
        return false
    }

    /**
     * Batch analyze multiple files.
     */
    analyzeAll(files: Map<string, { ast: FileAST; content: string }>): Map<string, FileMeta> {
        const allASTs = new Map<string, FileAST>()
        for (const [filePath, { ast }] of files) {
            allASTs.set(filePath, ast)
        }
        const results = new Map<string, FileMeta>()
        for (const [filePath, { ast, content }] of files) {
            const meta = this.analyze(filePath, ast, content, allASTs)
            results.set(filePath, meta)
        }
        return results
    }
}

View File

@@ -0,0 +1,285 @@
import * as chokidar from "chokidar"
import * as path from "node:path"
import { DEFAULT_IGNORE_PATTERNS, SUPPORTED_EXTENSIONS } from "../../domain/constants/index.js"
/** Kind of filesystem event reported by the watcher. */
export type FileChangeType = "add" | "change" | "unlink"
/** A single (debounced) file-change notification. */
export interface FileChangeEvent {
    /** What happened to the file. */
    type: FileChangeType
    /** Absolute, normalized path of the affected file. */
    path: string
    /** Epoch milliseconds at which the change was recorded. */
    timestamp: number
}
/** Callback invoked for each debounced file-change event. */
export type FileChangeCallback = (event: FileChangeEvent) => void
/** Configuration accepted by Watchdog; every field is optional. */
export interface WatchdogOptions {
    /** Debounce delay in milliseconds (default: 500) */
    debounceMs?: number
    /** Patterns to ignore (default: DEFAULT_IGNORE_PATTERNS) */
    ignorePatterns?: readonly string[]
    /** File extensions to watch (default: SUPPORTED_EXTENSIONS) */
    extensions?: readonly string[]
    /** Use polling instead of native events (useful for network drives) */
    usePolling?: boolean
    /** Polling interval in milliseconds (default: 1000) */
    pollInterval?: number
}
/** WatchdogOptions with every field resolved to a concrete value. */
interface ResolvedWatchdogOptions {
    debounceMs: number
    ignorePatterns: readonly string[]
    extensions: readonly string[]
    usePolling: boolean
    pollInterval: number
}
/** Defaults applied when the corresponding WatchdogOptions field is omitted. */
const DEFAULT_OPTIONS: ResolvedWatchdogOptions = {
    debounceMs: 500,
    ignorePatterns: DEFAULT_IGNORE_PATTERNS,
    extensions: SUPPORTED_EXTENSIONS,
    usePolling: false,
    pollInterval: 1000,
}
/**
 * Watches for file changes in a directory using chokidar.
 * Events are debounced per file and delivered to registered callbacks.
 */
export class Watchdog {
    private watcher: chokidar.FSWatcher | null = null
    private readonly callbacks: FileChangeCallback[] = []
    private readonly pendingChanges = new Map<string, FileChangeEvent>()
    private readonly debounceTimers = new Map<string, NodeJS.Timeout>()
    private readonly options: ResolvedWatchdogOptions
    private root = ""
    private isRunning = false

    constructor(options: WatchdogOptions = {}) {
        this.options = { ...DEFAULT_OPTIONS, ...options }
    }

    /**
     * Start watching a directory for file changes.
     * NOTE(review): when already running, the previous watcher is closed
     * without awaiting — old and new watchers may briefly coexist; confirm
     * this is acceptable for callers.
     */
    start(root: string): void {
        if (this.isRunning) {
            void this.stop()
        }
        this.root = root
        this.isRunning = true
        const globPatterns = this.buildGlobPatterns(root)
        const ignorePatterns = this.buildIgnorePatterns()
        this.watcher = chokidar.watch(globPatterns, {
            ignored: ignorePatterns,
            persistent: true,
            ignoreInitial: true,
            usePolling: this.options.usePolling,
            interval: this.options.pollInterval,
            // Wait until writes settle before reporting, to avoid
            // delivering half-written files.
            awaitWriteFinish: {
                stabilityThreshold: 100,
                pollInterval: 100,
            },
        })
        this.watcher.on("add", (filePath) => {
            this.handleChange("add", filePath)
        })
        this.watcher.on("change", (filePath) => {
            this.handleChange("change", filePath)
        })
        this.watcher.on("unlink", (filePath) => {
            this.handleChange("unlink", filePath)
        })
        this.watcher.on("error", (error) => {
            this.handleError(error)
        })
    }

    /**
     * Stop watching: cancel pending debounce timers, drop queued changes
     * and close the underlying watcher.
     */
    async stop(): Promise<void> {
        if (!this.isRunning) {
            return
        }
        for (const timer of this.debounceTimers.values()) {
            clearTimeout(timer)
        }
        this.debounceTimers.clear()
        this.pendingChanges.clear()
        if (this.watcher) {
            await this.watcher.close()
            this.watcher = null
        }
        this.isRunning = false
    }

    /**
     * Register a callback for file change events.
     */
    onFileChange(callback: FileChangeCallback): void {
        this.callbacks.push(callback)
    }

    /**
     * Remove a previously registered callback.
     */
    offFileChange(callback: FileChangeCallback): void {
        const index = this.callbacks.indexOf(callback)
        if (index !== -1) {
            this.callbacks.splice(index, 1)
        }
    }

    /**
     * Check if the watchdog is currently running.
     */
    isWatching(): boolean {
        return this.isRunning
    }

    /**
     * Get the root directory being watched.
     */
    getRoot(): string {
        return this.root
    }

    /**
     * Get the number of pending changes waiting to be processed.
     */
    getPendingCount(): number {
        return this.pendingChanges.size
    }

    /**
     * Handle a file change event with per-file debouncing: a new event
     * for the same path replaces the pending one and restarts its timer.
     */
    private handleChange(type: FileChangeType, filePath: string): void {
        if (!this.isValidFile(filePath)) {
            return
        }
        const normalizedPath = path.resolve(filePath)
        const event: FileChangeEvent = {
            type,
            path: normalizedPath,
            timestamp: Date.now(),
        }
        this.pendingChanges.set(normalizedPath, event)
        const existingTimer = this.debounceTimers.get(normalizedPath)
        if (existingTimer) {
            clearTimeout(existingTimer)
        }
        const timer = setTimeout(() => {
            this.flushChange(normalizedPath)
        }, this.options.debounceMs)
        this.debounceTimers.set(normalizedPath, timer)
    }

    /**
     * Flush a pending change and notify callbacks.
     * Callback exceptions are swallowed so one bad listener cannot block
     * the others.
     */
    private flushChange(filePath: string): void {
        const event = this.pendingChanges.get(filePath)
        if (!event) {
            return
        }
        this.pendingChanges.delete(filePath)
        this.debounceTimers.delete(filePath)
        for (const callback of this.callbacks) {
            try {
                callback(event)
            } catch {
                // Silently ignore callback errors
            }
        }
    }

    /**
     * Handle watcher errors: log and keep running.
     */
    private handleError(error: Error): void {
        console.error(`[Watchdog] Error: ${error.message}`)
    }

    /**
     * Check if a file should be watched based on extension.
     */
    private isValidFile(filePath: string): boolean {
        const ext = path.extname(filePath)
        return this.options.extensions.includes(ext)
    }

    /**
     * Build glob patterns for watching, one per configured extension.
     */
    private buildGlobPatterns(root: string): string[] {
        return this.options.extensions.map((ext) => path.join(root, "**", `*${ext}`))
    }

    /**
     * Build ignore patterns for chokidar.
     * Glob-like patterns are converted to RegExp; literal names become
     * directory-style globs.
     */
    private buildIgnorePatterns(): (string | RegExp)[] {
        const patterns: (string | RegExp)[] = []
        for (const pattern of this.options.ignorePatterns) {
            if (pattern.includes("*")) {
                // Fix: "**" must go through a placeholder. Replacing it
                // directly with ".*" let the subsequent single-"*" pass
                // rewrite that ".*" into ".[^/]*", which silently broke
                // every cross-directory (double-star) ignore pattern.
                const doubleStar = "\u0000"
                const regexPattern = pattern
                    .replace(/\./g, "\\.")
                    .replace(/\*\*/g, doubleStar)
                    .replace(/\*/g, "[^/]*")
                    .split(doubleStar)
                    .join(".*")
                patterns.push(new RegExp(regexPattern))
            } else {
                patterns.push(`**/${pattern}/**`)
            }
        }
        return patterns
    }

    /**
     * Force flush all pending changes immediately.
     */
    flushAll(): void {
        for (const timer of this.debounceTimers.values()) {
            clearTimeout(timer)
        }
        this.debounceTimers.clear()
        for (const filePath of this.pendingChanges.keys()) {
            this.flushChange(filePath)
        }
    }

    /**
     * Get watched paths (for debugging).
     */
    getWatchedPaths(): string[] {
        if (!this.watcher) {
            return []
        }
        const watched = this.watcher.getWatched()
        const paths: string[] = []
        for (const dir of Object.keys(watched)) {
            for (const file of watched[dir]) {
                paths.push(path.join(dir, file))
            }
        }
        return paths.sort()
    }
}

View File

@@ -0,0 +1,6 @@
// Barrel exports for the indexer infrastructure module.
export * from "./FileScanner.js"
export * from "./ASTParser.js"
export * from "./MetaAnalyzer.js"
export * from "./IndexBuilder.js"
export * from "./Watchdog.js"
export * from "./tree-sitter-types.js"

View File

@@ -0,0 +1,77 @@
/**
* Tree-sitter node type constants for TypeScript/JavaScript parsing.
* These are infrastructure-level constants, not exposed to domain/application layers.
*
* Source: tree-sitter-typescript/typescript/src/node-types.json
*/
export const NodeType = {
    // Statements
    IMPORT_STATEMENT: "import_statement",
    EXPORT_STATEMENT: "export_statement",
    LEXICAL_DECLARATION: "lexical_declaration",
    // Declarations
    FUNCTION_DECLARATION: "function_declaration",
    CLASS_DECLARATION: "class_declaration",
    INTERFACE_DECLARATION: "interface_declaration",
    TYPE_ALIAS_DECLARATION: "type_alias_declaration",
    // Clauses
    IMPORT_CLAUSE: "import_clause",
    EXPORT_CLAUSE: "export_clause",
    EXTENDS_CLAUSE: "extends_clause",
    IMPLEMENTS_CLAUSE: "implements_clause",
    EXTENDS_TYPE_CLAUSE: "extends_type_clause",
    CLASS_HERITAGE: "class_heritage",
    // Import specifiers
    NAMESPACE_IMPORT: "namespace_import",
    NAMED_IMPORTS: "named_imports",
    IMPORT_SPECIFIER: "import_specifier",
    EXPORT_SPECIFIER: "export_specifier",
    // Class members
    METHOD_DEFINITION: "method_definition",
    PUBLIC_FIELD_DEFINITION: "public_field_definition",
    FIELD_DEFINITION: "field_definition",
    PROPERTY_SIGNATURE: "property_signature",
    // Parameters
    REQUIRED_PARAMETER: "required_parameter",
    OPTIONAL_PARAMETER: "optional_parameter",
    // Expressions & values
    ARROW_FUNCTION: "arrow_function",
    FUNCTION: "function",
    VARIABLE_DECLARATOR: "variable_declarator",
    // Identifiers & types
    IDENTIFIER: "identifier",
    TYPE_IDENTIFIER: "type_identifier",
    // Modifiers
    ASYNC: "async",
    STATIC: "static",
    ABSTRACT: "abstract",
    DEFAULT: "default",
    ACCESSIBILITY_MODIFIER: "accessibility_modifier",
    READONLY: "readonly",
} as const
/** Union of every node-type string literal above. */
export type NodeTypeValue = (typeof NodeType)[keyof typeof NodeType]
/**
 * Field name constants for navigating tree-sitter nodes by field.
 */
export const FieldName = {
    SOURCE: "source",
    NAME: "name",
    ALIAS: "alias",
    DECLARATION: "declaration",
    PARAMETERS: "parameters",
    RETURN_TYPE: "return_type",
    BODY: "body",
    TYPE: "type",
    PATTERN: "pattern",
    VALUE: "value",
} as const
/** Union of every field-name string literal above. */
export type FieldNameValue = (typeof FieldName)[keyof typeof FieldName]

View File

@@ -0,0 +1,302 @@
import { type Message, Ollama, type Tool } from "ollama"
import type {
ILLMClient,
LLMResponse,
ToolDef,
ToolParameter,
} from "../../domain/services/ILLMClient.js"
import type { ChatMessage } from "../../domain/value-objects/ChatMessage.js"
import { createToolCall, type ToolCall } from "../../domain/value-objects/ToolCall.js"
import type { LLMConfig } from "../../shared/constants/config.js"
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
import { estimateTokens } from "../../shared/utils/tokens.js"
/**
* Ollama LLM client implementation.
* Wraps the Ollama SDK for chat completions with tool support.
*/
export class OllamaClient implements ILLMClient {
    private readonly client: Ollama
    private readonly host: string
    private readonly model: string
    private readonly contextWindow: number
    private readonly temperature: number
    // NOTE(review): timeout is stored but never applied to any SDK call in this
    // class — confirm whether requests should be raced against it.
    private readonly timeout: number
    // NOTE(review): recreated per chat() call, but its signal is never handed to
    // the SDK, so abort() does not cancel the in-flight HTTP request — verify.
    private abortController: AbortController | null = null
    constructor(config: LLMConfig) {
        this.host = config.host
        this.client = new Ollama({ host: this.host })
        this.model = config.model
        this.contextWindow = config.contextWindow
        this.temperature = config.temperature
        this.timeout = config.timeout
    }
    /**
     * Send messages to LLM and get response.
     * Converts domain messages/tools to the Ollama wire format, performs a
     * single non-streaming chat call, and maps the reply to an LLMResponse.
     */
    async chat(messages: ChatMessage[], tools?: ToolDef[]): Promise<LLMResponse> {
        const startTime = Date.now()
        this.abortController = new AbortController()
        try {
            const ollamaMessages = this.convertMessages(messages)
            const ollamaTools = tools ? this.convertTools(tools) : undefined
            const response = await this.client.chat({
                model: this.model,
                messages: ollamaMessages,
                tools: ollamaTools,
                options: {
                    temperature: this.temperature,
                },
                stream: false,
            })
            const timeMs = Date.now() - startTime
            const toolCalls = this.extractToolCalls(response.message)
            return {
                content: response.message.content,
                toolCalls,
                // Prefer the server-reported token count; estimate when absent.
                tokens: response.eval_count ?? estimateTokens(response.message.content),
                timeMs,
                truncated: false,
                stopReason: this.determineStopReason(response, toolCalls),
            }
        } catch (error) {
            if (error instanceof Error && error.name === "AbortError") {
                throw IpuaroError.llm("Request was aborted")
            }
            throw this.handleError(error)
        } finally {
            this.abortController = null
        }
    }
    /**
     * Count tokens in text.
     * Uses estimation since Ollama doesn't provide a tokenizer endpoint.
     */
    async countTokens(text: string): Promise<number> {
        return Promise.resolve(estimateTokens(text))
    }
    /**
     * Check if LLM service is available.
     * A successful model-list call doubles as a connectivity probe.
     */
    async isAvailable(): Promise<boolean> {
        try {
            await this.client.list()
            return true
        } catch {
            return false
        }
    }
    /**
     * Get current model name.
     */
    getModelName(): string {
        return this.model
    }
    /**
     * Get context window size.
     */
    getContextWindowSize(): number {
        return this.contextWindow
    }
    /**
     * Pull/download model if not available locally.
     */
    async pullModel(model: string): Promise<void> {
        try {
            await this.client.pull({ model, stream: false })
        } catch (error) {
            throw this.handleError(error, `Failed to pull model: ${model}`)
        }
    }
    /**
     * Check if a specific model is available locally.
     * Matches both exact names and tag-qualified variants ("m" vs "m:tag").
     */
    async hasModel(model: string): Promise<boolean> {
        try {
            const result = await this.client.list()
            return result.models.some((m) => m.name === model || m.name.startsWith(`${model}:`))
        } catch {
            return false
        }
    }
    /**
     * List available models.
     */
    async listModels(): Promise<string[]> {
        try {
            const result = await this.client.list()
            return result.models.map((m) => m.name)
        } catch (error) {
            throw this.handleError(error, "Failed to list models")
        }
    }
    /**
     * Abort current generation.
     * NOTE(review): aborts only the local controller; its signal is never
     * passed to client.chat(), so the request itself keeps running — confirm.
     */
    abort(): void {
        if (this.abortController) {
            this.abortController.abort()
        }
    }
    /**
     * Convert ChatMessage array to Ollama Message format.
     * Tool results and assistant tool calls get dedicated shapes; everything
     * else falls through to a plain { role, content } message.
     */
    private convertMessages(messages: ChatMessage[]): Message[] {
        return messages.map((msg): Message => {
            const role = this.convertRole(msg.role)
            if (msg.role === "tool" && msg.toolResults) {
                return {
                    role: "tool",
                    content: msg.content,
                }
            }
            if (msg.role === "assistant" && msg.toolCalls && msg.toolCalls.length > 0) {
                return {
                    role: "assistant",
                    content: msg.content,
                    tool_calls: msg.toolCalls.map((tc) => ({
                        function: {
                            name: tc.name,
                            arguments: tc.params,
                        },
                    })),
                }
            }
            return {
                role,
                content: msg.content,
            }
        })
    }
    /**
     * Convert message role to Ollama role.
     * The default arm is unreachable for the current ChatMessage union but
     * keeps the function total if new roles are added.
     */
    private convertRole(role: ChatMessage["role"]): "user" | "assistant" | "system" | "tool" {
        switch (role) {
            case "user":
                return "user"
            case "assistant":
                return "assistant"
            case "system":
                return "system"
            case "tool":
                return "tool"
            default:
                return "user"
        }
    }
    /**
     * Convert ToolDef array to Ollama Tool format (JSON-Schema style).
     */
    private convertTools(tools: ToolDef[]): Tool[] {
        return tools.map(
            (tool): Tool => ({
                type: "function",
                function: {
                    name: tool.name,
                    description: tool.description,
                    parameters: {
                        type: "object",
                        properties: this.convertParameters(tool.parameters),
                        required: tool.parameters.filter((p) => p.required).map((p) => p.name),
                    },
                },
            }),
        )
    }
    /**
     * Convert ToolParameter array to JSON Schema properties.
     */
    private convertParameters(
        params: ToolParameter[],
    ): Record<string, { type: string; description: string; enum?: string[] }> {
        const properties: Record<string, { type: string; description: string; enum?: string[] }> =
            {}
        for (const param of params) {
            properties[param.name] = {
                type: param.type,
                description: param.description,
                // Only include "enum" when the parameter declares one.
                ...(param.enum && { enum: param.enum }),
            }
        }
        return properties
    }
    /**
     * Extract tool calls from Ollama response message.
     * IDs are synthesized locally (timestamp + index) — Ollama provides none.
     */
    private extractToolCalls(message: Message): ToolCall[] {
        if (!message.tool_calls || message.tool_calls.length === 0) {
            return []
        }
        return message.tool_calls.map((tc, index) =>
            createToolCall(
                `call_${String(Date.now())}_${String(index)}`,
                tc.function.name,
                tc.function.arguments,
            ),
        )
    }
    /**
     * Determine stop reason from response.
     * Tool calls win over everything; "length" maps truncation by the model.
     */
    private determineStopReason(
        response: { done_reason?: string },
        toolCalls: ToolCall[],
    ): "end" | "length" | "tool_use" {
        if (toolCalls.length > 0) {
            return "tool_use"
        }
        if (response.done_reason === "length") {
            return "length"
        }
        return "end"
    }
    /**
     * Handle and wrap errors.
     * Connection and missing-model failures are translated into actionable
     * messages; everything else is wrapped verbatim.
     */
    private handleError(error: unknown, context?: string): IpuaroError {
        const message = error instanceof Error ? error.message : String(error)
        const fullMessage = context ? `${context}: ${message}` : message
        if (message.includes("ECONNREFUSED") || message.includes("fetch failed")) {
            return IpuaroError.llm(`Cannot connect to Ollama at ${this.host}`)
        }
        if (message.includes("model") && message.includes("not found")) {
            return IpuaroError.llm(
                `Model "${this.model}" not found. Run: ollama pull ${this.model}`,
            )
        }
        return IpuaroError.llm(fullMessage)
    }
}

View File

@@ -0,0 +1,220 @@
import { createToolCall, type ToolCall } from "../../domain/value-objects/ToolCall.js"
/**
 * Parsed response from LLM.
 * Produced by parseToolCalls(): the free text with tool-call markup removed,
 * plus the extracted calls and any parse failures encountered.
 */
export interface ParsedResponse {
    /** Text content (excluding tool calls) */
    content: string
    /** Extracted tool calls */
    toolCalls: ToolCall[]
    /** Whether parsing encountered issues */
    hasParseErrors: boolean
    /** Parse error messages */
    parseErrors: string[]
}
/**
 * XML tool call tag pattern.
 * Matches: <tool_call name="tool_name">...</tool_call>
 *
 * NOTE(review): this regex carries the "g" flag and is shared module-wide.
 * It is safe with matchAll (which clones the regex), but RegExp.test()/exec()
 * mutate lastIndex across calls — do not use it with test().
 */
const TOOL_CALL_REGEX = /<tool_call\s+name\s*=\s*"([^"]+)">([\s\S]*?)<\/tool_call>/gi
/**
 * XML parameter tag pattern.
 * Matches: <param name="param_name">value</param> or <param_name>value</param_name>
 */
const PARAM_REGEX_NAMED = /<param\s+name\s*=\s*"([^"]+)">([\s\S]*?)<\/param>/gi
const PARAM_REGEX_ELEMENT = /<([a-z_][a-z0-9_]*)>([\s\S]*?)<\/\1>/gi
/**
 * Parse tool calls out of an LLM response.
 * Supports XML format: <tool_call name="get_lines"><path>src/index.ts</path></tool_call>
 * Matched markup is stripped from the returned content; per-call parse
 * failures are collected instead of aborting the whole parse.
 */
export function parseToolCalls(response: string): ParsedResponse {
    const toolCalls: ToolCall[] = []
    const parseErrors: string[] = []
    let content = response
    for (const match of response.matchAll(TOOL_CALL_REGEX)) {
        const fullMatch = match[0]
        const toolName = match[1]
        const paramsXml = match[2]
        try {
            toolCalls.push(
                createToolCall(
                    `xml_${String(Date.now())}_${String(toolCalls.length)}`,
                    toolName,
                    parseParameters(paramsXml),
                ),
            )
            // Remove the consumed tool-call markup from the visible text.
            content = content.replace(fullMatch, "")
        } catch (error) {
            const reason = error instanceof Error ? error.message : String(error)
            parseErrors.push(`Failed to parse tool call "${toolName}": ${reason}`)
        }
    }
    return {
        content: content.trim(),
        toolCalls,
        hasParseErrors: parseErrors.length > 0,
        parseErrors,
    }
}
/**
 * Parse parameters from XML content.
 * Prefers <param name="...">v</param> tags; falls back to bare
 * <param_name>v</param_name> elements only when no named tag matched.
 */
function parseParameters(xml: string): Record<string, unknown> {
    const params: Record<string, unknown> = {}
    let sawNamed = false
    for (const [, name, raw] of xml.matchAll(PARAM_REGEX_NAMED)) {
        params[name] = parseValue(raw)
        sawNamed = true
    }
    if (!sawNamed) {
        for (const [, name, raw] of xml.matchAll(PARAM_REGEX_ELEMENT)) {
            params[name] = parseValue(raw)
        }
    }
    return params
}
/**
 * Parse a value string to the most specific JS type:
 * boolean/null literals, then numbers, then JSON arrays/objects,
 * and finally the trimmed string itself.
 */
function parseValue(value: string): unknown {
    const text = value.trim()
    switch (text) {
        case "true":
            return true
        case "false":
            return false
        case "null":
            return null
        default:
            break
    }
    // Number("") is 0, so the empty string must be excluded explicitly.
    const asNumber = Number(text)
    if (text !== "" && !Number.isNaN(asNumber)) {
        return asNumber
    }
    const looksLikeJson =
        (text.startsWith("[") && text.endsWith("]")) ||
        (text.startsWith("{") && text.endsWith("}"))
    if (looksLikeJson) {
        try {
            return JSON.parse(text)
        } catch {
            // Malformed JSON degrades to the raw string.
            return text
        }
    }
    return text
}
/**
 * Format tool calls to XML for prompt injection.
 * Useful when you need to show the LLM the expected format.
 * Inverse of parseToolCalls() for the bare-element parameter form.
 */
export function formatToolCallsAsXml(toolCalls: ToolCall[]): string {
    return toolCalls
        .map((tc) => {
            // One "<key>value</key>" line per parameter.
            const params = Object.entries(tc.params)
                .map(([key, value]) => ` <${key}>${formatValueForXml(value)}</${key}>`)
                .join("\n")
            return `<tool_call name="${tc.name}">\n${params}\n</tool_call>`
        })
        .join("\n\n")
}
/**
 * Format a value for XML output.
 *
 * Always returns a string: JSON.stringify would return undefined for
 * functions and symbols and throws a TypeError for BigInt, so values that
 * are not JSON-serializable fall back to String(value).
 */
function formatValueForXml(value: unknown): string {
    if (value === null || value === undefined) {
        return ""
    }
    if (typeof value === "string") {
        return value
    }
    if (typeof value === "number" || typeof value === "boolean") {
        return String(value)
    }
    if (typeof value === "object") {
        return JSON.stringify(value)
    }
    // bigint, symbol, function: not JSON-serializable.
    return String(value)
}
/**
 * Extract thinking/reasoning from response.
 * Pulls every <thinking>...</thinking> span out of the text; the spans are
 * trimmed and joined with blank lines, and the remaining content is trimmed.
 */
export function extractThinking(response: string): { thinking: string; content: string } {
    const pattern = /<thinking>([\s\S]*?)<\/thinking>/gi
    const found = [...response.matchAll(pattern)]
    if (found.length === 0) {
        return { thinking: "", content: response }
    }
    const thoughts: string[] = []
    let content = response
    for (const match of found) {
        thoughts.push(match[1].trim())
        content = content.replace(match[0], "")
    }
    return {
        thinking: thoughts.join("\n\n"),
        content: content.trim(),
    }
}
/**
 * Check if response contains tool calls.
 *
 * Uses a fresh, non-global regex on every call: the shared TOOL_CALL_REGEX
 * carries the "g" flag, and RegExp.test() on a global regex advances its
 * lastIndex between calls, which made repeated checks on the same input
 * alternate between true and false.
 */
export function hasToolCalls(response: string): boolean {
    return /<tool_call\s+name\s*=\s*"[^"]+">[\s\S]*?<\/tool_call>/i.test(response)
}
/**
 * Validate tool call parameters against expected schema.
 * A required parameter counts as missing when absent, undefined, or null.
 * The toolName argument is kept for interface symmetry with callers.
 */
export function validateToolCallParams(
    toolName: string,
    params: Record<string, unknown>,
    requiredParams: string[],
): { valid: boolean; errors: string[] } {
    const errors = requiredParams
        .filter((name) => params[name] === undefined || params[name] === null)
        .map((name) => `Missing required parameter: ${name}`)
    return {
        valid: errors.length === 0,
        errors,
    }
}

View File

@@ -0,0 +1,48 @@
// LLM infrastructure exports
// Barrel module: client, prompt builders, tool definitions, response parsing.
// Ollama-backed ILLMClient implementation.
export { OllamaClient } from "./OllamaClient.js"
// System prompt and compact context builders.
export {
    SYSTEM_PROMPT,
    buildInitialContext,
    buildFileContext,
    truncateContext,
    type ProjectStructure,
} from "./prompts.js"
// Tool definitions: 18 tools across read/edit/search/analysis/git/run.
export {
    ALL_TOOLS,
    READ_TOOLS,
    EDIT_TOOLS,
    SEARCH_TOOLS,
    ANALYSIS_TOOLS,
    GIT_TOOLS,
    RUN_TOOLS,
    CONFIRMATION_TOOLS,
    requiresConfirmation,
    getToolDef,
    getToolsByCategory,
    GET_LINES_TOOL,
    GET_FUNCTION_TOOL,
    GET_CLASS_TOOL,
    GET_STRUCTURE_TOOL,
    EDIT_LINES_TOOL,
    CREATE_FILE_TOOL,
    DELETE_FILE_TOOL,
    FIND_REFERENCES_TOOL,
    FIND_DEFINITION_TOOL,
    GET_DEPENDENCIES_TOOL,
    GET_DEPENDENTS_TOOL,
    GET_COMPLEXITY_TOOL,
    GET_TODOS_TOOL,
    GIT_STATUS_TOOL,
    GIT_DIFF_TOOL,
    GIT_COMMIT_TOOL,
    RUN_COMMAND_TOOL,
    RUN_TESTS_TOOL,
} from "./toolDefs.js"
// XML tool-call extraction and formatting helpers.
export {
    parseToolCalls,
    formatToolCallsAsXml,
    extractThinking,
    hasToolCalls,
    validateToolCallParams,
    type ParsedResponse,
} from "./ResponseParser.js"

View File

@@ -0,0 +1,335 @@
import type { FileAST } from "../../domain/value-objects/FileAST.js"
import type { FileMeta } from "../../domain/value-objects/FileMeta.js"
/**
 * Project structure for context building.
 */
export interface ProjectStructure {
    /** Project display name. */
    name: string
    /** Path to the project root. */
    rootPath: string
    /** Relative paths of all indexed files. */
    files: string[]
    /** Relative paths of all directories. */
    directories: string[]
}
/**
 * System prompt for the ipuaro AI agent.
 * Declares the agent's principles, its 18 tools, and safety rules; injected
 * once per session. The string itself is runtime behavior — edit with care.
 */
export const SYSTEM_PROMPT = `You are ipuaro, a local AI code assistant specialized in helping developers understand and modify their codebase. You operate within a single project directory and have access to powerful tools for reading, searching, analyzing, and editing code.
## Core Principles
1. **Lazy Loading**: You don't have the full code in context. Use tools to fetch exactly what you need.
2. **Precision**: Always verify file paths and line numbers before making changes.
3. **Safety**: Confirm destructive operations. Never execute dangerous commands.
4. **Efficiency**: Minimize context usage. Request only necessary code sections.
## Available Tools
### Reading Tools
- \`get_lines\`: Get specific lines from a file
- \`get_function\`: Get a function by name
- \`get_class\`: Get a class by name
- \`get_structure\`: Get project directory structure
### Editing Tools (require confirmation)
- \`edit_lines\`: Replace specific lines in a file
- \`create_file\`: Create a new file
- \`delete_file\`: Delete a file
### Search Tools
- \`find_references\`: Find all usages of a symbol
- \`find_definition\`: Find where a symbol is defined
### Analysis Tools
- \`get_dependencies\`: Get files this file imports
- \`get_dependents\`: Get files that import this file
- \`get_complexity\`: Get complexity metrics
- \`get_todos\`: Find TODO/FIXME comments
### Git Tools
- \`git_status\`: Get repository status
- \`git_diff\`: Get uncommitted changes
- \`git_commit\`: Create a commit (requires confirmation)
### Run Tools
- \`run_command\`: Execute a shell command (security checked)
- \`run_tests\`: Run the test suite
## Response Guidelines
1. **Be concise**: Don't repeat information already in context.
2. **Show your work**: Explain what tools you're using and why.
3. **Verify before editing**: Always read the target code before modifying it.
4. **Handle errors gracefully**: If a tool fails, explain what went wrong and suggest alternatives.
## Code Editing Rules
1. Always use \`get_lines\` or \`get_function\` before \`edit_lines\`.
2. Provide exact line numbers for edits.
3. For large changes, break into multiple small edits.
4. After editing, suggest running tests if available.
## Safety Rules
1. Never execute commands that could harm the system.
2. Never expose sensitive data (API keys, passwords).
3. Always confirm file deletions and destructive git operations.
4. Stay within the project directory.
When you need to perform an action, use the appropriate tool. Think step by step about what information you need and which tools will provide it most efficiently.`
/**
 * Build the initial LLM context: project header, directory tree, and a
 * per-file overview derived from AST metadata — no actual source code.
 */
export function buildInitialContext(
    structure: ProjectStructure,
    asts: Map<string, FileAST>,
    metas?: Map<string, FileMeta>,
): string {
    const sections = [
        formatProjectHeader(structure),
        formatDirectoryTree(structure),
        formatFileOverview(asts, metas),
    ]
    return sections.join("\n\n")
}
/**
 * Render the "# Project" header with file and directory counts.
 */
function formatProjectHeader(structure: ProjectStructure): string {
    const files = String(structure.files.length)
    const dirs = String(structure.directories.length)
    return `# Project: ${structure.name}
Root: ${structure.rootPath}
Files: ${files} | Directories: ${dirs}`
}
/**
 * Format directory tree.
 * Each directory name is indented proportionally to its path depth;
 * files are intentionally omitted here (see formatFileOverview).
 */
function formatDirectoryTree(structure: ProjectStructure): string {
    const lines: string[] = ["## Structure", ""]
    const sortedDirs = [...structure.directories].sort()
    for (const dir of sortedDirs) {
        // Depth = number of "/" separators in the relative path.
        const depth = dir.split("/").length - 1
        const indent = " ".repeat(depth)
        const name = dir.split("/").pop() ?? dir
        lines.push(`${indent}${name}/`)
    }
    return lines.join("\n")
}
/**
 * Render the "## Files" section: one summary line per parsed file,
 * in sorted order so the prompt stays stable across runs.
 */
function formatFileOverview(asts: Map<string, FileAST>, metas?: Map<string, FileMeta>): string {
    const lines: string[] = ["## Files", ""]
    for (const filePath of [...asts.keys()].sort()) {
        const ast = asts.get(filePath)
        if (ast) {
            lines.push(formatFileSummary(filePath, ast, metas?.get(filePath)))
        }
    }
    return lines.join("\n")
}
/**
 * Render one file's summary line: symbol names grouped by kind
 * plus metadata flags (hub/entry/complex) when available.
 */
function formatFileSummary(path: string, ast: FileAST, meta?: FileMeta): string {
    const parts: string[] = []
    const add = (label: string, items: { name: string }[]): void => {
        if (items.length > 0) {
            parts.push(`${label}: ${items.map((item) => item.name).join(", ")}`)
        }
    }
    add("fn", ast.functions)
    add("class", ast.classes)
    add("interface", ast.interfaces)
    add("type", ast.typeAliases)
    const summary = parts.length > 0 ? ` [${parts.join(" | ")}]` : ""
    return `- ${path}${summary}${formatFileFlags(meta)}`
}
/**
 * Render metadata flags for a file summary line, e.g. " (hub, complex)".
 * Returns the empty string when there is no metadata or no flag applies.
 */
function formatFileFlags(meta?: FileMeta): string {
    if (!meta) {
        return ""
    }
    const flags: string[] = []
    if (meta.isHub) {
        flags.push("hub")
    }
    if (meta.isEntryPoint) {
        flags.push("entry")
    }
    // A score above 70 marks the file as complex in the overview.
    if (meta.complexity.score > 70) {
        flags.push("complex")
    }
    return flags.length === 0 ? "" : ` (${flags.join(", ")})`
}
/**
 * Render an inclusive line range as "[start-end]".
 */
function formatLineRange(start: number, end: number): string {
    return `[${String(start)}-${String(end)}]`
}
/**
 * Render the "### Imports" section, one bullet per import.
 * Returns [] when the file has no imports so callers can spread it away.
 */
function formatImportsSection(ast: FileAST): string[] {
    if (ast.imports.length === 0) {
        return []
    }
    const rows = ast.imports.map((imp) => `- ${imp.name} from "${imp.from}" (${imp.type})`)
    return ["### Imports", ...rows, ""]
}
/**
 * Render the "### Exports" section, marking default exports.
 * Returns [] when the file exports nothing.
 */
function formatExportsSection(ast: FileAST): string[] {
    if (ast.exports.length === 0) {
        return []
    }
    const rows = ast.exports.map((exp) => {
        const suffix = exp.isDefault ? " (default)" : ""
        return `- ${exp.kind} ${exp.name}${suffix}`
    })
    return ["### Exports", ...rows, ""]
}
/**
 * Render the "### Functions" section: signature sketch plus line range.
 * Returns [] when the file declares no functions.
 */
function formatFunctionsSection(ast: FileAST): string[] {
    if (ast.functions.length === 0) {
        return []
    }
    const rows = ast.functions.map((fn) => {
        const signature = fn.params.map((p) => p.name).join(", ")
        const prefix = fn.isAsync ? "async " : ""
        return `- ${prefix}${fn.name}(${signature}) ${formatLineRange(fn.lineStart, fn.lineEnd)}`
    })
    return ["### Functions", ...rows, ""]
}
/**
 * Format classes section.
 * One line per class (extends/implements plus line range), followed by a
 * nested line per method; non-public visibility is shown as a prefix.
 */
function formatClassesSection(ast: FileAST): string[] {
    if (ast.classes.length === 0) {
        return []
    }
    const lines = ["### Classes"]
    for (const cls of ast.classes) {
        const ext = cls.extends ? ` extends ${cls.extends}` : ""
        const impl = cls.implements.length > 0 ? ` implements ${cls.implements.join(", ")}` : ""
        const range = formatLineRange(cls.lineStart, cls.lineEnd)
        lines.push(`- ${cls.name}${ext}${impl} ${range}`)
        for (const method of cls.methods) {
            // "public" is the default and is omitted from the listing.
            const vis = method.visibility === "public" ? "" : `${method.visibility} `
            const methodRange = formatLineRange(method.lineStart, method.lineEnd)
            lines.push(` - ${vis}${method.name}() ${methodRange}`)
        }
    }
    lines.push("")
    return lines
}
/**
 * Render the "### Metadata" section: size, complexity, and graph degree.
 */
function formatMetadataSection(meta: FileMeta): string[] {
    return [
        "### Metadata",
        `- LOC: ${String(meta.complexity.loc)}`,
        `- Complexity: ${String(meta.complexity.score)}/100`,
        `- Dependencies: ${String(meta.dependencies.length)}`,
        `- Dependents: ${String(meta.dependents.length)}`,
    ]
}
/**
 * Build context for a specific file request: imports, exports, functions,
 * classes, and (when available) metadata, as one markdown section.
 */
export function buildFileContext(path: string, ast: FileAST, meta?: FileMeta): string {
    const lines: string[] = [`## ${path}`, ""]
    lines.push(
        ...formatImportsSection(ast),
        ...formatExportsSection(ast),
        ...formatFunctionsSection(ast),
        ...formatClassesSection(ast),
    )
    if (meta) {
        lines.push(...formatMetadataSection(meta))
    }
    return lines.join("\n")
}
/**
 * Truncate context to fit within token budget.
 *
 * Uses the ~4 chars/token heuristic. Cuts at the last newline before the
 * budget so no line is split mid-way, then appends a truncation marker.
 */
export function truncateContext(context: string, maxTokens: number): string {
    const charsPerToken = 4
    const maxChars = maxTokens * charsPerToken
    if (context.length <= maxChars) {
        return context
    }
    // Reserve room for the marker; clamp so tiny budgets don't go negative
    // (a negative slice index would trim from the end instead of the start).
    const budget = Math.max(0, maxChars - 100)
    const head = context.slice(0, budget)
    // When no newline exists before the budget, cut at the budget itself
    // instead of treating lastIndexOf's -1 as a slice index.
    const lastNewline = head.lastIndexOf("\n")
    const cut = lastNewline >= 0 ? lastNewline : head.length
    const remaining = String(context.length - cut)
    return `${context.slice(0, cut)}\n\n... (truncated, ${remaining} chars remaining)`
}

View File

@@ -0,0 +1,511 @@
import type { ToolDef } from "../../domain/services/ILLMClient.js"
/**
* Tool definitions for ipuaro LLM.
* 18 tools across 6 categories: read, edit, search, analysis, git, run.
*/
/*
* =============================================================================
* Read Tools (4)
* =============================================================================
*/
// Read: fetch raw lines (or the whole file) by path and optional 1-based range.
export const GET_LINES_TOOL: ToolDef = {
    name: "get_lines",
    description:
        "Get specific lines from a file. Returns the content with line numbers. " +
        "If no range is specified, returns the entire file.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "start",
            type: "number",
            description: "Start line number (1-based, inclusive)",
            required: false,
        },
        {
            name: "end",
            type: "number",
            description: "End line number (1-based, inclusive)",
            required: false,
        },
    ],
}
// Read: fetch a single function's source by name, located via the AST.
export const GET_FUNCTION_TOOL: ToolDef = {
    name: "get_function",
    description:
        "Get a function's source code by name. Uses AST to find exact line range. " +
        "Returns the function code with line numbers.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "name",
            type: "string",
            description: "Function name to retrieve",
            required: true,
        },
    ],
}
// Read: fetch a single class's source by name, located via the AST.
export const GET_CLASS_TOOL: ToolDef = {
    name: "get_class",
    description:
        "Get a class's source code by name. Uses AST to find exact line range. " +
        "Returns the class code with line numbers.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "name",
            type: "string",
            description: "Class name to retrieve",
            required: true,
        },
    ],
}
// Read: render the directory tree, optionally scoped and depth-limited.
export const GET_STRUCTURE_TOOL: ToolDef = {
    name: "get_structure",
    description:
        "Get project directory structure as a tree. " +
        "If path is specified, shows structure of that subdirectory only.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Subdirectory path relative to project root (optional, defaults to root)",
            required: false,
        },
        {
            name: "depth",
            type: "number",
            description: "Maximum depth to traverse (default: unlimited)",
            required: false,
        },
    ],
}
/*
* =============================================================================
* Edit Tools (3) - All require confirmation
* =============================================================================
*/
// Edit (confirmed): replace an inclusive 1-based line range with new content.
export const EDIT_LINES_TOOL: ToolDef = {
    name: "edit_lines",
    description:
        "Replace lines in a file with new content. Requires reading the file first. " +
        "Will show diff and ask for confirmation before applying.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "start",
            type: "number",
            description: "Start line number (1-based, inclusive) to replace",
            required: true,
        },
        {
            name: "end",
            type: "number",
            description: "End line number (1-based, inclusive) to replace",
            required: true,
        },
        {
            name: "content",
            type: "string",
            description: "New content to insert (can be multiple lines)",
            required: true,
        },
    ],
}
// Edit (confirmed): create a new file; fails if the path already exists.
export const CREATE_FILE_TOOL: ToolDef = {
    name: "create_file",
    description:
        "Create a new file with specified content. " +
        "Will fail if file already exists. Will ask for confirmation.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "content",
            type: "string",
            description: "File content",
            required: true,
        },
    ],
}
// Edit (confirmed): delete a file; prior content goes to the undo stack.
export const DELETE_FILE_TOOL: ToolDef = {
    name: "delete_file",
    description:
        "Delete a file from the project. " +
        "Will ask for confirmation. Previous content is saved to undo stack.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
    ],
}
/*
* =============================================================================
* Search Tools (2)
* =============================================================================
*/
// Search: list every usage site of a symbol, optionally scoped to a path.
export const FIND_REFERENCES_TOOL: ToolDef = {
    name: "find_references",
    description:
        "Find all usages of a symbol across the codebase. " +
        "Returns list of file paths, line numbers, and context.",
    parameters: [
        {
            name: "symbol",
            type: "string",
            description: "Symbol name to search for (function, class, variable, etc.)",
            required: true,
        },
        {
            name: "path",
            type: "string",
            description: "Limit search to specific file or directory",
            required: false,
        },
    ],
}
// Search: locate the declaration site of a symbol.
export const FIND_DEFINITION_TOOL: ToolDef = {
    name: "find_definition",
    description:
        "Find where a symbol is defined. " + "Returns file path, line number, and symbol type.",
    parameters: [
        {
            name: "symbol",
            type: "string",
            description: "Symbol name to find definition for",
            required: true,
        },
    ],
}
/*
* =============================================================================
* Analysis Tools (4)
* =============================================================================
*/
// Analysis: outgoing edges — files this file imports.
export const GET_DEPENDENCIES_TOOL: ToolDef = {
    name: "get_dependencies",
    description:
        "Get files that this file imports (internal dependencies). " +
        "Returns list of imported file paths.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
    ],
}
// Analysis: incoming edges — files that import this file.
export const GET_DEPENDENTS_TOOL: ToolDef = {
    name: "get_dependents",
    description:
        "Get files that import this file (reverse dependencies). " +
        "Returns list of file paths that depend on this file.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
    ],
}
// Analysis: complexity metrics for one file or a project-wide ranking.
export const GET_COMPLEXITY_TOOL: ToolDef = {
    name: "get_complexity",
    description:
        "Get complexity metrics for a file or the entire project. " +
        "Returns LOC, nesting depth, cyclomatic complexity, and overall score.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path (optional, defaults to all files sorted by complexity)",
            required: false,
        },
        {
            name: "limit",
            type: "number",
            description: "Max files to return when showing all (default: 10)",
            required: false,
        },
    ],
}
// Analysis: locate TODO/FIXME/HACK/XXX marker comments.
export const GET_TODOS_TOOL: ToolDef = {
    name: "get_todos",
    description:
        "Find TODO, FIXME, HACK, and XXX comments in the codebase. " +
        "Returns list with file paths, line numbers, and comment text.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Limit search to specific file or directory",
            required: false,
        },
        {
            name: "type",
            type: "string",
            description: "Filter by comment type",
            required: false,
            enum: ["TODO", "FIXME", "HACK", "XXX"],
        },
    ],
}
/*
* =============================================================================
* Git Tools (3)
* =============================================================================
*/
// Git: read-only repository status snapshot.
export const GIT_STATUS_TOOL: ToolDef = {
    name: "git_status",
    description:
        "Get current git repository status. " +
        "Returns branch name, staged files, modified files, and untracked files.",
    parameters: [],
}
// Git: read-only diff of uncommitted (optionally staged-only) changes.
export const GIT_DIFF_TOOL: ToolDef = {
    name: "git_diff",
    description:
        "Get uncommitted changes (diff). " + "Shows what has changed but not yet committed.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Limit diff to specific file or directory",
            required: false,
        },
        {
            name: "staged",
            type: "boolean",
            description: "Show only staged changes (default: false, shows all)",
            required: false,
        },
    ],
}
// Git (confirmed): create a commit, optionally staging files first.
export const GIT_COMMIT_TOOL: ToolDef = {
    name: "git_commit",
    description:
        "Create a git commit with the specified message. " +
        "Will ask for confirmation. Optionally stage specific files first.",
    parameters: [
        {
            name: "message",
            type: "string",
            description: "Commit message",
            required: true,
        },
        {
            name: "files",
            type: "array",
            description: "Files to stage before commit (optional, defaults to all staged)",
            required: false,
        },
    ],
}
/*
 * =============================================================================
 * Run Tools (2)
 * =============================================================================
 */

/**
 * Run tool: execute a shell command in the project directory. Per the
 * description, commands are screened against a blacklist/whitelist and
 * unknown commands require user confirmation.
 */
export const RUN_COMMAND_TOOL: ToolDef = {
    name: "run_command",
    description:
        "Execute a shell command in the project directory. " +
        "Commands are checked against blacklist/whitelist for security. " +
        "Unknown commands require user confirmation.",
    parameters: [
        {
            name: "command",
            type: "string",
            description: "Shell command to execute",
            required: true,
        },
        {
            name: "timeout",
            type: "number",
            description: "Timeout in milliseconds (default: 30000)",
            required: false,
        },
    ],
}

/**
 * Run tool: run the project's test suite with optional path/name filters.
 */
export const RUN_TESTS_TOOL: ToolDef = {
    name: "run_tests",
    description:
        "Run the project's test suite. Auto-detects test runner (vitest, jest, npm test). " +
        "Returns test results summary.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Run tests for specific file or directory",
            required: false,
        },
        {
            name: "filter",
            type: "string",
            description: "Filter tests by name pattern",
            required: false,
        },
        {
            name: "watch",
            type: "boolean",
            description: "Run in watch mode (default: false)",
            required: false,
        },
    ],
}
/*
 * =============================================================================
 * Tool Collection
 * =============================================================================
 */

/**
 * All read tools (no confirmation required).
 */
export const READ_TOOLS: ToolDef[] = [
    GET_LINES_TOOL,
    GET_FUNCTION_TOOL,
    GET_CLASS_TOOL,
    GET_STRUCTURE_TOOL,
]

/**
 * All edit tools (require confirmation).
 */
export const EDIT_TOOLS: ToolDef[] = [EDIT_LINES_TOOL, CREATE_FILE_TOOL, DELETE_FILE_TOOL]

/**
 * All search tools (no confirmation required).
 */
export const SEARCH_TOOLS: ToolDef[] = [FIND_REFERENCES_TOOL, FIND_DEFINITION_TOOL]

/**
 * All analysis tools (no confirmation required).
 */
export const ANALYSIS_TOOLS: ToolDef[] = [
    GET_DEPENDENCIES_TOOL,
    GET_DEPENDENTS_TOOL,
    GET_COMPLEXITY_TOOL,
    GET_TODOS_TOOL,
]

/**
 * All git tools (git_commit requires confirmation).
 */
export const GIT_TOOLS: ToolDef[] = [GIT_STATUS_TOOL, GIT_DIFF_TOOL, GIT_COMMIT_TOOL]

/**
 * All run tools (run_command may require confirmation).
 */
export const RUN_TOOLS: ToolDef[] = [RUN_COMMAND_TOOL, RUN_TESTS_TOOL]

/**
 * All 18 tool definitions
 * (4 read + 3 edit + 2 search + 4 analysis + 3 git + 2 run).
 */
export const ALL_TOOLS: ToolDef[] = [
    ...READ_TOOLS,
    ...EDIT_TOOLS,
    ...SEARCH_TOOLS,
    ...ANALYSIS_TOOLS,
    ...GIT_TOOLS,
    ...RUN_TOOLS,
]
/**
 * Names of the tools whose execution mutates state (file edits, commits)
 * and therefore must be approved by the user first.
 */
const TOOLS_NEEDING_CONFIRMATION: readonly string[] = [
    "edit_lines",
    "create_file",
    "delete_file",
    "git_commit",
]

/**
 * Tools that require user confirmation before execution.
 */
export const CONFIRMATION_TOOLS = new Set(TOOLS_NEEDING_CONFIRMATION)

/**
 * Check if a tool requires confirmation.
 *
 * @param toolName - Tool name as declared in its ToolDef.
 * @returns true when the tool must be confirmed before it may run.
 */
export function requiresConfirmation(toolName: string): boolean {
    return CONFIRMATION_TOOLS.has(toolName)
}
/**
 * Get tool definition by name.
 *
 * @param name - Tool name to look up.
 * @returns The matching ToolDef, or undefined when no tool has that name.
 */
export function getToolDef(name: string): ToolDef | undefined {
    for (const tool of ALL_TOOLS) {
        if (tool.name === name) {
            return tool
        }
    }
    return undefined
}
/**
 * Get tool definitions by category.
 *
 * @param category - "read" | "edit" | "search" | "analysis" | "git" | "run".
 * @returns The tools of that category, or an empty array for unknown categories.
 */
export function getToolsByCategory(category: string): ToolDef[] {
    const byCategory: Record<string, ToolDef[]> = {
        read: READ_TOOLS,
        edit: EDIT_TOOLS,
        search: SEARCH_TOOLS,
        analysis: ANALYSIS_TOOLS,
        git: GIT_TOOLS,
        run: RUN_TOOLS,
    }
    return byCategory[category] ?? []
}

View File

@@ -0,0 +1,119 @@
import { Redis } from "ioredis"
import type { RedisConfig } from "../../shared/constants/config.js"
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
/**
 * Redis client wrapper with connection management.
 * Handles connection lifecycle and AOF configuration.
 */
export class RedisClient {
    // Live ioredis handle; null until connect() succeeds.
    private client: Redis | null = null
    private readonly config: RedisConfig
    // Set once connect() completes; combined with socket status in isConnected().
    private connected = false

    constructor(config: RedisConfig) {
        this.config = config
    }

    /**
     * Connect to Redis server.
     * Configures AOF persistence on successful connection.
     * No-op when a connection is already established.
     * @throws IpuaroError when the connection attempt fails.
     */
    async connect(): Promise<void> {
        if (this.connected && this.client) {
            return
        }
        try {
            this.client = new Redis({
                host: this.config.host,
                port: this.config.port,
                db: this.config.db,
                password: this.config.password,
                keyPrefix: this.config.keyPrefix,
                // Defer the TCP connection until connect() is called explicitly.
                lazyConnect: true,
                // Give up after 3 attempts; back off 200ms per attempt, capped at 1s.
                retryStrategy: (attempt: number): number | null =>
                    attempt > 3 ? null : Math.min(attempt * 200, 1000),
                maxRetriesPerRequest: 3,
                enableReadyCheck: true,
            })
            await this.client.connect()
            await this.configureAOF()
            this.connected = true
        } catch (error) {
            this.connected = false
            this.client = null
            const reason = error instanceof Error ? error.message : "Unknown error"
            throw IpuaroError.redis(`Failed to connect to Redis: ${reason}`)
        }
    }

    /**
     * Disconnect from Redis server.
     * Graceful quit; safe to call when no connection exists.
     */
    async disconnect(): Promise<void> {
        if (!this.client) {
            return
        }
        await this.client.quit()
        this.client = null
        this.connected = false
    }

    /**
     * Check if connected to Redis: a handle exists and its socket is "ready".
     */
    isConnected(): boolean {
        return this.connected && this.client?.status === "ready"
    }

    /**
     * Get the underlying Redis client.
     * @throws IpuaroError if not connected
     */
    getClient(): Redis {
        if (this.client === null || !this.connected) {
            throw IpuaroError.redis("Redis client is not connected")
        }
        return this.client
    }

    /**
     * Execute a health check ping.
     * @returns true only when the server answers PONG; false on any failure.
     */
    async ping(): Promise<boolean> {
        if (!this.client) {
            return false
        }
        try {
            return (await this.client.ping()) === "PONG"
        } catch {
            return false
        }
    }

    /**
     * Configure AOF (Append Only File) persistence.
     * AOF provides better durability by logging every write operation.
     */
    private async configureAOF(): Promise<void> {
        if (!this.client) {
            return
        }
        try {
            await this.client.config("SET", "appendonly", "yes")
            await this.client.config("SET", "appendfsync", "everysec")
        } catch {
            /*
             * AOF config may fail if Redis doesn't allow CONFIG SET.
             * This is non-fatal - persistence will still work with default settings.
             */
        }
    }
}

View File

@@ -0,0 +1,236 @@
import type { DepsGraph, IStorage, SymbolIndex } from "../../domain/services/IStorage.js"
import type { FileAST } from "../../domain/value-objects/FileAST.js"
import type { FileData } from "../../domain/value-objects/FileData.js"
import type { FileMeta } from "../../domain/value-objects/FileMeta.js"
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
import { RedisClient } from "./RedisClient.js"
import { IndexFields, ProjectKeys } from "./schema.js"
/**
 * Redis implementation of IStorage.
 * Stores project data (files, AST, meta, indexes) in Redis hashes.
 *
 * Every value is JSON-serialized into a hash field. The private helpers
 * readField / writeField / readHashAsMap centralize the hget+parse,
 * hset+stringify, and hgetall+Map paths that all per-kind methods share,
 * removing the previous six-way duplication.
 */
export class RedisStorage implements IStorage {
    private readonly client: RedisClient
    private readonly projectName: string

    constructor(client: RedisClient, projectName: string) {
        this.client = client
        this.projectName = projectName
    }

    async getFile(path: string): Promise<FileData | null> {
        return (await this.readField(
            ProjectKeys.files(this.projectName),
            path,
            "FileData",
        )) as FileData | null
    }

    async setFile(path: string, data: FileData): Promise<void> {
        await this.writeField(ProjectKeys.files(this.projectName), path, data)
    }

    async deleteFile(path: string): Promise<void> {
        await this.getRedis().hdel(ProjectKeys.files(this.projectName), path)
    }

    async getAllFiles(): Promise<Map<string, FileData>> {
        return this.readHashAsMap<FileData>(ProjectKeys.files(this.projectName), "FileData")
    }

    async getFileCount(): Promise<number> {
        return this.getRedis().hlen(ProjectKeys.files(this.projectName))
    }

    async getAST(path: string): Promise<FileAST | null> {
        return (await this.readField(
            ProjectKeys.ast(this.projectName),
            path,
            "FileAST",
        )) as FileAST | null
    }

    async setAST(path: string, ast: FileAST): Promise<void> {
        await this.writeField(ProjectKeys.ast(this.projectName), path, ast)
    }

    async deleteAST(path: string): Promise<void> {
        await this.getRedis().hdel(ProjectKeys.ast(this.projectName), path)
    }

    async getAllASTs(): Promise<Map<string, FileAST>> {
        return this.readHashAsMap<FileAST>(ProjectKeys.ast(this.projectName), "FileAST")
    }

    async getMeta(path: string): Promise<FileMeta | null> {
        return (await this.readField(
            ProjectKeys.meta(this.projectName),
            path,
            "FileMeta",
        )) as FileMeta | null
    }

    async setMeta(path: string, meta: FileMeta): Promise<void> {
        await this.writeField(ProjectKeys.meta(this.projectName), path, meta)
    }

    async deleteMeta(path: string): Promise<void> {
        await this.getRedis().hdel(ProjectKeys.meta(this.projectName), path)
    }

    async getAllMetas(): Promise<Map<string, FileMeta>> {
        return this.readHashAsMap<FileMeta>(ProjectKeys.meta(this.projectName), "FileMeta")
    }

    async getSymbolIndex(): Promise<SymbolIndex> {
        const parsed = (await this.readField(
            ProjectKeys.indexes(this.projectName),
            IndexFields.symbols,
            "SymbolIndex",
        )) as [string, unknown[]][] | null
        if (!parsed) {
            return new Map()
        }
        return new Map(parsed) as SymbolIndex
    }

    async setSymbolIndex(index: SymbolIndex): Promise<void> {
        // Maps are not JSON-serializable directly; persist as an entries array.
        await this.writeField(ProjectKeys.indexes(this.projectName), IndexFields.symbols, [
            ...index.entries(),
        ])
    }

    async getDepsGraph(): Promise<DepsGraph> {
        const parsed = (await this.readField(
            ProjectKeys.indexes(this.projectName),
            IndexFields.depsGraph,
            "DepsGraph",
        )) as {
            imports: [string, string[]][]
            importedBy: [string, string[]][]
        } | null
        if (!parsed) {
            return {
                imports: new Map(),
                importedBy: new Map(),
            }
        }
        return {
            imports: new Map(parsed.imports),
            importedBy: new Map(parsed.importedBy),
        }
    }

    async setDepsGraph(graph: DepsGraph): Promise<void> {
        // Both direction maps are stored together as entry arrays in one field.
        await this.writeField(ProjectKeys.indexes(this.projectName), IndexFields.depsGraph, {
            imports: [...graph.imports.entries()],
            importedBy: [...graph.importedBy.entries()],
        })
    }

    async getProjectConfig(key: string): Promise<unknown> {
        return this.readField(ProjectKeys.config(this.projectName), key, "ProjectConfig")
    }

    async setProjectConfig(key: string, value: unknown): Promise<void> {
        await this.writeField(ProjectKeys.config(this.projectName), key, value)
    }

    async connect(): Promise<void> {
        await this.client.connect()
    }

    async disconnect(): Promise<void> {
        await this.client.disconnect()
    }

    isConnected(): boolean {
        return this.client.isConnected()
    }

    /**
     * Delete every hash belonging to this project.
     */
    async clear(): Promise<void> {
        const redis = this.getRedis()
        await Promise.all([
            redis.del(ProjectKeys.files(this.projectName)),
            redis.del(ProjectKeys.ast(this.projectName)),
            redis.del(ProjectKeys.meta(this.projectName)),
            redis.del(ProjectKeys.indexes(this.projectName)),
            redis.del(ProjectKeys.config(this.projectName)),
        ])
    }

    private getRedis(): ReturnType<RedisClient["getClient"]> {
        return this.client.getClient()
    }

    /**
     * Read one hash field and JSON-parse it.
     * @returns null when the field is absent.
     * @throws IpuaroError when the stored value is not valid JSON.
     */
    private async readField(key: string, field: string, type: string): Promise<unknown> {
        const data = await this.getRedis().hget(key, field)
        if (!data) {
            return null
        }
        return this.parseJSON(data, type)
    }

    /**
     * JSON-stringify a value and write it to one hash field.
     */
    private async writeField(key: string, field: string, value: unknown): Promise<void> {
        await this.getRedis().hset(key, field, JSON.stringify(value))
    }

    /**
     * Read a whole hash and parse every field into a Map.
     * Fields whose parsed value is falsy (e.g. JSON "null") are skipped,
     * matching the previous per-kind getAll* behavior.
     */
    private async readHashAsMap<T>(key: string, type: string): Promise<Map<string, T>> {
        const raw = await this.getRedis().hgetall(key)
        const result = new Map<string, T>()
        for (const [field, value] of Object.entries(raw)) {
            const parsed = this.parseJSON(value, type) as T | null
            if (parsed) {
                result.set(field, parsed)
            }
        }
        return result
    }

    /**
     * Parse JSON, converting failures into a typed IpuaroError.
     * @throws IpuaroError.parse on malformed data.
     */
    private parseJSON(data: string, type: string): unknown {
        try {
            return JSON.parse(data) as unknown
        } catch (error) {
            const message = error instanceof Error ? error.message : "Unknown error"
            throw IpuaroError.parse(`Failed to parse ${type}: ${message}`)
        }
    }
}

View File

@@ -0,0 +1,10 @@
// Storage module exports: Redis connection wrapper (RedisClient), the
// IStorage implementation (RedisStorage), and the Redis key-schema helpers.
export { RedisClient } from "./RedisClient.js"
export { RedisStorage } from "./RedisStorage.js"
export {
    ProjectKeys,
    SessionKeys,
    IndexFields,
    SessionFields,
    generateProjectName,
} from "./schema.js"

View File

@@ -0,0 +1,95 @@
/**
* Redis key schema for ipuaro data storage.
*
* Key structure:
* - project:{name}:files # Hash<path, FileData>
* - project:{name}:ast # Hash<path, FileAST>
* - project:{name}:meta # Hash<path, FileMeta>
* - project:{name}:indexes # Hash<name, JSON> (symbols, deps_graph)
* - project:{name}:config # Hash<key, JSON>
*
* - session:{id}:data # Hash<field, JSON> (history, context, stats)
* - session:{id}:undo # List<UndoEntry> (max 10)
* - sessions:list # List<session_id>
*
* Project name format: {parent-folder}-{project-folder}
*/
/** Build the namespaced key for one project-scoped hash. */
const projectKey = (projectName: string, section: string): string =>
    `project:${projectName}:${section}`

/**
 * Project-related Redis keys.
 */
export const ProjectKeys = {
    files: (projectName: string): string => projectKey(projectName, "files"),
    ast: (projectName: string): string => projectKey(projectName, "ast"),
    meta: (projectName: string): string => projectKey(projectName, "meta"),
    indexes: (projectName: string): string => projectKey(projectName, "indexes"),
    config: (projectName: string): string => projectKey(projectName, "config"),
} as const
/**
 * Session-related Redis keys: per-session hashes plus a global id list.
 */
export const SessionKeys = {
    data: (sessionId: string): string => ["session", sessionId, "data"].join(":"),
    undo: (sessionId: string): string => ["session", sessionId, "undo"].join(":"),
    list: "sessions:list",
} as const
/**
 * Index field names within project:indexes hash.
 */
export const IndexFields = {
    // Serialized SymbolIndex (stored as a Map-entries array).
    symbols: "symbols",
    // Serialized DepsGraph (imports / importedBy entry arrays).
    depsGraph: "deps_graph",
} as const

/**
 * Session data field names within session:data hash.
 */
export const SessionFields = {
    history: "history",
    context: "context",
    stats: "stats",
    inputHistory: "input_history",
    createdAt: "created_at",
    lastActivityAt: "last_activity_at",
    projectName: "project_name",
} as const
/**
 * Generate project name from path.
 * Format: {parent-folder}-{project-folder}
 *
 * Backslashes are normalized to "/" and trailing slashes are stripped
 * before the path is split, so Windows-style paths work too.
 *
 * @example
 * generateProjectName("/home/user/projects/myapp") -> "projects-myapp"
 * generateProjectName("/app") -> "app"
 */
export function generateProjectName(projectPath: string): string {
    const segments = projectPath
        .replace(/\\/g, "/")
        .replace(/\/+$/, "")
        .split("/")
        .filter(Boolean)
    switch (segments.length) {
        case 0:
            return "root"
        case 1:
            return sanitizeName(segments[0])
        default: {
            const project = sanitizeName(segments[segments.length - 1])
            const parent = sanitizeName(segments[segments.length - 2])
            return `${parent}-${project}`
        }
    }
}

/**
 * Sanitize a name for use in Redis keys.
 * Lowercases, maps anything outside [a-z0-9-] to "-", collapses hyphen
 * runs, and strips a leading/trailing hyphen.
 */
function sanitizeName(name: string): string {
    const lowered = name.toLowerCase().replace(/[^a-z0-9-]/g, "-")
    return lowered.replace(/-+/g, "-").replace(/^-|-$/g, "")
}

View File

@@ -0,0 +1,2 @@
ignored-file.ts
*.log

View File

@@ -0,0 +1,4 @@
{
"name": "sample-project",
"version": "1.0.0"
}

View File

@@ -0,0 +1,3 @@
// Sample-project fixture: FileScanner tests read this file and assert that
// its content contains "export function main".
export function main(): void {
    console.log("Hello")
}

View File

@@ -0,0 +1,3 @@
// Sample-project fixture: minimal exported helper picked up when the
// indexer tests scan the fixtures directory.
export function add(a: number, b: number): number {
    return a + b
}

View File

@@ -36,9 +36,7 @@ describe("ChatMessage", () => {
})
it("should create assistant message with tool calls", () => {
const toolCalls = [
{ id: "1", name: "get_lines", params: {}, timestamp: Date.now() },
]
const toolCalls = [{ id: "1", name: "get_lines", params: {}, timestamp: Date.now() }]
const stats = { tokens: 100, timeMs: 500, toolCalls: 1 }
const msg = createAssistantMessage("Response", toolCalls, stats)
@@ -49,9 +47,7 @@ describe("ChatMessage", () => {
describe("createToolMessage", () => {
it("should create tool message with results", () => {
const results = [
{ callId: "1", success: true, data: "data", executionTimeMs: 10 },
]
const results = [{ callId: "1", success: true, data: "data", executionTimeMs: 10 }]
const msg = createToolMessage(results)
expect(msg.role).toBe("tool")

View File

@@ -1,8 +1,5 @@
import { describe, it, expect } from "vitest"
import {
createFileData,
isFileDataEqual,
} from "../../../../src/domain/value-objects/FileData.js"
import { createFileData, isFileDataEqual } from "../../../../src/domain/value-objects/FileData.js"
describe("FileData", () => {
describe("createFileData", () => {

View File

@@ -1,8 +1,5 @@
import { describe, it, expect } from "vitest"
import {
createFileMeta,
isHubFile,
} from "../../../../src/domain/value-objects/FileMeta.js"
import { createFileMeta, isHubFile } from "../../../../src/domain/value-objects/FileMeta.js"
describe("FileMeta", () => {
describe("createFileMeta", () => {

View File

@@ -1,8 +1,5 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
import {
createUndoEntry,
canUndo,
} from "../../../../src/domain/value-objects/UndoEntry.js"
import { createUndoEntry, canUndo } from "../../../../src/domain/value-objects/UndoEntry.js"
describe("UndoEntry", () => {
beforeEach(() => {
@@ -21,7 +18,7 @@ describe("UndoEntry", () => {
"test.ts",
["old line"],
["new line"],
"Edit line 1"
"Edit line 1",
)
expect(entry.id).toBe("undo-1")
@@ -34,14 +31,7 @@ describe("UndoEntry", () => {
})
it("should create undo entry with toolCallId", () => {
const entry = createUndoEntry(
"undo-2",
"test.ts",
[],
[],
"Create file",
"tool-123"
)
const entry = createUndoEntry("undo-2", "test.ts", [], [], "Create file", "tool-123")
expect(entry.toolCallId).toBe("tool-123")
})
@@ -49,37 +39,19 @@ describe("UndoEntry", () => {
describe("canUndo", () => {
it("should return true when current content matches newContent", () => {
const entry = createUndoEntry(
"undo-1",
"test.ts",
["old"],
["new"],
"Edit"
)
const entry = createUndoEntry("undo-1", "test.ts", ["old"], ["new"], "Edit")
expect(canUndo(entry, ["new"])).toBe(true)
})
it("should return false when content differs", () => {
const entry = createUndoEntry(
"undo-1",
"test.ts",
["old"],
["new"],
"Edit"
)
const entry = createUndoEntry("undo-1", "test.ts", ["old"], ["new"], "Edit")
expect(canUndo(entry, ["modified"])).toBe(false)
})
it("should return false when length differs", () => {
const entry = createUndoEntry(
"undo-1",
"test.ts",
["old"],
["new"],
"Edit"
)
const entry = createUndoEntry("undo-1", "test.ts", ["old"], ["new"], "Edit")
expect(canUndo(entry, ["new", "extra"])).toBe(false)
})

View File

@@ -0,0 +1,347 @@
import { describe, it, expect, beforeAll } from "vitest"
import { ASTParser } from "../../../../src/infrastructure/indexer/ASTParser.js"
// Unit tests for ASTParser: error handling, import classification, and
// extraction of functions, classes, interfaces, type aliases, and exports,
// plus JS/JSX/TSX dialect support.
describe("ASTParser", () => {
    let parser: ASTParser
    // A single parser instance is created once and shared by all tests below.
    beforeAll(() => {
        parser = new ASTParser()
    })
    describe("parse", () => {
        it("should parse empty file", () => {
            const ast = parser.parse("", "ts")
            expect(ast.parseError).toBe(false)
            expect(ast.imports).toHaveLength(0)
            expect(ast.exports).toHaveLength(0)
            expect(ast.functions).toHaveLength(0)
            expect(ast.classes).toHaveLength(0)
        })
        it("should handle syntax errors gracefully", () => {
            const code = "export function {{{ invalid"
            const ast = parser.parse(code, "ts")
            expect(ast.parseError).toBe(true)
            expect(ast.parseErrorMessage).toBeDefined()
        })
        it("should return error for unsupported language", () => {
            const ast = parser.parse("const x = 1", "py" as never)
            expect(ast.parseError).toBe(true)
            expect(ast.parseErrorMessage).toContain("Unsupported language")
        })
    })
    // Import extraction and the internal/external/builtin classification.
    describe("imports", () => {
        it("should extract default import", () => {
            const code = `import React from "react"`
            const ast = parser.parse(code, "ts")
            expect(ast.imports).toHaveLength(1)
            expect(ast.imports[0]).toMatchObject({
                name: "React",
                from: "react",
                isDefault: true,
                type: "external",
            })
        })
        it("should extract named imports", () => {
            const code = `import { useState, useEffect } from "react"`
            const ast = parser.parse(code, "ts")
            expect(ast.imports).toHaveLength(2)
            expect(ast.imports[0].name).toBe("useState")
            expect(ast.imports[1].name).toBe("useEffect")
            expect(ast.imports[0].isDefault).toBe(false)
        })
        it("should extract namespace import", () => {
            const code = `import * as path from "path"`
            const ast = parser.parse(code, "ts")
            expect(ast.imports).toHaveLength(1)
            expect(ast.imports[0].name).toBe("path")
            expect(ast.imports[0].isDefault).toBe(false)
        })
        it("should classify internal imports", () => {
            const code = `import { foo } from "./utils"`
            const ast = parser.parse(code, "ts")
            expect(ast.imports[0].type).toBe("internal")
        })
        it("should classify builtin imports", () => {
            const code = `import * as fs from "node:fs"`
            const ast = parser.parse(code, "ts")
            expect(ast.imports[0].type).toBe("builtin")
        })
        it("should classify external imports", () => {
            const code = `import lodash from "lodash"`
            const ast = parser.parse(code, "ts")
            expect(ast.imports[0].type).toBe("external")
        })
    })
    describe("functions", () => {
        it("should extract function declaration", () => {
            const code = `function add(a: number, b: number): number {
return a + b
}`
            const ast = parser.parse(code, "ts")
            expect(ast.functions).toHaveLength(1)
            expect(ast.functions[0]).toMatchObject({
                name: "add",
                isAsync: false,
                isExported: false,
            })
            expect(ast.functions[0].lineStart).toBe(1)
            expect(ast.functions[0].lineEnd).toBe(3)
        })
        it("should extract async function", () => {
            const code = `async function fetchData() { return null }`
            const ast = parser.parse(code, "ts")
            expect(ast.functions[0].isAsync).toBe(true)
        })
        it("should extract exported function", () => {
            const code = `export function main() {}`
            const ast = parser.parse(code, "ts")
            expect(ast.functions[0].isExported).toBe(true)
            expect(ast.exports).toHaveLength(1)
            expect(ast.exports[0].kind).toBe("function")
        })
        it("should extract arrow function", () => {
            const code = `const add = (a: number, b: number) => a + b`
            const ast = parser.parse(code, "ts")
            expect(ast.functions).toHaveLength(1)
            expect(ast.functions[0].name).toBe("add")
        })
        it("should extract function parameters", () => {
            const code = `function test(a: string, b?: number, c = 10) {}`
            const ast = parser.parse(code, "ts")
            expect(ast.functions[0].params).toHaveLength(3)
            expect(ast.functions[0].params[0]).toMatchObject({
                name: "a",
                optional: false,
            })
            expect(ast.functions[0].params[1]).toMatchObject({
                name: "b",
                optional: true,
            })
        })
    })
    describe("classes", () => {
        it("should extract class declaration", () => {
            const code = `class MyClass {
value: number
constructor() {}
getValue() {
return this.value
}
}`
            const ast = parser.parse(code, "ts")
            expect(ast.classes).toHaveLength(1)
            expect(ast.classes[0]).toMatchObject({
                name: "MyClass",
                isExported: false,
                isAbstract: false,
            })
        })
        it("should extract exported class", () => {
            const code = `export class Service {}`
            const ast = parser.parse(code, "ts")
            expect(ast.classes[0].isExported).toBe(true)
            expect(ast.exports).toHaveLength(1)
            expect(ast.exports[0].kind).toBe("class")
        })
        it("should extract class methods", () => {
            const code = `class Service {
async fetch() {}
private process() {}
static create() {}
}`
            const ast = parser.parse(code, "ts")
            expect(ast.classes[0].methods.length).toBeGreaterThanOrEqual(1)
        })
        it("should extract class extends", () => {
            const code = `class Child extends Parent {}`
            const ast = parser.parse(code, "ts")
            expect(ast.classes[0].extends).toBe("Parent")
        })
    })
    describe("interfaces", () => {
        it("should extract interface declaration", () => {
            const code = `interface User {
name: string
age: number
}`
            const ast = parser.parse(code, "ts")
            expect(ast.interfaces).toHaveLength(1)
            expect(ast.interfaces[0]).toMatchObject({
                name: "User",
                isExported: false,
            })
        })
        it("should extract exported interface", () => {
            const code = `export interface Config {}`
            const ast = parser.parse(code, "ts")
            expect(ast.interfaces[0].isExported).toBe(true)
        })
        it("should extract interface properties", () => {
            const code = `interface Props {
value: string
onChange: (v: string) => void
}`
            const ast = parser.parse(code, "ts")
            expect(ast.interfaces[0].properties.length).toBeGreaterThanOrEqual(1)
        })
    })
    describe("type aliases", () => {
        it("should extract type alias", () => {
            const code = `type ID = string | number`
            const ast = parser.parse(code, "ts")
            expect(ast.typeAliases).toHaveLength(1)
            expect(ast.typeAliases[0]).toMatchObject({
                name: "ID",
                isExported: false,
            })
        })
        it("should extract exported type alias", () => {
            const code = `export type Status = "pending" | "done"`
            const ast = parser.parse(code, "ts")
            expect(ast.typeAliases[0].isExported).toBe(true)
        })
    })
    describe("exports", () => {
        it("should extract named exports", () => {
            const code = `
const foo = 1
const bar = 2
export { foo, bar }
`
            const ast = parser.parse(code, "ts")
            expect(ast.exports).toHaveLength(2)
        })
        it("should extract export default", () => {
            const code = `export default function main() {}`
            const ast = parser.parse(code, "ts")
            expect(ast.exports.some((e) => e.isDefault)).toBe(true)
        })
        it("should extract exported const", () => {
            const code = `export const VERSION = "1.0.0"`
            const ast = parser.parse(code, "ts")
            expect(ast.exports).toHaveLength(1)
            expect(ast.exports[0].kind).toBe("variable")
        })
    })
    describe("JavaScript support", () => {
        it("should parse JavaScript file", () => {
            const code = `
import React from "react"
function Component() {
return null
}
export default Component
`
            const ast = parser.parse(code, "js")
            expect(ast.parseError).toBe(false)
            expect(ast.imports).toHaveLength(1)
            expect(ast.functions).toHaveLength(1)
        })
        it("should parse JSX file", () => {
            const code = `
import React from "react"
function App() {
return <div>Hello</div>
}
`
            const ast = parser.parse(code, "jsx")
            expect(ast.parseError).toBe(false)
        })
    })
    describe("TSX support", () => {
        it("should parse TSX file", () => {
            const code = `
import React from "react"
interface Props {
name: string
}
export function Greeting({ name }: Props) {
return <h1>Hello, {name}!</h1>
}
`
            const ast = parser.parse(code, "tsx")
            expect(ast.parseError).toBe(false)
            expect(ast.interfaces).toHaveLength(1)
            expect(ast.functions).toHaveLength(1)
        })
    })
    // End-to-end check: one file exercising every extraction category at once.
    describe("complex file", () => {
        it("should parse complex TypeScript file", () => {
            const code = `
import * as fs from "node:fs"
import { join } from "node:path"
import type { Config } from "./types"
export interface Options {
root: string
verbose?: boolean
}
export type Result = { success: boolean }
export class Scanner {
private options: Options
constructor(options: Options) {
this.options = options
}
async scan(): Promise<string[]> {
return []
}
}
export function createScanner(options: Options): Scanner {
return new Scanner(options)
}
export const VERSION = "1.0.0"
`
            const ast = parser.parse(code, "ts")
            expect(ast.parseError).toBe(false)
            expect(ast.imports.length).toBeGreaterThanOrEqual(2)
            expect(ast.interfaces).toHaveLength(1)
            expect(ast.typeAliases).toHaveLength(1)
            expect(ast.classes).toHaveLength(1)
            expect(ast.functions.length).toBeGreaterThanOrEqual(1)
            expect(ast.exports.length).toBeGreaterThanOrEqual(4)
        })
    })
})

View File

@@ -0,0 +1,238 @@
import * as fs from "node:fs/promises"
import * as path from "node:path"
import { describe, it, expect, beforeAll, afterAll } from "vitest"
import {
FileScanner,
type ScanProgress,
} from "../../../../src/infrastructure/indexer/FileScanner.js"
import type { ScanResult } from "../../../../src/domain/services/IIndexer.js"
const FIXTURES_DIR = path.join(__dirname, "../../../fixtures/sample-project")
describe("FileScanner", () => {
describe("constructor", () => {
it("should create instance with default options", () => {
const scanner = new FileScanner()
expect(scanner).toBeInstanceOf(FileScanner)
})
it("should accept custom extensions", () => {
const scanner = new FileScanner({ extensions: [".ts", ".js"] })
expect(scanner.isSupportedExtension("file.ts")).toBe(true)
expect(scanner.isSupportedExtension("file.js")).toBe(true)
expect(scanner.isSupportedExtension("file.tsx")).toBe(false)
})
it("should accept additional ignore patterns", () => {
const scanner = new FileScanner({ additionalIgnore: ["*.test.ts"] })
expect(scanner).toBeInstanceOf(FileScanner)
})
it("should accept progress callback", () => {
const onProgress = (progress: ScanProgress): void => {
// callback
}
const scanner = new FileScanner({ onProgress })
expect(scanner).toBeInstanceOf(FileScanner)
})
})
describe("isSupportedExtension", () => {
it("should return true for supported extensions", () => {
const scanner = new FileScanner()
expect(scanner.isSupportedExtension("file.ts")).toBe(true)
expect(scanner.isSupportedExtension("file.tsx")).toBe(true)
expect(scanner.isSupportedExtension("file.js")).toBe(true)
expect(scanner.isSupportedExtension("file.jsx")).toBe(true)
expect(scanner.isSupportedExtension("file.json")).toBe(true)
expect(scanner.isSupportedExtension("file.yaml")).toBe(true)
expect(scanner.isSupportedExtension("file.yml")).toBe(true)
})
it("should return false for unsupported extensions", () => {
const scanner = new FileScanner()
expect(scanner.isSupportedExtension("file.md")).toBe(false)
expect(scanner.isSupportedExtension("file.txt")).toBe(false)
expect(scanner.isSupportedExtension("file.png")).toBe(false)
})
it("should be case-insensitive", () => {
const scanner = new FileScanner()
expect(scanner.isSupportedExtension("file.TS")).toBe(true)
expect(scanner.isSupportedExtension("file.TSX")).toBe(true)
})
})
describe("scan", () => {
it("should scan directory and yield file results", async () => {
const scanner = new FileScanner()
const results: ScanResult[] = []
for await (const result of scanner.scan(FIXTURES_DIR)) {
results.push(result)
}
expect(results.length).toBeGreaterThan(0)
expect(results.every((r) => r.type === "file")).toBe(true)
})
it("should return relative paths", async () => {
const scanner = new FileScanner()
const results = await scanner.scanAll(FIXTURES_DIR)
for (const result of results) {
expect(path.isAbsolute(result.path)).toBe(false)
}
})
it("should include file stats", async () => {
const scanner = new FileScanner()
const results = await scanner.scanAll(FIXTURES_DIR)
for (const result of results) {
expect(typeof result.size).toBe("number")
expect(result.size).toBeGreaterThanOrEqual(0)
expect(typeof result.lastModified).toBe("number")
expect(result.lastModified).toBeGreaterThan(0)
}
})
it("should ignore node_modules by default", async () => {
const scanner = new FileScanner()
const results = await scanner.scanAll(FIXTURES_DIR)
const nodeModulesFiles = results.filter((r) => r.path.includes("node_modules"))
expect(nodeModulesFiles).toHaveLength(0)
})
it("should respect .gitignore", async () => {
const scanner = new FileScanner()
const results = await scanner.scanAll(FIXTURES_DIR)
const ignoredFile = results.find((r) => r.path.includes("ignored-file"))
expect(ignoredFile).toBeUndefined()
})
it("should only include supported extensions", async () => {
const scanner = new FileScanner({ extensions: [".ts"] })
const results = await scanner.scanAll(FIXTURES_DIR)
for (const result of results) {
expect(result.path.endsWith(".ts")).toBe(true)
}
})
it("should call progress callback", async () => {
const progressCalls: ScanProgress[] = []
const scanner = new FileScanner({
onProgress: (progress) => {
progressCalls.push({ ...progress })
},
})
await scanner.scanAll(FIXTURES_DIR)
expect(progressCalls.length).toBeGreaterThan(0)
for (const progress of progressCalls) {
expect(progress.current).toBeGreaterThan(0)
expect(progress.total).toBeGreaterThan(0)
expect(typeof progress.currentFile).toBe("string")
}
})
})
describe("scanAll", () => {
it("should return array of all results", async () => {
const scanner = new FileScanner()
const results = await scanner.scanAll(FIXTURES_DIR)
expect(Array.isArray(results)).toBe(true)
expect(results.length).toBeGreaterThan(0)
})
})
describe("isTextFile", () => {
let textFilePath: string
let binaryFilePath: string
beforeAll(async () => {
textFilePath = path.join(FIXTURES_DIR, "src", "index.ts")
binaryFilePath = path.join(FIXTURES_DIR, "binary-test.bin")
await fs.writeFile(binaryFilePath, Buffer.from([0x00, 0x01, 0x02]))
})
afterAll(async () => {
try {
await fs.unlink(binaryFilePath)
} catch {
// ignore
}
})
it("should return true for text files", async () => {
const isText = await FileScanner.isTextFile(textFilePath)
expect(isText).toBe(true)
})
it("should return false for binary files", async () => {
const isText = await FileScanner.isTextFile(binaryFilePath)
expect(isText).toBe(false)
})
it("should return false for non-existent files", async () => {
const isText = await FileScanner.isTextFile("/non/existent/file.ts")
expect(isText).toBe(false)
})
})
describe("readFileContent", () => {
it("should read text file content", async () => {
const filePath = path.join(FIXTURES_DIR, "src", "index.ts")
const content = await FileScanner.readFileContent(filePath)
expect(content).not.toBeNull()
expect(content).toContain("export function main")
})
it("should return null for binary files", async () => {
const binaryFilePath = path.join(FIXTURES_DIR, "binary-test2.bin")
await fs.writeFile(binaryFilePath, Buffer.from([0x00, 0x01, 0x02]))
try {
const content = await FileScanner.readFileContent(binaryFilePath)
expect(content).toBeNull()
} finally {
await fs.unlink(binaryFilePath)
}
})
it("should return null for non-existent files", async () => {
const content = await FileScanner.readFileContent("/non/existent/file.ts")
expect(content).toBeNull()
})
})
describe("empty directory handling", () => {
let emptyDir: string
beforeAll(async () => {
emptyDir = path.join(FIXTURES_DIR, "empty-dir")
await fs.mkdir(emptyDir, { recursive: true })
})
afterAll(async () => {
try {
await fs.rmdir(emptyDir)
} catch {
// ignore
}
})
it("should handle empty directories gracefully", async () => {
const scanner = new FileScanner()
const results = await scanner.scanAll(emptyDir)
expect(results).toHaveLength(0)
})
})
})

View File

@@ -0,0 +1,608 @@
import { describe, it, expect, beforeAll } from "vitest"
import { IndexBuilder } from "../../../../src/infrastructure/indexer/IndexBuilder.js"
import { ASTParser } from "../../../../src/infrastructure/indexer/ASTParser.js"
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
import { createEmptyFileAST } from "../../../../src/domain/value-objects/FileAST.js"
describe("IndexBuilder", () => {
let builder: IndexBuilder
let parser: ASTParser
const projectRoot = "/project"
beforeAll(() => {
builder = new IndexBuilder(projectRoot)
parser = new ASTParser()
})
describe("buildSymbolIndex", () => {
it("should index function declarations", () => {
const code = `
export function greet(name: string): string {
return \`Hello, \${name}!\`
}
function privateHelper(): void {}
`
const ast = parser.parse(code, "ts")
const asts = new Map<string, FileAST>([["/project/src/utils.ts", ast]])
const index = builder.buildSymbolIndex(asts)
expect(index.has("greet")).toBe(true)
expect(index.has("privateHelper")).toBe(true)
expect(index.get("greet")).toEqual([
{ path: "/project/src/utils.ts", line: 2, type: "function" },
])
})
it("should index class declarations and methods", () => {
const code = `
export class UserService {
async findById(id: string): Promise<User> {
return this.db.find(id)
}
private validate(data: unknown): void {}
}
`
const ast = parser.parse(code, "ts")
const asts = new Map<string, FileAST>([["/project/src/UserService.ts", ast]])
const index = builder.buildSymbolIndex(asts)
expect(index.has("UserService")).toBe(true)
expect(index.get("UserService")).toEqual([
{ path: "/project/src/UserService.ts", line: 2, type: "class" },
])
expect(index.has("UserService.findById")).toBe(true)
expect(index.has("UserService.validate")).toBe(true)
})
it("should index interface declarations", () => {
const code = `
export interface User {
id: string
name: string
}
interface InternalConfig {
debug: boolean
}
`
const ast = parser.parse(code, "ts")
const asts = new Map<string, FileAST>([["/project/src/types.ts", ast]])
const index = builder.buildSymbolIndex(asts)
expect(index.has("User")).toBe(true)
expect(index.has("InternalConfig")).toBe(true)
expect(index.get("User")).toEqual([
{ path: "/project/src/types.ts", line: 2, type: "interface" },
])
})
it("should index type alias declarations", () => {
const code = `
export type UserId = string
type Handler = (event: Event) => void
`
const ast = parser.parse(code, "ts")
const asts = new Map<string, FileAST>([["/project/src/types.ts", ast]])
const index = builder.buildSymbolIndex(asts)
expect(index.has("UserId")).toBe(true)
expect(index.has("Handler")).toBe(true)
expect(index.get("UserId")).toEqual([
{ path: "/project/src/types.ts", line: 2, type: "type" },
])
})
it("should index exported variables", () => {
const code = `
export const API_URL = "https://api.example.com"
export const DEFAULT_TIMEOUT = 5000
`
const ast = parser.parse(code, "ts")
const asts = new Map<string, FileAST>([["/project/src/config.ts", ast]])
const index = builder.buildSymbolIndex(asts)
expect(index.has("API_URL")).toBe(true)
expect(index.has("DEFAULT_TIMEOUT")).toBe(true)
})
it("should handle multiple files", () => {
const userCode = `export class User { name: string }`
const orderCode = `export class Order { id: string }`
const asts = new Map<string, FileAST>([
["/project/src/User.ts", parser.parse(userCode, "ts")],
["/project/src/Order.ts", parser.parse(orderCode, "ts")],
])
const index = builder.buildSymbolIndex(asts)
expect(index.has("User")).toBe(true)
expect(index.has("Order")).toBe(true)
expect(index.get("User")?.[0].path).toBe("/project/src/User.ts")
expect(index.get("Order")?.[0].path).toBe("/project/src/Order.ts")
})
it("should handle duplicate symbol names across files", () => {
const file1 = `export function helper(): void {}`
const file2 = `export function helper(): void {}`
const asts = new Map<string, FileAST>([
["/project/src/a/utils.ts", parser.parse(file1, "ts")],
["/project/src/b/utils.ts", parser.parse(file2, "ts")],
])
const index = builder.buildSymbolIndex(asts)
expect(index.has("helper")).toBe(true)
expect(index.get("helper")).toHaveLength(2)
})
it("should return empty index for empty ASTs", () => {
const asts = new Map<string, FileAST>()
const index = builder.buildSymbolIndex(asts)
expect(index.size).toBe(0)
})
it("should not index empty names", () => {
const ast = createEmptyFileAST()
ast.functions.push({
name: "",
lineStart: 1,
lineEnd: 3,
params: [],
isAsync: false,
isExported: false,
})
const asts = new Map<string, FileAST>([["/project/src/test.ts", ast]])
const index = builder.buildSymbolIndex(asts)
expect(index.has("")).toBe(false)
})
})
describe("buildDepsGraph", () => {
it("should build import relationships", () => {
const indexCode = `
import { helper } from "./utils"
export function main() { return helper() }
`
const utilsCode = `export function helper() { return 42 }`
const asts = new Map<string, FileAST>([
["/project/src/index.ts", parser.parse(indexCode, "ts")],
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
expect(graph.imports.get("/project/src/index.ts")).toContain("/project/src/utils.ts")
expect(graph.imports.get("/project/src/utils.ts")).toEqual([])
})
it("should build reverse import relationships", () => {
const indexCode = `import { helper } from "./utils"`
const utilsCode = `export function helper() {}`
const asts = new Map<string, FileAST>([
["/project/src/index.ts", parser.parse(indexCode, "ts")],
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
expect(graph.importedBy.get("/project/src/utils.ts")).toContain("/project/src/index.ts")
expect(graph.importedBy.get("/project/src/index.ts")).toEqual([])
})
it("should handle multiple imports from same file", () => {
const code = `
import { a } from "./utils"
import { b } from "./utils"
`
const utilsCode = `export const a = 1; export const b = 2;`
const asts = new Map<string, FileAST>([
["/project/src/index.ts", parser.parse(code, "ts")],
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
const imports = graph.imports.get("/project/src/index.ts") ?? []
expect(imports.filter((i) => i === "/project/src/utils.ts")).toHaveLength(1)
})
it("should ignore external imports", () => {
const code = `
import React from "react"
import { helper } from "./utils"
`
const utilsCode = `export function helper() {}`
const asts = new Map<string, FileAST>([
["/project/src/index.ts", parser.parse(code, "ts")],
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
const imports = graph.imports.get("/project/src/index.ts") ?? []
expect(imports).not.toContain("react")
expect(imports).toContain("/project/src/utils.ts")
})
it("should ignore builtin imports", () => {
const code = `
import * as fs from "node:fs"
import { helper } from "./utils"
`
const utilsCode = `export function helper() {}`
const asts = new Map<string, FileAST>([
["/project/src/index.ts", parser.parse(code, "ts")],
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
const imports = graph.imports.get("/project/src/index.ts") ?? []
expect(imports).not.toContain("node:fs")
})
it("should handle index.ts imports", () => {
const code = `import { util } from "./utils"`
const indexCode = `export function util() {}`
const asts = new Map<string, FileAST>([
["/project/src/main.ts", parser.parse(code, "ts")],
["/project/src/utils/index.ts", parser.parse(indexCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
expect(graph.imports.get("/project/src/main.ts")).toContain(
"/project/src/utils/index.ts",
)
})
it("should handle .js extension imports", () => {
const code = `import { helper } from "./utils.js"`
const utilsCode = `export function helper() {}`
const asts = new Map<string, FileAST>([
["/project/src/index.ts", parser.parse(code, "ts")],
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
expect(graph.imports.get("/project/src/index.ts")).toContain("/project/src/utils.ts")
})
it("should sort dependencies", () => {
const code = `
import { c } from "./c"
import { a } from "./a"
import { b } from "./b"
`
const asts = new Map<string, FileAST>([
["/project/src/index.ts", parser.parse(code, "ts")],
["/project/src/a.ts", parser.parse("export const a = 1", "ts")],
["/project/src/b.ts", parser.parse("export const b = 2", "ts")],
["/project/src/c.ts", parser.parse("export const c = 3", "ts")],
])
const graph = builder.buildDepsGraph(asts)
expect(graph.imports.get("/project/src/index.ts")).toEqual([
"/project/src/a.ts",
"/project/src/b.ts",
"/project/src/c.ts",
])
})
it("should return empty graph for empty ASTs", () => {
const asts = new Map<string, FileAST>()
const graph = builder.buildDepsGraph(asts)
expect(graph.imports.size).toBe(0)
expect(graph.importedBy.size).toBe(0)
})
})
describe("findSymbol", () => {
it("should find existing symbol", () => {
const code = `export function greet(): void {}`
const asts = new Map<string, FileAST>([
["/project/src/utils.ts", parser.parse(code, "ts")],
])
const index = builder.buildSymbolIndex(asts)
const locations = builder.findSymbol(index, "greet")
expect(locations).toHaveLength(1)
expect(locations[0].path).toBe("/project/src/utils.ts")
})
it("should return empty array for non-existent symbol", () => {
const asts = new Map<string, FileAST>()
const index = builder.buildSymbolIndex(asts)
const locations = builder.findSymbol(index, "nonexistent")
expect(locations).toEqual([])
})
})
describe("searchSymbols", () => {
it("should find symbols matching pattern", () => {
const code = `
export function getUserById(): void {}
export function getUserByEmail(): void {}
export function createOrder(): void {}
`
const asts = new Map<string, FileAST>([
["/project/src/api.ts", parser.parse(code, "ts")],
])
const index = builder.buildSymbolIndex(asts)
const results = builder.searchSymbols(index, "getUser")
expect(results.size).toBe(2)
expect(results.has("getUserById")).toBe(true)
expect(results.has("getUserByEmail")).toBe(true)
})
it("should be case insensitive", () => {
const code = `export function MyFunction(): void {}`
const asts = new Map<string, FileAST>([
["/project/src/test.ts", parser.parse(code, "ts")],
])
const index = builder.buildSymbolIndex(asts)
const results = builder.searchSymbols(index, "myfunction")
expect(results.has("MyFunction")).toBe(true)
})
it("should return empty map for no matches", () => {
const code = `export function test(): void {}`
const asts = new Map<string, FileAST>([
["/project/src/test.ts", parser.parse(code, "ts")],
])
const index = builder.buildSymbolIndex(asts)
const results = builder.searchSymbols(index, "xyz123")
expect(results.size).toBe(0)
})
})
describe("getDependencies", () => {
it("should return file dependencies", () => {
const indexCode = `import { a } from "./a"`
const aCode = `export const a = 1`
const asts = new Map<string, FileAST>([
["/project/src/index.ts", parser.parse(indexCode, "ts")],
["/project/src/a.ts", parser.parse(aCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
const deps = builder.getDependencies(graph, "/project/src/index.ts")
expect(deps).toContain("/project/src/a.ts")
})
it("should return empty array for file not in graph", () => {
const asts = new Map<string, FileAST>()
const graph = builder.buildDepsGraph(asts)
const deps = builder.getDependencies(graph, "/nonexistent.ts")
expect(deps).toEqual([])
})
})
describe("getDependents", () => {
it("should return file dependents", () => {
const indexCode = `import { a } from "./a"`
const aCode = `export const a = 1`
const asts = new Map<string, FileAST>([
["/project/src/index.ts", parser.parse(indexCode, "ts")],
["/project/src/a.ts", parser.parse(aCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
const dependents = builder.getDependents(graph, "/project/src/a.ts")
expect(dependents).toContain("/project/src/index.ts")
})
it("should return empty array for file not in graph", () => {
const asts = new Map<string, FileAST>()
const graph = builder.buildDepsGraph(asts)
const dependents = builder.getDependents(graph, "/nonexistent.ts")
expect(dependents).toEqual([])
})
})
describe("findCircularDependencies", () => {
it("should detect simple circular dependency", () => {
const aCode = `import { b } from "./b"; export const a = 1;`
const bCode = `import { a } from "./a"; export const b = 2;`
const asts = new Map<string, FileAST>([
["/project/src/a.ts", parser.parse(aCode, "ts")],
["/project/src/b.ts", parser.parse(bCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
const cycles = builder.findCircularDependencies(graph)
expect(cycles.length).toBe(1)
expect(cycles[0]).toContain("/project/src/a.ts")
expect(cycles[0]).toContain("/project/src/b.ts")
})
it("should detect three-way circular dependency", () => {
const aCode = `import { b } from "./b"; export const a = 1;`
const bCode = `import { c } from "./c"; export const b = 2;`
const cCode = `import { a } from "./a"; export const c = 3;`
const asts = new Map<string, FileAST>([
["/project/src/a.ts", parser.parse(aCode, "ts")],
["/project/src/b.ts", parser.parse(bCode, "ts")],
["/project/src/c.ts", parser.parse(cCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
const cycles = builder.findCircularDependencies(graph)
expect(cycles.length).toBe(1)
expect(cycles[0]).toHaveLength(4)
})
it("should return empty array when no cycles", () => {
const aCode = `export const a = 1`
const bCode = `import { a } from "./a"; export const b = a + 1;`
const cCode = `import { b } from "./b"; export const c = b + 1;`
const asts = new Map<string, FileAST>([
["/project/src/a.ts", parser.parse(aCode, "ts")],
["/project/src/b.ts", parser.parse(bCode, "ts")],
["/project/src/c.ts", parser.parse(cCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
const cycles = builder.findCircularDependencies(graph)
expect(cycles).toEqual([])
})
it("should handle self-reference", () => {
const aCode = `import { helper } from "./a"; export const a = 1; export function helper() {}`
const asts = new Map<string, FileAST>([
["/project/src/a.ts", parser.parse(aCode, "ts")],
])
const graph = builder.buildDepsGraph(asts)
const cycles = builder.findCircularDependencies(graph)
expect(cycles.length).toBe(1)
})
})
describe("getStats", () => {
it("should return comprehensive statistics", () => {
const code1 = `
export function func1(): void {}
export class Class1 {}
export interface Interface1 {}
export type Type1 = string
export const VAR1 = 1
`
const code2 = `
import { func1 } from "./file1"
export function func2(): void {}
`
const asts = new Map<string, FileAST>([
["/project/src/file1.ts", parser.parse(code1, "ts")],
["/project/src/file2.ts", parser.parse(code2, "ts")],
])
const symbolIndex = builder.buildSymbolIndex(asts)
const depsGraph = builder.buildDepsGraph(asts)
const stats = builder.getStats(symbolIndex, depsGraph)
expect(stats.totalSymbols).toBeGreaterThan(0)
expect(stats.symbolsByType.function).toBeGreaterThan(0)
expect(stats.symbolsByType.class).toBe(1)
expect(stats.symbolsByType.interface).toBe(1)
expect(stats.symbolsByType.type).toBe(1)
expect(stats.totalFiles).toBe(2)
expect(stats.totalDependencies).toBe(1)
})
it("should identify hubs", () => {
const hubCode = `export const shared = 1`
const consumerCodes = Array.from({ length: 6 }, () => `import { shared } from "./hub"`)
const asts = new Map<string, FileAST>([
["/project/src/hub.ts", parser.parse(hubCode, "ts")],
])
consumerCodes.forEach((code, i) => {
asts.set(`/project/src/consumer${i}.ts`, parser.parse(code, "ts"))
})
const symbolIndex = builder.buildSymbolIndex(asts)
const depsGraph = builder.buildDepsGraph(asts)
const stats = builder.getStats(symbolIndex, depsGraph)
expect(stats.hubs).toContain("/project/src/hub.ts")
})
it("should identify orphans", () => {
const orphanCode = `const internal = 1`
const asts = new Map<string, FileAST>([
["/project/src/orphan.ts", parser.parse(orphanCode, "ts")],
])
const symbolIndex = builder.buildSymbolIndex(asts)
const depsGraph = builder.buildDepsGraph(asts)
const stats = builder.getStats(symbolIndex, depsGraph)
expect(stats.orphans).toContain("/project/src/orphan.ts")
})
})
describe("integration with ASTParser", () => {
it("should work with complex TypeScript code", () => {
const code = `
import { BaseService } from "./base"
import type { User, UserDTO } from "./types"
export class UserService extends BaseService {
private readonly cache = new Map<string, User>()
async findById(id: string): Promise<User | null> {
if (this.cache.has(id)) {
return this.cache.get(id)!
}
return this.repository.find(id)
}
toDTO(user: User): UserDTO {
return { id: user.id, name: user.name }
}
}
export type ServiceResult<T> = { success: true; data: T } | { success: false; error: string }
`
const baseCode = `export class BaseService { protected repository: any }`
const typesCode = `export interface User { id: string; name: string }; export interface UserDTO { id: string; name: string }`
const asts = new Map<string, FileAST>([
["/project/src/UserService.ts", parser.parse(code, "ts")],
["/project/src/base.ts", parser.parse(baseCode, "ts")],
["/project/src/types.ts", parser.parse(typesCode, "ts")],
])
const symbolIndex = builder.buildSymbolIndex(asts)
const depsGraph = builder.buildDepsGraph(asts)
expect(symbolIndex.has("UserService")).toBe(true)
expect(symbolIndex.has("UserService.findById")).toBe(true)
expect(symbolIndex.has("UserService.toDTO")).toBe(true)
expect(symbolIndex.has("ServiceResult")).toBe(true)
expect(symbolIndex.has("BaseService")).toBe(true)
expect(symbolIndex.has("User")).toBe(true)
expect(depsGraph.imports.get("/project/src/UserService.ts")).toContain(
"/project/src/base.ts",
)
expect(depsGraph.imports.get("/project/src/UserService.ts")).toContain(
"/project/src/types.ts",
)
})
})
})

View File

@@ -0,0 +1,702 @@
import { describe, it, expect, beforeAll } from "vitest"
import { MetaAnalyzer } from "../../../../src/infrastructure/indexer/MetaAnalyzer.js"
import { ASTParser } from "../../../../src/infrastructure/indexer/ASTParser.js"
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
import { createEmptyFileAST } from "../../../../src/domain/value-objects/FileAST.js"
describe("MetaAnalyzer", () => {
let analyzer: MetaAnalyzer
let parser: ASTParser
const projectRoot = "/project"
beforeAll(() => {
analyzer = new MetaAnalyzer(projectRoot)
parser = new ASTParser()
})
describe("countLinesOfCode", () => {
it("should count non-empty lines", () => {
const content = `const a = 1
const b = 2
const c = 3`
const loc = analyzer.countLinesOfCode(content)
expect(loc).toBe(3)
})
it("should exclude empty lines", () => {
const content = `const a = 1
const b = 2
const c = 3`
const loc = analyzer.countLinesOfCode(content)
expect(loc).toBe(3)
})
it("should exclude single-line comments", () => {
const content = `// This is a comment
const a = 1
// Another comment
const b = 2`
const loc = analyzer.countLinesOfCode(content)
expect(loc).toBe(2)
})
it("should exclude block comments", () => {
const content = `/*
* Multi-line comment
*/
const a = 1
/* inline block */ const b = 2`
const loc = analyzer.countLinesOfCode(content)
expect(loc).toBe(2)
})
it("should handle multi-line block comments", () => {
const content = `const a = 1
/*
comment line 1
comment line 2
*/
const b = 2`
const loc = analyzer.countLinesOfCode(content)
expect(loc).toBe(2)
})
it("should return 0 for empty content", () => {
const loc = analyzer.countLinesOfCode("")
expect(loc).toBe(0)
})
it("should return 0 for only comments", () => {
const content = `// comment 1
// comment 2
/* block comment */`
const loc = analyzer.countLinesOfCode(content)
expect(loc).toBe(0)
})
})
describe("calculateMaxNesting", () => {
it("should return 0 for empty AST", () => {
const ast = createEmptyFileAST()
const nesting = analyzer.calculateMaxNesting(ast)
expect(nesting).toBe(0)
})
it("should estimate nesting for short functions", () => {
const ast = createEmptyFileAST()
ast.functions.push({
name: "test",
lineStart: 1,
lineEnd: 3,
params: [],
isAsync: false,
isExported: false,
})
const nesting = analyzer.calculateMaxNesting(ast)
expect(nesting).toBe(1)
})
it("should estimate higher nesting for longer functions", () => {
const ast = createEmptyFileAST()
ast.functions.push({
name: "test",
lineStart: 1,
lineEnd: 40,
params: [],
isAsync: false,
isExported: false,
})
const nesting = analyzer.calculateMaxNesting(ast)
expect(nesting).toBe(4)
})
it("should return max nesting across multiple functions", () => {
const ast = createEmptyFileAST()
ast.functions.push(
{
name: "short",
lineStart: 1,
lineEnd: 3,
params: [],
isAsync: false,
isExported: false,
},
{
name: "long",
lineStart: 5,
lineEnd: 60,
params: [],
isAsync: false,
isExported: false,
},
)
const nesting = analyzer.calculateMaxNesting(ast)
expect(nesting).toBe(5)
})
it("should account for class methods", () => {
const ast = createEmptyFileAST()
ast.classes.push({
name: "MyClass",
lineStart: 1,
lineEnd: 50,
methods: [
{
name: "method1",
lineStart: 2,
lineEnd: 25,
params: [],
isAsync: false,
visibility: "public",
isStatic: false,
},
],
properties: [],
implements: [],
isExported: false,
isAbstract: false,
})
const nesting = analyzer.calculateMaxNesting(ast)
expect(nesting).toBeGreaterThan(1)
})
})
describe("calculateCyclomaticComplexity", () => {
it("should return 1 for empty AST", () => {
const ast = createEmptyFileAST()
const complexity = analyzer.calculateCyclomaticComplexity(ast)
expect(complexity).toBe(1)
})
it("should increase complexity for functions", () => {
const ast = createEmptyFileAST()
ast.functions.push({
name: "test",
lineStart: 1,
lineEnd: 20,
params: [],
isAsync: false,
isExported: false,
})
const complexity = analyzer.calculateCyclomaticComplexity(ast)
expect(complexity).toBeGreaterThan(1)
})
it("should increase complexity for class methods", () => {
const ast = createEmptyFileAST()
ast.classes.push({
name: "MyClass",
lineStart: 1,
lineEnd: 50,
methods: [
{
name: "method1",
lineStart: 2,
lineEnd: 20,
params: [],
isAsync: false,
visibility: "public",
isStatic: false,
},
{
name: "method2",
lineStart: 22,
lineEnd: 45,
params: [],
isAsync: false,
visibility: "public",
isStatic: false,
},
],
properties: [],
implements: [],
isExported: false,
isAbstract: false,
})
const complexity = analyzer.calculateCyclomaticComplexity(ast)
expect(complexity).toBeGreaterThan(2)
})
})
describe("calculateComplexityScore", () => {
it("should return 0 for minimal values", () => {
const score = analyzer.calculateComplexityScore(0, 0, 0)
expect(score).toBe(0)
})
it("should return 100 for maximum values", () => {
const score = analyzer.calculateComplexityScore(1000, 10, 50)
expect(score).toBe(100)
})
it("should return intermediate values", () => {
const score = analyzer.calculateComplexityScore(100, 3, 10)
expect(score).toBeGreaterThan(0)
expect(score).toBeLessThan(100)
})
})
describe("resolveDependencies", () => {
it("should resolve relative imports", () => {
const ast = createEmptyFileAST()
ast.imports.push({
name: "foo",
from: "./utils",
line: 1,
type: "internal",
isDefault: false,
})
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
expect(deps).toHaveLength(1)
expect(deps[0]).toBe("/project/src/utils.ts")
})
it("should resolve parent directory imports", () => {
const ast = createEmptyFileAST()
ast.imports.push({
name: "config",
from: "../config",
line: 1,
type: "internal",
isDefault: false,
})
const deps = analyzer.resolveDependencies("/project/src/utils/helper.ts", ast)
expect(deps).toHaveLength(1)
expect(deps[0]).toBe("/project/src/config.ts")
})
it("should ignore external imports", () => {
const ast = createEmptyFileAST()
ast.imports.push({
name: "React",
from: "react",
line: 1,
type: "external",
isDefault: true,
})
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
expect(deps).toHaveLength(0)
})
it("should ignore builtin imports", () => {
const ast = createEmptyFileAST()
ast.imports.push({
name: "fs",
from: "node:fs",
line: 1,
type: "builtin",
isDefault: false,
})
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
expect(deps).toHaveLength(0)
})
it("should handle .js extension to .ts conversion", () => {
const ast = createEmptyFileAST()
ast.imports.push({
name: "util",
from: "./util.js",
line: 1,
type: "internal",
isDefault: false,
})
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
expect(deps).toHaveLength(1)
expect(deps[0]).toBe("/project/src/util.ts")
})
it("should deduplicate dependencies", () => {
const ast = createEmptyFileAST()
ast.imports.push(
{
name: "foo",
from: "./utils",
line: 1,
type: "internal",
isDefault: false,
},
{
name: "bar",
from: "./utils",
line: 2,
type: "internal",
isDefault: false,
},
)
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
expect(deps).toHaveLength(1)
})
it("should sort dependencies", () => {
const ast = createEmptyFileAST()
ast.imports.push(
{
name: "c",
from: "./c",
line: 1,
type: "internal",
isDefault: false,
},
{
name: "a",
from: "./a",
line: 2,
type: "internal",
isDefault: false,
},
{
name: "b",
from: "./b",
line: 3,
type: "internal",
isDefault: false,
},
)
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
expect(deps).toEqual(["/project/src/a.ts", "/project/src/b.ts", "/project/src/c.ts"])
})
})
describe("findDependents", () => {
it("should find files that import the given file", () => {
const allASTs = new Map<string, FileAST>()
const indexAST = createEmptyFileAST()
allASTs.set("/project/src/index.ts", indexAST)
const utilsAST = createEmptyFileAST()
utilsAST.imports.push({
name: "helper",
from: "./helper",
line: 1,
type: "internal",
isDefault: false,
})
allASTs.set("/project/src/utils.ts", utilsAST)
const dependents = analyzer.findDependents("/project/src/helper.ts", allASTs)
expect(dependents).toHaveLength(1)
expect(dependents[0]).toBe("/project/src/utils.ts")
})
it("should return empty array when no dependents", () => {
const allASTs = new Map<string, FileAST>()
allASTs.set("/project/src/index.ts", createEmptyFileAST())
allASTs.set("/project/src/utils.ts", createEmptyFileAST())
const dependents = analyzer.findDependents("/project/src/helper.ts", allASTs)
expect(dependents).toHaveLength(0)
})
it("should not include self as dependent", () => {
const allASTs = new Map<string, FileAST>()
const selfAST = createEmptyFileAST()
selfAST.imports.push({
name: "foo",
from: "./helper",
line: 1,
type: "internal",
isDefault: false,
})
allASTs.set("/project/src/helper.ts", selfAST)
const dependents = analyzer.findDependents("/project/src/helper.ts", allASTs)
expect(dependents).toHaveLength(0)
})
it("should handle index.ts imports", () => {
const allASTs = new Map<string, FileAST>()
const consumerAST = createEmptyFileAST()
consumerAST.imports.push({
name: "util",
from: "./utils",
line: 1,
type: "internal",
isDefault: false,
})
allASTs.set("/project/src/consumer.ts", consumerAST)
const dependents = analyzer.findDependents("/project/src/utils/index.ts", allASTs)
expect(dependents).toHaveLength(1)
})
it("should sort dependents", () => {
const allASTs = new Map<string, FileAST>()
const fileC = createEmptyFileAST()
fileC.imports.push({
name: "x",
from: "./target",
line: 1,
type: "internal",
isDefault: false,
})
allASTs.set("/project/src/c.ts", fileC)
const fileA = createEmptyFileAST()
fileA.imports.push({
name: "x",
from: "./target",
line: 1,
type: "internal",
isDefault: false,
})
allASTs.set("/project/src/a.ts", fileA)
const fileB = createEmptyFileAST()
fileB.imports.push({
name: "x",
from: "./target",
line: 1,
type: "internal",
isDefault: false,
})
allASTs.set("/project/src/b.ts", fileB)
const dependents = analyzer.findDependents("/project/src/target.ts", allASTs)
expect(dependents).toEqual([
"/project/src/a.ts",
"/project/src/b.ts",
"/project/src/c.ts",
])
})
})
describe("classifyFileType", () => {
it("should classify test files by .test. pattern", () => {
expect(analyzer.classifyFileType("/project/src/utils.test.ts")).toBe("test")
})
it("should classify test files by .spec. pattern", () => {
expect(analyzer.classifyFileType("/project/src/utils.spec.ts")).toBe("test")
})
it("should classify test files by /tests/ directory", () => {
expect(analyzer.classifyFileType("/project/tests/utils.ts")).toBe("test")
})
it("should classify test files by /__tests__/ directory", () => {
expect(analyzer.classifyFileType("/project/src/__tests__/utils.ts")).toBe("test")
})
it("should classify .d.ts as types", () => {
expect(analyzer.classifyFileType("/project/src/types.d.ts")).toBe("types")
})
it("should classify /types/ directory as types", () => {
expect(analyzer.classifyFileType("/project/src/types/index.ts")).toBe("types")
})
it("should classify types.ts as types", () => {
expect(analyzer.classifyFileType("/project/src/types.ts")).toBe("types")
})
it("should classify config files", () => {
expect(analyzer.classifyFileType("/project/tsconfig.json")).toBe("config")
expect(analyzer.classifyFileType("/project/eslint.config.js")).toBe("config")
expect(analyzer.classifyFileType("/project/vitest.config.ts")).toBe("config")
expect(analyzer.classifyFileType("/project/jest.config.js")).toBe("config")
})
it("should classify regular source files", () => {
expect(analyzer.classifyFileType("/project/src/index.ts")).toBe("source")
expect(analyzer.classifyFileType("/project/src/utils.tsx")).toBe("source")
expect(analyzer.classifyFileType("/project/src/helper.js")).toBe("source")
})
it("should classify unknown file types", () => {
expect(analyzer.classifyFileType("/project/README.md")).toBe("unknown")
expect(analyzer.classifyFileType("/project/data.json")).toBe("unknown")
})
})
describe("isEntryPointFile", () => {
it("should identify index files as entry points", () => {
expect(analyzer.isEntryPointFile("/project/src/index.ts", 5)).toBe(true)
expect(analyzer.isEntryPointFile("/project/src/index.js", 5)).toBe(true)
})
it("should identify files with no dependents as entry points", () => {
expect(analyzer.isEntryPointFile("/project/src/utils.ts", 0)).toBe(true)
})
it("should identify main.ts as entry point", () => {
expect(analyzer.isEntryPointFile("/project/src/main.ts", 5)).toBe(true)
})
it("should identify app.ts as entry point", () => {
expect(analyzer.isEntryPointFile("/project/src/app.tsx", 5)).toBe(true)
})
it("should identify cli.ts as entry point", () => {
expect(analyzer.isEntryPointFile("/project/src/cli.ts", 5)).toBe(true)
})
it("should identify server.ts as entry point", () => {
expect(analyzer.isEntryPointFile("/project/src/server.ts", 5)).toBe(true)
})
it("should not identify regular files with dependents as entry points", () => {
expect(analyzer.isEntryPointFile("/project/src/utils.ts", 3)).toBe(false)
})
})
describe("analyze", () => {
it("should produce complete FileMeta", () => {
const content = `import { helper } from "./helper"
export function main() {
return helper()
}
`
const ast = parser.parse(content, "ts")
const allASTs = new Map<string, FileAST>()
allASTs.set("/project/src/index.ts", ast)
const meta = analyzer.analyze("/project/src/index.ts", ast, content, allASTs)
expect(meta.complexity).toBeDefined()
expect(meta.complexity.loc).toBeGreaterThan(0)
expect(meta.dependencies).toHaveLength(1)
expect(meta.fileType).toBe("source")
expect(meta.isEntryPoint).toBe(true)
})
it("should identify hub files", () => {
const content = `export const util = () => {}`
const ast = parser.parse(content, "ts")
const allASTs = new Map<string, FileAST>()
for (let i = 0; i < 6; i++) {
const consumerAST = createEmptyFileAST()
consumerAST.imports.push({
name: "util",
from: "./shared",
line: 1,
type: "internal",
isDefault: false,
})
allASTs.set(`/project/src/consumer${i}.ts`, consumerAST)
}
const meta = analyzer.analyze("/project/src/shared.ts", ast, content, allASTs)
expect(meta.isHub).toBe(true)
expect(meta.dependents).toHaveLength(6)
})
it("should not identify as hub with few dependents", () => {
const content = `export const util = () => {}`
const ast = parser.parse(content, "ts")
const allASTs = new Map<string, FileAST>()
for (let i = 0; i < 3; i++) {
const consumerAST = createEmptyFileAST()
consumerAST.imports.push({
name: "util",
from: "./shared",
line: 1,
type: "internal",
isDefault: false,
})
allASTs.set(`/project/src/consumer${i}.ts`, consumerAST)
}
const meta = analyzer.analyze("/project/src/shared.ts", ast, content, allASTs)
expect(meta.isHub).toBe(false)
})
})
describe("analyzeAll", () => {
it("should analyze multiple files", () => {
const files = new Map<string, { ast: FileAST; content: string }>()
const indexContent = `import { util } from "./util"
export function main() { return util() }`
const indexAST = parser.parse(indexContent, "ts")
files.set("/project/src/index.ts", { ast: indexAST, content: indexContent })
const utilContent = `export function util() { return 42 }`
const utilAST = parser.parse(utilContent, "ts")
files.set("/project/src/util.ts", { ast: utilAST, content: utilContent })
const results = analyzer.analyzeAll(files)
expect(results.size).toBe(2)
expect(results.get("/project/src/index.ts")).toBeDefined()
expect(results.get("/project/src/util.ts")).toBeDefined()
const indexMeta = results.get("/project/src/index.ts")!
expect(indexMeta.dependencies).toContain("/project/src/util.ts")
const utilMeta = results.get("/project/src/util.ts")!
expect(utilMeta.dependents).toContain("/project/src/index.ts")
})
it("should handle empty files map", () => {
const files = new Map<string, { ast: FileAST; content: string }>()
const results = analyzer.analyzeAll(files)
expect(results.size).toBe(0)
})
})
describe("calculateComplexity", () => {
it("should return complete complexity metrics", () => {
const content = `function complex() {
if (true) {
for (let i = 0; i < 10; i++) {
if (i % 2 === 0) {
console.log(i)
}
}
}
return 42
}`
const ast = parser.parse(content, "ts")
const metrics = analyzer.calculateComplexity(ast, content)
expect(metrics.loc).toBeGreaterThan(0)
expect(metrics.nesting).toBeGreaterThan(0)
expect(metrics.cyclomaticComplexity).toBeGreaterThan(0)
expect(metrics.score).toBeGreaterThanOrEqual(0)
expect(metrics.score).toBeLessThanOrEqual(100)
})
})
describe("integration with ASTParser", () => {
it("should work with real parsed AST", () => {
const content = `import { readFile } from "node:fs"
import { helper } from "./helper"
import React from "react"
export class MyComponent {
private data: string[] = []
async loadData(): Promise<void> {
const content = await readFile("file.txt", "utf-8")
this.data = content.split("\\n")
}
render() {
return this.data.map(line => <div>{line}</div>)
}
}
export function createComponent(): MyComponent {
return new MyComponent()
}
`
const ast = parser.parse(content, "tsx")
const allASTs = new Map<string, FileAST>()
allASTs.set("/project/src/Component.tsx", ast)
const meta = analyzer.analyze("/project/src/Component.tsx", ast, content, allASTs)
expect(meta.complexity.loc).toBeGreaterThan(10)
expect(meta.dependencies).toContain("/project/src/helper.ts")
expect(meta.fileType).toBe("source")
})
})
})

View File

@@ -0,0 +1,278 @@
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"
import { Watchdog, type FileChangeEvent } from "../../../../src/infrastructure/indexer/Watchdog.js"
import * as fs from "node:fs/promises"
import * as path from "node:path"
import * as os from "node:os"
describe("Watchdog", () => {
let watchdog: Watchdog
let tempDir: string
beforeEach(async () => {
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "watchdog-test-"))
watchdog = new Watchdog({ debounceMs: 50 })
})
afterEach(async () => {
await watchdog.stop()
await fs.rm(tempDir, { recursive: true, force: true })
})
describe("constructor", () => {
it("should create with default options", () => {
const wd = new Watchdog()
expect(wd.isWatching()).toBe(false)
expect(wd.getRoot()).toBe("")
})
it("should accept custom options", () => {
const wd = new Watchdog({
debounceMs: 100,
extensions: [".ts"],
usePolling: true,
})
expect(wd.isWatching()).toBe(false)
})
})
describe("start/stop", () => {
it("should start watching", () => {
watchdog.start(tempDir)
expect(watchdog.isWatching()).toBe(true)
expect(watchdog.getRoot()).toBe(tempDir)
})
it("should stop watching", async () => {
watchdog.start(tempDir)
await watchdog.stop()
expect(watchdog.isWatching()).toBe(false)
})
it("should handle stop when not started", async () => {
await watchdog.stop()
expect(watchdog.isWatching()).toBe(false)
})
it("should restart when start called while running", async () => {
watchdog.start(tempDir)
const newTempDir = await fs.mkdtemp(path.join(os.tmpdir(), "watchdog-test2-"))
watchdog.start(newTempDir)
expect(watchdog.isWatching()).toBe(true)
expect(watchdog.getRoot()).toBe(newTempDir)
await fs.rm(newTempDir, { recursive: true, force: true })
})
})
describe("onFileChange/offFileChange", () => {
it("should register callback", () => {
const callback = vi.fn()
watchdog.onFileChange(callback)
expect(callback).not.toHaveBeenCalled()
})
it("should remove callback", () => {
const callback = vi.fn()
watchdog.onFileChange(callback)
watchdog.offFileChange(callback)
})
it("should handle removing non-existent callback", () => {
const callback = vi.fn()
watchdog.offFileChange(callback)
})
})
describe("getPendingCount", () => {
it("should return 0 when no pending changes", () => {
expect(watchdog.getPendingCount()).toBe(0)
})
})
describe("getWatchedPaths", () => {
it("should return empty array when not watching", () => {
expect(watchdog.getWatchedPaths()).toEqual([])
})
})
describe("flushAll", () => {
it("should not throw when no pending changes", () => {
expect(() => watchdog.flushAll()).not.toThrow()
})
})
describe("file change detection", () => {
it("should detect new file creation", async () => {
const events: FileChangeEvent[] = []
watchdog.onFileChange((event) => events.push(event))
watchdog.start(tempDir)
await new Promise((resolve) => setTimeout(resolve, 100))
const testFile = path.join(tempDir, "test.ts")
await fs.writeFile(testFile, "const x = 1")
await new Promise((resolve) => setTimeout(resolve, 200))
expect(events.length).toBeGreaterThanOrEqual(0)
})
it("should detect file modification", async () => {
const testFile = path.join(tempDir, "test.ts")
await fs.writeFile(testFile, "const x = 1")
const events: FileChangeEvent[] = []
watchdog.onFileChange((event) => events.push(event))
watchdog.start(tempDir)
await new Promise((resolve) => setTimeout(resolve, 100))
await fs.writeFile(testFile, "const x = 2")
await new Promise((resolve) => setTimeout(resolve, 200))
expect(events.length).toBeGreaterThanOrEqual(0)
})
it("should detect file deletion", async () => {
const testFile = path.join(tempDir, "test.ts")
await fs.writeFile(testFile, "const x = 1")
const events: FileChangeEvent[] = []
watchdog.onFileChange((event) => events.push(event))
watchdog.start(tempDir)
await new Promise((resolve) => setTimeout(resolve, 100))
await fs.unlink(testFile)
await new Promise((resolve) => setTimeout(resolve, 200))
expect(events.length).toBeGreaterThanOrEqual(0)
})
it("should ignore non-watched extensions", async () => {
const events: FileChangeEvent[] = []
watchdog.onFileChange((event) => events.push(event))
watchdog.start(tempDir)
await new Promise((resolve) => setTimeout(resolve, 100))
const txtFile = path.join(tempDir, "test.txt")
await fs.writeFile(txtFile, "hello")
await new Promise((resolve) => setTimeout(resolve, 200))
const tsEvents = events.filter((e) => e.path.endsWith(".txt"))
expect(tsEvents.length).toBe(0)
})
it("should debounce rapid changes", async () => {
const testFile = path.join(tempDir, "test.ts")
await fs.writeFile(testFile, "const x = 1")
const events: FileChangeEvent[] = []
watchdog.onFileChange((event) => events.push(event))
watchdog.start(tempDir)
await new Promise((resolve) => setTimeout(resolve, 100))
await fs.writeFile(testFile, "const x = 2")
await fs.writeFile(testFile, "const x = 3")
await fs.writeFile(testFile, "const x = 4")
await new Promise((resolve) => setTimeout(resolve, 200))
expect(events.length).toBeLessThanOrEqual(3)
})
})
describe("callback error handling", () => {
it("should continue after callback throws", async () => {
const events: FileChangeEvent[] = []
watchdog.onFileChange(() => {
throw new Error("Test error")
})
watchdog.onFileChange((event) => events.push(event))
watchdog.start(tempDir)
await new Promise((resolve) => setTimeout(resolve, 100))
const testFile = path.join(tempDir, "test.ts")
await fs.writeFile(testFile, "const x = 1")
await new Promise((resolve) => setTimeout(resolve, 200))
})
})
describe("custom extensions", () => {
it("should watch only specified extensions", async () => {
const customWatchdog = new Watchdog({
debounceMs: 50,
extensions: [".ts"],
})
const events: FileChangeEvent[] = []
customWatchdog.onFileChange((event) => events.push(event))
customWatchdog.start(tempDir)
await new Promise((resolve) => setTimeout(resolve, 100))
const tsFile = path.join(tempDir, "test.ts")
const jsFile = path.join(tempDir, "test.js")
await fs.writeFile(tsFile, "const x = 1")
await fs.writeFile(jsFile, "const y = 2")
await new Promise((resolve) => setTimeout(resolve, 200))
const jsEvents = events.filter((e) => e.path.endsWith(".js"))
expect(jsEvents.length).toBe(0)
await customWatchdog.stop()
})
})
describe("multiple callbacks", () => {
it("should notify all registered callbacks", async () => {
const events1: FileChangeEvent[] = []
const events2: FileChangeEvent[] = []
watchdog.onFileChange((event) => events1.push(event))
watchdog.onFileChange((event) => events2.push(event))
watchdog.start(tempDir)
await new Promise((resolve) => setTimeout(resolve, 100))
const testFile = path.join(tempDir, "test.ts")
await fs.writeFile(testFile, "const x = 1")
await new Promise((resolve) => setTimeout(resolve, 200))
expect(events1.length).toBe(events2.length)
})
})
describe("event properties", () => {
it("should include correct event type and path", async () => {
const events: FileChangeEvent[] = []
watchdog.onFileChange((event) => events.push(event))
watchdog.start(tempDir)
await new Promise((resolve) => setTimeout(resolve, 100))
const testFile = path.join(tempDir, "test.ts")
await fs.writeFile(testFile, "const x = 1")
await new Promise((resolve) => setTimeout(resolve, 200))
if (events.length > 0) {
const event = events[0]
expect(event.type).toMatch(/^(add|change)$/)
expect(event.path).toContain("test.ts")
expect(typeof event.timestamp).toBe("number")
expect(event.timestamp).toBeLessThanOrEqual(Date.now())
}
})
})
})

View File

@@ -0,0 +1,304 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
import type { LLMConfig } from "../../../../src/shared/constants/config.js"
import { IpuaroError } from "../../../../src/shared/errors/IpuaroError.js"
import { createUserMessage } from "../../../../src/domain/value-objects/ChatMessage.js"
// Canned successful chat completion returned by the mocked Ollama client.
const mockChatResponse = {
    message: {
        role: "assistant",
        content: "This is a test response.",
        tool_calls: undefined,
    },
    eval_count: 50,
    done_reason: "stop",
}
// Canned model listing consumed by isAvailable/hasModel/listModels tests.
const mockListResponse = {
    models: [
        { name: "qwen2.5-coder:7b-instruct", size: 4000000000 },
        { name: "llama2:latest", size: 3500000000 },
    ],
}
// Shared mutable mock instance; beforeEach resets it and individual tests
// override chat/list/pull per case.
const mockOllamaInstance = {
    chat: vi.fn(),
    list: vi.fn(),
    pull: vi.fn(),
}
// vi.mock is hoisted by Vitest above imports, so any `new Ollama()` inside
// the module under test yields mockOllamaInstance.
vi.mock("ollama", () => {
    return {
        Ollama: vi.fn(() => mockOllamaInstance),
    }
})
// Dynamic import AFTER registering the mock so OllamaClient binds to the
// mocked "ollama" module (top-level await — this file must be ESM).
const { OllamaClient } = await import("../../../../src/infrastructure/llm/OllamaClient.js")
describe("OllamaClient", () => {
const defaultConfig: LLMConfig = {
model: "qwen2.5-coder:7b-instruct",
contextWindow: 128000,
temperature: 0.1,
host: "http://localhost:11434",
timeout: 120000,
}
beforeEach(() => {
vi.clearAllMocks()
mockOllamaInstance.chat.mockResolvedValue(mockChatResponse)
mockOllamaInstance.list.mockResolvedValue(mockListResponse)
mockOllamaInstance.pull.mockResolvedValue({})
})
afterEach(() => {
vi.restoreAllMocks()
})
describe("constructor", () => {
it("should create instance with config", () => {
const client = new OllamaClient(defaultConfig)
expect(client).toBeDefined()
expect(client.getModelName()).toBe("qwen2.5-coder:7b-instruct")
expect(client.getContextWindowSize()).toBe(128000)
})
})
describe("chat", () => {
it("should send messages and return response", async () => {
const client = new OllamaClient(defaultConfig)
const messages = [createUserMessage("Hello, world!")]
const response = await client.chat(messages)
expect(response.content).toBe("This is a test response.")
expect(response.tokens).toBe(50)
expect(response.stopReason).toBe("end")
expect(response.truncated).toBe(false)
})
it("should convert messages to Ollama format", async () => {
const client = new OllamaClient(defaultConfig)
const messages = [createUserMessage("Hello")]
await client.chat(messages)
expect(mockOllamaInstance.chat).toHaveBeenCalledWith(
expect.objectContaining({
model: "qwen2.5-coder:7b-instruct",
messages: expect.arrayContaining([
expect.objectContaining({
role: "user",
content: "Hello",
}),
]),
}),
)
})
it("should pass tools when provided", async () => {
const client = new OllamaClient(defaultConfig)
const messages = [createUserMessage("Read file")]
const tools = [
{
name: "get_lines",
description: "Get lines from file",
parameters: [
{
name: "path",
type: "string" as const,
description: "File path",
required: true,
},
],
},
]
await client.chat(messages, tools)
expect(mockOllamaInstance.chat).toHaveBeenCalledWith(
expect.objectContaining({
tools: expect.arrayContaining([
expect.objectContaining({
type: "function",
function: expect.objectContaining({
name: "get_lines",
}),
}),
]),
}),
)
})
it("should extract tool calls from response", async () => {
mockOllamaInstance.chat.mockResolvedValue({
message: {
role: "assistant",
content: "",
tool_calls: [
{
function: {
name: "get_lines",
arguments: { path: "src/index.ts" },
},
},
],
},
eval_count: 30,
})
const client = new OllamaClient(defaultConfig)
const response = await client.chat([createUserMessage("Read file")])
expect(response.toolCalls).toHaveLength(1)
expect(response.toolCalls[0].name).toBe("get_lines")
expect(response.toolCalls[0].params).toEqual({ path: "src/index.ts" })
expect(response.stopReason).toBe("tool_use")
})
it("should handle connection errors", async () => {
mockOllamaInstance.chat.mockRejectedValue(new Error("fetch failed"))
const client = new OllamaClient(defaultConfig)
await expect(client.chat([createUserMessage("Hello")])).rejects.toThrow(IpuaroError)
})
it("should handle model not found errors", async () => {
mockOllamaInstance.chat.mockRejectedValue(new Error("model not found"))
const client = new OllamaClient(defaultConfig)
await expect(client.chat([createUserMessage("Hello")])).rejects.toThrow(/not found/)
})
})
describe("countTokens", () => {
it("should estimate tokens for text", async () => {
const client = new OllamaClient(defaultConfig)
const count = await client.countTokens("Hello, world!")
expect(count).toBeGreaterThan(0)
expect(typeof count).toBe("number")
})
})
describe("isAvailable", () => {
it("should return true when Ollama is available", async () => {
const client = new OllamaClient(defaultConfig)
const available = await client.isAvailable()
expect(available).toBe(true)
})
it("should return false when Ollama is not available", async () => {
mockOllamaInstance.list.mockRejectedValue(new Error("Connection refused"))
const client = new OllamaClient(defaultConfig)
const available = await client.isAvailable()
expect(available).toBe(false)
})
})
describe("getModelName", () => {
it("should return configured model name", () => {
const client = new OllamaClient(defaultConfig)
expect(client.getModelName()).toBe("qwen2.5-coder:7b-instruct")
})
})
describe("getContextWindowSize", () => {
it("should return configured context window size", () => {
const client = new OllamaClient(defaultConfig)
expect(client.getContextWindowSize()).toBe(128000)
})
})
describe("pullModel", () => {
it("should pull model successfully", async () => {
const client = new OllamaClient(defaultConfig)
await expect(client.pullModel("llama2")).resolves.toBeUndefined()
expect(mockOllamaInstance.pull).toHaveBeenCalledWith({
model: "llama2",
stream: false,
})
})
it("should throw on pull failure", async () => {
mockOllamaInstance.pull.mockRejectedValue(new Error("Network error"))
const client = new OllamaClient(defaultConfig)
await expect(client.pullModel("llama2")).rejects.toThrow(IpuaroError)
})
})
describe("hasModel", () => {
it("should return true for available model", async () => {
const client = new OllamaClient(defaultConfig)
const has = await client.hasModel("qwen2.5-coder:7b-instruct")
expect(has).toBe(true)
})
it("should return true for model prefix", async () => {
const client = new OllamaClient(defaultConfig)
const has = await client.hasModel("llama2")
expect(has).toBe(true)
})
it("should return false for missing model", async () => {
const client = new OllamaClient(defaultConfig)
const has = await client.hasModel("unknown-model")
expect(has).toBe(false)
})
it("should return false when list fails", async () => {
mockOllamaInstance.list.mockRejectedValue(new Error("Error"))
const client = new OllamaClient(defaultConfig)
const has = await client.hasModel("any-model")
expect(has).toBe(false)
})
})
describe("listModels", () => {
it("should return list of model names", async () => {
const client = new OllamaClient(defaultConfig)
const models = await client.listModels()
expect(models).toContain("qwen2.5-coder:7b-instruct")
expect(models).toContain("llama2:latest")
})
it("should throw on list failure", async () => {
mockOllamaInstance.list.mockRejectedValue(new Error("Network error"))
const client = new OllamaClient(defaultConfig)
await expect(client.listModels()).rejects.toThrow(IpuaroError)
})
})
describe("abort", () => {
it("should not throw when no request is in progress", () => {
const client = new OllamaClient(defaultConfig)
expect(() => client.abort()).not.toThrow()
})
})
})

View File

@@ -0,0 +1,255 @@
import { describe, it, expect } from "vitest"
import {
parseToolCalls,
formatToolCallsAsXml,
extractThinking,
hasToolCalls,
validateToolCallParams,
} from "../../../../src/infrastructure/llm/ResponseParser.js"
import { createToolCall } from "../../../../src/domain/value-objects/ToolCall.js"
describe("ResponseParser", () => {
describe("parseToolCalls", () => {
it("should parse a single tool call", () => {
const response = `<tool_call name="get_lines">
<path>src/index.ts</path>
<start>1</start>
<end>10</end>
</tool_call>`
const result = parseToolCalls(response)
expect(result.toolCalls).toHaveLength(1)
expect(result.toolCalls[0].name).toBe("get_lines")
expect(result.toolCalls[0].params).toEqual({
path: "src/index.ts",
start: 1,
end: 10,
})
expect(result.hasParseErrors).toBe(false)
})
it("should parse multiple tool calls", () => {
const response = `
<tool_call name="get_lines">
<path>src/a.ts</path>
</tool_call>
<tool_call name="get_function">
<path>src/b.ts</path>
<name>myFunc</name>
</tool_call>
`
const result = parseToolCalls(response)
expect(result.toolCalls).toHaveLength(2)
expect(result.toolCalls[0].name).toBe("get_lines")
expect(result.toolCalls[1].name).toBe("get_function")
})
it("should extract text content without tool calls", () => {
const response = `Let me check the file.
<tool_call name="get_lines">
<path>src/index.ts</path>
</tool_call>
Here's what I found.`
const result = parseToolCalls(response)
expect(result.content).toContain("Let me check the file.")
expect(result.content).toContain("Here's what I found.")
expect(result.content).not.toContain("tool_call")
})
it("should parse boolean values", () => {
const response = `<tool_call name="git_diff">
<staged>true</staged>
</tool_call>`
const result = parseToolCalls(response)
expect(result.toolCalls[0].params.staged).toBe(true)
})
it("should parse null values", () => {
const response = `<tool_call name="test">
<value>null</value>
</tool_call>`
const result = parseToolCalls(response)
expect(result.toolCalls[0].params.value).toBe(null)
})
it("should parse JSON arrays", () => {
const response = `<tool_call name="git_commit">
<files>["a.ts", "b.ts"]</files>
</tool_call>`
const result = parseToolCalls(response)
expect(result.toolCalls[0].params.files).toEqual(["a.ts", "b.ts"])
})
it("should parse JSON objects", () => {
const response = `<tool_call name="test">
<config>{"key": "value"}</config>
</tool_call>`
const result = parseToolCalls(response)
expect(result.toolCalls[0].params.config).toEqual({ key: "value" })
})
it("should return empty array for response without tool calls", () => {
const response = "This is just a regular response."
const result = parseToolCalls(response)
expect(result.toolCalls).toHaveLength(0)
expect(result.content).toBe(response)
})
it("should handle named param syntax", () => {
const response = `<tool_call name="get_lines">
<param name="path">src/index.ts</param>
<param name="start">5</param>
</tool_call>`
const result = parseToolCalls(response)
expect(result.toolCalls[0].params).toEqual({
path: "src/index.ts",
start: 5,
})
})
})
describe("formatToolCallsAsXml", () => {
it("should format tool calls as XML", () => {
const toolCalls = [
createToolCall("1", "get_lines", { path: "src/index.ts", start: 1 }),
]
const xml = formatToolCallsAsXml(toolCalls)
expect(xml).toContain('<tool_call name="get_lines">')
expect(xml).toContain("<path>src/index.ts</path>")
expect(xml).toContain("<start>1</start>")
expect(xml).toContain("</tool_call>")
})
it("should format multiple tool calls", () => {
const toolCalls = [
createToolCall("1", "get_lines", { path: "a.ts" }),
createToolCall("2", "get_function", { path: "b.ts", name: "foo" }),
]
const xml = formatToolCallsAsXml(toolCalls)
expect(xml).toContain('<tool_call name="get_lines">')
expect(xml).toContain('<tool_call name="get_function">')
})
it("should handle object values as JSON", () => {
const toolCalls = [
createToolCall("1", "test", { data: { key: "value" } }),
]
const xml = formatToolCallsAsXml(toolCalls)
expect(xml).toContain('{"key":"value"}')
})
})
describe("extractThinking", () => {
it("should extract thinking content", () => {
const response = `<thinking>Let me analyze this.</thinking>
Here is the answer.`
const result = extractThinking(response)
expect(result.thinking).toBe("Let me analyze this.")
expect(result.content).toContain("Here is the answer.")
expect(result.content).not.toContain("thinking")
})
it("should handle multiple thinking blocks", () => {
const response = `<thinking>First thought.</thinking>
Some content.
<thinking>Second thought.</thinking>
More content.`
const result = extractThinking(response)
expect(result.thinking).toContain("First thought.")
expect(result.thinking).toContain("Second thought.")
})
it("should return original content if no thinking", () => {
const response = "Just a regular response."
const result = extractThinking(response)
expect(result.thinking).toBe("")
expect(result.content).toBe(response)
})
})
describe("hasToolCalls", () => {
it("should return true if response has tool calls", () => {
const response = `<tool_call name="get_lines"><path>a.ts</path></tool_call>`
expect(hasToolCalls(response)).toBe(true)
})
it("should return false if response has no tool calls", () => {
const response = "Just text without tool calls."
expect(hasToolCalls(response)).toBe(false)
})
})
describe("validateToolCallParams", () => {
it("should return valid for complete params", () => {
const params = { path: "src/index.ts", start: 1, end: 10 }
const required = ["path", "start", "end"]
const result = validateToolCallParams("get_lines", params, required)
expect(result.valid).toBe(true)
expect(result.errors).toHaveLength(0)
})
it("should return errors for missing required params", () => {
const params = { path: "src/index.ts" }
const required = ["path", "start", "end"]
const result = validateToolCallParams("get_lines", params, required)
expect(result.valid).toBe(false)
expect(result.errors).toHaveLength(2)
expect(result.errors).toContain("Missing required parameter: start")
expect(result.errors).toContain("Missing required parameter: end")
})
it("should treat null and undefined as missing", () => {
const params = { path: null, start: undefined }
const required = ["path", "start"]
const result = validateToolCallParams("test", params, required)
expect(result.valid).toBe(false)
expect(result.errors).toHaveLength(2)
})
it("should accept empty required array", () => {
const params = {}
const required: string[] = []
const result = validateToolCallParams("git_status", params, required)
expect(result.valid).toBe(true)
})
})
})

View File

@@ -0,0 +1,278 @@
import { describe, it, expect } from "vitest"
import {
SYSTEM_PROMPT,
buildInitialContext,
buildFileContext,
truncateContext,
type ProjectStructure,
} from "../../../../src/infrastructure/llm/prompts.js"
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
import type { FileMeta } from "../../../../src/domain/value-objects/FileMeta.js"
describe("prompts", () => {
describe("SYSTEM_PROMPT", () => {
it("should be a non-empty string", () => {
expect(typeof SYSTEM_PROMPT).toBe("string")
expect(SYSTEM_PROMPT.length).toBeGreaterThan(100)
})
it("should contain core principles", () => {
expect(SYSTEM_PROMPT).toContain("Lazy Loading")
expect(SYSTEM_PROMPT).toContain("Precision")
expect(SYSTEM_PROMPT).toContain("Safety")
})
it("should list available tools", () => {
expect(SYSTEM_PROMPT).toContain("get_lines")
expect(SYSTEM_PROMPT).toContain("edit_lines")
expect(SYSTEM_PROMPT).toContain("find_references")
expect(SYSTEM_PROMPT).toContain("git_status")
expect(SYSTEM_PROMPT).toContain("run_command")
})
it("should include safety rules", () => {
expect(SYSTEM_PROMPT).toContain("Safety Rules")
expect(SYSTEM_PROMPT).toContain("Never execute commands that could harm")
})
})
describe("buildInitialContext", () => {
const structure: ProjectStructure = {
name: "my-project",
rootPath: "/home/user/my-project",
files: ["src/index.ts", "src/utils.ts", "package.json"],
directories: ["src", "tests"],
}
const asts = new Map<string, FileAST>([
[
"src/index.ts",
{
imports: [],
exports: [],
functions: [
{
name: "main",
lineStart: 1,
lineEnd: 10,
params: [],
isAsync: false,
isExported: true,
},
],
classes: [],
interfaces: [],
typeAliases: [],
parseError: false,
},
],
[
"src/utils.ts",
{
imports: [],
exports: [],
functions: [],
classes: [
{
name: "Helper",
lineStart: 1,
lineEnd: 20,
methods: [],
properties: [],
implements: [],
isExported: true,
isAbstract: false,
},
],
interfaces: [],
typeAliases: [],
parseError: false,
},
],
])
it("should include project header", () => {
const context = buildInitialContext(structure, asts)
expect(context).toContain("# Project: my-project")
expect(context).toContain("Root: /home/user/my-project")
expect(context).toContain("Files: 3")
expect(context).toContain("Directories: 2")
})
it("should include directory structure", () => {
const context = buildInitialContext(structure, asts)
expect(context).toContain("## Structure")
expect(context).toContain("src/")
expect(context).toContain("tests/")
})
it("should include file overview with AST summaries", () => {
const context = buildInitialContext(structure, asts)
expect(context).toContain("## Files")
expect(context).toContain("src/index.ts")
expect(context).toContain("fn: main")
expect(context).toContain("src/utils.ts")
expect(context).toContain("class: Helper")
})
it("should include file flags from metadata", () => {
const metas = new Map<string, FileMeta>([
[
"src/index.ts",
{
complexity: { loc: 100, nesting: 3, cyclomaticComplexity: 10, score: 75 },
dependencies: [],
dependents: ["a.ts", "b.ts", "c.ts", "d.ts", "e.ts", "f.ts"],
isHub: true,
isEntryPoint: true,
fileType: "source",
},
],
])
const context = buildInitialContext(structure, asts, metas)
expect(context).toContain("(hub, entry, complex)")
})
})
describe("buildFileContext", () => {
const ast: FileAST = {
imports: [
{ name: "fs", from: "node:fs", line: 1, type: "builtin", isDefault: false },
{ name: "helper", from: "./helper", line: 2, type: "internal", isDefault: true },
],
exports: [
{ name: "main", line: 10, isDefault: false, kind: "function" },
{ name: "Config", line: 20, isDefault: true, kind: "class" },
],
functions: [
{
name: "main",
lineStart: 10,
lineEnd: 30,
params: [
{ name: "args", optional: false, hasDefault: false },
{ name: "options", optional: true, hasDefault: false },
],
isAsync: true,
isExported: true,
},
],
classes: [
{
name: "Config",
lineStart: 40,
lineEnd: 80,
methods: [
{
name: "load",
lineStart: 50,
lineEnd: 60,
params: [],
isAsync: false,
visibility: "public",
isStatic: false,
},
],
properties: [],
extends: "BaseConfig",
implements: ["IConfig"],
isExported: true,
isAbstract: false,
},
],
interfaces: [],
typeAliases: [],
parseError: false,
}
it("should include file path header", () => {
const context = buildFileContext("src/index.ts", ast)
expect(context).toContain("## src/index.ts")
})
it("should include imports section", () => {
const context = buildFileContext("src/index.ts", ast)
expect(context).toContain("### Imports")
expect(context).toContain('fs from "node:fs" (builtin)')
expect(context).toContain('helper from "./helper" (internal)')
})
it("should include exports section", () => {
const context = buildFileContext("src/index.ts", ast)
expect(context).toContain("### Exports")
expect(context).toContain("function main")
expect(context).toContain("class Config (default)")
})
it("should include functions section", () => {
const context = buildFileContext("src/index.ts", ast)
expect(context).toContain("### Functions")
expect(context).toContain("async main(args, options)")
expect(context).toContain("[10-30]")
})
it("should include classes section with methods", () => {
const context = buildFileContext("src/index.ts", ast)
expect(context).toContain("### Classes")
expect(context).toContain("Config extends BaseConfig implements IConfig")
expect(context).toContain("[40-80]")
expect(context).toContain("load()")
})
it("should include metadata section when provided", () => {
const meta: FileMeta = {
complexity: { loc: 100, nesting: 3, cyclomaticComplexity: 10, score: 65 },
dependencies: ["a.ts", "b.ts"],
dependents: ["c.ts"],
isHub: false,
isEntryPoint: true,
fileType: "source",
}
const context = buildFileContext("src/index.ts", ast, meta)
expect(context).toContain("### Metadata")
expect(context).toContain("LOC: 100")
expect(context).toContain("Complexity: 65/100")
expect(context).toContain("Dependencies: 2")
expect(context).toContain("Dependents: 1")
})
})
describe("truncateContext", () => {
it("should return original context if within limit", () => {
const context = "Short context"
const result = truncateContext(context, 1000)
expect(result).toBe(context)
})
it("should truncate long context", () => {
const context = "a".repeat(1000)
const result = truncateContext(context, 100)
expect(result.length).toBeLessThan(500)
expect(result).toContain("truncated")
})
it("should break at newline boundary", () => {
const context = "Line 1\nLine 2\nLine 3\n" + "a".repeat(1000)
const result = truncateContext(context, 50)
expect(result).toContain("truncated")
})
})
})

View File

@@ -0,0 +1,287 @@
import { describe, it, expect } from "vitest"
import {
ALL_TOOLS,
READ_TOOLS,
EDIT_TOOLS,
SEARCH_TOOLS,
ANALYSIS_TOOLS,
GIT_TOOLS,
RUN_TOOLS,
CONFIRMATION_TOOLS,
requiresConfirmation,
getToolDef,
getToolsByCategory,
GET_LINES_TOOL,
GET_FUNCTION_TOOL,
GET_CLASS_TOOL,
GET_STRUCTURE_TOOL,
EDIT_LINES_TOOL,
CREATE_FILE_TOOL,
DELETE_FILE_TOOL,
FIND_REFERENCES_TOOL,
FIND_DEFINITION_TOOL,
GET_DEPENDENCIES_TOOL,
GET_DEPENDENTS_TOOL,
GET_COMPLEXITY_TOOL,
GET_TODOS_TOOL,
GIT_STATUS_TOOL,
GIT_DIFF_TOOL,
GIT_COMMIT_TOOL,
RUN_COMMAND_TOOL,
RUN_TESTS_TOOL,
} from "../../../../src/infrastructure/llm/toolDefs.js"
describe("toolDefs", () => {
describe("ALL_TOOLS", () => {
it("should contain exactly 18 tools", () => {
expect(ALL_TOOLS).toHaveLength(18)
})
it("should have unique tool names", () => {
const names = ALL_TOOLS.map((t) => t.name)
const uniqueNames = new Set(names)
expect(uniqueNames.size).toBe(18)
})
it("should have valid structure for all tools", () => {
for (const tool of ALL_TOOLS) {
expect(tool.name).toBeDefined()
expect(typeof tool.name).toBe("string")
expect(tool.description).toBeDefined()
expect(typeof tool.description).toBe("string")
expect(Array.isArray(tool.parameters)).toBe(true)
}
})
})
describe("READ_TOOLS", () => {
it("should contain 4 read tools", () => {
expect(READ_TOOLS).toHaveLength(4)
})
it("should include all read tools", () => {
expect(READ_TOOLS).toContain(GET_LINES_TOOL)
expect(READ_TOOLS).toContain(GET_FUNCTION_TOOL)
expect(READ_TOOLS).toContain(GET_CLASS_TOOL)
expect(READ_TOOLS).toContain(GET_STRUCTURE_TOOL)
})
})
describe("EDIT_TOOLS", () => {
it("should contain 3 edit tools", () => {
expect(EDIT_TOOLS).toHaveLength(3)
})
it("should include all edit tools", () => {
expect(EDIT_TOOLS).toContain(EDIT_LINES_TOOL)
expect(EDIT_TOOLS).toContain(CREATE_FILE_TOOL)
expect(EDIT_TOOLS).toContain(DELETE_FILE_TOOL)
})
})
describe("SEARCH_TOOLS", () => {
it("should contain 2 search tools", () => {
expect(SEARCH_TOOLS).toHaveLength(2)
})
it("should include all search tools", () => {
expect(SEARCH_TOOLS).toContain(FIND_REFERENCES_TOOL)
expect(SEARCH_TOOLS).toContain(FIND_DEFINITION_TOOL)
})
})
describe("ANALYSIS_TOOLS", () => {
it("should contain 4 analysis tools", () => {
expect(ANALYSIS_TOOLS).toHaveLength(4)
})
it("should include all analysis tools", () => {
expect(ANALYSIS_TOOLS).toContain(GET_DEPENDENCIES_TOOL)
expect(ANALYSIS_TOOLS).toContain(GET_DEPENDENTS_TOOL)
expect(ANALYSIS_TOOLS).toContain(GET_COMPLEXITY_TOOL)
expect(ANALYSIS_TOOLS).toContain(GET_TODOS_TOOL)
})
})
describe("GIT_TOOLS", () => {
it("should contain 3 git tools", () => {
expect(GIT_TOOLS).toHaveLength(3)
})
it("should include all git tools", () => {
expect(GIT_TOOLS).toContain(GIT_STATUS_TOOL)
expect(GIT_TOOLS).toContain(GIT_DIFF_TOOL)
expect(GIT_TOOLS).toContain(GIT_COMMIT_TOOL)
})
})
describe("RUN_TOOLS", () => {
it("should contain 2 run tools", () => {
expect(RUN_TOOLS).toHaveLength(2)
})
it("should include all run tools", () => {
expect(RUN_TOOLS).toContain(RUN_COMMAND_TOOL)
expect(RUN_TOOLS).toContain(RUN_TESTS_TOOL)
})
})
describe("individual tool definitions", () => {
describe("GET_LINES_TOOL", () => {
it("should have correct name", () => {
expect(GET_LINES_TOOL.name).toBe("get_lines")
})
it("should have required path parameter", () => {
const pathParam = GET_LINES_TOOL.parameters.find((p) => p.name === "path")
expect(pathParam).toBeDefined()
expect(pathParam?.required).toBe(true)
})
it("should have optional start and end parameters", () => {
const startParam = GET_LINES_TOOL.parameters.find((p) => p.name === "start")
const endParam = GET_LINES_TOOL.parameters.find((p) => p.name === "end")
expect(startParam?.required).toBe(false)
expect(endParam?.required).toBe(false)
})
})
describe("EDIT_LINES_TOOL", () => {
it("should have all required parameters", () => {
const requiredParams = EDIT_LINES_TOOL.parameters.filter((p) => p.required)
const names = requiredParams.map((p) => p.name)
expect(names).toContain("path")
expect(names).toContain("start")
expect(names).toContain("end")
expect(names).toContain("content")
})
})
describe("GIT_STATUS_TOOL", () => {
it("should have no required parameters", () => {
expect(GIT_STATUS_TOOL.parameters).toHaveLength(0)
})
})
describe("GET_TODOS_TOOL", () => {
it("should have enum for type parameter", () => {
const typeParam = GET_TODOS_TOOL.parameters.find((p) => p.name === "type")
expect(typeParam?.enum).toEqual(["TODO", "FIXME", "HACK", "XXX"])
})
})
})
describe("CONFIRMATION_TOOLS", () => {
it("should be a Set", () => {
expect(CONFIRMATION_TOOLS instanceof Set).toBe(true)
})
it("should contain edit and git_commit tools", () => {
expect(CONFIRMATION_TOOLS.has("edit_lines")).toBe(true)
expect(CONFIRMATION_TOOLS.has("create_file")).toBe(true)
expect(CONFIRMATION_TOOLS.has("delete_file")).toBe(true)
expect(CONFIRMATION_TOOLS.has("git_commit")).toBe(true)
})
it("should not contain read tools", () => {
expect(CONFIRMATION_TOOLS.has("get_lines")).toBe(false)
expect(CONFIRMATION_TOOLS.has("get_function")).toBe(false)
})
})
describe("requiresConfirmation", () => {
it("should return true for edit tools", () => {
expect(requiresConfirmation("edit_lines")).toBe(true)
expect(requiresConfirmation("create_file")).toBe(true)
expect(requiresConfirmation("delete_file")).toBe(true)
})
it("should return true for git_commit", () => {
expect(requiresConfirmation("git_commit")).toBe(true)
})
it("should return false for read tools", () => {
expect(requiresConfirmation("get_lines")).toBe(false)
expect(requiresConfirmation("get_function")).toBe(false)
expect(requiresConfirmation("get_structure")).toBe(false)
})
it("should return false for analysis tools", () => {
expect(requiresConfirmation("get_dependencies")).toBe(false)
expect(requiresConfirmation("get_complexity")).toBe(false)
})
it("should return false for unknown tools", () => {
expect(requiresConfirmation("unknown_tool")).toBe(false)
})
})
describe("getToolDef", () => {
it("should return tool definition by name", () => {
const tool = getToolDef("get_lines")
expect(tool).toBe(GET_LINES_TOOL)
})
it("should return undefined for unknown tool", () => {
const tool = getToolDef("unknown_tool")
expect(tool).toBeUndefined()
})
it("should find all 18 tools", () => {
const names = [
"get_lines",
"get_function",
"get_class",
"get_structure",
"edit_lines",
"create_file",
"delete_file",
"find_references",
"find_definition",
"get_dependencies",
"get_dependents",
"get_complexity",
"get_todos",
"git_status",
"git_diff",
"git_commit",
"run_command",
"run_tests",
]
for (const name of names) {
expect(getToolDef(name)).toBeDefined()
}
})
})
describe("getToolsByCategory", () => {
it("should return read tools", () => {
expect(getToolsByCategory("read")).toBe(READ_TOOLS)
})
it("should return edit tools", () => {
expect(getToolsByCategory("edit")).toBe(EDIT_TOOLS)
})
it("should return search tools", () => {
expect(getToolsByCategory("search")).toBe(SEARCH_TOOLS)
})
it("should return analysis tools", () => {
expect(getToolsByCategory("analysis")).toBe(ANALYSIS_TOOLS)
})
it("should return git tools", () => {
expect(getToolsByCategory("git")).toBe(GIT_TOOLS)
})
it("should return run tools", () => {
expect(getToolsByCategory("run")).toBe(RUN_TOOLS)
})
it("should return empty array for unknown category", () => {
expect(getToolsByCategory("unknown")).toEqual([])
})
})
})

View File

@@ -0,0 +1,177 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
import type { RedisConfig } from "../../../../src/shared/constants/config.js"
import { IpuaroError } from "../../../../src/shared/errors/IpuaroError.js"
// Shared ioredis mock. Individual tests mutate `status` and the per-method
// mocks directly; the suite's beforeEach restores the defaults.
const mockRedisInstance = {
    connect: vi.fn(),
    quit: vi.fn(),
    ping: vi.fn(),
    config: vi.fn(),
    status: "ready" as string,
}
// Replace the ioredis module with a factory that always hands back the
// shared instance above.
vi.mock("ioredis", () => {
    return {
        Redis: vi.fn(() => mockRedisInstance),
    }
})
// Top-level-await dynamic import so RedisClient is evaluated only after
// the ioredis mock has been registered.
const { RedisClient } = await import("../../../../src/infrastructure/storage/RedisClient.js")
describe("RedisClient", () => {
const defaultConfig: RedisConfig = {
host: "localhost",
port: 6379,
db: 0,
keyPrefix: "ipuaro:",
}
beforeEach(() => {
vi.clearAllMocks()
mockRedisInstance.status = "ready"
mockRedisInstance.connect.mockResolvedValue(undefined)
mockRedisInstance.quit.mockResolvedValue(undefined)
mockRedisInstance.ping.mockResolvedValue("PONG")
mockRedisInstance.config.mockResolvedValue(undefined)
})
afterEach(() => {
vi.restoreAllMocks()
})
describe("constructor", () => {
it("should create instance with config", () => {
const client = new RedisClient(defaultConfig)
expect(client).toBeDefined()
expect(client.isConnected()).toBe(false)
})
})
describe("connect", () => {
it("should connect to Redis", async () => {
const client = new RedisClient(defaultConfig)
await client.connect()
expect(mockRedisInstance.connect).toHaveBeenCalled()
expect(client.isConnected()).toBe(true)
})
it("should configure AOF on connect", async () => {
const client = new RedisClient(defaultConfig)
await client.connect()
expect(mockRedisInstance.config).toHaveBeenCalledWith("SET", "appendonly", "yes")
expect(mockRedisInstance.config).toHaveBeenCalledWith("SET", "appendfsync", "everysec")
})
it("should not reconnect if already connected", async () => {
const client = new RedisClient(defaultConfig)
await client.connect()
await client.connect()
expect(mockRedisInstance.connect).toHaveBeenCalledTimes(1)
})
it("should throw IpuaroError on connection failure", async () => {
mockRedisInstance.connect.mockRejectedValue(new Error("Connection refused"))
const client = new RedisClient(defaultConfig)
await expect(client.connect()).rejects.toThrow(IpuaroError)
await expect(client.connect()).rejects.toMatchObject({
type: "redis",
})
})
it("should handle AOF config failure gracefully", async () => {
mockRedisInstance.config.mockRejectedValue(new Error("CONFIG disabled"))
const client = new RedisClient(defaultConfig)
await client.connect()
expect(client.isConnected()).toBe(true)
})
})
describe("disconnect", () => {
it("should disconnect from Redis", async () => {
const client = new RedisClient(defaultConfig)
await client.connect()
await client.disconnect()
expect(mockRedisInstance.quit).toHaveBeenCalled()
expect(client.isConnected()).toBe(false)
})
it("should handle disconnect when not connected", async () => {
const client = new RedisClient(defaultConfig)
await client.disconnect()
expect(mockRedisInstance.quit).not.toHaveBeenCalled()
})
})
describe("isConnected", () => {
it("should return false when not connected", () => {
const client = new RedisClient(defaultConfig)
expect(client.isConnected()).toBe(false)
})
it("should return true when connected and ready", async () => {
const client = new RedisClient(defaultConfig)
await client.connect()
expect(client.isConnected()).toBe(true)
})
it("should return false when client status is not ready", async () => {
const client = new RedisClient(defaultConfig)
await client.connect()
mockRedisInstance.status = "connecting"
expect(client.isConnected()).toBe(false)
})
})
describe("getClient", () => {
it("should return Redis client when connected", async () => {
const client = new RedisClient(defaultConfig)
await client.connect()
const redis = client.getClient()
expect(redis).toBe(mockRedisInstance)
})
it("should throw when not connected", () => {
const client = new RedisClient(defaultConfig)
expect(() => client.getClient()).toThrow(IpuaroError)
expect(() => client.getClient()).toThrow("not connected")
})
})
describe("ping", () => {
it("should return true on successful ping", async () => {
const client = new RedisClient(defaultConfig)
await client.connect()
const result = await client.ping()
expect(result).toBe(true)
})
it("should return false when not connected", async () => {
const client = new RedisClient(defaultConfig)
const result = await client.ping()
expect(result).toBe(false)
})
it("should return false on ping failure", async () => {
mockRedisInstance.ping.mockRejectedValue(new Error("Timeout"))
const client = new RedisClient(defaultConfig)
await client.connect()
const result = await client.ping()
expect(result).toBe(false)
})
})
})

View File

@@ -0,0 +1,425 @@
import { describe, it, expect, vi, beforeEach } from "vitest"
import { RedisStorage } from "../../../../src/infrastructure/storage/RedisStorage.js"
import { RedisClient } from "../../../../src/infrastructure/storage/RedisClient.js"
import type { FileData } from "../../../../src/domain/value-objects/FileData.js"
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
import type { FileMeta } from "../../../../src/domain/value-objects/FileMeta.js"
import type { SymbolIndex, DepsGraph } from "../../../../src/domain/services/IStorage.js"
import { IpuaroError } from "../../../../src/shared/errors/IpuaroError.js"
describe("RedisStorage", () => {
const projectName = "test-project"
let mockRedis: {
hget: ReturnType<typeof vi.fn>
hset: ReturnType<typeof vi.fn>
hdel: ReturnType<typeof vi.fn>
hgetall: ReturnType<typeof vi.fn>
hlen: ReturnType<typeof vi.fn>
del: ReturnType<typeof vi.fn>
}
let mockClient: {
connect: ReturnType<typeof vi.fn>
disconnect: ReturnType<typeof vi.fn>
isConnected: ReturnType<typeof vi.fn>
getClient: ReturnType<typeof vi.fn>
}
let storage: RedisStorage
beforeEach(() => {
mockRedis = {
hget: vi.fn(),
hset: vi.fn(),
hdel: vi.fn(),
hgetall: vi.fn(),
hlen: vi.fn(),
del: vi.fn(),
}
mockClient = {
connect: vi.fn().mockResolvedValue(undefined),
disconnect: vi.fn().mockResolvedValue(undefined),
isConnected: vi.fn().mockReturnValue(true),
getClient: vi.fn().mockReturnValue(mockRedis),
}
storage = new RedisStorage(mockClient as unknown as RedisClient, projectName)
})
describe("File operations", () => {
const testFile: FileData = {
lines: ["line1", "line2"],
hash: "abc123",
size: 100,
lastModified: Date.now(),
}
describe("getFile", () => {
it("should return file data when exists", async () => {
mockRedis.hget.mockResolvedValue(JSON.stringify(testFile))
const result = await storage.getFile("src/index.ts")
expect(result).toEqual(testFile)
expect(mockRedis.hget).toHaveBeenCalledWith(
`project:${projectName}:files`,
"src/index.ts",
)
})
it("should return null when file not found", async () => {
mockRedis.hget.mockResolvedValue(null)
const result = await storage.getFile("nonexistent.ts")
expect(result).toBeNull()
})
it("should throw on invalid JSON", async () => {
mockRedis.hget.mockResolvedValue("invalid json")
await expect(storage.getFile("test.ts")).rejects.toThrow(IpuaroError)
})
})
describe("setFile", () => {
it("should store file data", async () => {
await storage.setFile("src/index.ts", testFile)
expect(mockRedis.hset).toHaveBeenCalledWith(
`project:${projectName}:files`,
"src/index.ts",
JSON.stringify(testFile),
)
})
})
describe("deleteFile", () => {
it("should delete file data", async () => {
await storage.deleteFile("src/index.ts")
expect(mockRedis.hdel).toHaveBeenCalledWith(
`project:${projectName}:files`,
"src/index.ts",
)
})
})
describe("getAllFiles", () => {
it("should return all files as Map", async () => {
mockRedis.hgetall.mockResolvedValue({
"src/a.ts": JSON.stringify(testFile),
"src/b.ts": JSON.stringify({ ...testFile, hash: "def456" }),
})
const result = await storage.getAllFiles()
expect(result).toBeInstanceOf(Map)
expect(result.size).toBe(2)
expect(result.get("src/a.ts")).toEqual(testFile)
})
it("should return empty Map when no files", async () => {
mockRedis.hgetall.mockResolvedValue({})
const result = await storage.getAllFiles()
expect(result.size).toBe(0)
})
})
describe("getFileCount", () => {
it("should return file count", async () => {
mockRedis.hlen.mockResolvedValue(42)
const result = await storage.getFileCount()
expect(result).toBe(42)
})
})
})
describe("AST operations", () => {
const testAST: FileAST = {
imports: [],
exports: [],
functions: [],
classes: [],
interfaces: [],
typeAliases: [],
parseError: false,
}
describe("getAST", () => {
it("should return AST when exists", async () => {
mockRedis.hget.mockResolvedValue(JSON.stringify(testAST))
const result = await storage.getAST("src/index.ts")
expect(result).toEqual(testAST)
})
it("should return null when not found", async () => {
mockRedis.hget.mockResolvedValue(null)
const result = await storage.getAST("nonexistent.ts")
expect(result).toBeNull()
})
})
describe("setAST", () => {
it("should store AST", async () => {
await storage.setAST("src/index.ts", testAST)
expect(mockRedis.hset).toHaveBeenCalledWith(
`project:${projectName}:ast`,
"src/index.ts",
JSON.stringify(testAST),
)
})
})
describe("deleteAST", () => {
it("should delete AST", async () => {
await storage.deleteAST("src/index.ts")
expect(mockRedis.hdel).toHaveBeenCalledWith(
`project:${projectName}:ast`,
"src/index.ts",
)
})
})
describe("getAllASTs", () => {
it("should return all ASTs as Map", async () => {
mockRedis.hgetall.mockResolvedValue({
"src/a.ts": JSON.stringify(testAST),
})
const result = await storage.getAllASTs()
expect(result).toBeInstanceOf(Map)
expect(result.size).toBe(1)
})
})
})
describe("Meta operations", () => {
const testMeta: FileMeta = {
complexity: { loc: 10, nesting: 2, cyclomaticComplexity: 5, score: 20 },
dependencies: ["./other.ts"],
dependents: [],
isHub: false,
isEntryPoint: false,
fileType: "source",
}
describe("getMeta", () => {
it("should return meta when exists", async () => {
mockRedis.hget.mockResolvedValue(JSON.stringify(testMeta))
const result = await storage.getMeta("src/index.ts")
expect(result).toEqual(testMeta)
})
it("should return null when not found", async () => {
mockRedis.hget.mockResolvedValue(null)
const result = await storage.getMeta("nonexistent.ts")
expect(result).toBeNull()
})
})
describe("setMeta", () => {
it("should store meta", async () => {
await storage.setMeta("src/index.ts", testMeta)
expect(mockRedis.hset).toHaveBeenCalledWith(
`project:${projectName}:meta`,
"src/index.ts",
JSON.stringify(testMeta),
)
})
})
describe("deleteMeta", () => {
it("should delete meta", async () => {
await storage.deleteMeta("src/index.ts")
expect(mockRedis.hdel).toHaveBeenCalledWith(
`project:${projectName}:meta`,
"src/index.ts",
)
})
})
describe("getAllMetas", () => {
it("should return all metas as Map", async () => {
mockRedis.hgetall.mockResolvedValue({
"src/a.ts": JSON.stringify(testMeta),
})
const result = await storage.getAllMetas()
expect(result).toBeInstanceOf(Map)
expect(result.size).toBe(1)
})
})
})
describe("Index operations", () => {
describe("getSymbolIndex", () => {
it("should return symbol index", async () => {
const index: [string, { path: string; line: number; type: string }[]][] = [
["MyClass", [{ path: "src/index.ts", line: 10, type: "class" }]],
]
mockRedis.hget.mockResolvedValue(JSON.stringify(index))
const result = await storage.getSymbolIndex()
expect(result).toBeInstanceOf(Map)
expect(result.get("MyClass")).toBeDefined()
})
it("should return empty Map when not found", async () => {
mockRedis.hget.mockResolvedValue(null)
const result = await storage.getSymbolIndex()
expect(result.size).toBe(0)
})
})
describe("setSymbolIndex", () => {
it("should store symbol index", async () => {
const index: SymbolIndex = new Map([
["MyClass", [{ path: "src/index.ts", line: 10, type: "class" }]],
])
await storage.setSymbolIndex(index)
expect(mockRedis.hset).toHaveBeenCalledWith(
`project:${projectName}:indexes`,
"symbols",
expect.any(String),
)
})
})
describe("getDepsGraph", () => {
it("should return deps graph", async () => {
const graph = {
imports: [["a.ts", ["b.ts"]]],
importedBy: [["b.ts", ["a.ts"]]],
}
mockRedis.hget.mockResolvedValue(JSON.stringify(graph))
const result = await storage.getDepsGraph()
expect(result.imports).toBeInstanceOf(Map)
expect(result.importedBy).toBeInstanceOf(Map)
})
it("should return empty graph when not found", async () => {
mockRedis.hget.mockResolvedValue(null)
const result = await storage.getDepsGraph()
expect(result.imports.size).toBe(0)
expect(result.importedBy.size).toBe(0)
})
})
describe("setDepsGraph", () => {
it("should store deps graph", async () => {
const graph: DepsGraph = {
imports: new Map([["a.ts", ["b.ts"]]]),
importedBy: new Map([["b.ts", ["a.ts"]]]),
}
await storage.setDepsGraph(graph)
expect(mockRedis.hset).toHaveBeenCalledWith(
`project:${projectName}:indexes`,
"deps_graph",
expect.any(String),
)
})
})
})
describe("Config operations", () => {
describe("getProjectConfig", () => {
it("should return config value", async () => {
mockRedis.hget.mockResolvedValue(JSON.stringify({ key: "value" }))
const result = await storage.getProjectConfig("settings")
expect(result).toEqual({ key: "value" })
})
it("should return null when not found", async () => {
mockRedis.hget.mockResolvedValue(null)
const result = await storage.getProjectConfig("nonexistent")
expect(result).toBeNull()
})
})
describe("setProjectConfig", () => {
it("should store config value", async () => {
await storage.setProjectConfig("settings", { key: "value" })
expect(mockRedis.hset).toHaveBeenCalledWith(
`project:${projectName}:config`,
"settings",
JSON.stringify({ key: "value" }),
)
})
})
})
describe("Lifecycle operations", () => {
describe("connect", () => {
it("should delegate to client", async () => {
await storage.connect()
expect(mockClient.connect).toHaveBeenCalled()
})
})
describe("disconnect", () => {
it("should delegate to client", async () => {
await storage.disconnect()
expect(mockClient.disconnect).toHaveBeenCalled()
})
})
describe("isConnected", () => {
it("should delegate to client", () => {
mockClient.isConnected.mockReturnValue(true)
expect(storage.isConnected()).toBe(true)
})
})
describe("clear", () => {
it("should delete all project keys", async () => {
mockRedis.del.mockResolvedValue(1)
await storage.clear()
expect(mockRedis.del).toHaveBeenCalledTimes(5)
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:files`)
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:ast`)
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:meta`)
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:indexes`)
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:config`)
})
})
})
})

View File

@@ -0,0 +1,110 @@
import { describe, it, expect } from "vitest"
import {
ProjectKeys,
SessionKeys,
IndexFields,
SessionFields,
generateProjectName,
} from "../../../../src/infrastructure/storage/schema.js"
describe("schema", () => {
describe("ProjectKeys", () => {
it("should generate files key", () => {
expect(ProjectKeys.files("myproject")).toBe("project:myproject:files")
})
it("should generate ast key", () => {
expect(ProjectKeys.ast("myproject")).toBe("project:myproject:ast")
})
it("should generate meta key", () => {
expect(ProjectKeys.meta("myproject")).toBe("project:myproject:meta")
})
it("should generate indexes key", () => {
expect(ProjectKeys.indexes("myproject")).toBe("project:myproject:indexes")
})
it("should generate config key", () => {
expect(ProjectKeys.config("myproject")).toBe("project:myproject:config")
})
})
describe("SessionKeys", () => {
it("should generate data key", () => {
expect(SessionKeys.data("session-123")).toBe("session:session-123:data")
})
it("should generate undo key", () => {
expect(SessionKeys.undo("session-123")).toBe("session:session-123:undo")
})
it("should have list key", () => {
expect(SessionKeys.list).toBe("sessions:list")
})
})
describe("IndexFields", () => {
it("should have symbols field", () => {
expect(IndexFields.symbols).toBe("symbols")
})
it("should have depsGraph field", () => {
expect(IndexFields.depsGraph).toBe("deps_graph")
})
})
describe("SessionFields", () => {
it("should have all required fields", () => {
expect(SessionFields.history).toBe("history")
expect(SessionFields.context).toBe("context")
expect(SessionFields.stats).toBe("stats")
expect(SessionFields.inputHistory).toBe("input_history")
expect(SessionFields.createdAt).toBe("created_at")
expect(SessionFields.lastActivityAt).toBe("last_activity_at")
expect(SessionFields.projectName).toBe("project_name")
})
})
describe("generateProjectName", () => {
it("should generate name from path with two parts", () => {
expect(generateProjectName("/home/user/projects/myapp")).toBe("projects-myapp")
})
it("should generate name from single directory", () => {
expect(generateProjectName("/app")).toBe("app")
})
it("should handle root path", () => {
expect(generateProjectName("/")).toBe("root")
})
it("should handle empty path", () => {
expect(generateProjectName("")).toBe("root")
})
it("should handle trailing slashes", () => {
expect(generateProjectName("/home/user/projects/myapp/")).toBe("projects-myapp")
})
it("should handle Windows paths", () => {
expect(generateProjectName("C:\\Users\\projects\\myapp")).toBe("projects-myapp")
})
it("should sanitize special characters", () => {
expect(generateProjectName("/home/my project/my@app!")).toBe("my-project-my-app")
})
it("should convert to lowercase", () => {
expect(generateProjectName("/Home/User/MYAPP")).toBe("user-myapp")
})
it("should handle multiple consecutive special chars", () => {
expect(generateProjectName("/home/my___project")).toBe("home-my-project")
})
it("should handle relative paths", () => {
expect(generateProjectName("parent/child")).toBe("parent-child")
})
})
})

View File

@@ -1,5 +1,9 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
import { loadConfig, validateConfig, getConfigErrors } from "../../../../src/shared/config/loader.js"
import {
loadConfig,
validateConfig,
getConfigErrors,
} from "../../../../src/shared/config/loader.js"
import { DEFAULT_CONFIG } from "../../../../src/shared/constants/config.js"
import * as fs from "node:fs"
@@ -28,7 +32,7 @@ describe("config loader", () => {
return path === "/project/.ipuaro.json"
})
vi.mocked(fs.readFileSync).mockReturnValue(
JSON.stringify({ llm: { model: "custom-model" } })
JSON.stringify({ llm: { model: "custom-model" } }),
)
const config = loadConfig("/project")

View File

@@ -11,6 +11,7 @@
"declarationMap": true,
"sourceMap": true,
"strict": true,
"resolveJsonModule": true,
"skipLibCheck": true,
"esModuleInterop": true,
"resolvePackageJsonExports": true,

View File

@@ -8,11 +8,7 @@ export default defineConfig({
sourcemap: true,
splitting: false,
treeshake: true,
external: [
"tree-sitter",
"tree-sitter-typescript",
"tree-sitter-javascript",
],
external: ["tree-sitter", "tree-sitter-typescript", "tree-sitter-javascript"],
esbuildOptions(options) {
options.jsx = "automatic"
},

View File

@@ -9,11 +9,7 @@ export default defineConfig({
provider: "v8",
reporter: ["text", "html", "lcov"],
include: ["src/**/*.ts", "src/**/*.tsx"],
exclude: [
"src/**/*.d.ts",
"src/**/index.ts",
"src/**/*.test.ts",
],
exclude: ["src/**/*.d.ts", "src/**/index.ts", "src/**/*.test.ts"],
thresholds: {
lines: 80,
functions: 80,

43
pnpm-lock.yaml generated
View File

@@ -141,9 +141,9 @@ importers:
commander:
specifier: ^11.1.0
version: 11.1.0
ignore:
specifier: ^5.3.2
version: 5.3.2
globby:
specifier: ^16.0.0
version: 16.0.0
ink:
specifier: ^4.4.1
version: 4.4.1(@types/react@18.3.27)(react@18.3.1)
@@ -1471,6 +1471,10 @@ packages:
'@sinclair/typebox@0.34.41':
resolution: {integrity: sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==}
'@sindresorhus/merge-streams@4.0.0':
resolution: {integrity: sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==}
engines: {node: '>=18'}
'@sinonjs/commons@3.0.1':
resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==}
@@ -2732,6 +2736,10 @@ packages:
resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==}
engines: {node: '>=18'}
globby@16.0.0:
resolution: {integrity: sha512-ejy4TJFga99yW6Q0uhM3pFawKWZmtZzZD/v/GwI5+9bCV5Ew+D2pSND6W7fUes5UykqSsJkUfxFVdRh7Q1+P3Q==}
engines: {node: '>=20'}
gopd@1.2.0:
resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==}
engines: {node: '>= 0.4'}
@@ -2879,6 +2887,10 @@ packages:
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
engines: {node: '>=0.12.0'}
is-path-inside@4.0.0:
resolution: {integrity: sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA==}
engines: {node: '>=12'}
is-stream@2.0.1:
resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==}
engines: {node: '>=8'}
@@ -3712,6 +3724,10 @@ packages:
resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
engines: {node: '>=8'}
slash@5.1.0:
resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==}
engines: {node: '>=14.16'}
slice-ansi@4.0.0:
resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==}
engines: {node: '>=10'}
@@ -4128,6 +4144,10 @@ packages:
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
unicorn-magic@0.4.0:
resolution: {integrity: sha512-wH590V9VNgYH9g3lH9wWjTrUoKsjLF6sGLjhR4sH1LWpLmCOH0Zf7PukhDA8BiS7KHe4oPNkcTHqYkj7SOGUOw==}
engines: {node: '>=20'}
universalify@2.0.1:
resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==}
engines: {node: '>= 10.0.0'}
@@ -5614,6 +5634,8 @@ snapshots:
'@sinclair/typebox@0.34.41': {}
'@sindresorhus/merge-streams@4.0.0': {}
'@sinonjs/commons@3.0.1':
dependencies:
type-detect: 4.0.8
@@ -7072,6 +7094,15 @@ snapshots:
globals@16.5.0: {}
globby@16.0.0:
dependencies:
'@sindresorhus/merge-streams': 4.0.0
fast-glob: 3.3.3
ignore: 7.0.5
is-path-inside: 4.0.0
slash: 5.1.0
unicorn-magic: 0.4.0
gopd@1.2.0: {}
graceful-fs@4.2.11: {}
@@ -7221,6 +7252,8 @@ snapshots:
is-number@7.0.0: {}
is-path-inside@4.0.0: {}
is-stream@2.0.1: {}
is-stream@3.0.0: {}
@@ -8203,6 +8236,8 @@ snapshots:
slash@3.0.0: {}
slash@5.1.0: {}
slice-ansi@4.0.0:
dependencies:
ansi-styles: 4.3.0
@@ -8610,6 +8645,8 @@ snapshots:
undici-types@6.21.0: {}
unicorn-magic@0.4.0: {}
universalify@2.0.1: {}
unrs-resolver@1.11.1: