mirror of
https://github.com/samiyev/puaros.git
synced 2025-12-28 07:16:53 +05:00
Compare commits
10 Commits
guardian-v
...
ipuaro-v0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
25146003cc | ||
|
|
68f927d906 | ||
|
|
b3e04a411c | ||
|
|
294d085ad4 | ||
|
|
958e4daed5 | ||
|
|
6234fbce92 | ||
|
|
af9c2377a0 | ||
|
|
d0c1ddc22e | ||
|
|
225480c806 | ||
|
|
fd8e97af0e |
29
CLAUDE.md
29
CLAUDE.md
@@ -447,6 +447,35 @@ Copy and use for each release:
|
|||||||
- [ ] Published to npm (if public release)
|
- [ ] Published to npm (if public release)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Working with Roadmap
|
||||||
|
|
||||||
|
When the user points to `ROADMAP.md` or asks about the roadmap/next steps:
|
||||||
|
|
||||||
|
1. **Read both files together:**
|
||||||
|
- `packages/<package>/ROADMAP.md` - to understand the planned features and milestones
|
||||||
|
- `packages/<package>/CHANGELOG.md` - to see what's already implemented
|
||||||
|
|
||||||
|
2. **Determine current position:**
|
||||||
|
- Check the latest version in CHANGELOG.md
|
||||||
|
- Cross-reference with ROADMAP.md milestones
|
||||||
|
- Identify which roadmap items are already completed (present in CHANGELOG)
|
||||||
|
|
||||||
|
3. **Suggest next steps:**
|
||||||
|
- Find the first uncompleted item in the current milestone
|
||||||
|
- Or identify the next milestone if current one is complete
|
||||||
|
- Present clear "start here" recommendation
|
||||||
|
|
||||||
|
**Example workflow:**
|
||||||
|
```
|
||||||
|
User: "Let's work on the roadmap" or points to ROADMAP.md
|
||||||
|
|
||||||
|
Claude should:
|
||||||
|
1. Read ROADMAP.md → See milestones v0.1.0, v0.2.0, v0.3.0...
|
||||||
|
2. Read CHANGELOG.md → See latest release is v0.1.1
|
||||||
|
3. Compare → v0.1.0 milestone complete, v0.2.0 in progress
|
||||||
|
4. Report → "v0.1.0 is complete. For v0.2.0, next item is: <feature>"
|
||||||
|
```
|
||||||
|
|
||||||
## Common Workflows
|
## Common Workflows
|
||||||
|
|
||||||
### Adding a new CLI option
|
### Adding a new CLI option
|
||||||
|
|||||||
@@ -74,6 +74,7 @@ export default tseslint.config(
|
|||||||
'@typescript-eslint/require-await': 'warn',
|
'@typescript-eslint/require-await': 'warn',
|
||||||
'@typescript-eslint/no-unnecessary-condition': 'off', // Sometimes useful for defensive coding
|
'@typescript-eslint/no-unnecessary-condition': 'off', // Sometimes useful for defensive coding
|
||||||
'@typescript-eslint/no-non-null-assertion': 'warn',
|
'@typescript-eslint/no-non-null-assertion': 'warn',
|
||||||
|
'@typescript-eslint/no-unnecessary-type-parameters': 'warn', // Allow generic JSON parsers
|
||||||
|
|
||||||
// ========================================
|
// ========================================
|
||||||
// Code Quality & Best Practices
|
// Code Quality & Best Practices
|
||||||
|
|||||||
@@ -5,6 +5,26 @@ All notable changes to @samiyev/guardian will be documented in this file.
|
|||||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [0.9.4] - 2025-11-30
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **VERSION export** - Package version is now exported from index.ts, automatically read from package.json
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- 🔄 **Refactored SecretDetector** - Reduced cyclomatic complexity from 24 to <15:
|
||||||
|
- Extracted helper methods: `extractByRuleId`, `extractAwsType`, `extractGithubType`, `extractSshType`, `extractSlackType`, `extractByMessage`
|
||||||
|
- Used lookup arrays for SSH and message type mappings
|
||||||
|
- 🔄 **Refactored AstNamingTraverser** - Reduced cyclomatic complexity from 17 to <15:
|
||||||
|
- Replaced if-else chain with Map-based node handlers
|
||||||
|
- Added `buildNodeHandlers()` method for cleaner architecture
|
||||||
|
|
||||||
|
### Quality
|
||||||
|
|
||||||
|
- ✅ **Zero lint warnings** - All ESLint warnings resolved
|
||||||
|
- ✅ **All 616 tests pass**
|
||||||
|
|
||||||
## [0.9.2] - 2025-11-27
|
## [0.9.2] - 2025-11-27
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@samiyev/guardian",
|
"name": "@samiyev/guardian",
|
||||||
"version": "0.9.3",
|
"version": "0.9.4",
|
||||||
"description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
|
"description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"puaros",
|
"puaros",
|
||||||
@@ -40,7 +40,7 @@
|
|||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/samiyev/puaros.git",
|
"url": "git+https://github.com/samiyev/puaros.git",
|
||||||
"directory": "packages/guardian"
|
"directory": "packages/guardian"
|
||||||
},
|
},
|
||||||
"bugs": {
|
"bugs": {
|
||||||
|
|||||||
@@ -215,6 +215,7 @@ export class AnalyzeProject extends UseCase<
|
|||||||
private readonly detectionPipeline: ExecuteDetection
|
private readonly detectionPipeline: ExecuteDetection
|
||||||
private readonly resultAggregator: AggregateResults
|
private readonly resultAggregator: AggregateResults
|
||||||
|
|
||||||
|
// eslint-disable-next-line max-params
|
||||||
constructor(
|
constructor(
|
||||||
fileScanner: IFileScanner,
|
fileScanner: IFileScanner,
|
||||||
codeParser: ICodeParser,
|
codeParser: ICodeParser,
|
||||||
|
|||||||
@@ -56,6 +56,7 @@ export interface DetectionResult {
|
|||||||
* Pipeline step responsible for running all detectors
|
* Pipeline step responsible for running all detectors
|
||||||
*/
|
*/
|
||||||
export class ExecuteDetection {
|
export class ExecuteDetection {
|
||||||
|
// eslint-disable-next-line max-params
|
||||||
constructor(
|
constructor(
|
||||||
private readonly hardcodeDetector: IHardcodeDetector,
|
private readonly hardcodeDetector: IHardcodeDetector,
|
||||||
private readonly namingConventionDetector: INamingConventionDetector,
|
private readonly namingConventionDetector: INamingConventionDetector,
|
||||||
|
|||||||
@@ -171,6 +171,7 @@ export class HardcodedValue extends ValueObject<HardcodedValueProps> {
|
|||||||
return `${CONSTANT_NAMES.MAGIC_NUMBER}_${String(value)}`
|
return `${CONSTANT_NAMES.MAGIC_NUMBER}_${String(value)}`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// eslint-disable-next-line complexity, max-lines-per-function
|
||||||
private suggestStringConstantName(): string {
|
private suggestStringConstantName(): string {
|
||||||
const value = String(this.props.value)
|
const value = String(this.props.value)
|
||||||
const context = this.props.context.toLowerCase()
|
const context = this.props.context.toLowerCase()
|
||||||
|
|||||||
@@ -1,3 +1,7 @@
|
|||||||
|
import pkg from "../package.json"
|
||||||
|
|
||||||
|
export const VERSION = pkg.version
|
||||||
|
|
||||||
export * from "./domain"
|
export * from "./domain"
|
||||||
export * from "./application"
|
export * from "./application"
|
||||||
export * from "./infrastructure"
|
export * from "./infrastructure"
|
||||||
|
|||||||
@@ -90,80 +90,98 @@ export class SecretDetector implements ISecretDetector {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private extractSecretType(message: string, ruleId: string): string {
|
private extractSecretType(message: string, ruleId: string): string {
|
||||||
|
const lowerMessage = message.toLowerCase()
|
||||||
|
|
||||||
|
const ruleBasedType = this.extractByRuleId(ruleId, lowerMessage)
|
||||||
|
if (ruleBasedType) {
|
||||||
|
return ruleBasedType
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.extractByMessage(lowerMessage)
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractByRuleId(ruleId: string, lowerMessage: string): string | null {
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.AWS)) {
|
if (ruleId.includes(SECRET_KEYWORDS.AWS)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ACCESS_KEY)) {
|
return this.extractAwsType(lowerMessage)
|
||||||
|
}
|
||||||
|
if (ruleId.includes(SECRET_KEYWORDS.GITHUB)) {
|
||||||
|
return this.extractGithubType(lowerMessage)
|
||||||
|
}
|
||||||
|
if (ruleId.includes(SECRET_KEYWORDS.NPM)) {
|
||||||
|
return SECRET_TYPE_NAMES.NPM_TOKEN
|
||||||
|
}
|
||||||
|
if (ruleId.includes(SECRET_KEYWORDS.GCP) || ruleId.includes(SECRET_KEYWORDS.GOOGLE)) {
|
||||||
|
return SECRET_TYPE_NAMES.GCP_SERVICE_ACCOUNT_KEY
|
||||||
|
}
|
||||||
|
if (ruleId.includes(SECRET_KEYWORDS.PRIVATEKEY) || ruleId.includes(SECRET_KEYWORDS.SSH)) {
|
||||||
|
return this.extractSshType(lowerMessage)
|
||||||
|
}
|
||||||
|
if (ruleId.includes(SECRET_KEYWORDS.SLACK)) {
|
||||||
|
return this.extractSlackType(lowerMessage)
|
||||||
|
}
|
||||||
|
if (ruleId.includes(SECRET_KEYWORDS.BASICAUTH)) {
|
||||||
|
return SECRET_TYPE_NAMES.BASIC_AUTH_CREDENTIALS
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractAwsType(lowerMessage: string): string {
|
||||||
|
if (lowerMessage.includes(SECRET_KEYWORDS.ACCESS_KEY)) {
|
||||||
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
|
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
|
||||||
}
|
}
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
|
if (lowerMessage.includes(SECRET_KEYWORDS.SECRET)) {
|
||||||
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
|
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
|
||||||
}
|
}
|
||||||
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
|
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.GITHUB)) {
|
private extractGithubType(lowerMessage: string): string {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
|
if (lowerMessage.includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
|
||||||
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
|
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
|
||||||
}
|
}
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.OAUTH)) {
|
if (lowerMessage.includes(SECRET_KEYWORDS.OAUTH)) {
|
||||||
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
|
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
|
||||||
}
|
}
|
||||||
return SECRET_TYPE_NAMES.GITHUB_TOKEN
|
return SECRET_TYPE_NAMES.GITHUB_TOKEN
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.NPM)) {
|
private extractSshType(lowerMessage: string): string {
|
||||||
return SECRET_TYPE_NAMES.NPM_TOKEN
|
const sshTypeMap: [string, string][] = [
|
||||||
|
[SECRET_KEYWORDS.RSA, SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY],
|
||||||
|
[SECRET_KEYWORDS.DSA, SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY],
|
||||||
|
[SECRET_KEYWORDS.ECDSA, SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY],
|
||||||
|
[SECRET_KEYWORDS.ED25519, SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY],
|
||||||
|
]
|
||||||
|
for (const [keyword, typeName] of sshTypeMap) {
|
||||||
|
if (lowerMessage.includes(keyword)) {
|
||||||
|
return typeName
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.GCP) || ruleId.includes(SECRET_KEYWORDS.GOOGLE)) {
|
|
||||||
return SECRET_TYPE_NAMES.GCP_SERVICE_ACCOUNT_KEY
|
|
||||||
}
|
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.PRIVATEKEY) || ruleId.includes(SECRET_KEYWORDS.SSH)) {
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.RSA)) {
|
|
||||||
return SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.DSA)) {
|
|
||||||
return SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ECDSA)) {
|
|
||||||
return SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ED25519)) {
|
|
||||||
return SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY
|
|
||||||
}
|
}
|
||||||
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
|
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.SLACK)) {
|
private extractSlackType(lowerMessage: string): string {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.BOT)) {
|
if (lowerMessage.includes(SECRET_KEYWORDS.BOT)) {
|
||||||
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
|
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
|
||||||
}
|
}
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.USER)) {
|
if (lowerMessage.includes(SECRET_KEYWORDS.USER)) {
|
||||||
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
|
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
|
||||||
}
|
}
|
||||||
return SECRET_TYPE_NAMES.SLACK_TOKEN
|
return SECRET_TYPE_NAMES.SLACK_TOKEN
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.BASICAUTH)) {
|
private extractByMessage(lowerMessage: string): string {
|
||||||
return SECRET_TYPE_NAMES.BASIC_AUTH_CREDENTIALS
|
const messageTypeMap: [string, string][] = [
|
||||||
|
[SECRET_KEYWORDS.API_KEY, SECRET_TYPE_NAMES.API_KEY],
|
||||||
|
[SECRET_KEYWORDS.TOKEN, SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN],
|
||||||
|
[SECRET_KEYWORDS.PASSWORD, SECRET_TYPE_NAMES.PASSWORD],
|
||||||
|
[SECRET_KEYWORDS.SECRET, SECRET_TYPE_NAMES.SECRET],
|
||||||
|
]
|
||||||
|
for (const [keyword, typeName] of messageTypeMap) {
|
||||||
|
if (lowerMessage.includes(keyword)) {
|
||||||
|
return typeName
|
||||||
}
|
}
|
||||||
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.API_KEY)) {
|
|
||||||
return SECRET_TYPE_NAMES.API_KEY
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.TOKEN)) {
|
|
||||||
return SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.PASSWORD)) {
|
|
||||||
return SECRET_TYPE_NAMES.PASSWORD
|
|
||||||
}
|
|
||||||
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
|
|
||||||
return SECRET_TYPE_NAMES.SECRET
|
|
||||||
}
|
|
||||||
|
|
||||||
return SECRET_TYPE_NAMES.SENSITIVE_DATA
|
return SECRET_TYPE_NAMES.SENSITIVE_DATA
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,6 +6,13 @@ import { AstFunctionNameAnalyzer } from "./AstFunctionNameAnalyzer"
|
|||||||
import { AstInterfaceNameAnalyzer } from "./AstInterfaceNameAnalyzer"
|
import { AstInterfaceNameAnalyzer } from "./AstInterfaceNameAnalyzer"
|
||||||
import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
|
import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
|
||||||
|
|
||||||
|
type NodeAnalyzer = (
|
||||||
|
node: Parser.SyntaxNode,
|
||||||
|
layer: string,
|
||||||
|
filePath: string,
|
||||||
|
lines: string[],
|
||||||
|
) => NamingViolation | null
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* AST tree traverser for detecting naming convention violations
|
* AST tree traverser for detecting naming convention violations
|
||||||
*
|
*
|
||||||
@@ -13,12 +20,16 @@ import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
|
|||||||
* to detect naming violations in classes, interfaces, functions, and variables.
|
* to detect naming violations in classes, interfaces, functions, and variables.
|
||||||
*/
|
*/
|
||||||
export class AstNamingTraverser {
|
export class AstNamingTraverser {
|
||||||
|
private readonly nodeHandlers: Map<string, NodeAnalyzer>
|
||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
private readonly classAnalyzer: AstClassNameAnalyzer,
|
private readonly classAnalyzer: AstClassNameAnalyzer,
|
||||||
private readonly interfaceAnalyzer: AstInterfaceNameAnalyzer,
|
private readonly interfaceAnalyzer: AstInterfaceNameAnalyzer,
|
||||||
private readonly functionAnalyzer: AstFunctionNameAnalyzer,
|
private readonly functionAnalyzer: AstFunctionNameAnalyzer,
|
||||||
private readonly variableAnalyzer: AstVariableNameAnalyzer,
|
private readonly variableAnalyzer: AstVariableNameAnalyzer,
|
||||||
) {}
|
) {
|
||||||
|
this.nodeHandlers = this.buildNodeHandlers()
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Traverses the AST tree and collects naming violations
|
* Traverses the AST tree and collects naming violations
|
||||||
@@ -38,6 +49,33 @@ export class AstNamingTraverser {
|
|||||||
return results
|
return results
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private buildNodeHandlers(): Map<string, NodeAnalyzer> {
|
||||||
|
const handlers = new Map<string, NodeAnalyzer>()
|
||||||
|
|
||||||
|
handlers.set(AST_CLASS_TYPES.CLASS_DECLARATION, (node, layer, filePath, lines) =>
|
||||||
|
this.classAnalyzer.analyze(node, layer, filePath, lines),
|
||||||
|
)
|
||||||
|
handlers.set(AST_CLASS_TYPES.INTERFACE_DECLARATION, (node, layer, filePath, lines) =>
|
||||||
|
this.interfaceAnalyzer.analyze(node, layer, filePath, lines),
|
||||||
|
)
|
||||||
|
|
||||||
|
const functionHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
|
||||||
|
this.functionAnalyzer.analyze(node, layer, filePath, lines)
|
||||||
|
handlers.set(AST_FUNCTION_TYPES.FUNCTION_DECLARATION, functionHandler)
|
||||||
|
handlers.set(AST_FUNCTION_TYPES.METHOD_DEFINITION, functionHandler)
|
||||||
|
handlers.set(AST_FUNCTION_TYPES.FUNCTION_SIGNATURE, functionHandler)
|
||||||
|
|
||||||
|
const variableHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
|
||||||
|
this.variableAnalyzer.analyze(node, layer, filePath, lines)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.VARIABLE_DECLARATOR, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.REQUIRED_PARAMETER, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.OPTIONAL_PARAMETER, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.PROPERTY_SIGNATURE, variableHandler)
|
||||||
|
|
||||||
|
return handlers
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Recursively visits AST nodes
|
* Recursively visits AST nodes
|
||||||
*/
|
*/
|
||||||
@@ -49,34 +87,10 @@ export class AstNamingTraverser {
|
|||||||
results: NamingViolation[],
|
results: NamingViolation[],
|
||||||
): void {
|
): void {
|
||||||
const node = cursor.currentNode
|
const node = cursor.currentNode
|
||||||
|
const handler = this.nodeHandlers.get(node.type)
|
||||||
|
|
||||||
if (node.type === AST_CLASS_TYPES.CLASS_DECLARATION) {
|
if (handler) {
|
||||||
const violation = this.classAnalyzer.analyze(node, layer, filePath, lines)
|
const violation = handler(node, layer, filePath, lines)
|
||||||
if (violation) {
|
|
||||||
results.push(violation)
|
|
||||||
}
|
|
||||||
} else if (node.type === AST_CLASS_TYPES.INTERFACE_DECLARATION) {
|
|
||||||
const violation = this.interfaceAnalyzer.analyze(node, layer, filePath, lines)
|
|
||||||
if (violation) {
|
|
||||||
results.push(violation)
|
|
||||||
}
|
|
||||||
} else if (
|
|
||||||
node.type === AST_FUNCTION_TYPES.FUNCTION_DECLARATION ||
|
|
||||||
node.type === AST_FUNCTION_TYPES.METHOD_DEFINITION ||
|
|
||||||
node.type === AST_FUNCTION_TYPES.FUNCTION_SIGNATURE
|
|
||||||
) {
|
|
||||||
const violation = this.functionAnalyzer.analyze(node, layer, filePath, lines)
|
|
||||||
if (violation) {
|
|
||||||
results.push(violation)
|
|
||||||
}
|
|
||||||
} else if (
|
|
||||||
node.type === AST_VARIABLE_TYPES.VARIABLE_DECLARATOR ||
|
|
||||||
node.type === AST_VARIABLE_TYPES.REQUIRED_PARAMETER ||
|
|
||||||
node.type === AST_VARIABLE_TYPES.OPTIONAL_PARAMETER ||
|
|
||||||
node.type === AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION ||
|
|
||||||
node.type === AST_VARIABLE_TYPES.PROPERTY_SIGNATURE
|
|
||||||
) {
|
|
||||||
const violation = this.variableAnalyzer.analyze(node, layer, filePath, lines)
|
|
||||||
if (violation) {
|
if (violation) {
|
||||||
results.push(violation)
|
results.push(violation)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,219 @@ All notable changes to this project will be documented in this file.
|
|||||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [0.5.0] - 2025-12-01 - Read Tools
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **ToolRegistry (0.5.1)**
|
||||||
|
- `IToolRegistry` implementation for managing tool lifecycle
|
||||||
|
- Methods: `register()`, `unregister()`, `get()`, `getAll()`, `getByCategory()`, `has()`
|
||||||
|
- `execute()`: Tool execution with validation and confirmation flow
|
||||||
|
- `getToolDefinitions()`: Convert tools to LLM-compatible JSON Schema format
|
||||||
|
- Helper methods: `getConfirmationTools()`, `getSafeTools()`, `getNames()`, `clear()`
|
||||||
|
- 34 unit tests
|
||||||
|
|
||||||
|
- **GetLinesTool (0.5.2)**
|
||||||
|
- `get_lines(path, start?, end?)`: Read file lines with line numbers
|
||||||
|
- Reads from Redis storage or filesystem fallback
|
||||||
|
- Line number formatting with proper padding
|
||||||
|
- Path validation (must be within project root)
|
||||||
|
- 25 unit tests
|
||||||
|
|
||||||
|
- **GetFunctionTool (0.5.3)**
|
||||||
|
- `get_function(path, name)`: Get function source by name
|
||||||
|
- Uses AST to find exact line range
|
||||||
|
- Returns metadata: isAsync, isExported, params, returnType
|
||||||
|
- Lists available functions if target not found
|
||||||
|
- 20 unit tests
|
||||||
|
|
||||||
|
- **GetClassTool (0.5.4)**
|
||||||
|
- `get_class(path, name)`: Get class source by name
|
||||||
|
- Uses AST to find exact line range
|
||||||
|
- Returns metadata: isAbstract, extends, implements, methods, properties
|
||||||
|
- Lists available classes if target not found
|
||||||
|
- 19 unit tests
|
||||||
|
|
||||||
|
- **GetStructureTool (0.5.5)**
|
||||||
|
- `get_structure(path?, depth?)`: Get directory tree
|
||||||
|
- ASCII tree output with 📁/📄 icons
|
||||||
|
- Filters: node_modules, .git, dist, coverage, etc.
|
||||||
|
- Directories sorted before files
|
||||||
|
- Stats: directory and file counts
|
||||||
|
- 23 unit tests
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 540 (was 419)
|
||||||
|
- Coverage: 96%+
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.4.0] - 2025-11-30 - LLM Integration
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **OllamaClient (0.4.1)**
|
||||||
|
- Full `ILLMClient` implementation for Ollama SDK
|
||||||
|
- Chat completion with tool/function calling support
|
||||||
|
- Token counting via estimation (Ollama has no tokenizer API)
|
||||||
|
- Model management: `pullModel()`, `hasModel()`, `listModels()`
|
||||||
|
- Connection status check: `isAvailable()`
|
||||||
|
- Request abortion support: `abort()`
|
||||||
|
- Error handling with `IpuaroError` for connection and model errors
|
||||||
|
- 21 unit tests
|
||||||
|
|
||||||
|
- **System Prompt & Context Builder (0.4.2)**
|
||||||
|
- `SYSTEM_PROMPT`: Comprehensive agent instructions with tool descriptions
|
||||||
|
- `buildInitialContext()`: Generates compact project overview from structure and ASTs
|
||||||
|
- `buildFileContext()`: Detailed file context with imports, exports, functions, classes
|
||||||
|
- `truncateContext()`: Token-aware context truncation
|
||||||
|
- Hub/entry point/complexity flags in file summaries
|
||||||
|
- 17 unit tests
|
||||||
|
|
||||||
|
- **Tool Definitions (0.4.3)**
|
||||||
|
- 18 tool definitions across 6 categories:
|
||||||
|
- Read: `get_lines`, `get_function`, `get_class`, `get_structure`
|
||||||
|
- Edit: `edit_lines`, `create_file`, `delete_file`
|
||||||
|
- Search: `find_references`, `find_definition`
|
||||||
|
- Analysis: `get_dependencies`, `get_dependents`, `get_complexity`, `get_todos`
|
||||||
|
- Git: `git_status`, `git_diff`, `git_commit`
|
||||||
|
- Run: `run_command`, `run_tests`
|
||||||
|
- Category groupings: `READ_TOOLS`, `EDIT_TOOLS`, etc.
|
||||||
|
- `CONFIRMATION_TOOLS` set for tools requiring user approval
|
||||||
|
- Helper functions: `requiresConfirmation()`, `getToolDef()`, `getToolsByCategory()`
|
||||||
|
- 39 unit tests
|
||||||
|
|
||||||
|
- **Response Parser (0.4.4)**
|
||||||
|
- XML tool call parsing: `<tool_call name="...">...</tool_call>`
|
||||||
|
- Parameter extraction from XML elements
|
||||||
|
- Type coercion: boolean, number, null, JSON arrays/objects
|
||||||
|
- `extractThinking()`: Extracts `<thinking>...</thinking>` blocks
|
||||||
|
- `hasToolCalls()`: Quick check for tool call presence
|
||||||
|
- `validateToolCallParams()`: Parameter validation against required list
|
||||||
|
- `formatToolCallsAsXml()`: Tool calls to XML for prompt injection
|
||||||
|
- 21 unit tests
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 419 (was 321)
|
||||||
|
- Coverage: 96.38%
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.3.1] - 2025-11-30
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **VERSION export** - Package version is now exported from index.ts, automatically read from package.json via `createRequire`
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- 🔄 **Refactored ASTParser** - Reduced complexity and nesting depth:
|
||||||
|
- Extracted `extractClassHeritage()`, `parseHeritageClause()`, `findTypeIdentifier()`, `collectImplements()` helper methods
|
||||||
|
- Max nesting depth reduced from 5 to 4
|
||||||
|
- 🔄 **Refactored RedisStorage** - Removed unnecessary type parameter from `parseJSON()` method
|
||||||
|
|
||||||
|
### Quality
|
||||||
|
|
||||||
|
- ✅ **Zero lint warnings** - All ESLint warnings resolved
|
||||||
|
- ✅ **All 321 tests pass**
|
||||||
|
|
||||||
|
## [0.3.0] - 2025-11-30 - Indexer
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **FileScanner (0.3.1)**
|
||||||
|
- Recursive directory scanning with async generator
|
||||||
|
- `.gitignore` support via `globby` (replaced `ignore` package for ESM compatibility)
|
||||||
|
- Filters: binary files, node_modules, dist, default ignore patterns
|
||||||
|
- Progress callback for UI integration
|
||||||
|
- `isTextFile()` and `readFileContent()` static utilities
|
||||||
|
- 22 unit tests
|
||||||
|
|
||||||
|
- **ASTParser (0.3.2)**
|
||||||
|
- Tree-sitter based parsing for TS, TSX, JS, JSX
|
||||||
|
- Extracts: imports, exports, functions, classes, interfaces, type aliases
|
||||||
|
- Import classification: internal, external, builtin (using `node:module` builtinModules)
|
||||||
|
- Graceful error handling with partial AST on syntax errors
|
||||||
|
- 30 unit tests
|
||||||
|
|
||||||
|
- **MetaAnalyzer (0.3.3)**
|
||||||
|
- Complexity metrics: LOC (excluding comments), nesting depth, cyclomatic complexity, overall score
|
||||||
|
- Dependency resolution: internal imports resolved to absolute file paths
|
||||||
|
- Dependents calculation: reverse dependency lookup across all project files
|
||||||
|
- File type classification: source, test, config, types, unknown
|
||||||
|
- Entry point detection: index files, main/app/cli/server patterns, files with no dependents
|
||||||
|
- Hub detection: files with >5 dependents
|
||||||
|
- Batch analysis via `analyzeAll()` method
|
||||||
|
- 54 unit tests
|
||||||
|
|
||||||
|
- **IndexBuilder (0.3.4)**
|
||||||
|
- SymbolIndex: maps symbol names to locations for quick lookup (functions, classes, interfaces, types, variables)
|
||||||
|
- Qualified names for class methods: `ClassName.methodName`
|
||||||
|
- DepsGraph: bidirectional import mapping (`imports` and `importedBy`)
|
||||||
|
- Import resolution: handles `.js` → `.ts`, index.ts, directory imports
|
||||||
|
- `findSymbol()`: exact symbol lookup
|
||||||
|
- `searchSymbols()`: regex-based symbol search
|
||||||
|
- `findCircularDependencies()`: detect import cycles
|
||||||
|
- `getStats()`: comprehensive index statistics (symbols by type, hubs, orphans)
|
||||||
|
- 35 unit tests
|
||||||
|
|
||||||
|
- **Watchdog (0.3.5)**
|
||||||
|
- File watching with chokidar (native events + polling fallback)
|
||||||
|
- Debounced change handling (configurable, default 500ms)
|
||||||
|
- Event types: add, change, unlink
|
||||||
|
- Extension filtering (default: SUPPORTED_EXTENSIONS)
|
||||||
|
- Ignore patterns (default: DEFAULT_IGNORE_PATTERNS)
|
||||||
|
- Multiple callback support
|
||||||
|
- `flushAll()` for immediate processing
|
||||||
|
- Silent error handling for stability
|
||||||
|
- 21 unit tests
|
||||||
|
|
||||||
|
- **Infrastructure Constants**
|
||||||
|
- `tree-sitter-types.ts`: NodeType and FieldName constants for tree-sitter
|
||||||
|
- Eliminates magic strings in ASTParser
|
||||||
|
|
||||||
|
- **Dependencies**
|
||||||
|
- Added `globby` for ESM-native file globbing
|
||||||
|
- Removed `ignore` package (CJS incompatibility with nodenext)
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Refactored ASTParser to use constants instead of magic strings
|
||||||
|
- Total tests: 321
|
||||||
|
- Coverage: 96.43%
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.0] - 2025-01-30
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Redis Storage (0.2.x milestone)**
|
||||||
|
- RedisClient: connection wrapper with AOF persistence configuration
|
||||||
|
- RedisStorage: full IStorage implementation with Redis hashes
|
||||||
|
- Redis key schema: project files, AST, meta, indexes, config
|
||||||
|
- Session keys schema: data, undo stack, sessions list
|
||||||
|
- `generateProjectName()` utility for consistent project naming
|
||||||
|
|
||||||
|
- **Infrastructure Layer**
|
||||||
|
- `src/infrastructure/storage/` module
|
||||||
|
- Exports via `src/infrastructure/index.ts`
|
||||||
|
|
||||||
|
- **Testing**
|
||||||
|
- 68 new unit tests for Redis module
|
||||||
|
- 159 total tests
|
||||||
|
- 99% code coverage maintained
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Updated ESLint config: `@typescript-eslint/no-unnecessary-type-parameters` set to warn
|
||||||
|
|
||||||
|
### Notes
|
||||||
|
|
||||||
|
Redis Storage milestone complete. Next: 0.3.0 - Indexer (FileScanner, AST Parser, Watchdog)
|
||||||
|
|
||||||
## [0.1.0] - 2025-01-29
|
## [0.1.0] - 2025-01-29
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@samiyev/ipuaro",
|
"name": "@samiyev/ipuaro",
|
||||||
"version": "0.1.0",
|
"version": "0.5.0",
|
||||||
"description": "Local AI agent for codebase operations with infinite context feeling",
|
"description": "Local AI agent for codebase operations with infinite context feeling",
|
||||||
"author": "Fozilbek Samiyev <fozilbek.samiyev@gmail.com>",
|
"author": "Fozilbek Samiyev <fozilbek.samiyev@gmail.com>",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
@@ -33,28 +33,28 @@
|
|||||||
"format": "prettier --write src"
|
"format": "prettier --write src"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"ink": "^4.4.1",
|
|
||||||
"ink-text-input": "^5.0.1",
|
|
||||||
"react": "^18.2.0",
|
|
||||||
"ioredis": "^5.4.1",
|
|
||||||
"tree-sitter": "^0.21.1",
|
|
||||||
"tree-sitter-typescript": "^0.21.2",
|
|
||||||
"tree-sitter-javascript": "^0.21.0",
|
|
||||||
"ollama": "^0.5.11",
|
|
||||||
"simple-git": "^3.27.0",
|
|
||||||
"chokidar": "^3.6.0",
|
"chokidar": "^3.6.0",
|
||||||
"commander": "^11.1.0",
|
"commander": "^11.1.0",
|
||||||
"zod": "^3.23.8",
|
"globby": "^16.0.0",
|
||||||
"ignore": "^5.3.2"
|
"ink": "^4.4.1",
|
||||||
|
"ink-text-input": "^5.0.1",
|
||||||
|
"ioredis": "^5.4.1",
|
||||||
|
"ollama": "^0.5.11",
|
||||||
|
"react": "^18.2.0",
|
||||||
|
"simple-git": "^3.27.0",
|
||||||
|
"tree-sitter": "^0.21.1",
|
||||||
|
"tree-sitter-javascript": "^0.21.0",
|
||||||
|
"tree-sitter-typescript": "^0.21.2",
|
||||||
|
"zod": "^3.23.8"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/node": "^22.10.1",
|
"@types/node": "^22.10.1",
|
||||||
"@types/react": "^18.2.0",
|
"@types/react": "^18.2.0",
|
||||||
"vitest": "^1.6.0",
|
|
||||||
"@vitest/coverage-v8": "^1.6.0",
|
"@vitest/coverage-v8": "^1.6.0",
|
||||||
"@vitest/ui": "^1.6.0",
|
"@vitest/ui": "^1.6.0",
|
||||||
"tsup": "^8.3.5",
|
"tsup": "^8.3.5",
|
||||||
"typescript": "^5.7.2"
|
"typescript": "^5.7.2",
|
||||||
|
"vitest": "^1.6.0"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=20.0.0"
|
"node": ">=20.0.0"
|
||||||
@@ -70,7 +70,7 @@
|
|||||||
],
|
],
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/samiyev/puaros.git",
|
"url": "git+https://github.com/samiyev/puaros.git",
|
||||||
"directory": "packages/ipuaro"
|
"directory": "packages/ipuaro"
|
||||||
},
|
},
|
||||||
"bugs": {
|
"bugs": {
|
||||||
|
|||||||
@@ -4,6 +4,11 @@
|
|||||||
* Main entry point for the library.
|
* Main entry point for the library.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import { createRequire } from "node:module"
|
||||||
|
|
||||||
|
const require = createRequire(import.meta.url)
|
||||||
|
const pkg = require("../package.json") as { version: string }
|
||||||
|
|
||||||
// Domain exports
|
// Domain exports
|
||||||
export * from "./domain/index.js"
|
export * from "./domain/index.js"
|
||||||
|
|
||||||
@@ -13,5 +18,8 @@ export * from "./application/index.js"
|
|||||||
// Shared exports
|
// Shared exports
|
||||||
export * from "./shared/index.js"
|
export * from "./shared/index.js"
|
||||||
|
|
||||||
|
// Infrastructure exports
|
||||||
|
export * from "./infrastructure/index.js"
|
||||||
|
|
||||||
// Version
|
// Version
|
||||||
export const VERSION = "0.1.0"
|
export const VERSION = pkg.version
|
||||||
|
|||||||
5
packages/ipuaro/src/infrastructure/index.ts
Normal file
5
packages/ipuaro/src/infrastructure/index.ts
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
// Infrastructure layer exports
|
||||||
|
export * from "./storage/index.js"
|
||||||
|
export * from "./indexer/index.js"
|
||||||
|
export * from "./llm/index.js"
|
||||||
|
export * from "./tools/index.js"
|
||||||
551
packages/ipuaro/src/infrastructure/indexer/ASTParser.ts
Normal file
551
packages/ipuaro/src/infrastructure/indexer/ASTParser.ts
Normal file
@@ -0,0 +1,551 @@
|
|||||||
|
import { builtinModules } from "node:module"
|
||||||
|
import Parser from "tree-sitter"
|
||||||
|
import TypeScript from "tree-sitter-typescript"
|
||||||
|
import JavaScript from "tree-sitter-javascript"
|
||||||
|
import {
|
||||||
|
createEmptyFileAST,
|
||||||
|
type ExportInfo,
|
||||||
|
type FileAST,
|
||||||
|
type ImportInfo,
|
||||||
|
type MethodInfo,
|
||||||
|
type ParameterInfo,
|
||||||
|
type PropertyInfo,
|
||||||
|
} from "../../domain/value-objects/FileAST.js"
|
||||||
|
import { FieldName, NodeType } from "./tree-sitter-types.js"
|
||||||
|
|
||||||
|
type Language = "ts" | "tsx" | "js" | "jsx"
|
||||||
|
type SyntaxNode = Parser.SyntaxNode
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses source code into AST using tree-sitter.
|
||||||
|
*/
|
||||||
|
export class ASTParser {
|
||||||
|
private readonly parsers = new Map<Language, Parser>()
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.initializeParsers()
|
||||||
|
}
|
||||||
|
|
||||||
|
private initializeParsers(): void {
|
||||||
|
const tsParser = new Parser()
|
||||||
|
tsParser.setLanguage(TypeScript.typescript)
|
||||||
|
this.parsers.set("ts", tsParser)
|
||||||
|
|
||||||
|
const tsxParser = new Parser()
|
||||||
|
tsxParser.setLanguage(TypeScript.tsx)
|
||||||
|
this.parsers.set("tsx", tsxParser)
|
||||||
|
|
||||||
|
const jsParser = new Parser()
|
||||||
|
jsParser.setLanguage(JavaScript)
|
||||||
|
this.parsers.set("js", jsParser)
|
||||||
|
this.parsers.set("jsx", jsParser)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse source code and extract AST information.
|
||||||
|
*/
|
||||||
|
parse(content: string, language: Language): FileAST {
|
||||||
|
const parser = this.parsers.get(language)
|
||||||
|
if (!parser) {
|
||||||
|
return {
|
||||||
|
...createEmptyFileAST(),
|
||||||
|
parseError: true,
|
||||||
|
parseErrorMessage: `Unsupported language: ${language}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const tree = parser.parse(content)
|
||||||
|
const root = tree.rootNode
|
||||||
|
|
||||||
|
if (root.hasError) {
|
||||||
|
const ast = this.extractAST(root, language)
|
||||||
|
ast.parseError = true
|
||||||
|
ast.parseErrorMessage = "Syntax error in source code"
|
||||||
|
return ast
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.extractAST(root, language)
|
||||||
|
} catch (error) {
|
||||||
|
return {
|
||||||
|
...createEmptyFileAST(),
|
||||||
|
parseError: true,
|
||||||
|
parseErrorMessage: error instanceof Error ? error.message : "Unknown parse error",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractAST(root: SyntaxNode, language: Language): FileAST {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
const isTypeScript = language === "ts" || language === "tsx"
|
||||||
|
|
||||||
|
for (const child of root.children) {
|
||||||
|
this.visitNode(child, ast, isTypeScript)
|
||||||
|
}
|
||||||
|
|
||||||
|
return ast
|
||||||
|
}
|
||||||
|
|
||||||
|
private visitNode(node: SyntaxNode, ast: FileAST, isTypeScript: boolean): void {
|
||||||
|
switch (node.type) {
|
||||||
|
case NodeType.IMPORT_STATEMENT:
|
||||||
|
this.extractImport(node, ast)
|
||||||
|
break
|
||||||
|
case NodeType.EXPORT_STATEMENT:
|
||||||
|
this.extractExport(node, ast)
|
||||||
|
break
|
||||||
|
case NodeType.FUNCTION_DECLARATION:
|
||||||
|
this.extractFunction(node, ast, false)
|
||||||
|
break
|
||||||
|
case NodeType.LEXICAL_DECLARATION:
|
||||||
|
this.extractLexicalDeclaration(node, ast)
|
||||||
|
break
|
||||||
|
case NodeType.CLASS_DECLARATION:
|
||||||
|
this.extractClass(node, ast, false)
|
||||||
|
break
|
||||||
|
case NodeType.INTERFACE_DECLARATION:
|
||||||
|
if (isTypeScript) {
|
||||||
|
this.extractInterface(node, ast, false)
|
||||||
|
}
|
||||||
|
break
|
||||||
|
case NodeType.TYPE_ALIAS_DECLARATION:
|
||||||
|
if (isTypeScript) {
|
||||||
|
this.extractTypeAlias(node, ast, false)
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractImport(node: SyntaxNode, ast: FileAST): void {
|
||||||
|
const sourceNode = node.childForFieldName(FieldName.SOURCE)
|
||||||
|
if (!sourceNode) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const from = this.getStringValue(sourceNode)
|
||||||
|
const line = node.startPosition.row + 1
|
||||||
|
const importType = this.classifyImport(from)
|
||||||
|
|
||||||
|
const importClause = node.children.find((c) => c.type === NodeType.IMPORT_CLAUSE)
|
||||||
|
if (!importClause) {
|
||||||
|
ast.imports.push({
|
||||||
|
name: "*",
|
||||||
|
from,
|
||||||
|
line,
|
||||||
|
type: importType,
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const child of importClause.children) {
|
||||||
|
if (child.type === NodeType.IDENTIFIER) {
|
||||||
|
ast.imports.push({
|
||||||
|
name: child.text,
|
||||||
|
from,
|
||||||
|
line,
|
||||||
|
type: importType,
|
||||||
|
isDefault: true,
|
||||||
|
})
|
||||||
|
} else if (child.type === NodeType.NAMESPACE_IMPORT) {
|
||||||
|
const alias = child.children.find((c) => c.type === NodeType.IDENTIFIER)
|
||||||
|
ast.imports.push({
|
||||||
|
name: alias?.text ?? "*",
|
||||||
|
from,
|
||||||
|
line,
|
||||||
|
type: importType,
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
} else if (child.type === NodeType.NAMED_IMPORTS) {
|
||||||
|
for (const specifier of child.children) {
|
||||||
|
if (specifier.type === NodeType.IMPORT_SPECIFIER) {
|
||||||
|
const nameNode = specifier.childForFieldName(FieldName.NAME)
|
||||||
|
const aliasNode = specifier.childForFieldName(FieldName.ALIAS)
|
||||||
|
ast.imports.push({
|
||||||
|
name: aliasNode?.text ?? nameNode?.text ?? "",
|
||||||
|
from,
|
||||||
|
line,
|
||||||
|
type: importType,
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractExport(node: SyntaxNode, ast: FileAST): void {
|
||||||
|
const isDefault = node.children.some((c) => c.type === NodeType.DEFAULT)
|
||||||
|
const declaration = node.childForFieldName(FieldName.DECLARATION)
|
||||||
|
|
||||||
|
if (declaration) {
|
||||||
|
switch (declaration.type) {
|
||||||
|
case NodeType.FUNCTION_DECLARATION:
|
||||||
|
this.extractFunction(declaration, ast, true)
|
||||||
|
this.addExportInfo(ast, declaration, "function", isDefault)
|
||||||
|
break
|
||||||
|
case NodeType.CLASS_DECLARATION:
|
||||||
|
this.extractClass(declaration, ast, true)
|
||||||
|
this.addExportInfo(ast, declaration, "class", isDefault)
|
||||||
|
break
|
||||||
|
case NodeType.INTERFACE_DECLARATION:
|
||||||
|
this.extractInterface(declaration, ast, true)
|
||||||
|
this.addExportInfo(ast, declaration, "interface", isDefault)
|
||||||
|
break
|
||||||
|
case NodeType.TYPE_ALIAS_DECLARATION:
|
||||||
|
this.extractTypeAlias(declaration, ast, true)
|
||||||
|
this.addExportInfo(ast, declaration, "type", isDefault)
|
||||||
|
break
|
||||||
|
case NodeType.LEXICAL_DECLARATION:
|
||||||
|
this.extractLexicalDeclaration(declaration, ast, true)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const exportClause = node.children.find((c) => c.type === NodeType.EXPORT_CLAUSE)
|
||||||
|
if (exportClause) {
|
||||||
|
for (const specifier of exportClause.children) {
|
||||||
|
if (specifier.type === NodeType.EXPORT_SPECIFIER) {
|
||||||
|
const nameNode = specifier.childForFieldName(FieldName.NAME)
|
||||||
|
if (nameNode) {
|
||||||
|
ast.exports.push({
|
||||||
|
name: nameNode.text,
|
||||||
|
line: node.startPosition.row + 1,
|
||||||
|
isDefault: false,
|
||||||
|
kind: "variable",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractFunction(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||||
|
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||||
|
if (!nameNode) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const params = this.extractParameters(node)
|
||||||
|
const isAsync = node.children.some((c) => c.type === NodeType.ASYNC)
|
||||||
|
const returnTypeNode = node.childForFieldName(FieldName.RETURN_TYPE)
|
||||||
|
|
||||||
|
ast.functions.push({
|
||||||
|
name: nameNode.text,
|
||||||
|
lineStart: node.startPosition.row + 1,
|
||||||
|
lineEnd: node.endPosition.row + 1,
|
||||||
|
params,
|
||||||
|
isAsync,
|
||||||
|
isExported,
|
||||||
|
returnType: returnTypeNode?.text?.replace(/^:\s*/, ""),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractLexicalDeclaration(node: SyntaxNode, ast: FileAST, isExported = false): void {
|
||||||
|
for (const child of node.children) {
|
||||||
|
if (child.type === NodeType.VARIABLE_DECLARATOR) {
|
||||||
|
const nameNode = child.childForFieldName(FieldName.NAME)
|
||||||
|
const valueNode = child.childForFieldName(FieldName.VALUE)
|
||||||
|
|
||||||
|
if (
|
||||||
|
valueNode?.type === NodeType.ARROW_FUNCTION ||
|
||||||
|
valueNode?.type === NodeType.FUNCTION
|
||||||
|
) {
|
||||||
|
const params = this.extractParameters(valueNode)
|
||||||
|
const isAsync = valueNode.children.some((c) => c.type === NodeType.ASYNC)
|
||||||
|
|
||||||
|
ast.functions.push({
|
||||||
|
name: nameNode?.text ?? "",
|
||||||
|
lineStart: node.startPosition.row + 1,
|
||||||
|
lineEnd: node.endPosition.row + 1,
|
||||||
|
params,
|
||||||
|
isAsync,
|
||||||
|
isExported,
|
||||||
|
})
|
||||||
|
|
||||||
|
if (isExported) {
|
||||||
|
ast.exports.push({
|
||||||
|
name: nameNode?.text ?? "",
|
||||||
|
line: node.startPosition.row + 1,
|
||||||
|
isDefault: false,
|
||||||
|
kind: "function",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} else if (isExported && nameNode) {
|
||||||
|
ast.exports.push({
|
||||||
|
name: nameNode.text,
|
||||||
|
line: node.startPosition.row + 1,
|
||||||
|
isDefault: false,
|
||||||
|
kind: "variable",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractClass(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||||
|
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||||
|
if (!nameNode) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = node.childForFieldName(FieldName.BODY)
|
||||||
|
const methods: MethodInfo[] = []
|
||||||
|
const properties: PropertyInfo[] = []
|
||||||
|
|
||||||
|
if (body) {
|
||||||
|
for (const member of body.children) {
|
||||||
|
if (member.type === NodeType.METHOD_DEFINITION) {
|
||||||
|
methods.push(this.extractMethod(member))
|
||||||
|
} else if (
|
||||||
|
member.type === NodeType.PUBLIC_FIELD_DEFINITION ||
|
||||||
|
member.type === NodeType.FIELD_DEFINITION
|
||||||
|
) {
|
||||||
|
properties.push(this.extractProperty(member))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const { extendsName, implementsList } = this.extractClassHeritage(node)
|
||||||
|
const isAbstract = node.children.some((c) => c.type === NodeType.ABSTRACT)
|
||||||
|
|
||||||
|
ast.classes.push({
|
||||||
|
name: nameNode.text,
|
||||||
|
lineStart: node.startPosition.row + 1,
|
||||||
|
lineEnd: node.endPosition.row + 1,
|
||||||
|
methods,
|
||||||
|
properties,
|
||||||
|
extends: extendsName,
|
||||||
|
implements: implementsList,
|
||||||
|
isExported,
|
||||||
|
isAbstract,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractClassHeritage(node: SyntaxNode): {
|
||||||
|
extendsName: string | undefined
|
||||||
|
implementsList: string[]
|
||||||
|
} {
|
||||||
|
let extendsName: string | undefined
|
||||||
|
const implementsList: string[] = []
|
||||||
|
|
||||||
|
for (const child of node.children) {
|
||||||
|
if (child.type === NodeType.CLASS_HERITAGE) {
|
||||||
|
this.parseHeritageClause(child, (ext) => (extendsName = ext), implementsList)
|
||||||
|
} else if (child.type === NodeType.EXTENDS_CLAUSE) {
|
||||||
|
extendsName = this.findTypeIdentifier(child)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { extendsName, implementsList }
|
||||||
|
}
|
||||||
|
|
||||||
|
private parseHeritageClause(
|
||||||
|
heritage: SyntaxNode,
|
||||||
|
setExtends: (name: string) => void,
|
||||||
|
implementsList: string[],
|
||||||
|
): void {
|
||||||
|
for (const clause of heritage.children) {
|
||||||
|
if (clause.type === NodeType.EXTENDS_CLAUSE) {
|
||||||
|
const typeId = this.findTypeIdentifier(clause)
|
||||||
|
if (typeId) {
|
||||||
|
setExtends(typeId)
|
||||||
|
}
|
||||||
|
} else if (clause.type === NodeType.IMPLEMENTS_CLAUSE) {
|
||||||
|
this.collectImplements(clause, implementsList)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private findTypeIdentifier(node: SyntaxNode): string | undefined {
|
||||||
|
const typeNode = node.children.find(
|
||||||
|
(c) => c.type === NodeType.TYPE_IDENTIFIER || c.type === NodeType.IDENTIFIER,
|
||||||
|
)
|
||||||
|
return typeNode?.text
|
||||||
|
}
|
||||||
|
|
||||||
|
private collectImplements(clause: SyntaxNode, list: string[]): void {
|
||||||
|
for (const impl of clause.children) {
|
||||||
|
if (impl.type === NodeType.TYPE_IDENTIFIER || impl.type === NodeType.IDENTIFIER) {
|
||||||
|
list.push(impl.text)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractMethod(node: SyntaxNode): MethodInfo {
|
||||||
|
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||||
|
const params = this.extractParameters(node)
|
||||||
|
const isAsync = node.children.some((c) => c.type === NodeType.ASYNC)
|
||||||
|
const isStatic = node.children.some((c) => c.type === NodeType.STATIC)
|
||||||
|
|
||||||
|
let visibility: "public" | "private" | "protected" = "public"
|
||||||
|
for (const child of node.children) {
|
||||||
|
if (child.type === NodeType.ACCESSIBILITY_MODIFIER) {
|
||||||
|
visibility = child.text as "public" | "private" | "protected"
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: nameNode?.text ?? "",
|
||||||
|
lineStart: node.startPosition.row + 1,
|
||||||
|
lineEnd: node.endPosition.row + 1,
|
||||||
|
params,
|
||||||
|
isAsync,
|
||||||
|
visibility,
|
||||||
|
isStatic,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractProperty(node: SyntaxNode): PropertyInfo {
|
||||||
|
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||||
|
const typeNode = node.childForFieldName(FieldName.TYPE)
|
||||||
|
const isStatic = node.children.some((c) => c.type === NodeType.STATIC)
|
||||||
|
const isReadonly = node.children.some((c) => c.text === NodeType.READONLY)
|
||||||
|
|
||||||
|
let visibility: "public" | "private" | "protected" = "public"
|
||||||
|
for (const child of node.children) {
|
||||||
|
if (child.type === NodeType.ACCESSIBILITY_MODIFIER) {
|
||||||
|
visibility = child.text as "public" | "private" | "protected"
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: nameNode?.text ?? "",
|
||||||
|
line: node.startPosition.row + 1,
|
||||||
|
type: typeNode?.text,
|
||||||
|
visibility,
|
||||||
|
isStatic,
|
||||||
|
isReadonly,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractInterface(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||||
|
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||||
|
if (!nameNode) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = node.childForFieldName(FieldName.BODY)
|
||||||
|
const properties: PropertyInfo[] = []
|
||||||
|
|
||||||
|
if (body) {
|
||||||
|
for (const member of body.children) {
|
||||||
|
if (member.type === NodeType.PROPERTY_SIGNATURE) {
|
||||||
|
const propName = member.childForFieldName(FieldName.NAME)
|
||||||
|
const propType = member.childForFieldName(FieldName.TYPE)
|
||||||
|
properties.push({
|
||||||
|
name: propName?.text ?? "",
|
||||||
|
line: member.startPosition.row + 1,
|
||||||
|
type: propType?.text,
|
||||||
|
visibility: "public",
|
||||||
|
isStatic: false,
|
||||||
|
isReadonly: member.children.some((c) => c.text === NodeType.READONLY),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const extendsList: string[] = []
|
||||||
|
const extendsClause = node.children.find((c) => c.type === NodeType.EXTENDS_TYPE_CLAUSE)
|
||||||
|
if (extendsClause) {
|
||||||
|
for (const child of extendsClause.children) {
|
||||||
|
if (child.type === NodeType.TYPE_IDENTIFIER) {
|
||||||
|
extendsList.push(child.text)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ast.interfaces.push({
|
||||||
|
name: nameNode.text,
|
||||||
|
lineStart: node.startPosition.row + 1,
|
||||||
|
lineEnd: node.endPosition.row + 1,
|
||||||
|
properties,
|
||||||
|
extends: extendsList,
|
||||||
|
isExported,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractTypeAlias(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||||
|
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||||
|
if (!nameNode) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
ast.typeAliases.push({
|
||||||
|
name: nameNode.text,
|
||||||
|
line: node.startPosition.row + 1,
|
||||||
|
isExported,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractParameters(node: SyntaxNode): ParameterInfo[] {
|
||||||
|
const params: ParameterInfo[] = []
|
||||||
|
const paramsNode = node.childForFieldName(FieldName.PARAMETERS)
|
||||||
|
|
||||||
|
if (paramsNode) {
|
||||||
|
for (const param of paramsNode.children) {
|
||||||
|
if (
|
||||||
|
param.type === NodeType.REQUIRED_PARAMETER ||
|
||||||
|
param.type === NodeType.OPTIONAL_PARAMETER ||
|
||||||
|
param.type === NodeType.IDENTIFIER
|
||||||
|
) {
|
||||||
|
const nameNode =
|
||||||
|
param.type === NodeType.IDENTIFIER
|
||||||
|
? param
|
||||||
|
: param.childForFieldName(FieldName.PATTERN)
|
||||||
|
const typeNode = param.childForFieldName(FieldName.TYPE)
|
||||||
|
const defaultValue = param.childForFieldName(FieldName.VALUE)
|
||||||
|
|
||||||
|
params.push({
|
||||||
|
name: nameNode?.text ?? "",
|
||||||
|
type: typeNode?.text,
|
||||||
|
optional: param.type === NodeType.OPTIONAL_PARAMETER,
|
||||||
|
hasDefault: defaultValue !== null,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return params
|
||||||
|
}
|
||||||
|
|
||||||
|
private addExportInfo(
|
||||||
|
ast: FileAST,
|
||||||
|
node: SyntaxNode,
|
||||||
|
kind: ExportInfo["kind"],
|
||||||
|
isDefault: boolean,
|
||||||
|
): void {
|
||||||
|
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||||
|
if (nameNode) {
|
||||||
|
ast.exports.push({
|
||||||
|
name: nameNode.text,
|
||||||
|
line: node.startPosition.row + 1,
|
||||||
|
isDefault,
|
||||||
|
kind,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private classifyImport(from: string): ImportInfo["type"] {
|
||||||
|
if (from.startsWith(".") || from.startsWith("/")) {
|
||||||
|
return "internal"
|
||||||
|
}
|
||||||
|
if (from.startsWith("node:") || builtinModules.includes(from)) {
|
||||||
|
return "builtin"
|
||||||
|
}
|
||||||
|
return "external"
|
||||||
|
}
|
||||||
|
|
||||||
|
private getStringValue(node: SyntaxNode): string {
|
||||||
|
const text = node.text
|
||||||
|
if (
|
||||||
|
(text.startsWith('"') && text.endsWith('"')) ||
|
||||||
|
(text.startsWith("'") && text.endsWith("'"))
|
||||||
|
) {
|
||||||
|
return text.slice(1, -1)
|
||||||
|
}
|
||||||
|
return text
|
||||||
|
}
|
||||||
|
}
|
||||||
189
packages/ipuaro/src/infrastructure/indexer/FileScanner.ts
Normal file
189
packages/ipuaro/src/infrastructure/indexer/FileScanner.ts
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
import * as fs from "node:fs/promises"
|
||||||
|
import type { Stats } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import { globby } from "globby"
|
||||||
|
import {
|
||||||
|
BINARY_EXTENSIONS,
|
||||||
|
DEFAULT_IGNORE_PATTERNS,
|
||||||
|
SUPPORTED_EXTENSIONS,
|
||||||
|
} from "../../domain/constants/index.js"
|
||||||
|
import type { ScanResult } from "../../domain/services/IIndexer.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Progress callback for file scanning.
|
||||||
|
*/
|
||||||
|
export interface ScanProgress {
|
||||||
|
current: number
|
||||||
|
total: number
|
||||||
|
currentFile: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for FileScanner.
|
||||||
|
*/
|
||||||
|
export interface FileScannerOptions {
|
||||||
|
/** Additional ignore patterns (besides .gitignore and defaults) */
|
||||||
|
additionalIgnore?: string[]
|
||||||
|
/** Only include files with these extensions. Defaults to SUPPORTED_EXTENSIONS. */
|
||||||
|
extensions?: readonly string[]
|
||||||
|
/** Callback for progress updates */
|
||||||
|
onProgress?: (progress: ScanProgress) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scans project directories recursively using globby.
|
||||||
|
* Respects .gitignore, skips binary files and default ignore patterns.
|
||||||
|
*/
|
||||||
|
export class FileScanner {
|
||||||
|
private readonly extensions: Set<string>
|
||||||
|
private readonly additionalIgnore: string[]
|
||||||
|
private readonly onProgress?: (progress: ScanProgress) => void
|
||||||
|
|
||||||
|
constructor(options: FileScannerOptions = {}) {
|
||||||
|
this.extensions = new Set(options.extensions ?? SUPPORTED_EXTENSIONS)
|
||||||
|
this.additionalIgnore = options.additionalIgnore ?? []
|
||||||
|
this.onProgress = options.onProgress
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build glob patterns from extensions.
|
||||||
|
*/
|
||||||
|
private buildGlobPatterns(): string[] {
|
||||||
|
const exts = [...this.extensions].map((ext) => ext.replace(".", ""))
|
||||||
|
if (exts.length === 1) {
|
||||||
|
return [`**/*.${exts[0]}`]
|
||||||
|
}
|
||||||
|
return [`**/*.{${exts.join(",")}}`]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build ignore patterns.
|
||||||
|
*/
|
||||||
|
private buildIgnorePatterns(): string[] {
|
||||||
|
const patterns = [
|
||||||
|
...DEFAULT_IGNORE_PATTERNS,
|
||||||
|
...this.additionalIgnore,
|
||||||
|
...BINARY_EXTENSIONS.map((ext) => `**/*${ext}`),
|
||||||
|
]
|
||||||
|
return patterns
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scan directory and yield file results.
|
||||||
|
* @param root - Root directory to scan
|
||||||
|
*/
|
||||||
|
async *scan(root: string): AsyncGenerator<ScanResult> {
|
||||||
|
const globPatterns = this.buildGlobPatterns()
|
||||||
|
const ignorePatterns = this.buildIgnorePatterns()
|
||||||
|
|
||||||
|
const files = await globby(globPatterns, {
|
||||||
|
cwd: root,
|
||||||
|
gitignore: true,
|
||||||
|
ignore: ignorePatterns,
|
||||||
|
absolute: false,
|
||||||
|
onlyFiles: true,
|
||||||
|
followSymbolicLinks: false,
|
||||||
|
})
|
||||||
|
|
||||||
|
const total = files.length
|
||||||
|
let current = 0
|
||||||
|
|
||||||
|
for (const relativePath of files) {
|
||||||
|
current++
|
||||||
|
this.reportProgress(relativePath, current, total)
|
||||||
|
|
||||||
|
const fullPath = path.join(root, relativePath)
|
||||||
|
const stats = await this.safeStats(fullPath)
|
||||||
|
|
||||||
|
if (stats) {
|
||||||
|
yield {
|
||||||
|
path: relativePath,
|
||||||
|
type: "file",
|
||||||
|
size: stats.size,
|
||||||
|
lastModified: stats.mtimeMs,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scan and return all results as array.
|
||||||
|
*/
|
||||||
|
async scanAll(root: string): Promise<ScanResult[]> {
|
||||||
|
const results: ScanResult[] = []
|
||||||
|
for await (const result of this.scan(root)) {
|
||||||
|
results.push(result)
|
||||||
|
}
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if file has supported extension.
|
||||||
|
*/
|
||||||
|
isSupportedExtension(filePath: string): boolean {
|
||||||
|
const ext = path.extname(filePath).toLowerCase()
|
||||||
|
return this.extensions.has(ext)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Safely get file stats without throwing.
|
||||||
|
*/
|
||||||
|
private async safeStats(filePath: string): Promise<Stats | null> {
|
||||||
|
try {
|
||||||
|
return await fs.stat(filePath)
|
||||||
|
} catch {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Report progress if callback is set.
|
||||||
|
*/
|
||||||
|
private reportProgress(currentFile: string, current: number, total: number): void {
|
||||||
|
if (this.onProgress) {
|
||||||
|
this.onProgress({ current, total, currentFile })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if file content is likely UTF-8 text.
|
||||||
|
* Reads first 8KB and checks for null bytes.
|
||||||
|
*/
|
||||||
|
static async isTextFile(filePath: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const handle = await fs.open(filePath, "r")
|
||||||
|
try {
|
||||||
|
const buffer = Buffer.alloc(8192)
|
||||||
|
const { bytesRead } = await handle.read(buffer, 0, 8192, 0)
|
||||||
|
if (bytesRead === 0) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
for (let i = 0; i < bytesRead; i++) {
|
||||||
|
if (buffer[i] === 0) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
} finally {
|
||||||
|
await handle.close()
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read file content as string.
|
||||||
|
* Returns null if file is binary or unreadable.
|
||||||
|
*/
|
||||||
|
static async readFileContent(filePath: string): Promise<string | null> {
|
||||||
|
if (!(await FileScanner.isTextFile(filePath))) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
return await fs.readFile(filePath, "utf-8")
|
||||||
|
} catch {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
406
packages/ipuaro/src/infrastructure/indexer/IndexBuilder.ts
Normal file
406
packages/ipuaro/src/infrastructure/indexer/IndexBuilder.ts
Normal file
@@ -0,0 +1,406 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||||
|
import type { DepsGraph, SymbolIndex, SymbolLocation } from "../../domain/services/IStorage.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builds searchable indexes from parsed ASTs.
|
||||||
|
*/
|
||||||
|
export class IndexBuilder {
    // Root of the project being indexed.
    // NOTE(review): projectRoot is stored but never read inside this class —
    // confirm whether it is still needed or can be removed.
    private readonly projectRoot: string

    constructor(projectRoot: string) {
        this.projectRoot = projectRoot
    }

    /**
     * Build symbol index from all ASTs.
     * Maps symbol names to their locations for quick lookup.
     */
    buildSymbolIndex(asts: Map<string, FileAST>): SymbolIndex {
        const index: SymbolIndex = new Map()

        for (const [filePath, ast] of asts) {
            this.indexFunctions(filePath, ast, index)
            this.indexClasses(filePath, ast, index)
            this.indexInterfaces(filePath, ast, index)
            this.indexTypeAliases(filePath, ast, index)
            this.indexExportedVariables(filePath, ast, index)
        }

        return index
    }

    /**
     * Index function declarations.
     */
    private indexFunctions(filePath: string, ast: FileAST, index: SymbolIndex): void {
        for (const func of ast.functions) {
            this.addSymbol(index, func.name, {
                path: filePath,
                line: func.lineStart,
                type: "function",
            })
        }
    }

    /**
     * Index class declarations and their methods.
     * Methods are additionally indexed under the qualified name
     * "Class.method", with type "function".
     */
    private indexClasses(filePath: string, ast: FileAST, index: SymbolIndex): void {
        for (const cls of ast.classes) {
            this.addSymbol(index, cls.name, {
                path: filePath,
                line: cls.lineStart,
                type: "class",
            })

            for (const method of cls.methods) {
                const qualifiedName = `${cls.name}.${method.name}`
                this.addSymbol(index, qualifiedName, {
                    path: filePath,
                    line: method.lineStart,
                    type: "function",
                })
            }
        }
    }

    /**
     * Index interface declarations.
     */
    private indexInterfaces(filePath: string, ast: FileAST, index: SymbolIndex): void {
        for (const iface of ast.interfaces) {
            this.addSymbol(index, iface.name, {
                path: filePath,
                line: iface.lineStart,
                type: "interface",
            })
        }
    }

    /**
     * Index type alias declarations.
     */
    private indexTypeAliases(filePath: string, ast: FileAST, index: SymbolIndex): void {
        for (const typeAlias of ast.typeAliases) {
            this.addSymbol(index, typeAlias.name, {
                path: filePath,
                // Type aliases carry a single `line` (not lineStart/lineEnd).
                line: typeAlias.line,
                type: "type",
            })
        }
    }

    /**
     * Index exported variables (not functions).
     * Function-valued exports are skipped because indexFunctions already
     * covers them.
     */
    private indexExportedVariables(filePath: string, ast: FileAST, index: SymbolIndex): void {
        const functionNames = new Set(ast.functions.map((f) => f.name))

        for (const exp of ast.exports) {
            if (exp.kind === "variable" && !functionNames.has(exp.name)) {
                this.addSymbol(index, exp.name, {
                    path: filePath,
                    line: exp.line,
                    type: "variable",
                })
            }
        }
    }

    /**
     * Add a symbol to the index.
     * Empty names are ignored; duplicate (path, line) locations for the
     * same name are deduplicated.
     */
    private addSymbol(index: SymbolIndex, name: string, location: SymbolLocation): void {
        if (!name) {
            return
        }

        const existing = index.get(name)
        if (existing) {
            const isDuplicate = existing.some(
                (loc) => loc.path === location.path && loc.line === location.line,
            )
            if (!isDuplicate) {
                existing.push(location)
            }
        } else {
            index.set(name, [location])
        }
    }

    /**
     * Build dependency graph from all ASTs.
     * Creates bidirectional mapping of imports. Every known file appears
     * as a key in both maps (possibly with an empty list), and all lists
     * are sorted for deterministic output.
     */
    buildDepsGraph(asts: Map<string, FileAST>): DepsGraph {
        const imports = new Map<string, string[]>()
        const importedBy = new Map<string, string[]>()

        // Pre-seed so files with no imports/dependents still have entries.
        for (const filePath of asts.keys()) {
            imports.set(filePath, [])
            importedBy.set(filePath, [])
        }

        for (const [filePath, ast] of asts) {
            const fileImports = this.resolveFileImports(filePath, ast, asts)
            imports.set(filePath, fileImports)

            for (const importedFile of fileImports) {
                const dependents = importedBy.get(importedFile) ?? []
                if (!dependents.includes(filePath)) {
                    dependents.push(filePath)
                    importedBy.set(importedFile, dependents)
                }
            }
        }

        // Sort in place for stable, deterministic ordering.
        for (const [filePath, deps] of imports) {
            imports.set(filePath, deps.sort())
        }
        for (const [filePath, deps] of importedBy) {
            importedBy.set(filePath, deps.sort())
        }

        return { imports, importedBy }
    }

    /**
     * Resolve internal imports for a file.
     * Only imports marked type "internal" are considered; results are
     * deduplicated and limited to files present in allASTs.
     */
    private resolveFileImports(
        filePath: string,
        ast: FileAST,
        allASTs: Map<string, FileAST>,
    ): string[] {
        const fileDir = path.dirname(filePath)
        const resolvedImports: string[] = []

        for (const imp of ast.imports) {
            if (imp.type !== "internal") {
                continue
            }

            const resolved = this.resolveImportPath(fileDir, imp.from, allASTs)
            if (resolved && !resolvedImports.includes(resolved)) {
                resolvedImports.push(resolved)
            }
        }

        return resolvedImports
    }

    /**
     * Resolve import path to actual file path.
     * Returns the first candidate present in allASTs, or null when the
     * import cannot be matched to a known file.
     */
    private resolveImportPath(
        fromDir: string,
        importPath: string,
        allASTs: Map<string, FileAST>,
    ): string | null {
        const absolutePath = path.resolve(fromDir, importPath)

        const candidates = this.getImportCandidates(absolutePath)
        for (const candidate of candidates) {
            if (allASTs.has(candidate)) {
                return candidate
            }
        }

        return null
    }

    /**
     * Generate possible file paths for an import, in priority order.
     * Handles TS-style specifiers where ".js"/".jsx" refer to ".ts"/".tsx"
     * sources, plus extensionless and directory (index) imports.
     */
    private getImportCandidates(basePath: string): string[] {
        const candidates: string[] = []

        if (/\.(ts|tsx|js|jsx)$/.test(basePath)) {
            candidates.push(basePath)

            if (basePath.endsWith(".js")) {
                candidates.push(`${basePath.slice(0, -3)}.ts`)
            } else if (basePath.endsWith(".jsx")) {
                candidates.push(`${basePath.slice(0, -4)}.tsx`)
            }
        } else {
            candidates.push(`${basePath}.ts`)
            candidates.push(`${basePath}.tsx`)
            candidates.push(`${basePath}.js`)
            candidates.push(`${basePath}.jsx`)
            candidates.push(`${basePath}/index.ts`)
            candidates.push(`${basePath}/index.tsx`)
            candidates.push(`${basePath}/index.js`)
            candidates.push(`${basePath}/index.jsx`)
        }

        return candidates
    }

    /**
     * Find all locations of a symbol by exact name.
     */
    findSymbol(index: SymbolIndex, name: string): SymbolLocation[] {
        return index.get(name) ?? []
    }

    /**
     * Find symbols whose name matches a pattern (case-insensitive regex).
     * NOTE(review): `pattern` is compiled directly as a RegExp — an invalid
     * pattern throws here; confirm callers sanitize user input.
     */
    searchSymbols(index: SymbolIndex, pattern: string): Map<string, SymbolLocation[]> {
        const results = new Map<string, SymbolLocation[]>()
        const regex = new RegExp(pattern, "i")

        for (const [name, locations] of index) {
            if (regex.test(name)) {
                results.set(name, locations)
            }
        }

        return results
    }

    /**
     * Get all files that the given file depends on (imports).
     */
    getDependencies(graph: DepsGraph, filePath: string): string[] {
        return graph.imports.get(filePath) ?? []
    }

    /**
     * Get all files that depend on the given file (import it).
     */
    getDependents(graph: DepsGraph, filePath: string): string[] {
        return graph.importedBy.get(filePath) ?? []
    }

    /**
     * Find circular dependencies in the graph via DFS with a recursion
     * stack. Detected cycles are normalized and deduplicated.
     * NOTE(review): `visited` is shared across DFS roots, so a cycle whose
     * entry edge points at an already-visited node that is no longer on the
     * recursion stack is not reported — confirm whether exhaustive cycle
     * detection is required here.
     */
    findCircularDependencies(graph: DepsGraph): string[][] {
        const cycles: string[][] = []
        const visited = new Set<string>()
        const recursionStack = new Set<string>()

        const dfs = (node: string, path: string[]): void => {
            visited.add(node)
            recursionStack.add(node)
            path.push(node)

            const deps = graph.imports.get(node) ?? []
            for (const dep of deps) {
                if (!visited.has(dep)) {
                    // Copy the path so sibling branches do not share state.
                    dfs(dep, [...path])
                } else if (recursionStack.has(dep)) {
                    // Back-edge: the cycle is the path segment from dep onward.
                    const cycleStart = path.indexOf(dep)
                    if (cycleStart !== -1) {
                        const cycle = [...path.slice(cycleStart), dep]
                        const normalized = this.normalizeCycle(cycle)
                        if (!this.cycleExists(cycles, normalized)) {
                            cycles.push(normalized)
                        }
                    }
                }
            }

            recursionStack.delete(node)
        }

        for (const node of graph.imports.keys()) {
            if (!visited.has(node)) {
                dfs(node, [])
            }
        }

        return cycles
    }

    /**
     * Normalize a cycle to start with its lexicographically smallest path,
     * so equivalent rotations of the same cycle compare equal.
     */
    private normalizeCycle(cycle: string[]): string[] {
        if (cycle.length <= 1) {
            return cycle
        }

        // The last element repeats the first; rotate the open cycle only.
        const withoutLast = cycle.slice(0, -1)
        const minIndex = withoutLast.reduce(
            (minIdx, path, idx) => (path < withoutLast[minIdx] ? idx : minIdx),
            0,
        )

        const rotated = [...withoutLast.slice(minIndex), ...withoutLast.slice(0, minIndex)]
        rotated.push(rotated[0])

        return rotated
    }

    /**
     * Check if a cycle already exists in the list (compared by joining the
     * normalized node sequence with "→").
     */
    private cycleExists(cycles: string[][], newCycle: string[]): boolean {
        const newKey = newCycle.join("→")
        return cycles.some((cycle) => cycle.join("→") === newKey)
    }

    /**
     * Get statistics about the indexes.
     * "Hubs" are files with more than 5 dependents; "orphans" have neither
     * imports nor dependents.
     */
    getStats(
        symbolIndex: SymbolIndex,
        depsGraph: DepsGraph,
    ): {
        totalSymbols: number
        symbolsByType: Record<SymbolLocation["type"], number>
        totalFiles: number
        totalDependencies: number
        averageDependencies: number
        hubs: string[]
        orphans: string[]
    } {
        const symbolsByType: Record<SymbolLocation["type"], number> = {
            function: 0,
            class: 0,
            interface: 0,
            type: 0,
            variable: 0,
        }

        // Count every location (a name may resolve to several definitions).
        let totalSymbols = 0
        for (const locations of symbolIndex.values()) {
            totalSymbols += locations.length
            for (const loc of locations) {
                symbolsByType[loc.type]++
            }
        }

        const totalFiles = depsGraph.imports.size
        let totalDependencies = 0
        const hubs: string[] = []
        const orphans: string[] = []

        for (const [_filePath, deps] of depsGraph.imports) {
            totalDependencies += deps.length
        }

        for (const [filePath, dependents] of depsGraph.importedBy) {
            if (dependents.length > 5) {
                hubs.push(filePath)
            }
            if (dependents.length === 0 && (depsGraph.imports.get(filePath)?.length ?? 0) === 0) {
                orphans.push(filePath)
            }
        }

        return {
            totalSymbols,
            symbolsByType,
            totalFiles,
            totalDependencies,
            averageDependencies: totalFiles > 0 ? totalDependencies / totalFiles : 0,
            hubs: hubs.sort(),
            orphans: orphans.sort(),
        }
    }
}
|
||||||
448
packages/ipuaro/src/infrastructure/indexer/MetaAnalyzer.ts
Normal file
448
packages/ipuaro/src/infrastructure/indexer/MetaAnalyzer.ts
Normal file
@@ -0,0 +1,448 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import {
|
||||||
|
type ComplexityMetrics,
|
||||||
|
createFileMeta,
|
||||||
|
type FileMeta,
|
||||||
|
isHubFile,
|
||||||
|
} from "../../domain/value-objects/FileMeta.js"
|
||||||
|
import type { ClassInfo, FileAST, FunctionInfo } from "../../domain/value-objects/FileAST.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Analyzes file metadata including complexity, dependencies, and classification.
|
||||||
|
*/
|
||||||
|
export class MetaAnalyzer {
    // Root of the analyzed project; used to reject imports resolving
    // outside the project tree.
    private readonly projectRoot: string

    constructor(projectRoot: string) {
        this.projectRoot = projectRoot
    }

    /**
     * Analyze a file and compute its metadata.
     * @param filePath - Absolute path to the file
     * @param ast - Parsed AST for the file
     * @param content - Raw file content (for LOC calculation)
     * @param allASTs - Map of all file paths to their ASTs (for dependents)
     */
    analyze(
        filePath: string,
        ast: FileAST,
        content: string,
        allASTs: Map<string, FileAST>,
    ): FileMeta {
        const complexity = this.calculateComplexity(ast, content)
        const dependencies = this.resolveDependencies(filePath, ast)
        const dependents = this.findDependents(filePath, allASTs)
        const fileType = this.classifyFileType(filePath)
        const isEntryPoint = this.isEntryPointFile(filePath, dependents.length)

        return createFileMeta({
            complexity,
            dependencies,
            dependents,
            isHub: isHubFile(dependents.length),
            isEntryPoint,
            fileType,
        })
    }

    /**
     * Calculate complexity metrics for a file.
     * Nesting and cyclomatic complexity are size-based estimates (see the
     * estimate* helpers), not true AST-depth measurements.
     */
    calculateComplexity(ast: FileAST, content: string): ComplexityMetrics {
        const loc = this.countLinesOfCode(content)
        const nesting = this.calculateMaxNesting(ast)
        const cyclomaticComplexity = this.calculateCyclomaticComplexity(ast)
        const score = this.calculateComplexityScore(loc, nesting, cyclomaticComplexity)

        return {
            loc,
            nesting,
            cyclomaticComplexity,
            score,
        }
    }

    /**
     * Count lines of code (excluding empty lines and comments).
     * Tracks block comments line-by-line; a line where code follows a
     * closing block comment still counts.
     */
    countLinesOfCode(content: string): number {
        const lines = content.split("\n")
        let loc = 0
        let inBlockComment = false

        for (const line of lines) {
            const trimmed = line.trim()

            if (inBlockComment) {
                if (trimmed.includes("*/")) {
                    inBlockComment = false
                }
                continue
            }

            if (trimmed.startsWith("/*")) {
                if (!trimmed.includes("*/")) {
                    inBlockComment = true
                    continue
                }
                // Single-line block comment: count only if code follows it.
                const afterComment = trimmed.substring(trimmed.indexOf("*/") + 2).trim()
                if (afterComment === "" || afterComment.startsWith("//")) {
                    continue
                }
                loc++
                continue
            }

            if (trimmed === "" || trimmed.startsWith("//")) {
                continue
            }

            loc++
        }

        return loc
    }

    /**
     * Calculate maximum nesting depth from AST.
     * Returns the largest estimate across all functions and classes.
     */
    calculateMaxNesting(ast: FileAST): number {
        let maxNesting = 0

        for (const func of ast.functions) {
            const depth = this.estimateFunctionNesting(func)
            maxNesting = Math.max(maxNesting, depth)
        }

        for (const cls of ast.classes) {
            const depth = this.estimateClassNesting(cls)
            maxNesting = Math.max(maxNesting, depth)
        }

        return maxNesting
    }

    /**
     * Estimate nesting depth for a function based on line count.
     * More accurate nesting would require full AST traversal.
     */
    private estimateFunctionNesting(func: FunctionInfo): number {
        const lines = func.lineEnd - func.lineStart + 1
        if (lines <= 5) {
            return 1
        }
        if (lines <= 15) {
            return 2
        }
        if (lines <= 30) {
            return 3
        }
        if (lines <= 50) {
            return 4
        }
        return 5
    }

    /**
     * Estimate nesting depth for a class: deepest estimated method nesting
     * plus one level for the class body itself.
     */
    private estimateClassNesting(cls: ClassInfo): number {
        let maxMethodNesting = 1

        for (const method of cls.methods) {
            const lines = method.lineEnd - method.lineStart + 1
            let depth = 1
            if (lines > 5) {
                depth = 2
            }
            if (lines > 15) {
                depth = 3
            }
            if (lines > 30) {
                depth = 4
            }
            maxMethodNesting = Math.max(maxMethodNesting, depth)
        }

        return maxMethodNesting + 1
    }

    /**
     * Calculate cyclomatic complexity from AST.
     * Base complexity is 1, plus a size-based estimate per function and
     * per class method (roughly one decision point per 8-10 lines).
     */
    calculateCyclomaticComplexity(ast: FileAST): number {
        let complexity = 1

        for (const func of ast.functions) {
            complexity += this.estimateFunctionComplexity(func)
        }

        for (const cls of ast.classes) {
            for (const method of cls.methods) {
                const lines = method.lineEnd - method.lineStart + 1
                complexity += Math.max(1, Math.floor(lines / 10))
            }
        }

        return complexity
    }

    /**
     * Estimate function complexity based on size (one point per ~8 lines,
     * minimum 1).
     */
    private estimateFunctionComplexity(func: FunctionInfo): number {
        const lines = func.lineEnd - func.lineStart + 1
        return Math.max(1, Math.floor(lines / 8))
    }

    /**
     * Calculate overall complexity score (0-100) as a weighted blend of
     * LOC (normalized to 500), nesting (to 6), and cyclomatic (to 30).
     */
    calculateComplexityScore(loc: number, nesting: number, cyclomatic: number): number {
        const locWeight = 0.3
        const nestingWeight = 0.35
        const cyclomaticWeight = 0.35

        const locScore = Math.min(100, (loc / 500) * 100)
        const nestingScore = Math.min(100, (nesting / 6) * 100)
        const cyclomaticScore = Math.min(100, (cyclomatic / 30) * 100)

        const score =
            locScore * locWeight + nestingScore * nestingWeight + cyclomaticScore * cyclomaticWeight

        return Math.round(Math.min(100, score))
    }

    /**
     * Resolve internal imports to absolute file paths (sorted, deduped).
     * NOTE(review): unlike IndexBuilder, resolution here does not check
     * that the target file actually exists — a ".tsx" or "/index.ts"
     * target will be reported as a ".ts" path; confirm consumers tolerate
     * this.
     */
    resolveDependencies(filePath: string, ast: FileAST): string[] {
        const dependencies: string[] = []
        const fileDir = path.dirname(filePath)

        for (const imp of ast.imports) {
            if (imp.type !== "internal") {
                continue
            }

            const resolved = this.resolveImportPath(fileDir, imp.from)
            if (resolved && !dependencies.includes(resolved)) {
                dependencies.push(resolved)
            }
        }

        return dependencies.sort()
    }

    /**
     * Resolve a relative import path to an absolute path.
     * Returns null when the path resolves outside the project root.
     */
    private resolveImportPath(fromDir: string, importPath: string): string | null {
        const absolutePath = path.resolve(fromDir, importPath)
        const normalized = this.normalizeImportPath(absolutePath)

        if (normalized.startsWith(this.projectRoot)) {
            return normalized
        }

        return null
    }

    /**
     * Normalize an import path to a ".ts"-style source path: ".js" becomes
     * ".ts", ".jsx" becomes ".tsx", and an extensionless path gets ".ts"
     * appended. (Directory/index imports are NOT expanded here; pathsMatch
     * handles "/index" equivalence at comparison time.)
     */
    private normalizeImportPath(importPath: string): string {
        let normalized = importPath

        if (normalized.endsWith(".js")) {
            normalized = `${normalized.slice(0, -3)}.ts`
        } else if (normalized.endsWith(".jsx")) {
            normalized = `${normalized.slice(0, -4)}.tsx`
        } else if (!/\.(ts|tsx|js|jsx)$/.exec(normalized)) {
            normalized = `${normalized}.ts`
        }

        return normalized
    }

    /**
     * Find all files that import the given file (sorted).
     * Scans every other AST, so this is O(files × imports) per call.
     */
    findDependents(filePath: string, allASTs: Map<string, FileAST>): string[] {
        const dependents: string[] = []
        const normalizedPath = this.normalizePathForComparison(filePath)

        for (const [otherPath, ast] of allASTs) {
            if (otherPath === filePath) {
                continue
            }

            if (this.fileImportsTarget(otherPath, ast, normalizedPath)) {
                dependents.push(otherPath)
            }
        }

        return dependents.sort()
    }

    /**
     * Check if a file's internal imports resolve to the target path.
     */
    private fileImportsTarget(filePath: string, ast: FileAST, normalizedTarget: string): boolean {
        const fileDir = path.dirname(filePath)

        for (const imp of ast.imports) {
            if (imp.type !== "internal") {
                continue
            }

            const resolvedImport = this.resolveImportPath(fileDir, imp.from)
            if (!resolvedImport) {
                continue
            }

            const normalizedImport = this.normalizePathForComparison(resolvedImport)
            if (this.pathsMatch(normalizedTarget, normalizedImport)) {
                return true
            }
        }

        return false
    }

    /**
     * Normalize path for comparison by stripping a known source extension
     * (".ts", ".tsx", ".js", ".jsx").
     */
    private normalizePathForComparison(filePath: string): string {
        let normalized = filePath

        if (normalized.endsWith(".js")) {
            normalized = normalized.slice(0, -3)
        } else if (normalized.endsWith(".ts")) {
            normalized = normalized.slice(0, -3)
        } else if (normalized.endsWith(".jsx")) {
            normalized = normalized.slice(0, -4)
        } else if (normalized.endsWith(".tsx")) {
            normalized = normalized.slice(0, -4)
        }

        return normalized
    }

    /**
     * Check if two normalized (extension-stripped) paths match, treating
     * "dir/index" and "dir" as equivalent.
     */
    private pathsMatch(path1: string, path2: string): boolean {
        if (path1 === path2) {
            return true
        }

        // "/index" is 6 characters; dropping it yields the directory path.
        if (path1.endsWith("/index") && path2 === path1.slice(0, -6)) {
            return true
        }
        if (path2.endsWith("/index") && path1 === path2.slice(0, -6)) {
            return true
        }

        return false
    }

    /**
     * Classify file type based on path and name.
     * Precedence: test → types → config → source → unknown.
     */
    classifyFileType(filePath: string): FileMeta["fileType"] {
        const basename = path.basename(filePath)
        const lowercasePath = filePath.toLowerCase()

        if (basename.includes(".test.") || basename.includes(".spec.")) {
            return "test"
        }

        if (lowercasePath.includes("/tests/") || lowercasePath.includes("/__tests__/")) {
            return "test"
        }

        if (basename.endsWith(".d.ts")) {
            return "types"
        }

        if (lowercasePath.includes("/types/") || basename === "types.ts") {
            return "types"
        }

        const configPatterns = [
            "config",
            "tsconfig",
            "eslint",
            "prettier",
            "vitest",
            "jest",
            "babel",
            "webpack",
            "vite",
            "rollup",
        ]

        for (const pattern of configPatterns) {
            if (basename.toLowerCase().includes(pattern)) {
                return "config"
            }
        }

        if (
            filePath.endsWith(".ts") ||
            filePath.endsWith(".tsx") ||
            filePath.endsWith(".js") ||
            filePath.endsWith(".jsx")
        ) {
            return "source"
        }

        return "unknown"
    }

    /**
     * Determine if file is an entry point: an index.* file, a file with no
     * dependents, or one matching a well-known entry basename.
     * NOTE(review): dependentCount === 0 also makes every orphan file an
     * "entry point" — confirm that is intended.
     */
    isEntryPointFile(filePath: string, dependentCount: number): boolean {
        const basename = path.basename(filePath)

        if (basename.startsWith("index.")) {
            return true
        }

        if (dependentCount === 0) {
            return true
        }

        const entryPatterns = ["main.", "app.", "cli.", "server.", "index."]
        for (const pattern of entryPatterns) {
            if (basename.toLowerCase().startsWith(pattern)) {
                return true
            }
        }

        return false
    }

    /**
     * Batch analyze multiple files. Builds the shared AST map once, then
     * runs analyze() per file.
     */
    analyzeAll(files: Map<string, { ast: FileAST; content: string }>): Map<string, FileMeta> {
        const allASTs = new Map<string, FileAST>()
        for (const [filePath, { ast }] of files) {
            allASTs.set(filePath, ast)
        }

        const results = new Map<string, FileMeta>()
        for (const [filePath, { ast, content }] of files) {
            const meta = this.analyze(filePath, ast, content, allASTs)
            results.set(filePath, meta)
        }

        return results
    }
}
|
||||||
285
packages/ipuaro/src/infrastructure/indexer/Watchdog.ts
Normal file
285
packages/ipuaro/src/infrastructure/indexer/Watchdog.ts
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
import * as chokidar from "chokidar"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import { DEFAULT_IGNORE_PATTERNS, SUPPORTED_EXTENSIONS } from "../../domain/constants/index.js"
|
||||||
|
|
||||||
|
/** Kind of file-system change reported by the watcher. */
export type FileChangeType = "add" | "change" | "unlink"
|
||||||
|
|
||||||
|
/** A single debounced file-system change. */
export interface FileChangeEvent {
    /** Kind of change. */
    type: FileChangeType
    /** Absolute path (normalized via path.resolve). */
    path: string
    /** Date.now() at the moment the change was detected. */
    timestamp: number
}
|
||||||
|
|
||||||
|
/** Listener invoked once per flushed (debounced) change event. */
export type FileChangeCallback = (event: FileChangeEvent) => void
|
||||||
|
|
||||||
|
/** User-facing watcher configuration; all fields optional. */
export interface WatchdogOptions {
    /** Debounce delay in milliseconds (default: 500) */
    debounceMs?: number
    /** Patterns to ignore (default: DEFAULT_IGNORE_PATTERNS) */
    ignorePatterns?: readonly string[]
    /** File extensions to watch (default: SUPPORTED_EXTENSIONS) */
    extensions?: readonly string[]
    /** Use polling instead of native events (useful for network drives) */
    usePolling?: boolean
    /** Polling interval in milliseconds (default: 1000) */
    pollInterval?: number
}
|
||||||
|
|
||||||
|
/** WatchdogOptions with every field filled in (defaults applied). */
interface ResolvedWatchdogOptions {
    debounceMs: number
    ignorePatterns: readonly string[]
    extensions: readonly string[]
    usePolling: boolean
    pollInterval: number
}
|
||||||
|
|
||||||
|
/** Defaults merged under caller-supplied options in the constructor. */
const DEFAULT_OPTIONS: ResolvedWatchdogOptions = {
    debounceMs: 500,
    ignorePatterns: DEFAULT_IGNORE_PATTERNS,
    extensions: SUPPORTED_EXTENSIONS,
    usePolling: false,
    pollInterval: 1000,
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Watches for file changes in a directory using chokidar.
|
||||||
|
*/
|
||||||
|
export class Watchdog {
|
||||||
|
private watcher: chokidar.FSWatcher | null = null
|
||||||
|
private readonly callbacks: FileChangeCallback[] = []
|
||||||
|
private readonly pendingChanges = new Map<string, FileChangeEvent>()
|
||||||
|
private readonly debounceTimers = new Map<string, NodeJS.Timeout>()
|
||||||
|
private readonly options: ResolvedWatchdogOptions
|
||||||
|
private root = ""
|
||||||
|
private isRunning = false
|
||||||
|
|
||||||
|
/**
 * Create a watchdog, filling any omitted option with its default.
 */
constructor(options: WatchdogOptions = {}) {
    this.options = Object.assign({}, DEFAULT_OPTIONS, options)
}
|
||||||
|
|
||||||
|
/**
 * Start watching a directory for file changes.
 * If already running, the previous watcher is stopped first.
 */
start(root: string): void {
    if (this.isRunning) {
        // NOTE(review): stop() is async but fired-and-forgotten here; the new
        // watcher is created before the old one has finished closing — confirm
        // a restart cannot briefly double-report events.
        void this.stop()
    }

    this.root = root
    this.isRunning = true

    const globPatterns = this.buildGlobPatterns(root)
    const ignorePatterns = this.buildIgnorePatterns()

    this.watcher = chokidar.watch(globPatterns, {
        ignored: ignorePatterns,
        persistent: true,
        // Do not emit "add" for files already present at startup.
        ignoreInitial: true,
        usePolling: this.options.usePolling,
        interval: this.options.pollInterval,
        // Wait for writes to settle to avoid reporting half-written files.
        awaitWriteFinish: {
            stabilityThreshold: 100,
            pollInterval: 100,
        },
    })

    this.watcher.on("add", (filePath) => {
        this.handleChange("add", filePath)
    })
    this.watcher.on("change", (filePath) => {
        this.handleChange("change", filePath)
    })
    this.watcher.on("unlink", (filePath) => {
        this.handleChange("unlink", filePath)
    })
    this.watcher.on("error", (error) => {
        this.handleError(error)
    })
}
|
||||||
|
|
||||||
|
/**
 * Stop watching for file changes.
 * Pending debounce timers are cancelled and their queued events are
 * dropped (not flushed) before the underlying watcher is closed.
 */
async stop(): Promise<void> {
    if (!this.isRunning) {
        return
    }

    // Drop all queued work before closing the watcher.
    for (const timer of this.debounceTimers.values()) {
        clearTimeout(timer)
    }
    this.debounceTimers.clear()
    this.pendingChanges.clear()

    if (this.watcher) {
        await this.watcher.close()
        this.watcher = null
    }

    this.isRunning = false
}
|
||||||
|
|
||||||
|
/**
 * Register a callback for file change events.
 * Registering the same callback twice results in two invocations per event.
 */
onFileChange(callback: FileChangeCallback): void {
    this.callbacks.push(callback)
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a callback.
|
||||||
|
*/
|
||||||
|
offFileChange(callback: FileChangeCallback): void {
|
||||||
|
const index = this.callbacks.indexOf(callback)
|
||||||
|
if (index !== -1) {
|
||||||
|
this.callbacks.splice(index, 1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Check if the watchdog is currently running.
 */
isWatching(): boolean {
    return this.isRunning
}
|
||||||
|
|
||||||
|
/**
 * Get the root directory being watched (empty string before start()).
 */
getRoot(): string {
    return this.root
}
|
||||||
|
|
||||||
|
/**
 * Get the number of pending (debounced, not yet flushed) changes.
 */
getPendingCount(): number {
    return this.pendingChanges.size
}
|
||||||
|
|
||||||
|
/**
 * Handle a file change event with debouncing.
 * Changes are debounced per path: a rapid burst collapses to the latest
 * event (e.g. "add" immediately followed by "change" is reported as
 * "change").
 */
private handleChange(type: FileChangeType, filePath: string): void {
    if (!this.isValidFile(filePath)) {
        return
    }

    const normalizedPath = path.resolve(filePath)

    const event: FileChangeEvent = {
        type,
        path: normalizedPath,
        timestamp: Date.now(),
    }

    // Latest event for a given path wins.
    this.pendingChanges.set(normalizedPath, event)

    // Reset the per-path debounce window.
    const existingTimer = this.debounceTimers.get(normalizedPath)
    if (existingTimer) {
        clearTimeout(existingTimer)
    }

    const timer = setTimeout(() => {
        this.flushChange(normalizedPath)
    }, this.options.debounceMs)

    this.debounceTimers.set(normalizedPath, timer)
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Flush a pending change and notify callbacks.
|
||||||
|
*/
|
||||||
|
private flushChange(filePath: string): void {
|
||||||
|
const event = this.pendingChanges.get(filePath)
|
||||||
|
if (!event) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.pendingChanges.delete(filePath)
|
||||||
|
this.debounceTimers.delete(filePath)
|
||||||
|
|
||||||
|
for (const callback of this.callbacks) {
|
||||||
|
try {
|
||||||
|
callback(event)
|
||||||
|
} catch {
|
||||||
|
// Silently ignore callback errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Handle watcher errors.
     * Errors are logged to stderr and otherwise swallowed so a transient
     * filesystem error cannot crash the watch loop.
     */
    private handleError(error: Error): void {
        // Log error but don't crash
        console.error(`[Watchdog] Error: ${error.message}`)
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a file should be watched based on extension.
|
||||||
|
*/
|
||||||
|
private isValidFile(filePath: string): boolean {
|
||||||
|
const ext = path.extname(filePath)
|
||||||
|
return this.options.extensions.includes(ext)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build glob patterns for watching.
|
||||||
|
*/
|
||||||
|
private buildGlobPatterns(root: string): string[] {
|
||||||
|
return this.options.extensions.map((ext) => path.join(root, "**", `*${ext}`))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build ignore patterns for chokidar.
|
||||||
|
*/
|
||||||
|
private buildIgnorePatterns(): (string | RegExp)[] {
|
||||||
|
const patterns: (string | RegExp)[] = []
|
||||||
|
|
||||||
|
for (const pattern of this.options.ignorePatterns) {
|
||||||
|
if (pattern.includes("*")) {
|
||||||
|
const regexPattern = pattern
|
||||||
|
.replace(/\./g, "\\.")
|
||||||
|
.replace(/\*\*/g, ".*")
|
||||||
|
.replace(/\*/g, "[^/]*")
|
||||||
|
patterns.push(new RegExp(regexPattern))
|
||||||
|
} else {
|
||||||
|
patterns.push(`**/${pattern}/**`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return patterns
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Force flush all pending changes immediately.
|
||||||
|
*/
|
||||||
|
flushAll(): void {
|
||||||
|
for (const timer of this.debounceTimers.values()) {
|
||||||
|
clearTimeout(timer)
|
||||||
|
}
|
||||||
|
this.debounceTimers.clear()
|
||||||
|
|
||||||
|
for (const filePath of this.pendingChanges.keys()) {
|
||||||
|
this.flushChange(filePath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get watched paths (for debugging).
|
||||||
|
*/
|
||||||
|
getWatchedPaths(): string[] {
|
||||||
|
if (!this.watcher) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const watched = this.watcher.getWatched()
|
||||||
|
const paths: string[] = []
|
||||||
|
for (const dir of Object.keys(watched)) {
|
||||||
|
for (const file of watched[dir]) {
|
||||||
|
paths.push(path.join(dir, file))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return paths.sort()
|
||||||
|
}
|
||||||
|
}
|
||||||
6
packages/ipuaro/src/infrastructure/indexer/index.ts
Normal file
6
packages/ipuaro/src/infrastructure/indexer/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// Barrel file: re-exports the indexer infrastructure modules.
export * from "./FileScanner.js"
export * from "./ASTParser.js"
export * from "./MetaAnalyzer.js"
export * from "./IndexBuilder.js"
export * from "./Watchdog.js"
export * from "./tree-sitter-types.js"
|
||||||
@@ -0,0 +1,77 @@
|
|||||||
|
/**
 * Tree-sitter node type constants for TypeScript/JavaScript parsing.
 * These are infrastructure-level constants, not exposed to domain/application layers.
 *
 * Source: tree-sitter-typescript/typescript/src/node-types.json
 */

// Grammar node-type names, grouped by syntactic category.
export const NodeType = {
    // Statements
    IMPORT_STATEMENT: "import_statement",
    EXPORT_STATEMENT: "export_statement",
    LEXICAL_DECLARATION: "lexical_declaration",

    // Declarations
    FUNCTION_DECLARATION: "function_declaration",
    CLASS_DECLARATION: "class_declaration",
    INTERFACE_DECLARATION: "interface_declaration",
    TYPE_ALIAS_DECLARATION: "type_alias_declaration",

    // Clauses
    IMPORT_CLAUSE: "import_clause",
    EXPORT_CLAUSE: "export_clause",
    EXTENDS_CLAUSE: "extends_clause",
    IMPLEMENTS_CLAUSE: "implements_clause",
    EXTENDS_TYPE_CLAUSE: "extends_type_clause",
    CLASS_HERITAGE: "class_heritage",

    // Import specifiers
    NAMESPACE_IMPORT: "namespace_import",
    NAMED_IMPORTS: "named_imports",
    IMPORT_SPECIFIER: "import_specifier",
    EXPORT_SPECIFIER: "export_specifier",

    // Class members
    METHOD_DEFINITION: "method_definition",
    PUBLIC_FIELD_DEFINITION: "public_field_definition",
    FIELD_DEFINITION: "field_definition",
    PROPERTY_SIGNATURE: "property_signature",

    // Parameters
    REQUIRED_PARAMETER: "required_parameter",
    OPTIONAL_PARAMETER: "optional_parameter",

    // Expressions & values
    ARROW_FUNCTION: "arrow_function",
    FUNCTION: "function",
    VARIABLE_DECLARATOR: "variable_declarator",

    // Identifiers & types
    IDENTIFIER: "identifier",
    TYPE_IDENTIFIER: "type_identifier",

    // Modifiers
    ASYNC: "async",
    STATIC: "static",
    ABSTRACT: "abstract",
    DEFAULT: "default",
    ACCESSIBILITY_MODIFIER: "accessibility_modifier",
    READONLY: "readonly",
} as const

/** Union of all node-type string literals declared in NodeType. */
export type NodeTypeValue = (typeof NodeType)[keyof typeof NodeType]

// Named-field identifiers used for field-based child lookups on tree-sitter nodes.
export const FieldName = {
    SOURCE: "source",
    NAME: "name",
    ALIAS: "alias",
    DECLARATION: "declaration",
    PARAMETERS: "parameters",
    RETURN_TYPE: "return_type",
    BODY: "body",
    TYPE: "type",
    PATTERN: "pattern",
    VALUE: "value",
} as const

/** Union of all field-name string literals declared in FieldName. */
export type FieldNameValue = (typeof FieldName)[keyof typeof FieldName]
|
||||||
302
packages/ipuaro/src/infrastructure/llm/OllamaClient.ts
Normal file
302
packages/ipuaro/src/infrastructure/llm/OllamaClient.ts
Normal file
@@ -0,0 +1,302 @@
|
|||||||
|
import { type Message, Ollama, type Tool } from "ollama"
|
||||||
|
import type {
|
||||||
|
ILLMClient,
|
||||||
|
LLMResponse,
|
||||||
|
ToolDef,
|
||||||
|
ToolParameter,
|
||||||
|
} from "../../domain/services/ILLMClient.js"
|
||||||
|
import type { ChatMessage } from "../../domain/value-objects/ChatMessage.js"
|
||||||
|
import { createToolCall, type ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||||
|
import type { LLMConfig } from "../../shared/constants/config.js"
|
||||||
|
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
import { estimateTokens } from "../../shared/utils/tokens.js"
|
||||||
|
|
||||||
|
/**
 * Ollama LLM client implementation.
 * Wraps the Ollama SDK for chat completions with tool support.
 */
export class OllamaClient implements ILLMClient {
    private readonly client: Ollama
    private readonly host: string
    private readonly model: string
    private readonly contextWindow: number
    private readonly temperature: number
    // NOTE(review): timeout is stored but never read anywhere in this class —
    // confirm whether request timeouts were meant to be enforced here.
    private readonly timeout: number
    // Created per chat() call and cleared in its finally block.
    private abortController: AbortController | null = null

    constructor(config: LLMConfig) {
        this.host = config.host
        this.client = new Ollama({ host: this.host })
        this.model = config.model
        this.contextWindow = config.contextWindow
        this.temperature = config.temperature
        this.timeout = config.timeout
    }

    /**
     * Send messages to LLM and get response.
     * Converts messages/tools into SDK shapes, performs a non-streaming chat
     * call, and maps the reply back into an LLMResponse.
     *
     * NOTE(review): the AbortController instantiated here is never passed to
     * the SDK request, so abort() cannot cancel an in-flight chat — confirm
     * against the ollama SDK whether a signal/abort hook is available.
     */
    async chat(messages: ChatMessage[], tools?: ToolDef[]): Promise<LLMResponse> {
        const startTime = Date.now()
        this.abortController = new AbortController()

        try {
            const ollamaMessages = this.convertMessages(messages)
            const ollamaTools = tools ? this.convertTools(tools) : undefined

            const response = await this.client.chat({
                model: this.model,
                messages: ollamaMessages,
                tools: ollamaTools,
                options: {
                    temperature: this.temperature,
                },
                stream: false,
            })

            const timeMs = Date.now() - startTime
            const toolCalls = this.extractToolCalls(response.message)

            return {
                content: response.message.content,
                toolCalls,
                // eval_count is the SDK's token count; fall back to estimation.
                tokens: response.eval_count ?? estimateTokens(response.message.content),
                timeMs,
                truncated: false,
                stopReason: this.determineStopReason(response, toolCalls),
            }
        } catch (error) {
            if (error instanceof Error && error.name === "AbortError") {
                throw IpuaroError.llm("Request was aborted")
            }
            throw this.handleError(error)
        } finally {
            this.abortController = null
        }
    }

    /**
     * Count tokens in text.
     * Uses estimation since Ollama doesn't provide a tokenizer endpoint.
     */
    async countTokens(text: string): Promise<number> {
        return Promise.resolve(estimateTokens(text))
    }

    /**
     * Check if LLM service is available.
     * Probes the server with a model-list request; any failure means "down".
     */
    async isAvailable(): Promise<boolean> {
        try {
            await this.client.list()
            return true
        } catch {
            return false
        }
    }

    /**
     * Get current model name.
     */
    getModelName(): string {
        return this.model
    }

    /**
     * Get context window size (in tokens, as configured).
     */
    getContextWindowSize(): number {
        return this.contextWindow
    }

    /**
     * Pull/download model if not available locally.
     * @throws IpuaroError when the pull fails.
     */
    async pullModel(model: string): Promise<void> {
        try {
            await this.client.pull({ model, stream: false })
        } catch (error) {
            throw this.handleError(error, `Failed to pull model: ${model}`)
        }
    }

    /**
     * Check if a specific model is available locally.
     * Matches exact names and tagged variants ("name:tag").
     */
    async hasModel(model: string): Promise<boolean> {
        try {
            const result = await this.client.list()
            return result.models.some((m) => m.name === model || m.name.startsWith(`${model}:`))
        } catch {
            // Unreachable server is treated the same as "model missing".
            return false
        }
    }

    /**
     * List available models.
     * @throws IpuaroError when the server cannot be queried.
     */
    async listModels(): Promise<string[]> {
        try {
            const result = await this.client.list()
            return result.models.map((m) => m.name)
        } catch (error) {
            throw this.handleError(error, "Failed to list models")
        }
    }

    /**
     * Abort current generation.
     * NOTE(review): see chat() — the controller is not wired to the request,
     * so this currently has no observable effect on an in-flight call.
     */
    abort(): void {
        if (this.abortController) {
            this.abortController.abort()
        }
    }

    /**
     * Convert ChatMessage array to Ollama Message format.
     * NOTE(review): for tool messages, only msg.content is forwarded — the
     * structured msg.toolResults payload is dropped; confirm this is intended.
     */
    private convertMessages(messages: ChatMessage[]): Message[] {
        return messages.map((msg): Message => {
            const role = this.convertRole(msg.role)

            if (msg.role === "tool" && msg.toolResults) {
                return {
                    role: "tool",
                    content: msg.content,
                }
            }

            if (msg.role === "assistant" && msg.toolCalls && msg.toolCalls.length > 0) {
                return {
                    role: "assistant",
                    content: msg.content,
                    tool_calls: msg.toolCalls.map((tc) => ({
                        function: {
                            name: tc.name,
                            arguments: tc.params,
                        },
                    })),
                }
            }

            return {
                role,
                content: msg.content,
            }
        })
    }

    /**
     * Convert message role to Ollama role (identity mapping with a "user"
     * fallback for unknown roles).
     */
    private convertRole(role: ChatMessage["role"]): "user" | "assistant" | "system" | "tool" {
        switch (role) {
            case "user":
                return "user"
            case "assistant":
                return "assistant"
            case "system":
                return "system"
            case "tool":
                return "tool"
            default:
                return "user"
        }
    }

    /**
     * Convert ToolDef array to Ollama Tool format (JSON-Schema style
     * function declarations).
     */
    private convertTools(tools: ToolDef[]): Tool[] {
        return tools.map(
            (tool): Tool => ({
                type: "function",
                function: {
                    name: tool.name,
                    description: tool.description,
                    parameters: {
                        type: "object",
                        properties: this.convertParameters(tool.parameters),
                        required: tool.parameters.filter((p) => p.required).map((p) => p.name),
                    },
                },
            }),
        )
    }

    /**
     * Convert ToolParameter array to JSON Schema properties.
     */
    private convertParameters(
        params: ToolParameter[],
    ): Record<string, { type: string; description: string; enum?: string[] }> {
        const properties: Record<string, { type: string; description: string; enum?: string[] }> =
            {}

        for (const param of params) {
            properties[param.name] = {
                type: param.type,
                description: param.description,
                // Only attach "enum" when the parameter actually declares one.
                ...(param.enum && { enum: param.enum }),
            }
        }

        return properties
    }

    /**
     * Extract tool calls from Ollama response message.
     * IDs are synthesized from timestamp + position since the SDK does not
     * supply call identifiers.
     */
    private extractToolCalls(message: Message): ToolCall[] {
        if (!message.tool_calls || message.tool_calls.length === 0) {
            return []
        }

        return message.tool_calls.map((tc, index) =>
            createToolCall(
                `call_${String(Date.now())}_${String(index)}`,
                tc.function.name,
                tc.function.arguments,
            ),
        )
    }

    /**
     * Determine stop reason from response.
     * Tool calls win over everything; "length" reflects the SDK's done_reason.
     */
    private determineStopReason(
        response: { done_reason?: string },
        toolCalls: ToolCall[],
    ): "end" | "length" | "tool_use" {
        if (toolCalls.length > 0) {
            return "tool_use"
        }

        if (response.done_reason === "length") {
            return "length"
        }

        return "end"
    }

    /**
     * Handle and wrap errors into IpuaroError, translating common
     * connection/model-missing failures into actionable messages.
     */
    private handleError(error: unknown, context?: string): IpuaroError {
        const message = error instanceof Error ? error.message : String(error)
        const fullMessage = context ? `${context}: ${message}` : message

        if (message.includes("ECONNREFUSED") || message.includes("fetch failed")) {
            return IpuaroError.llm(`Cannot connect to Ollama at ${this.host}`)
        }

        if (message.includes("model") && message.includes("not found")) {
            return IpuaroError.llm(
                `Model "${this.model}" not found. Run: ollama pull ${this.model}`,
            )
        }

        return IpuaroError.llm(fullMessage)
    }
}
|
||||||
220
packages/ipuaro/src/infrastructure/llm/ResponseParser.ts
Normal file
220
packages/ipuaro/src/infrastructure/llm/ResponseParser.ts
Normal file
@@ -0,0 +1,220 @@
|
|||||||
|
import { createToolCall, type ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||||
|
|
||||||
|
/**
 * Parsed response from LLM, split into plain text and structured tool calls.
 */
export interface ParsedResponse {
    /** Text content with tool-call XML stripped out. */
    content: string
    /** Tool calls extracted from the response. */
    toolCalls: ToolCall[]
    /** Whether parsing encountered issues. */
    hasParseErrors: boolean
    /** Human-readable parse error messages (empty when parsing succeeded). */
    parseErrors: string[]
}
|
||||||
|
|
||||||
|
/**
 * XML tool call tag pattern.
 * Matches: <tool_call name="tool_name">...</tool_call>
 *
 * NOTE(review): the `g` flag makes this regex stateful under
 * RegExp.prototype.test/exec (lastIndex advances). String.prototype.matchAll
 * clones the regex, so matchAll callers are unaffected.
 */
const TOOL_CALL_REGEX = /<tool_call\s+name\s*=\s*"([^"]+)">([\s\S]*?)<\/tool_call>/gi

/**
 * XML parameter tag patterns.
 * PARAM_REGEX_NAMED matches:   <param name="param_name">value</param>
 * PARAM_REGEX_ELEMENT matches: <param_name>value</param_name>
 */
const PARAM_REGEX_NAMED = /<param\s+name\s*=\s*"([^"]+)">([\s\S]*?)<\/param>/gi
const PARAM_REGEX_ELEMENT = /<([a-z_][a-z0-9_]*)>([\s\S]*?)<\/\1>/gi
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse tool calls from LLM response text.
|
||||||
|
* Supports XML format: <tool_call name="get_lines"><path>src/index.ts</path></tool_call>
|
||||||
|
*/
|
||||||
|
export function parseToolCalls(response: string): ParsedResponse {
|
||||||
|
const toolCalls: ToolCall[] = []
|
||||||
|
const parseErrors: string[] = []
|
||||||
|
let content = response
|
||||||
|
|
||||||
|
const matches = [...response.matchAll(TOOL_CALL_REGEX)]
|
||||||
|
|
||||||
|
for (const match of matches) {
|
||||||
|
const [fullMatch, toolName, paramsXml] = match
|
||||||
|
|
||||||
|
try {
|
||||||
|
const params = parseParameters(paramsXml)
|
||||||
|
const toolCall = createToolCall(
|
||||||
|
`xml_${String(Date.now())}_${String(toolCalls.length)}`,
|
||||||
|
toolName,
|
||||||
|
params,
|
||||||
|
)
|
||||||
|
toolCalls.push(toolCall)
|
||||||
|
content = content.replace(fullMatch, "")
|
||||||
|
} catch (error) {
|
||||||
|
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||||
|
parseErrors.push(`Failed to parse tool call "${toolName}": ${errorMsg}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
content = content.trim()
|
||||||
|
|
||||||
|
return {
|
||||||
|
content,
|
||||||
|
toolCalls,
|
||||||
|
hasParseErrors: parseErrors.length > 0,
|
||||||
|
parseErrors,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse parameters from XML content.
|
||||||
|
*/
|
||||||
|
function parseParameters(xml: string): Record<string, unknown> {
|
||||||
|
const params: Record<string, unknown> = {}
|
||||||
|
|
||||||
|
const namedMatches = [...xml.matchAll(PARAM_REGEX_NAMED)]
|
||||||
|
for (const match of namedMatches) {
|
||||||
|
const [, name, value] = match
|
||||||
|
params[name] = parseValue(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (namedMatches.length === 0) {
|
||||||
|
const elementMatches = [...xml.matchAll(PARAM_REGEX_ELEMENT)]
|
||||||
|
for (const match of elementMatches) {
|
||||||
|
const [, name, value] = match
|
||||||
|
params[name] = parseValue(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return params
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a value string to appropriate type.
|
||||||
|
*/
|
||||||
|
function parseValue(value: string): unknown {
|
||||||
|
const trimmed = value.trim()
|
||||||
|
|
||||||
|
if (trimmed === "true") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (trimmed === "false") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (trimmed === "null") {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const num = Number(trimmed)
|
||||||
|
if (!isNaN(num) && trimmed !== "") {
|
||||||
|
return num
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
(trimmed.startsWith("[") && trimmed.endsWith("]")) ||
|
||||||
|
(trimmed.startsWith("{") && trimmed.endsWith("}"))
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
return JSON.parse(trimmed)
|
||||||
|
} catch {
|
||||||
|
return trimmed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return trimmed
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format tool calls to XML for prompt injection.
|
||||||
|
* Useful when you need to show the LLM the expected format.
|
||||||
|
*/
|
||||||
|
export function formatToolCallsAsXml(toolCalls: ToolCall[]): string {
|
||||||
|
return toolCalls
|
||||||
|
.map((tc) => {
|
||||||
|
const params = Object.entries(tc.params)
|
||||||
|
.map(([key, value]) => ` <${key}>${formatValueForXml(value)}</${key}>`)
|
||||||
|
.join("\n")
|
||||||
|
return `<tool_call name="${tc.name}">\n${params}\n</tool_call>`
|
||||||
|
})
|
||||||
|
.join("\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format a value for XML output.
|
||||||
|
*/
|
||||||
|
function formatValueForXml(value: unknown): string {
|
||||||
|
if (value === null || value === undefined) {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof value === "object") {
|
||||||
|
return JSON.stringify(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof value === "string") {
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof value === "number" || typeof value === "boolean") {
|
||||||
|
return String(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
return JSON.stringify(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract thinking/reasoning from response.
|
||||||
|
* Matches content between <thinking>...</thinking> tags.
|
||||||
|
*/
|
||||||
|
export function extractThinking(response: string): { thinking: string; content: string } {
|
||||||
|
const thinkingRegex = /<thinking>([\s\S]*?)<\/thinking>/gi
|
||||||
|
const matches = [...response.matchAll(thinkingRegex)]
|
||||||
|
|
||||||
|
if (matches.length === 0) {
|
||||||
|
return { thinking: "", content: response }
|
||||||
|
}
|
||||||
|
|
||||||
|
let content = response
|
||||||
|
const thoughts: string[] = []
|
||||||
|
|
||||||
|
for (const match of matches) {
|
||||||
|
thoughts.push(match[1].trim())
|
||||||
|
content = content.replace(match[0], "")
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
thinking: thoughts.join("\n\n"),
|
||||||
|
content: content.trim(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if response contains tool calls.
|
||||||
|
*/
|
||||||
|
export function hasToolCalls(response: string): boolean {
|
||||||
|
return TOOL_CALL_REGEX.test(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate tool call parameters against expected schema.
|
||||||
|
*/
|
||||||
|
export function validateToolCallParams(
|
||||||
|
toolName: string,
|
||||||
|
params: Record<string, unknown>,
|
||||||
|
requiredParams: string[],
|
||||||
|
): { valid: boolean; errors: string[] } {
|
||||||
|
const errors: string[] = []
|
||||||
|
|
||||||
|
for (const param of requiredParams) {
|
||||||
|
if (!(param in params) || params[param] === undefined || params[param] === null) {
|
||||||
|
errors.push(`Missing required parameter: ${param}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
valid: errors.length === 0,
|
||||||
|
errors,
|
||||||
|
}
|
||||||
|
}
|
||||||
48
packages/ipuaro/src/infrastructure/llm/index.ts
Normal file
48
packages/ipuaro/src/infrastructure/llm/index.ts
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
// LLM infrastructure exports

// Ollama SDK client wrapper
export { OllamaClient } from "./OllamaClient.js"

// Prompt building
export {
    SYSTEM_PROMPT,
    buildInitialContext,
    buildFileContext,
    truncateContext,
    type ProjectStructure,
} from "./prompts.js"

// Tool definitions and lookup helpers
export {
    ALL_TOOLS,
    READ_TOOLS,
    EDIT_TOOLS,
    SEARCH_TOOLS,
    ANALYSIS_TOOLS,
    GIT_TOOLS,
    RUN_TOOLS,
    CONFIRMATION_TOOLS,
    requiresConfirmation,
    getToolDef,
    getToolsByCategory,
    GET_LINES_TOOL,
    GET_FUNCTION_TOOL,
    GET_CLASS_TOOL,
    GET_STRUCTURE_TOOL,
    EDIT_LINES_TOOL,
    CREATE_FILE_TOOL,
    DELETE_FILE_TOOL,
    FIND_REFERENCES_TOOL,
    FIND_DEFINITION_TOOL,
    GET_DEPENDENCIES_TOOL,
    GET_DEPENDENTS_TOOL,
    GET_COMPLEXITY_TOOL,
    GET_TODOS_TOOL,
    GIT_STATUS_TOOL,
    GIT_DIFF_TOOL,
    GIT_COMMIT_TOOL,
    RUN_COMMAND_TOOL,
    RUN_TESTS_TOOL,
} from "./toolDefs.js"

// Response parsing (XML tool-call extraction)
export {
    parseToolCalls,
    formatToolCallsAsXml,
    extractThinking,
    hasToolCalls,
    validateToolCallParams,
    type ParsedResponse,
} from "./ResponseParser.js"
|
||||||
335
packages/ipuaro/src/infrastructure/llm/prompts.ts
Normal file
335
packages/ipuaro/src/infrastructure/llm/prompts.ts
Normal file
@@ -0,0 +1,335 @@
|
|||||||
|
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||||
|
import type { FileMeta } from "../../domain/value-objects/FileMeta.js"
|
||||||
|
|
||||||
|
/**
 * Project structure for context building.
 */
export interface ProjectStructure {
    /** Project display name. */
    name: string
    /** Path to the project root. */
    rootPath: string
    /** File paths tracked for this project (used for counting and overview). */
    files: string[]
    /** Directory paths, "/"-separated; tree depth is derived from separator count. */
    directories: string[]
}
|
||||||
|
|
||||||
|
/**
 * System prompt for the ipuaro AI agent.
 * Documents the agent's core principles, the full tool surface, and the
 * response/editing/safety rules the model is expected to follow.
 */
export const SYSTEM_PROMPT = `You are ipuaro, a local AI code assistant specialized in helping developers understand and modify their codebase. You operate within a single project directory and have access to powerful tools for reading, searching, analyzing, and editing code.

## Core Principles

1. **Lazy Loading**: You don't have the full code in context. Use tools to fetch exactly what you need.
2. **Precision**: Always verify file paths and line numbers before making changes.
3. **Safety**: Confirm destructive operations. Never execute dangerous commands.
4. **Efficiency**: Minimize context usage. Request only necessary code sections.

## Available Tools

### Reading Tools
- \`get_lines\`: Get specific lines from a file
- \`get_function\`: Get a function by name
- \`get_class\`: Get a class by name
- \`get_structure\`: Get project directory structure

### Editing Tools (require confirmation)
- \`edit_lines\`: Replace specific lines in a file
- \`create_file\`: Create a new file
- \`delete_file\`: Delete a file

### Search Tools
- \`find_references\`: Find all usages of a symbol
- \`find_definition\`: Find where a symbol is defined

### Analysis Tools
- \`get_dependencies\`: Get files this file imports
- \`get_dependents\`: Get files that import this file
- \`get_complexity\`: Get complexity metrics
- \`get_todos\`: Find TODO/FIXME comments

### Git Tools
- \`git_status\`: Get repository status
- \`git_diff\`: Get uncommitted changes
- \`git_commit\`: Create a commit (requires confirmation)

### Run Tools
- \`run_command\`: Execute a shell command (security checked)
- \`run_tests\`: Run the test suite

## Response Guidelines

1. **Be concise**: Don't repeat information already in context.
2. **Show your work**: Explain what tools you're using and why.
3. **Verify before editing**: Always read the target code before modifying it.
4. **Handle errors gracefully**: If a tool fails, explain what went wrong and suggest alternatives.

## Code Editing Rules

1. Always use \`get_lines\` or \`get_function\` before \`edit_lines\`.
2. Provide exact line numbers for edits.
3. For large changes, break into multiple small edits.
4. After editing, suggest running tests if available.

## Safety Rules

1. Never execute commands that could harm the system.
2. Never expose sensitive data (API keys, passwords).
3. Always confirm file deletions and destructive git operations.
4. Stay within the project directory.

When you need to perform an action, use the appropriate tool. Think step by step about what information you need and which tools will provide it most efficiently.`
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build initial context from project structure and AST metadata.
|
||||||
|
* Returns a compact representation without actual code.
|
||||||
|
*/
|
||||||
|
export function buildInitialContext(
|
||||||
|
structure: ProjectStructure,
|
||||||
|
asts: Map<string, FileAST>,
|
||||||
|
metas?: Map<string, FileMeta>,
|
||||||
|
): string {
|
||||||
|
const sections: string[] = []
|
||||||
|
|
||||||
|
sections.push(formatProjectHeader(structure))
|
||||||
|
sections.push(formatDirectoryTree(structure))
|
||||||
|
sections.push(formatFileOverview(asts, metas))
|
||||||
|
|
||||||
|
return sections.join("\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format project header section.
|
||||||
|
*/
|
||||||
|
function formatProjectHeader(structure: ProjectStructure): string {
|
||||||
|
const fileCount = String(structure.files.length)
|
||||||
|
const dirCount = String(structure.directories.length)
|
||||||
|
return `# Project: ${structure.name}
|
||||||
|
Root: ${structure.rootPath}
|
||||||
|
Files: ${fileCount} | Directories: ${dirCount}`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format directory tree.
|
||||||
|
*/
|
||||||
|
function formatDirectoryTree(structure: ProjectStructure): string {
|
||||||
|
const lines: string[] = ["## Structure", ""]
|
||||||
|
|
||||||
|
const sortedDirs = [...structure.directories].sort()
|
||||||
|
for (const dir of sortedDirs) {
|
||||||
|
const depth = dir.split("/").length - 1
|
||||||
|
const indent = " ".repeat(depth)
|
||||||
|
const name = dir.split("/").pop() ?? dir
|
||||||
|
lines.push(`${indent}${name}/`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format file overview with AST summaries.
|
||||||
|
*/
|
||||||
|
function formatFileOverview(asts: Map<string, FileAST>, metas?: Map<string, FileMeta>): string {
|
||||||
|
const lines: string[] = ["## Files", ""]
|
||||||
|
|
||||||
|
const sortedPaths = [...asts.keys()].sort()
|
||||||
|
for (const path of sortedPaths) {
|
||||||
|
const ast = asts.get(path)
|
||||||
|
if (!ast) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const meta = metas?.get(path)
|
||||||
|
lines.push(formatFileSummary(path, ast, meta))
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format a single file's AST summary.
|
||||||
|
*/
|
||||||
|
function formatFileSummary(path: string, ast: FileAST, meta?: FileMeta): string {
|
||||||
|
const parts: string[] = []
|
||||||
|
|
||||||
|
if (ast.functions.length > 0) {
|
||||||
|
const names = ast.functions.map((f) => f.name).join(", ")
|
||||||
|
parts.push(`fn: ${names}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.classes.length > 0) {
|
||||||
|
const names = ast.classes.map((c) => c.name).join(", ")
|
||||||
|
parts.push(`class: ${names}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.interfaces.length > 0) {
|
||||||
|
const names = ast.interfaces.map((i) => i.name).join(", ")
|
||||||
|
parts.push(`interface: ${names}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.typeAliases.length > 0) {
|
||||||
|
const names = ast.typeAliases.map((t) => t.name).join(", ")
|
||||||
|
parts.push(`type: ${names}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const summary = parts.length > 0 ? ` [${parts.join(" | ")}]` : ""
|
||||||
|
const flags = formatFileFlags(meta)
|
||||||
|
|
||||||
|
return `- ${path}${summary}${flags}`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format file metadata flags.
|
||||||
|
*/
|
||||||
|
function formatFileFlags(meta?: FileMeta): string {
|
||||||
|
if (!meta) {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
const flags: string[] = []
|
||||||
|
|
||||||
|
if (meta.isHub) {
|
||||||
|
flags.push("hub")
|
||||||
|
}
|
||||||
|
|
||||||
|
if (meta.isEntryPoint) {
|
||||||
|
flags.push("entry")
|
||||||
|
}
|
||||||
|
|
||||||
|
if (meta.complexity.score > 70) {
|
||||||
|
flags.push("complex")
|
||||||
|
}
|
||||||
|
|
||||||
|
return flags.length > 0 ? ` (${flags.join(", ")})` : ""
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format line range for display.
|
||||||
|
*/
|
||||||
|
function formatLineRange(start: number, end: number): string {
|
||||||
|
return `[${String(start)}-${String(end)}]`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format imports section.
|
||||||
|
*/
|
||||||
|
function formatImportsSection(ast: FileAST): string[] {
|
||||||
|
if (ast.imports.length === 0) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const lines = ["### Imports"]
|
||||||
|
for (const imp of ast.imports) {
|
||||||
|
lines.push(`- ${imp.name} from "${imp.from}" (${imp.type})`)
|
||||||
|
}
|
||||||
|
lines.push("")
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format exports section.
|
||||||
|
*/
|
||||||
|
function formatExportsSection(ast: FileAST): string[] {
|
||||||
|
if (ast.exports.length === 0) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const lines = ["### Exports"]
|
||||||
|
for (const exp of ast.exports) {
|
||||||
|
const defaultMark = exp.isDefault ? " (default)" : ""
|
||||||
|
lines.push(`- ${exp.kind} ${exp.name}${defaultMark}`)
|
||||||
|
}
|
||||||
|
lines.push("")
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format functions section.
|
||||||
|
*/
|
||||||
|
function formatFunctionsSection(ast: FileAST): string[] {
|
||||||
|
if (ast.functions.length === 0) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const lines = ["### Functions"]
|
||||||
|
for (const fn of ast.functions) {
|
||||||
|
const params = fn.params.map((p) => p.name).join(", ")
|
||||||
|
const asyncMark = fn.isAsync ? "async " : ""
|
||||||
|
const range = formatLineRange(fn.lineStart, fn.lineEnd)
|
||||||
|
lines.push(`- ${asyncMark}${fn.name}(${params}) ${range}`)
|
||||||
|
}
|
||||||
|
lines.push("")
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format classes section.
|
||||||
|
*/
|
||||||
|
function formatClassesSection(ast: FileAST): string[] {
|
||||||
|
if (ast.classes.length === 0) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const lines = ["### Classes"]
|
||||||
|
for (const cls of ast.classes) {
|
||||||
|
const ext = cls.extends ? ` extends ${cls.extends}` : ""
|
||||||
|
const impl = cls.implements.length > 0 ? ` implements ${cls.implements.join(", ")}` : ""
|
||||||
|
const range = formatLineRange(cls.lineStart, cls.lineEnd)
|
||||||
|
lines.push(`- ${cls.name}${ext}${impl} ${range}`)
|
||||||
|
|
||||||
|
for (const method of cls.methods) {
|
||||||
|
const vis = method.visibility === "public" ? "" : `${method.visibility} `
|
||||||
|
const methodRange = formatLineRange(method.lineStart, method.lineEnd)
|
||||||
|
lines.push(` - ${vis}${method.name}() ${methodRange}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
lines.push("")
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format metadata section.
|
||||||
|
*/
|
||||||
|
function formatMetadataSection(meta: FileMeta): string[] {
|
||||||
|
const loc = String(meta.complexity.loc)
|
||||||
|
const score = String(meta.complexity.score)
|
||||||
|
const deps = String(meta.dependencies.length)
|
||||||
|
const dependents = String(meta.dependents.length)
|
||||||
|
return [
|
||||||
|
"### Metadata",
|
||||||
|
`- LOC: ${loc}`,
|
||||||
|
`- Complexity: ${score}/100`,
|
||||||
|
`- Dependencies: ${deps}`,
|
||||||
|
`- Dependents: ${dependents}`,
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build context for a specific file request.
|
||||||
|
*/
|
||||||
|
export function buildFileContext(path: string, ast: FileAST, meta?: FileMeta): string {
|
||||||
|
const lines: string[] = [`## ${path}`, ""]
|
||||||
|
|
||||||
|
lines.push(...formatImportsSection(ast))
|
||||||
|
lines.push(...formatExportsSection(ast))
|
||||||
|
lines.push(...formatFunctionsSection(ast))
|
||||||
|
lines.push(...formatClassesSection(ast))
|
||||||
|
|
||||||
|
if (meta) {
|
||||||
|
lines.push(...formatMetadataSection(meta))
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Truncate context to fit within token budget.
|
||||||
|
*/
|
||||||
|
export function truncateContext(context: string, maxTokens: number): string {
|
||||||
|
const charsPerToken = 4
|
||||||
|
const maxChars = maxTokens * charsPerToken
|
||||||
|
|
||||||
|
if (context.length <= maxChars) {
|
||||||
|
return context
|
||||||
|
}
|
||||||
|
|
||||||
|
const truncated = context.slice(0, maxChars - 100)
|
||||||
|
const lastNewline = truncated.lastIndexOf("\n")
|
||||||
|
const remaining = String(context.length - lastNewline)
|
||||||
|
|
||||||
|
return `${truncated.slice(0, lastNewline)}\n\n... (truncated, ${remaining} chars remaining)`
|
||||||
|
}
|
||||||
511
packages/ipuaro/src/infrastructure/llm/toolDefs.ts
Normal file
511
packages/ipuaro/src/infrastructure/llm/toolDefs.ts
Normal file
@@ -0,0 +1,511 @@
|
|||||||
|
import type { ToolDef } from "../../domain/services/ILLMClient.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool definitions for ipuaro LLM.
|
||||||
|
* 18 tools across 6 categories: read, edit, search, analysis, git, run.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
|
||||||
|
* =============================================================================
|
||||||
|
* Read Tools (4)
|
||||||
|
* =============================================================================
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Read-only file access tool: never requires user confirmation.
export const GET_LINES_TOOL: ToolDef = {
    name: "get_lines",
    description:
        "Get specific lines from a file. Returns the content with line numbers. " +
        "If no range is specified, returns the entire file.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "start",
            type: "number",
            description: "Start line number (1-based, inclusive)",
            required: false,
        },
        {
            name: "end",
            type: "number",
            description: "End line number (1-based, inclusive)",
            required: false,
        },
    ],
}

// AST-based lookup of a single function body by name.
export const GET_FUNCTION_TOOL: ToolDef = {
    name: "get_function",
    description:
        "Get a function's source code by name. Uses AST to find exact line range. " +
        "Returns the function code with line numbers.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "name",
            type: "string",
            description: "Function name to retrieve",
            required: true,
        },
    ],
}

// AST-based lookup of a single class body by name.
export const GET_CLASS_TOOL: ToolDef = {
    name: "get_class",
    description:
        "Get a class's source code by name. Uses AST to find exact line range. " +
        "Returns the class code with line numbers.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "name",
            type: "string",
            description: "Class name to retrieve",
            required: true,
        },
    ],
}

// Directory-tree view of the project (optionally scoped and depth-limited).
export const GET_STRUCTURE_TOOL: ToolDef = {
    name: "get_structure",
    description:
        "Get project directory structure as a tree. " +
        "If path is specified, shows structure of that subdirectory only.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Subdirectory path relative to project root (optional, defaults to root)",
            required: false,
        },
        {
            name: "depth",
            type: "number",
            description: "Maximum depth to traverse (default: unlimited)",
            required: false,
        },
    ],
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* =============================================================================
|
||||||
|
* Edit Tools (3) - All require confirmation
|
||||||
|
* =============================================================================
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Mutating tool: always shows a diff and requires user confirmation.
export const EDIT_LINES_TOOL: ToolDef = {
    name: "edit_lines",
    description:
        "Replace lines in a file with new content. Requires reading the file first. " +
        "Will show diff and ask for confirmation before applying.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "start",
            type: "number",
            description: "Start line number (1-based, inclusive) to replace",
            required: true,
        },
        {
            name: "end",
            type: "number",
            description: "End line number (1-based, inclusive) to replace",
            required: true,
        },
        {
            name: "content",
            type: "string",
            description: "New content to insert (can be multiple lines)",
            required: true,
        },
    ],
}

// Mutating tool: fails on existing files and requires user confirmation.
export const CREATE_FILE_TOOL: ToolDef = {
    name: "create_file",
    description:
        "Create a new file with specified content. " +
        "Will fail if file already exists. Will ask for confirmation.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "content",
            type: "string",
            description: "File content",
            required: true,
        },
    ],
}

// Mutating tool: requires confirmation; deleted content goes to the undo stack.
export const DELETE_FILE_TOOL: ToolDef = {
    name: "delete_file",
    description:
        "Delete a file from the project. " +
        "Will ask for confirmation. Previous content is saved to undo stack.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
    ],
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* =============================================================================
|
||||||
|
* Search Tools (2)
|
||||||
|
* =============================================================================
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Symbol usage search across the codebase (read-only).
export const FIND_REFERENCES_TOOL: ToolDef = {
    name: "find_references",
    description:
        "Find all usages of a symbol across the codebase. " +
        "Returns list of file paths, line numbers, and context.",
    parameters: [
        {
            name: "symbol",
            type: "string",
            description: "Symbol name to search for (function, class, variable, etc.)",
            required: true,
        },
        {
            name: "path",
            type: "string",
            description: "Limit search to specific file or directory",
            required: false,
        },
    ],
}

// Symbol definition lookup (read-only).
export const FIND_DEFINITION_TOOL: ToolDef = {
    name: "find_definition",
    description:
        "Find where a symbol is defined. " + "Returns file path, line number, and symbol type.",
    parameters: [
        {
            name: "symbol",
            type: "string",
            description: "Symbol name to find definition for",
            required: true,
        },
    ],
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* =============================================================================
|
||||||
|
* Analysis Tools (4)
|
||||||
|
* =============================================================================
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Forward dependency lookup: what this file imports (read-only).
export const GET_DEPENDENCIES_TOOL: ToolDef = {
    name: "get_dependencies",
    description:
        "Get files that this file imports (internal dependencies). " +
        "Returns list of imported file paths.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
    ],
}

// Reverse dependency lookup: which files import this one (read-only).
export const GET_DEPENDENTS_TOOL: ToolDef = {
    name: "get_dependents",
    description:
        "Get files that import this file (reverse dependencies). " +
        "Returns list of file paths that depend on this file.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
    ],
}

// Complexity metrics for one file, or a project-wide ranking when no path is given.
export const GET_COMPLEXITY_TOOL: ToolDef = {
    name: "get_complexity",
    description:
        "Get complexity metrics for a file or the entire project. " +
        "Returns LOC, nesting depth, cyclomatic complexity, and overall score.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path (optional, defaults to all files sorted by complexity)",
            required: false,
        },
        {
            name: "limit",
            type: "number",
            description: "Max files to return when showing all (default: 10)",
            required: false,
        },
    ],
}

// Marker-comment scan; "type" is restricted by an enum of supported markers.
export const GET_TODOS_TOOL: ToolDef = {
    name: "get_todos",
    description:
        "Find TODO, FIXME, HACK, and XXX comments in the codebase. " +
        "Returns list with file paths, line numbers, and comment text.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Limit search to specific file or directory",
            required: false,
        },
        {
            name: "type",
            type: "string",
            description: "Filter by comment type",
            required: false,
            enum: ["TODO", "FIXME", "HACK", "XXX"],
        },
    ],
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* =============================================================================
|
||||||
|
* Git Tools (3)
|
||||||
|
* =============================================================================
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Repository status snapshot; takes no parameters (read-only).
export const GIT_STATUS_TOOL: ToolDef = {
    name: "git_status",
    description:
        "Get current git repository status. " +
        "Returns branch name, staged files, modified files, and untracked files.",
    parameters: [],
}

// Uncommitted-change diff, optionally scoped to a path or staged-only (read-only).
export const GIT_DIFF_TOOL: ToolDef = {
    name: "git_diff",
    description:
        "Get uncommitted changes (diff). " + "Shows what has changed but not yet committed.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Limit diff to specific file or directory",
            required: false,
        },
        {
            name: "staged",
            type: "boolean",
            description: "Show only staged changes (default: false, shows all)",
            required: false,
        },
    ],
}

// Mutating git tool: creates a commit; requires user confirmation.
export const GIT_COMMIT_TOOL: ToolDef = {
    name: "git_commit",
    description:
        "Create a git commit with the specified message. " +
        "Will ask for confirmation. Optionally stage specific files first.",
    parameters: [
        {
            name: "message",
            type: "string",
            description: "Commit message",
            required: true,
        },
        {
            name: "files",
            type: "array",
            description: "Files to stage before commit (optional, defaults to all staged)",
            required: false,
        },
    ],
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* =============================================================================
|
||||||
|
* Run Tools (2)
|
||||||
|
* =============================================================================
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Shell execution tool: commands are vetted against blacklist/whitelist,
// and unknown commands require user confirmation.
export const RUN_COMMAND_TOOL: ToolDef = {
    name: "run_command",
    description:
        "Execute a shell command in the project directory. " +
        "Commands are checked against blacklist/whitelist for security. " +
        "Unknown commands require user confirmation.",
    parameters: [
        {
            name: "command",
            type: "string",
            description: "Shell command to execute",
            required: true,
        },
        {
            name: "timeout",
            type: "number",
            description: "Timeout in milliseconds (default: 30000)",
            required: false,
        },
    ],
}

// Test-suite runner; detects the project's test runner automatically.
export const RUN_TESTS_TOOL: ToolDef = {
    name: "run_tests",
    description:
        "Run the project's test suite. Auto-detects test runner (vitest, jest, npm test). " +
        "Returns test results summary.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Run tests for specific file or directory",
            required: false,
        },
        {
            name: "filter",
            type: "string",
            description: "Filter tests by name pattern",
            required: false,
        },
        {
            name: "watch",
            type: "boolean",
            description: "Run in watch mode (default: false)",
            required: false,
        },
    ],
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* =============================================================================
|
||||||
|
* Tool Collection
|
||||||
|
* =============================================================================
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * All read tools (no confirmation required).
 */
export const READ_TOOLS: ToolDef[] = [
    GET_LINES_TOOL,
    GET_FUNCTION_TOOL,
    GET_CLASS_TOOL,
    GET_STRUCTURE_TOOL,
]

/**
 * All edit tools (require confirmation).
 */
export const EDIT_TOOLS: ToolDef[] = [EDIT_LINES_TOOL, CREATE_FILE_TOOL, DELETE_FILE_TOOL]

/**
 * All search tools (no confirmation required).
 */
export const SEARCH_TOOLS: ToolDef[] = [FIND_REFERENCES_TOOL, FIND_DEFINITION_TOOL]

/**
 * All analysis tools (no confirmation required).
 */
export const ANALYSIS_TOOLS: ToolDef[] = [
    GET_DEPENDENCIES_TOOL,
    GET_DEPENDENTS_TOOL,
    GET_COMPLEXITY_TOOL,
    GET_TODOS_TOOL,
]

/**
 * All git tools (git_commit requires confirmation).
 */
export const GIT_TOOLS: ToolDef[] = [GIT_STATUS_TOOL, GIT_DIFF_TOOL, GIT_COMMIT_TOOL]

/**
 * All run tools (run_command may require confirmation).
 */
export const RUN_TOOLS: ToolDef[] = [RUN_COMMAND_TOOL, RUN_TESTS_TOOL]

/**
 * All 18 tool definitions (4 read + 3 edit + 2 search + 4 analysis + 3 git + 2 run).
 */
export const ALL_TOOLS: ToolDef[] = [
    ...READ_TOOLS,
    ...EDIT_TOOLS,
    ...SEARCH_TOOLS,
    ...ANALYSIS_TOOLS,
    ...GIT_TOOLS,
    ...RUN_TOOLS,
]

/**
 * Tools that require user confirmation before execution.
 * NOTE: must stay in sync with EDIT_TOOLS plus git_commit.
 */
export const CONFIRMATION_TOOLS = new Set([
    "edit_lines",
    "create_file",
    "delete_file",
    "git_commit",
])
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a tool requires confirmation.
|
||||||
|
*/
|
||||||
|
export function requiresConfirmation(toolName: string): boolean {
|
||||||
|
return CONFIRMATION_TOOLS.has(toolName)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool definition by name.
|
||||||
|
*/
|
||||||
|
export function getToolDef(name: string): ToolDef | undefined {
|
||||||
|
return ALL_TOOLS.find((t) => t.name === name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool definitions by category.
|
||||||
|
*/
|
||||||
|
export function getToolsByCategory(category: string): ToolDef[] {
|
||||||
|
switch (category) {
|
||||||
|
case "read":
|
||||||
|
return READ_TOOLS
|
||||||
|
case "edit":
|
||||||
|
return EDIT_TOOLS
|
||||||
|
case "search":
|
||||||
|
return SEARCH_TOOLS
|
||||||
|
case "analysis":
|
||||||
|
return ANALYSIS_TOOLS
|
||||||
|
case "git":
|
||||||
|
return GIT_TOOLS
|
||||||
|
case "run":
|
||||||
|
return RUN_TOOLS
|
||||||
|
default:
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
}
|
||||||
119
packages/ipuaro/src/infrastructure/storage/RedisClient.ts
Normal file
119
packages/ipuaro/src/infrastructure/storage/RedisClient.ts
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
import { Redis } from "ioredis"
|
||||||
|
import type { RedisConfig } from "../../shared/constants/config.js"
|
||||||
|
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Redis client wrapper with connection management.
|
||||||
|
* Handles connection lifecycle and AOF configuration.
|
||||||
|
*/
|
||||||
|
export class RedisClient {
    // Underlying ioredis connection; null whenever not connected.
    private client: Redis | null = null
    private readonly config: RedisConfig
    // Tracks whether connect() completed successfully.
    private connected = false

    constructor(config: RedisConfig) {
        this.config = config
    }

    /**
     * Connect to Redis server.
     * Configures AOF persistence on successful connection.
     * Idempotent: returns immediately if already connected.
     * @throws IpuaroError (redis) when the connection attempt fails.
     */
    async connect(): Promise<void> {
        if (this.connected && this.client) {
            return
        }

        try {
            this.client = new Redis({
                host: this.config.host,
                port: this.config.port,
                db: this.config.db,
                password: this.config.password,
                keyPrefix: this.config.keyPrefix,
                // lazyConnect: defer the TCP connection until connect() below.
                lazyConnect: true,
                // Back off 200ms per attempt (capped at 1s); give up after 3 retries.
                retryStrategy: (times: number): number | null => {
                    if (times > 3) {
                        return null
                    }
                    return Math.min(times * 200, 1000)
                },
                maxRetriesPerRequest: 3,
                enableReadyCheck: true,
            })

            await this.client.connect()
            await this.configureAOF()
            this.connected = true
        } catch (error) {
            // Reset state so a later connect() starts from scratch.
            this.connected = false
            this.client = null
            const message = error instanceof Error ? error.message : "Unknown error"
            throw IpuaroError.redis(`Failed to connect to Redis: ${message}`)
        }
    }

    /**
     * Disconnect from Redis server.
     * Uses QUIT (graceful); safe to call when already disconnected.
     */
    async disconnect(): Promise<void> {
        if (this.client) {
            await this.client.quit()
            this.client = null
            this.connected = false
        }
    }

    /**
     * Check if connected to Redis.
     * Requires both a successful connect() and an ioredis "ready" status.
     */
    isConnected(): boolean {
        return this.connected && this.client !== null && this.client.status === "ready"
    }

    /**
     * Get the underlying Redis client.
     * @throws IpuaroError if not connected
     */
    getClient(): Redis {
        if (!this.client || !this.connected) {
            throw IpuaroError.redis("Redis client is not connected")
        }
        return this.client
    }

    /**
     * Execute a health check ping.
     * Returns false (never throws) when disconnected or the ping fails.
     */
    async ping(): Promise<boolean> {
        if (!this.client) {
            return false
        }
        try {
            const result = await this.client.ping()
            return result === "PONG"
        } catch {
            return false
        }
    }

    /**
     * Configure AOF (Append Only File) persistence.
     * AOF provides better durability by logging every write operation.
     */
    private async configureAOF(): Promise<void> {
        if (!this.client) {
            return
        }

        try {
            await this.client.config("SET", "appendonly", "yes")
            await this.client.config("SET", "appendfsync", "everysec")
        } catch {
            /*
             * AOF config may fail if Redis doesn't allow CONFIG SET.
             * This is non-fatal - persistence will still work with default settings.
             */
        }
    }
}
|
||||||
236
packages/ipuaro/src/infrastructure/storage/RedisStorage.ts
Normal file
236
packages/ipuaro/src/infrastructure/storage/RedisStorage.ts
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
import type { DepsGraph, IStorage, SymbolIndex } from "../../domain/services/IStorage.js"
|
||||||
|
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||||
|
import type { FileData } from "../../domain/value-objects/FileData.js"
|
||||||
|
import type { FileMeta } from "../../domain/value-objects/FileMeta.js"
|
||||||
|
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
import { RedisClient } from "./RedisClient.js"
|
||||||
|
import { IndexFields, ProjectKeys } from "./schema.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Redis implementation of IStorage.
|
||||||
|
* Stores project data (files, AST, meta, indexes) in Redis hashes.
|
||||||
|
*/
|
||||||
|
export class RedisStorage implements IStorage {
|
||||||
|
private readonly client: RedisClient
|
||||||
|
private readonly projectName: string
|
||||||
|
|
||||||
|
    constructor(client: RedisClient, projectName: string) {
        this.client = client
        // Project name scopes all Redis keys via ProjectKeys helpers.
        this.projectName = projectName
    }

    // Fetch one file's data from the project's files hash; null when absent.
    async getFile(path: string): Promise<FileData | null> {
        const redis = this.getRedis()
        const data = await redis.hget(ProjectKeys.files(this.projectName), path)
        if (!data) {
            return null
        }
        return this.parseJSON(data, "FileData") as FileData
    }

    // Store one file's data as JSON in the files hash (overwrites existing).
    async setFile(path: string, data: FileData): Promise<void> {
        const redis = this.getRedis()
        await redis.hset(ProjectKeys.files(this.projectName), path, JSON.stringify(data))
    }

    // Remove one file's entry from the files hash (no-op if absent).
    async deleteFile(path: string): Promise<void> {
        const redis = this.getRedis()
        await redis.hdel(ProjectKeys.files(this.projectName), path)
    }

    // Load the entire files hash; entries that fail to parse are skipped.
    async getAllFiles(): Promise<Map<string, FileData>> {
        const redis = this.getRedis()
        const data = await redis.hgetall(ProjectKeys.files(this.projectName))
        const result = new Map<string, FileData>()

        for (const [path, value] of Object.entries(data)) {
            const parsed = this.parseJSON(value, "FileData") as FileData | null
            if (parsed) {
                result.set(path, parsed)
            }
        }

        return result
    }

    // Number of files tracked for this project (HLEN of the files hash).
    async getFileCount(): Promise<number> {
        const redis = this.getRedis()
        return redis.hlen(ProjectKeys.files(this.projectName))
    }
|
||||||
|
|
||||||
|
    // Fetch one file's parsed AST from the project's ast hash; null when absent.
    async getAST(path: string): Promise<FileAST | null> {
        const redis = this.getRedis()
        const data = await redis.hget(ProjectKeys.ast(this.projectName), path)
        if (!data) {
            return null
        }
        return this.parseJSON(data, "FileAST") as FileAST
    }

    // Store one file's AST as JSON in the ast hash (overwrites existing).
    async setAST(path: string, ast: FileAST): Promise<void> {
        const redis = this.getRedis()
        await redis.hset(ProjectKeys.ast(this.projectName), path, JSON.stringify(ast))
    }

    // Remove one file's AST entry (no-op if absent).
    async deleteAST(path: string): Promise<void> {
        const redis = this.getRedis()
        await redis.hdel(ProjectKeys.ast(this.projectName), path)
    }

    // Load the entire ast hash; entries that fail to parse are skipped.
    async getAllASTs(): Promise<Map<string, FileAST>> {
        const redis = this.getRedis()
        const data = await redis.hgetall(ProjectKeys.ast(this.projectName))
        const result = new Map<string, FileAST>()

        for (const [path, value] of Object.entries(data)) {
            const parsed = this.parseJSON(value, "FileAST") as FileAST | null
            if (parsed) {
                result.set(path, parsed)
            }
        }

        return result
    }
|
||||||
|
|
||||||
|
    // Fetch one file's metadata from the project's meta hash; null when absent.
    async getMeta(path: string): Promise<FileMeta | null> {
        const redis = this.getRedis()
        const data = await redis.hget(ProjectKeys.meta(this.projectName), path)
        if (!data) {
            return null
        }
        return this.parseJSON(data, "FileMeta") as FileMeta
    }

    // Store one file's metadata as JSON in the meta hash (overwrites existing).
    async setMeta(path: string, meta: FileMeta): Promise<void> {
        const redis = this.getRedis()
        await redis.hset(ProjectKeys.meta(this.projectName), path, JSON.stringify(meta))
    }

    // Remove one file's metadata entry (no-op if absent).
    async deleteMeta(path: string): Promise<void> {
        const redis = this.getRedis()
        await redis.hdel(ProjectKeys.meta(this.projectName), path)
    }

    // Load the entire meta hash; entries that fail to parse are skipped.
    async getAllMetas(): Promise<Map<string, FileMeta>> {
        const redis = this.getRedis()
        const data = await redis.hgetall(ProjectKeys.meta(this.projectName))
        const result = new Map<string, FileMeta>()

        for (const [path, value] of Object.entries(data)) {
            const parsed = this.parseJSON(value, "FileMeta") as FileMeta | null
            if (parsed) {
                result.set(path, parsed)
            }
        }

        return result
    }
|
||||||
|
|
||||||
|
async getSymbolIndex(): Promise<SymbolIndex> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hget(ProjectKeys.indexes(this.projectName), IndexFields.symbols)
|
||||||
|
if (!data) {
|
||||||
|
return new Map()
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = this.parseJSON(data, "SymbolIndex") as [string, unknown[]][] | null
|
||||||
|
if (!parsed) {
|
||||||
|
return new Map()
|
||||||
|
}
|
||||||
|
|
||||||
|
return new Map(parsed) as SymbolIndex
|
||||||
|
}
|
||||||
|
|
||||||
|
async setSymbolIndex(index: SymbolIndex): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const serialized = JSON.stringify([...index.entries()])
|
||||||
|
await redis.hset(ProjectKeys.indexes(this.projectName), IndexFields.symbols, serialized)
|
||||||
|
}
|
||||||
|
|
||||||
|
async getDepsGraph(): Promise<DepsGraph> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hget(ProjectKeys.indexes(this.projectName), IndexFields.depsGraph)
|
||||||
|
if (!data) {
|
||||||
|
return {
|
||||||
|
imports: new Map(),
|
||||||
|
importedBy: new Map(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = this.parseJSON(data, "DepsGraph") as {
|
||||||
|
imports: [string, string[]][]
|
||||||
|
importedBy: [string, string[]][]
|
||||||
|
} | null
|
||||||
|
|
||||||
|
if (!parsed) {
|
||||||
|
return {
|
||||||
|
imports: new Map(),
|
||||||
|
importedBy: new Map(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
imports: new Map(parsed.imports),
|
||||||
|
importedBy: new Map(parsed.importedBy),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async setDepsGraph(graph: DepsGraph): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const serialized = JSON.stringify({
|
||||||
|
imports: [...graph.imports.entries()],
|
||||||
|
importedBy: [...graph.importedBy.entries()],
|
||||||
|
})
|
||||||
|
await redis.hset(ProjectKeys.indexes(this.projectName), IndexFields.depsGraph, serialized)
|
||||||
|
}
|
||||||
|
|
||||||
|
async getProjectConfig(key: string): Promise<unknown> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hget(ProjectKeys.config(this.projectName), key)
|
||||||
|
if (!data) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return this.parseJSON(data, "ProjectConfig")
|
||||||
|
}
|
||||||
|
|
||||||
|
async setProjectConfig(key: string, value: unknown): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await redis.hset(ProjectKeys.config(this.projectName), key, JSON.stringify(value))
|
||||||
|
}
|
||||||
|
|
||||||
|
    /** Open the underlying Redis connection. */
    async connect(): Promise<void> {
        await this.client.connect()
    }
|
||||||
|
|
||||||
|
    /** Close the underlying Redis connection. */
    async disconnect(): Promise<void> {
        await this.client.disconnect()
    }
|
||||||
|
|
||||||
|
    /** Whether the underlying Redis client currently reports a live connection. */
    isConnected(): boolean {
        return this.client.isConnected()
    }
|
||||||
|
|
||||||
|
async clear(): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await Promise.all([
|
||||||
|
redis.del(ProjectKeys.files(this.projectName)),
|
||||||
|
redis.del(ProjectKeys.ast(this.projectName)),
|
||||||
|
redis.del(ProjectKeys.meta(this.projectName)),
|
||||||
|
redis.del(ProjectKeys.indexes(this.projectName)),
|
||||||
|
redis.del(ProjectKeys.config(this.projectName)),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
|
||||||
|
    /** Shorthand accessor for the raw client wrapped by RedisClient. */
    private getRedis(): ReturnType<RedisClient["getClient"]> {
        return this.client.getClient()
    }
|
||||||
|
|
||||||
|
private parseJSON(data: string, type: string): unknown {
|
||||||
|
try {
|
||||||
|
return JSON.parse(data) as unknown
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : "Unknown error"
|
||||||
|
throw IpuaroError.parse(`Failed to parse ${type}: ${message}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
10
packages/ipuaro/src/infrastructure/storage/index.ts
Normal file
10
packages/ipuaro/src/infrastructure/storage/index.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
// Storage module exports
|
||||||
|
export { RedisClient } from "./RedisClient.js"
|
||||||
|
export { RedisStorage } from "./RedisStorage.js"
|
||||||
|
export {
|
||||||
|
ProjectKeys,
|
||||||
|
SessionKeys,
|
||||||
|
IndexFields,
|
||||||
|
SessionFields,
|
||||||
|
generateProjectName,
|
||||||
|
} from "./schema.js"
|
||||||
95
packages/ipuaro/src/infrastructure/storage/schema.ts
Normal file
95
packages/ipuaro/src/infrastructure/storage/schema.ts
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
/**
|
||||||
|
* Redis key schema for ipuaro data storage.
|
||||||
|
*
|
||||||
|
* Key structure:
|
||||||
|
* - project:{name}:files # Hash<path, FileData>
|
||||||
|
* - project:{name}:ast # Hash<path, FileAST>
|
||||||
|
* - project:{name}:meta # Hash<path, FileMeta>
|
||||||
|
* - project:{name}:indexes # Hash<name, JSON> (symbols, deps_graph)
|
||||||
|
* - project:{name}:config # Hash<key, JSON>
|
||||||
|
*
|
||||||
|
* - session:{id}:data # Hash<field, JSON> (history, context, stats)
|
||||||
|
* - session:{id}:undo # List<UndoEntry> (max 10)
|
||||||
|
* - sessions:list # List<session_id>
|
||||||
|
*
|
||||||
|
* Project name format: {parent-folder}-{project-folder}
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Project-related Redis keys.
|
||||||
|
*/
|
||||||
|
export const ProjectKeys = {
    // Hash<path, FileData>
    files: (projectName: string): string => `project:${projectName}:files`,
    // Hash<path, FileAST>
    ast: (projectName: string): string => `project:${projectName}:ast`,
    // Hash<path, FileMeta>
    meta: (projectName: string): string => `project:${projectName}:meta`,
    // Hash<name, JSON> — field names listed in IndexFields
    indexes: (projectName: string): string => `project:${projectName}:indexes`,
    // Hash<key, JSON>
    config: (projectName: string): string => `project:${projectName}:config`,
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session-related Redis keys.
|
||||||
|
*/
|
||||||
|
export const SessionKeys = {
    // Hash<field, JSON> — field names listed in SessionFields
    data: (sessionId: string): string => `session:${sessionId}:data`,
    // List<UndoEntry>
    undo: (sessionId: string): string => `session:${sessionId}:undo`,
    // Global list of known session ids (not per-session, hence no id param)
    list: "sessions:list",
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index field names within project:indexes hash.
|
||||||
|
*/
|
||||||
|
export const IndexFields = {
    symbols: "symbols", // serialized SymbolIndex entry pairs
    depsGraph: "deps_graph", // serialized DepsGraph (imports / importedBy)
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session data field names within session:data hash.
|
||||||
|
*/
|
||||||
|
// Keys are camelCase identifiers; the stored hash fields use snake_case.
export const SessionFields = {
    history: "history",
    context: "context",
    stats: "stats",
    inputHistory: "input_history",
    createdAt: "created_at",
    lastActivityAt: "last_activity_at",
    projectName: "project_name",
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate project name from path.
|
||||||
|
* Format: {parent-folder}-{project-folder}
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* generateProjectName("/home/user/projects/myapp") -> "projects-myapp"
|
||||||
|
* generateProjectName("/app") -> "app"
|
||||||
|
*/
|
||||||
|
export function generateProjectName(projectPath: string): string {
|
||||||
|
const normalized = projectPath.replace(/\\/g, "/").replace(/\/+$/, "")
|
||||||
|
const parts = normalized.split("/").filter(Boolean)
|
||||||
|
|
||||||
|
if (parts.length === 0) {
|
||||||
|
return "root"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parts.length === 1) {
|
||||||
|
return sanitizeName(parts[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
const projectFolder = sanitizeName(parts[parts.length - 1])
|
||||||
|
const parentFolder = sanitizeName(parts[parts.length - 2])
|
||||||
|
|
||||||
|
return `${parentFolder}-${projectFolder}`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sanitize a name for use in Redis keys.
|
||||||
|
* Replaces non-alphanumeric characters with hyphens.
|
||||||
|
*/
|
||||||
|
function sanitizeName(name: string): string {
|
||||||
|
return name
|
||||||
|
.toLowerCase()
|
||||||
|
.replace(/[^a-z0-9-]/g, "-")
|
||||||
|
.replace(/-+/g, "-")
|
||||||
|
.replace(/^-|-$/g, "")
|
||||||
|
}
|
||||||
12
packages/ipuaro/src/infrastructure/tools/index.ts
Normal file
12
packages/ipuaro/src/infrastructure/tools/index.ts
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
// Tools module exports
|
||||||
|
export { ToolRegistry } from "./registry.js"
|
||||||
|
|
||||||
|
// Read tools
|
||||||
|
export { GetLinesTool, type GetLinesResult } from "./read/GetLinesTool.js"
|
||||||
|
export { GetFunctionTool, type GetFunctionResult } from "./read/GetFunctionTool.js"
|
||||||
|
export { GetClassTool, type GetClassResult } from "./read/GetClassTool.js"
|
||||||
|
export {
|
||||||
|
GetStructureTool,
|
||||||
|
type GetStructureResult,
|
||||||
|
type TreeNode,
|
||||||
|
} from "./read/GetStructureTool.js"
|
||||||
165
packages/ipuaro/src/infrastructure/tools/read/GetClassTool.ts
Normal file
165
packages/ipuaro/src/infrastructure/tools/read/GetClassTool.ts
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import type { ClassInfo } from "../../../domain/value-objects/FileAST.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result data from get_class tool.
|
||||||
|
*/
|
||||||
|
export interface GetClassResult {
    path: string // file path relative to project root
    name: string // class name as recorded in the AST
    startLine: number // 1-based, inclusive
    endLine: number // 1-based, inclusive
    isExported: boolean
    isAbstract: boolean
    extends?: string // base class name, when present in the AST
    implements: string[]
    methods: string[] // method names only
    properties: string[] // property names only
    content: string // source lines prefixed with line numbers
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for retrieving a class's source code by name.
|
||||||
|
* Uses AST to find exact line range.
|
||||||
|
*/
|
||||||
|
export class GetClassTool implements ITool {
|
||||||
|
readonly name = "get_class"
|
||||||
|
readonly description =
|
||||||
|
"Get a class's source code by name. Uses AST to find exact line range. " +
|
||||||
|
"Returns the class code with line numbers."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path relative to project root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "name",
|
||||||
|
type: "string",
|
||||||
|
description: "Class name to retrieve",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "read" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||||
|
return "Parameter 'path' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof params.name !== "string" || params.name.trim() === "") {
|
||||||
|
return "Parameter 'name' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const relativePath = params.path as string
|
||||||
|
const className = params.name as string
|
||||||
|
const absolutePath = path.resolve(ctx.projectRoot, relativePath)
|
||||||
|
|
||||||
|
if (!absolutePath.startsWith(ctx.projectRoot)) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
"Path must be within project root",
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const ast = await ctx.storage.getAST(relativePath)
|
||||||
|
if (!ast) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`AST not found for "${relativePath}". File may not be indexed.`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const classInfo = this.findClass(ast.classes, className)
|
||||||
|
if (!classInfo) {
|
||||||
|
const available = ast.classes.map((c) => c.name).join(", ") || "none"
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`Class "${className}" not found in "${relativePath}". Available: ${available}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines = await this.getFileLines(absolutePath, relativePath, ctx)
|
||||||
|
const classLines = lines.slice(classInfo.lineStart - 1, classInfo.lineEnd)
|
||||||
|
const content = this.formatLinesWithNumbers(classLines, classInfo.lineStart)
|
||||||
|
|
||||||
|
const result: GetClassResult = {
|
||||||
|
path: relativePath,
|
||||||
|
name: classInfo.name,
|
||||||
|
startLine: classInfo.lineStart,
|
||||||
|
endLine: classInfo.lineEnd,
|
||||||
|
isExported: classInfo.isExported,
|
||||||
|
isAbstract: classInfo.isAbstract,
|
||||||
|
extends: classInfo.extends,
|
||||||
|
implements: classInfo.implements,
|
||||||
|
methods: classInfo.methods.map((m) => m.name),
|
||||||
|
properties: classInfo.properties.map((p) => p.name),
|
||||||
|
content,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find class by name in AST.
|
||||||
|
*/
|
||||||
|
private findClass(classes: ClassInfo[], name: string): ClassInfo | undefined {
|
||||||
|
return classes.find((c) => c.name === name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get file lines from storage or filesystem.
|
||||||
|
*/
|
||||||
|
private async getFileLines(
|
||||||
|
absolutePath: string,
|
||||||
|
relativePath: string,
|
||||||
|
ctx: ToolContext,
|
||||||
|
): Promise<string[]> {
|
||||||
|
const fileData = await ctx.storage.getFile(relativePath)
|
||||||
|
if (fileData) {
|
||||||
|
return fileData.lines
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = await fs.readFile(absolutePath, "utf-8")
|
||||||
|
return content.split("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format lines with line numbers.
|
||||||
|
*/
|
||||||
|
private formatLinesWithNumbers(lines: string[], startLine: number): string {
|
||||||
|
const maxLineNum = startLine + lines.length - 1
|
||||||
|
const padWidth = String(maxLineNum).length
|
||||||
|
|
||||||
|
return lines
|
||||||
|
.map((line, index) => {
|
||||||
|
const lineNum = String(startLine + index).padStart(padWidth, " ")
|
||||||
|
return `${lineNum}│${line}`
|
||||||
|
})
|
||||||
|
.join("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
161
packages/ipuaro/src/infrastructure/tools/read/GetFunctionTool.ts
Normal file
161
packages/ipuaro/src/infrastructure/tools/read/GetFunctionTool.ts
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import type { FunctionInfo } from "../../../domain/value-objects/FileAST.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result data from get_function tool.
|
||||||
|
*/
|
||||||
|
export interface GetFunctionResult {
    path: string // file path relative to project root
    name: string // function name as recorded in the AST
    startLine: number // 1-based, inclusive
    endLine: number // 1-based, inclusive
    isAsync: boolean
    isExported: boolean
    params: string[] // parameter names only
    returnType?: string // declared return type, when present in the AST
    content: string // source lines prefixed with line numbers
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for retrieving a function's source code by name.
|
||||||
|
* Uses AST to find exact line range.
|
||||||
|
*/
|
||||||
|
export class GetFunctionTool implements ITool {
|
||||||
|
readonly name = "get_function"
|
||||||
|
readonly description =
|
||||||
|
"Get a function's source code by name. Uses AST to find exact line range. " +
|
||||||
|
"Returns the function code with line numbers."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path relative to project root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "name",
|
||||||
|
type: "string",
|
||||||
|
description: "Function name to retrieve",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "read" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||||
|
return "Parameter 'path' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof params.name !== "string" || params.name.trim() === "") {
|
||||||
|
return "Parameter 'name' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const relativePath = params.path as string
|
||||||
|
const functionName = params.name as string
|
||||||
|
const absolutePath = path.resolve(ctx.projectRoot, relativePath)
|
||||||
|
|
||||||
|
if (!absolutePath.startsWith(ctx.projectRoot)) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
"Path must be within project root",
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const ast = await ctx.storage.getAST(relativePath)
|
||||||
|
if (!ast) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`AST not found for "${relativePath}". File may not be indexed.`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const functionInfo = this.findFunction(ast.functions, functionName)
|
||||||
|
if (!functionInfo) {
|
||||||
|
const available = ast.functions.map((f) => f.name).join(", ") || "none"
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`Function "${functionName}" not found in "${relativePath}". Available: ${available}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines = await this.getFileLines(absolutePath, relativePath, ctx)
|
||||||
|
const functionLines = lines.slice(functionInfo.lineStart - 1, functionInfo.lineEnd)
|
||||||
|
const content = this.formatLinesWithNumbers(functionLines, functionInfo.lineStart)
|
||||||
|
|
||||||
|
const result: GetFunctionResult = {
|
||||||
|
path: relativePath,
|
||||||
|
name: functionInfo.name,
|
||||||
|
startLine: functionInfo.lineStart,
|
||||||
|
endLine: functionInfo.lineEnd,
|
||||||
|
isAsync: functionInfo.isAsync,
|
||||||
|
isExported: functionInfo.isExported,
|
||||||
|
params: functionInfo.params.map((p) => p.name),
|
||||||
|
returnType: functionInfo.returnType,
|
||||||
|
content,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find function by name in AST.
|
||||||
|
*/
|
||||||
|
private findFunction(functions: FunctionInfo[], name: string): FunctionInfo | undefined {
|
||||||
|
return functions.find((f) => f.name === name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get file lines from storage or filesystem.
|
||||||
|
*/
|
||||||
|
private async getFileLines(
|
||||||
|
absolutePath: string,
|
||||||
|
relativePath: string,
|
||||||
|
ctx: ToolContext,
|
||||||
|
): Promise<string[]> {
|
||||||
|
const fileData = await ctx.storage.getFile(relativePath)
|
||||||
|
if (fileData) {
|
||||||
|
return fileData.lines
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = await fs.readFile(absolutePath, "utf-8")
|
||||||
|
return content.split("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format lines with line numbers.
|
||||||
|
*/
|
||||||
|
private formatLinesWithNumbers(lines: string[], startLine: number): string {
|
||||||
|
const maxLineNum = startLine + lines.length - 1
|
||||||
|
const padWidth = String(maxLineNum).length
|
||||||
|
|
||||||
|
return lines
|
||||||
|
.map((line, index) => {
|
||||||
|
const lineNum = String(startLine + index).padStart(padWidth, " ")
|
||||||
|
return `${lineNum}│${line}`
|
||||||
|
})
|
||||||
|
.join("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
158
packages/ipuaro/src/infrastructure/tools/read/GetLinesTool.ts
Normal file
158
packages/ipuaro/src/infrastructure/tools/read/GetLinesTool.ts
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result data from get_lines tool.
|
||||||
|
*/
|
||||||
|
export interface GetLinesResult {
    path: string // file path relative to project root
    startLine: number // effective start after clamping (1-based, inclusive)
    endLine: number // effective end after clamping (1-based, inclusive)
    totalLines: number // total line count of the file
    content: string // selected lines prefixed with line numbers
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for reading specific lines from a file.
|
||||||
|
* Returns content with line numbers.
|
||||||
|
*/
|
||||||
|
export class GetLinesTool implements ITool {
|
||||||
|
readonly name = "get_lines"
|
||||||
|
readonly description =
|
||||||
|
"Get specific lines from a file. Returns the content with line numbers. " +
|
||||||
|
"If no range is specified, returns the entire file."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path relative to project root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "start",
|
||||||
|
type: "number",
|
||||||
|
description: "Start line number (1-based, inclusive)",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "end",
|
||||||
|
type: "number",
|
||||||
|
description: "End line number (1-based, inclusive)",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "read" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||||
|
return "Parameter 'path' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.start !== undefined) {
|
||||||
|
if (typeof params.start !== "number" || !Number.isInteger(params.start)) {
|
||||||
|
return "Parameter 'start' must be an integer"
|
||||||
|
}
|
||||||
|
if (params.start < 1) {
|
||||||
|
return "Parameter 'start' must be >= 1"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.end !== undefined) {
|
||||||
|
if (typeof params.end !== "number" || !Number.isInteger(params.end)) {
|
||||||
|
return "Parameter 'end' must be an integer"
|
||||||
|
}
|
||||||
|
if (params.end < 1) {
|
||||||
|
return "Parameter 'end' must be >= 1"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.start !== undefined && params.end !== undefined && params.start > params.end) {
|
||||||
|
return "Parameter 'start' must be <= 'end'"
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const relativePath = params.path as string
|
||||||
|
const absolutePath = path.resolve(ctx.projectRoot, relativePath)
|
||||||
|
|
||||||
|
if (!absolutePath.startsWith(ctx.projectRoot)) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
"Path must be within project root",
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const lines = await this.getFileLines(absolutePath, relativePath, ctx)
|
||||||
|
const totalLines = lines.length
|
||||||
|
|
||||||
|
let startLine = (params.start as number | undefined) ?? 1
|
||||||
|
let endLine = (params.end as number | undefined) ?? totalLines
|
||||||
|
|
||||||
|
startLine = Math.max(1, Math.min(startLine, totalLines))
|
||||||
|
endLine = Math.max(startLine, Math.min(endLine, totalLines))
|
||||||
|
|
||||||
|
const selectedLines = lines.slice(startLine - 1, endLine)
|
||||||
|
const content = this.formatLinesWithNumbers(selectedLines, startLine)
|
||||||
|
|
||||||
|
const result: GetLinesResult = {
|
||||||
|
path: relativePath,
|
||||||
|
startLine,
|
||||||
|
endLine,
|
||||||
|
totalLines,
|
||||||
|
content,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get file lines from storage or filesystem.
|
||||||
|
*/
|
||||||
|
private async getFileLines(
|
||||||
|
absolutePath: string,
|
||||||
|
relativePath: string,
|
||||||
|
ctx: ToolContext,
|
||||||
|
): Promise<string[]> {
|
||||||
|
const fileData = await ctx.storage.getFile(relativePath)
|
||||||
|
if (fileData) {
|
||||||
|
return fileData.lines
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = await fs.readFile(absolutePath, "utf-8")
|
||||||
|
return content.split("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format lines with line numbers.
|
||||||
|
* Example: " 1│const x = 1"
|
||||||
|
*/
|
||||||
|
private formatLinesWithNumbers(lines: string[], startLine: number): string {
|
||||||
|
const maxLineNum = startLine + lines.length - 1
|
||||||
|
const padWidth = String(maxLineNum).length
|
||||||
|
|
||||||
|
return lines
|
||||||
|
.map((line, index) => {
|
||||||
|
const lineNum = String(startLine + index).padStart(padWidth, " ")
|
||||||
|
return `${lineNum}│${line}`
|
||||||
|
})
|
||||||
|
.join("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,205 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
import { DEFAULT_IGNORE_PATTERNS } from "../../../domain/constants/index.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tree node representing a file or directory.
|
||||||
|
*/
|
||||||
|
export interface TreeNode {
    name: string // basename of the file or directory
    type: "file" | "directory"
    children?: TreeNode[] // set for directory nodes; absent on file nodes
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result data from get_structure tool.
|
||||||
|
*/
|
||||||
|
export interface GetStructureResult {
    path: string // requested subdirectory, "." when the project root was used
    tree: TreeNode // root of the traversed tree
    content: string // ASCII-art rendering of the tree
    stats: {
        directories: number // directories visited (including the root)
        files: number // files included in the tree
    }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for getting project directory structure as a tree.
|
||||||
|
*/
|
||||||
|
export class GetStructureTool implements ITool {
    readonly name = "get_structure"
    readonly description =
        "Get project directory structure as a tree. " +
        "If path is specified, shows structure of that subdirectory only."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "path",
            type: "string",
            description: "Subdirectory path relative to project root (optional, defaults to root)",
            required: false,
        },
        {
            name: "depth",
            type: "number",
            description: "Maximum depth to traverse (default: unlimited)",
            required: false,
        },
    ]
    // Read-only tool: no user confirmation required before execution.
    readonly requiresConfirmation = false
    readonly category = "read" as const

    // Entry names skipped during traversal: the shared project-wide ignore
    // list plus common VCS / IDE / test-cache directories.
    private readonly defaultIgnorePatterns = new Set([
        ...DEFAULT_IGNORE_PATTERNS,
        ".git",
        ".idea",
        ".vscode",
        "__pycache__",
        ".pytest_cache",
        ".nyc_output",
        "coverage",
    ])
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (params.path !== undefined) {
|
||||||
|
if (typeof params.path !== "string") {
|
||||||
|
return "Parameter 'path' must be a string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.depth !== undefined) {
|
||||||
|
if (typeof params.depth !== "number" || !Number.isInteger(params.depth)) {
|
||||||
|
return "Parameter 'depth' must be an integer"
|
||||||
|
}
|
||||||
|
if (params.depth < 1) {
|
||||||
|
return "Parameter 'depth' must be >= 1"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const relativePath = (params.path as string | undefined) ?? ""
|
||||||
|
const maxDepth = params.depth as number | undefined
|
||||||
|
const absolutePath = path.resolve(ctx.projectRoot, relativePath)
|
||||||
|
|
||||||
|
if (!absolutePath.startsWith(ctx.projectRoot)) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
"Path must be within project root",
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const stat = await fs.stat(absolutePath)
|
||||||
|
if (!stat.isDirectory()) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`Path "${relativePath}" is not a directory`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const stats = { directories: 0, files: 0 }
|
||||||
|
const tree = await this.buildTree(absolutePath, maxDepth, 0, stats)
|
||||||
|
const content = this.formatTree(tree)
|
||||||
|
|
||||||
|
const result: GetStructureResult = {
|
||||||
|
path: relativePath || ".",
|
||||||
|
tree,
|
||||||
|
content,
|
||||||
|
stats,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build tree structure recursively.
|
||||||
|
*/
|
||||||
|
private async buildTree(
|
||||||
|
dirPath: string,
|
||||||
|
maxDepth: number | undefined,
|
||||||
|
currentDepth: number,
|
||||||
|
stats: { directories: number; files: number },
|
||||||
|
): Promise<TreeNode> {
|
||||||
|
const name = path.basename(dirPath) || dirPath
|
||||||
|
const node: TreeNode = { name, type: "directory", children: [] }
|
||||||
|
stats.directories++
|
||||||
|
|
||||||
|
if (maxDepth !== undefined && currentDepth >= maxDepth) {
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
const entries = await fs.readdir(dirPath, { withFileTypes: true })
|
||||||
|
const sortedEntries = entries
|
||||||
|
.filter((e) => !this.shouldIgnore(e.name))
|
||||||
|
.sort((a, b) => {
|
||||||
|
if (a.isDirectory() && !b.isDirectory()) {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
if (!a.isDirectory() && b.isDirectory()) {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
return a.name.localeCompare(b.name)
|
||||||
|
})
|
||||||
|
|
||||||
|
for (const entry of sortedEntries) {
|
||||||
|
const entryPath = path.join(dirPath, entry.name)
|
||||||
|
|
||||||
|
if (entry.isDirectory()) {
|
||||||
|
const childNode = await this.buildTree(entryPath, maxDepth, currentDepth + 1, stats)
|
||||||
|
node.children?.push(childNode)
|
||||||
|
} else if (entry.isFile()) {
|
||||||
|
node.children?.push({ name: entry.name, type: "file" })
|
||||||
|
stats.files++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if entry should be ignored.
|
||||||
|
*/
|
||||||
|
private shouldIgnore(name: string): boolean {
|
||||||
|
return this.defaultIgnorePatterns.has(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format tree as ASCII art.
|
||||||
|
*/
|
||||||
|
private formatTree(node: TreeNode, prefix = "", isLast = true): string {
|
||||||
|
const lines: string[] = []
|
||||||
|
const connector = isLast ? "└── " : "├── "
|
||||||
|
const icon = node.type === "directory" ? "📁 " : "📄 "
|
||||||
|
|
||||||
|
lines.push(`${prefix}${connector}${icon}${node.name}`)
|
||||||
|
|
||||||
|
if (node.children) {
|
||||||
|
const childPrefix = prefix + (isLast ? " " : "│ ")
|
||||||
|
const childCount = node.children.length
|
||||||
|
node.children.forEach((child, index) => {
|
||||||
|
const childIsLast = index === childCount - 1
|
||||||
|
lines.push(this.formatTree(child, childPrefix, childIsLast))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
190
packages/ipuaro/src/infrastructure/tools/registry.ts
Normal file
190
packages/ipuaro/src/infrastructure/tools/registry.ts
Normal file
@@ -0,0 +1,190 @@
|
|||||||
|
import type { IToolRegistry } from "../../application/interfaces/IToolRegistry.js"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../domain/services/ITool.js"
|
||||||
|
import { createErrorResult, type ToolResult } from "../../domain/value-objects/ToolResult.js"
|
||||||
|
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool registry implementation.
|
||||||
|
* Manages registration and execution of tools.
|
||||||
|
*/
|
||||||
|
export class ToolRegistry implements IToolRegistry {
|
||||||
|
private readonly tools = new Map<string, ITool>()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a tool.
|
||||||
|
* @throws IpuaroError if tool with same name already registered
|
||||||
|
*/
|
||||||
|
register(tool: ITool): void {
|
||||||
|
if (this.tools.has(tool.name)) {
|
||||||
|
throw new IpuaroError(
|
||||||
|
"validation",
|
||||||
|
`Tool "${tool.name}" is already registered`,
|
||||||
|
true,
|
||||||
|
"Use a different tool name or unregister the existing tool first",
|
||||||
|
)
|
||||||
|
}
|
||||||
|
this.tools.set(tool.name, tool)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Unregister a tool by name.
|
||||||
|
* @returns true if tool was removed, false if not found
|
||||||
|
*/
|
||||||
|
unregister(name: string): boolean {
|
||||||
|
return this.tools.delete(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool by name.
|
||||||
|
*/
|
||||||
|
get(name: string): ITool | undefined {
|
||||||
|
return this.tools.get(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all registered tools.
|
||||||
|
*/
|
||||||
|
getAll(): ITool[] {
|
||||||
|
return Array.from(this.tools.values())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tools by category.
|
||||||
|
*/
|
||||||
|
getByCategory(category: ITool["category"]): ITool[] {
|
||||||
|
return this.getAll().filter((tool) => tool.category === category)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if tool exists.
|
||||||
|
*/
|
||||||
|
has(name: string): boolean {
|
||||||
|
return this.tools.has(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get number of registered tools.
|
||||||
|
*/
|
||||||
|
get size(): number {
|
||||||
|
return this.tools.size
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute tool by name.
|
||||||
|
* @throws IpuaroError if tool not found
|
||||||
|
*/
|
||||||
|
async execute(
|
||||||
|
name: string,
|
||||||
|
params: Record<string, unknown>,
|
||||||
|
ctx: ToolContext,
|
||||||
|
): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const tool = this.tools.get(name)
|
||||||
|
if (!tool) {
|
||||||
|
return createErrorResult(callId, `Tool "${name}" not found`, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
const validationError = tool.validateParams(params)
|
||||||
|
if (validationError) {
|
||||||
|
return createErrorResult(callId, validationError, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (tool.requiresConfirmation) {
|
||||||
|
const confirmed = await ctx.requestConfirmation(
|
||||||
|
`Execute "${name}" with params: ${JSON.stringify(params)}`,
|
||||||
|
)
|
||||||
|
if (!confirmed) {
|
||||||
|
return createErrorResult(callId, "User cancelled operation", Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await tool.execute(params, ctx)
|
||||||
|
return {
|
||||||
|
...result,
|
||||||
|
callId,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool definitions for LLM.
|
||||||
|
* Converts ITool[] to LLM-compatible format.
|
||||||
|
*/
|
||||||
|
getToolDefinitions(): {
|
||||||
|
name: string
|
||||||
|
description: string
|
||||||
|
parameters: {
|
||||||
|
type: "object"
|
||||||
|
properties: Record<string, { type: string; description: string }>
|
||||||
|
required: string[]
|
||||||
|
}
|
||||||
|
}[] {
|
||||||
|
return this.getAll().map((tool) => ({
|
||||||
|
name: tool.name,
|
||||||
|
description: tool.description,
|
||||||
|
parameters: this.convertParametersToSchema(tool.parameters),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert tool parameters to JSON Schema format.
|
||||||
|
*/
|
||||||
|
private convertParametersToSchema(params: ToolParameterSchema[]): {
|
||||||
|
type: "object"
|
||||||
|
properties: Record<string, { type: string; description: string }>
|
||||||
|
required: string[]
|
||||||
|
} {
|
||||||
|
const properties: Record<string, { type: string; description: string }> = {}
|
||||||
|
const required: string[] = []
|
||||||
|
|
||||||
|
for (const param of params) {
|
||||||
|
properties[param.name] = {
|
||||||
|
type: param.type,
|
||||||
|
description: param.description,
|
||||||
|
}
|
||||||
|
if (param.required) {
|
||||||
|
required.push(param.name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: "object",
|
||||||
|
properties,
|
||||||
|
required,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear all registered tools.
|
||||||
|
*/
|
||||||
|
clear(): void {
|
||||||
|
this.tools.clear()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool names.
|
||||||
|
*/
|
||||||
|
getNames(): string[] {
|
||||||
|
return Array.from(this.tools.keys())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tools that require confirmation.
|
||||||
|
*/
|
||||||
|
getConfirmationTools(): ITool[] {
|
||||||
|
return this.getAll().filter((tool) => tool.requiresConfirmation)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tools that don't require confirmation.
|
||||||
|
*/
|
||||||
|
getSafeTools(): ITool[] {
|
||||||
|
return this.getAll().filter((tool) => !tool.requiresConfirmation)
|
||||||
|
}
|
||||||
|
}
|
||||||
2
packages/ipuaro/tests/fixtures/sample-project/.gitignore
vendored
Normal file
2
packages/ipuaro/tests/fixtures/sample-project/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
ignored-file.ts
|
||||||
|
*.log
|
||||||
4
packages/ipuaro/tests/fixtures/sample-project/package.json
vendored
Normal file
4
packages/ipuaro/tests/fixtures/sample-project/package.json
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
{
|
||||||
|
"name": "sample-project",
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
3
packages/ipuaro/tests/fixtures/sample-project/src/index.ts
vendored
Normal file
3
packages/ipuaro/tests/fixtures/sample-project/src/index.ts
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
export function main(): void {
|
||||||
|
console.log("Hello")
|
||||||
|
}
|
||||||
3
packages/ipuaro/tests/fixtures/sample-project/src/utils.ts
vendored
Normal file
3
packages/ipuaro/tests/fixtures/sample-project/src/utils.ts
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
export function add(a: number, b: number): number {
|
||||||
|
return a + b
|
||||||
|
}
|
||||||
@@ -36,9 +36,7 @@ describe("ChatMessage", () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it("should create assistant message with tool calls", () => {
|
it("should create assistant message with tool calls", () => {
|
||||||
const toolCalls = [
|
const toolCalls = [{ id: "1", name: "get_lines", params: {}, timestamp: Date.now() }]
|
||||||
{ id: "1", name: "get_lines", params: {}, timestamp: Date.now() },
|
|
||||||
]
|
|
||||||
const stats = { tokens: 100, timeMs: 500, toolCalls: 1 }
|
const stats = { tokens: 100, timeMs: 500, toolCalls: 1 }
|
||||||
const msg = createAssistantMessage("Response", toolCalls, stats)
|
const msg = createAssistantMessage("Response", toolCalls, stats)
|
||||||
|
|
||||||
@@ -49,9 +47,7 @@ describe("ChatMessage", () => {
|
|||||||
|
|
||||||
describe("createToolMessage", () => {
|
describe("createToolMessage", () => {
|
||||||
it("should create tool message with results", () => {
|
it("should create tool message with results", () => {
|
||||||
const results = [
|
const results = [{ callId: "1", success: true, data: "data", executionTimeMs: 10 }]
|
||||||
{ callId: "1", success: true, data: "data", executionTimeMs: 10 },
|
|
||||||
]
|
|
||||||
const msg = createToolMessage(results)
|
const msg = createToolMessage(results)
|
||||||
|
|
||||||
expect(msg.role).toBe("tool")
|
expect(msg.role).toBe("tool")
|
||||||
|
|||||||
@@ -1,8 +1,5 @@
|
|||||||
import { describe, it, expect } from "vitest"
|
import { describe, it, expect } from "vitest"
|
||||||
import {
|
import { createFileData, isFileDataEqual } from "../../../../src/domain/value-objects/FileData.js"
|
||||||
createFileData,
|
|
||||||
isFileDataEqual,
|
|
||||||
} from "../../../../src/domain/value-objects/FileData.js"
|
|
||||||
|
|
||||||
describe("FileData", () => {
|
describe("FileData", () => {
|
||||||
describe("createFileData", () => {
|
describe("createFileData", () => {
|
||||||
|
|||||||
@@ -1,8 +1,5 @@
|
|||||||
import { describe, it, expect } from "vitest"
|
import { describe, it, expect } from "vitest"
|
||||||
import {
|
import { createFileMeta, isHubFile } from "../../../../src/domain/value-objects/FileMeta.js"
|
||||||
createFileMeta,
|
|
||||||
isHubFile,
|
|
||||||
} from "../../../../src/domain/value-objects/FileMeta.js"
|
|
||||||
|
|
||||||
describe("FileMeta", () => {
|
describe("FileMeta", () => {
|
||||||
describe("createFileMeta", () => {
|
describe("createFileMeta", () => {
|
||||||
|
|||||||
@@ -1,8 +1,5 @@
|
|||||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
|
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
|
||||||
import {
|
import { createUndoEntry, canUndo } from "../../../../src/domain/value-objects/UndoEntry.js"
|
||||||
createUndoEntry,
|
|
||||||
canUndo,
|
|
||||||
} from "../../../../src/domain/value-objects/UndoEntry.js"
|
|
||||||
|
|
||||||
describe("UndoEntry", () => {
|
describe("UndoEntry", () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
@@ -21,7 +18,7 @@ describe("UndoEntry", () => {
|
|||||||
"test.ts",
|
"test.ts",
|
||||||
["old line"],
|
["old line"],
|
||||||
["new line"],
|
["new line"],
|
||||||
"Edit line 1"
|
"Edit line 1",
|
||||||
)
|
)
|
||||||
|
|
||||||
expect(entry.id).toBe("undo-1")
|
expect(entry.id).toBe("undo-1")
|
||||||
@@ -34,14 +31,7 @@ describe("UndoEntry", () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it("should create undo entry with toolCallId", () => {
|
it("should create undo entry with toolCallId", () => {
|
||||||
const entry = createUndoEntry(
|
const entry = createUndoEntry("undo-2", "test.ts", [], [], "Create file", "tool-123")
|
||||||
"undo-2",
|
|
||||||
"test.ts",
|
|
||||||
[],
|
|
||||||
[],
|
|
||||||
"Create file",
|
|
||||||
"tool-123"
|
|
||||||
)
|
|
||||||
|
|
||||||
expect(entry.toolCallId).toBe("tool-123")
|
expect(entry.toolCallId).toBe("tool-123")
|
||||||
})
|
})
|
||||||
@@ -49,37 +39,19 @@ describe("UndoEntry", () => {
|
|||||||
|
|
||||||
describe("canUndo", () => {
|
describe("canUndo", () => {
|
||||||
it("should return true when current content matches newContent", () => {
|
it("should return true when current content matches newContent", () => {
|
||||||
const entry = createUndoEntry(
|
const entry = createUndoEntry("undo-1", "test.ts", ["old"], ["new"], "Edit")
|
||||||
"undo-1",
|
|
||||||
"test.ts",
|
|
||||||
["old"],
|
|
||||||
["new"],
|
|
||||||
"Edit"
|
|
||||||
)
|
|
||||||
|
|
||||||
expect(canUndo(entry, ["new"])).toBe(true)
|
expect(canUndo(entry, ["new"])).toBe(true)
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return false when content differs", () => {
|
it("should return false when content differs", () => {
|
||||||
const entry = createUndoEntry(
|
const entry = createUndoEntry("undo-1", "test.ts", ["old"], ["new"], "Edit")
|
||||||
"undo-1",
|
|
||||||
"test.ts",
|
|
||||||
["old"],
|
|
||||||
["new"],
|
|
||||||
"Edit"
|
|
||||||
)
|
|
||||||
|
|
||||||
expect(canUndo(entry, ["modified"])).toBe(false)
|
expect(canUndo(entry, ["modified"])).toBe(false)
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return false when length differs", () => {
|
it("should return false when length differs", () => {
|
||||||
const entry = createUndoEntry(
|
const entry = createUndoEntry("undo-1", "test.ts", ["old"], ["new"], "Edit")
|
||||||
"undo-1",
|
|
||||||
"test.ts",
|
|
||||||
["old"],
|
|
||||||
["new"],
|
|
||||||
"Edit"
|
|
||||||
)
|
|
||||||
|
|
||||||
expect(canUndo(entry, ["new", "extra"])).toBe(false)
|
expect(canUndo(entry, ["new", "extra"])).toBe(false)
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -0,0 +1,347 @@
|
|||||||
|
import { describe, it, expect, beforeAll } from "vitest"
|
||||||
|
import { ASTParser } from "../../../../src/infrastructure/indexer/ASTParser.js"
|
||||||
|
|
||||||
|
describe("ASTParser", () => {
|
||||||
|
let parser: ASTParser
|
||||||
|
|
||||||
|
beforeAll(() => {
|
||||||
|
parser = new ASTParser()
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("parse", () => {
|
||||||
|
it("should parse empty file", () => {
|
||||||
|
const ast = parser.parse("", "ts")
|
||||||
|
expect(ast.parseError).toBe(false)
|
||||||
|
expect(ast.imports).toHaveLength(0)
|
||||||
|
expect(ast.exports).toHaveLength(0)
|
||||||
|
expect(ast.functions).toHaveLength(0)
|
||||||
|
expect(ast.classes).toHaveLength(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle syntax errors gracefully", () => {
|
||||||
|
const code = "export function {{{ invalid"
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.parseError).toBe(true)
|
||||||
|
expect(ast.parseErrorMessage).toBeDefined()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for unsupported language", () => {
|
||||||
|
const ast = parser.parse("const x = 1", "py" as never)
|
||||||
|
expect(ast.parseError).toBe(true)
|
||||||
|
expect(ast.parseErrorMessage).toContain("Unsupported language")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("imports", () => {
|
||||||
|
it("should extract default import", () => {
|
||||||
|
const code = `import React from "react"`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.imports).toHaveLength(1)
|
||||||
|
expect(ast.imports[0]).toMatchObject({
|
||||||
|
name: "React",
|
||||||
|
from: "react",
|
||||||
|
isDefault: true,
|
||||||
|
type: "external",
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract named imports", () => {
|
||||||
|
const code = `import { useState, useEffect } from "react"`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.imports).toHaveLength(2)
|
||||||
|
expect(ast.imports[0].name).toBe("useState")
|
||||||
|
expect(ast.imports[1].name).toBe("useEffect")
|
||||||
|
expect(ast.imports[0].isDefault).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract namespace import", () => {
|
||||||
|
const code = `import * as path from "path"`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.imports).toHaveLength(1)
|
||||||
|
expect(ast.imports[0].name).toBe("path")
|
||||||
|
expect(ast.imports[0].isDefault).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify internal imports", () => {
|
||||||
|
const code = `import { foo } from "./utils"`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.imports[0].type).toBe("internal")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify builtin imports", () => {
|
||||||
|
const code = `import * as fs from "node:fs"`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.imports[0].type).toBe("builtin")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify external imports", () => {
|
||||||
|
const code = `import lodash from "lodash"`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.imports[0].type).toBe("external")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("functions", () => {
|
||||||
|
it("should extract function declaration", () => {
|
||||||
|
const code = `function add(a: number, b: number): number {
|
||||||
|
return a + b
|
||||||
|
}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.functions).toHaveLength(1)
|
||||||
|
expect(ast.functions[0]).toMatchObject({
|
||||||
|
name: "add",
|
||||||
|
isAsync: false,
|
||||||
|
isExported: false,
|
||||||
|
})
|
||||||
|
expect(ast.functions[0].lineStart).toBe(1)
|
||||||
|
expect(ast.functions[0].lineEnd).toBe(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract async function", () => {
|
||||||
|
const code = `async function fetchData() { return null }`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.functions[0].isAsync).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract exported function", () => {
|
||||||
|
const code = `export function main() {}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.functions[0].isExported).toBe(true)
|
||||||
|
expect(ast.exports).toHaveLength(1)
|
||||||
|
expect(ast.exports[0].kind).toBe("function")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract arrow function", () => {
|
||||||
|
const code = `const add = (a: number, b: number) => a + b`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.functions).toHaveLength(1)
|
||||||
|
expect(ast.functions[0].name).toBe("add")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract function parameters", () => {
|
||||||
|
const code = `function test(a: string, b?: number, c = 10) {}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.functions[0].params).toHaveLength(3)
|
||||||
|
expect(ast.functions[0].params[0]).toMatchObject({
|
||||||
|
name: "a",
|
||||||
|
optional: false,
|
||||||
|
})
|
||||||
|
expect(ast.functions[0].params[1]).toMatchObject({
|
||||||
|
name: "b",
|
||||||
|
optional: true,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("classes", () => {
|
||||||
|
it("should extract class declaration", () => {
|
||||||
|
const code = `class MyClass {
|
||||||
|
value: number
|
||||||
|
|
||||||
|
constructor() {}
|
||||||
|
|
||||||
|
getValue() {
|
||||||
|
return this.value
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.classes).toHaveLength(1)
|
||||||
|
expect(ast.classes[0]).toMatchObject({
|
||||||
|
name: "MyClass",
|
||||||
|
isExported: false,
|
||||||
|
isAbstract: false,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract exported class", () => {
|
||||||
|
const code = `export class Service {}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.classes[0].isExported).toBe(true)
|
||||||
|
expect(ast.exports).toHaveLength(1)
|
||||||
|
expect(ast.exports[0].kind).toBe("class")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract class methods", () => {
|
||||||
|
const code = `class Service {
|
||||||
|
async fetch() {}
|
||||||
|
private process() {}
|
||||||
|
static create() {}
|
||||||
|
}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.classes[0].methods.length).toBeGreaterThanOrEqual(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract class extends", () => {
|
||||||
|
const code = `class Child extends Parent {}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.classes[0].extends).toBe("Parent")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("interfaces", () => {
|
||||||
|
it("should extract interface declaration", () => {
|
||||||
|
const code = `interface User {
|
||||||
|
name: string
|
||||||
|
age: number
|
||||||
|
}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.interfaces).toHaveLength(1)
|
||||||
|
expect(ast.interfaces[0]).toMatchObject({
|
||||||
|
name: "User",
|
||||||
|
isExported: false,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract exported interface", () => {
|
||||||
|
const code = `export interface Config {}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.interfaces[0].isExported).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract interface properties", () => {
|
||||||
|
const code = `interface Props {
|
||||||
|
value: string
|
||||||
|
onChange: (v: string) => void
|
||||||
|
}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.interfaces[0].properties.length).toBeGreaterThanOrEqual(1)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("type aliases", () => {
|
||||||
|
it("should extract type alias", () => {
|
||||||
|
const code = `type ID = string | number`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.typeAliases).toHaveLength(1)
|
||||||
|
expect(ast.typeAliases[0]).toMatchObject({
|
||||||
|
name: "ID",
|
||||||
|
isExported: false,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract exported type alias", () => {
|
||||||
|
const code = `export type Status = "pending" | "done"`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.typeAliases[0].isExported).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("exports", () => {
|
||||||
|
it("should extract named exports", () => {
|
||||||
|
const code = `
|
||||||
|
const foo = 1
|
||||||
|
const bar = 2
|
||||||
|
export { foo, bar }
|
||||||
|
`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.exports).toHaveLength(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract export default", () => {
|
||||||
|
const code = `export default function main() {}`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.exports.some((e) => e.isDefault)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract exported const", () => {
|
||||||
|
const code = `export const VERSION = "1.0.0"`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
expect(ast.exports).toHaveLength(1)
|
||||||
|
expect(ast.exports[0].kind).toBe("variable")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("JavaScript support", () => {
|
||||||
|
it("should parse JavaScript file", () => {
|
||||||
|
const code = `
|
||||||
|
import React from "react"
|
||||||
|
|
||||||
|
function Component() {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
export default Component
|
||||||
|
`
|
||||||
|
const ast = parser.parse(code, "js")
|
||||||
|
expect(ast.parseError).toBe(false)
|
||||||
|
expect(ast.imports).toHaveLength(1)
|
||||||
|
expect(ast.functions).toHaveLength(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse JSX file", () => {
|
||||||
|
const code = `
|
||||||
|
import React from "react"
|
||||||
|
|
||||||
|
function App() {
|
||||||
|
return <div>Hello</div>
|
||||||
|
}
|
||||||
|
`
|
||||||
|
const ast = parser.parse(code, "jsx")
|
||||||
|
expect(ast.parseError).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("TSX support", () => {
|
||||||
|
it("should parse TSX file", () => {
|
||||||
|
const code = `
|
||||||
|
import React from "react"
|
||||||
|
|
||||||
|
interface Props {
|
||||||
|
name: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export function Greeting({ name }: Props) {
|
||||||
|
return <h1>Hello, {name}!</h1>
|
||||||
|
}
|
||||||
|
`
|
||||||
|
const ast = parser.parse(code, "tsx")
|
||||||
|
expect(ast.parseError).toBe(false)
|
||||||
|
expect(ast.interfaces).toHaveLength(1)
|
||||||
|
expect(ast.functions).toHaveLength(1)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("complex file", () => {
|
||||||
|
it("should parse complex TypeScript file", () => {
|
||||||
|
const code = `
|
||||||
|
import * as fs from "node:fs"
|
||||||
|
import { join } from "node:path"
|
||||||
|
import type { Config } from "./types"
|
||||||
|
|
||||||
|
export interface Options {
|
||||||
|
root: string
|
||||||
|
verbose?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
export type Result = { success: boolean }
|
||||||
|
|
||||||
|
export class Scanner {
|
||||||
|
private options: Options
|
||||||
|
|
||||||
|
constructor(options: Options) {
|
||||||
|
this.options = options
|
||||||
|
}
|
||||||
|
|
||||||
|
async scan(): Promise<string[]> {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createScanner(options: Options): Scanner {
|
||||||
|
return new Scanner(options)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const VERSION = "1.0.0"
|
||||||
|
`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
|
||||||
|
expect(ast.parseError).toBe(false)
|
||||||
|
expect(ast.imports.length).toBeGreaterThanOrEqual(2)
|
||||||
|
expect(ast.interfaces).toHaveLength(1)
|
||||||
|
expect(ast.typeAliases).toHaveLength(1)
|
||||||
|
expect(ast.classes).toHaveLength(1)
|
||||||
|
expect(ast.functions.length).toBeGreaterThanOrEqual(1)
|
||||||
|
expect(ast.exports.length).toBeGreaterThanOrEqual(4)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,238 @@
|
|||||||
|
import * as fs from "node:fs/promises"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import { describe, it, expect, beforeAll, afterAll } from "vitest"
|
||||||
|
import {
|
||||||
|
FileScanner,
|
||||||
|
type ScanProgress,
|
||||||
|
} from "../../../../src/infrastructure/indexer/FileScanner.js"
|
||||||
|
import type { ScanResult } from "../../../../src/domain/services/IIndexer.js"
|
||||||
|
|
||||||
|
const FIXTURES_DIR = path.join(__dirname, "../../../fixtures/sample-project")
|
||||||
|
|
||||||
|
describe("FileScanner", () => {
|
||||||
|
describe("constructor", () => {
|
||||||
|
it("should create instance with default options", () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
expect(scanner).toBeInstanceOf(FileScanner)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should accept custom extensions", () => {
|
||||||
|
const scanner = new FileScanner({ extensions: [".ts", ".js"] })
|
||||||
|
expect(scanner.isSupportedExtension("file.ts")).toBe(true)
|
||||||
|
expect(scanner.isSupportedExtension("file.js")).toBe(true)
|
||||||
|
expect(scanner.isSupportedExtension("file.tsx")).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should accept additional ignore patterns", () => {
|
||||||
|
const scanner = new FileScanner({ additionalIgnore: ["*.test.ts"] })
|
||||||
|
expect(scanner).toBeInstanceOf(FileScanner)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should accept progress callback", () => {
|
||||||
|
const onProgress = (progress: ScanProgress): void => {
|
||||||
|
// callback
|
||||||
|
}
|
||||||
|
const scanner = new FileScanner({ onProgress })
|
||||||
|
expect(scanner).toBeInstanceOf(FileScanner)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("isSupportedExtension", () => {
|
||||||
|
it("should return true for supported extensions", () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
expect(scanner.isSupportedExtension("file.ts")).toBe(true)
|
||||||
|
expect(scanner.isSupportedExtension("file.tsx")).toBe(true)
|
||||||
|
expect(scanner.isSupportedExtension("file.js")).toBe(true)
|
||||||
|
expect(scanner.isSupportedExtension("file.jsx")).toBe(true)
|
||||||
|
expect(scanner.isSupportedExtension("file.json")).toBe(true)
|
||||||
|
expect(scanner.isSupportedExtension("file.yaml")).toBe(true)
|
||||||
|
expect(scanner.isSupportedExtension("file.yml")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false for unsupported extensions", () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
expect(scanner.isSupportedExtension("file.md")).toBe(false)
|
||||||
|
expect(scanner.isSupportedExtension("file.txt")).toBe(false)
|
||||||
|
expect(scanner.isSupportedExtension("file.png")).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should be case-insensitive", () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
expect(scanner.isSupportedExtension("file.TS")).toBe(true)
|
||||||
|
expect(scanner.isSupportedExtension("file.TSX")).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("scan", () => {
|
||||||
|
it("should scan directory and yield file results", async () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
const results: ScanResult[] = []
|
||||||
|
|
||||||
|
for await (const result of scanner.scan(FIXTURES_DIR)) {
|
||||||
|
results.push(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
expect(results.length).toBeGreaterThan(0)
|
||||||
|
expect(results.every((r) => r.type === "file")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return relative paths", async () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||||
|
|
||||||
|
for (const result of results) {
|
||||||
|
expect(path.isAbsolute(result.path)).toBe(false)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include file stats", async () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||||
|
|
||||||
|
for (const result of results) {
|
||||||
|
expect(typeof result.size).toBe("number")
|
||||||
|
expect(result.size).toBeGreaterThanOrEqual(0)
|
||||||
|
expect(typeof result.lastModified).toBe("number")
|
||||||
|
expect(result.lastModified).toBeGreaterThan(0)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should ignore node_modules by default", async () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||||
|
|
||||||
|
const nodeModulesFiles = results.filter((r) => r.path.includes("node_modules"))
|
||||||
|
expect(nodeModulesFiles).toHaveLength(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should respect .gitignore", async () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||||
|
|
||||||
|
const ignoredFile = results.find((r) => r.path.includes("ignored-file"))
|
||||||
|
expect(ignoredFile).toBeUndefined()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should only include supported extensions", async () => {
|
||||||
|
const scanner = new FileScanner({ extensions: [".ts"] })
|
||||||
|
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||||
|
|
||||||
|
for (const result of results) {
|
||||||
|
expect(result.path.endsWith(".ts")).toBe(true)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should call progress callback", async () => {
|
||||||
|
const progressCalls: ScanProgress[] = []
|
||||||
|
const scanner = new FileScanner({
|
||||||
|
onProgress: (progress) => {
|
||||||
|
progressCalls.push({ ...progress })
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
await scanner.scanAll(FIXTURES_DIR)
|
||||||
|
|
||||||
|
expect(progressCalls.length).toBeGreaterThan(0)
|
||||||
|
for (const progress of progressCalls) {
|
||||||
|
expect(progress.current).toBeGreaterThan(0)
|
||||||
|
expect(progress.total).toBeGreaterThan(0)
|
||||||
|
expect(typeof progress.currentFile).toBe("string")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("scanAll", () => {
|
||||||
|
it("should return array of all results", async () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
const results = await scanner.scanAll(FIXTURES_DIR)
|
||||||
|
|
||||||
|
expect(Array.isArray(results)).toBe(true)
|
||||||
|
expect(results.length).toBeGreaterThan(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("isTextFile", () => {
|
||||||
|
let textFilePath: string
|
||||||
|
let binaryFilePath: string
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
textFilePath = path.join(FIXTURES_DIR, "src", "index.ts")
|
||||||
|
binaryFilePath = path.join(FIXTURES_DIR, "binary-test.bin")
|
||||||
|
await fs.writeFile(binaryFilePath, Buffer.from([0x00, 0x01, 0x02]))
|
||||||
|
})
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
try {
|
||||||
|
await fs.unlink(binaryFilePath)
|
||||||
|
} catch {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return true for text files", async () => {
|
||||||
|
const isText = await FileScanner.isTextFile(textFilePath)
|
||||||
|
expect(isText).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false for binary files", async () => {
|
||||||
|
const isText = await FileScanner.isTextFile(binaryFilePath)
|
||||||
|
expect(isText).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false for non-existent files", async () => {
|
||||||
|
const isText = await FileScanner.isTextFile("/non/existent/file.ts")
|
||||||
|
expect(isText).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("readFileContent", () => {
|
||||||
|
it("should read text file content", async () => {
|
||||||
|
const filePath = path.join(FIXTURES_DIR, "src", "index.ts")
|
||||||
|
const content = await FileScanner.readFileContent(filePath)
|
||||||
|
|
||||||
|
expect(content).not.toBeNull()
|
||||||
|
expect(content).toContain("export function main")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return null for binary files", async () => {
|
||||||
|
const binaryFilePath = path.join(FIXTURES_DIR, "binary-test2.bin")
|
||||||
|
await fs.writeFile(binaryFilePath, Buffer.from([0x00, 0x01, 0x02]))
|
||||||
|
|
||||||
|
try {
|
||||||
|
const content = await FileScanner.readFileContent(binaryFilePath)
|
||||||
|
expect(content).toBeNull()
|
||||||
|
} finally {
|
||||||
|
await fs.unlink(binaryFilePath)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return null for non-existent files", async () => {
|
||||||
|
const content = await FileScanner.readFileContent("/non/existent/file.ts")
|
||||||
|
expect(content).toBeNull()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("empty directory handling", () => {
|
||||||
|
let emptyDir: string
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
emptyDir = path.join(FIXTURES_DIR, "empty-dir")
|
||||||
|
await fs.mkdir(emptyDir, { recursive: true })
|
||||||
|
})
|
||||||
|
|
||||||
|
afterAll(async () => {
|
||||||
|
try {
|
||||||
|
await fs.rmdir(emptyDir)
|
||||||
|
} catch {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle empty directories gracefully", async () => {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
const results = await scanner.scanAll(emptyDir)
|
||||||
|
|
||||||
|
expect(results).toHaveLength(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,608 @@
|
|||||||
|
import { describe, it, expect, beforeAll } from "vitest"
|
||||||
|
import { IndexBuilder } from "../../../../src/infrastructure/indexer/IndexBuilder.js"
|
||||||
|
import { ASTParser } from "../../../../src/infrastructure/indexer/ASTParser.js"
|
||||||
|
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||||
|
import { createEmptyFileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||||
|
|
||||||
|
describe("IndexBuilder", () => {
|
||||||
|
let builder: IndexBuilder
|
||||||
|
let parser: ASTParser
|
||||||
|
const projectRoot = "/project"
|
||||||
|
|
||||||
|
beforeAll(() => {
|
||||||
|
builder = new IndexBuilder(projectRoot)
|
||||||
|
parser = new ASTParser()
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("buildSymbolIndex", () => {
|
||||||
|
it("should index function declarations", () => {
|
||||||
|
const code = `
|
||||||
|
export function greet(name: string): string {
|
||||||
|
return \`Hello, \${name}!\`
|
||||||
|
}
|
||||||
|
|
||||||
|
function privateHelper(): void {}
|
||||||
|
`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
const asts = new Map<string, FileAST>([["/project/src/utils.ts", ast]])
|
||||||
|
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
expect(index.has("greet")).toBe(true)
|
||||||
|
expect(index.has("privateHelper")).toBe(true)
|
||||||
|
expect(index.get("greet")).toEqual([
|
||||||
|
{ path: "/project/src/utils.ts", line: 2, type: "function" },
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should index class declarations and methods", () => {
|
||||||
|
const code = `
|
||||||
|
export class UserService {
|
||||||
|
async findById(id: string): Promise<User> {
|
||||||
|
return this.db.find(id)
|
||||||
|
}
|
||||||
|
|
||||||
|
private validate(data: unknown): void {}
|
||||||
|
}
|
||||||
|
`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
const asts = new Map<string, FileAST>([["/project/src/UserService.ts", ast]])
|
||||||
|
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
expect(index.has("UserService")).toBe(true)
|
||||||
|
expect(index.get("UserService")).toEqual([
|
||||||
|
{ path: "/project/src/UserService.ts", line: 2, type: "class" },
|
||||||
|
])
|
||||||
|
|
||||||
|
expect(index.has("UserService.findById")).toBe(true)
|
||||||
|
expect(index.has("UserService.validate")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should index interface declarations", () => {
|
||||||
|
const code = `
|
||||||
|
export interface User {
|
||||||
|
id: string
|
||||||
|
name: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface InternalConfig {
|
||||||
|
debug: boolean
|
||||||
|
}
|
||||||
|
`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
const asts = new Map<string, FileAST>([["/project/src/types.ts", ast]])
|
||||||
|
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
expect(index.has("User")).toBe(true)
|
||||||
|
expect(index.has("InternalConfig")).toBe(true)
|
||||||
|
expect(index.get("User")).toEqual([
|
||||||
|
{ path: "/project/src/types.ts", line: 2, type: "interface" },
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should index type alias declarations", () => {
|
||||||
|
const code = `
|
||||||
|
export type UserId = string
|
||||||
|
type Handler = (event: Event) => void
|
||||||
|
`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
const asts = new Map<string, FileAST>([["/project/src/types.ts", ast]])
|
||||||
|
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
expect(index.has("UserId")).toBe(true)
|
||||||
|
expect(index.has("Handler")).toBe(true)
|
||||||
|
expect(index.get("UserId")).toEqual([
|
||||||
|
{ path: "/project/src/types.ts", line: 2, type: "type" },
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should index exported variables", () => {
|
||||||
|
const code = `
|
||||||
|
export const API_URL = "https://api.example.com"
|
||||||
|
export const DEFAULT_TIMEOUT = 5000
|
||||||
|
`
|
||||||
|
const ast = parser.parse(code, "ts")
|
||||||
|
const asts = new Map<string, FileAST>([["/project/src/config.ts", ast]])
|
||||||
|
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
expect(index.has("API_URL")).toBe(true)
|
||||||
|
expect(index.has("DEFAULT_TIMEOUT")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle multiple files", () => {
|
||||||
|
const userCode = `export class User { name: string }`
|
||||||
|
const orderCode = `export class Order { id: string }`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/User.ts", parser.parse(userCode, "ts")],
|
||||||
|
["/project/src/Order.ts", parser.parse(orderCode, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
expect(index.has("User")).toBe(true)
|
||||||
|
expect(index.has("Order")).toBe(true)
|
||||||
|
expect(index.get("User")?.[0].path).toBe("/project/src/User.ts")
|
||||||
|
expect(index.get("Order")?.[0].path).toBe("/project/src/Order.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle duplicate symbol names across files", () => {
|
||||||
|
const file1 = `export function helper(): void {}`
|
||||||
|
const file2 = `export function helper(): void {}`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/a/utils.ts", parser.parse(file1, "ts")],
|
||||||
|
["/project/src/b/utils.ts", parser.parse(file2, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
expect(index.has("helper")).toBe(true)
|
||||||
|
expect(index.get("helper")).toHaveLength(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty index for empty ASTs", () => {
|
||||||
|
const asts = new Map<string, FileAST>()
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
expect(index.size).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not index empty names", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.functions.push({
|
||||||
|
name: "",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 3,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
isExported: false,
|
||||||
|
})
|
||||||
|
const asts = new Map<string, FileAST>([["/project/src/test.ts", ast]])
|
||||||
|
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
expect(index.has("")).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("buildDepsGraph", () => {
|
||||||
|
it("should build import relationships", () => {
|
||||||
|
const indexCode = `
|
||||||
|
import { helper } from "./utils"
|
||||||
|
export function main() { return helper() }
|
||||||
|
`
|
||||||
|
const utilsCode = `export function helper() { return 42 }`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/index.ts", parser.parse(indexCode, "ts")],
|
||||||
|
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
expect(graph.imports.get("/project/src/index.ts")).toContain("/project/src/utils.ts")
|
||||||
|
expect(graph.imports.get("/project/src/utils.ts")).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should build reverse import relationships", () => {
|
||||||
|
const indexCode = `import { helper } from "./utils"`
|
||||||
|
const utilsCode = `export function helper() {}`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/index.ts", parser.parse(indexCode, "ts")],
|
||||||
|
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
expect(graph.importedBy.get("/project/src/utils.ts")).toContain("/project/src/index.ts")
|
||||||
|
expect(graph.importedBy.get("/project/src/index.ts")).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle multiple imports from same file", () => {
|
||||||
|
const code = `
|
||||||
|
import { a } from "./utils"
|
||||||
|
import { b } from "./utils"
|
||||||
|
`
|
||||||
|
const utilsCode = `export const a = 1; export const b = 2;`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/index.ts", parser.parse(code, "ts")],
|
||||||
|
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const imports = graph.imports.get("/project/src/index.ts") ?? []
|
||||||
|
expect(imports.filter((i) => i === "/project/src/utils.ts")).toHaveLength(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should ignore external imports", () => {
|
||||||
|
const code = `
|
||||||
|
import React from "react"
|
||||||
|
import { helper } from "./utils"
|
||||||
|
`
|
||||||
|
const utilsCode = `export function helper() {}`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/index.ts", parser.parse(code, "ts")],
|
||||||
|
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const imports = graph.imports.get("/project/src/index.ts") ?? []
|
||||||
|
expect(imports).not.toContain("react")
|
||||||
|
expect(imports).toContain("/project/src/utils.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should ignore builtin imports", () => {
|
||||||
|
const code = `
|
||||||
|
import * as fs from "node:fs"
|
||||||
|
import { helper } from "./utils"
|
||||||
|
`
|
||||||
|
const utilsCode = `export function helper() {}`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/index.ts", parser.parse(code, "ts")],
|
||||||
|
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const imports = graph.imports.get("/project/src/index.ts") ?? []
|
||||||
|
expect(imports).not.toContain("node:fs")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle index.ts imports", () => {
|
||||||
|
const code = `import { util } from "./utils"`
|
||||||
|
const indexCode = `export function util() {}`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/main.ts", parser.parse(code, "ts")],
|
||||||
|
["/project/src/utils/index.ts", parser.parse(indexCode, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
expect(graph.imports.get("/project/src/main.ts")).toContain(
|
||||||
|
"/project/src/utils/index.ts",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle .js extension imports", () => {
|
||||||
|
const code = `import { helper } from "./utils.js"`
|
||||||
|
const utilsCode = `export function helper() {}`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/index.ts", parser.parse(code, "ts")],
|
||||||
|
["/project/src/utils.ts", parser.parse(utilsCode, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
expect(graph.imports.get("/project/src/index.ts")).toContain("/project/src/utils.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should sort dependencies", () => {
|
||||||
|
const code = `
|
||||||
|
import { c } from "./c"
|
||||||
|
import { a } from "./a"
|
||||||
|
import { b } from "./b"
|
||||||
|
`
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/index.ts", parser.parse(code, "ts")],
|
||||||
|
["/project/src/a.ts", parser.parse("export const a = 1", "ts")],
|
||||||
|
["/project/src/b.ts", parser.parse("export const b = 2", "ts")],
|
||||||
|
["/project/src/c.ts", parser.parse("export const c = 3", "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
expect(graph.imports.get("/project/src/index.ts")).toEqual([
|
||||||
|
"/project/src/a.ts",
|
||||||
|
"/project/src/b.ts",
|
||||||
|
"/project/src/c.ts",
|
||||||
|
])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty graph for empty ASTs", () => {
|
||||||
|
const asts = new Map<string, FileAST>()
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
expect(graph.imports.size).toBe(0)
|
||||||
|
expect(graph.importedBy.size).toBe(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("findSymbol", () => {
|
||||||
|
it("should find existing symbol", () => {
|
||||||
|
const code = `export function greet(): void {}`
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/utils.ts", parser.parse(code, "ts")],
|
||||||
|
])
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
const locations = builder.findSymbol(index, "greet")
|
||||||
|
expect(locations).toHaveLength(1)
|
||||||
|
expect(locations[0].path).toBe("/project/src/utils.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty array for non-existent symbol", () => {
|
||||||
|
const asts = new Map<string, FileAST>()
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
const locations = builder.findSymbol(index, "nonexistent")
|
||||||
|
expect(locations).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("searchSymbols", () => {
|
||||||
|
it("should find symbols matching pattern", () => {
|
||||||
|
const code = `
|
||||||
|
export function getUserById(): void {}
|
||||||
|
export function getUserByEmail(): void {}
|
||||||
|
export function createOrder(): void {}
|
||||||
|
`
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/api.ts", parser.parse(code, "ts")],
|
||||||
|
])
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
const results = builder.searchSymbols(index, "getUser")
|
||||||
|
expect(results.size).toBe(2)
|
||||||
|
expect(results.has("getUserById")).toBe(true)
|
||||||
|
expect(results.has("getUserByEmail")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should be case insensitive", () => {
|
||||||
|
const code = `export function MyFunction(): void {}`
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/test.ts", parser.parse(code, "ts")],
|
||||||
|
])
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
const results = builder.searchSymbols(index, "myfunction")
|
||||||
|
expect(results.has("MyFunction")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty map for no matches", () => {
|
||||||
|
const code = `export function test(): void {}`
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/test.ts", parser.parse(code, "ts")],
|
||||||
|
])
|
||||||
|
const index = builder.buildSymbolIndex(asts)
|
||||||
|
|
||||||
|
const results = builder.searchSymbols(index, "xyz123")
|
||||||
|
expect(results.size).toBe(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getDependencies", () => {
|
||||||
|
it("should return file dependencies", () => {
|
||||||
|
const indexCode = `import { a } from "./a"`
|
||||||
|
const aCode = `export const a = 1`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/index.ts", parser.parse(indexCode, "ts")],
|
||||||
|
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||||
|
])
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const deps = builder.getDependencies(graph, "/project/src/index.ts")
|
||||||
|
expect(deps).toContain("/project/src/a.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty array for file not in graph", () => {
|
||||||
|
const asts = new Map<string, FileAST>()
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const deps = builder.getDependencies(graph, "/nonexistent.ts")
|
||||||
|
expect(deps).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getDependents", () => {
|
||||||
|
it("should return file dependents", () => {
|
||||||
|
const indexCode = `import { a } from "./a"`
|
||||||
|
const aCode = `export const a = 1`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/index.ts", parser.parse(indexCode, "ts")],
|
||||||
|
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||||
|
])
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const dependents = builder.getDependents(graph, "/project/src/a.ts")
|
||||||
|
expect(dependents).toContain("/project/src/index.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty array for file not in graph", () => {
|
||||||
|
const asts = new Map<string, FileAST>()
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const dependents = builder.getDependents(graph, "/nonexistent.ts")
|
||||||
|
expect(dependents).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("findCircularDependencies", () => {
|
||||||
|
it("should detect simple circular dependency", () => {
|
||||||
|
const aCode = `import { b } from "./b"; export const a = 1;`
|
||||||
|
const bCode = `import { a } from "./a"; export const b = 2;`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||||
|
["/project/src/b.ts", parser.parse(bCode, "ts")],
|
||||||
|
])
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const cycles = builder.findCircularDependencies(graph)
|
||||||
|
expect(cycles.length).toBe(1)
|
||||||
|
expect(cycles[0]).toContain("/project/src/a.ts")
|
||||||
|
expect(cycles[0]).toContain("/project/src/b.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should detect three-way circular dependency", () => {
|
||||||
|
const aCode = `import { b } from "./b"; export const a = 1;`
|
||||||
|
const bCode = `import { c } from "./c"; export const b = 2;`
|
||||||
|
const cCode = `import { a } from "./a"; export const c = 3;`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||||
|
["/project/src/b.ts", parser.parse(bCode, "ts")],
|
||||||
|
["/project/src/c.ts", parser.parse(cCode, "ts")],
|
||||||
|
])
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const cycles = builder.findCircularDependencies(graph)
|
||||||
|
expect(cycles.length).toBe(1)
|
||||||
|
expect(cycles[0]).toHaveLength(4)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty array when no cycles", () => {
|
||||||
|
const aCode = `export const a = 1`
|
||||||
|
const bCode = `import { a } from "./a"; export const b = a + 1;`
|
||||||
|
const cCode = `import { b } from "./b"; export const c = b + 1;`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||||
|
["/project/src/b.ts", parser.parse(bCode, "ts")],
|
||||||
|
["/project/src/c.ts", parser.parse(cCode, "ts")],
|
||||||
|
])
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const cycles = builder.findCircularDependencies(graph)
|
||||||
|
expect(cycles).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle self-reference", () => {
|
||||||
|
const aCode = `import { helper } from "./a"; export const a = 1; export function helper() {}`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/a.ts", parser.parse(aCode, "ts")],
|
||||||
|
])
|
||||||
|
const graph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
const cycles = builder.findCircularDependencies(graph)
|
||||||
|
expect(cycles.length).toBe(1)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getStats", () => {
|
||||||
|
it("should return comprehensive statistics", () => {
|
||||||
|
const code1 = `
|
||||||
|
export function func1(): void {}
|
||||||
|
export class Class1 {}
|
||||||
|
export interface Interface1 {}
|
||||||
|
export type Type1 = string
|
||||||
|
export const VAR1 = 1
|
||||||
|
`
|
||||||
|
const code2 = `
|
||||||
|
import { func1 } from "./file1"
|
||||||
|
export function func2(): void {}
|
||||||
|
`
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/file1.ts", parser.parse(code1, "ts")],
|
||||||
|
["/project/src/file2.ts", parser.parse(code2, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const symbolIndex = builder.buildSymbolIndex(asts)
|
||||||
|
const depsGraph = builder.buildDepsGraph(asts)
|
||||||
|
const stats = builder.getStats(symbolIndex, depsGraph)
|
||||||
|
|
||||||
|
expect(stats.totalSymbols).toBeGreaterThan(0)
|
||||||
|
expect(stats.symbolsByType.function).toBeGreaterThan(0)
|
||||||
|
expect(stats.symbolsByType.class).toBe(1)
|
||||||
|
expect(stats.symbolsByType.interface).toBe(1)
|
||||||
|
expect(stats.symbolsByType.type).toBe(1)
|
||||||
|
expect(stats.totalFiles).toBe(2)
|
||||||
|
expect(stats.totalDependencies).toBe(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should identify hubs", () => {
|
||||||
|
const hubCode = `export const shared = 1`
|
||||||
|
const consumerCodes = Array.from({ length: 6 }, () => `import { shared } from "./hub"`)
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/hub.ts", parser.parse(hubCode, "ts")],
|
||||||
|
])
|
||||||
|
consumerCodes.forEach((code, i) => {
|
||||||
|
asts.set(`/project/src/consumer${i}.ts`, parser.parse(code, "ts"))
|
||||||
|
})
|
||||||
|
|
||||||
|
const symbolIndex = builder.buildSymbolIndex(asts)
|
||||||
|
const depsGraph = builder.buildDepsGraph(asts)
|
||||||
|
const stats = builder.getStats(symbolIndex, depsGraph)
|
||||||
|
|
||||||
|
expect(stats.hubs).toContain("/project/src/hub.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should identify orphans", () => {
|
||||||
|
const orphanCode = `const internal = 1`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/orphan.ts", parser.parse(orphanCode, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const symbolIndex = builder.buildSymbolIndex(asts)
|
||||||
|
const depsGraph = builder.buildDepsGraph(asts)
|
||||||
|
const stats = builder.getStats(symbolIndex, depsGraph)
|
||||||
|
|
||||||
|
expect(stats.orphans).toContain("/project/src/orphan.ts")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("integration with ASTParser", () => {
|
||||||
|
it("should work with complex TypeScript code", () => {
|
||||||
|
const code = `
|
||||||
|
import { BaseService } from "./base"
|
||||||
|
import type { User, UserDTO } from "./types"
|
||||||
|
|
||||||
|
export class UserService extends BaseService {
|
||||||
|
private readonly cache = new Map<string, User>()
|
||||||
|
|
||||||
|
async findById(id: string): Promise<User | null> {
|
||||||
|
if (this.cache.has(id)) {
|
||||||
|
return this.cache.get(id)!
|
||||||
|
}
|
||||||
|
return this.repository.find(id)
|
||||||
|
}
|
||||||
|
|
||||||
|
toDTO(user: User): UserDTO {
|
||||||
|
return { id: user.id, name: user.name }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ServiceResult<T> = { success: true; data: T } | { success: false; error: string }
|
||||||
|
`
|
||||||
|
const baseCode = `export class BaseService { protected repository: any }`
|
||||||
|
const typesCode = `export interface User { id: string; name: string }; export interface UserDTO { id: string; name: string }`
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
["/project/src/UserService.ts", parser.parse(code, "ts")],
|
||||||
|
["/project/src/base.ts", parser.parse(baseCode, "ts")],
|
||||||
|
["/project/src/types.ts", parser.parse(typesCode, "ts")],
|
||||||
|
])
|
||||||
|
|
||||||
|
const symbolIndex = builder.buildSymbolIndex(asts)
|
||||||
|
const depsGraph = builder.buildDepsGraph(asts)
|
||||||
|
|
||||||
|
expect(symbolIndex.has("UserService")).toBe(true)
|
||||||
|
expect(symbolIndex.has("UserService.findById")).toBe(true)
|
||||||
|
expect(symbolIndex.has("UserService.toDTO")).toBe(true)
|
||||||
|
expect(symbolIndex.has("ServiceResult")).toBe(true)
|
||||||
|
expect(symbolIndex.has("BaseService")).toBe(true)
|
||||||
|
expect(symbolIndex.has("User")).toBe(true)
|
||||||
|
|
||||||
|
expect(depsGraph.imports.get("/project/src/UserService.ts")).toContain(
|
||||||
|
"/project/src/base.ts",
|
||||||
|
)
|
||||||
|
expect(depsGraph.imports.get("/project/src/UserService.ts")).toContain(
|
||||||
|
"/project/src/types.ts",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,702 @@
|
|||||||
|
import { describe, it, expect, beforeAll } from "vitest"
|
||||||
|
import { MetaAnalyzer } from "../../../../src/infrastructure/indexer/MetaAnalyzer.js"
|
||||||
|
import { ASTParser } from "../../../../src/infrastructure/indexer/ASTParser.js"
|
||||||
|
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||||
|
import { createEmptyFileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||||
|
|
||||||
|
describe("MetaAnalyzer", () => {
|
||||||
|
let analyzer: MetaAnalyzer
|
||||||
|
let parser: ASTParser
|
||||||
|
const projectRoot = "/project"
|
||||||
|
|
||||||
|
beforeAll(() => {
|
||||||
|
analyzer = new MetaAnalyzer(projectRoot)
|
||||||
|
parser = new ASTParser()
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("countLinesOfCode", () => {
|
||||||
|
it("should count non-empty lines", () => {
|
||||||
|
const content = `const a = 1
|
||||||
|
const b = 2
|
||||||
|
const c = 3`
|
||||||
|
const loc = analyzer.countLinesOfCode(content)
|
||||||
|
expect(loc).toBe(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should exclude empty lines", () => {
|
||||||
|
const content = `const a = 1
|
||||||
|
|
||||||
|
const b = 2
|
||||||
|
|
||||||
|
const c = 3`
|
||||||
|
const loc = analyzer.countLinesOfCode(content)
|
||||||
|
expect(loc).toBe(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should exclude single-line comments", () => {
|
||||||
|
const content = `// This is a comment
|
||||||
|
const a = 1
|
||||||
|
// Another comment
|
||||||
|
const b = 2`
|
||||||
|
const loc = analyzer.countLinesOfCode(content)
|
||||||
|
expect(loc).toBe(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should exclude block comments", () => {
|
||||||
|
const content = `/*
|
||||||
|
* Multi-line comment
|
||||||
|
*/
|
||||||
|
const a = 1
|
||||||
|
/* inline block */ const b = 2`
|
||||||
|
const loc = analyzer.countLinesOfCode(content)
|
||||||
|
expect(loc).toBe(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle multi-line block comments", () => {
|
||||||
|
const content = `const a = 1
|
||||||
|
/*
|
||||||
|
comment line 1
|
||||||
|
comment line 2
|
||||||
|
*/
|
||||||
|
const b = 2`
|
||||||
|
const loc = analyzer.countLinesOfCode(content)
|
||||||
|
expect(loc).toBe(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return 0 for empty content", () => {
|
||||||
|
const loc = analyzer.countLinesOfCode("")
|
||||||
|
expect(loc).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return 0 for only comments", () => {
|
||||||
|
const content = `// comment 1
|
||||||
|
// comment 2
|
||||||
|
/* block comment */`
|
||||||
|
const loc = analyzer.countLinesOfCode(content)
|
||||||
|
expect(loc).toBe(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("calculateMaxNesting", () => {
|
||||||
|
it("should return 0 for empty AST", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
const nesting = analyzer.calculateMaxNesting(ast)
|
||||||
|
expect(nesting).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should estimate nesting for short functions", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.functions.push({
|
||||||
|
name: "test",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 3,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
isExported: false,
|
||||||
|
})
|
||||||
|
const nesting = analyzer.calculateMaxNesting(ast)
|
||||||
|
expect(nesting).toBe(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should estimate higher nesting for longer functions", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.functions.push({
|
||||||
|
name: "test",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 40,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
isExported: false,
|
||||||
|
})
|
||||||
|
const nesting = analyzer.calculateMaxNesting(ast)
|
||||||
|
expect(nesting).toBe(4)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return max nesting across multiple functions", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.functions.push(
|
||||||
|
{
|
||||||
|
name: "short",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 3,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
isExported: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "long",
|
||||||
|
lineStart: 5,
|
||||||
|
lineEnd: 60,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
isExported: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
const nesting = analyzer.calculateMaxNesting(ast)
|
||||||
|
expect(nesting).toBe(5)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should account for class methods", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.classes.push({
|
||||||
|
name: "MyClass",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 50,
|
||||||
|
methods: [
|
||||||
|
{
|
||||||
|
name: "method1",
|
||||||
|
lineStart: 2,
|
||||||
|
lineEnd: 25,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
visibility: "public",
|
||||||
|
isStatic: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
properties: [],
|
||||||
|
implements: [],
|
||||||
|
isExported: false,
|
||||||
|
isAbstract: false,
|
||||||
|
})
|
||||||
|
const nesting = analyzer.calculateMaxNesting(ast)
|
||||||
|
expect(nesting).toBeGreaterThan(1)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("calculateCyclomaticComplexity", () => {
|
||||||
|
it("should return 1 for empty AST", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
const complexity = analyzer.calculateCyclomaticComplexity(ast)
|
||||||
|
expect(complexity).toBe(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should increase complexity for functions", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.functions.push({
|
||||||
|
name: "test",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 20,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
isExported: false,
|
||||||
|
})
|
||||||
|
const complexity = analyzer.calculateCyclomaticComplexity(ast)
|
||||||
|
expect(complexity).toBeGreaterThan(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should increase complexity for class methods", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.classes.push({
|
||||||
|
name: "MyClass",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 50,
|
||||||
|
methods: [
|
||||||
|
{
|
||||||
|
name: "method1",
|
||||||
|
lineStart: 2,
|
||||||
|
lineEnd: 20,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
visibility: "public",
|
||||||
|
isStatic: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "method2",
|
||||||
|
lineStart: 22,
|
||||||
|
lineEnd: 45,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
visibility: "public",
|
||||||
|
isStatic: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
properties: [],
|
||||||
|
implements: [],
|
||||||
|
isExported: false,
|
||||||
|
isAbstract: false,
|
||||||
|
})
|
||||||
|
const complexity = analyzer.calculateCyclomaticComplexity(ast)
|
||||||
|
expect(complexity).toBeGreaterThan(2)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("calculateComplexityScore", () => {
|
||||||
|
it("should return 0 for minimal values", () => {
|
||||||
|
const score = analyzer.calculateComplexityScore(0, 0, 0)
|
||||||
|
expect(score).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return 100 for maximum values", () => {
|
||||||
|
const score = analyzer.calculateComplexityScore(1000, 10, 50)
|
||||||
|
expect(score).toBe(100)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return intermediate values", () => {
|
||||||
|
const score = analyzer.calculateComplexityScore(100, 3, 10)
|
||||||
|
expect(score).toBeGreaterThan(0)
|
||||||
|
expect(score).toBeLessThan(100)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("resolveDependencies", () => {
|
||||||
|
it("should resolve relative imports", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.imports.push({
|
||||||
|
name: "foo",
|
||||||
|
from: "./utils",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||||
|
expect(deps).toHaveLength(1)
|
||||||
|
expect(deps[0]).toBe("/project/src/utils.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should resolve parent directory imports", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.imports.push({
|
||||||
|
name: "config",
|
||||||
|
from: "../config",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
const deps = analyzer.resolveDependencies("/project/src/utils/helper.ts", ast)
|
||||||
|
expect(deps).toHaveLength(1)
|
||||||
|
expect(deps[0]).toBe("/project/src/config.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should ignore external imports", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.imports.push({
|
||||||
|
name: "React",
|
||||||
|
from: "react",
|
||||||
|
line: 1,
|
||||||
|
type: "external",
|
||||||
|
isDefault: true,
|
||||||
|
})
|
||||||
|
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||||
|
expect(deps).toHaveLength(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should ignore builtin imports", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.imports.push({
|
||||||
|
name: "fs",
|
||||||
|
from: "node:fs",
|
||||||
|
line: 1,
|
||||||
|
type: "builtin",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||||
|
expect(deps).toHaveLength(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle .js extension to .ts conversion", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.imports.push({
|
||||||
|
name: "util",
|
||||||
|
from: "./util.js",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||||
|
expect(deps).toHaveLength(1)
|
||||||
|
expect(deps[0]).toBe("/project/src/util.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should deduplicate dependencies", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.imports.push(
|
||||||
|
{
|
||||||
|
name: "foo",
|
||||||
|
from: "./utils",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "bar",
|
||||||
|
from: "./utils",
|
||||||
|
line: 2,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||||
|
expect(deps).toHaveLength(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should sort dependencies", () => {
|
||||||
|
const ast = createEmptyFileAST()
|
||||||
|
ast.imports.push(
|
||||||
|
{
|
||||||
|
name: "c",
|
||||||
|
from: "./c",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "a",
|
||||||
|
from: "./a",
|
||||||
|
line: 2,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "b",
|
||||||
|
from: "./b",
|
||||||
|
line: 3,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
const deps = analyzer.resolveDependencies("/project/src/index.ts", ast)
|
||||||
|
expect(deps).toEqual(["/project/src/a.ts", "/project/src/b.ts", "/project/src/c.ts"])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("findDependents", () => {
|
||||||
|
it("should find files that import the given file", () => {
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
|
||||||
|
const indexAST = createEmptyFileAST()
|
||||||
|
allASTs.set("/project/src/index.ts", indexAST)
|
||||||
|
|
||||||
|
const utilsAST = createEmptyFileAST()
|
||||||
|
utilsAST.imports.push({
|
||||||
|
name: "helper",
|
||||||
|
from: "./helper",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
allASTs.set("/project/src/utils.ts", utilsAST)
|
||||||
|
|
||||||
|
const dependents = analyzer.findDependents("/project/src/helper.ts", allASTs)
|
||||||
|
expect(dependents).toHaveLength(1)
|
||||||
|
expect(dependents[0]).toBe("/project/src/utils.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty array when no dependents", () => {
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
allASTs.set("/project/src/index.ts", createEmptyFileAST())
|
||||||
|
allASTs.set("/project/src/utils.ts", createEmptyFileAST())
|
||||||
|
|
||||||
|
const dependents = analyzer.findDependents("/project/src/helper.ts", allASTs)
|
||||||
|
expect(dependents).toHaveLength(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not include self as dependent", () => {
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
const selfAST = createEmptyFileAST()
|
||||||
|
selfAST.imports.push({
|
||||||
|
name: "foo",
|
||||||
|
from: "./helper",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
allASTs.set("/project/src/helper.ts", selfAST)
|
||||||
|
|
||||||
|
const dependents = analyzer.findDependents("/project/src/helper.ts", allASTs)
|
||||||
|
expect(dependents).toHaveLength(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle index.ts imports", () => {
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
|
||||||
|
const consumerAST = createEmptyFileAST()
|
||||||
|
consumerAST.imports.push({
|
||||||
|
name: "util",
|
||||||
|
from: "./utils",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
allASTs.set("/project/src/consumer.ts", consumerAST)
|
||||||
|
|
||||||
|
const dependents = analyzer.findDependents("/project/src/utils/index.ts", allASTs)
|
||||||
|
expect(dependents).toHaveLength(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should sort dependents", () => {
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
|
||||||
|
const fileC = createEmptyFileAST()
|
||||||
|
fileC.imports.push({
|
||||||
|
name: "x",
|
||||||
|
from: "./target",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
allASTs.set("/project/src/c.ts", fileC)
|
||||||
|
|
||||||
|
const fileA = createEmptyFileAST()
|
||||||
|
fileA.imports.push({
|
||||||
|
name: "x",
|
||||||
|
from: "./target",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
allASTs.set("/project/src/a.ts", fileA)
|
||||||
|
|
||||||
|
const fileB = createEmptyFileAST()
|
||||||
|
fileB.imports.push({
|
||||||
|
name: "x",
|
||||||
|
from: "./target",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
allASTs.set("/project/src/b.ts", fileB)
|
||||||
|
|
||||||
|
const dependents = analyzer.findDependents("/project/src/target.ts", allASTs)
|
||||||
|
expect(dependents).toEqual([
|
||||||
|
"/project/src/a.ts",
|
||||||
|
"/project/src/b.ts",
|
||||||
|
"/project/src/c.ts",
|
||||||
|
])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("classifyFileType", () => {
|
||||||
|
it("should classify test files by .test. pattern", () => {
|
||||||
|
expect(analyzer.classifyFileType("/project/src/utils.test.ts")).toBe("test")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify test files by .spec. pattern", () => {
|
||||||
|
expect(analyzer.classifyFileType("/project/src/utils.spec.ts")).toBe("test")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify test files by /tests/ directory", () => {
|
||||||
|
expect(analyzer.classifyFileType("/project/tests/utils.ts")).toBe("test")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify test files by /__tests__/ directory", () => {
|
||||||
|
expect(analyzer.classifyFileType("/project/src/__tests__/utils.ts")).toBe("test")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify .d.ts as types", () => {
|
||||||
|
expect(analyzer.classifyFileType("/project/src/types.d.ts")).toBe("types")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify /types/ directory as types", () => {
|
||||||
|
expect(analyzer.classifyFileType("/project/src/types/index.ts")).toBe("types")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify types.ts as types", () => {
|
||||||
|
expect(analyzer.classifyFileType("/project/src/types.ts")).toBe("types")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify config files", () => {
|
||||||
|
expect(analyzer.classifyFileType("/project/tsconfig.json")).toBe("config")
|
||||||
|
expect(analyzer.classifyFileType("/project/eslint.config.js")).toBe("config")
|
||||||
|
expect(analyzer.classifyFileType("/project/vitest.config.ts")).toBe("config")
|
||||||
|
expect(analyzer.classifyFileType("/project/jest.config.js")).toBe("config")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify regular source files", () => {
|
||||||
|
expect(analyzer.classifyFileType("/project/src/index.ts")).toBe("source")
|
||||||
|
expect(analyzer.classifyFileType("/project/src/utils.tsx")).toBe("source")
|
||||||
|
expect(analyzer.classifyFileType("/project/src/helper.js")).toBe("source")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should classify unknown file types", () => {
|
||||||
|
expect(analyzer.classifyFileType("/project/README.md")).toBe("unknown")
|
||||||
|
expect(analyzer.classifyFileType("/project/data.json")).toBe("unknown")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("isEntryPointFile", () => {
|
||||||
|
it("should identify index files as entry points", () => {
|
||||||
|
expect(analyzer.isEntryPointFile("/project/src/index.ts", 5)).toBe(true)
|
||||||
|
expect(analyzer.isEntryPointFile("/project/src/index.js", 5)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should identify files with no dependents as entry points", () => {
|
||||||
|
expect(analyzer.isEntryPointFile("/project/src/utils.ts", 0)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should identify main.ts as entry point", () => {
|
||||||
|
expect(analyzer.isEntryPointFile("/project/src/main.ts", 5)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should identify app.ts as entry point", () => {
|
||||||
|
expect(analyzer.isEntryPointFile("/project/src/app.tsx", 5)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should identify cli.ts as entry point", () => {
|
||||||
|
expect(analyzer.isEntryPointFile("/project/src/cli.ts", 5)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should identify server.ts as entry point", () => {
|
||||||
|
expect(analyzer.isEntryPointFile("/project/src/server.ts", 5)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not identify regular files with dependents as entry points", () => {
|
||||||
|
expect(analyzer.isEntryPointFile("/project/src/utils.ts", 3)).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("analyze", () => {
|
||||||
|
it("should produce complete FileMeta", () => {
|
||||||
|
const content = `import { helper } from "./helper"
|
||||||
|
|
||||||
|
export function main() {
|
||||||
|
return helper()
|
||||||
|
}
|
||||||
|
`
|
||||||
|
const ast = parser.parse(content, "ts")
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
allASTs.set("/project/src/index.ts", ast)
|
||||||
|
|
||||||
|
const meta = analyzer.analyze("/project/src/index.ts", ast, content, allASTs)
|
||||||
|
|
||||||
|
expect(meta.complexity).toBeDefined()
|
||||||
|
expect(meta.complexity.loc).toBeGreaterThan(0)
|
||||||
|
expect(meta.dependencies).toHaveLength(1)
|
||||||
|
expect(meta.fileType).toBe("source")
|
||||||
|
expect(meta.isEntryPoint).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should identify hub files", () => {
|
||||||
|
const content = `export const util = () => {}`
|
||||||
|
const ast = parser.parse(content, "ts")
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
|
||||||
|
for (let i = 0; i < 6; i++) {
|
||||||
|
const consumerAST = createEmptyFileAST()
|
||||||
|
consumerAST.imports.push({
|
||||||
|
name: "util",
|
||||||
|
from: "./shared",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
allASTs.set(`/project/src/consumer${i}.ts`, consumerAST)
|
||||||
|
}
|
||||||
|
|
||||||
|
const meta = analyzer.analyze("/project/src/shared.ts", ast, content, allASTs)
|
||||||
|
expect(meta.isHub).toBe(true)
|
||||||
|
expect(meta.dependents).toHaveLength(6)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not identify as hub with few dependents", () => {
|
||||||
|
const content = `export const util = () => {}`
|
||||||
|
const ast = parser.parse(content, "ts")
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
|
||||||
|
for (let i = 0; i < 3; i++) {
|
||||||
|
const consumerAST = createEmptyFileAST()
|
||||||
|
consumerAST.imports.push({
|
||||||
|
name: "util",
|
||||||
|
from: "./shared",
|
||||||
|
line: 1,
|
||||||
|
type: "internal",
|
||||||
|
isDefault: false,
|
||||||
|
})
|
||||||
|
allASTs.set(`/project/src/consumer${i}.ts`, consumerAST)
|
||||||
|
}
|
||||||
|
|
||||||
|
const meta = analyzer.analyze("/project/src/shared.ts", ast, content, allASTs)
|
||||||
|
expect(meta.isHub).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("analyzeAll", () => {
|
||||||
|
it("should analyze multiple files", () => {
|
||||||
|
const files = new Map<string, { ast: FileAST; content: string }>()
|
||||||
|
|
||||||
|
const indexContent = `import { util } from "./util"
|
||||||
|
export function main() { return util() }`
|
||||||
|
const indexAST = parser.parse(indexContent, "ts")
|
||||||
|
files.set("/project/src/index.ts", { ast: indexAST, content: indexContent })
|
||||||
|
|
||||||
|
const utilContent = `export function util() { return 42 }`
|
||||||
|
const utilAST = parser.parse(utilContent, "ts")
|
||||||
|
files.set("/project/src/util.ts", { ast: utilAST, content: utilContent })
|
||||||
|
|
||||||
|
const results = analyzer.analyzeAll(files)
|
||||||
|
|
||||||
|
expect(results.size).toBe(2)
|
||||||
|
expect(results.get("/project/src/index.ts")).toBeDefined()
|
||||||
|
expect(results.get("/project/src/util.ts")).toBeDefined()
|
||||||
|
|
||||||
|
const indexMeta = results.get("/project/src/index.ts")!
|
||||||
|
expect(indexMeta.dependencies).toContain("/project/src/util.ts")
|
||||||
|
|
||||||
|
const utilMeta = results.get("/project/src/util.ts")!
|
||||||
|
expect(utilMeta.dependents).toContain("/project/src/index.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle empty files map", () => {
|
||||||
|
const files = new Map<string, { ast: FileAST; content: string }>()
|
||||||
|
const results = analyzer.analyzeAll(files)
|
||||||
|
expect(results.size).toBe(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("calculateComplexity", () => {
|
||||||
|
it("should return complete complexity metrics", () => {
|
||||||
|
const content = `function complex() {
|
||||||
|
if (true) {
|
||||||
|
for (let i = 0; i < 10; i++) {
|
||||||
|
if (i % 2 === 0) {
|
||||||
|
console.log(i)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 42
|
||||||
|
}`
|
||||||
|
const ast = parser.parse(content, "ts")
|
||||||
|
const metrics = analyzer.calculateComplexity(ast, content)
|
||||||
|
|
||||||
|
expect(metrics.loc).toBeGreaterThan(0)
|
||||||
|
expect(metrics.nesting).toBeGreaterThan(0)
|
||||||
|
expect(metrics.cyclomaticComplexity).toBeGreaterThan(0)
|
||||||
|
expect(metrics.score).toBeGreaterThanOrEqual(0)
|
||||||
|
expect(metrics.score).toBeLessThanOrEqual(100)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("integration with ASTParser", () => {
|
||||||
|
it("should work with real parsed AST", () => {
|
||||||
|
const content = `import { readFile } from "node:fs"
|
||||||
|
import { helper } from "./helper"
|
||||||
|
import React from "react"
|
||||||
|
|
||||||
|
export class MyComponent {
|
||||||
|
private data: string[] = []
|
||||||
|
|
||||||
|
async loadData(): Promise<void> {
|
||||||
|
const content = await readFile("file.txt", "utf-8")
|
||||||
|
this.data = content.split("\\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
render() {
|
||||||
|
return this.data.map(line => <div>{line}</div>)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createComponent(): MyComponent {
|
||||||
|
return new MyComponent()
|
||||||
|
}
|
||||||
|
`
|
||||||
|
const ast = parser.parse(content, "tsx")
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
allASTs.set("/project/src/Component.tsx", ast)
|
||||||
|
|
||||||
|
const meta = analyzer.analyze("/project/src/Component.tsx", ast, content, allASTs)
|
||||||
|
|
||||||
|
expect(meta.complexity.loc).toBeGreaterThan(10)
|
||||||
|
expect(meta.dependencies).toContain("/project/src/helper.ts")
|
||||||
|
expect(meta.fileType).toBe("source")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,278 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"
|
||||||
|
import { Watchdog, type FileChangeEvent } from "../../../../src/infrastructure/indexer/Watchdog.js"
|
||||||
|
import * as fs from "node:fs/promises"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import * as os from "node:os"
|
||||||
|
|
||||||
|
describe("Watchdog", () => {
|
||||||
|
let watchdog: Watchdog
|
||||||
|
let tempDir: string
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "watchdog-test-"))
|
||||||
|
watchdog = new Watchdog({ debounceMs: 50 })
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(async () => {
|
||||||
|
await watchdog.stop()
|
||||||
|
await fs.rm(tempDir, { recursive: true, force: true })
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("constructor", () => {
|
||||||
|
it("should create with default options", () => {
|
||||||
|
const wd = new Watchdog()
|
||||||
|
expect(wd.isWatching()).toBe(false)
|
||||||
|
expect(wd.getRoot()).toBe("")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should accept custom options", () => {
|
||||||
|
const wd = new Watchdog({
|
||||||
|
debounceMs: 100,
|
||||||
|
extensions: [".ts"],
|
||||||
|
usePolling: true,
|
||||||
|
})
|
||||||
|
expect(wd.isWatching()).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("start/stop", () => {
|
||||||
|
it("should start watching", () => {
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
expect(watchdog.isWatching()).toBe(true)
|
||||||
|
expect(watchdog.getRoot()).toBe(tempDir)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should stop watching", async () => {
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
await watchdog.stop()
|
||||||
|
expect(watchdog.isWatching()).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle stop when not started", async () => {
|
||||||
|
await watchdog.stop()
|
||||||
|
expect(watchdog.isWatching()).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should restart when start called while running", async () => {
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
const newTempDir = await fs.mkdtemp(path.join(os.tmpdir(), "watchdog-test2-"))
|
||||||
|
|
||||||
|
watchdog.start(newTempDir)
|
||||||
|
expect(watchdog.isWatching()).toBe(true)
|
||||||
|
expect(watchdog.getRoot()).toBe(newTempDir)
|
||||||
|
|
||||||
|
await fs.rm(newTempDir, { recursive: true, force: true })
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("onFileChange/offFileChange", () => {
|
||||||
|
it("should register callback", () => {
|
||||||
|
const callback = vi.fn()
|
||||||
|
watchdog.onFileChange(callback)
|
||||||
|
expect(callback).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should remove callback", () => {
|
||||||
|
const callback = vi.fn()
|
||||||
|
watchdog.onFileChange(callback)
|
||||||
|
watchdog.offFileChange(callback)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle removing non-existent callback", () => {
|
||||||
|
const callback = vi.fn()
|
||||||
|
watchdog.offFileChange(callback)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getPendingCount", () => {
|
||||||
|
it("should return 0 when no pending changes", () => {
|
||||||
|
expect(watchdog.getPendingCount()).toBe(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getWatchedPaths", () => {
|
||||||
|
it("should return empty array when not watching", () => {
|
||||||
|
expect(watchdog.getWatchedPaths()).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("flushAll", () => {
|
||||||
|
it("should not throw when no pending changes", () => {
|
||||||
|
expect(() => watchdog.flushAll()).not.toThrow()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("file change detection", () => {
|
||||||
|
it("should detect new file creation", async () => {
|
||||||
|
const events: FileChangeEvent[] = []
|
||||||
|
watchdog.onFileChange((event) => events.push(event))
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||||
|
|
||||||
|
const testFile = path.join(tempDir, "test.ts")
|
||||||
|
await fs.writeFile(testFile, "const x = 1")
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||||
|
|
||||||
|
expect(events.length).toBeGreaterThanOrEqual(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should detect file modification", async () => {
|
||||||
|
const testFile = path.join(tempDir, "test.ts")
|
||||||
|
await fs.writeFile(testFile, "const x = 1")
|
||||||
|
|
||||||
|
const events: FileChangeEvent[] = []
|
||||||
|
watchdog.onFileChange((event) => events.push(event))
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||||
|
|
||||||
|
await fs.writeFile(testFile, "const x = 2")
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||||
|
|
||||||
|
expect(events.length).toBeGreaterThanOrEqual(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should detect file deletion", async () => {
|
||||||
|
const testFile = path.join(tempDir, "test.ts")
|
||||||
|
await fs.writeFile(testFile, "const x = 1")
|
||||||
|
|
||||||
|
const events: FileChangeEvent[] = []
|
||||||
|
watchdog.onFileChange((event) => events.push(event))
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||||
|
|
||||||
|
await fs.unlink(testFile)
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||||
|
|
||||||
|
expect(events.length).toBeGreaterThanOrEqual(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should ignore non-watched extensions", async () => {
|
||||||
|
const events: FileChangeEvent[] = []
|
||||||
|
watchdog.onFileChange((event) => events.push(event))
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||||
|
|
||||||
|
const txtFile = path.join(tempDir, "test.txt")
|
||||||
|
await fs.writeFile(txtFile, "hello")
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||||
|
|
||||||
|
const tsEvents = events.filter((e) => e.path.endsWith(".txt"))
|
||||||
|
expect(tsEvents.length).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should debounce rapid changes", async () => {
|
||||||
|
const testFile = path.join(tempDir, "test.ts")
|
||||||
|
await fs.writeFile(testFile, "const x = 1")
|
||||||
|
|
||||||
|
const events: FileChangeEvent[] = []
|
||||||
|
watchdog.onFileChange((event) => events.push(event))
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||||
|
|
||||||
|
await fs.writeFile(testFile, "const x = 2")
|
||||||
|
await fs.writeFile(testFile, "const x = 3")
|
||||||
|
await fs.writeFile(testFile, "const x = 4")
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||||
|
|
||||||
|
expect(events.length).toBeLessThanOrEqual(3)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("callback error handling", () => {
|
||||||
|
it("should continue after callback throws", async () => {
|
||||||
|
const events: FileChangeEvent[] = []
|
||||||
|
watchdog.onFileChange(() => {
|
||||||
|
throw new Error("Test error")
|
||||||
|
})
|
||||||
|
watchdog.onFileChange((event) => events.push(event))
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||||
|
|
||||||
|
const testFile = path.join(tempDir, "test.ts")
|
||||||
|
await fs.writeFile(testFile, "const x = 1")
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("custom extensions", () => {
|
||||||
|
it("should watch only specified extensions", async () => {
|
||||||
|
const customWatchdog = new Watchdog({
|
||||||
|
debounceMs: 50,
|
||||||
|
extensions: [".ts"],
|
||||||
|
})
|
||||||
|
|
||||||
|
const events: FileChangeEvent[] = []
|
||||||
|
customWatchdog.onFileChange((event) => events.push(event))
|
||||||
|
customWatchdog.start(tempDir)
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||||
|
|
||||||
|
const tsFile = path.join(tempDir, "test.ts")
|
||||||
|
const jsFile = path.join(tempDir, "test.js")
|
||||||
|
await fs.writeFile(tsFile, "const x = 1")
|
||||||
|
await fs.writeFile(jsFile, "const y = 2")
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||||
|
|
||||||
|
const jsEvents = events.filter((e) => e.path.endsWith(".js"))
|
||||||
|
expect(jsEvents.length).toBe(0)
|
||||||
|
|
||||||
|
await customWatchdog.stop()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("multiple callbacks", () => {
|
||||||
|
it("should notify all registered callbacks", async () => {
|
||||||
|
const events1: FileChangeEvent[] = []
|
||||||
|
const events2: FileChangeEvent[] = []
|
||||||
|
|
||||||
|
watchdog.onFileChange((event) => events1.push(event))
|
||||||
|
watchdog.onFileChange((event) => events2.push(event))
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||||
|
|
||||||
|
const testFile = path.join(tempDir, "test.ts")
|
||||||
|
await fs.writeFile(testFile, "const x = 1")
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||||
|
|
||||||
|
expect(events1.length).toBe(events2.length)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("event properties", () => {
|
||||||
|
it("should include correct event type and path", async () => {
|
||||||
|
const events: FileChangeEvent[] = []
|
||||||
|
watchdog.onFileChange((event) => events.push(event))
|
||||||
|
watchdog.start(tempDir)
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||||
|
|
||||||
|
const testFile = path.join(tempDir, "test.ts")
|
||||||
|
await fs.writeFile(testFile, "const x = 1")
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 200))
|
||||||
|
|
||||||
|
if (events.length > 0) {
|
||||||
|
const event = events[0]
|
||||||
|
expect(event.type).toMatch(/^(add|change)$/)
|
||||||
|
expect(event.path).toContain("test.ts")
|
||||||
|
expect(typeof event.timestamp).toBe("number")
|
||||||
|
expect(event.timestamp).toBeLessThanOrEqual(Date.now())
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,304 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
|
||||||
|
import type { LLMConfig } from "../../../../src/shared/constants/config.js"
|
||||||
|
import { IpuaroError } from "../../../../src/shared/errors/IpuaroError.js"
|
||||||
|
import { createUserMessage } from "../../../../src/domain/value-objects/ChatMessage.js"
|
||||||
|
|
||||||
|
const mockChatResponse = {
|
||||||
|
message: {
|
||||||
|
role: "assistant",
|
||||||
|
content: "This is a test response.",
|
||||||
|
tool_calls: undefined,
|
||||||
|
},
|
||||||
|
eval_count: 50,
|
||||||
|
done_reason: "stop",
|
||||||
|
}
|
||||||
|
|
||||||
|
const mockListResponse = {
|
||||||
|
models: [
|
||||||
|
{ name: "qwen2.5-coder:7b-instruct", size: 4000000000 },
|
||||||
|
{ name: "llama2:latest", size: 3500000000 },
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
const mockOllamaInstance = {
|
||||||
|
chat: vi.fn(),
|
||||||
|
list: vi.fn(),
|
||||||
|
pull: vi.fn(),
|
||||||
|
}
|
||||||
|
|
||||||
|
vi.mock("ollama", () => {
|
||||||
|
return {
|
||||||
|
Ollama: vi.fn(() => mockOllamaInstance),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const { OllamaClient } = await import("../../../../src/infrastructure/llm/OllamaClient.js")
|
||||||
|
|
||||||
|
describe("OllamaClient", () => {
|
||||||
|
const defaultConfig: LLMConfig = {
|
||||||
|
model: "qwen2.5-coder:7b-instruct",
|
||||||
|
contextWindow: 128000,
|
||||||
|
temperature: 0.1,
|
||||||
|
host: "http://localhost:11434",
|
||||||
|
timeout: 120000,
|
||||||
|
}
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks()
|
||||||
|
mockOllamaInstance.chat.mockResolvedValue(mockChatResponse)
|
||||||
|
mockOllamaInstance.list.mockResolvedValue(mockListResponse)
|
||||||
|
mockOllamaInstance.pull.mockResolvedValue({})
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks()
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("constructor", () => {
|
||||||
|
it("should create instance with config", () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
expect(client).toBeDefined()
|
||||||
|
expect(client.getModelName()).toBe("qwen2.5-coder:7b-instruct")
|
||||||
|
expect(client.getContextWindowSize()).toBe(128000)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("chat", () => {
|
||||||
|
it("should send messages and return response", async () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
const messages = [createUserMessage("Hello, world!")]
|
||||||
|
|
||||||
|
const response = await client.chat(messages)
|
||||||
|
|
||||||
|
expect(response.content).toBe("This is a test response.")
|
||||||
|
expect(response.tokens).toBe(50)
|
||||||
|
expect(response.stopReason).toBe("end")
|
||||||
|
expect(response.truncated).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should convert messages to Ollama format", async () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
const messages = [createUserMessage("Hello")]
|
||||||
|
|
||||||
|
await client.chat(messages)
|
||||||
|
|
||||||
|
expect(mockOllamaInstance.chat).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
model: "qwen2.5-coder:7b-instruct",
|
||||||
|
messages: expect.arrayContaining([
|
||||||
|
expect.objectContaining({
|
||||||
|
role: "user",
|
||||||
|
content: "Hello",
|
||||||
|
}),
|
||||||
|
]),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should pass tools when provided", async () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
const messages = [createUserMessage("Read file")]
|
||||||
|
const tools = [
|
||||||
|
{
|
||||||
|
name: "get_lines",
|
||||||
|
description: "Get lines from file",
|
||||||
|
parameters: [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string" as const,
|
||||||
|
description: "File path",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
await client.chat(messages, tools)
|
||||||
|
|
||||||
|
expect(mockOllamaInstance.chat).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
tools: expect.arrayContaining([
|
||||||
|
expect.objectContaining({
|
||||||
|
type: "function",
|
||||||
|
function: expect.objectContaining({
|
||||||
|
name: "get_lines",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
]),
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract tool calls from response", async () => {
|
||||||
|
mockOllamaInstance.chat.mockResolvedValue({
|
||||||
|
message: {
|
||||||
|
role: "assistant",
|
||||||
|
content: "",
|
||||||
|
tool_calls: [
|
||||||
|
{
|
||||||
|
function: {
|
||||||
|
name: "get_lines",
|
||||||
|
arguments: { path: "src/index.ts" },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
eval_count: 30,
|
||||||
|
})
|
||||||
|
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
const response = await client.chat([createUserMessage("Read file")])
|
||||||
|
|
||||||
|
expect(response.toolCalls).toHaveLength(1)
|
||||||
|
expect(response.toolCalls[0].name).toBe("get_lines")
|
||||||
|
expect(response.toolCalls[0].params).toEqual({ path: "src/index.ts" })
|
||||||
|
expect(response.stopReason).toBe("tool_use")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle connection errors", async () => {
|
||||||
|
mockOllamaInstance.chat.mockRejectedValue(new Error("fetch failed"))
|
||||||
|
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
await expect(client.chat([createUserMessage("Hello")])).rejects.toThrow(IpuaroError)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle model not found errors", async () => {
|
||||||
|
mockOllamaInstance.chat.mockRejectedValue(new Error("model not found"))
|
||||||
|
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
await expect(client.chat([createUserMessage("Hello")])).rejects.toThrow(/not found/)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("countTokens", () => {
|
||||||
|
it("should estimate tokens for text", async () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
const count = await client.countTokens("Hello, world!")
|
||||||
|
|
||||||
|
expect(count).toBeGreaterThan(0)
|
||||||
|
expect(typeof count).toBe("number")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("isAvailable", () => {
|
||||||
|
it("should return true when Ollama is available", async () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
const available = await client.isAvailable()
|
||||||
|
|
||||||
|
expect(available).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false when Ollama is not available", async () => {
|
||||||
|
mockOllamaInstance.list.mockRejectedValue(new Error("Connection refused"))
|
||||||
|
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
const available = await client.isAvailable()
|
||||||
|
|
||||||
|
expect(available).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getModelName", () => {
|
||||||
|
it("should return configured model name", () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
expect(client.getModelName()).toBe("qwen2.5-coder:7b-instruct")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getContextWindowSize", () => {
|
||||||
|
it("should return configured context window size", () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
expect(client.getContextWindowSize()).toBe(128000)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("pullModel", () => {
|
||||||
|
it("should pull model successfully", async () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
await expect(client.pullModel("llama2")).resolves.toBeUndefined()
|
||||||
|
expect(mockOllamaInstance.pull).toHaveBeenCalledWith({
|
||||||
|
model: "llama2",
|
||||||
|
stream: false,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should throw on pull failure", async () => {
|
||||||
|
mockOllamaInstance.pull.mockRejectedValue(new Error("Network error"))
|
||||||
|
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
await expect(client.pullModel("llama2")).rejects.toThrow(IpuaroError)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("hasModel", () => {
|
||||||
|
it("should return true for available model", async () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
const has = await client.hasModel("qwen2.5-coder:7b-instruct")
|
||||||
|
|
||||||
|
expect(has).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return true for model prefix", async () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
const has = await client.hasModel("llama2")
|
||||||
|
|
||||||
|
expect(has).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false for missing model", async () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
const has = await client.hasModel("unknown-model")
|
||||||
|
|
||||||
|
expect(has).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false when list fails", async () => {
|
||||||
|
mockOllamaInstance.list.mockRejectedValue(new Error("Error"))
|
||||||
|
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
const has = await client.hasModel("any-model")
|
||||||
|
|
||||||
|
expect(has).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("listModels", () => {
|
||||||
|
it("should return list of model names", async () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
const models = await client.listModels()
|
||||||
|
|
||||||
|
expect(models).toContain("qwen2.5-coder:7b-instruct")
|
||||||
|
expect(models).toContain("llama2:latest")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should throw on list failure", async () => {
|
||||||
|
mockOllamaInstance.list.mockRejectedValue(new Error("Network error"))
|
||||||
|
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
await expect(client.listModels()).rejects.toThrow(IpuaroError)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("abort", () => {
|
||||||
|
it("should not throw when no request is in progress", () => {
|
||||||
|
const client = new OllamaClient(defaultConfig)
|
||||||
|
|
||||||
|
expect(() => client.abort()).not.toThrow()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,251 @@
|
|||||||
|
import { describe, it, expect } from "vitest"
|
||||||
|
import {
|
||||||
|
parseToolCalls,
|
||||||
|
formatToolCallsAsXml,
|
||||||
|
extractThinking,
|
||||||
|
hasToolCalls,
|
||||||
|
validateToolCallParams,
|
||||||
|
} from "../../../../src/infrastructure/llm/ResponseParser.js"
|
||||||
|
import { createToolCall } from "../../../../src/domain/value-objects/ToolCall.js"
|
||||||
|
|
||||||
|
describe("ResponseParser", () => {
|
||||||
|
describe("parseToolCalls", () => {
|
||||||
|
it("should parse a single tool call", () => {
|
||||||
|
const response = `<tool_call name="get_lines">
|
||||||
|
<path>src/index.ts</path>
|
||||||
|
<start>1</start>
|
||||||
|
<end>10</end>
|
||||||
|
</tool_call>`
|
||||||
|
|
||||||
|
const result = parseToolCalls(response)
|
||||||
|
|
||||||
|
expect(result.toolCalls).toHaveLength(1)
|
||||||
|
expect(result.toolCalls[0].name).toBe("get_lines")
|
||||||
|
expect(result.toolCalls[0].params).toEqual({
|
||||||
|
path: "src/index.ts",
|
||||||
|
start: 1,
|
||||||
|
end: 10,
|
||||||
|
})
|
||||||
|
expect(result.hasParseErrors).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse multiple tool calls", () => {
|
||||||
|
const response = `
|
||||||
|
<tool_call name="get_lines">
|
||||||
|
<path>src/a.ts</path>
|
||||||
|
</tool_call>
|
||||||
|
<tool_call name="get_function">
|
||||||
|
<path>src/b.ts</path>
|
||||||
|
<name>myFunc</name>
|
||||||
|
</tool_call>
|
||||||
|
`
|
||||||
|
|
||||||
|
const result = parseToolCalls(response)
|
||||||
|
|
||||||
|
expect(result.toolCalls).toHaveLength(2)
|
||||||
|
expect(result.toolCalls[0].name).toBe("get_lines")
|
||||||
|
expect(result.toolCalls[1].name).toBe("get_function")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should extract text content without tool calls", () => {
|
||||||
|
const response = `Let me check the file.
|
||||||
|
<tool_call name="get_lines">
|
||||||
|
<path>src/index.ts</path>
|
||||||
|
</tool_call>
|
||||||
|
Here's what I found.`
|
||||||
|
|
||||||
|
const result = parseToolCalls(response)
|
||||||
|
|
||||||
|
expect(result.content).toContain("Let me check the file.")
|
||||||
|
expect(result.content).toContain("Here's what I found.")
|
||||||
|
expect(result.content).not.toContain("tool_call")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse boolean values", () => {
|
||||||
|
const response = `<tool_call name="git_diff">
|
||||||
|
<staged>true</staged>
|
||||||
|
</tool_call>`
|
||||||
|
|
||||||
|
const result = parseToolCalls(response)
|
||||||
|
|
||||||
|
expect(result.toolCalls[0].params.staged).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse null values", () => {
|
||||||
|
const response = `<tool_call name="test">
|
||||||
|
<value>null</value>
|
||||||
|
</tool_call>`
|
||||||
|
|
||||||
|
const result = parseToolCalls(response)
|
||||||
|
|
||||||
|
expect(result.toolCalls[0].params.value).toBe(null)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse JSON arrays", () => {
|
||||||
|
const response = `<tool_call name="git_commit">
|
||||||
|
<files>["a.ts", "b.ts"]</files>
|
||||||
|
</tool_call>`
|
||||||
|
|
||||||
|
const result = parseToolCalls(response)
|
||||||
|
|
||||||
|
expect(result.toolCalls[0].params.files).toEqual(["a.ts", "b.ts"])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should parse JSON objects", () => {
|
||||||
|
const response = `<tool_call name="test">
|
||||||
|
<config>{"key": "value"}</config>
|
||||||
|
</tool_call>`
|
||||||
|
|
||||||
|
const result = parseToolCalls(response)
|
||||||
|
|
||||||
|
expect(result.toolCalls[0].params.config).toEqual({ key: "value" })
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty array for response without tool calls", () => {
|
||||||
|
const response = "This is just a regular response."
|
||||||
|
|
||||||
|
const result = parseToolCalls(response)
|
||||||
|
|
||||||
|
expect(result.toolCalls).toHaveLength(0)
|
||||||
|
expect(result.content).toBe(response)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle named param syntax", () => {
|
||||||
|
const response = `<tool_call name="get_lines">
|
||||||
|
<param name="path">src/index.ts</param>
|
||||||
|
<param name="start">5</param>
|
||||||
|
</tool_call>`
|
||||||
|
|
||||||
|
const result = parseToolCalls(response)
|
||||||
|
|
||||||
|
expect(result.toolCalls[0].params).toEqual({
|
||||||
|
path: "src/index.ts",
|
||||||
|
start: 5,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("formatToolCallsAsXml", () => {
|
||||||
|
it("should format tool calls as XML", () => {
|
||||||
|
const toolCalls = [createToolCall("1", "get_lines", { path: "src/index.ts", start: 1 })]
|
||||||
|
|
||||||
|
const xml = formatToolCallsAsXml(toolCalls)
|
||||||
|
|
||||||
|
expect(xml).toContain('<tool_call name="get_lines">')
|
||||||
|
expect(xml).toContain("<path>src/index.ts</path>")
|
||||||
|
expect(xml).toContain("<start>1</start>")
|
||||||
|
expect(xml).toContain("</tool_call>")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should format multiple tool calls", () => {
|
||||||
|
const toolCalls = [
|
||||||
|
createToolCall("1", "get_lines", { path: "a.ts" }),
|
||||||
|
createToolCall("2", "get_function", { path: "b.ts", name: "foo" }),
|
||||||
|
]
|
||||||
|
|
||||||
|
const xml = formatToolCallsAsXml(toolCalls)
|
||||||
|
|
||||||
|
expect(xml).toContain('<tool_call name="get_lines">')
|
||||||
|
expect(xml).toContain('<tool_call name="get_function">')
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle object values as JSON", () => {
|
||||||
|
const toolCalls = [createToolCall("1", "test", { data: { key: "value" } })]
|
||||||
|
|
||||||
|
const xml = formatToolCallsAsXml(toolCalls)
|
||||||
|
|
||||||
|
expect(xml).toContain('{"key":"value"}')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("extractThinking", () => {
|
||||||
|
it("should extract thinking content", () => {
|
||||||
|
const response = `<thinking>Let me analyze this.</thinking>
|
||||||
|
Here is the answer.`
|
||||||
|
|
||||||
|
const result = extractThinking(response)
|
||||||
|
|
||||||
|
expect(result.thinking).toBe("Let me analyze this.")
|
||||||
|
expect(result.content).toContain("Here is the answer.")
|
||||||
|
expect(result.content).not.toContain("thinking")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle multiple thinking blocks", () => {
|
||||||
|
const response = `<thinking>First thought.</thinking>
|
||||||
|
Some content.
|
||||||
|
<thinking>Second thought.</thinking>
|
||||||
|
More content.`
|
||||||
|
|
||||||
|
const result = extractThinking(response)
|
||||||
|
|
||||||
|
expect(result.thinking).toContain("First thought.")
|
||||||
|
expect(result.thinking).toContain("Second thought.")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return original content if no thinking", () => {
|
||||||
|
const response = "Just a regular response."
|
||||||
|
|
||||||
|
const result = extractThinking(response)
|
||||||
|
|
||||||
|
expect(result.thinking).toBe("")
|
||||||
|
expect(result.content).toBe(response)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("hasToolCalls", () => {
|
||||||
|
it("should return true if response has tool calls", () => {
|
||||||
|
const response = `<tool_call name="get_lines"><path>a.ts</path></tool_call>`
|
||||||
|
|
||||||
|
expect(hasToolCalls(response)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false if response has no tool calls", () => {
|
||||||
|
const response = "Just text without tool calls."
|
||||||
|
|
||||||
|
expect(hasToolCalls(response)).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("validateToolCallParams", () => {
|
||||||
|
it("should return valid for complete params", () => {
|
||||||
|
const params = { path: "src/index.ts", start: 1, end: 10 }
|
||||||
|
const required = ["path", "start", "end"]
|
||||||
|
|
||||||
|
const result = validateToolCallParams("get_lines", params, required)
|
||||||
|
|
||||||
|
expect(result.valid).toBe(true)
|
||||||
|
expect(result.errors).toHaveLength(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return errors for missing required params", () => {
|
||||||
|
const params = { path: "src/index.ts" }
|
||||||
|
const required = ["path", "start", "end"]
|
||||||
|
|
||||||
|
const result = validateToolCallParams("get_lines", params, required)
|
||||||
|
|
||||||
|
expect(result.valid).toBe(false)
|
||||||
|
expect(result.errors).toHaveLength(2)
|
||||||
|
expect(result.errors).toContain("Missing required parameter: start")
|
||||||
|
expect(result.errors).toContain("Missing required parameter: end")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should treat null and undefined as missing", () => {
|
||||||
|
const params = { path: null, start: undefined }
|
||||||
|
const required = ["path", "start"]
|
||||||
|
|
||||||
|
const result = validateToolCallParams("test", params, required)
|
||||||
|
|
||||||
|
expect(result.valid).toBe(false)
|
||||||
|
expect(result.errors).toHaveLength(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should accept empty required array", () => {
|
||||||
|
const params = {}
|
||||||
|
const required: string[] = []
|
||||||
|
|
||||||
|
const result = validateToolCallParams("git_status", params, required)
|
||||||
|
|
||||||
|
expect(result.valid).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
278
packages/ipuaro/tests/unit/infrastructure/llm/prompts.test.ts
Normal file
278
packages/ipuaro/tests/unit/infrastructure/llm/prompts.test.ts
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
import { describe, it, expect } from "vitest"
|
||||||
|
import {
|
||||||
|
SYSTEM_PROMPT,
|
||||||
|
buildInitialContext,
|
||||||
|
buildFileContext,
|
||||||
|
truncateContext,
|
||||||
|
type ProjectStructure,
|
||||||
|
} from "../../../../src/infrastructure/llm/prompts.js"
|
||||||
|
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||||
|
import type { FileMeta } from "../../../../src/domain/value-objects/FileMeta.js"
|
||||||
|
|
||||||
|
describe("prompts", () => {
|
||||||
|
describe("SYSTEM_PROMPT", () => {
|
||||||
|
it("should be a non-empty string", () => {
|
||||||
|
expect(typeof SYSTEM_PROMPT).toBe("string")
|
||||||
|
expect(SYSTEM_PROMPT.length).toBeGreaterThan(100)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should contain core principles", () => {
|
||||||
|
expect(SYSTEM_PROMPT).toContain("Lazy Loading")
|
||||||
|
expect(SYSTEM_PROMPT).toContain("Precision")
|
||||||
|
expect(SYSTEM_PROMPT).toContain("Safety")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should list available tools", () => {
|
||||||
|
expect(SYSTEM_PROMPT).toContain("get_lines")
|
||||||
|
expect(SYSTEM_PROMPT).toContain("edit_lines")
|
||||||
|
expect(SYSTEM_PROMPT).toContain("find_references")
|
||||||
|
expect(SYSTEM_PROMPT).toContain("git_status")
|
||||||
|
expect(SYSTEM_PROMPT).toContain("run_command")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include safety rules", () => {
|
||||||
|
expect(SYSTEM_PROMPT).toContain("Safety Rules")
|
||||||
|
expect(SYSTEM_PROMPT).toContain("Never execute commands that could harm")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("buildInitialContext", () => {
|
||||||
|
const structure: ProjectStructure = {
|
||||||
|
name: "my-project",
|
||||||
|
rootPath: "/home/user/my-project",
|
||||||
|
files: ["src/index.ts", "src/utils.ts", "package.json"],
|
||||||
|
directories: ["src", "tests"],
|
||||||
|
}
|
||||||
|
|
||||||
|
const asts = new Map<string, FileAST>([
|
||||||
|
[
|
||||||
|
"src/index.ts",
|
||||||
|
{
|
||||||
|
imports: [],
|
||||||
|
exports: [],
|
||||||
|
functions: [
|
||||||
|
{
|
||||||
|
name: "main",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 10,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
isExported: true,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
classes: [],
|
||||||
|
interfaces: [],
|
||||||
|
typeAliases: [],
|
||||||
|
parseError: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"src/utils.ts",
|
||||||
|
{
|
||||||
|
imports: [],
|
||||||
|
exports: [],
|
||||||
|
functions: [],
|
||||||
|
classes: [
|
||||||
|
{
|
||||||
|
name: "Helper",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 20,
|
||||||
|
methods: [],
|
||||||
|
properties: [],
|
||||||
|
implements: [],
|
||||||
|
isExported: true,
|
||||||
|
isAbstract: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
interfaces: [],
|
||||||
|
typeAliases: [],
|
||||||
|
parseError: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
])
|
||||||
|
|
||||||
|
it("should include project header", () => {
|
||||||
|
const context = buildInitialContext(structure, asts)
|
||||||
|
|
||||||
|
expect(context).toContain("# Project: my-project")
|
||||||
|
expect(context).toContain("Root: /home/user/my-project")
|
||||||
|
expect(context).toContain("Files: 3")
|
||||||
|
expect(context).toContain("Directories: 2")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include directory structure", () => {
|
||||||
|
const context = buildInitialContext(structure, asts)
|
||||||
|
|
||||||
|
expect(context).toContain("## Structure")
|
||||||
|
expect(context).toContain("src/")
|
||||||
|
expect(context).toContain("tests/")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include file overview with AST summaries", () => {
|
||||||
|
const context = buildInitialContext(structure, asts)
|
||||||
|
|
||||||
|
expect(context).toContain("## Files")
|
||||||
|
expect(context).toContain("src/index.ts")
|
||||||
|
expect(context).toContain("fn: main")
|
||||||
|
expect(context).toContain("src/utils.ts")
|
||||||
|
expect(context).toContain("class: Helper")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include file flags from metadata", () => {
|
||||||
|
const metas = new Map<string, FileMeta>([
|
||||||
|
[
|
||||||
|
"src/index.ts",
|
||||||
|
{
|
||||||
|
complexity: { loc: 100, nesting: 3, cyclomaticComplexity: 10, score: 75 },
|
||||||
|
dependencies: [],
|
||||||
|
dependents: ["a.ts", "b.ts", "c.ts", "d.ts", "e.ts", "f.ts"],
|
||||||
|
isHub: true,
|
||||||
|
isEntryPoint: true,
|
||||||
|
fileType: "source",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
])
|
||||||
|
|
||||||
|
const context = buildInitialContext(structure, asts, metas)
|
||||||
|
|
||||||
|
expect(context).toContain("(hub, entry, complex)")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("buildFileContext", () => {
|
||||||
|
const ast: FileAST = {
|
||||||
|
imports: [
|
||||||
|
{ name: "fs", from: "node:fs", line: 1, type: "builtin", isDefault: false },
|
||||||
|
{ name: "helper", from: "./helper", line: 2, type: "internal", isDefault: true },
|
||||||
|
],
|
||||||
|
exports: [
|
||||||
|
{ name: "main", line: 10, isDefault: false, kind: "function" },
|
||||||
|
{ name: "Config", line: 20, isDefault: true, kind: "class" },
|
||||||
|
],
|
||||||
|
functions: [
|
||||||
|
{
|
||||||
|
name: "main",
|
||||||
|
lineStart: 10,
|
||||||
|
lineEnd: 30,
|
||||||
|
params: [
|
||||||
|
{ name: "args", optional: false, hasDefault: false },
|
||||||
|
{ name: "options", optional: true, hasDefault: false },
|
||||||
|
],
|
||||||
|
isAsync: true,
|
||||||
|
isExported: true,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
classes: [
|
||||||
|
{
|
||||||
|
name: "Config",
|
||||||
|
lineStart: 40,
|
||||||
|
lineEnd: 80,
|
||||||
|
methods: [
|
||||||
|
{
|
||||||
|
name: "load",
|
||||||
|
lineStart: 50,
|
||||||
|
lineEnd: 60,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
visibility: "public",
|
||||||
|
isStatic: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
properties: [],
|
||||||
|
extends: "BaseConfig",
|
||||||
|
implements: ["IConfig"],
|
||||||
|
isExported: true,
|
||||||
|
isAbstract: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
interfaces: [],
|
||||||
|
typeAliases: [],
|
||||||
|
parseError: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
it("should include file path header", () => {
|
||||||
|
const context = buildFileContext("src/index.ts", ast)
|
||||||
|
|
||||||
|
expect(context).toContain("## src/index.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include imports section", () => {
|
||||||
|
const context = buildFileContext("src/index.ts", ast)
|
||||||
|
|
||||||
|
expect(context).toContain("### Imports")
|
||||||
|
expect(context).toContain('fs from "node:fs" (builtin)')
|
||||||
|
expect(context).toContain('helper from "./helper" (internal)')
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include exports section", () => {
|
||||||
|
const context = buildFileContext("src/index.ts", ast)
|
||||||
|
|
||||||
|
expect(context).toContain("### Exports")
|
||||||
|
expect(context).toContain("function main")
|
||||||
|
expect(context).toContain("class Config (default)")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include functions section", () => {
|
||||||
|
const context = buildFileContext("src/index.ts", ast)
|
||||||
|
|
||||||
|
expect(context).toContain("### Functions")
|
||||||
|
expect(context).toContain("async main(args, options)")
|
||||||
|
expect(context).toContain("[10-30]")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include classes section with methods", () => {
|
||||||
|
const context = buildFileContext("src/index.ts", ast)
|
||||||
|
|
||||||
|
expect(context).toContain("### Classes")
|
||||||
|
expect(context).toContain("Config extends BaseConfig implements IConfig")
|
||||||
|
expect(context).toContain("[40-80]")
|
||||||
|
expect(context).toContain("load()")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include metadata section when provided", () => {
|
||||||
|
const meta: FileMeta = {
|
||||||
|
complexity: { loc: 100, nesting: 3, cyclomaticComplexity: 10, score: 65 },
|
||||||
|
dependencies: ["a.ts", "b.ts"],
|
||||||
|
dependents: ["c.ts"],
|
||||||
|
isHub: false,
|
||||||
|
isEntryPoint: true,
|
||||||
|
fileType: "source",
|
||||||
|
}
|
||||||
|
|
||||||
|
const context = buildFileContext("src/index.ts", ast, meta)
|
||||||
|
|
||||||
|
expect(context).toContain("### Metadata")
|
||||||
|
expect(context).toContain("LOC: 100")
|
||||||
|
expect(context).toContain("Complexity: 65/100")
|
||||||
|
expect(context).toContain("Dependencies: 2")
|
||||||
|
expect(context).toContain("Dependents: 1")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("truncateContext", () => {
|
||||||
|
it("should return original context if within limit", () => {
|
||||||
|
const context = "Short context"
|
||||||
|
|
||||||
|
const result = truncateContext(context, 1000)
|
||||||
|
|
||||||
|
expect(result).toBe(context)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should truncate long context", () => {
|
||||||
|
const context = "a".repeat(1000)
|
||||||
|
|
||||||
|
const result = truncateContext(context, 100)
|
||||||
|
|
||||||
|
expect(result.length).toBeLessThan(500)
|
||||||
|
expect(result).toContain("truncated")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should break at newline boundary", () => {
|
||||||
|
const context = "Line 1\nLine 2\nLine 3\n" + "a".repeat(1000)
|
||||||
|
|
||||||
|
const result = truncateContext(context, 50)
|
||||||
|
|
||||||
|
expect(result).toContain("truncated")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
287
packages/ipuaro/tests/unit/infrastructure/llm/toolDefs.test.ts
Normal file
287
packages/ipuaro/tests/unit/infrastructure/llm/toolDefs.test.ts
Normal file
@@ -0,0 +1,287 @@
|
|||||||
|
import { describe, it, expect } from "vitest"
|
||||||
|
import {
|
||||||
|
ALL_TOOLS,
|
||||||
|
READ_TOOLS,
|
||||||
|
EDIT_TOOLS,
|
||||||
|
SEARCH_TOOLS,
|
||||||
|
ANALYSIS_TOOLS,
|
||||||
|
GIT_TOOLS,
|
||||||
|
RUN_TOOLS,
|
||||||
|
CONFIRMATION_TOOLS,
|
||||||
|
requiresConfirmation,
|
||||||
|
getToolDef,
|
||||||
|
getToolsByCategory,
|
||||||
|
GET_LINES_TOOL,
|
||||||
|
GET_FUNCTION_TOOL,
|
||||||
|
GET_CLASS_TOOL,
|
||||||
|
GET_STRUCTURE_TOOL,
|
||||||
|
EDIT_LINES_TOOL,
|
||||||
|
CREATE_FILE_TOOL,
|
||||||
|
DELETE_FILE_TOOL,
|
||||||
|
FIND_REFERENCES_TOOL,
|
||||||
|
FIND_DEFINITION_TOOL,
|
||||||
|
GET_DEPENDENCIES_TOOL,
|
||||||
|
GET_DEPENDENTS_TOOL,
|
||||||
|
GET_COMPLEXITY_TOOL,
|
||||||
|
GET_TODOS_TOOL,
|
||||||
|
GIT_STATUS_TOOL,
|
||||||
|
GIT_DIFF_TOOL,
|
||||||
|
GIT_COMMIT_TOOL,
|
||||||
|
RUN_COMMAND_TOOL,
|
||||||
|
RUN_TESTS_TOOL,
|
||||||
|
} from "../../../../src/infrastructure/llm/toolDefs.js"
|
||||||
|
|
||||||
|
describe("toolDefs", () => {
|
||||||
|
describe("ALL_TOOLS", () => {
|
||||||
|
it("should contain exactly 18 tools", () => {
|
||||||
|
expect(ALL_TOOLS).toHaveLength(18)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have unique tool names", () => {
|
||||||
|
const names = ALL_TOOLS.map((t) => t.name)
|
||||||
|
const uniqueNames = new Set(names)
|
||||||
|
expect(uniqueNames.size).toBe(18)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have valid structure for all tools", () => {
|
||||||
|
for (const tool of ALL_TOOLS) {
|
||||||
|
expect(tool.name).toBeDefined()
|
||||||
|
expect(typeof tool.name).toBe("string")
|
||||||
|
expect(tool.description).toBeDefined()
|
||||||
|
expect(typeof tool.description).toBe("string")
|
||||||
|
expect(Array.isArray(tool.parameters)).toBe(true)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("READ_TOOLS", () => {
|
||||||
|
it("should contain 4 read tools", () => {
|
||||||
|
expect(READ_TOOLS).toHaveLength(4)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include all read tools", () => {
|
||||||
|
expect(READ_TOOLS).toContain(GET_LINES_TOOL)
|
||||||
|
expect(READ_TOOLS).toContain(GET_FUNCTION_TOOL)
|
||||||
|
expect(READ_TOOLS).toContain(GET_CLASS_TOOL)
|
||||||
|
expect(READ_TOOLS).toContain(GET_STRUCTURE_TOOL)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("EDIT_TOOLS", () => {
|
||||||
|
it("should contain 3 edit tools", () => {
|
||||||
|
expect(EDIT_TOOLS).toHaveLength(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include all edit tools", () => {
|
||||||
|
expect(EDIT_TOOLS).toContain(EDIT_LINES_TOOL)
|
||||||
|
expect(EDIT_TOOLS).toContain(CREATE_FILE_TOOL)
|
||||||
|
expect(EDIT_TOOLS).toContain(DELETE_FILE_TOOL)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("SEARCH_TOOLS", () => {
|
||||||
|
it("should contain 2 search tools", () => {
|
||||||
|
expect(SEARCH_TOOLS).toHaveLength(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include all search tools", () => {
|
||||||
|
expect(SEARCH_TOOLS).toContain(FIND_REFERENCES_TOOL)
|
||||||
|
expect(SEARCH_TOOLS).toContain(FIND_DEFINITION_TOOL)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("ANALYSIS_TOOLS", () => {
|
||||||
|
it("should contain 4 analysis tools", () => {
|
||||||
|
expect(ANALYSIS_TOOLS).toHaveLength(4)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include all analysis tools", () => {
|
||||||
|
expect(ANALYSIS_TOOLS).toContain(GET_DEPENDENCIES_TOOL)
|
||||||
|
expect(ANALYSIS_TOOLS).toContain(GET_DEPENDENTS_TOOL)
|
||||||
|
expect(ANALYSIS_TOOLS).toContain(GET_COMPLEXITY_TOOL)
|
||||||
|
expect(ANALYSIS_TOOLS).toContain(GET_TODOS_TOOL)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("GIT_TOOLS", () => {
|
||||||
|
it("should contain 3 git tools", () => {
|
||||||
|
expect(GIT_TOOLS).toHaveLength(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include all git tools", () => {
|
||||||
|
expect(GIT_TOOLS).toContain(GIT_STATUS_TOOL)
|
||||||
|
expect(GIT_TOOLS).toContain(GIT_DIFF_TOOL)
|
||||||
|
expect(GIT_TOOLS).toContain(GIT_COMMIT_TOOL)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("RUN_TOOLS", () => {
|
||||||
|
it("should contain 2 run tools", () => {
|
||||||
|
expect(RUN_TOOLS).toHaveLength(2)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include all run tools", () => {
|
||||||
|
expect(RUN_TOOLS).toContain(RUN_COMMAND_TOOL)
|
||||||
|
expect(RUN_TOOLS).toContain(RUN_TESTS_TOOL)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("individual tool definitions", () => {
|
||||||
|
describe("GET_LINES_TOOL", () => {
|
||||||
|
it("should have correct name", () => {
|
||||||
|
expect(GET_LINES_TOOL.name).toBe("get_lines")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have required path parameter", () => {
|
||||||
|
const pathParam = GET_LINES_TOOL.parameters.find((p) => p.name === "path")
|
||||||
|
expect(pathParam).toBeDefined()
|
||||||
|
expect(pathParam?.required).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have optional start and end parameters", () => {
|
||||||
|
const startParam = GET_LINES_TOOL.parameters.find((p) => p.name === "start")
|
||||||
|
const endParam = GET_LINES_TOOL.parameters.find((p) => p.name === "end")
|
||||||
|
expect(startParam?.required).toBe(false)
|
||||||
|
expect(endParam?.required).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("EDIT_LINES_TOOL", () => {
|
||||||
|
it("should have all required parameters", () => {
|
||||||
|
const requiredParams = EDIT_LINES_TOOL.parameters.filter((p) => p.required)
|
||||||
|
const names = requiredParams.map((p) => p.name)
|
||||||
|
expect(names).toContain("path")
|
||||||
|
expect(names).toContain("start")
|
||||||
|
expect(names).toContain("end")
|
||||||
|
expect(names).toContain("content")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("GIT_STATUS_TOOL", () => {
|
||||||
|
it("should have no required parameters", () => {
|
||||||
|
expect(GIT_STATUS_TOOL.parameters).toHaveLength(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("GET_TODOS_TOOL", () => {
|
||||||
|
it("should have enum for type parameter", () => {
|
||||||
|
const typeParam = GET_TODOS_TOOL.parameters.find((p) => p.name === "type")
|
||||||
|
expect(typeParam?.enum).toEqual(["TODO", "FIXME", "HACK", "XXX"])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("CONFIRMATION_TOOLS", () => {
|
||||||
|
it("should be a Set", () => {
|
||||||
|
expect(CONFIRMATION_TOOLS instanceof Set).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should contain edit and git_commit tools", () => {
|
||||||
|
expect(CONFIRMATION_TOOLS.has("edit_lines")).toBe(true)
|
||||||
|
expect(CONFIRMATION_TOOLS.has("create_file")).toBe(true)
|
||||||
|
expect(CONFIRMATION_TOOLS.has("delete_file")).toBe(true)
|
||||||
|
expect(CONFIRMATION_TOOLS.has("git_commit")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not contain read tools", () => {
|
||||||
|
expect(CONFIRMATION_TOOLS.has("get_lines")).toBe(false)
|
||||||
|
expect(CONFIRMATION_TOOLS.has("get_function")).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("requiresConfirmation", () => {
|
||||||
|
it("should return true for edit tools", () => {
|
||||||
|
expect(requiresConfirmation("edit_lines")).toBe(true)
|
||||||
|
expect(requiresConfirmation("create_file")).toBe(true)
|
||||||
|
expect(requiresConfirmation("delete_file")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return true for git_commit", () => {
|
||||||
|
expect(requiresConfirmation("git_commit")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false for read tools", () => {
|
||||||
|
expect(requiresConfirmation("get_lines")).toBe(false)
|
||||||
|
expect(requiresConfirmation("get_function")).toBe(false)
|
||||||
|
expect(requiresConfirmation("get_structure")).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false for analysis tools", () => {
|
||||||
|
expect(requiresConfirmation("get_dependencies")).toBe(false)
|
||||||
|
expect(requiresConfirmation("get_complexity")).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false for unknown tools", () => {
|
||||||
|
expect(requiresConfirmation("unknown_tool")).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getToolDef", () => {
|
||||||
|
it("should return tool definition by name", () => {
|
||||||
|
const tool = getToolDef("get_lines")
|
||||||
|
expect(tool).toBe(GET_LINES_TOOL)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return undefined for unknown tool", () => {
|
||||||
|
const tool = getToolDef("unknown_tool")
|
||||||
|
expect(tool).toBeUndefined()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should find all 18 tools", () => {
|
||||||
|
const names = [
|
||||||
|
"get_lines",
|
||||||
|
"get_function",
|
||||||
|
"get_class",
|
||||||
|
"get_structure",
|
||||||
|
"edit_lines",
|
||||||
|
"create_file",
|
||||||
|
"delete_file",
|
||||||
|
"find_references",
|
||||||
|
"find_definition",
|
||||||
|
"get_dependencies",
|
||||||
|
"get_dependents",
|
||||||
|
"get_complexity",
|
||||||
|
"get_todos",
|
||||||
|
"git_status",
|
||||||
|
"git_diff",
|
||||||
|
"git_commit",
|
||||||
|
"run_command",
|
||||||
|
"run_tests",
|
||||||
|
]
|
||||||
|
|
||||||
|
for (const name of names) {
|
||||||
|
expect(getToolDef(name)).toBeDefined()
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getToolsByCategory", () => {
|
||||||
|
it("should return read tools", () => {
|
||||||
|
expect(getToolsByCategory("read")).toBe(READ_TOOLS)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return edit tools", () => {
|
||||||
|
expect(getToolsByCategory("edit")).toBe(EDIT_TOOLS)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return search tools", () => {
|
||||||
|
expect(getToolsByCategory("search")).toBe(SEARCH_TOOLS)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return analysis tools", () => {
|
||||||
|
expect(getToolsByCategory("analysis")).toBe(ANALYSIS_TOOLS)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return git tools", () => {
|
||||||
|
expect(getToolsByCategory("git")).toBe(GIT_TOOLS)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return run tools", () => {
|
||||||
|
expect(getToolsByCategory("run")).toBe(RUN_TOOLS)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty array for unknown category", () => {
|
||||||
|
expect(getToolsByCategory("unknown")).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,177 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
|
||||||
|
import type { RedisConfig } from "../../../../src/shared/constants/config.js"
|
||||||
|
import { IpuaroError } from "../../../../src/shared/errors/IpuaroError.js"
|
||||||
|
|
||||||
|
const mockRedisInstance = {
|
||||||
|
connect: vi.fn(),
|
||||||
|
quit: vi.fn(),
|
||||||
|
ping: vi.fn(),
|
||||||
|
config: vi.fn(),
|
||||||
|
status: "ready" as string,
|
||||||
|
}
|
||||||
|
|
||||||
|
vi.mock("ioredis", () => {
|
||||||
|
return {
|
||||||
|
Redis: vi.fn(() => mockRedisInstance),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const { RedisClient } = await import("../../../../src/infrastructure/storage/RedisClient.js")
|
||||||
|
|
||||||
|
describe("RedisClient", () => {
|
||||||
|
const defaultConfig: RedisConfig = {
|
||||||
|
host: "localhost",
|
||||||
|
port: 6379,
|
||||||
|
db: 0,
|
||||||
|
keyPrefix: "ipuaro:",
|
||||||
|
}
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks()
|
||||||
|
mockRedisInstance.status = "ready"
|
||||||
|
mockRedisInstance.connect.mockResolvedValue(undefined)
|
||||||
|
mockRedisInstance.quit.mockResolvedValue(undefined)
|
||||||
|
mockRedisInstance.ping.mockResolvedValue("PONG")
|
||||||
|
mockRedisInstance.config.mockResolvedValue(undefined)
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks()
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("constructor", () => {
|
||||||
|
it("should create instance with config", () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
expect(client).toBeDefined()
|
||||||
|
expect(client.isConnected()).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("connect", () => {
|
||||||
|
it("should connect to Redis", async () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
expect(mockRedisInstance.connect).toHaveBeenCalled()
|
||||||
|
expect(client.isConnected()).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should configure AOF on connect", async () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
expect(mockRedisInstance.config).toHaveBeenCalledWith("SET", "appendonly", "yes")
|
||||||
|
expect(mockRedisInstance.config).toHaveBeenCalledWith("SET", "appendfsync", "everysec")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not reconnect if already connected", async () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.connect()
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
expect(mockRedisInstance.connect).toHaveBeenCalledTimes(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should throw IpuaroError on connection failure", async () => {
|
||||||
|
mockRedisInstance.connect.mockRejectedValue(new Error("Connection refused"))
|
||||||
|
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
|
||||||
|
await expect(client.connect()).rejects.toThrow(IpuaroError)
|
||||||
|
await expect(client.connect()).rejects.toMatchObject({
|
||||||
|
type: "redis",
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle AOF config failure gracefully", async () => {
|
||||||
|
mockRedisInstance.config.mockRejectedValue(new Error("CONFIG disabled"))
|
||||||
|
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
expect(client.isConnected()).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("disconnect", () => {
|
||||||
|
it("should disconnect from Redis", async () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.connect()
|
||||||
|
await client.disconnect()
|
||||||
|
|
||||||
|
expect(mockRedisInstance.quit).toHaveBeenCalled()
|
||||||
|
expect(client.isConnected()).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle disconnect when not connected", async () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.disconnect()
|
||||||
|
|
||||||
|
expect(mockRedisInstance.quit).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("isConnected", () => {
|
||||||
|
it("should return false when not connected", () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
expect(client.isConnected()).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return true when connected and ready", async () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.connect()
|
||||||
|
expect(client.isConnected()).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false when client status is not ready", async () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.connect()
|
||||||
|
mockRedisInstance.status = "connecting"
|
||||||
|
expect(client.isConnected()).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getClient", () => {
|
||||||
|
it("should return Redis client when connected", async () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
const redis = client.getClient()
|
||||||
|
expect(redis).toBe(mockRedisInstance)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should throw when not connected", () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
|
||||||
|
expect(() => client.getClient()).toThrow(IpuaroError)
|
||||||
|
expect(() => client.getClient()).toThrow("not connected")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("ping", () => {
|
||||||
|
it("should return true on successful ping", async () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
const result = await client.ping()
|
||||||
|
expect(result).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false when not connected", async () => {
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
|
||||||
|
const result = await client.ping()
|
||||||
|
expect(result).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false on ping failure", async () => {
|
||||||
|
mockRedisInstance.ping.mockRejectedValue(new Error("Timeout"))
|
||||||
|
|
||||||
|
const client = new RedisClient(defaultConfig)
|
||||||
|
await client.connect()
|
||||||
|
|
||||||
|
const result = await client.ping()
|
||||||
|
expect(result).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,425 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from "vitest"
|
||||||
|
import { RedisStorage } from "../../../../src/infrastructure/storage/RedisStorage.js"
|
||||||
|
import { RedisClient } from "../../../../src/infrastructure/storage/RedisClient.js"
|
||||||
|
import type { FileData } from "../../../../src/domain/value-objects/FileData.js"
|
||||||
|
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||||
|
import type { FileMeta } from "../../../../src/domain/value-objects/FileMeta.js"
|
||||||
|
import type { SymbolIndex, DepsGraph } from "../../../../src/domain/services/IStorage.js"
|
||||||
|
import { IpuaroError } from "../../../../src/shared/errors/IpuaroError.js"
|
||||||
|
|
||||||
|
describe("RedisStorage", () => {
|
||||||
|
const projectName = "test-project"
|
||||||
|
let mockRedis: {
|
||||||
|
hget: ReturnType<typeof vi.fn>
|
||||||
|
hset: ReturnType<typeof vi.fn>
|
||||||
|
hdel: ReturnType<typeof vi.fn>
|
||||||
|
hgetall: ReturnType<typeof vi.fn>
|
||||||
|
hlen: ReturnType<typeof vi.fn>
|
||||||
|
del: ReturnType<typeof vi.fn>
|
||||||
|
}
|
||||||
|
let mockClient: {
|
||||||
|
connect: ReturnType<typeof vi.fn>
|
||||||
|
disconnect: ReturnType<typeof vi.fn>
|
||||||
|
isConnected: ReturnType<typeof vi.fn>
|
||||||
|
getClient: ReturnType<typeof vi.fn>
|
||||||
|
}
|
||||||
|
let storage: RedisStorage
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
mockRedis = {
|
||||||
|
hget: vi.fn(),
|
||||||
|
hset: vi.fn(),
|
||||||
|
hdel: vi.fn(),
|
||||||
|
hgetall: vi.fn(),
|
||||||
|
hlen: vi.fn(),
|
||||||
|
del: vi.fn(),
|
||||||
|
}
|
||||||
|
|
||||||
|
mockClient = {
|
||||||
|
connect: vi.fn().mockResolvedValue(undefined),
|
||||||
|
disconnect: vi.fn().mockResolvedValue(undefined),
|
||||||
|
isConnected: vi.fn().mockReturnValue(true),
|
||||||
|
getClient: vi.fn().mockReturnValue(mockRedis),
|
||||||
|
}
|
||||||
|
|
||||||
|
storage = new RedisStorage(mockClient as unknown as RedisClient, projectName)
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("File operations", () => {
|
||||||
|
const testFile: FileData = {
|
||||||
|
lines: ["line1", "line2"],
|
||||||
|
hash: "abc123",
|
||||||
|
size: 100,
|
||||||
|
lastModified: Date.now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("getFile", () => {
|
||||||
|
it("should return file data when exists", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue(JSON.stringify(testFile))
|
||||||
|
|
||||||
|
const result = await storage.getFile("src/index.ts")
|
||||||
|
|
||||||
|
expect(result).toEqual(testFile)
|
||||||
|
expect(mockRedis.hget).toHaveBeenCalledWith(
|
||||||
|
`project:${projectName}:files`,
|
||||||
|
"src/index.ts",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return null when file not found", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue(null)
|
||||||
|
|
||||||
|
const result = await storage.getFile("nonexistent.ts")
|
||||||
|
|
||||||
|
expect(result).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should throw on invalid JSON", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue("invalid json")
|
||||||
|
|
||||||
|
await expect(storage.getFile("test.ts")).rejects.toThrow(IpuaroError)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("setFile", () => {
|
||||||
|
it("should store file data", async () => {
|
||||||
|
await storage.setFile("src/index.ts", testFile)
|
||||||
|
|
||||||
|
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||||
|
`project:${projectName}:files`,
|
||||||
|
"src/index.ts",
|
||||||
|
JSON.stringify(testFile),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("deleteFile", () => {
|
||||||
|
it("should delete file data", async () => {
|
||||||
|
await storage.deleteFile("src/index.ts")
|
||||||
|
|
||||||
|
expect(mockRedis.hdel).toHaveBeenCalledWith(
|
||||||
|
`project:${projectName}:files`,
|
||||||
|
"src/index.ts",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getAllFiles", () => {
|
||||||
|
it("should return all files as Map", async () => {
|
||||||
|
mockRedis.hgetall.mockResolvedValue({
|
||||||
|
"src/a.ts": JSON.stringify(testFile),
|
||||||
|
"src/b.ts": JSON.stringify({ ...testFile, hash: "def456" }),
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await storage.getAllFiles()
|
||||||
|
|
||||||
|
expect(result).toBeInstanceOf(Map)
|
||||||
|
expect(result.size).toBe(2)
|
||||||
|
expect(result.get("src/a.ts")).toEqual(testFile)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty Map when no files", async () => {
|
||||||
|
mockRedis.hgetall.mockResolvedValue({})
|
||||||
|
|
||||||
|
const result = await storage.getAllFiles()
|
||||||
|
|
||||||
|
expect(result.size).toBe(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getFileCount", () => {
|
||||||
|
it("should return file count", async () => {
|
||||||
|
mockRedis.hlen.mockResolvedValue(42)
|
||||||
|
|
||||||
|
const result = await storage.getFileCount()
|
||||||
|
|
||||||
|
expect(result).toBe(42)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("AST operations", () => {
|
||||||
|
const testAST: FileAST = {
|
||||||
|
imports: [],
|
||||||
|
exports: [],
|
||||||
|
functions: [],
|
||||||
|
classes: [],
|
||||||
|
interfaces: [],
|
||||||
|
typeAliases: [],
|
||||||
|
parseError: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("getAST", () => {
|
||||||
|
it("should return AST when exists", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue(JSON.stringify(testAST))
|
||||||
|
|
||||||
|
const result = await storage.getAST("src/index.ts")
|
||||||
|
|
||||||
|
expect(result).toEqual(testAST)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return null when not found", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue(null)
|
||||||
|
|
||||||
|
const result = await storage.getAST("nonexistent.ts")
|
||||||
|
|
||||||
|
expect(result).toBeNull()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("setAST", () => {
|
||||||
|
it("should store AST", async () => {
|
||||||
|
await storage.setAST("src/index.ts", testAST)
|
||||||
|
|
||||||
|
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||||
|
`project:${projectName}:ast`,
|
||||||
|
"src/index.ts",
|
||||||
|
JSON.stringify(testAST),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("deleteAST", () => {
|
||||||
|
it("should delete AST", async () => {
|
||||||
|
await storage.deleteAST("src/index.ts")
|
||||||
|
|
||||||
|
expect(mockRedis.hdel).toHaveBeenCalledWith(
|
||||||
|
`project:${projectName}:ast`,
|
||||||
|
"src/index.ts",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getAllASTs", () => {
|
||||||
|
it("should return all ASTs as Map", async () => {
|
||||||
|
mockRedis.hgetall.mockResolvedValue({
|
||||||
|
"src/a.ts": JSON.stringify(testAST),
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await storage.getAllASTs()
|
||||||
|
|
||||||
|
expect(result).toBeInstanceOf(Map)
|
||||||
|
expect(result.size).toBe(1)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("Meta operations", () => {
|
||||||
|
const testMeta: FileMeta = {
|
||||||
|
complexity: { loc: 10, nesting: 2, cyclomaticComplexity: 5, score: 20 },
|
||||||
|
dependencies: ["./other.ts"],
|
||||||
|
dependents: [],
|
||||||
|
isHub: false,
|
||||||
|
isEntryPoint: false,
|
||||||
|
fileType: "source",
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("getMeta", () => {
|
||||||
|
it("should return meta when exists", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue(JSON.stringify(testMeta))
|
||||||
|
|
||||||
|
const result = await storage.getMeta("src/index.ts")
|
||||||
|
|
||||||
|
expect(result).toEqual(testMeta)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return null when not found", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue(null)
|
||||||
|
|
||||||
|
const result = await storage.getMeta("nonexistent.ts")
|
||||||
|
|
||||||
|
expect(result).toBeNull()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("setMeta", () => {
|
||||||
|
it("should store meta", async () => {
|
||||||
|
await storage.setMeta("src/index.ts", testMeta)
|
||||||
|
|
||||||
|
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||||
|
`project:${projectName}:meta`,
|
||||||
|
"src/index.ts",
|
||||||
|
JSON.stringify(testMeta),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("deleteMeta", () => {
|
||||||
|
it("should delete meta", async () => {
|
||||||
|
await storage.deleteMeta("src/index.ts")
|
||||||
|
|
||||||
|
expect(mockRedis.hdel).toHaveBeenCalledWith(
|
||||||
|
`project:${projectName}:meta`,
|
||||||
|
"src/index.ts",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getAllMetas", () => {
|
||||||
|
it("should return all metas as Map", async () => {
|
||||||
|
mockRedis.hgetall.mockResolvedValue({
|
||||||
|
"src/a.ts": JSON.stringify(testMeta),
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await storage.getAllMetas()
|
||||||
|
|
||||||
|
expect(result).toBeInstanceOf(Map)
|
||||||
|
expect(result.size).toBe(1)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("Index operations", () => {
|
||||||
|
describe("getSymbolIndex", () => {
|
||||||
|
it("should return symbol index", async () => {
|
||||||
|
const index: [string, { path: string; line: number; type: string }[]][] = [
|
||||||
|
["MyClass", [{ path: "src/index.ts", line: 10, type: "class" }]],
|
||||||
|
]
|
||||||
|
mockRedis.hget.mockResolvedValue(JSON.stringify(index))
|
||||||
|
|
||||||
|
const result = await storage.getSymbolIndex()
|
||||||
|
|
||||||
|
expect(result).toBeInstanceOf(Map)
|
||||||
|
expect(result.get("MyClass")).toBeDefined()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty Map when not found", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue(null)
|
||||||
|
|
||||||
|
const result = await storage.getSymbolIndex()
|
||||||
|
|
||||||
|
expect(result.size).toBe(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("setSymbolIndex", () => {
|
||||||
|
it("should store symbol index", async () => {
|
||||||
|
const index: SymbolIndex = new Map([
|
||||||
|
["MyClass", [{ path: "src/index.ts", line: 10, type: "class" }]],
|
||||||
|
])
|
||||||
|
|
||||||
|
await storage.setSymbolIndex(index)
|
||||||
|
|
||||||
|
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||||
|
`project:${projectName}:indexes`,
|
||||||
|
"symbols",
|
||||||
|
expect.any(String),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getDepsGraph", () => {
|
||||||
|
it("should return deps graph", async () => {
|
||||||
|
const graph = {
|
||||||
|
imports: [["a.ts", ["b.ts"]]],
|
||||||
|
importedBy: [["b.ts", ["a.ts"]]],
|
||||||
|
}
|
||||||
|
mockRedis.hget.mockResolvedValue(JSON.stringify(graph))
|
||||||
|
|
||||||
|
const result = await storage.getDepsGraph()
|
||||||
|
|
||||||
|
expect(result.imports).toBeInstanceOf(Map)
|
||||||
|
expect(result.importedBy).toBeInstanceOf(Map)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty graph when not found", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue(null)
|
||||||
|
|
||||||
|
const result = await storage.getDepsGraph()
|
||||||
|
|
||||||
|
expect(result.imports.size).toBe(0)
|
||||||
|
expect(result.importedBy.size).toBe(0)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("setDepsGraph", () => {
|
||||||
|
it("should store deps graph", async () => {
|
||||||
|
const graph: DepsGraph = {
|
||||||
|
imports: new Map([["a.ts", ["b.ts"]]]),
|
||||||
|
importedBy: new Map([["b.ts", ["a.ts"]]]),
|
||||||
|
}
|
||||||
|
|
||||||
|
await storage.setDepsGraph(graph)
|
||||||
|
|
||||||
|
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||||
|
`project:${projectName}:indexes`,
|
||||||
|
"deps_graph",
|
||||||
|
expect.any(String),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("Config operations", () => {
|
||||||
|
describe("getProjectConfig", () => {
|
||||||
|
it("should return config value", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue(JSON.stringify({ key: "value" }))
|
||||||
|
|
||||||
|
const result = await storage.getProjectConfig("settings")
|
||||||
|
|
||||||
|
expect(result).toEqual({ key: "value" })
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return null when not found", async () => {
|
||||||
|
mockRedis.hget.mockResolvedValue(null)
|
||||||
|
|
||||||
|
const result = await storage.getProjectConfig("nonexistent")
|
||||||
|
|
||||||
|
expect(result).toBeNull()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("setProjectConfig", () => {
|
||||||
|
it("should store config value", async () => {
|
||||||
|
await storage.setProjectConfig("settings", { key: "value" })
|
||||||
|
|
||||||
|
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||||
|
`project:${projectName}:config`,
|
||||||
|
"settings",
|
||||||
|
JSON.stringify({ key: "value" }),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("Lifecycle operations", () => {
|
||||||
|
describe("connect", () => {
|
||||||
|
it("should delegate to client", async () => {
|
||||||
|
await storage.connect()
|
||||||
|
|
||||||
|
expect(mockClient.connect).toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("disconnect", () => {
|
||||||
|
it("should delegate to client", async () => {
|
||||||
|
await storage.disconnect()
|
||||||
|
|
||||||
|
expect(mockClient.disconnect).toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("isConnected", () => {
|
||||||
|
it("should delegate to client", () => {
|
||||||
|
mockClient.isConnected.mockReturnValue(true)
|
||||||
|
|
||||||
|
expect(storage.isConnected()).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("clear", () => {
|
||||||
|
it("should delete all project keys", async () => {
|
||||||
|
mockRedis.del.mockResolvedValue(1)
|
||||||
|
|
||||||
|
await storage.clear()
|
||||||
|
|
||||||
|
expect(mockRedis.del).toHaveBeenCalledTimes(5)
|
||||||
|
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:files`)
|
||||||
|
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:ast`)
|
||||||
|
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:meta`)
|
||||||
|
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:indexes`)
|
||||||
|
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:config`)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
110
packages/ipuaro/tests/unit/infrastructure/storage/schema.test.ts
Normal file
110
packages/ipuaro/tests/unit/infrastructure/storage/schema.test.ts
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
import { describe, it, expect } from "vitest"
|
||||||
|
import {
|
||||||
|
ProjectKeys,
|
||||||
|
SessionKeys,
|
||||||
|
IndexFields,
|
||||||
|
SessionFields,
|
||||||
|
generateProjectName,
|
||||||
|
} from "../../../../src/infrastructure/storage/schema.js"
|
||||||
|
|
||||||
|
describe("schema", () => {
|
||||||
|
describe("ProjectKeys", () => {
|
||||||
|
it("should generate files key", () => {
|
||||||
|
expect(ProjectKeys.files("myproject")).toBe("project:myproject:files")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should generate ast key", () => {
|
||||||
|
expect(ProjectKeys.ast("myproject")).toBe("project:myproject:ast")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should generate meta key", () => {
|
||||||
|
expect(ProjectKeys.meta("myproject")).toBe("project:myproject:meta")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should generate indexes key", () => {
|
||||||
|
expect(ProjectKeys.indexes("myproject")).toBe("project:myproject:indexes")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should generate config key", () => {
|
||||||
|
expect(ProjectKeys.config("myproject")).toBe("project:myproject:config")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("SessionKeys", () => {
|
||||||
|
it("should generate data key", () => {
|
||||||
|
expect(SessionKeys.data("session-123")).toBe("session:session-123:data")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should generate undo key", () => {
|
||||||
|
expect(SessionKeys.undo("session-123")).toBe("session:session-123:undo")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have list key", () => {
|
||||||
|
expect(SessionKeys.list).toBe("sessions:list")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("IndexFields", () => {
|
||||||
|
it("should have symbols field", () => {
|
||||||
|
expect(IndexFields.symbols).toBe("symbols")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have depsGraph field", () => {
|
||||||
|
expect(IndexFields.depsGraph).toBe("deps_graph")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("SessionFields", () => {
|
||||||
|
it("should have all required fields", () => {
|
||||||
|
expect(SessionFields.history).toBe("history")
|
||||||
|
expect(SessionFields.context).toBe("context")
|
||||||
|
expect(SessionFields.stats).toBe("stats")
|
||||||
|
expect(SessionFields.inputHistory).toBe("input_history")
|
||||||
|
expect(SessionFields.createdAt).toBe("created_at")
|
||||||
|
expect(SessionFields.lastActivityAt).toBe("last_activity_at")
|
||||||
|
expect(SessionFields.projectName).toBe("project_name")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("generateProjectName", () => {
|
||||||
|
it("should generate name from path with two parts", () => {
|
||||||
|
expect(generateProjectName("/home/user/projects/myapp")).toBe("projects-myapp")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should generate name from single directory", () => {
|
||||||
|
expect(generateProjectName("/app")).toBe("app")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle root path", () => {
|
||||||
|
expect(generateProjectName("/")).toBe("root")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle empty path", () => {
|
||||||
|
expect(generateProjectName("")).toBe("root")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle trailing slashes", () => {
|
||||||
|
expect(generateProjectName("/home/user/projects/myapp/")).toBe("projects-myapp")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle Windows paths", () => {
|
||||||
|
expect(generateProjectName("C:\\Users\\projects\\myapp")).toBe("projects-myapp")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should sanitize special characters", () => {
|
||||||
|
expect(generateProjectName("/home/my project/my@app!")).toBe("my-project-my-app")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should convert to lowercase", () => {
|
||||||
|
expect(generateProjectName("/Home/User/MYAPP")).toBe("user-myapp")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle multiple consecutive special chars", () => {
|
||||||
|
expect(generateProjectName("/home/my___project")).toBe("home-my-project")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle relative paths", () => {
|
||||||
|
expect(generateProjectName("parent/child")).toBe("parent-child")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,348 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from "vitest"
|
||||||
|
import {
|
||||||
|
GetClassTool,
|
||||||
|
type GetClassResult,
|
||||||
|
} from "../../../../../src/infrastructure/tools/read/GetClassTool.js"
|
||||||
|
import type { ToolContext } from "../../../../../src/domain/services/ITool.js"
|
||||||
|
import type { IStorage } from "../../../../../src/domain/services/IStorage.js"
|
||||||
|
import type { FileAST, ClassInfo } from "../../../../../src/domain/value-objects/FileAST.js"
|
||||||
|
|
||||||
|
function createMockClass(overrides: Partial<ClassInfo> = {}): ClassInfo {
|
||||||
|
return {
|
||||||
|
name: "TestClass",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 10,
|
||||||
|
methods: [
|
||||||
|
{
|
||||||
|
name: "testMethod",
|
||||||
|
lineStart: 3,
|
||||||
|
lineEnd: 5,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
visibility: "public",
|
||||||
|
isStatic: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
properties: [
|
||||||
|
{
|
||||||
|
name: "testProp",
|
||||||
|
line: 2,
|
||||||
|
visibility: "private",
|
||||||
|
isStatic: false,
|
||||||
|
isReadonly: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
implements: [],
|
||||||
|
isExported: true,
|
||||||
|
isAbstract: false,
|
||||||
|
...overrides,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockAST(classes: ClassInfo[] = []): FileAST {
|
||||||
|
return {
|
||||||
|
imports: [],
|
||||||
|
exports: [],
|
||||||
|
functions: [],
|
||||||
|
classes,
|
||||||
|
interfaces: [],
|
||||||
|
typeAliases: [],
|
||||||
|
parseError: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockStorage(
|
||||||
|
fileData: { lines: string[] } | null = null,
|
||||||
|
ast: FileAST | null = null,
|
||||||
|
): IStorage {
|
||||||
|
return {
|
||||||
|
getFile: vi.fn().mockResolvedValue(fileData),
|
||||||
|
setFile: vi.fn(),
|
||||||
|
deleteFile: vi.fn(),
|
||||||
|
getAllFiles: vi.fn(),
|
||||||
|
getAST: vi.fn().mockResolvedValue(ast),
|
||||||
|
setAST: vi.fn(),
|
||||||
|
getMeta: vi.fn(),
|
||||||
|
setMeta: vi.fn(),
|
||||||
|
getSymbolIndex: vi.fn(),
|
||||||
|
setSymbolIndex: vi.fn(),
|
||||||
|
getDepsGraph: vi.fn(),
|
||||||
|
setDepsGraph: vi.fn(),
|
||||||
|
getConfig: vi.fn(),
|
||||||
|
setConfig: vi.fn(),
|
||||||
|
clear: vi.fn(),
|
||||||
|
} as unknown as IStorage
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockContext(storage?: IStorage): ToolContext {
|
||||||
|
return {
|
||||||
|
projectRoot: "/test/project",
|
||||||
|
storage: storage ?? createMockStorage(),
|
||||||
|
requestConfirmation: vi.fn().mockResolvedValue(true),
|
||||||
|
onProgress: vi.fn(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("GetClassTool", () => {
|
||||||
|
let tool: GetClassTool
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tool = new GetClassTool()
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("metadata", () => {
|
||||||
|
it("should have correct name", () => {
|
||||||
|
expect(tool.name).toBe("get_class")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have correct category", () => {
|
||||||
|
expect(tool.category).toBe("read")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not require confirmation", () => {
|
||||||
|
expect(tool.requiresConfirmation).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have correct parameters", () => {
|
||||||
|
expect(tool.parameters).toHaveLength(2)
|
||||||
|
expect(tool.parameters[0].name).toBe("path")
|
||||||
|
expect(tool.parameters[0].required).toBe(true)
|
||||||
|
expect(tool.parameters[1].name).toBe("name")
|
||||||
|
expect(tool.parameters[1].required).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("validateParams", () => {
|
||||||
|
it("should return null for valid params", () => {
|
||||||
|
expect(tool.validateParams({ path: "src/index.ts", name: "MyClass" })).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for missing path", () => {
|
||||||
|
expect(tool.validateParams({ name: "MyClass" })).toBe(
|
||||||
|
"Parameter 'path' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for empty path", () => {
|
||||||
|
expect(tool.validateParams({ path: "", name: "MyClass" })).toBe(
|
||||||
|
"Parameter 'path' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for missing name", () => {
|
||||||
|
expect(tool.validateParams({ path: "test.ts" })).toBe(
|
||||||
|
"Parameter 'name' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for empty name", () => {
|
||||||
|
expect(tool.validateParams({ path: "test.ts", name: "" })).toBe(
|
||||||
|
"Parameter 'name' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("execute", () => {
|
||||||
|
it("should return class code with line numbers", async () => {
|
||||||
|
const lines = [
|
||||||
|
"export class TestClass {",
|
||||||
|
" private testProp: string",
|
||||||
|
" testMethod() {",
|
||||||
|
" return this.testProp",
|
||||||
|
" }",
|
||||||
|
"}",
|
||||||
|
]
|
||||||
|
const cls = createMockClass({
|
||||||
|
name: "TestClass",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 6,
|
||||||
|
})
|
||||||
|
const ast = createMockAST([cls])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "TestClass" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetClassResult
|
||||||
|
expect(data.path).toBe("test.ts")
|
||||||
|
expect(data.name).toBe("TestClass")
|
||||||
|
expect(data.startLine).toBe(1)
|
||||||
|
expect(data.endLine).toBe(6)
|
||||||
|
expect(data.content).toContain("1│export class TestClass {")
|
||||||
|
expect(data.content).toContain("6│}")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return class metadata", async () => {
|
||||||
|
const lines = ["abstract class BaseService extends Service implements IService {", "}"]
|
||||||
|
const cls = createMockClass({
|
||||||
|
name: "BaseService",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 2,
|
||||||
|
isExported: false,
|
||||||
|
isAbstract: true,
|
||||||
|
extends: "Service",
|
||||||
|
implements: ["IService"],
|
||||||
|
methods: [
|
||||||
|
{
|
||||||
|
name: "init",
|
||||||
|
lineStart: 2,
|
||||||
|
lineEnd: 2,
|
||||||
|
params: [],
|
||||||
|
isAsync: true,
|
||||||
|
visibility: "public",
|
||||||
|
isStatic: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "destroy",
|
||||||
|
lineStart: 3,
|
||||||
|
lineEnd: 3,
|
||||||
|
params: [],
|
||||||
|
isAsync: false,
|
||||||
|
visibility: "protected",
|
||||||
|
isStatic: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
properties: [
|
||||||
|
{
|
||||||
|
name: "id",
|
||||||
|
line: 2,
|
||||||
|
visibility: "private",
|
||||||
|
isStatic: false,
|
||||||
|
isReadonly: true,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
const ast = createMockAST([cls])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "service.ts", name: "BaseService" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetClassResult
|
||||||
|
expect(data.isExported).toBe(false)
|
||||||
|
expect(data.isAbstract).toBe(true)
|
||||||
|
expect(data.extends).toBe("Service")
|
||||||
|
expect(data.implements).toEqual(["IService"])
|
||||||
|
expect(data.methods).toEqual(["init", "destroy"])
|
||||||
|
expect(data.properties).toEqual(["id"])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error when AST not found", async () => {
|
||||||
|
const storage = createMockStorage({ lines: [] }, null)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "MyClass" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toContain('AST not found for "test.ts"')
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error when class not found", async () => {
|
||||||
|
const ast = createMockAST([
|
||||||
|
createMockClass({ name: "ClassA" }),
|
||||||
|
createMockClass({ name: "ClassB" }),
|
||||||
|
])
|
||||||
|
const storage = createMockStorage({ lines: [] }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "NonExistent" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toContain('Class "NonExistent" not found')
|
||||||
|
expect(result.error).toContain("Available: ClassA, ClassB")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error when no classes available", async () => {
|
||||||
|
const ast = createMockAST([])
|
||||||
|
const storage = createMockStorage({ lines: [] }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "MyClass" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toContain("Available: none")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for path outside project root", async () => {
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "../outside/file.ts", name: "MyClass" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toBe("Path must be within project root")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle class with no extends", async () => {
|
||||||
|
const lines = ["class Simple {}"]
|
||||||
|
const cls = createMockClass({
|
||||||
|
name: "Simple",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 1,
|
||||||
|
extends: undefined,
|
||||||
|
})
|
||||||
|
const ast = createMockAST([cls])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "Simple" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetClassResult
|
||||||
|
expect(data.extends).toBeUndefined()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle class with empty implements", async () => {
|
||||||
|
const lines = ["class NoInterfaces {}"]
|
||||||
|
const cls = createMockClass({
|
||||||
|
name: "NoInterfaces",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 1,
|
||||||
|
implements: [],
|
||||||
|
})
|
||||||
|
const ast = createMockAST([cls])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "NoInterfaces" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetClassResult
|
||||||
|
expect(data.implements).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle class with no methods or properties", async () => {
|
||||||
|
const lines = ["class Empty {}"]
|
||||||
|
const cls = createMockClass({
|
||||||
|
name: "Empty",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 1,
|
||||||
|
methods: [],
|
||||||
|
properties: [],
|
||||||
|
})
|
||||||
|
const ast = createMockAST([cls])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "Empty" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetClassResult
|
||||||
|
expect(data.methods).toEqual([])
|
||||||
|
expect(data.properties).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include callId in result", async () => {
|
||||||
|
const lines = ["class Test {}"]
|
||||||
|
const cls = createMockClass({ name: "Test", lineStart: 1, lineEnd: 1 })
|
||||||
|
const ast = createMockAST([cls])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "Test" }, ctx)
|
||||||
|
|
||||||
|
expect(result.callId).toMatch(/^get_class-\d+$/)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,305 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from "vitest"
|
||||||
|
import {
|
||||||
|
GetFunctionTool,
|
||||||
|
type GetFunctionResult,
|
||||||
|
} from "../../../../../src/infrastructure/tools/read/GetFunctionTool.js"
|
||||||
|
import type { ToolContext } from "../../../../../src/domain/services/ITool.js"
|
||||||
|
import type { IStorage } from "../../../../../src/domain/services/IStorage.js"
|
||||||
|
import type { FileAST, FunctionInfo } from "../../../../../src/domain/value-objects/FileAST.js"
|
||||||
|
|
||||||
|
function createMockFunction(overrides: Partial<FunctionInfo> = {}): FunctionInfo {
|
||||||
|
return {
|
||||||
|
name: "testFunction",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 5,
|
||||||
|
params: [{ name: "arg1", optional: false, hasDefault: false }],
|
||||||
|
isAsync: false,
|
||||||
|
isExported: true,
|
||||||
|
returnType: "void",
|
||||||
|
...overrides,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockAST(functions: FunctionInfo[] = []): FileAST {
|
||||||
|
return {
|
||||||
|
imports: [],
|
||||||
|
exports: [],
|
||||||
|
functions,
|
||||||
|
classes: [],
|
||||||
|
interfaces: [],
|
||||||
|
typeAliases: [],
|
||||||
|
parseError: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockStorage(
|
||||||
|
fileData: { lines: string[] } | null = null,
|
||||||
|
ast: FileAST | null = null,
|
||||||
|
): IStorage {
|
||||||
|
return {
|
||||||
|
getFile: vi.fn().mockResolvedValue(fileData),
|
||||||
|
setFile: vi.fn(),
|
||||||
|
deleteFile: vi.fn(),
|
||||||
|
getAllFiles: vi.fn(),
|
||||||
|
getAST: vi.fn().mockResolvedValue(ast),
|
||||||
|
setAST: vi.fn(),
|
||||||
|
getMeta: vi.fn(),
|
||||||
|
setMeta: vi.fn(),
|
||||||
|
getSymbolIndex: vi.fn(),
|
||||||
|
setSymbolIndex: vi.fn(),
|
||||||
|
getDepsGraph: vi.fn(),
|
||||||
|
setDepsGraph: vi.fn(),
|
||||||
|
getConfig: vi.fn(),
|
||||||
|
setConfig: vi.fn(),
|
||||||
|
clear: vi.fn(),
|
||||||
|
} as unknown as IStorage
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockContext(storage?: IStorage): ToolContext {
|
||||||
|
return {
|
||||||
|
projectRoot: "/test/project",
|
||||||
|
storage: storage ?? createMockStorage(),
|
||||||
|
requestConfirmation: vi.fn().mockResolvedValue(true),
|
||||||
|
onProgress: vi.fn(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("GetFunctionTool", () => {
|
||||||
|
let tool: GetFunctionTool
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tool = new GetFunctionTool()
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("metadata", () => {
|
||||||
|
it("should have correct name", () => {
|
||||||
|
expect(tool.name).toBe("get_function")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have correct category", () => {
|
||||||
|
expect(tool.category).toBe("read")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not require confirmation", () => {
|
||||||
|
expect(tool.requiresConfirmation).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have correct parameters", () => {
|
||||||
|
expect(tool.parameters).toHaveLength(2)
|
||||||
|
expect(tool.parameters[0].name).toBe("path")
|
||||||
|
expect(tool.parameters[0].required).toBe(true)
|
||||||
|
expect(tool.parameters[1].name).toBe("name")
|
||||||
|
expect(tool.parameters[1].required).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("validateParams", () => {
|
||||||
|
it("should return null for valid params", () => {
|
||||||
|
expect(tool.validateParams({ path: "src/index.ts", name: "myFunc" })).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for missing path", () => {
|
||||||
|
expect(tool.validateParams({ name: "myFunc" })).toBe(
|
||||||
|
"Parameter 'path' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for empty path", () => {
|
||||||
|
expect(tool.validateParams({ path: "", name: "myFunc" })).toBe(
|
||||||
|
"Parameter 'path' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for missing name", () => {
|
||||||
|
expect(tool.validateParams({ path: "test.ts" })).toBe(
|
||||||
|
"Parameter 'name' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for empty name", () => {
|
||||||
|
expect(tool.validateParams({ path: "test.ts", name: "" })).toBe(
|
||||||
|
"Parameter 'name' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for whitespace-only name", () => {
|
||||||
|
expect(tool.validateParams({ path: "test.ts", name: " " })).toBe(
|
||||||
|
"Parameter 'name' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("execute", () => {
|
||||||
|
it("should return function code with line numbers", async () => {
|
||||||
|
const lines = [
|
||||||
|
"function testFunction(arg1) {",
|
||||||
|
" console.log(arg1)",
|
||||||
|
" return arg1",
|
||||||
|
"}",
|
||||||
|
"",
|
||||||
|
]
|
||||||
|
const func = createMockFunction({
|
||||||
|
name: "testFunction",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 4,
|
||||||
|
})
|
||||||
|
const ast = createMockAST([func])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "testFunction" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetFunctionResult
|
||||||
|
expect(data.path).toBe("test.ts")
|
||||||
|
expect(data.name).toBe("testFunction")
|
||||||
|
expect(data.startLine).toBe(1)
|
||||||
|
expect(data.endLine).toBe(4)
|
||||||
|
expect(data.content).toContain("1│function testFunction(arg1) {")
|
||||||
|
expect(data.content).toContain("4│}")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return function metadata", async () => {
|
||||||
|
const lines = ["async function fetchData(url, options) {", " return fetch(url)", "}"]
|
||||||
|
const func = createMockFunction({
|
||||||
|
name: "fetchData",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 3,
|
||||||
|
isAsync: true,
|
||||||
|
isExported: false,
|
||||||
|
params: [
|
||||||
|
{ name: "url", optional: false, hasDefault: false },
|
||||||
|
{ name: "options", optional: true, hasDefault: false },
|
||||||
|
],
|
||||||
|
returnType: "Promise<Response>",
|
||||||
|
})
|
||||||
|
const ast = createMockAST([func])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "api.ts", name: "fetchData" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetFunctionResult
|
||||||
|
expect(data.isAsync).toBe(true)
|
||||||
|
expect(data.isExported).toBe(false)
|
||||||
|
expect(data.params).toEqual(["url", "options"])
|
||||||
|
expect(data.returnType).toBe("Promise<Response>")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error when AST not found", async () => {
|
||||||
|
const storage = createMockStorage({ lines: [] }, null)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "myFunc" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toContain('AST not found for "test.ts"')
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error when function not found", async () => {
|
||||||
|
const ast = createMockAST([
|
||||||
|
createMockFunction({ name: "existingFunc" }),
|
||||||
|
createMockFunction({ name: "anotherFunc" }),
|
||||||
|
])
|
||||||
|
const storage = createMockStorage({ lines: [] }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "nonExistent" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toContain('Function "nonExistent" not found')
|
||||||
|
expect(result.error).toContain("Available: existingFunc, anotherFunc")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error when no functions available", async () => {
|
||||||
|
const ast = createMockAST([])
|
||||||
|
const storage = createMockStorage({ lines: [] }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "myFunc" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toContain("Available: none")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for path outside project root", async () => {
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "../outside/file.ts", name: "myFunc" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toBe("Path must be within project root")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should pad line numbers correctly for large files", async () => {
|
||||||
|
const lines = Array.from({ length: 200 }, (_, i) => `line ${i + 1}`)
|
||||||
|
const func = createMockFunction({
|
||||||
|
name: "bigFunction",
|
||||||
|
lineStart: 95,
|
||||||
|
lineEnd: 105,
|
||||||
|
})
|
||||||
|
const ast = createMockAST([func])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "big.ts", name: "bigFunction" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetFunctionResult
|
||||||
|
expect(data.content).toContain(" 95│line 95")
|
||||||
|
expect(data.content).toContain("100│line 100")
|
||||||
|
expect(data.content).toContain("105│line 105")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include callId in result", async () => {
|
||||||
|
const lines = ["function test() {}"]
|
||||||
|
const func = createMockFunction({ name: "test", lineStart: 1, lineEnd: 1 })
|
||||||
|
const ast = createMockAST([func])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "test" }, ctx)
|
||||||
|
|
||||||
|
expect(result.callId).toMatch(/^get_function-\d+$/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle function with no return type", async () => {
|
||||||
|
const lines = ["function noReturn() {}"]
|
||||||
|
const func = createMockFunction({
|
||||||
|
name: "noReturn",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 1,
|
||||||
|
returnType: undefined,
|
||||||
|
})
|
||||||
|
const ast = createMockAST([func])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "noReturn" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetFunctionResult
|
||||||
|
expect(data.returnType).toBeUndefined()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle function with no params", async () => {
|
||||||
|
const lines = ["function noParams() {}"]
|
||||||
|
const func = createMockFunction({
|
||||||
|
name: "noParams",
|
||||||
|
lineStart: 1,
|
||||||
|
lineEnd: 1,
|
||||||
|
params: [],
|
||||||
|
})
|
||||||
|
const ast = createMockAST([func])
|
||||||
|
const storage = createMockStorage({ lines }, ast)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", name: "noParams" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetFunctionResult
|
||||||
|
expect(data.params).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,273 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from "vitest"
|
||||||
|
import {
|
||||||
|
GetLinesTool,
|
||||||
|
type GetLinesResult,
|
||||||
|
} from "../../../../../src/infrastructure/tools/read/GetLinesTool.js"
|
||||||
|
import type { ToolContext } from "../../../../../src/domain/services/ITool.js"
|
||||||
|
import type { IStorage } from "../../../../../src/domain/services/IStorage.js"
|
||||||
|
|
||||||
|
function createMockStorage(fileData: { lines: string[] } | null = null): IStorage {
|
||||||
|
return {
|
||||||
|
getFile: vi.fn().mockResolvedValue(fileData),
|
||||||
|
setFile: vi.fn(),
|
||||||
|
deleteFile: vi.fn(),
|
||||||
|
getAllFiles: vi.fn(),
|
||||||
|
getAST: vi.fn(),
|
||||||
|
setAST: vi.fn(),
|
||||||
|
getMeta: vi.fn(),
|
||||||
|
setMeta: vi.fn(),
|
||||||
|
getSymbolIndex: vi.fn(),
|
||||||
|
setSymbolIndex: vi.fn(),
|
||||||
|
getDepsGraph: vi.fn(),
|
||||||
|
setDepsGraph: vi.fn(),
|
||||||
|
getConfig: vi.fn(),
|
||||||
|
setConfig: vi.fn(),
|
||||||
|
clear: vi.fn(),
|
||||||
|
} as unknown as IStorage
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockContext(storage?: IStorage): ToolContext {
|
||||||
|
return {
|
||||||
|
projectRoot: "/test/project",
|
||||||
|
storage: storage ?? createMockStorage(),
|
||||||
|
requestConfirmation: vi.fn().mockResolvedValue(true),
|
||||||
|
onProgress: vi.fn(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("GetLinesTool", () => {
|
||||||
|
let tool: GetLinesTool
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tool = new GetLinesTool()
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("metadata", () => {
|
||||||
|
it("should have correct name", () => {
|
||||||
|
expect(tool.name).toBe("get_lines")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have correct category", () => {
|
||||||
|
expect(tool.category).toBe("read")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not require confirmation", () => {
|
||||||
|
expect(tool.requiresConfirmation).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have correct parameters", () => {
|
||||||
|
expect(tool.parameters).toHaveLength(3)
|
||||||
|
expect(tool.parameters[0].name).toBe("path")
|
||||||
|
expect(tool.parameters[0].required).toBe(true)
|
||||||
|
expect(tool.parameters[1].name).toBe("start")
|
||||||
|
expect(tool.parameters[1].required).toBe(false)
|
||||||
|
expect(tool.parameters[2].name).toBe("end")
|
||||||
|
expect(tool.parameters[2].required).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("validateParams", () => {
|
||||||
|
it("should return null for valid params with path only", () => {
|
||||||
|
expect(tool.validateParams({ path: "src/index.ts" })).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return null for valid params with start and end", () => {
|
||||||
|
expect(tool.validateParams({ path: "src/index.ts", start: 1, end: 10 })).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for missing path", () => {
|
||||||
|
expect(tool.validateParams({})).toBe(
|
||||||
|
"Parameter 'path' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for empty path", () => {
|
||||||
|
expect(tool.validateParams({ path: "" })).toBe(
|
||||||
|
"Parameter 'path' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
expect(tool.validateParams({ path: " " })).toBe(
|
||||||
|
"Parameter 'path' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for non-string path", () => {
|
||||||
|
expect(tool.validateParams({ path: 123 })).toBe(
|
||||||
|
"Parameter 'path' is required and must be a non-empty string",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for non-integer start", () => {
|
||||||
|
expect(tool.validateParams({ path: "test.ts", start: 1.5 })).toBe(
|
||||||
|
"Parameter 'start' must be an integer",
|
||||||
|
)
|
||||||
|
expect(tool.validateParams({ path: "test.ts", start: "1" })).toBe(
|
||||||
|
"Parameter 'start' must be an integer",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for start < 1", () => {
|
||||||
|
expect(tool.validateParams({ path: "test.ts", start: 0 })).toBe(
|
||||||
|
"Parameter 'start' must be >= 1",
|
||||||
|
)
|
||||||
|
expect(tool.validateParams({ path: "test.ts", start: -1 })).toBe(
|
||||||
|
"Parameter 'start' must be >= 1",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for non-integer end", () => {
|
||||||
|
expect(tool.validateParams({ path: "test.ts", end: 1.5 })).toBe(
|
||||||
|
"Parameter 'end' must be an integer",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for end < 1", () => {
|
||||||
|
expect(tool.validateParams({ path: "test.ts", end: 0 })).toBe(
|
||||||
|
"Parameter 'end' must be >= 1",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for start > end", () => {
|
||||||
|
expect(tool.validateParams({ path: "test.ts", start: 10, end: 5 })).toBe(
|
||||||
|
"Parameter 'start' must be <= 'end'",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("execute", () => {
|
||||||
|
it("should return all lines when no range specified", async () => {
|
||||||
|
const lines = ["line 1", "line 2", "line 3"]
|
||||||
|
const storage = createMockStorage({ lines })
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetLinesResult
|
||||||
|
expect(data.path).toBe("test.ts")
|
||||||
|
expect(data.startLine).toBe(1)
|
||||||
|
expect(data.endLine).toBe(3)
|
||||||
|
expect(data.totalLines).toBe(3)
|
||||||
|
expect(data.content).toContain("1│line 1")
|
||||||
|
expect(data.content).toContain("2│line 2")
|
||||||
|
expect(data.content).toContain("3│line 3")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return specific range", async () => {
|
||||||
|
const lines = ["line 1", "line 2", "line 3", "line 4", "line 5"]
|
||||||
|
const storage = createMockStorage({ lines })
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", start: 2, end: 4 }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetLinesResult
|
||||||
|
expect(data.startLine).toBe(2)
|
||||||
|
expect(data.endLine).toBe(4)
|
||||||
|
expect(data.content).toContain("2│line 2")
|
||||||
|
expect(data.content).toContain("3│line 3")
|
||||||
|
expect(data.content).toContain("4│line 4")
|
||||||
|
expect(data.content).not.toContain("line 1")
|
||||||
|
expect(data.content).not.toContain("line 5")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should clamp start to 1 if less", async () => {
|
||||||
|
const lines = ["line 1", "line 2"]
|
||||||
|
const storage = createMockStorage({ lines })
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", start: -5, end: 2 }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetLinesResult
|
||||||
|
expect(data.startLine).toBe(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should clamp end to totalLines if greater", async () => {
|
||||||
|
const lines = ["line 1", "line 2", "line 3"]
|
||||||
|
const storage = createMockStorage({ lines })
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", start: 1, end: 100 }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetLinesResult
|
||||||
|
expect(data.endLine).toBe(3)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should pad line numbers correctly", async () => {
|
||||||
|
const lines = Array.from({ length: 100 }, (_, i) => `line ${i + 1}`)
|
||||||
|
const storage = createMockStorage({ lines })
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts", start: 98, end: 100 }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetLinesResult
|
||||||
|
expect(data.content).toContain(" 98│line 98")
|
||||||
|
expect(data.content).toContain(" 99│line 99")
|
||||||
|
expect(data.content).toContain("100│line 100")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for path outside project root", async () => {
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "../outside/file.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toBe("Path must be within project root")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error when file not found", async () => {
|
||||||
|
const storage = createMockStorage(null)
|
||||||
|
storage.getFile = vi.fn().mockResolvedValue(null)
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "nonexistent.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toContain("ENOENT")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include callId in result", async () => {
|
||||||
|
const storage = createMockStorage({ lines: ["test"] })
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.callId).toMatch(/^get_lines-\d+$/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include executionTimeMs in result", async () => {
|
||||||
|
const storage = createMockStorage({ lines: ["test"] })
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "test.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.executionTimeMs).toBeGreaterThanOrEqual(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle empty file", async () => {
|
||||||
|
const storage = createMockStorage({ lines: [] })
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "empty.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetLinesResult
|
||||||
|
expect(data.totalLines).toBe(0)
|
||||||
|
expect(data.content).toBe("")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle single line file", async () => {
|
||||||
|
const storage = createMockStorage({ lines: ["only line"] })
|
||||||
|
const ctx = createMockContext(storage)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "single.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetLinesResult
|
||||||
|
expect(data.totalLines).toBe(1)
|
||||||
|
expect(data.content).toBe("1│only line")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -0,0 +1,274 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
|
||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import * as os from "node:os"
|
||||||
|
import {
|
||||||
|
GetStructureTool,
|
||||||
|
type GetStructureResult,
|
||||||
|
} from "../../../../../src/infrastructure/tools/read/GetStructureTool.js"
|
||||||
|
import type { ToolContext } from "../../../../../src/domain/services/ITool.js"
|
||||||
|
import type { IStorage } from "../../../../../src/domain/services/IStorage.js"
|
||||||
|
|
||||||
|
function createMockStorage(): IStorage {
|
||||||
|
return {
|
||||||
|
getFile: vi.fn(),
|
||||||
|
setFile: vi.fn(),
|
||||||
|
deleteFile: vi.fn(),
|
||||||
|
getAllFiles: vi.fn(),
|
||||||
|
getAST: vi.fn(),
|
||||||
|
setAST: vi.fn(),
|
||||||
|
getMeta: vi.fn(),
|
||||||
|
setMeta: vi.fn(),
|
||||||
|
getSymbolIndex: vi.fn(),
|
||||||
|
setSymbolIndex: vi.fn(),
|
||||||
|
getDepsGraph: vi.fn(),
|
||||||
|
setDepsGraph: vi.fn(),
|
||||||
|
getConfig: vi.fn(),
|
||||||
|
setConfig: vi.fn(),
|
||||||
|
clear: vi.fn(),
|
||||||
|
} as unknown as IStorage
|
||||||
|
}
|
||||||
|
|
||||||
|
function createMockContext(projectRoot: string): ToolContext {
|
||||||
|
return {
|
||||||
|
projectRoot,
|
||||||
|
storage: createMockStorage(),
|
||||||
|
requestConfirmation: vi.fn().mockResolvedValue(true),
|
||||||
|
onProgress: vi.fn(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("GetStructureTool", () => {
|
||||||
|
let tool: GetStructureTool
|
||||||
|
let tempDir: string
|
||||||
|
|
||||||
|
beforeEach(async () => {
|
||||||
|
tool = new GetStructureTool()
|
||||||
|
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "ipuaro-test-"))
|
||||||
|
})
|
||||||
|
|
||||||
|
afterEach(async () => {
|
||||||
|
await fs.rm(tempDir, { recursive: true, force: true })
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("metadata", () => {
|
||||||
|
it("should have correct name", () => {
|
||||||
|
expect(tool.name).toBe("get_structure")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have correct category", () => {
|
||||||
|
expect(tool.category).toBe("read")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not require confirmation", () => {
|
||||||
|
expect(tool.requiresConfirmation).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should have correct parameters", () => {
|
||||||
|
expect(tool.parameters).toHaveLength(2)
|
||||||
|
expect(tool.parameters[0].name).toBe("path")
|
||||||
|
expect(tool.parameters[0].required).toBe(false)
|
||||||
|
expect(tool.parameters[1].name).toBe("depth")
|
||||||
|
expect(tool.parameters[1].required).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("validateParams", () => {
|
||||||
|
it("should return null for empty params", () => {
|
||||||
|
expect(tool.validateParams({})).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return null for valid path", () => {
|
||||||
|
expect(tool.validateParams({ path: "src" })).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return null for valid depth", () => {
|
||||||
|
expect(tool.validateParams({ depth: 3 })).toBeNull()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for non-string path", () => {
|
||||||
|
expect(tool.validateParams({ path: 123 })).toBe("Parameter 'path' must be a string")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for non-integer depth", () => {
|
||||||
|
expect(tool.validateParams({ depth: 2.5 })).toBe("Parameter 'depth' must be an integer")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for depth < 1", () => {
|
||||||
|
expect(tool.validateParams({ depth: 0 })).toBe("Parameter 'depth' must be >= 1")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("execute", () => {
|
||||||
|
it("should return tree structure for empty directory", async () => {
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({}, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetStructureResult
|
||||||
|
expect(data.path).toBe(".")
|
||||||
|
expect(data.tree.type).toBe("directory")
|
||||||
|
expect(data.tree.children).toEqual([])
|
||||||
|
expect(data.stats.directories).toBe(1)
|
||||||
|
expect(data.stats.files).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return tree structure with files", async () => {
|
||||||
|
await fs.writeFile(path.join(tempDir, "file1.ts"), "")
|
||||||
|
await fs.writeFile(path.join(tempDir, "file2.ts"), "")
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({}, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetStructureResult
|
||||||
|
expect(data.tree.children).toHaveLength(2)
|
||||||
|
expect(data.stats.files).toBe(2)
|
||||||
|
expect(data.content).toContain("file1.ts")
|
||||||
|
expect(data.content).toContain("file2.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return nested directory structure", async () => {
|
||||||
|
await fs.mkdir(path.join(tempDir, "src"))
|
||||||
|
await fs.writeFile(path.join(tempDir, "src", "index.ts"), "")
|
||||||
|
await fs.mkdir(path.join(tempDir, "src", "utils"))
|
||||||
|
await fs.writeFile(path.join(tempDir, "src", "utils", "helper.ts"), "")
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({}, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetStructureResult
|
||||||
|
expect(data.stats.directories).toBe(3)
|
||||||
|
expect(data.stats.files).toBe(2)
|
||||||
|
expect(data.content).toContain("src")
|
||||||
|
expect(data.content).toContain("index.ts")
|
||||||
|
expect(data.content).toContain("utils")
|
||||||
|
expect(data.content).toContain("helper.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should respect depth parameter", async () => {
|
||||||
|
await fs.mkdir(path.join(tempDir, "level1"))
|
||||||
|
await fs.mkdir(path.join(tempDir, "level1", "level2"))
|
||||||
|
await fs.mkdir(path.join(tempDir, "level1", "level2", "level3"))
|
||||||
|
await fs.writeFile(path.join(tempDir, "level1", "level2", "level3", "deep.ts"), "")
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({ depth: 2 }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetStructureResult
|
||||||
|
expect(data.content).toContain("level1")
|
||||||
|
expect(data.content).toContain("level2")
|
||||||
|
expect(data.content).not.toContain("level3")
|
||||||
|
expect(data.content).not.toContain("deep.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should filter subdirectory when path specified", async () => {
|
||||||
|
await fs.mkdir(path.join(tempDir, "src"))
|
||||||
|
await fs.mkdir(path.join(tempDir, "tests"))
|
||||||
|
await fs.writeFile(path.join(tempDir, "src", "index.ts"), "")
|
||||||
|
await fs.writeFile(path.join(tempDir, "tests", "test.ts"), "")
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "src" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetStructureResult
|
||||||
|
expect(data.path).toBe("src")
|
||||||
|
expect(data.content).toContain("index.ts")
|
||||||
|
expect(data.content).not.toContain("test.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should ignore node_modules", async () => {
|
||||||
|
await fs.mkdir(path.join(tempDir, "node_modules"))
|
||||||
|
await fs.writeFile(path.join(tempDir, "node_modules", "pkg.js"), "")
|
||||||
|
await fs.writeFile(path.join(tempDir, "index.ts"), "")
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({}, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetStructureResult
|
||||||
|
expect(data.content).not.toContain("node_modules")
|
||||||
|
expect(data.content).toContain("index.ts")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should ignore .git directory", async () => {
|
||||||
|
await fs.mkdir(path.join(tempDir, ".git"))
|
||||||
|
await fs.writeFile(path.join(tempDir, ".git", "config"), "")
|
||||||
|
await fs.writeFile(path.join(tempDir, "index.ts"), "")
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({}, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetStructureResult
|
||||||
|
expect(data.content).not.toContain(".git")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should sort directories before files", async () => {
|
||||||
|
await fs.writeFile(path.join(tempDir, "aaa.ts"), "")
|
||||||
|
await fs.mkdir(path.join(tempDir, "zzz"))
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({}, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetStructureResult
|
||||||
|
const zzzIndex = data.content.indexOf("zzz")
|
||||||
|
const aaaIndex = data.content.indexOf("aaa.ts")
|
||||||
|
expect(zzzIndex).toBeLessThan(aaaIndex)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for path outside project root", async () => {
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "../outside" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toBe("Path must be within project root")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for non-directory path", async () => {
|
||||||
|
await fs.writeFile(path.join(tempDir, "file.ts"), "")
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "file.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toContain("is not a directory")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error for non-existent path", async () => {
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({ path: "nonexistent" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toContain("ENOENT")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include callId in result", async () => {
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({}, ctx)
|
||||||
|
|
||||||
|
expect(result.callId).toMatch(/^get_structure-\d+$/)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should use tree icons in output", async () => {
|
||||||
|
await fs.mkdir(path.join(tempDir, "src"))
|
||||||
|
await fs.writeFile(path.join(tempDir, "index.ts"), "")
|
||||||
|
const ctx = createMockContext(tempDir)
|
||||||
|
|
||||||
|
const result = await tool.execute({}, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
const data = result.data as GetStructureResult
|
||||||
|
expect(data.content).toContain("📁")
|
||||||
|
expect(data.content).toContain("📄")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
449
packages/ipuaro/tests/unit/infrastructure/tools/registry.test.ts
Normal file
449
packages/ipuaro/tests/unit/infrastructure/tools/registry.test.ts
Normal file
@@ -0,0 +1,449 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from "vitest"
|
||||||
|
import { ToolRegistry } from "../../../../src/infrastructure/tools/registry.js"
|
||||||
|
import type {
|
||||||
|
ITool,
|
||||||
|
ToolContext,
|
||||||
|
ToolParameterSchema,
|
||||||
|
} from "../../../../src/domain/services/ITool.js"
|
||||||
|
import type { ToolResult } from "../../../../src/domain/value-objects/ToolResult.js"
|
||||||
|
import { IpuaroError } from "../../../../src/shared/errors/IpuaroError.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a mock tool for testing.
|
||||||
|
*/
|
||||||
|
function createMockTool(overrides: Partial<ITool> = {}): ITool {
|
||||||
|
return {
|
||||||
|
name: "mock_tool",
|
||||||
|
description: "A mock tool for testing",
|
||||||
|
parameters: [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "optional",
|
||||||
|
type: "number",
|
||||||
|
description: "Optional param",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requiresConfirmation: false,
|
||||||
|
category: "read",
|
||||||
|
execute: vi.fn().mockResolvedValue({
|
||||||
|
callId: "test-123",
|
||||||
|
success: true,
|
||||||
|
data: { result: "success" },
|
||||||
|
executionTimeMs: 10,
|
||||||
|
}),
|
||||||
|
validateParams: vi.fn().mockReturnValue(null),
|
||||||
|
...overrides,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a mock tool context for testing.
|
||||||
|
*/
|
||||||
|
function createMockContext(overrides: Partial<ToolContext> = {}): ToolContext {
|
||||||
|
return {
|
||||||
|
projectRoot: "/test/project",
|
||||||
|
storage: {} as ToolContext["storage"],
|
||||||
|
requestConfirmation: vi.fn().mockResolvedValue(true),
|
||||||
|
onProgress: vi.fn(),
|
||||||
|
...overrides,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
describe("ToolRegistry", () => {
|
||||||
|
let registry: ToolRegistry
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
registry = new ToolRegistry()
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("register", () => {
|
||||||
|
it("should register a tool", () => {
|
||||||
|
const tool = createMockTool()
|
||||||
|
|
||||||
|
registry.register(tool)
|
||||||
|
|
||||||
|
expect(registry.has("mock_tool")).toBe(true)
|
||||||
|
expect(registry.size).toBe(1)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should register multiple tools", () => {
|
||||||
|
const tool1 = createMockTool({ name: "tool_1" })
|
||||||
|
const tool2 = createMockTool({ name: "tool_2" })
|
||||||
|
|
||||||
|
registry.register(tool1)
|
||||||
|
registry.register(tool2)
|
||||||
|
|
||||||
|
expect(registry.size).toBe(2)
|
||||||
|
expect(registry.has("tool_1")).toBe(true)
|
||||||
|
expect(registry.has("tool_2")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should throw error when registering duplicate tool name", () => {
|
||||||
|
const tool1 = createMockTool({ name: "duplicate" })
|
||||||
|
const tool2 = createMockTool({ name: "duplicate" })
|
||||||
|
|
||||||
|
registry.register(tool1)
|
||||||
|
|
||||||
|
expect(() => registry.register(tool2)).toThrow(IpuaroError)
|
||||||
|
expect(() => registry.register(tool2)).toThrow('Tool "duplicate" is already registered')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("unregister", () => {
|
||||||
|
it("should remove a registered tool", () => {
|
||||||
|
const tool = createMockTool()
|
||||||
|
registry.register(tool)
|
||||||
|
|
||||||
|
const result = registry.unregister("mock_tool")
|
||||||
|
|
||||||
|
expect(result).toBe(true)
|
||||||
|
expect(registry.has("mock_tool")).toBe(false)
|
||||||
|
expect(registry.size).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false when tool not found", () => {
|
||||||
|
const result = registry.unregister("nonexistent")
|
||||||
|
|
||||||
|
expect(result).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("get", () => {
|
||||||
|
it("should return registered tool", () => {
|
||||||
|
const tool = createMockTool()
|
||||||
|
registry.register(tool)
|
||||||
|
|
||||||
|
const result = registry.get("mock_tool")
|
||||||
|
|
||||||
|
expect(result).toBe(tool)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return undefined for unknown tool", () => {
|
||||||
|
const result = registry.get("unknown")
|
||||||
|
|
||||||
|
expect(result).toBeUndefined()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getAll", () => {
|
||||||
|
it("should return empty array when no tools registered", () => {
|
||||||
|
const result = registry.getAll()
|
||||||
|
|
||||||
|
expect(result).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return all registered tools", () => {
|
||||||
|
const tool1 = createMockTool({ name: "tool_1" })
|
||||||
|
const tool2 = createMockTool({ name: "tool_2" })
|
||||||
|
registry.register(tool1)
|
||||||
|
registry.register(tool2)
|
||||||
|
|
||||||
|
const result = registry.getAll()
|
||||||
|
|
||||||
|
expect(result).toHaveLength(2)
|
||||||
|
expect(result).toContain(tool1)
|
||||||
|
expect(result).toContain(tool2)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getByCategory", () => {
|
||||||
|
it("should return tools by category", () => {
|
||||||
|
const readTool = createMockTool({ name: "read_tool", category: "read" })
|
||||||
|
const editTool = createMockTool({ name: "edit_tool", category: "edit" })
|
||||||
|
const gitTool = createMockTool({ name: "git_tool", category: "git" })
|
||||||
|
registry.register(readTool)
|
||||||
|
registry.register(editTool)
|
||||||
|
registry.register(gitTool)
|
||||||
|
|
||||||
|
const readTools = registry.getByCategory("read")
|
||||||
|
const editTools = registry.getByCategory("edit")
|
||||||
|
|
||||||
|
expect(readTools).toHaveLength(1)
|
||||||
|
expect(readTools[0]).toBe(readTool)
|
||||||
|
expect(editTools).toHaveLength(1)
|
||||||
|
expect(editTools[0]).toBe(editTool)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty array for category with no tools", () => {
|
||||||
|
const readTool = createMockTool({ category: "read" })
|
||||||
|
registry.register(readTool)
|
||||||
|
|
||||||
|
const result = registry.getByCategory("analysis")
|
||||||
|
|
||||||
|
expect(result).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("has", () => {
|
||||||
|
it("should return true for registered tool", () => {
|
||||||
|
registry.register(createMockTool())
|
||||||
|
|
||||||
|
expect(registry.has("mock_tool")).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return false for unknown tool", () => {
|
||||||
|
expect(registry.has("unknown")).toBe(false)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("execute", () => {
|
||||||
|
it("should execute tool and return result", async () => {
|
||||||
|
const tool = createMockTool()
|
||||||
|
registry.register(tool)
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
const result = await registry.execute("mock_tool", { path: "test.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
expect(result.data).toEqual({ result: "success" })
|
||||||
|
expect(tool.execute).toHaveBeenCalledWith({ path: "test.ts" }, ctx)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error result for unknown tool", async () => {
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
const result = await registry.execute("unknown", {}, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toBe('Tool "unknown" not found')
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return error result when validation fails", async () => {
|
||||||
|
const tool = createMockTool({
|
||||||
|
validateParams: vi.fn().mockReturnValue("Missing required param: path"),
|
||||||
|
})
|
||||||
|
registry.register(tool)
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
const result = await registry.execute("mock_tool", {}, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toBe("Missing required param: path")
|
||||||
|
expect(tool.execute).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should request confirmation for tools that require it", async () => {
|
||||||
|
const tool = createMockTool({ requiresConfirmation: true })
|
||||||
|
registry.register(tool)
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
await registry.execute("mock_tool", { path: "test.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(ctx.requestConfirmation).toHaveBeenCalled()
|
||||||
|
expect(tool.execute).toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not execute when confirmation is denied", async () => {
|
||||||
|
const tool = createMockTool({ requiresConfirmation: true })
|
||||||
|
registry.register(tool)
|
||||||
|
const ctx = createMockContext({
|
||||||
|
requestConfirmation: vi.fn().mockResolvedValue(false),
|
||||||
|
})
|
||||||
|
|
||||||
|
const result = await registry.execute("mock_tool", { path: "test.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toBe("User cancelled operation")
|
||||||
|
expect(tool.execute).not.toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should not request confirmation for safe tools", async () => {
|
||||||
|
const tool = createMockTool({ requiresConfirmation: false })
|
||||||
|
registry.register(tool)
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
await registry.execute("mock_tool", { path: "test.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(ctx.requestConfirmation).not.toHaveBeenCalled()
|
||||||
|
expect(tool.execute).toHaveBeenCalled()
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should catch and return errors from tool execution", async () => {
|
||||||
|
const tool = createMockTool({
|
||||||
|
execute: vi.fn().mockRejectedValue(new Error("Execution failed")),
|
||||||
|
})
|
||||||
|
registry.register(tool)
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
const result = await registry.execute("mock_tool", { path: "test.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.success).toBe(false)
|
||||||
|
expect(result.error).toBe("Execution failed")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should include callId in result", async () => {
|
||||||
|
const tool = createMockTool()
|
||||||
|
registry.register(tool)
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
const result = await registry.execute("mock_tool", { path: "test.ts" }, ctx)
|
||||||
|
|
||||||
|
expect(result.callId).toMatch(/^mock_tool-\d+$/)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getToolDefinitions", () => {
|
||||||
|
it("should return empty array when no tools registered", () => {
|
||||||
|
const result = registry.getToolDefinitions()
|
||||||
|
|
||||||
|
expect(result).toEqual([])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should convert tools to LLM-compatible format", () => {
|
||||||
|
const tool = createMockTool()
|
||||||
|
registry.register(tool)
|
||||||
|
|
||||||
|
const result = registry.getToolDefinitions()
|
||||||
|
|
||||||
|
expect(result).toHaveLength(1)
|
||||||
|
expect(result[0]).toEqual({
|
||||||
|
name: "mock_tool",
|
||||||
|
description: "A mock tool for testing",
|
||||||
|
parameters: {
|
||||||
|
type: "object",
|
||||||
|
properties: {
|
||||||
|
path: {
|
||||||
|
type: "string",
|
||||||
|
description: "File path",
|
||||||
|
},
|
||||||
|
optional: {
|
||||||
|
type: "number",
|
||||||
|
description: "Optional param",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
required: ["path"],
|
||||||
|
},
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle tools with no parameters", () => {
|
||||||
|
const tool = createMockTool({ parameters: [] })
|
||||||
|
registry.register(tool)
|
||||||
|
|
||||||
|
const result = registry.getToolDefinitions()
|
||||||
|
|
||||||
|
expect(result[0].parameters).toEqual({
|
||||||
|
type: "object",
|
||||||
|
properties: {},
|
||||||
|
required: [],
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should handle multiple tools", () => {
|
||||||
|
registry.register(createMockTool({ name: "tool_1" }))
|
||||||
|
registry.register(createMockTool({ name: "tool_2" }))
|
||||||
|
|
||||||
|
const result = registry.getToolDefinitions()
|
||||||
|
|
||||||
|
expect(result).toHaveLength(2)
|
||||||
|
expect(result.map((t) => t.name)).toEqual(["tool_1", "tool_2"])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("clear", () => {
|
||||||
|
it("should remove all tools", () => {
|
||||||
|
registry.register(createMockTool({ name: "tool_1" }))
|
||||||
|
registry.register(createMockTool({ name: "tool_2" }))
|
||||||
|
|
||||||
|
registry.clear()
|
||||||
|
|
||||||
|
expect(registry.size).toBe(0)
|
||||||
|
expect(registry.getAll()).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getNames", () => {
|
||||||
|
it("should return all tool names", () => {
|
||||||
|
registry.register(createMockTool({ name: "alpha" }))
|
||||||
|
registry.register(createMockTool({ name: "beta" }))
|
||||||
|
|
||||||
|
const result = registry.getNames()
|
||||||
|
|
||||||
|
expect(result).toEqual(["alpha", "beta"])
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return empty array when no tools", () => {
|
||||||
|
const result = registry.getNames()
|
||||||
|
|
||||||
|
expect(result).toEqual([])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getConfirmationTools", () => {
|
||||||
|
it("should return only tools requiring confirmation", () => {
|
||||||
|
registry.register(createMockTool({ name: "safe", requiresConfirmation: false }))
|
||||||
|
registry.register(createMockTool({ name: "dangerous", requiresConfirmation: true }))
|
||||||
|
registry.register(createMockTool({ name: "also_safe", requiresConfirmation: false }))
|
||||||
|
|
||||||
|
const result = registry.getConfirmationTools()
|
||||||
|
|
||||||
|
expect(result).toHaveLength(1)
|
||||||
|
expect(result[0].name).toBe("dangerous")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getSafeTools", () => {
|
||||||
|
it("should return only tools not requiring confirmation", () => {
|
||||||
|
registry.register(createMockTool({ name: "safe", requiresConfirmation: false }))
|
||||||
|
registry.register(createMockTool({ name: "dangerous", requiresConfirmation: true }))
|
||||||
|
registry.register(createMockTool({ name: "also_safe", requiresConfirmation: false }))
|
||||||
|
|
||||||
|
const result = registry.getSafeTools()
|
||||||
|
|
||||||
|
expect(result).toHaveLength(2)
|
||||||
|
expect(result.map((t) => t.name)).toEqual(["safe", "also_safe"])
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("size", () => {
|
||||||
|
it("should return 0 for empty registry", () => {
|
||||||
|
expect(registry.size).toBe(0)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return correct count", () => {
|
||||||
|
registry.register(createMockTool({ name: "a" }))
|
||||||
|
registry.register(createMockTool({ name: "b" }))
|
||||||
|
registry.register(createMockTool({ name: "c" }))
|
||||||
|
|
||||||
|
expect(registry.size).toBe(3)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("integration scenarios", () => {
|
||||||
|
it("should handle full workflow: register, execute, unregister", async () => {
|
||||||
|
const tool = createMockTool()
|
||||||
|
const ctx = createMockContext()
|
||||||
|
|
||||||
|
registry.register(tool)
|
||||||
|
expect(registry.has("mock_tool")).toBe(true)
|
||||||
|
|
||||||
|
const result = await registry.execute("mock_tool", { path: "test.ts" }, ctx)
|
||||||
|
expect(result.success).toBe(true)
|
||||||
|
|
||||||
|
registry.unregister("mock_tool")
|
||||||
|
expect(registry.has("mock_tool")).toBe(false)
|
||||||
|
|
||||||
|
const afterUnregister = await registry.execute("mock_tool", {}, ctx)
|
||||||
|
expect(afterUnregister.success).toBe(false)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should maintain isolation between registrations", () => {
|
||||||
|
const registry1 = new ToolRegistry()
|
||||||
|
const registry2 = new ToolRegistry()
|
||||||
|
|
||||||
|
registry1.register(createMockTool({ name: "tool_1" }))
|
||||||
|
registry2.register(createMockTool({ name: "tool_2" }))
|
||||||
|
|
||||||
|
expect(registry1.has("tool_1")).toBe(true)
|
||||||
|
expect(registry1.has("tool_2")).toBe(false)
|
||||||
|
expect(registry2.has("tool_1")).toBe(false)
|
||||||
|
expect(registry2.has("tool_2")).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
@@ -1,5 +1,9 @@
|
|||||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
|
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
|
||||||
import { loadConfig, validateConfig, getConfigErrors } from "../../../../src/shared/config/loader.js"
|
import {
|
||||||
|
loadConfig,
|
||||||
|
validateConfig,
|
||||||
|
getConfigErrors,
|
||||||
|
} from "../../../../src/shared/config/loader.js"
|
||||||
import { DEFAULT_CONFIG } from "../../../../src/shared/constants/config.js"
|
import { DEFAULT_CONFIG } from "../../../../src/shared/constants/config.js"
|
||||||
import * as fs from "node:fs"
|
import * as fs from "node:fs"
|
||||||
|
|
||||||
@@ -28,7 +32,7 @@ describe("config loader", () => {
|
|||||||
return path === "/project/.ipuaro.json"
|
return path === "/project/.ipuaro.json"
|
||||||
})
|
})
|
||||||
vi.mocked(fs.readFileSync).mockReturnValue(
|
vi.mocked(fs.readFileSync).mockReturnValue(
|
||||||
JSON.stringify({ llm: { model: "custom-model" } })
|
JSON.stringify({ llm: { model: "custom-model" } }),
|
||||||
)
|
)
|
||||||
|
|
||||||
const config = loadConfig("/project")
|
const config = loadConfig("/project")
|
||||||
|
|||||||
@@ -11,6 +11,7 @@
|
|||||||
"declarationMap": true,
|
"declarationMap": true,
|
||||||
"sourceMap": true,
|
"sourceMap": true,
|
||||||
"strict": true,
|
"strict": true,
|
||||||
|
"resolveJsonModule": true,
|
||||||
"skipLibCheck": true,
|
"skipLibCheck": true,
|
||||||
"esModuleInterop": true,
|
"esModuleInterop": true,
|
||||||
"resolvePackageJsonExports": true,
|
"resolvePackageJsonExports": true,
|
||||||
|
|||||||
@@ -8,11 +8,7 @@ export default defineConfig({
|
|||||||
sourcemap: true,
|
sourcemap: true,
|
||||||
splitting: false,
|
splitting: false,
|
||||||
treeshake: true,
|
treeshake: true,
|
||||||
external: [
|
external: ["tree-sitter", "tree-sitter-typescript", "tree-sitter-javascript"],
|
||||||
"tree-sitter",
|
|
||||||
"tree-sitter-typescript",
|
|
||||||
"tree-sitter-javascript",
|
|
||||||
],
|
|
||||||
esbuildOptions(options) {
|
esbuildOptions(options) {
|
||||||
options.jsx = "automatic"
|
options.jsx = "automatic"
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -9,11 +9,7 @@ export default defineConfig({
|
|||||||
provider: "v8",
|
provider: "v8",
|
||||||
reporter: ["text", "html", "lcov"],
|
reporter: ["text", "html", "lcov"],
|
||||||
include: ["src/**/*.ts", "src/**/*.tsx"],
|
include: ["src/**/*.ts", "src/**/*.tsx"],
|
||||||
exclude: [
|
exclude: ["src/**/*.d.ts", "src/**/index.ts", "src/**/*.test.ts"],
|
||||||
"src/**/*.d.ts",
|
|
||||||
"src/**/index.ts",
|
|
||||||
"src/**/*.test.ts",
|
|
||||||
],
|
|
||||||
thresholds: {
|
thresholds: {
|
||||||
lines: 80,
|
lines: 80,
|
||||||
functions: 80,
|
functions: 80,
|
||||||
|
|||||||
43
pnpm-lock.yaml
generated
43
pnpm-lock.yaml
generated
@@ -141,9 +141,9 @@ importers:
|
|||||||
commander:
|
commander:
|
||||||
specifier: ^11.1.0
|
specifier: ^11.1.0
|
||||||
version: 11.1.0
|
version: 11.1.0
|
||||||
ignore:
|
globby:
|
||||||
specifier: ^5.3.2
|
specifier: ^16.0.0
|
||||||
version: 5.3.2
|
version: 16.0.0
|
||||||
ink:
|
ink:
|
||||||
specifier: ^4.4.1
|
specifier: ^4.4.1
|
||||||
version: 4.4.1(@types/react@18.3.27)(react@18.3.1)
|
version: 4.4.1(@types/react@18.3.27)(react@18.3.1)
|
||||||
@@ -1471,6 +1471,10 @@ packages:
|
|||||||
'@sinclair/typebox@0.34.41':
|
'@sinclair/typebox@0.34.41':
|
||||||
resolution: {integrity: sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==}
|
resolution: {integrity: sha512-6gS8pZzSXdyRHTIqoqSVknxolr1kzfy4/CeDnrzsVz8TTIWUbOBr6gnzOmTYJ3eXQNh4IYHIGi5aIL7sOZ2G/g==}
|
||||||
|
|
||||||
|
'@sindresorhus/merge-streams@4.0.0':
|
||||||
|
resolution: {integrity: sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==}
|
||||||
|
engines: {node: '>=18'}
|
||||||
|
|
||||||
'@sinonjs/commons@3.0.1':
|
'@sinonjs/commons@3.0.1':
|
||||||
resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==}
|
resolution: {integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==}
|
||||||
|
|
||||||
@@ -2732,6 +2736,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==}
|
resolution: {integrity: sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ==}
|
||||||
engines: {node: '>=18'}
|
engines: {node: '>=18'}
|
||||||
|
|
||||||
|
globby@16.0.0:
|
||||||
|
resolution: {integrity: sha512-ejy4TJFga99yW6Q0uhM3pFawKWZmtZzZD/v/GwI5+9bCV5Ew+D2pSND6W7fUes5UykqSsJkUfxFVdRh7Q1+P3Q==}
|
||||||
|
engines: {node: '>=20'}
|
||||||
|
|
||||||
gopd@1.2.0:
|
gopd@1.2.0:
|
||||||
resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==}
|
resolution: {integrity: sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==}
|
||||||
engines: {node: '>= 0.4'}
|
engines: {node: '>= 0.4'}
|
||||||
@@ -2879,6 +2887,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
|
resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==}
|
||||||
engines: {node: '>=0.12.0'}
|
engines: {node: '>=0.12.0'}
|
||||||
|
|
||||||
|
is-path-inside@4.0.0:
|
||||||
|
resolution: {integrity: sha512-lJJV/5dYS+RcL8uQdBDW9c9uWFLLBNRyFhnAKXw5tVqLlKZ4RMGZKv+YQ/IA3OhD+RpbJa1LLFM1FQPGyIXvOA==}
|
||||||
|
engines: {node: '>=12'}
|
||||||
|
|
||||||
is-stream@2.0.1:
|
is-stream@2.0.1:
|
||||||
resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==}
|
resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==}
|
||||||
engines: {node: '>=8'}
|
engines: {node: '>=8'}
|
||||||
@@ -3712,6 +3724,10 @@ packages:
|
|||||||
resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
|
resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
|
||||||
engines: {node: '>=8'}
|
engines: {node: '>=8'}
|
||||||
|
|
||||||
|
slash@5.1.0:
|
||||||
|
resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==}
|
||||||
|
engines: {node: '>=14.16'}
|
||||||
|
|
||||||
slice-ansi@4.0.0:
|
slice-ansi@4.0.0:
|
||||||
resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==}
|
resolution: {integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==}
|
||||||
engines: {node: '>=10'}
|
engines: {node: '>=10'}
|
||||||
@@ -4128,6 +4144,10 @@ packages:
|
|||||||
undici-types@6.21.0:
|
undici-types@6.21.0:
|
||||||
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
|
||||||
|
|
||||||
|
unicorn-magic@0.4.0:
|
||||||
|
resolution: {integrity: sha512-wH590V9VNgYH9g3lH9wWjTrUoKsjLF6sGLjhR4sH1LWpLmCOH0Zf7PukhDA8BiS7KHe4oPNkcTHqYkj7SOGUOw==}
|
||||||
|
engines: {node: '>=20'}
|
||||||
|
|
||||||
universalify@2.0.1:
|
universalify@2.0.1:
|
||||||
resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==}
|
resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==}
|
||||||
engines: {node: '>= 10.0.0'}
|
engines: {node: '>= 10.0.0'}
|
||||||
@@ -5614,6 +5634,8 @@ snapshots:
|
|||||||
|
|
||||||
'@sinclair/typebox@0.34.41': {}
|
'@sinclair/typebox@0.34.41': {}
|
||||||
|
|
||||||
|
'@sindresorhus/merge-streams@4.0.0': {}
|
||||||
|
|
||||||
'@sinonjs/commons@3.0.1':
|
'@sinonjs/commons@3.0.1':
|
||||||
dependencies:
|
dependencies:
|
||||||
type-detect: 4.0.8
|
type-detect: 4.0.8
|
||||||
@@ -7072,6 +7094,15 @@ snapshots:
|
|||||||
|
|
||||||
globals@16.5.0: {}
|
globals@16.5.0: {}
|
||||||
|
|
||||||
|
globby@16.0.0:
|
||||||
|
dependencies:
|
||||||
|
'@sindresorhus/merge-streams': 4.0.0
|
||||||
|
fast-glob: 3.3.3
|
||||||
|
ignore: 7.0.5
|
||||||
|
is-path-inside: 4.0.0
|
||||||
|
slash: 5.1.0
|
||||||
|
unicorn-magic: 0.4.0
|
||||||
|
|
||||||
gopd@1.2.0: {}
|
gopd@1.2.0: {}
|
||||||
|
|
||||||
graceful-fs@4.2.11: {}
|
graceful-fs@4.2.11: {}
|
||||||
@@ -7221,6 +7252,8 @@ snapshots:
|
|||||||
|
|
||||||
is-number@7.0.0: {}
|
is-number@7.0.0: {}
|
||||||
|
|
||||||
|
is-path-inside@4.0.0: {}
|
||||||
|
|
||||||
is-stream@2.0.1: {}
|
is-stream@2.0.1: {}
|
||||||
|
|
||||||
is-stream@3.0.0: {}
|
is-stream@3.0.0: {}
|
||||||
@@ -8203,6 +8236,8 @@ snapshots:
|
|||||||
|
|
||||||
slash@3.0.0: {}
|
slash@3.0.0: {}
|
||||||
|
|
||||||
|
slash@5.1.0: {}
|
||||||
|
|
||||||
slice-ansi@4.0.0:
|
slice-ansi@4.0.0:
|
||||||
dependencies:
|
dependencies:
|
||||||
ansi-styles: 4.3.0
|
ansi-styles: 4.3.0
|
||||||
@@ -8610,6 +8645,8 @@ snapshots:
|
|||||||
|
|
||||||
undici-types@6.21.0: {}
|
undici-types@6.21.0: {}
|
||||||
|
|
||||||
|
unicorn-magic@0.4.0: {}
|
||||||
|
|
||||||
universalify@2.0.1: {}
|
universalify@2.0.1: {}
|
||||||
|
|
||||||
unrs-resolver@1.11.1:
|
unrs-resolver@1.11.1:
|
||||||
|
|||||||
Reference in New Issue
Block a user