Mirror of https://github.com/samiyev/puaros.git (synced 2025-12-27 23:06:54 +05:00)

Compare commits: ipuaro-v0. ... ipuaro-v0. (44 commits)
| SHA1 |
|---|
| 9c94335729 |
| c34d57c231 |
| 60052c0db9 |
| fa647c41aa |
| 98b365bd94 |
| a7669f8947 |
| 7f0ec49c90 |
| 077d160343 |
| b5ee77d8b8 |
| a589b0dfc4 |
| 908c2f50d7 |
| 510c42241a |
| 357cf27765 |
| 6695cb73d4 |
| 5a9470929c |
| 137c77cc53 |
| 0433ef102c |
| 902d1db831 |
| c843b780a8 |
| 0dff0e87d0 |
| ab2d5d40a5 |
| baccfd53c0 |
| 8f995fc596 |
| f947c6d157 |
| 33d52bc7ca |
| 2c6eb6ce9b |
| 7d18e87423 |
| fd1e6ad86e |
| 259ecc181a |
| 0f2ed5b301 |
| 56643d903f |
| f5f904a847 |
| 2ae1ac13f5 |
| caf7aac116 |
| 4ad5a209c4 |
| 25146003cc |
| 68f927d906 |
| b3e04a411c |
| 294d085ad4 |
| 958e4daed5 |
| 6234fbce92 |
| af9c2377a0 |
| d0c1ddc22e |
| 225480c806 |
CLAUDE.md (+29)
@@ -447,6 +447,35 @@ Copy and use for each release:
- [ ] Published to npm (if public release)
```

## Working with Roadmap

When the user points to `ROADMAP.md` or asks about the roadmap/next steps:

1. **Read both files together:**
    - `packages/<package>/ROADMAP.md` - to understand the planned features and milestones
    - `packages/<package>/CHANGELOG.md` - to see what's already implemented

2. **Determine current position:**
    - Check the latest version in CHANGELOG.md
    - Cross-reference with ROADMAP.md milestones
    - Identify which roadmap items are already completed (present in CHANGELOG)

3. **Suggest next steps:**
    - Find the first uncompleted item in the current milestone
    - Or identify the next milestone if current one is complete
    - Present clear "start here" recommendation

**Example workflow:**
```
User: "Let's work on the roadmap" or points to ROADMAP.md

Claude should:
1. Read ROADMAP.md → See milestones v0.1.0, v0.2.0, v0.3.0...
2. Read CHANGELOG.md → See latest release is v0.1.1
3. Compare → v0.1.0 milestone complete, v0.2.0 in progress
4. Report → "v0.1.0 is complete. For v0.2.0, next item is: <feature>"
```

## Common Workflows

### Adding a new CLI option

@@ -74,6 +74,7 @@ export default tseslint.config(
        '@typescript-eslint/require-await': 'warn',
        '@typescript-eslint/no-unnecessary-condition': 'off', // Sometimes useful for defensive coding
        '@typescript-eslint/no-non-null-assertion': 'warn',
        '@typescript-eslint/no-unnecessary-type-parameters': 'warn', // Allow generic JSON parsers

        // ========================================
        // Code Quality & Best Practices

@@ -5,6 +5,26 @@ All notable changes to @samiyev/guardian will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.9.4] - 2025-11-30

### Added

- **VERSION export** - Package version is now exported from index.ts, automatically read from package.json

### Changed

- 🔄 **Refactored SecretDetector** - Reduced cyclomatic complexity from 24 to <15:
    - Extracted helper methods: `extractByRuleId`, `extractAwsType`, `extractGithubType`, `extractSshType`, `extractSlackType`, `extractByMessage`
    - Used lookup arrays for SSH and message type mappings
- 🔄 **Refactored AstNamingTraverser** - Reduced cyclomatic complexity from 17 to <15:
    - Replaced if-else chain with Map-based node handlers
    - Added `buildNodeHandlers()` method for cleaner architecture

### Quality

- ✅ **Zero lint warnings** - All ESLint warnings resolved
- ✅ **All 616 tests pass**

## [0.9.2] - 2025-11-27

### Changed

@@ -1,6 +1,6 @@
{
    "name": "@samiyev/guardian",
    "version": "0.9.3",
    "version": "0.9.4",
    "description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
    "keywords": [
        "puaros",
@@ -40,7 +40,7 @@
    "license": "MIT",
    "repository": {
        "type": "git",
        "url": "https://github.com/samiyev/puaros.git",
        "url": "git+https://github.com/samiyev/puaros.git",
        "directory": "packages/guardian"
    },
    "bugs": {

@@ -215,6 +215,7 @@ export class AnalyzeProject extends UseCase<
    private readonly detectionPipeline: ExecuteDetection
    private readonly resultAggregator: AggregateResults

    // eslint-disable-next-line max-params
    constructor(
        fileScanner: IFileScanner,
        codeParser: ICodeParser,

@@ -56,6 +56,7 @@ export interface DetectionResult {
 * Pipeline step responsible for running all detectors
 */
export class ExecuteDetection {
    // eslint-disable-next-line max-params
    constructor(
        private readonly hardcodeDetector: IHardcodeDetector,
        private readonly namingConventionDetector: INamingConventionDetector,

@@ -171,6 +171,7 @@ export class HardcodedValue extends ValueObject<HardcodedValueProps> {
        return `${CONSTANT_NAMES.MAGIC_NUMBER}_${String(value)}`
    }

    // eslint-disable-next-line complexity, max-lines-per-function
    private suggestStringConstantName(): string {
        const value = String(this.props.value)
        const context = this.props.context.toLowerCase()

@@ -1,3 +1,7 @@
import pkg from "../package.json"

export const VERSION = pkg.version

export * from "./domain"
export * from "./application"
export * from "./infrastructure"

@@ -90,80 +90,98 @@ export class SecretDetector implements ISecretDetector {
|
||||
}
|
||||
|
||||
private extractSecretType(message: string, ruleId: string): string {
|
||||
const lowerMessage = message.toLowerCase()
|
||||
|
||||
const ruleBasedType = this.extractByRuleId(ruleId, lowerMessage)
|
||||
if (ruleBasedType) {
|
||||
return ruleBasedType
|
||||
}
|
||||
|
||||
return this.extractByMessage(lowerMessage)
|
||||
}
|
||||
|
||||
private extractByRuleId(ruleId: string, lowerMessage: string): string | null {
|
||||
if (ruleId.includes(SECRET_KEYWORDS.AWS)) {
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ACCESS_KEY)) {
|
||||
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
|
||||
}
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
|
||||
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
|
||||
}
|
||||
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
|
||||
return this.extractAwsType(lowerMessage)
|
||||
}
|
||||
|
||||
if (ruleId.includes(SECRET_KEYWORDS.GITHUB)) {
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
|
||||
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
|
||||
}
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.OAUTH)) {
|
||||
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
|
||||
}
|
||||
return SECRET_TYPE_NAMES.GITHUB_TOKEN
|
||||
return this.extractGithubType(lowerMessage)
|
||||
}
|
||||
|
||||
if (ruleId.includes(SECRET_KEYWORDS.NPM)) {
|
||||
return SECRET_TYPE_NAMES.NPM_TOKEN
|
||||
}
|
||||
|
||||
if (ruleId.includes(SECRET_KEYWORDS.GCP) || ruleId.includes(SECRET_KEYWORDS.GOOGLE)) {
|
||||
return SECRET_TYPE_NAMES.GCP_SERVICE_ACCOUNT_KEY
|
||||
}
|
||||
|
||||
if (ruleId.includes(SECRET_KEYWORDS.PRIVATEKEY) || ruleId.includes(SECRET_KEYWORDS.SSH)) {
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.RSA)) {
|
||||
return SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY
|
||||
}
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.DSA)) {
|
||||
return SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY
|
||||
}
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ECDSA)) {
|
||||
return SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY
|
||||
}
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ED25519)) {
|
||||
return SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY
|
||||
}
|
||||
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
|
||||
return this.extractSshType(lowerMessage)
|
||||
}
|
||||
|
||||
if (ruleId.includes(SECRET_KEYWORDS.SLACK)) {
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.BOT)) {
|
||||
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
|
||||
}
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.USER)) {
|
||||
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
|
||||
}
|
||||
return SECRET_TYPE_NAMES.SLACK_TOKEN
|
||||
return this.extractSlackType(lowerMessage)
|
||||
}
|
||||
|
||||
if (ruleId.includes(SECRET_KEYWORDS.BASICAUTH)) {
|
||||
return SECRET_TYPE_NAMES.BASIC_AUTH_CREDENTIALS
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.API_KEY)) {
|
||||
return SECRET_TYPE_NAMES.API_KEY
|
||||
private extractAwsType(lowerMessage: string): string {
|
||||
if (lowerMessage.includes(SECRET_KEYWORDS.ACCESS_KEY)) {
|
||||
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
|
||||
}
|
||||
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.TOKEN)) {
|
||||
return SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN
|
||||
if (lowerMessage.includes(SECRET_KEYWORDS.SECRET)) {
|
||||
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
|
||||
}
|
||||
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
|
||||
}
|
||||
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.PASSWORD)) {
|
||||
return SECRET_TYPE_NAMES.PASSWORD
|
||||
private extractGithubType(lowerMessage: string): string {
|
||||
if (lowerMessage.includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
|
||||
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
|
||||
}
|
||||
|
||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
|
||||
return SECRET_TYPE_NAMES.SECRET
|
||||
if (lowerMessage.includes(SECRET_KEYWORDS.OAUTH)) {
|
||||
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
|
||||
}
|
||||
return SECRET_TYPE_NAMES.GITHUB_TOKEN
|
||||
}
|
||||
|
||||
private extractSshType(lowerMessage: string): string {
|
||||
const sshTypeMap: [string, string][] = [
|
||||
[SECRET_KEYWORDS.RSA, SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY],
|
||||
[SECRET_KEYWORDS.DSA, SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY],
|
||||
[SECRET_KEYWORDS.ECDSA, SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY],
|
||||
[SECRET_KEYWORDS.ED25519, SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY],
|
||||
]
|
||||
for (const [keyword, typeName] of sshTypeMap) {
|
||||
if (lowerMessage.includes(keyword)) {
|
||||
return typeName
|
||||
}
|
||||
}
|
||||
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
|
||||
}
|
||||
|
||||
private extractSlackType(lowerMessage: string): string {
|
||||
if (lowerMessage.includes(SECRET_KEYWORDS.BOT)) {
|
||||
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
|
||||
}
|
||||
if (lowerMessage.includes(SECRET_KEYWORDS.USER)) {
|
||||
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
|
||||
}
|
||||
return SECRET_TYPE_NAMES.SLACK_TOKEN
|
||||
}
|
||||
|
||||
private extractByMessage(lowerMessage: string): string {
|
||||
const messageTypeMap: [string, string][] = [
|
||||
[SECRET_KEYWORDS.API_KEY, SECRET_TYPE_NAMES.API_KEY],
|
||||
[SECRET_KEYWORDS.TOKEN, SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN],
|
||||
[SECRET_KEYWORDS.PASSWORD, SECRET_TYPE_NAMES.PASSWORD],
|
||||
[SECRET_KEYWORDS.SECRET, SECRET_TYPE_NAMES.SECRET],
|
||||
]
|
||||
for (const [keyword, typeName] of messageTypeMap) {
|
||||
if (lowerMessage.includes(keyword)) {
|
||||
return typeName
|
||||
}
|
||||
}
|
||||
return SECRET_TYPE_NAMES.SENSITIVE_DATA
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,6 +6,13 @@ import { AstFunctionNameAnalyzer } from "./AstFunctionNameAnalyzer"
|
||||
import { AstInterfaceNameAnalyzer } from "./AstInterfaceNameAnalyzer"
|
||||
import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
|
||||
|
||||
type NodeAnalyzer = (
|
||||
node: Parser.SyntaxNode,
|
||||
layer: string,
|
||||
filePath: string,
|
||||
lines: string[],
|
||||
) => NamingViolation | null
|
||||
|
||||
/**
|
||||
* AST tree traverser for detecting naming convention violations
|
||||
*
|
||||
@@ -13,12 +20,16 @@ import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
|
||||
* to detect naming violations in classes, interfaces, functions, and variables.
|
||||
*/
|
||||
export class AstNamingTraverser {
|
||||
private readonly nodeHandlers: Map<string, NodeAnalyzer>
|
||||
|
||||
constructor(
|
||||
private readonly classAnalyzer: AstClassNameAnalyzer,
|
||||
private readonly interfaceAnalyzer: AstInterfaceNameAnalyzer,
|
||||
private readonly functionAnalyzer: AstFunctionNameAnalyzer,
|
||||
private readonly variableAnalyzer: AstVariableNameAnalyzer,
|
||||
) {}
|
||||
) {
|
||||
this.nodeHandlers = this.buildNodeHandlers()
|
||||
}
|
||||
|
||||
/**
|
||||
* Traverses the AST tree and collects naming violations
|
||||
@@ -38,6 +49,33 @@ export class AstNamingTraverser {
|
||||
return results
|
||||
}
|
||||
|
||||
private buildNodeHandlers(): Map<string, NodeAnalyzer> {
|
||||
const handlers = new Map<string, NodeAnalyzer>()
|
||||
|
||||
handlers.set(AST_CLASS_TYPES.CLASS_DECLARATION, (node, layer, filePath, lines) =>
|
||||
this.classAnalyzer.analyze(node, layer, filePath, lines),
|
||||
)
|
||||
handlers.set(AST_CLASS_TYPES.INTERFACE_DECLARATION, (node, layer, filePath, lines) =>
|
||||
this.interfaceAnalyzer.analyze(node, layer, filePath, lines),
|
||||
)
|
||||
|
||||
const functionHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
|
||||
this.functionAnalyzer.analyze(node, layer, filePath, lines)
|
||||
handlers.set(AST_FUNCTION_TYPES.FUNCTION_DECLARATION, functionHandler)
|
||||
handlers.set(AST_FUNCTION_TYPES.METHOD_DEFINITION, functionHandler)
|
||||
handlers.set(AST_FUNCTION_TYPES.FUNCTION_SIGNATURE, functionHandler)
|
||||
|
||||
const variableHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
|
||||
this.variableAnalyzer.analyze(node, layer, filePath, lines)
|
||||
handlers.set(AST_VARIABLE_TYPES.VARIABLE_DECLARATOR, variableHandler)
|
||||
handlers.set(AST_VARIABLE_TYPES.REQUIRED_PARAMETER, variableHandler)
|
||||
handlers.set(AST_VARIABLE_TYPES.OPTIONAL_PARAMETER, variableHandler)
|
||||
handlers.set(AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION, variableHandler)
|
||||
handlers.set(AST_VARIABLE_TYPES.PROPERTY_SIGNATURE, variableHandler)
|
||||
|
||||
return handlers
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively visits AST nodes
|
||||
*/
|
||||
@@ -49,34 +87,10 @@ export class AstNamingTraverser {
|
||||
results: NamingViolation[],
|
||||
): void {
|
||||
const node = cursor.currentNode
|
||||
const handler = this.nodeHandlers.get(node.type)
|
||||
|
||||
if (node.type === AST_CLASS_TYPES.CLASS_DECLARATION) {
|
||||
const violation = this.classAnalyzer.analyze(node, layer, filePath, lines)
|
||||
if (violation) {
|
||||
results.push(violation)
|
||||
}
|
||||
} else if (node.type === AST_CLASS_TYPES.INTERFACE_DECLARATION) {
|
||||
const violation = this.interfaceAnalyzer.analyze(node, layer, filePath, lines)
|
||||
if (violation) {
|
||||
results.push(violation)
|
||||
}
|
||||
} else if (
|
||||
node.type === AST_FUNCTION_TYPES.FUNCTION_DECLARATION ||
|
||||
node.type === AST_FUNCTION_TYPES.METHOD_DEFINITION ||
|
||||
node.type === AST_FUNCTION_TYPES.FUNCTION_SIGNATURE
|
||||
) {
|
||||
const violation = this.functionAnalyzer.analyze(node, layer, filePath, lines)
|
||||
if (violation) {
|
||||
results.push(violation)
|
||||
}
|
||||
} else if (
|
||||
node.type === AST_VARIABLE_TYPES.VARIABLE_DECLARATOR ||
|
||||
node.type === AST_VARIABLE_TYPES.REQUIRED_PARAMETER ||
|
||||
node.type === AST_VARIABLE_TYPES.OPTIONAL_PARAMETER ||
|
||||
node.type === AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION ||
|
||||
node.type === AST_VARIABLE_TYPES.PROPERTY_SIGNATURE
|
||||
) {
|
||||
const violation = this.variableAnalyzer.analyze(node, layer, filePath, lines)
|
||||
if (handler) {
|
||||
const violation = handler(node, layer, filePath, lines)
|
||||
if (violation) {
|
||||
results.push(violation)
|
||||
}
|
||||
|
||||
packages/ipuaro/ARCHITECTURE.md (new file, 566 lines)
@@ -0,0 +1,566 @@
|
||||
# ipuaro Architecture
|
||||
|
||||
This document describes the architecture, design decisions, and implementation details of ipuaro.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
- [Overview](#overview)
|
||||
- [Clean Architecture](#clean-architecture)
|
||||
- [Layer Details](#layer-details)
|
||||
- [Data Flow](#data-flow)
|
||||
- [Key Design Decisions](#key-design-decisions)
|
||||
- [Tech Stack](#tech-stack)
|
||||
- [Performance Considerations](#performance-considerations)
|
||||
|
||||
## Overview
|
||||
|
||||
ipuaro is a local AI agent for codebase operations built on Clean Architecture principles. It creates the feel of an "infinite" context window through lazy loading and AST-based code understanding.
|
||||
|
||||
### Core Concepts
|
||||
|
||||
1. **Lazy Loading**: Load code on-demand via tools, not all at once
|
||||
2. **AST-Based Understanding**: Parse and index code structure for fast lookups
|
||||
3. **100% Local**: Ollama LLM + Redis storage, no cloud dependencies
|
||||
4. **Session Persistence**: Resume conversations across restarts
|
||||
5. **Tool-Based Interface**: LLM accesses code through 18 specialized tools
|
||||
|
||||
## Clean Architecture
|
||||
|
||||
The project follows Clean Architecture with strict dependency rules:
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────┐
|
||||
│ TUI Layer │ ← Ink/React components
|
||||
│ (Framework) │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ CLI Layer │ ← Commander.js entry
|
||||
│ (Interface) │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ Infrastructure Layer │ ← External adapters
|
||||
│ (Storage, LLM, Indexer, Tools, Security) │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ Application Layer │ ← Use cases & DTOs
|
||||
│ (StartSession, HandleMessage, etc.) │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ Domain Layer │ ← Business logic
|
||||
│ (Entities, Value Objects, Service Interfaces) │
|
||||
└─────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
**Dependency Rule**: Outer layers depend on inner layers, never the reverse.
|
||||
|
||||
## Layer Details
|
||||
|
||||
### Domain Layer (Core Business Logic)
|
||||
|
||||
**Location**: `src/domain/`
|
||||
|
||||
**Responsibilities**:
|
||||
- Define business entities and value objects
|
||||
- Declare service interfaces (ports)
|
||||
- No external dependencies (pure TypeScript)
|
||||
|
||||
**Components**:
|
||||
|
||||
```
|
||||
domain/
|
||||
├── entities/
|
||||
│ ├── Session.ts # Session entity with history and stats
|
||||
│ └── Project.ts # Project entity with metadata
|
||||
├── value-objects/
|
||||
│ ├── FileData.ts # File content with hash and size
|
||||
│ ├── FileAST.ts # Parsed AST structure
|
||||
│ ├── FileMeta.ts # Complexity, dependencies, hub detection
|
||||
│ ├── ChatMessage.ts # Message with role, content, tool calls
|
||||
│ ├── ToolCall.ts # Tool invocation with parameters
|
||||
│ ├── ToolResult.ts # Tool execution result
|
||||
│ └── UndoEntry.ts # File change for undo stack
|
||||
├── services/
|
||||
│ ├── IStorage.ts # Storage interface (port)
|
||||
│ ├── ILLMClient.ts # LLM interface (port)
|
||||
│ ├── ITool.ts # Tool interface (port)
|
||||
│ └── IIndexer.ts # Indexer interface (port)
|
||||
└── constants/
|
||||
└── index.ts # Domain constants
|
||||
```
|
||||
|
||||
**Key Design**:
|
||||
- Value objects are immutable
|
||||
- Entities have identity and lifecycle
|
||||
- Interfaces define contracts, not implementations
|
||||
|
||||
### Application Layer (Use Cases)
|
||||
|
||||
**Location**: `src/application/`
|
||||
|
||||
**Responsibilities**:
|
||||
- Orchestrate domain logic
|
||||
- Implement use cases (application-specific business rules)
|
||||
- Define DTOs for data transfer
|
||||
- Coordinate between domain and infrastructure
|
||||
|
||||
**Components**:
|
||||
|
||||
```
|
||||
application/
|
||||
├── use-cases/
|
||||
│ ├── StartSession.ts # Initialize or load session
|
||||
│ ├── HandleMessage.ts # Main message orchestrator
|
||||
│ ├── IndexProject.ts # Project indexing workflow
|
||||
│ ├── ExecuteTool.ts # Tool execution with validation
|
||||
│ └── UndoChange.ts # Revert file changes
|
||||
├── dtos/
|
||||
│ ├── SessionDto.ts # Session data transfer object
|
||||
│ ├── MessageDto.ts # Message DTO
|
||||
│ └── ToolCallDto.ts # Tool call DTO
|
||||
├── mappers/
|
||||
│ └── SessionMapper.ts # Domain ↔ DTO conversion
|
||||
└── interfaces/
|
||||
└── IToolRegistry.ts # Tool registry interface
|
||||
```
|
||||
|
||||
**Key Use Cases**:
|
||||
|
||||
1. **StartSession**: Creates new session or loads latest
|
||||
2. **HandleMessage**: Main flow (LLM → Tools → Response)
|
||||
3. **IndexProject**: Scan → Parse → Analyze → Store
|
||||
4. **UndoChange**: Restore file from undo stack
|
||||
|
||||
### Infrastructure Layer (External Implementations)
|
||||
|
||||
**Location**: `src/infrastructure/`
|
||||
|
||||
**Responsibilities**:
|
||||
- Implement domain interfaces
|
||||
- Handle external systems (Redis, Ollama, filesystem)
|
||||
- Provide concrete tool implementations
|
||||
- Security and validation
|
||||
|
||||
**Components**:
|
||||
|
||||
```
|
||||
infrastructure/
|
||||
├── storage/
|
||||
│ ├── RedisClient.ts # Redis connection wrapper
|
||||
│ ├── RedisStorage.ts # IStorage implementation
|
||||
│ └── schema.ts # Redis key schema
|
||||
├── llm/
|
||||
│ ├── OllamaClient.ts # ILLMClient implementation
|
||||
│ ├── prompts.ts # System prompts
|
||||
│ └── ResponseParser.ts # Parse XML tool calls
|
||||
├── indexer/
|
||||
│ ├── FileScanner.ts # Recursive file scanning
|
||||
│ ├── ASTParser.ts # tree-sitter parsing
|
||||
│ ├── MetaAnalyzer.ts # Complexity and dependencies
|
||||
│ ├── IndexBuilder.ts # Symbol index + deps graph
|
||||
│ └── Watchdog.ts # File watching (chokidar)
|
||||
├── tools/ # 18 tool implementations
|
||||
│ ├── registry.ts
|
||||
│ ├── read/ # GetLines, GetFunction, GetClass, GetStructure
|
||||
│ ├── edit/ # EditLines, CreateFile, DeleteFile
|
||||
│ ├── search/ # FindReferences, FindDefinition
|
||||
│ ├── analysis/ # GetDependencies, GetDependents, GetComplexity, GetTodos
|
||||
│ ├── git/ # GitStatus, GitDiff, GitCommit
|
||||
│ └── run/ # RunCommand, RunTests
|
||||
└── security/
|
||||
├── Blacklist.ts # Dangerous commands
|
||||
├── Whitelist.ts # Safe commands
|
||||
└── PathValidator.ts # Path traversal prevention
|
||||
```
|
||||
|
||||
**Key Implementations**:
|
||||
|
||||
1. **RedisStorage**: Uses Redis hashes for files/AST/meta, lists for undo
|
||||
2. **OllamaClient**: HTTP API client with tool calling support
|
||||
3. **ASTParser**: tree-sitter for TS/JS/TSX/JSX parsing
|
||||
4. **ToolRegistry**: Manages tool lifecycle and execution
|
||||
|
||||
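The concrete key layout lives in `schema.ts` and is not shown in this diff. As a rough illustration only (key names such as `project:files` and `project:undo` are assumptions, not the actual schema), the hash-plus-list layout could look like this with `ioredis`:

```typescript
import Redis from "ioredis"

const redis = new Redis({ host: "localhost", port: 6379 })

// One hash per data kind, one field per file path (assumed layout, not the real schema.ts).
async function saveFile(project: string, path: string, content: string): Promise<void> {
    await redis.hset(`${project}:files`, path, content)
    await redis.hset(`${project}:meta`, path, JSON.stringify({ updatedAt: Date.now() }))
}

// Undo stack as a Redis list, trimmed to the last 10 entries.
async function pushUndo(project: string, entry: { path: string; before: string }): Promise<void> {
    await redis.lpush(`${project}:undo`, JSON.stringify(entry))
    await redis.ltrim(`${project}:undo`, 0, 9)
}
```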
### TUI Layer (Terminal UI)
|
||||
|
||||
**Location**: `src/tui/`
|
||||
|
||||
**Responsibilities**:
|
||||
- Render terminal UI with Ink (React for terminal)
|
||||
- Handle user input and hotkeys
|
||||
- Display chat history and status
|
||||
|
||||
**Components**:
|
||||
|
||||
```
|
||||
tui/
|
||||
├── App.tsx # Main app shell
|
||||
├── components/
|
||||
│ ├── StatusBar.tsx # Top status bar
|
||||
│ ├── Chat.tsx # Message history display
|
||||
│ ├── Input.tsx # User input with history
|
||||
│ ├── DiffView.tsx # Inline diff display
|
||||
│ ├── ConfirmDialog.tsx # Edit confirmation
|
||||
│ ├── ErrorDialog.tsx # Error handling
|
||||
│ └── Progress.tsx # Progress bar (indexing)
|
||||
└── hooks/
|
||||
├── useSession.ts # Session state management
|
||||
├── useHotkeys.ts # Keyboard shortcuts
|
||||
└── useCommands.ts # Slash command handling
|
||||
```
|
||||
|
||||
**Key Features**:
|
||||
|
||||
- Real-time status updates (context usage, session time)
|
||||
- Input history with ↑/↓ navigation
|
||||
- Hotkeys: Ctrl+C (interrupt), Ctrl+D (exit), Ctrl+Z (undo)
|
||||
- Diff preview for edits with confirmation
|
||||
- Error recovery with retry/skip/abort options
|
||||
|
||||
### CLI Layer (Entry Point)
|
||||
|
||||
**Location**: `src/cli/`
|
||||
|
||||
**Responsibilities**:
|
||||
- Command-line interface with Commander.js
|
||||
- Dependency injection and initialization
|
||||
- Onboarding checks (Redis, Ollama, model)
|
||||
|
||||
**Components**:
|
||||
|
||||
```
|
||||
cli/
|
||||
├── index.ts # Commander.js setup
|
||||
└── commands/
|
||||
├── start.ts # Start TUI (default command)
|
||||
├── init.ts # Create .ipuaro.json config
|
||||
└── index-cmd.ts # Index-only command
|
||||
```
|
||||
|
||||
**Commands**:
|
||||
|
||||
1. `ipuaro [path]` - Start TUI in directory
|
||||
2. `ipuaro init` - Create config file
|
||||
3. `ipuaro index` - Index without TUI
|
||||
|
||||
### Shared Module
|
||||
|
||||
**Location**: `src/shared/`
|
||||
|
||||
**Responsibilities**:
|
||||
- Cross-cutting concerns
|
||||
- Configuration management
|
||||
- Error handling
|
||||
- Utility functions
|
||||
|
||||
**Components**:
|
||||
|
||||
```
|
||||
shared/
|
||||
├── types/
|
||||
│ └── index.ts # Shared TypeScript types
|
||||
├── constants/
|
||||
│ ├── config.ts # Config schema and loader
|
||||
│ └── messages.ts # User-facing messages
|
||||
├── utils/
|
||||
│ ├── hash.ts # MD5 hashing
|
||||
│ └── tokens.ts # Token estimation
|
||||
└── errors/
|
||||
├── IpuaroError.ts # Custom error class
|
||||
└── ErrorHandler.ts # Error handling service
|
||||
```
|
||||
|
||||
## Data Flow
|
||||
|
||||
### 1. Startup Flow
|
||||
|
||||
```
|
||||
CLI Entry (bin/ipuaro.js)
|
||||
↓
|
||||
Commander.js parses arguments
|
||||
↓
|
||||
Onboarding checks (Redis, Ollama, Model)
|
||||
↓
|
||||
Initialize dependencies:
|
||||
- RedisClient connects
|
||||
- RedisStorage initialized
|
||||
- OllamaClient created
|
||||
- ToolRegistry with 18 tools
|
||||
↓
|
||||
StartSession use case:
|
||||
- Load latest session or create new
|
||||
- Initialize ContextManager
|
||||
↓
|
||||
Launch TUI (App.tsx)
|
||||
- Render StatusBar, Chat, Input
|
||||
- Set up hotkeys
|
||||
```
|
||||
|
||||
### 2. Message Flow
|
||||
|
||||
```
|
||||
User types message in Input.tsx
|
||||
↓
|
||||
useSession.handleMessage()
|
||||
↓
|
||||
HandleMessage use case:
|
||||
1. Add user message to history
|
||||
2. Build context (system prompt + structure + AST)
|
||||
3. Send to OllamaClient.chat()
|
||||
4. Parse tool calls from response
|
||||
5. For each tool call:
|
||||
- If requiresConfirmation: show ConfirmDialog
|
||||
- Execute tool via ToolRegistry
|
||||
- Collect results
|
||||
6. If tool results: goto step 3 (continue loop)
|
||||
7. Add assistant response to history
|
||||
8. Update session in Redis
|
||||
↓
|
||||
Display response in Chat.tsx
|
||||
```
|
||||
|
||||
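Steps 3-6 form a loop that keeps calling the LLM until it stops requesting tools. A minimal sketch of that loop follows; the interfaces below are simplified stand-ins, not the actual ILLMClient/IToolRegistry signatures:

```typescript
// Simplified shapes for illustration only.
interface ToolCall { name: string; params: Record<string, string> }
interface LLMReply { content: string; toolCalls: ToolCall[] }
interface LLM { chat(history: string[]): Promise<LLMReply> }
interface Tools { execute(call: ToolCall): Promise<string> }

async function handleMessage(llm: LLM, tools: Tools, history: string[], userText: string): Promise<string> {
    history.push(`user: ${userText}`)                   // step 1
    for (;;) {
        const reply = await llm.chat(history)           // step 3
        if (reply.toolCalls.length === 0) {
            history.push(`assistant: ${reply.content}`) // step 7
            return reply.content
        }
        for (const call of reply.toolCalls) {           // step 5
            const result = await tools.execute(call)
            history.push(`tool ${call.name}: ${result}`)
        }
        // step 6: loop back to the LLM with the tool results appended
    }
}
```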
### 3. Edit Flow
|
||||
|
||||
```
|
||||
LLM calls edit_lines tool
|
||||
↓
|
||||
ToolRegistry.execute()
|
||||
↓
|
||||
EditLinesTool.execute():
|
||||
1. Validate path (PathValidator)
|
||||
2. Check hash conflict
|
||||
3. Build diff
|
||||
↓
|
||||
ConfirmDialog shows diff
|
||||
↓
|
||||
User chooses:
|
||||
- Apply: Continue
|
||||
- Cancel: Return error to LLM
|
||||
- Edit: Manual edit (future)
|
||||
↓
|
||||
If Apply:
|
||||
1. Create UndoEntry
|
||||
2. Push to undo stack (Redis list)
|
||||
3. Write to filesystem
|
||||
4. Update RedisStorage (lines, hash, AST, meta)
|
||||
↓
|
||||
Return success to LLM
|
||||
```
|
||||
|
||||
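The hash check in step 2 guards against editing a file that changed on disk after it was indexed. A minimal sketch using the MD5 hashing mentioned for `shared/utils/hash.ts` (function names here are illustrative):

```typescript
import { createHash } from "node:crypto"

// MD5 of the current file content.
function md5(content: string): string {
    return createHash("md5").update(content).digest("hex")
}

// Conflict when the content on disk no longer matches the hash stored at index time.
function hasConflict(currentContent: string, indexedHash: string): boolean {
    return md5(currentContent) !== indexedHash
}
```

A detected conflict means the edit would overwrite changes the agent has not seen, so it should not be applied blindly.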
### 4. Indexing Flow
|
||||
|
||||
```
|
||||
FileScanner.scan()
|
||||
- Recursively walk directory
|
||||
- Filter via .gitignore + ignore patterns
|
||||
- Detect binary files (skip)
|
||||
↓
|
||||
For each file:
|
||||
ASTParser.parse()
|
||||
- tree-sitter parse
|
||||
- Extract imports, exports, functions, classes
|
||||
↓
|
||||
MetaAnalyzer.analyze()
|
||||
- Calculate complexity (LOC, nesting, cyclomatic)
|
||||
- Resolve dependencies (imports → file paths)
|
||||
- Detect hubs (>5 dependents)
|
||||
↓
|
||||
RedisStorage.setFile(), .setAST(), .setMeta()
|
||||
↓
|
||||
IndexBuilder.buildSymbolIndex()
|
||||
- Map symbol names → locations
|
||||
↓
|
||||
IndexBuilder.buildDepsGraph()
|
||||
- Build bidirectional import graph
|
||||
↓
|
||||
Store indexes in Redis
|
||||
↓
|
||||
Watchdog.start()
|
||||
- Watch for file changes
|
||||
- On change: Re-parse and update indexes
|
||||
```
|
||||
|
||||
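As a small sketch of the dependents/hub step above (the data shapes are assumptions; the ">5 dependents" threshold comes from the flow description):

```typescript
// Invert an imports map (file -> files it imports) into a dependents map.
function buildDependents(imports: Map<string, string[]>): Map<string, string[]> {
    const dependents = new Map<string, string[]>()
    for (const [file, deps] of imports) {
        for (const dep of deps) {
            const users = dependents.get(dep) ?? []
            users.push(file)
            dependents.set(dep, users)
        }
    }
    return dependents
}

// A "hub" is a file imported by more than `threshold` other files.
function findHubs(dependents: Map<string, string[]>, threshold = 5): string[] {
    return [...dependents.entries()]
        .filter(([, users]) => users.length > threshold)
        .map(([file]) => file)
}
```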
## Key Design Decisions
|
||||
|
||||
### 1. Why Redis?
|
||||
|
||||
**Pros**:
|
||||
- Fast in-memory access for frequent reads
|
||||
- AOF persistence (append-only file) for durability
|
||||
- Native support for hashes, lists, sets
|
||||
- Simple key-value model fits our needs
|
||||
- Excellent for session data
|
||||
|
||||
**Alternatives considered**:
|
||||
- SQLite: Slower, overkill for our use case
|
||||
- JSON files: No concurrent access, slow for large data
|
||||
- PostgreSQL: Too heavy, we don't need relational features
|
||||
|
||||
### 2. Why tree-sitter?
|
||||
|
||||
**Pros**:
|
||||
- Incremental parsing (fast re-parsing)
|
||||
- Error-tolerant (works with syntax errors)
|
||||
- Multi-language support
|
||||
- Used by GitHub, Neovim, Atom
|
||||
|
||||
**Alternatives considered**:
|
||||
- TypeScript Compiler API: TS-only, not error-tolerant
|
||||
- Babel: JS-focused, heavy dependencies
|
||||
- Regex: Fragile, inaccurate
|
||||
|
||||
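A minimal example of the kind of call the ASTParser makes (sketch only; node type and field names follow the tree-sitter TypeScript grammar, and the real parser also extracts imports, exports, and classes):

```typescript
import Parser from "tree-sitter"
import TypeScript from "tree-sitter-typescript"

const parser = new Parser()
parser.setLanguage(TypeScript.typescript)

const source = "export function login(user: string) { return user }"
const tree = parser.parse(source)

// Collect declared function names from the syntax tree.
const names = tree.rootNode
    .descendantsOfType("function_declaration")
    .map((node) => node.childForFieldName("name")?.text ?? "<anonymous>")

console.log(names) // ["login"]
```

tree-sitter also supports incremental re-parsing by passing the previous tree to `parser.parse()`, which fits the watch-and-reparse loop described later.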
### 3. Why Ollama?
|
||||
|
||||
**Pros**:
|
||||
- 100% local, no API keys
|
||||
- Easy installation (brew install ollama)
|
||||
- Good model selection (qwen2.5-coder, deepseek-coder)
|
||||
- Tool calling support
|
||||
|
||||
**Alternatives considered**:
|
||||
- OpenAI: Costs money, sends code to cloud
|
||||
- Anthropic Claude: Same concerns as OpenAI
|
||||
- llama.cpp: Lower level, requires more setup
|
||||
|
||||
Planned: Support for OpenAI/Anthropic in v1.2.0 as optional providers.
|
||||
|
||||
### 4. Why XML for Tool Calls?
|
||||
|
||||
**Pros**:
|
||||
- LLMs trained on XML (very common format)
|
||||
- Self-describing (parameter names in tags)
|
||||
- Easy to parse with regex
|
||||
- More reliable than JSON for smaller models
|
||||
|
||||
**Alternatives considered**:
|
||||
- JSON: Smaller models struggle with exact JSON syntax
|
||||
- Function calling API: Not all models support it
|
||||
|
||||
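For illustration, a response in this style and a regex-based extraction could look like the following (the actual tag names used by ResponseParser are not shown in this diff, so treat them as assumptions):

```typescript
// Hypothetical XML shape for a tool call embedded in an LLM response.
const response = `
I will read that function first.
<tool_call>
    <name>get_function</name>
    <param name="path">src/auth/service.ts</param>
    <param name="name">login</param>
</tool_call>`

// Minimal extraction sketch, not the real ResponseParser.
const calls = [...response.matchAll(/<tool_call>([\s\S]*?)<\/tool_call>/g)].map((match) => {
    const body = match[1]
    const name = /<name>(.*?)<\/name>/.exec(body)?.[1] ?? ""
    const params: Record<string, string> = {}
    for (const param of body.matchAll(/<param name="(.*?)">([\s\S]*?)<\/param>/g)) {
        params[param[1]] = param[2]
    }
    return { name, params }
})

console.log(calls)
// [{ name: "get_function", params: { path: "src/auth/service.ts", name: "login" } }]
```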
### 5. Why Clean Architecture?
|
||||
|
||||
**Pros**:
|
||||
- Testability (domain has no external dependencies)
|
||||
- Flexibility (easy to swap Redis for SQLite)
|
||||
- Maintainability (clear separation of concerns)
|
||||
- Scalability (layers can evolve independently)
|
||||
|
||||
**Cost**: More files and indirection, but worth it for long-term maintenance.
|
||||
|
||||
### 6. Why Lazy Loading Instead of RAG?
|
||||
|
||||
**RAG (Retrieval Augmented Generation)**:
|
||||
- Pre-computes embeddings
|
||||
- Searches embeddings for relevant chunks
|
||||
- Adds chunks to context
|
||||
|
||||
**Lazy Loading (our approach)**:
|
||||
- Agent requests specific code via tools
|
||||
- More precise control over what's loaded
|
||||
- Simpler implementation (no embeddings)
|
||||
- Works with any LLM (no embedding model needed)
|
||||
|
||||
**Trade-off**: RAG might be better for semantic search ("find error handling code"), but tool-based approach gives agent explicit control.
|
||||
|
||||
## Tech Stack
|
||||
|
||||
### Core Dependencies
|
||||
|
||||
| Package | Purpose | Why? |
|
||||
|---------|---------|------|
|
||||
| `ioredis` | Redis client | Most popular, excellent TypeScript support |
|
||||
| `ollama` | LLM client | Official SDK, simple API |
|
||||
| `tree-sitter` | AST parsing | Fast, error-tolerant, multi-language |
|
||||
| `tree-sitter-typescript` | TS/TSX parser | Official TypeScript grammar |
|
||||
| `tree-sitter-javascript` | JS/JSX parser | Official JavaScript grammar |
|
||||
| `ink` | Terminal UI | React for terminal, declarative |
|
||||
| `ink-text-input` | Input component | Maintained ink component |
|
||||
| `react` | UI framework | Required by Ink |
|
||||
| `simple-git` | Git operations | Simple API, well-tested |
|
||||
| `chokidar` | File watching | Cross-platform, reliable |
|
||||
| `commander` | CLI framework | Industry standard |
|
||||
| `zod` | Validation | Type-safe validation |
|
||||
| `globby` | File globbing | ESM-native, .gitignore support |
|
||||
|
||||
### Development Dependencies
|
||||
|
||||
| Package | Purpose |
|
||||
|---------|---------|
|
||||
| `vitest` | Testing framework |
|
||||
| `@vitest/coverage-v8` | Coverage reporting |
|
||||
| `@vitest/ui` | Interactive test UI |
|
||||
| `tsup` | TypeScript bundler |
|
||||
| `typescript` | Type checking |
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### 1. Indexing Performance
|
||||
|
||||
**Problem**: Large projects (10k+ files) take time to index.
|
||||
|
||||
**Optimizations**:
|
||||
- Incremental parsing with tree-sitter (only changed files)
|
||||
- Parallel parsing (planned for v1.1.0)
|
||||
- Ignore patterns (.gitignore, node_modules, dist)
|
||||
- Skip binary files early
|
||||
|
||||
**Current**: ~1000 files/second on M1 Mac
|
||||
|
||||
### 2. Memory Usage
|
||||
|
||||
**Problem**: Entire AST in memory could be 100s of MB.
|
||||
|
||||
**Optimizations**:
|
||||
- Store ASTs in Redis (out of Node.js heap)
|
||||
- Load ASTs on-demand from Redis
|
||||
- Lazy-load file content (not stored in session)
|
||||
|
||||
**Current**: ~200MB for 5000 files indexed
|
||||
|
||||
### 3. Context Window Management
|
||||
|
||||
**Problem**: 128k token context window fills up.
|
||||
|
||||
**Optimizations**:
|
||||
- Auto-compression at 80% usage
|
||||
- LLM summarizes old messages
|
||||
- Remove tool results older than 5 messages
|
||||
- Only load structure + metadata initially (~10k tokens)
|
||||
|
||||
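A sketch of the trigger; the characters/4 estimate is a common heuristic and is an assumption here, not necessarily what `shared/utils/tokens.ts` implements:

```typescript
// Rough token estimate: about 4 characters per token.
function estimateTokens(text: string): number {
    return Math.ceil(text.length / 4)
}

// Compression kicks in once estimated usage passes 80% of the context window.
function shouldCompress(messages: string[], contextWindow = 128_000, threshold = 0.8): boolean {
    const used = messages.reduce((sum, message) => sum + estimateTokens(message), 0)
    return used > contextWindow * threshold
}
```

When the check fires, old messages are summarized by the LLM and stale tool results are dropped, as listed above.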
### 4. Redis Performance
|
||||
|
||||
**Problem**: Redis is single-threaded.
|
||||
|
||||
**Optimizations**:
|
||||
- Pipeline commands where possible
|
||||
- Use hashes for related data (fewer keys)
|
||||
- AOF every second (not every command)
|
||||
- Keep undo stack limited (10 entries)
|
||||
|
||||
**Current**: <1ms latency for most operations
|
||||
|
||||
### 5. Tool Execution
|
||||
|
||||
**Problem**: Tool execution could block LLM.
|
||||
|
||||
**Current**: Synchronous execution (simpler)
|
||||
|
||||
**Future**: Async tool execution with progress callbacks (v1.1.0)
|
||||
|
||||
## Future Improvements
|
||||
|
||||
### v1.1.0 - Performance
|
||||
- Parallel AST parsing
|
||||
- Incremental indexing (only changed files)
|
||||
- Response caching
|
||||
- Stream LLM responses
|
||||
|
||||
### v1.2.0 - Features
|
||||
- Multiple file edits in one operation
|
||||
- Batch operations
|
||||
- Custom prompt templates
|
||||
- OpenAI/Anthropic provider support
|
||||
|
||||
### v1.3.0 - Extensibility
|
||||
- Plugin system for custom tools
|
||||
- LSP integration
|
||||
- Multi-language support (Python, Go, Rust)
|
||||
- Custom indexing rules
|
||||
|
||||
---
|
||||
|
||||
**Last Updated**: 2025-12-01
|
||||
**Version**: 0.16.0
|
||||
File diff suppressed because it is too large
@@ -7,9 +7,9 @@
|
||||
[](https://www.npmjs.com/package/@samiyev/ipuaro)
|
||||
[](https://opensource.org/licenses/MIT)
|
||||
|
||||
> **Status:** 🚧 Early Development (v0.1.0 Foundation)
|
||||
> **Status:** 🎉 Release Candidate (v0.16.0 → v1.0.0)
|
||||
>
|
||||
> Core infrastructure is ready. Active development in progress.
|
||||
> All core features complete. Production-ready release coming soon.
|
||||
|
||||
## Vision
|
||||
|
||||
@@ -19,18 +19,20 @@ Work with codebases of any size using local AI:
|
||||
- 🔒 **100% Local**: Your code never leaves your machine
|
||||
- ⚡ **Fast**: Redis persistence + tree-sitter parsing
|
||||
|
||||
## Planned Features
|
||||
## Features
|
||||
|
||||
### 18 LLM Tools
|
||||
### 18 LLM Tools (All Implemented ✅)
|
||||
|
||||
| Category | Tools | Status |
|
||||
|----------|-------|--------|
|
||||
| **Read** | `get_lines`, `get_function`, `get_class`, `get_structure` | 🔜 v0.5.0 |
|
||||
| **Edit** | `edit_lines`, `create_file`, `delete_file` | 🔜 v0.6.0 |
|
||||
| **Search** | `find_references`, `find_definition` | 🔜 v0.7.0 |
|
||||
| **Analysis** | `get_dependencies`, `get_dependents`, `get_complexity`, `get_todos` | 🔜 v0.8.0 |
|
||||
| **Git** | `git_status`, `git_diff`, `git_commit` | 🔜 v0.9.0 |
|
||||
| **Run** | `run_command`, `run_tests` | 🔜 v0.9.0 |
|
||||
| Category | Tools | Description |
|
||||
|----------|-------|-------------|
|
||||
| **Read** | `get_lines`, `get_function`, `get_class`, `get_structure` | Read code without loading everything into context |
|
||||
| **Edit** | `edit_lines`, `create_file`, `delete_file` | Make changes with confirmation and undo support |
|
||||
| **Search** | `find_references`, `find_definition` | Find symbol definitions and usages across codebase |
|
||||
| **Analysis** | `get_dependencies`, `get_dependents`, `get_complexity`, `get_todos` | Analyze code structure, complexity, and TODOs |
|
||||
| **Git** | `git_status`, `git_diff`, `git_commit` | Git operations with safety checks |
|
||||
| **Run** | `run_command`, `run_tests` | Execute commands and tests with security validation |
|
||||
|
||||
See [Tools Documentation](#tools-reference) below for detailed usage examples.
|
||||
|
||||
### Terminal UI
|
||||
|
||||
@@ -54,6 +56,31 @@ Work with codebases of any size using local AI:
|
||||
└───────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### Slash Commands
|
||||
|
||||
Control your session with built-in commands:
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `/help` | Show all commands and hotkeys |
|
||||
| `/clear` | Clear chat history (keeps session) |
|
||||
| `/undo` | Revert last file change from undo stack |
|
||||
| `/sessions [list\|load\|delete] [id]` | Manage sessions |
|
||||
| `/status` | Show system status (LLM, context, stats) |
|
||||
| `/reindex` | Force full project reindexation |
|
||||
| `/eval` | LLM self-check for hallucinations |
|
||||
| `/auto-apply [on\|off]` | Toggle auto-apply mode for edits |
|
||||
|
||||
### Hotkeys
|
||||
|
||||
| Hotkey | Action |
|
||||
|--------|--------|
|
||||
| `Ctrl+C` | Interrupt generation (1st press) / Exit (2nd press within 1s) |
|
||||
| `Ctrl+D` | Exit and save session |
|
||||
| `Ctrl+Z` | Undo last file change |
|
||||
| `↑` / `↓` | Navigate input history |
|
||||
| `Tab` | Path autocomplete (coming soon) |
|
||||
|
||||
### Key Capabilities
|
||||
|
||||
🔍 **Smart Code Understanding**
|
||||
@@ -124,6 +151,23 @@ ipuaro --model qwen2.5-coder:32b-instruct
|
||||
ipuaro --auto-apply
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
Try ipuaro with our demo project:
|
||||
|
||||
```bash
|
||||
# Navigate to demo project
|
||||
cd examples/demo-project
|
||||
|
||||
# Install dependencies
|
||||
npm install
|
||||
|
||||
# Start ipuaro
|
||||
npx @samiyev/ipuaro
|
||||
```
|
||||
|
||||
See [examples/demo-project](./examples/demo-project) for a detailed usage guide and example conversations.
|
||||
|
||||
## Commands
|
||||
|
||||
| Command | Description |
|
||||
@@ -181,49 +225,263 @@ Clean Architecture with clear separation:
|
||||
|
||||
## Development Status
|
||||
|
||||
### ✅ Completed (v0.1.0)
|
||||
### ✅ Completed (v0.1.0 - v0.16.0)
|
||||
|
||||
- [x] Project setup (tsup, vitest, ESM)
|
||||
- [x] Domain entities (Session, Project)
|
||||
- [x] Value objects (FileData, FileAST, ChatMessage, etc.)
|
||||
- [x] Service interfaces (IStorage, ILLMClient, ITool, IIndexer)
|
||||
- [x] Shared module (Config, Errors, Utils)
|
||||
- [x] CLI placeholder commands
|
||||
- [x] 91 unit tests, 100% coverage
|
||||
- [x] **v0.1.0 - v0.4.0**: Foundation (domain, storage, indexer, LLM integration)
|
||||
- [x] **v0.5.0 - v0.9.0**: All 18 tools implemented
|
||||
- [x] **v0.10.0**: Session management with undo support
|
||||
- [x] **v0.11.0 - v0.12.0**: Full TUI with all components
|
||||
- [x] **v0.13.0**: Security (PathValidator, command validation)
|
||||
- [x] **v0.14.0**: 8 slash commands
|
||||
- [x] **v0.15.0**: CLI entry point with onboarding
|
||||
- [x] **v0.16.0**: Comprehensive error handling system
|
||||
- [x] **1420 tests, 98% coverage**
|
||||
|
||||
### 🔜 Next Up
|
||||
### 🔜 v1.0.0 - Production Ready
|
||||
|
||||
- [ ] **v0.2.0** - Redis Storage
|
||||
- [ ] **v0.3.0** - Indexer (file scanning, AST parsing)
|
||||
- [ ] **v0.4.0** - LLM Integration (Ollama)
|
||||
- [ ] **v0.5.0-0.9.0** - Tools implementation
|
||||
- [ ] **v0.10.0** - Session management
|
||||
- [ ] **v0.11.0** - TUI
|
||||
- [ ] Performance optimizations
|
||||
- [ ] Complete documentation
|
||||
- [ ] Working examples
|
||||
|
||||
See [ROADMAP.md](./ROADMAP.md) for detailed development plan.
|
||||
See [ROADMAP.md](./ROADMAP.md) for detailed development plan and [CHANGELOG.md](./CHANGELOG.md) for release history.
|
||||
|
||||
## API (Coming Soon)
|
||||
## Tools Reference
|
||||
|
||||
The AI agent has access to 18 tools for working with your codebase. Here are the most commonly used ones:
|
||||
|
||||
### Read Tools
|
||||
|
||||
**`get_lines(path, start?, end?)`**
|
||||
Read specific lines from a file.
|
||||
|
||||
```
|
||||
You: Show me the authentication logic
|
||||
Assistant: [get_lines src/auth/service.ts 45 67]
|
||||
# Returns lines 45-67 with line numbers
|
||||
```
|
||||
|
||||
**`get_function(path, name)`**
|
||||
Get a specific function's source code and metadata.
|
||||
|
||||
```
|
||||
You: How does the login function work?
|
||||
Assistant: [get_function src/auth/service.ts login]
|
||||
# Returns function code, params, return type, and metadata
|
||||
```
|
||||
|
||||
**`get_class(path, name)`**
|
||||
Get a specific class's source code and metadata.
|
||||
|
||||
```
|
||||
You: Show me the UserService class
|
||||
Assistant: [get_class src/services/user.ts UserService]
|
||||
# Returns class code, methods, properties, and inheritance info
|
||||
```
|
||||
|
||||
**`get_structure(path?, depth?)`**
|
||||
Get directory tree structure.
|
||||
|
||||
```
|
||||
You: What's in the src/auth directory?
|
||||
Assistant: [get_structure src/auth]
|
||||
# Returns ASCII tree with files and folders
|
||||
```
|
||||
|
||||
### Edit Tools
|
||||
|
||||
**`edit_lines(path, start, end, content)`**
|
||||
Replace lines in a file (requires confirmation).
|
||||
|
||||
```
|
||||
You: Update the timeout to 5000ms
|
||||
Assistant: [edit_lines src/config.ts 23 23 " timeout: 5000,"]
|
||||
# Shows diff, asks for confirmation
|
||||
```
|
||||
|
||||
**`create_file(path, content)`**
|
||||
Create a new file (requires confirmation).
|
||||
|
||||
```
|
||||
You: Create a new utility for date formatting
|
||||
Assistant: [create_file src/utils/date.ts "export function formatDate..."]
|
||||
# Creates file after confirmation
|
||||
```
|
||||
|
||||
**`delete_file(path)`**
|
||||
Delete a file (requires confirmation).
|
||||
|
||||
```
|
||||
You: Remove the old test file
|
||||
Assistant: [delete_file tests/old-test.test.ts]
|
||||
# Deletes after confirmation
|
||||
```
|
||||
|
||||
### Search Tools
|
||||
|
||||
**`find_references(symbol, path?)`**
|
||||
Find all usages of a symbol across the codebase.
|
||||
|
||||
```
|
||||
You: Where is getUserById used?
|
||||
Assistant: [find_references getUserById]
|
||||
# Returns all files/lines where it's called
|
||||
```
|
||||
|
||||
**`find_definition(symbol)`**
|
||||
Find where a symbol is defined.
|
||||
|
||||
```
|
||||
You: Where is ApiClient defined?
|
||||
Assistant: [find_definition ApiClient]
|
||||
# Returns file, line, and context
|
||||
```
|
||||
|
||||
### Analysis Tools
|
||||
|
||||
**`get_dependencies(path)`**
|
||||
Get files that a specific file imports.
|
||||
|
||||
```
|
||||
You: What does auth.ts depend on?
|
||||
Assistant: [get_dependencies src/auth/service.ts]
|
||||
# Returns list of imported files
|
||||
```
|
||||
|
||||
**`get_dependents(path)`**
|
||||
Get files that import a specific file.
|
||||
|
||||
```
|
||||
You: What files use the database module?
|
||||
Assistant: [get_dependents src/db/index.ts]
|
||||
# Returns list of files importing this
|
||||
```
|
||||
|
||||
**`get_complexity(path?, limit?)`**
|
||||
Get complexity metrics for files.
|
||||
|
||||
```
|
||||
You: Which files are most complex?
|
||||
Assistant: [get_complexity null 10]
|
||||
# Returns top 10 most complex files with metrics
|
||||
```
|
||||
|
||||
**`get_todos(path?, type?)`**
|
||||
Find TODO/FIXME/HACK comments.
|
||||
|
||||
```
|
||||
You: What TODOs are there?
|
||||
Assistant: [get_todos]
|
||||
# Returns all TODO comments with locations
|
||||
```
|
||||
|
||||
### Git Tools
|
||||
|
||||
**`git_status()`**
|
||||
Get current git repository status.
|
||||
|
||||
```
|
||||
You: What files have changed?
|
||||
Assistant: [git_status]
|
||||
# Returns branch, staged, modified, untracked files
|
||||
```
|
||||
|
||||
**`git_diff(path?, staged?)`**
|
||||
Get uncommitted changes.
|
||||
|
||||
```
|
||||
You: Show me what changed in auth.ts
|
||||
Assistant: [git_diff src/auth/service.ts]
|
||||
# Returns diff output
|
||||
```
|
||||
|
||||
**`git_commit(message, files?)`**
|
||||
Create a git commit (requires confirmation).
|
||||
|
||||
```
|
||||
You: Commit these auth changes
|
||||
Assistant: [git_commit "feat: add password reset flow" ["src/auth/service.ts"]]
|
||||
# Creates commit after confirmation
|
||||
```
|
||||
|
||||
### Run Tools
|
||||
|
||||
**`run_command(command, timeout?)`**
|
||||
Execute shell commands (with security validation).
|
||||
|
||||
```
|
||||
You: Run the build
|
||||
Assistant: [run_command "npm run build"]
|
||||
# Checks security, then executes
|
||||
```
|
||||
|
||||
**`run_tests(path?, filter?, watch?)`**
|
||||
Run project tests.
|
||||
|
||||
```
|
||||
You: Test the auth module
|
||||
Assistant: [run_tests "tests/auth" null false]
|
||||
# Auto-detects test runner and executes
|
||||
```
|
||||
|
||||
For complete tool documentation with all parameters and options, see [TOOLS.md](./TOOLS.md).
|
||||
|
||||
## Programmatic API
|
||||
|
||||
You can use ipuaro as a library in your own Node.js applications:
|
||||
|
||||
```typescript
|
||||
import { startSession, handleMessage } from "@samiyev/ipuaro"
|
||||
import {
|
||||
createRedisClient,
|
||||
RedisStorage,
|
||||
OllamaClient,
|
||||
ToolRegistry,
|
||||
StartSession,
|
||||
HandleMessage
|
||||
} from "@samiyev/ipuaro"
|
||||
|
||||
// Initialize dependencies
|
||||
const redis = await createRedisClient({ host: "localhost", port: 6379 })
|
||||
const storage = new RedisStorage(redis, "my-project")
|
||||
const llm = new OllamaClient({
|
||||
model: "qwen2.5-coder:7b-instruct",
|
||||
contextWindow: 128000,
|
||||
temperature: 0.1
|
||||
})
|
||||
const tools = new ToolRegistry()
|
||||
|
||||
// Register tools
|
||||
tools.register(new GetLinesTool(storage, "/path/to/project"))
|
||||
// ... register other tools
|
||||
|
||||
// Start a session
|
||||
const session = await startSession({
|
||||
projectPath: "./my-project",
|
||||
model: "qwen2.5-coder:7b-instruct"
|
||||
})
|
||||
const startSession = new StartSession(storage)
|
||||
const session = await startSession.execute("my-project")
|
||||
|
||||
// Send a message
|
||||
const response = await handleMessage(session, "Explain the auth flow")
|
||||
// Handle a message
|
||||
const handleMessage = new HandleMessage(storage, llm, tools)
|
||||
await handleMessage.execute(session, "Show me the auth flow")
|
||||
|
||||
console.log(response.content)
|
||||
console.log(`Tokens: ${response.stats.tokens}`)
|
||||
console.log(`Tool calls: ${response.stats.toolCalls}`)
|
||||
// Session is automatically updated in Redis
|
||||
```
|
||||
|
||||
For full API documentation, see the TypeScript definitions in `src/` or explore the [source code](./src/).
|
||||
|
||||
## How It Works
|
||||
|
||||
### Lazy Loading Context
|
||||
### 1. Project Indexing
|
||||
|
||||
When you start ipuaro, it scans your project and builds an index:
|
||||
|
||||
```
|
||||
1. File Scanner → Recursively scans files (.ts, .js, .tsx, .jsx)
|
||||
2. AST Parser → Parses with tree-sitter (extracts functions, classes, imports)
|
||||
3. Meta Analyzer → Calculates complexity, dependencies, hub detection
|
||||
4. Index Builder → Creates symbol index and dependency graph
|
||||
5. Redis Storage → Persists everything for instant startup next time
|
||||
6. Watchdog → Watches files for changes and updates index in background
|
||||
```
|
||||
|
||||
### 2. Lazy Loading Context
|
||||
|
||||
Instead of loading entire codebase into context:
|
||||
|
||||
@@ -232,24 +490,161 @@ Traditional approach:
|
||||
├── Load all files → 500k tokens → ❌ Exceeds context window
|
||||
|
||||
ipuaro approach:
|
||||
├── Load project structure → 2k tokens
|
||||
├── Load AST metadata → 10k tokens
|
||||
├── On demand: get_function("auth.ts", "login") → 200 tokens
|
||||
├── Total: ~12k tokens → ✅ Fits in context
|
||||
├── Load project structure → ~2k tokens
|
||||
├── Load AST metadata → ~10k tokens
|
||||
├── On demand: get_function("auth.ts", "login") → ~200 tokens
|
||||
├── Total: ~12k tokens → ✅ Fits in 128k context window
|
||||
```
|
||||
|
||||
### Tool-Based Code Access
|
||||
Context automatically compresses when usage exceeds 80% by summarizing old messages.
|
||||
|
||||
### 3. Tool-Based Code Access
|
||||
|
||||
The LLM doesn't see your code initially. It only sees structure and metadata. When it needs code, it uses tools:
|
||||
|
||||
```
|
||||
User: "How does user creation work?"
|
||||
You: "How does user creation work?"
|
||||
|
||||
ipuaro:
|
||||
1. [get_structure src/] → sees user/ folder
|
||||
2. [get_function src/user/service.ts createUser] → gets function code
|
||||
Agent reasoning:
|
||||
1. [get_structure src/] → sees user/ folder exists
|
||||
2. [get_function src/user/service.ts createUser] → loads specific function
|
||||
3. [find_references createUser] → finds all usages
|
||||
4. Synthesizes answer with specific code context
|
||||
4. Synthesizes answer with only relevant code loaded
|
||||
|
||||
Total tokens used: ~2k (vs loading entire src/ which could be 50k+)
|
||||
```
|
||||
|
||||
### 4. Session Persistence
|
||||
|
||||
Everything is saved to Redis:
|
||||
- Chat history and context state
|
||||
- Undo stack (last 10 file changes)
|
||||
- Session metadata and statistics
|
||||
|
||||
Resume your session anytime with `/sessions load <id>`.
|
||||
|
||||
### 5. Security Model
|
||||
|
||||
Three-layer security:
|
||||
1. **Blacklist**: Dangerous commands always blocked (rm -rf, sudo, etc.)
|
||||
2. **Whitelist**: Safe commands auto-approved (npm, git status, etc.)
|
||||
3. **Confirmation**: Unknown commands require user approval
|
||||
|
||||
File operations are restricted to project directory only (path traversal prevention).
|
||||
|
||||
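A minimal sketch of that three-layer decision (the patterns below are illustrative, not the actual Blacklist/Whitelist entries in `src/infrastructure/security/`):

```typescript
type Verdict = "block" | "allow" | "confirm"

// Illustrative entries only.
const BLACKLIST = [/\brm\s+-rf\b/, /\bsudo\b/]
const WHITELIST = [/^npm\b/, /^git\s+status\b/]

function checkCommand(command: string): Verdict {
    if (BLACKLIST.some((pattern) => pattern.test(command))) return "block"
    if (WHITELIST.some((pattern) => pattern.test(command))) return "allow"
    return "confirm" // unknown commands go to the user for approval
}

console.log(checkCommand("git status"))      // "allow"
console.log(checkCommand("rm -rf /"))        // "block"
console.log(checkCommand("terraform apply")) // "confirm"
```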
## Troubleshooting
|
||||
|
||||
### Redis Connection Errors
|
||||
|
||||
**Error**: `Redis connection failed`
|
||||
|
||||
**Solutions**:
|
||||
```bash
|
||||
# Check if Redis is running
|
||||
redis-cli ping # Should return "PONG"
|
||||
|
||||
# Start Redis with AOF persistence
|
||||
redis-server --appendonly yes
|
||||
|
||||
# Check Redis logs
|
||||
tail -f /usr/local/var/log/redis.log # macOS
|
||||
```
|
||||
|
||||
### Ollama Model Not Found
|
||||
|
||||
**Error**: `Model qwen2.5-coder:7b-instruct not found`
|
||||
|
||||
**Solutions**:
|
||||
```bash
|
||||
# Pull the model
|
||||
ollama pull qwen2.5-coder:7b-instruct
|
||||
|
||||
# List installed models
|
||||
ollama list
|
||||
|
||||
# Check Ollama is running
|
||||
ollama serve
|
||||
```
|
||||
|
||||
### Large Project Performance
|
||||
|
||||
**Issue**: Indexing takes too long or uses too much memory
|
||||
|
||||
**Solutions**:
|
||||
```bash
|
||||
# Index only a subdirectory
|
||||
ipuaro ./src
|
||||
|
||||
# Add more ignore patterns to .ipuaro.json
|
||||
{
|
||||
"project": {
|
||||
"ignorePatterns": ["node_modules", "dist", ".git", "coverage", "build"]
|
||||
}
|
||||
}
|
||||
|
||||
# Increase Node.js memory limit
|
||||
NODE_OPTIONS="--max-old-space-size=4096" ipuaro
|
||||
```
|
||||
|
||||
### Context Window Exceeded
|
||||
|
||||
**Issue**: `Context window exceeded` errors
|
||||
|
||||
**Solutions**:
|
||||
- Context auto-compresses at 80%, but you can manually `/clear` history
|
||||
- Use more targeted questions instead of asking about entire codebase
|
||||
- The agent will automatically use tools to load only what's needed
|
||||
|
||||
### File Changes Not Detected
|
||||
|
||||
**Issue**: Made changes but agent doesn't see them
|
||||
|
||||
**Solutions**:
|
||||
```bash
|
||||
# Force reindex
|
||||
/reindex
|
||||
|
||||
# Or restart with fresh index
|
||||
rm -rf ~/.ipuaro/cache
|
||||
ipuaro
|
||||
```
|
||||
|
||||
### Undo Not Working
|
||||
|
||||
**Issue**: `/undo` says no changes to undo
|
||||
|
||||
**Explanation**: Undo stack only tracks the last 10 file edits made through ipuaro. Manual file edits outside ipuaro cannot be undone.
|
||||
|
||||
## FAQ
|
||||
|
||||
**Q: Does ipuaro send my code to any external servers?**
|
||||
|
||||
A: No. Everything runs locally. Ollama runs on your machine, Redis stores data locally, and no network requests are made except to your local Ollama instance.
|
||||
|
||||
**Q: What languages are supported?**
|
||||
|
||||
A: Currently TypeScript, JavaScript (including TSX/JSX). More languages planned for future versions.
|
||||
|
||||
**Q: Can I use OpenAI/Anthropic/other LLM providers?**
|
||||
|
||||
A: Currently only Ollama is supported. OpenAI/Anthropic support is planned for v1.2.0.
|
||||
|
||||
**Q: How much disk space does Redis use?**
|
||||
|
||||
A: Depends on project size. A typical mid-size project (1000 files) uses ~50-100MB. Redis uses AOF persistence, so data survives restarts.
|
||||
|
||||
**Q: Can I use ipuaro in a CI/CD pipeline?**
|
||||
|
||||
A: Yes, but it's designed for interactive use. For automated code analysis, consider the programmatic API.
|
||||
|
||||
**Q: What's the difference between ipuaro and GitHub Copilot?**
|
||||
|
||||
A: Copilot is an autocomplete tool. ipuaro is a conversational agent that can read, analyze, modify files, run commands, and has full codebase understanding through AST parsing.
|
||||
|
||||
**Q: Why Redis instead of SQLite or JSON files?**
|
||||
|
||||
A: Redis provides fast in-memory access, AOF persistence, and handles concurrent access well. The session model fits Redis's data structures perfectly.
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions welcome! This project is in early development.
|
||||
|
||||
@@ -148,9 +148,10 @@ packages/ipuaro/
|
||||
|
||||
---
|
||||
|
||||
## Version 0.1.0 - Foundation ⚙️
|
||||
## Version 0.1.0 - Foundation ⚙️ ✅
|
||||
|
||||
**Priority:** CRITICAL
|
||||
**Status:** Complete (v0.1.0 released)
|
||||
|
||||
### 0.1.1 - Project Setup
|
||||
|
||||
@@ -310,9 +311,10 @@ interface Config {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.2.0 - Redis Storage 🗄️
|
||||
## Version 0.2.0 - Redis Storage 🗄️ ✅
|
||||
|
||||
**Priority:** CRITICAL
|
||||
**Status:** Complete (v0.2.0 released)
|
||||
|
||||
### 0.2.1 - Redis Client
|
||||
|
||||
@@ -367,9 +369,10 @@ class RedisStorage implements IStorage {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.3.0 - Indexer 📂
|
||||
## Version 0.3.0 - Indexer 📂 ✅
|
||||
|
||||
**Priority:** CRITICAL
|
||||
**Status:** Complete (v0.3.0, v0.3.1 released)
|
||||
|
||||
### 0.3.1 - File Scanner
|
||||
|
||||
@@ -456,9 +459,10 @@ class Watchdog {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.4.0 - LLM Integration 🤖
|
||||
## Version 0.4.0 - LLM Integration 🤖 ✅
|
||||
|
||||
**Priority:** CRITICAL
|
||||
**Status:** Complete (v0.4.0 released)
|
||||
|
||||
### 0.4.1 - Ollama Client
|
||||
|
||||
@@ -531,9 +535,10 @@ function parseToolCalls(response: string): ToolCall[]
|
||||
|
||||
---
|
||||
|
||||
## Version 0.5.0 - Read Tools 📖
|
||||
## Version 0.5.0 - Read Tools 📖 ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.5.0 released)
|
||||
|
||||
4 tools for reading code without modification.
|
||||
|
||||
@@ -609,9 +614,10 @@ class GetStructureTool implements ITool {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.6.0 - Edit Tools ✏️
|
||||
## Version 0.6.0 - Edit Tools ✏️ ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.6.0 released)
|
||||
|
||||
3 tools for file modifications. All require confirmation (unless autoApply).
|
||||
|
||||
@@ -662,9 +668,10 @@ class DeleteFileTool implements ITool {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.7.0 - Search Tools 🔍
|
||||
## Version 0.7.0 - Search Tools 🔍 ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.7.0 released)
|
||||
|
||||
### 0.7.1 - find_references
|
||||
|
||||
@@ -699,9 +706,10 @@ class FindDefinitionTool implements ITool {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.8.0 - Analysis Tools 📊
|
||||
## Version 0.8.0 - Analysis Tools 📊 ✅
|
||||
|
||||
**Priority:** MEDIUM
|
||||
**Status:** Complete (v0.8.0 released)
|
||||
|
||||
### 0.8.1 - get_dependencies
|
||||
|
||||
@@ -742,9 +750,10 @@ class FindDefinitionTool implements ITool {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.9.0 - Git & Run Tools 🚀
|
||||
## Version 0.9.0 - Git & Run Tools 🚀 ✅
|
||||
|
||||
**Priority:** MEDIUM
|
||||
**Status:** Complete (v0.9.0 released) — includes CommandSecurity (Blacklist/Whitelist)
|
||||
|
||||
### 0.9.1 - git_status
|
||||
|
||||
@@ -798,9 +807,10 @@ class FindDefinitionTool implements ITool {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.10.0 - Session Management 💾
|
||||
## Version 0.10.0 - Session Management 💾 ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.10.0 released) — includes HandleMessage orchestrator (originally planned for 0.14.0)
|
||||
|
||||
### 0.10.1 - Session Entity
|
||||
|
||||
@@ -873,9 +883,10 @@ class ContextManager {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.11.0 - TUI Basic 🖥️
|
||||
## Version 0.11.0 - TUI Basic 🖥️ ✅
|
||||
|
||||
**Priority:** CRITICAL
|
||||
**Status:** Complete (v0.11.0 released) — includes useHotkeys (originally planned for 0.16.0)
|
||||
|
||||
### 0.11.1 - App Shell
|
||||
|
||||
@@ -945,9 +956,10 @@ interface Props {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.12.0 - TUI Advanced 🎨
|
||||
## Version 0.12.0 - TUI Advanced 🎨 ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.12.0 released)
|
||||
|
||||
### 0.12.1 - DiffView
|
||||
|
||||
@@ -1009,9 +1021,10 @@ interface Props {
|
||||
|
||||
---
|
||||
|
||||
## Version 0.13.0 - Security 🔒
|
||||
## Version 0.13.0 - Security 🔒 ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.13.0 released) — Blacklist/Whitelist done in v0.9.0, PathValidator in v0.13.0
|
||||
|
||||
### 0.13.1 - Blacklist
|
||||
|
||||
@@ -1055,11 +1068,14 @@ function validatePath(path: string, projectRoot: string): boolean
|
||||
|
||||
---
|
||||
|
||||
## Version 0.14.0 - Orchestrator 🎭
|
||||
## [DONE] Original 0.14.0 - Orchestrator 🎭 ✅
|
||||
|
||||
**Priority:** CRITICAL
|
||||
> **Note:** This was implemented in v0.10.0 as part of Session Management
|
||||
|
||||
### 0.14.1 - HandleMessage Use Case
|
||||
<details>
|
||||
<summary>Originally planned (click to expand)</summary>
|
||||
|
||||
### HandleMessage Use Case (Done in v0.10.5)
|
||||
|
||||
```typescript
|
||||
// src/application/use-cases/HandleMessage.ts
|
||||
@@ -1091,7 +1107,7 @@ class HandleMessage {
|
||||
}
|
||||
```
|
||||
|
||||
### 0.14.2 - Edit Flow
|
||||
### Edit Flow (Done in v0.10.5)
|
||||
|
||||
```typescript
|
||||
// Edit handling inside HandleMessage:
|
||||
@@ -1104,17 +1120,49 @@ class HandleMessage {
|
||||
// - Update storage (lines, AST, meta)
|
||||
```
|
||||
|
||||
**Tests:**
|
||||
- [ ] Unit tests for HandleMessage
|
||||
- [ ] E2E tests for full message flow
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
## Version 0.15.0 - Commands 📝
|
||||
## [DONE] Original 0.16.0 - Hotkeys & Polish ⌨️ ✅
|
||||
|
||||
**Priority:** MEDIUM
|
||||
> **Note:** useHotkeys done in v0.11.0, ContextManager auto-compression in v0.10.3
|
||||
|
||||
7 slash commands for TUI.
|
||||
<details>
|
||||
<summary>Originally planned (click to expand)</summary>
|
||||
|
||||
### Hotkeys (Done in v0.11.0)
|
||||
|
||||
```typescript
|
||||
// src/tui/hooks/useHotkeys.ts
|
||||
|
||||
Ctrl+C // Interrupt generation (1st), exit (2nd)
|
||||
Ctrl+D // Exit with session save
|
||||
Ctrl+Z // Undo (= /undo)
|
||||
↑/↓ // Input history
|
||||
Tab // Path autocomplete
|
||||
```
|
||||
|
||||
### Auto-compression (Done in v0.10.3)
|
||||
|
||||
```typescript
|
||||
// Triggered at >80% context:
|
||||
// 1. LLM summarizes old messages
|
||||
// 2. Remove tool results older than 5 messages
|
||||
// 3. Update status bar (ctx% changes)
|
||||
// No modal notification - silent
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
## Version 0.14.0 - Commands 📝 ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.14.0 released)
|
||||
|
||||
8 slash commands for TUI.
|
||||
|
||||
```typescript
|
||||
// src/tui/hooks/useCommands.ts
|
||||
@@ -1130,47 +1178,16 @@ class HandleMessage {
|
||||
```
|
||||
|
||||
**Tests:**
|
||||
- [ ] Unit tests for command handlers
|
||||
- [x] Unit tests for command handlers (38 tests)
|
||||
|
||||
---
|
||||
|
||||
## Version 0.16.0 - Hotkeys & Polish ⌨️
|
||||
|
||||
**Priority:** MEDIUM
|
||||
|
||||
### 0.16.1 - Hotkeys
|
||||
|
||||
```typescript
|
||||
// src/tui/hooks/useHotkeys.ts
|
||||
|
||||
Ctrl+C // Interrupt generation (1st), exit (2nd)
|
||||
Ctrl+D // Exit with session save
|
||||
Ctrl+Z // Undo (= /undo)
|
||||
↑/↓ // Input history
|
||||
Tab // Path autocomplete
|
||||
```
|
||||
|
||||
### 0.16.2 - Auto-compression
|
||||
|
||||
```typescript
|
||||
// Triggered at >80% context:
|
||||
// 1. LLM summarizes old messages
|
||||
// 2. Remove tool results older than 5 messages
|
||||
// 3. Update status bar (ctx% changes)
|
||||
// No modal notification - silent
|
||||
```
|
||||
|
||||
**Tests:**
|
||||
- [ ] Integration tests for hotkeys
|
||||
- [ ] Unit tests for compression
|
||||
|
||||
---
|
||||
|
||||
## Version 0.17.0 - CLI Entry Point 🚪
|
||||
## Version 0.15.0 - CLI Entry Point 🚪 ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.15.0 released)
|
||||
|
||||
### 0.17.1 - CLI Commands
|
||||
### 0.15.1 - CLI Commands
|
||||
|
||||
```typescript
|
||||
// src/cli/index.ts
|
||||
@@ -1180,7 +1197,7 @@ ipuaro init // Create .ipuaro.json config
|
||||
ipuaro index // Index only (no TUI)
|
||||
```
|
||||
|
||||
### 0.17.2 - CLI Options
|
||||
### 0.15.2 - CLI Options
|
||||
|
||||
```bash
|
||||
--auto-apply # Enable auto-apply mode
|
||||
@@ -1189,7 +1206,7 @@ ipuaro index // Index only (no TUI)
|
||||
--version # Show version
|
||||
```
|
||||
|
||||
### 0.17.3 - Onboarding
|
||||
### 0.15.3 - Onboarding
|
||||
|
||||
```typescript
|
||||
// src/cli/commands/start.ts
|
||||
@@ -1202,40 +1219,576 @@ ipuaro index // Index only (no TUI)
|
||||
```
|
||||
|
||||
**Tests:**
|
||||
- [ ] E2E tests for CLI
|
||||
- [x] Unit tests for CLI commands (29 tests)
|
||||
|
||||
---
|
||||
|
||||
## Version 0.18.0 - Error Handling ⚠️
|
||||
## Version 0.16.0 - Error Handling ⚠️ ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.16.0 released)
|
||||
|
||||
### 0.18.1 - Error Types
|
||||
### 0.16.1 - Error Types ✅
|
||||
|
||||
```typescript
|
||||
// src/shared/errors/IpuaroError.ts
|
||||
type ErrorType = "redis" | "parse" | "llm" | "file" | "command" | "conflict"
|
||||
type ErrorType = "redis" | "parse" | "llm" | "file" | "command" | "conflict" | "validation" | "timeout" | "unknown"
|
||||
type ErrorOption = "retry" | "skip" | "abort" | "confirm" | "regenerate"
|
||||
|
||||
interface ErrorMeta {
|
||||
type: ErrorType
|
||||
recoverable: boolean
|
||||
options: ErrorOption[]
|
||||
defaultOption: ErrorOption
|
||||
}
|
||||
|
||||
class IpuaroError extends Error {
|
||||
type: ErrorType
|
||||
recoverable: boolean
|
||||
suggestion?: string
|
||||
options: ErrorOption[]
|
||||
defaultOption: ErrorOption
|
||||
context?: Record<string, unknown>
|
||||
|
||||
getMeta(): ErrorMeta
|
||||
hasOption(option: ErrorOption): boolean
|
||||
toDisplayString(): string
|
||||
}
|
||||
```
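For illustration, a minimal sketch of how the TUI layer could branch on this metadata; only the fields and methods above are from the roadmap, while `tool`, `params`, and `context` are assumed to exist in the caller:

```typescript
// Sketch: consuming an IpuaroError thrown by a tool (caller variables assumed).
try {
    await tool.execute(params, context)
} catch (err) {
    if (err instanceof IpuaroError && err.recoverable) {
        const { options, defaultOption } = err.getMeta()
        // Render err.toDisplayString() and offer `options`, preselecting `defaultOption`.
    } else {
        throw err
    }
}
```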
|
||||
|
||||
### 0.18.2 - Error Handling Matrix
|
||||
### 0.16.2 - Error Handling Matrix ✅
|
||||
|
||||
| Error | Recoverable | Options |
|
||||
|-------|-------------|---------|
|
||||
| Redis unavailable | No | Retry / Abort |
|
||||
| AST parse failed | Yes | Skip file / Abort |
|
||||
| LLM timeout | Yes | Retry / Skip / Abort |
|
||||
| File not found | Yes | Skip / Abort |
|
||||
| Command not in whitelist | Yes | Confirm / Skip / Abort |
|
||||
| Edit conflict | Yes | Apply / Skip / Regenerate |
|
||||
| Error | Recoverable | Options | Default |
|
||||
|-------|-------------|---------|---------|
|
||||
| Redis unavailable | No | Retry / Abort | Abort |
|
||||
| AST parse failed | Yes | Skip / Abort | Skip |
|
||||
| LLM timeout | Yes | Retry / Skip / Abort | Retry |
|
||||
| File not found | Yes | Skip / Abort | Skip |
|
||||
| Command not in whitelist | Yes | Confirm / Skip / Abort | Confirm |
|
||||
| Edit conflict | Yes | Skip / Regenerate / Abort | Skip |
|
||||
| Validation error | Yes | Skip / Abort | Skip |
|
||||
| Timeout | Yes | Retry / Skip / Abort | Retry |
|
||||
| Unknown | No | Abort | Abort |
|
||||
|
||||
### 0.16.3 - ErrorHandler Service ✅
|
||||
|
||||
```typescript
|
||||
// src/shared/errors/ErrorHandler.ts
|
||||
class ErrorHandler {
|
||||
handle(error: IpuaroError, contextKey?: string): Promise<ErrorHandlingResult>
|
||||
handleSync(error: IpuaroError, contextKey?: string): ErrorHandlingResult
|
||||
wrap<T>(fn: () => Promise<T>, errorType: ErrorType, contextKey?: string): Promise<Result>
|
||||
withRetry<T>(fn: () => Promise<T>, errorType: ErrorType, contextKey: string): Promise<T>
|
||||
resetRetries(contextKey?: string): void
|
||||
getRetryCount(contextKey: string): number
|
||||
isMaxRetriesExceeded(contextKey: string): boolean
|
||||
}
|
||||
```
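A hedged usage sketch for `withRetry`; the zero-argument constructor, the `llm`/`messages` variables, and the context key are assumptions, only the method signatures come from the class above:

```typescript
// Sketch: retry an LLM call, counting retries per context key.
const handler = new ErrorHandler()

const reply = await handler.withRetry(
    () => llm.chat(messages), // operation to retry (llm/messages assumed to exist)
    "llm",                    // ErrorType reported if the call keeps failing
    "chat:session-42",        // context key for per-operation retry counting
)

if (handler.isMaxRetriesExceeded("chat:session-42")) {
    handler.resetRetries("chat:session-42")
}
```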
|
||||
|
||||
**Tests:**
|
||||
- [ ] Unit tests for error handling
|
||||
- [x] Unit tests for IpuaroError (27 tests)
|
||||
- [x] Unit tests for ErrorHandler (32 tests)
|
||||
|
||||
---
|
||||
|
||||
## Version 0.17.0 - Documentation Complete 📚 ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.17.0 released)
|
||||
|
||||
### Documentation
|
||||
|
||||
- [x] README.md comprehensive update with all features
|
||||
- [x] ARCHITECTURE.md explaining design and decisions
|
||||
- [x] TOOLS.md complete reference for all 18 tools
|
||||
- [x] Troubleshooting guide
|
||||
- [x] FAQ section
|
||||
- [x] API examples
|
||||
- [x] ~2500 lines of documentation added
|
||||
|
||||
---
|
||||
|
||||
## Version 0.18.0 - Working Examples 📦 ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.18.0 released)
|
||||
|
||||
### Examples
|
||||
|
||||
- [x] Demo project with TypeScript application (336 LOC)
|
||||
- [x] User management service (UserService)
|
||||
- [x] Authentication service (AuthService)
|
||||
- [x] Utilities (Logger, Validation)
|
||||
- [x] Unit tests (Vitest)
|
||||
- [x] Configuration files (package.json, tsconfig.json, .ipuaro.json)
|
||||
- [x] Comprehensive README with 35+ example queries
|
||||
- [x] Workflow scenarios (bug fix, refactoring, code review)
|
||||
- [x] Demonstrates all 18 tools
|
||||
- [x] 15 files, 977 total lines
|
||||
|
||||
---
|
||||
|
||||
## Version 0.19.0 - XML Tool Format Refactor 🔄 ✅
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Complete (v0.19.0 released)
|
||||
|
||||
Refactoring: switch to a pure XML format for tool calls (as described in CONCEPT.md).

### Current Problem

OllamaClient uses Ollama's native tool calling (JSON Schema), while ResponseParser implements XML parsing. This creates confusion and does not match CONCEPT.md.
|
||||
|
||||
### 0.19.1 - OllamaClient Refactor
|
||||
|
||||
```typescript
|
||||
// src/infrastructure/llm/OllamaClient.ts
|
||||
|
||||
// BEFORE:
// - Pass tools in Ollama SDK format
// - Extract tool_calls from response.message.tool_calls

// AFTER:
// - Do NOT pass tools to the SDK
// - Tools are described in the system prompt as XML
// - The LLM returns XML in content
// - Parse it with ResponseParser
|
||||
```
|
||||
|
||||
**Changes:**
- [x] Remove the `convertTools()` method
- [x] Remove the `extractToolCalls()` method
- [x] Stop passing `tools` to `client.chat()`
- [x] Return only `content` without `toolCalls`
|
||||
|
||||
### 0.19.2 - System Prompt Update
|
||||
|
||||
```typescript
|
||||
// src/infrastructure/llm/prompts.ts
|
||||
|
||||
// Add the full description of the XML format to SYSTEM_PROMPT:
|
||||
|
||||
const TOOL_FORMAT_INSTRUCTIONS = `
|
||||
## Tool Calling Format
|
||||
|
||||
When you need to use a tool, format your call as XML:
|
||||
|
||||
<tool_call name="tool_name">
|
||||
<param_name>value</param_name>
|
||||
<another_param>value</another_param>
|
||||
</tool_call>
|
||||
|
||||
Examples:
|
||||
<tool_call name="get_lines">
|
||||
<path>src/index.ts</path>
|
||||
<start>1</start>
|
||||
<end>50</end>
|
||||
</tool_call>
|
||||
|
||||
<tool_call name="edit_lines">
|
||||
<path>src/utils.ts</path>
|
||||
<start>10</start>
|
||||
<end>15</end>
|
||||
<content>const newCode = "hello";</content>
|
||||
</tool_call>
|
||||
|
||||
You can use multiple tool calls in one response.
|
||||
Always wait for tool results before making conclusions.
|
||||
`
|
||||
```
|
||||
|
||||
**Changes:**
- [x] Add `TOOL_FORMAT_INSTRUCTIONS` to prompts.ts
- [x] Include it in `SYSTEM_PROMPT`
- [x] Add examples for all 18 tools
|
||||
|
||||
### 0.19.3 - HandleMessage Simplification
|
||||
|
||||
```typescript
|
||||
// src/application/use-cases/HandleMessage.ts
|
||||
|
||||
// BEFORE:
// const response = await this.llm.chat(messages)
// const parsed = parseToolCalls(response.content)

// AFTER:
// const response = await this.llm.chat(messages) // no tools passed
// const parsed = parseToolCalls(response.content) // the single source of tool calls
|
||||
```
|
||||
|
||||
**Changes:**
- [x] Stop passing tool definitions to `llm.chat()`
- [x] ResponseParser is the single source of tool calls
- [x] Simplify the handling logic
|
||||
|
||||
### 0.19.4 - ILLMClient Interface Update
|
||||
|
||||
```typescript
|
||||
// src/domain/services/ILLMClient.ts
|
||||
|
||||
// BEFORE:
|
||||
interface ILLMClient {
|
||||
chat(messages: ChatMessage[], tools?: ToolDef[]): Promise<LLMResponse>
|
||||
}
|
||||
|
||||
// AFTER:
|
||||
interface ILLMClient {
|
||||
chat(messages: ChatMessage[]): Promise<LLMResponse>
|
||||
// tools are no longer passed - they live in the system prompt
|
||||
}
|
||||
```
|
||||
|
||||
**Changes:**
- [x] Remove the `tools` parameter from `chat()`
- [x] Remove `toolCalls` from `LLMResponse` (they are parsed from content)
- [x] Update all implementations
|
||||
|
||||
### 0.19.5 - ResponseParser Enhancements
|
||||
|
||||
```typescript
|
||||
// src/infrastructure/llm/ResponseParser.ts
|
||||
|
||||
// Improvements:
// - Better handling of parse errors
// - CDATA support for multiline content
// - Validation of tool names
|
||||
```
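As a rough illustration of the CDATA handling described above (not the actual ResponseParser implementation), a small helper that unwraps a `<![CDATA[...]]>` wrapper from a parameter value before it is used:

```typescript
// Sketch only: unwrap <![CDATA[...]]> so multiline content survives XML parsing.
function unwrapCdata(value: string): string {
    const match = /^<!\[CDATA\[([\s\S]*)\]\]>$/.exec(value.trim())
    return match ? match[1] : value
}

// '<content><![CDATA[const a = "<b>";]]></content>' -> 'const a = "<b>";'
```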
|
||||
|
||||
**Changes:**
- [x] Add `<![CDATA[...]]>` support for content
- [x] Validation: the tool name must come from the known tool list
- [x] Improve parse error messages
|
||||
|
||||
**Tests:**
- [x] Update OllamaClient tests
- [x] Update HandleMessage tests
- [x] Add ResponseParser tests for edge cases
- [ ] E2E test of the full flow with XML (optional, may land in 0.20.0)
|
||||
|
||||
---
|
||||
|
||||
## Version 0.20.0 - Missing Use Cases 🔧
|
||||
|
||||
**Priority:** HIGH
|
||||
**Status:** Pending
|
||||
|
||||
### 0.20.1 - IndexProject Use Case
|
||||
|
||||
```typescript
|
||||
// src/application/use-cases/IndexProject.ts
|
||||
class IndexProject {
|
||||
constructor(
|
||||
private storage: IStorage,
|
||||
private indexer: IIndexer
|
||||
)
|
||||
|
||||
async execute(
|
||||
projectRoot: string,
|
||||
onProgress?: (progress: IndexProgress) => void
|
||||
): Promise<IndexingStats>
|
||||
// Full indexing pipeline:
|
||||
// 1. Scan files
|
||||
// 2. Parse AST
|
||||
// 3. Analyze metadata
|
||||
// 4. Build indexes
|
||||
// 5. Store in Redis
|
||||
}
|
||||
```
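A hedged usage sketch for the planned use case; the `IndexProgress` and `IndexingStats` field names shown in the comments are assumptions based on the signature above:

```typescript
// Sketch: wiring IndexProject into the CLI `index` command.
const indexProject = new IndexProject(storage, indexer)

const stats = await indexProject.execute(process.cwd(), (progress) => {
    // IndexProgress shape is assumed, e.g. { phase, current, total }
    process.stdout.write(`\r${progress.phase}: ${progress.current}/${progress.total}`)
})

// IndexingStats field name assumed for illustration.
console.log(`Indexed ${stats.filesIndexed} files`)
```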
|
||||
|
||||
**Deliverables:**
|
||||
- [ ] IndexProject use case implementation
|
||||
- [ ] Integration with CLI `index` command
|
||||
- [ ] Integration with `/reindex` slash command
|
||||
- [ ] Progress reporting via callback
|
||||
- [ ] Unit tests
|
||||
|
||||
### 0.20.2 - ExecuteTool Use Case
|
||||
|
||||
```typescript
|
||||
// src/application/use-cases/ExecuteTool.ts
|
||||
class ExecuteTool {
|
||||
constructor(
|
||||
private tools: IToolRegistry,
|
||||
private storage: IStorage
|
||||
)
|
||||
|
||||
async execute(
|
||||
toolName: string,
|
||||
params: Record<string, unknown>,
|
||||
context: ToolContext
|
||||
): Promise<ToolResult>
|
||||
// Orchestrates tool execution with:
|
||||
// - Parameter validation
|
||||
// - Confirmation flow
|
||||
// - Undo stack management
|
||||
// - Storage updates
|
||||
}
|
||||
```
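A sketch of how HandleMessage could delegate a parsed tool call to this use case; the params, the `context` variable, and the `result.success` field are illustrative assumptions:

```typescript
// Sketch: executing one parsed tool call through the orchestrating use case.
const executeTool = new ExecuteTool(tools, storage)

const result = await executeTool.execute(
    "get_lines",                                 // toolName from the parsed XML call
    { path: "src/index.ts", start: 1, end: 50 }, // params (illustrative)
    context,                                     // ToolContext: confirmation flow, undo stack, ...
)

if (!result.success) {
    // Surface the failure to the TUI / error handler.
}
```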
|
||||
|
||||
**Deliverables:**
|
||||
- [ ] ExecuteTool use case implementation
|
||||
- [ ] Refactor HandleMessage to use ExecuteTool
|
||||
- [ ] Unit tests
|
||||
|
||||
**Tests:**
|
||||
- [ ] Unit tests for IndexProject
|
||||
- [ ] Unit tests for ExecuteTool
|
||||
|
||||
---
|
||||
|
||||
## Version 0.21.0 - TUI Enhancements 🎨
|
||||
|
||||
**Priority:** MEDIUM
|
||||
**Status:** In Progress (2/4 complete)
|
||||
|
||||
### 0.21.1 - useAutocomplete Hook ✅
|
||||
|
||||
```typescript
|
||||
// src/tui/hooks/useAutocomplete.ts
|
||||
function useAutocomplete(options: {
|
||||
storage: IStorage
|
||||
projectRoot: string
|
||||
enabled?: boolean
|
||||
maxSuggestions?: number
|
||||
}): {
|
||||
suggestions: string[]
|
||||
complete: (partial: string) => string[]
|
||||
accept: (suggestion: string) => string
|
||||
reset: () => void
|
||||
}
|
||||
|
||||
// Tab autocomplete for file paths
|
||||
// Sources: Redis file index
|
||||
// Fuzzy matching with scoring algorithm
|
||||
```
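A small sketch of how the Input component might consume the hook on Tab; only the hook signature above is from the roadmap, the surrounding handler is illustrative:

```typescript
// Sketch: Tab-completing a partially typed path inside the Input component.
const { complete, accept } = useAutocomplete({
    storage,
    projectRoot,
    maxSuggestions: 5,
})

function onTab(currentValue: string): string {
    const matches = complete(currentValue) // e.g. "src/ser" -> ["src/services/user.ts", ...]
    return matches.length > 0 ? accept(matches[0]) : currentValue
}
```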
|
||||
|
||||
**Deliverables:**
|
||||
- [x] useAutocomplete hook implementation
|
||||
- [x] Integration with Input component (Tab key)
|
||||
- [x] Path completion from Redis index
|
||||
- [x] Fuzzy matching support
|
||||
- [x] Unit tests (21 tests)
|
||||
- [x] Visual feedback in Input component
|
||||
- [x] Real-time suggestion updates
|
||||
|
||||
### 0.21.2 - Edit Mode in ConfirmDialog ✅
|
||||
|
||||
```typescript
|
||||
// Enhanced ConfirmDialog with edit mode
|
||||
// When user presses [E]:
|
||||
// 1. Show editable text area with proposed changes
|
||||
// 2. User modifies the content
|
||||
// 3. Apply modified version
|
||||
|
||||
interface ConfirmDialogProps {
|
||||
message: string
|
||||
diff?: DiffViewProps
|
||||
onSelect: (choice: ConfirmChoice, editedContent?: string[]) => void
|
||||
editableContent?: string[]
|
||||
}
|
||||
```
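A hedged sketch of the [E] flow from the parent component's point of view; the `"edit"` choice literal, `proposedLines`, and `applyEdit` are assumptions used only for illustration:

```tsx
// Sketch: applying user-edited content when the dialog resolves with the edit choice.
<ConfirmDialog
    message="Apply changes to src/services/user.ts?"
    editableContent={proposedLines}
    onSelect={(choice, editedContent) => {
        if (choice === "edit" && editedContent) {
            void applyEdit(editedContent) // apply the user-modified lines (assumed helper)
        } else if (choice === "yes") {
            void applyEdit(proposedLines)
        }
    }}
/>
```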
|
||||
|
||||
**Deliverables:**
|
||||
- [x] EditableContent component for inline editing
|
||||
- [x] Integration with ConfirmDialog [E] option
|
||||
- [x] Handler in App.tsx for edit choice
|
||||
- [x] ExecuteTool support for edited content
|
||||
- [x] ConfirmationResult type with editedContent field
|
||||
- [x] All existing tests passing (1484 tests)
|
||||
|
||||
### 0.21.3 - Multiline Input
|
||||
|
||||
```typescript
|
||||
// src/tui/components/Input.tsx enhancements
|
||||
interface InputProps {
|
||||
// ... existing props
|
||||
multiline?: boolean | "auto" // auto = detect based on content
|
||||
}
|
||||
|
||||
// Shift+Enter for new line
|
||||
// Auto-expand height
|
||||
```
|
||||
|
||||
**Deliverables:**
|
||||
- [ ] Multiline support in Input component
|
||||
- [ ] Shift+Enter handling
|
||||
- [ ] Auto-height adjustment
|
||||
- [ ] Config option: `input.multiline`
|
||||
- [ ] Unit tests
|
||||
|
||||
### 0.21.4 - Syntax Highlighting in DiffView
|
||||
|
||||
```typescript
|
||||
// src/tui/components/DiffView.tsx enhancements
|
||||
// Full syntax highlighting for code in diff
|
||||
|
||||
interface DiffViewProps {
|
||||
// ... existing props
|
||||
language?: "ts" | "tsx" | "js" | "jsx"
|
||||
syntaxHighlight?: boolean
|
||||
}
|
||||
|
||||
// Use ink-syntax-highlight or custom tokenizer
|
||||
```
|
||||
|
||||
**Deliverables:**
|
||||
- [ ] Syntax highlighting integration
|
||||
- [ ] Language detection from file extension
|
||||
- [ ] Config option: `edit.syntaxHighlight`
|
||||
- [ ] Unit tests
|
||||
|
||||
**Tests:**
|
||||
- [ ] Unit tests for useAutocomplete
|
||||
- [ ] Unit tests for enhanced ConfirmDialog
|
||||
- [ ] Unit tests for multiline Input
|
||||
- [ ] Unit tests for syntax highlighting
|
||||
|
||||
---
|
||||
|
||||
## Version 0.22.0 - Extended Configuration ⚙️
|
||||
|
||||
**Priority:** MEDIUM
|
||||
**Status:** Complete (5/5) ✅
|
||||
|
||||
### 0.22.1 - Display Configuration ✅
|
||||
|
||||
```typescript
|
||||
// src/shared/constants/config.ts additions
|
||||
export const DisplayConfigSchema = z.object({
|
||||
showStats: z.boolean().default(true),
|
||||
showToolCalls: z.boolean().default(true),
|
||||
theme: z.enum(["dark", "light"]).default("dark"),
|
||||
bellOnComplete: z.boolean().default(false),
|
||||
progressBar: z.boolean().default(true),
|
||||
})
|
||||
```
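For context, a minimal sketch of how such a schema yields defaults when the user's `.ipuaro.json` omits fields; only `DisplayConfigSchema` itself is from the roadmap, and the import path is assumed:

```typescript
import { z } from "zod"
import { DisplayConfigSchema } from "./config" // path assumed

// A user config that only overrides the theme; every other field falls back to its default.
const display = DisplayConfigSchema.parse({ theme: "light" })
// -> { showStats: true, showToolCalls: true, theme: "light", bellOnComplete: false, progressBar: true }

type DisplayConfig = z.infer<typeof DisplayConfigSchema>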
|
||||
|
||||
**Deliverables:**
|
||||
- [x] DisplayConfigSchema in config.ts
|
||||
- [x] Bell notification on response complete
|
||||
- [x] Theme support (dark/light color schemes)
|
||||
- [x] Configurable stats display
|
||||
- [x] Unit tests (46 new tests: 20 schema, 24 theme, 2 bell)
|
||||
|
||||
### 0.22.2 - Session Configuration ✅
|
||||
|
||||
```typescript
|
||||
// src/shared/constants/config.ts additions
|
||||
export const SessionConfigSchema = z.object({
|
||||
persistIndefinitely: z.boolean().default(true),
|
||||
maxHistoryMessages: z.number().int().positive().default(100),
|
||||
saveInputHistory: z.boolean().default(true),
|
||||
})
|
||||
```
|
||||
|
||||
**Deliverables:**
|
||||
- [x] SessionConfigSchema in config.ts
|
||||
- [x] History truncation based on maxHistoryMessages
|
||||
- [x] Input history persistence toggle
|
||||
- [x] Unit tests (19 new tests)
|
||||
|
||||
### 0.22.3 - Context Configuration ✅
|
||||
|
||||
```typescript
|
||||
// src/shared/constants/config.ts additions
|
||||
export const ContextConfigSchema = z.object({
|
||||
systemPromptTokens: z.number().int().positive().default(2000),
|
||||
maxContextUsage: z.number().min(0).max(1).default(0.8),
|
||||
autoCompressAt: z.number().min(0).max(1).default(0.8),
|
||||
compressionMethod: z.enum(["llm-summary", "truncate"]).default("llm-summary"),
|
||||
})
|
||||
```
|
||||
|
||||
**Deliverables:**
|
||||
- [x] ContextConfigSchema in config.ts
|
||||
- [x] ContextManager reads from config
|
||||
- [x] Configurable compression threshold
|
||||
- [x] Unit tests (40 new tests: 32 schema, 8 ContextManager integration)
|
||||
|
||||
### 0.22.4 - Autocomplete Configuration ✅
|
||||
|
||||
```typescript
|
||||
// src/shared/constants/config.ts additions
|
||||
export const AutocompleteConfigSchema = z.object({
|
||||
enabled: z.boolean().default(true),
|
||||
source: z.enum(["redis-index", "filesystem", "both"]).default("redis-index"),
|
||||
maxSuggestions: z.number().int().positive().default(10),
|
||||
})
|
||||
```
|
||||
|
||||
**Deliverables:**
|
||||
- [x] AutocompleteConfigSchema in config.ts
|
||||
- [x] useAutocomplete reads from config
|
||||
- [x] Unit tests (27 tests)
|
||||
|
||||
### 0.22.5 - Commands Configuration ✅
|
||||
|
||||
```typescript
|
||||
// src/shared/constants/config.ts additions
|
||||
export const CommandsConfigSchema = z.object({
|
||||
timeout: z.number().int().positive().nullable().default(null),
|
||||
})
|
||||
```
|
||||
|
||||
**Deliverables:**
|
||||
- [x] CommandsConfigSchema in config.ts
|
||||
- [x] Timeout support for run_command tool
|
||||
- [x] Unit tests (19 schema tests + 3 RunCommandTool integration tests)
|
||||
|
||||
**Tests:**
|
||||
- [x] Unit tests for CommandsConfigSchema (19 tests)
|
||||
- [x] Integration tests for RunCommandTool with config (3 tests)
|
||||
|
||||
---
|
||||
|
||||
## Version 0.23.0 - JSON/YAML & Symlinks 📄
|
||||
|
||||
**Priority:** LOW
|
||||
**Status:** Pending
|
||||
|
||||
### 0.23.1 - JSON/YAML AST Parsing
|
||||
|
||||
```typescript
|
||||
// src/infrastructure/indexer/ASTParser.ts enhancements
|
||||
type Language = "ts" | "tsx" | "js" | "jsx" | "json" | "yaml"
|
||||
|
||||
// For JSON: extract keys, structure
|
||||
// For YAML: extract keys, structure
|
||||
// Use tree-sitter-json and tree-sitter-yaml
|
||||
```
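As a rough sketch only, assuming the node-tree-sitter bindings (the project's actual binding may differ, and tree-sitter-json ships no TypeScript typings, hence the `require`): key extraction for a JSON source could look like this. The `pair`/`key` node and field names come from the tree-sitter-json grammar.

```typescript
import Parser from "tree-sitter"
// eslint-disable-next-line @typescript-eslint/no-var-requires
const JsonLanguage = require("tree-sitter-json")

// Sketch: collect object keys from a JSON source string.
const parser = new Parser()
parser.setLanguage(JsonLanguage)

const tree = parser.parse('{ "name": "ipuaro-demo-project", "private": true }')
const keys = tree.rootNode
    .descendantsOfType("pair")
    .map((pair) => pair.childForFieldName("key")?.text.replace(/"/g, ""))
// -> ["name", "private"]
```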
|
||||
|
||||
**Deliverables:**
|
||||
- [ ] Add tree-sitter-json dependency
|
||||
- [ ] Add tree-sitter-yaml dependency
|
||||
- [ ] JSON parsing in ASTParser
|
||||
- [ ] YAML parsing in ASTParser
|
||||
- [ ] Unit tests
|
||||
|
||||
### 0.23.2 - Symlinks Metadata
|
||||
|
||||
```typescript
|
||||
// src/domain/services/IIndexer.ts enhancements
|
||||
export interface ScanResult {
|
||||
path: string
|
||||
type: "file" | "directory" | "symlink"
|
||||
size: number
|
||||
lastModified: number
|
||||
symlinkTarget?: string // <-- NEW: target path for symlinks
|
||||
}
|
||||
|
||||
// Store symlink metadata in Redis
|
||||
// project:{name}:meta includes symlink info
|
||||
```
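A minimal sketch of how FileScanner could populate `symlinkTarget` using Node's fs API; the helper name is illustrative, the field names match the interface above:

```typescript
import { lstat, readlink } from "node:fs/promises"

// Sketch: classify one scanned entry without following symlinks.
async function scanEntry(path: string): Promise<Pick<ScanResult, "type" | "symlinkTarget">> {
    const stats = await lstat(path) // lstat does NOT follow symlinks
    if (stats.isSymbolicLink()) {
        return { type: "symlink", symlinkTarget: await readlink(path) }
    }
    return { type: stats.isDirectory() ? "directory" : "file" }
}
```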
|
||||
|
||||
**Deliverables:**
|
||||
- [ ] Add symlinkTarget to ScanResult
|
||||
- [ ] FileScanner extracts symlink targets
|
||||
- [ ] Store symlink metadata in Redis
|
||||
- [ ] Unit tests
|
||||
|
||||
**Tests:**
|
||||
- [ ] Unit tests for JSON/YAML parsing
|
||||
- [ ] Unit tests for symlink handling
|
||||
|
||||
---
|
||||
|
||||
@@ -1244,16 +1797,16 @@ class IpuaroError extends Error {
|
||||
**Target:** Stable release
|
||||
|
||||
**Checklist:**
|
||||
- [ ] All 18 tools implemented and tested
|
||||
- [ ] TUI fully functional
|
||||
- [ ] Session persistence working
|
||||
- [ ] Error handling complete
|
||||
- [x] All 18 tools implemented and tested ✅ (v0.9.0)
|
||||
- [x] TUI fully functional ✅ (v0.11.0, v0.12.0)
|
||||
- [x] Session persistence working ✅ (v0.10.0)
|
||||
- [x] Error handling complete ✅ (v0.16.0)
|
||||
- [ ] Performance optimized
|
||||
- [ ] Documentation complete
|
||||
- [ ] 80%+ test coverage
|
||||
- [ ] 0 ESLint errors
|
||||
- [ ] Examples working
|
||||
- [ ] CHANGELOG.md up to date
|
||||
- [x] Documentation complete ✅ (v0.17.0)
|
||||
- [x] Test coverage ≥92% branches, ≥95% lines/functions/statements ✅ (92.01% branches, 97.84% lines, 99.16% functions, 97.84% statements - 1441 tests)
|
||||
- [x] 0 ESLint errors ✅
|
||||
- [x] Examples working ✅ (v0.18.0)
|
||||
- [x] CHANGELOG.md up to date ✅
|
||||
|
||||
---
|
||||
|
||||
@@ -1327,5 +1880,6 @@ sessions:list # List<session_id>
|
||||
|
||||
---
|
||||
|
||||
**Last Updated:** 2025-11-29
|
||||
**Target Version:** 1.0.0
|
||||
**Last Updated:** 2025-12-02
|
||||
**Target Version:** 1.0.0
|
||||
**Current Version:** 0.22.1
|
||||
@@ -1,40 +1,95 @@
|
||||
# ipuaro TODO
|
||||
|
||||
## Completed
|
||||
|
||||
### Version 0.1.0 - Foundation
|
||||
- [x] Project setup (package.json, tsconfig, vitest)
|
||||
- [x] Domain entities (Session, Project)
|
||||
- [x] Domain value objects (FileData, FileAST, FileMeta, ChatMessage, etc.)
|
||||
- [x] Domain service interfaces (IStorage, ILLMClient, ITool, IIndexer)
|
||||
- [x] Shared config loader with Zod validation
|
||||
- [x] IpuaroError class
|
||||
|
||||
### Version 0.2.0 - Redis Storage
|
||||
- [x] RedisClient with AOF config
|
||||
- [x] Redis schema implementation
|
||||
- [x] RedisStorage class
|
||||
|
||||
### Version 0.3.0 - Indexer
|
||||
- [x] FileScanner with gitignore support
|
||||
- [x] ASTParser with tree-sitter
|
||||
- [x] MetaAnalyzer for complexity
|
||||
- [x] IndexBuilder for symbols
|
||||
- [x] Watchdog for file changes
|
||||
|
||||
### Version 0.4.0 - LLM Integration
|
||||
- [x] OllamaClient implementation
|
||||
- [x] System prompt design
|
||||
- [x] Tool definitions (18 tools)
|
||||
- [x] Response parser (XML format)
|
||||
|
||||
### Version 0.5.0 - Read Tools
|
||||
- [x] ToolRegistry implementation
|
||||
- [x] get_lines tool
|
||||
- [x] get_function tool
|
||||
- [x] get_class tool
|
||||
- [x] get_structure tool
|
||||
|
||||
### Version 0.6.0 - Edit Tools
|
||||
- [x] edit_lines tool
|
||||
- [x] create_file tool
|
||||
- [x] delete_file tool
|
||||
|
||||
### Version 0.7.0 - Search Tools
|
||||
- [x] find_references tool
|
||||
- [x] find_definition tool
|
||||
|
||||
### Version 0.8.0 - Analysis Tools
|
||||
- [x] get_dependencies tool
|
||||
- [x] get_dependents tool
|
||||
- [x] get_complexity tool
|
||||
- [x] get_todos tool
|
||||
|
||||
### Version 0.9.0 - Git & Run Tools
|
||||
- [x] git_status tool
|
||||
- [x] git_diff tool
|
||||
- [x] git_commit tool
|
||||
- [x] CommandSecurity (blacklist/whitelist)
|
||||
- [x] run_command tool
|
||||
- [x] run_tests tool
|
||||
|
||||
### Version 0.10.0 - Session Management
|
||||
- [x] ISessionStorage interface
|
||||
- [x] RedisSessionStorage implementation
|
||||
- [x] ContextManager use case
|
||||
- [x] StartSession use case
|
||||
- [x] HandleMessage use case
|
||||
- [x] UndoChange use case
|
||||
|
||||
## In Progress
|
||||
|
||||
### Version 0.2.0 - Redis Storage
|
||||
- [ ] RedisClient with AOF config
|
||||
- [ ] Redis schema implementation
|
||||
- [ ] RedisStorage class
|
||||
### Version 0.11.0 - TUI Basic
|
||||
- [ ] App shell (Ink/React)
|
||||
- [ ] StatusBar component
|
||||
- [ ] Chat component
|
||||
- [ ] Input component
|
||||
|
||||
## Planned
|
||||
|
||||
### Version 0.3.0 - Indexer
|
||||
- [ ] FileScanner with gitignore support
|
||||
- [ ] ASTParser with tree-sitter
|
||||
- [ ] MetaAnalyzer for complexity
|
||||
- [ ] IndexBuilder for symbols
|
||||
- [ ] Watchdog for file changes
|
||||
### Version 0.12.0 - TUI Advanced
|
||||
- [ ] DiffView component
|
||||
- [ ] ConfirmDialog component
|
||||
- [ ] ErrorDialog component
|
||||
- [ ] Progress component
|
||||
|
||||
### Version 0.4.0 - LLM Integration
|
||||
- [ ] OllamaClient implementation
|
||||
- [ ] System prompt design
|
||||
- [ ] Tool definitions (XML format)
|
||||
- [ ] Response parser
|
||||
### Version 0.13.0+ - Commands & Polish
|
||||
- [ ] Slash commands (/help, /clear, /undo, /sessions, /status)
|
||||
- [ ] Hotkeys (Ctrl+C, Ctrl+D, Ctrl+Z)
|
||||
- [ ] Auto-compression at 80% context
|
||||
|
||||
### Version 0.5.0+ - Tools
|
||||
- [ ] Read tools (get_lines, get_function, get_class, get_structure)
|
||||
- [ ] Edit tools (edit_lines, create_file, delete_file)
|
||||
- [ ] Search tools (find_references, find_definition)
|
||||
- [ ] Analysis tools (get_dependencies, get_dependents, get_complexity, get_todos)
|
||||
- [ ] Git tools (git_status, git_diff, git_commit)
|
||||
- [ ] Run tools (run_command, run_tests)
|
||||
|
||||
### Version 0.10.0+ - Session & TUI
|
||||
- [ ] Session management
|
||||
- [ ] Context compression
|
||||
- [ ] TUI components (StatusBar, Chat, Input, DiffView)
|
||||
- [ ] Slash commands (/help, /clear, /undo, etc.)
|
||||
### Version 0.14.0 - CLI Entry Point
|
||||
- [ ] Full CLI commands (start, init, index)
|
||||
- [ ] Onboarding flow (Redis check, Ollama check, model pull)
|
||||
|
||||
## Technical Debt
|
||||
|
||||
@@ -51,4 +106,4 @@ _None at this time._
|
||||
|
||||
---
|
||||
|
||||
**Last Updated:** 2025-01-29
|
||||
**Last Updated:** 2025-12-01
|
||||
packages/ipuaro/TOOLS.md (new file, 1605 lines; diff suppressed because it is too large)
packages/ipuaro/examples/demo-project/.gitignore (new file, 4 lines, vendored)
@@ -0,0 +1,4 @@
|
||||
node_modules/
|
||||
dist/
|
||||
*.log
|
||||
.DS_Store
|
||||
packages/ipuaro/examples/demo-project/.ipuaro.json (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"redis": {
|
||||
"host": "localhost",
|
||||
"port": 6379
|
||||
},
|
||||
"llm": {
|
||||
"model": "qwen2.5-coder:7b-instruct",
|
||||
"temperature": 0.1
|
||||
},
|
||||
"project": {
|
||||
"ignorePatterns": [
|
||||
"node_modules",
|
||||
"dist",
|
||||
".git",
|
||||
"*.log"
|
||||
]
|
||||
},
|
||||
"edit": {
|
||||
"autoApply": false
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
# Example Conversations with ipuaro
|
||||
|
||||
This document shows realistic conversations you can have with ipuaro when working with the demo project.
|
||||
|
||||
## Conversation 1: Understanding the Codebase
|
||||
|
||||
```
|
||||
You: What does this project do?
|
||||
packages/ipuaro/examples/demo-project/README.md (new file, 406 lines)
@@ -0,0 +1,406 @@
|
||||
# ipuaro Demo Project
|
||||
|
||||
This is a demo project showcasing ipuaro's capabilities as a local AI agent for codebase operations.
|
||||
|
||||
## Project Overview
|
||||
|
||||
A simple TypeScript application demonstrating:
|
||||
- User management service
|
||||
- Authentication service
|
||||
- Validation utilities
|
||||
- Logging utilities
|
||||
- Unit tests
|
||||
|
||||
The code intentionally includes various patterns (TODOs, FIXMEs, complex functions, dependencies) to demonstrate ipuaro's analysis tools.
|
||||
|
||||
## Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
1. **Redis** - Running locally
|
||||
```bash
|
||||
# macOS
|
||||
brew install redis
|
||||
redis-server --appendonly yes
|
||||
```
|
||||
|
||||
2. **Ollama** - With qwen2.5-coder model
|
||||
```bash
|
||||
brew install ollama
|
||||
ollama serve
|
||||
ollama pull qwen2.5-coder:7b-instruct
|
||||
```
|
||||
|
||||
3. **Node.js** - v20 or higher
|
||||
|
||||
### Installation
|
||||
|
||||
```bash
|
||||
# Install dependencies
|
||||
npm install
|
||||
|
||||
# Or with pnpm
|
||||
pnpm install
|
||||
```
|
||||
|
||||
## Using ipuaro with Demo Project
|
||||
|
||||
### Start ipuaro
|
||||
|
||||
```bash
|
||||
# From this directory
|
||||
npx @samiyev/ipuaro
|
||||
|
||||
# Or if installed globally
|
||||
ipuaro
|
||||
```
|
||||
|
||||
### Example Queries
|
||||
|
||||
Try these queries to explore ipuaro's capabilities:
|
||||
|
||||
#### 1. Understanding the Codebase
|
||||
|
||||
```
|
||||
You: What is the structure of this project?
|
||||
```
|
||||
|
||||
ipuaro will use `get_structure` to show the directory tree.
|
||||
|
||||
```
|
||||
You: How does user creation work?
|
||||
```
|
||||
|
||||
ipuaro will:
|
||||
1. Use `get_structure` to find relevant files
|
||||
2. Use `get_function` to read the `createUser` function
|
||||
3. Use `find_references` to see where it's called
|
||||
4. Explain the flow
|
||||
|
||||
#### 2. Finding Issues
|
||||
|
||||
```
|
||||
You: What TODOs and FIXMEs are in the codebase?
|
||||
```
|
||||
|
||||
ipuaro will use `get_todos` to list all TODO/FIXME comments.
|
||||
|
||||
```
|
||||
You: Which files are most complex?
|
||||
```
|
||||
|
||||
ipuaro will use `get_complexity` to analyze and rank files by complexity.
|
||||
|
||||
#### 3. Understanding Dependencies
|
||||
|
||||
```
|
||||
You: What does the UserService depend on?
|
||||
```
|
||||
|
||||
ipuaro will use `get_dependencies` to show imported modules.
|
||||
|
||||
```
|
||||
You: What files use the validation utilities?
|
||||
```
|
||||
|
||||
ipuaro will use `get_dependents` to show files importing validation.ts.
|
||||
|
||||
#### 4. Code Analysis
|
||||
|
||||
```
|
||||
You: Find all references to the ValidationError class
|
||||
```
|
||||
|
||||
ipuaro will use `find_references` to locate all usages.
|
||||
|
||||
```
|
||||
You: Where is the Logger class defined?
|
||||
```
|
||||
|
||||
ipuaro will use `find_definition` to locate the definition.
|
||||
|
||||
#### 5. Making Changes
|
||||
|
||||
```
|
||||
You: Add a method to UserService to count total users
|
||||
```
|
||||
|
||||
ipuaro will:
|
||||
1. Read UserService class with `get_class`
|
||||
2. Generate the new method
|
||||
3. Use `edit_lines` to add it
|
||||
4. Show diff and ask for confirmation
|
||||
|
||||
```
|
||||
You: Fix the TODO in validation.ts about password validation
|
||||
```
|
||||
|
||||
ipuaro will:
|
||||
1. Find the TODO with `get_todos`
|
||||
2. Read the function with `get_function`
|
||||
3. Implement stronger password validation
|
||||
4. Use `edit_lines` to apply changes
|
||||
|
||||
#### 6. Testing
|
||||
|
||||
```
|
||||
You: Run the tests
|
||||
```
|
||||
|
||||
ipuaro will use `run_tests` to execute the test suite.
|
||||
|
||||
```
|
||||
You: Add a test for the getUserByEmail method
|
||||
```
|
||||
|
||||
ipuaro will:
|
||||
1. Read existing tests with `get_lines`
|
||||
2. Generate new test following the pattern
|
||||
3. Use `edit_lines` to add it
|
||||
|
||||
#### 7. Git Operations
|
||||
|
||||
```
|
||||
You: What files have I changed?
|
||||
```
|
||||
|
||||
ipuaro will use `git_status` to show modified files.
|
||||
|
||||
```
|
||||
You: Show me the diff for UserService
|
||||
```
|
||||
|
||||
ipuaro will use `git_diff` with the file path.
|
||||
|
||||
```
|
||||
You: Commit these changes with message "feat: add user count method"
|
||||
```
|
||||
|
||||
ipuaro will use `git_commit` after confirmation.
|
||||
|
||||
## Tool Demonstration Scenarios
|
||||
|
||||
### Scenario 1: Bug Fix Flow
|
||||
|
||||
```
|
||||
You: There's a bug - we need to sanitize user input before storing. Fix this in UserService.
|
||||
|
||||
Agent will:
|
||||
1. get_function("src/services/user.ts", "createUser")
|
||||
2. See that sanitization is missing
|
||||
3. find_definition("sanitizeInput") to locate the utility
|
||||
4. edit_lines to add sanitization call
|
||||
5. run_tests to verify the fix
|
||||
```
|
||||
|
||||
### Scenario 2: Refactoring Flow
|
||||
|
||||
```
|
||||
You: Extract the ID generation logic into a separate utility function
|
||||
|
||||
Agent will:
|
||||
1. get_class("src/services/user.ts", "UserService")
|
||||
2. Find generateId private method
|
||||
3. create_file("src/utils/id.ts") with the utility
|
||||
4. edit_lines to replace private method with import
|
||||
5. find_references("generateId") to check no other usages
|
||||
6. run_tests to ensure nothing broke
|
||||
```
|
||||
|
||||
### Scenario 3: Feature Addition
|
||||
|
||||
```
|
||||
You: Add password reset functionality to AuthService
|
||||
|
||||
Agent will:
|
||||
1. get_class("src/auth/service.ts", "AuthService")
|
||||
2. get_dependencies to see what's available
|
||||
3. Design the resetPassword method
|
||||
4. edit_lines to add the method
|
||||
5. Suggest creating a test
|
||||
6. create_file("tests/auth.test.ts") if needed
|
||||
```
|
||||
|
||||
### Scenario 4: Code Review
|
||||
|
||||
```
|
||||
You: Review the code for security issues
|
||||
|
||||
Agent will:
|
||||
1. get_todos to find FIXME about XSS
|
||||
2. get_complexity to find complex functions
|
||||
3. get_function for suspicious functions
|
||||
4. Suggest improvements
|
||||
5. Optionally edit_lines to fix issues
|
||||
```
|
||||
|
||||
## Slash Commands
|
||||
|
||||
While exploring, you can use these commands:
|
||||
|
||||
```
|
||||
/help # Show all commands and hotkeys
|
||||
/status # Show system status (LLM, Redis, context)
|
||||
/sessions list # List all sessions
|
||||
/undo # Undo last file change
|
||||
/clear # Clear chat history
|
||||
/reindex # Force project reindexation
|
||||
/auto-apply on # Enable auto-apply mode (skip confirmations)
|
||||
```
|
||||
|
||||
## Hotkeys
|
||||
|
||||
- `Ctrl+C` - Interrupt generation (1st) / Exit (2nd within 1s)
|
||||
- `Ctrl+D` - Exit and save session
|
||||
- `Ctrl+Z` - Undo last change
|
||||
- `↑` / `↓` - Navigate input history
|
||||
|
||||
## Project Files Overview
|
||||
|
||||
```
|
||||
demo-project/
|
||||
├── src/
|
||||
│ ├── auth/
|
||||
│ │ └── service.ts # Authentication logic (login, logout, verify)
|
||||
│ ├── services/
|
||||
│ │ └── user.ts # User CRUD operations
|
||||
│ ├── utils/
|
||||
│ │ ├── logger.ts # Logging utility (multiple methods)
|
||||
│ │ └── validation.ts # Input validation (with TODOs/FIXMEs)
|
||||
│ ├── types/
|
||||
│ │ └── user.ts # TypeScript type definitions
|
||||
│ └── index.ts # Application entry point
|
||||
├── tests/
|
||||
│ └── user.test.ts # User service tests (vitest)
|
||||
├── package.json # Project configuration
|
||||
├── tsconfig.json # TypeScript configuration
|
||||
├── vitest.config.ts # Test configuration
|
||||
└── .ipuaro.json # ipuaro configuration
|
||||
```
|
||||
|
||||
## What ipuaro Can Do With This Project
|
||||
|
||||
### Read Tools ✅
|
||||
- **get_lines**: Read any file or specific line ranges
|
||||
- **get_function**: Extract specific functions (login, createUser, etc.)
|
||||
- **get_class**: Extract classes (UserService, AuthService, Logger, etc.)
|
||||
- **get_structure**: See directory tree
|
||||
|
||||
### Edit Tools ✅
|
||||
- **edit_lines**: Modify functions, fix bugs, add features
|
||||
- **create_file**: Add new utilities, tests, services
|
||||
- **delete_file**: Remove unused files
|
||||
|
||||
### Search Tools ✅
|
||||
- **find_references**: Find all usages of ValidationError, User, etc.
|
||||
- **find_definition**: Locate where Logger, UserService are defined
|
||||
|
||||
### Analysis Tools ✅
|
||||
- **get_dependencies**: See what UserService imports
|
||||
- **get_dependents**: See what imports validation.ts (multiple files!)
|
||||
- **get_complexity**: Identify complex functions (createUser has moderate complexity)
|
||||
- **get_todos**: Find 2 TODOs and 1 FIXME in the project
|
||||
|
||||
### Git Tools ✅
|
||||
- **git_status**: Check working tree
|
||||
- **git_diff**: See changes
|
||||
- **git_commit**: Commit with AI-generated messages
|
||||
|
||||
### Run Tools ✅
|
||||
- **run_command**: Execute npm scripts
|
||||
- **run_tests**: Run vitest tests
|
||||
|
||||
## Tips for Best Experience
|
||||
|
||||
1. **Start Small**: Ask about structure first, then dive into specific files
|
||||
2. **Be Specific**: "Show me the createUser function" vs "How does this work?"
|
||||
3. **Use Tools Implicitly**: Just ask questions, let ipuaro choose the right tools
|
||||
4. **Review Changes**: Always review diffs before applying edits
|
||||
5. **Test Often**: Ask ipuaro to run tests after making changes
|
||||
6. **Commit Incrementally**: Use git_commit for each logical change
|
||||
|
||||
## Advanced Workflows
|
||||
|
||||
### Workflow 1: Add New Feature
|
||||
|
||||
```
|
||||
You: Add email verification to the authentication flow
|
||||
|
||||
Agent will:
|
||||
1. Analyze current auth flow
|
||||
2. Propose design (new fields, methods)
|
||||
3. Edit AuthService to add verification
|
||||
4. Edit User types to add verified field
|
||||
5. Create tests for verification
|
||||
6. Run tests
|
||||
7. Offer to commit
|
||||
```
|
||||
|
||||
### Workflow 2: Performance Optimization
|
||||
|
||||
```
|
||||
You: The user lookup is slow when we have many users. Optimize it.
|
||||
|
||||
Agent will:
|
||||
1. Analyze UserService.getUserByEmail
|
||||
2. See it's using Array.find (O(n))
|
||||
3. Suggest adding an email index
|
||||
4. Edit to add private emailIndex: Map<string, User>
|
||||
5. Update createUser to populate index
|
||||
6. Update deleteUser to maintain index
|
||||
7. Run tests to verify
|
||||
```
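A hedged sketch of the index the agent might introduce in step 4. It mirrors the UserService shown later in this diff, but it is not the actual generated edit; `buildUser` stands in for the existing validation and construction logic.

```typescript
// Sketch: O(1) email lookup via a secondary index kept in sync with the users map.
export class UserService {
    private users: Map<string, User> = new Map()
    private emailIndex: Map<string, User> = new Map()

    async createUser(dto: CreateUserDto): Promise<User> {
        const user = buildUser(dto)           // placeholder for the existing creation logic
        this.users.set(user.id, user)
        this.emailIndex.set(user.email, user) // keep the index in sync
        return user
    }

    async getUserByEmail(email: string): Promise<User | null> {
        return this.emailIndex.get(email) ?? null // no more Array.find scan
    }

    async deleteUser(id: string): Promise<void> {
        const user = this.users.get(id)
        if (!user) throw new Error("User not found")
        this.users.delete(id)
        this.emailIndex.delete(user.email)
    }
}
```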
|
||||
|
||||
### Workflow 3: Security Audit
|
||||
|
||||
```
|
||||
You: Audit the code for security vulnerabilities
|
||||
|
||||
Agent will:
|
||||
1. get_todos to find FIXME about XSS
|
||||
2. Review sanitizeInput implementation
|
||||
3. Check password validation strength
|
||||
4. Look for SQL injection risks (none here)
|
||||
5. Suggest improvements
|
||||
6. Optionally implement fixes
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
After exploring the demo project, try:
|
||||
|
||||
1. **Your Own Project**: Run `ipuaro` in your real codebase
|
||||
2. **Customize Config**: Edit `.ipuaro.json` to fit your needs
|
||||
3. **Different Model**: Try `--model qwen2.5-coder:32b-instruct` for better results
|
||||
4. **Auto-Apply Mode**: Use `--auto-apply` for faster iterations (with caution!)
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Redis Not Connected
|
||||
```bash
|
||||
# Start Redis with persistence
|
||||
redis-server --appendonly yes
|
||||
```
|
||||
|
||||
### Ollama Model Not Found
|
||||
```bash
|
||||
# Pull the model
|
||||
ollama pull qwen2.5-coder:7b-instruct
|
||||
|
||||
# Check it's installed
|
||||
ollama list
|
||||
```
|
||||
|
||||
### Indexing Takes Too Long
|
||||
The project is small (~10 files), so indexing should be nearly instant. For larger projects, add ignore patterns to `.ipuaro.json`.
|
||||
|
||||
## Learn More
|
||||
|
||||
- [ipuaro Documentation](../../README.md)
|
||||
- [Architecture Guide](../../ARCHITECTURE.md)
|
||||
- [Tools Reference](../../TOOLS.md)
|
||||
- [GitHub Repository](https://github.com/samiyev/puaros)
|
||||
|
||||
---
|
||||
|
||||
**Happy coding with ipuaro!** 🎩✨
|
||||
packages/ipuaro/examples/demo-project/package.json (new file, 20 lines)
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"name": "ipuaro-demo-project",
|
||||
"version": "1.0.0",
|
||||
"description": "Demo project for ipuaro - showcasing AI agent capabilities",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "tsx src/index.ts",
|
||||
"test": "vitest",
|
||||
"test:run": "vitest run",
|
||||
"build": "tsc"
|
||||
},
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"@types/node": "^22.10.1",
|
||||
"tsx": "^4.19.2",
|
||||
"typescript": "^5.7.2",
|
||||
"vitest": "^1.6.0"
|
||||
}
|
||||
}
|
||||
packages/ipuaro/examples/demo-project/src/auth/service.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
/**
|
||||
* Authentication service
|
||||
*/
|
||||
|
||||
import type { User, AuthToken } from "../types/user"
|
||||
import { UserService } from "../services/user"
|
||||
import { createLogger } from "../utils/logger"
|
||||
|
||||
const logger = createLogger("AuthService")
|
||||
|
||||
export class AuthService {
|
||||
private tokens: Map<string, AuthToken> = new Map()
|
||||
|
||||
constructor(private userService: UserService) {}
|
||||
|
||||
async login(email: string, password: string): Promise<AuthToken> {
|
||||
logger.info("Login attempt", { email })
|
||||
|
||||
// Get user
|
||||
const user = await this.userService.getUserByEmail(email)
|
||||
if (!user) {
|
||||
logger.warn("Login failed - user not found", { email })
|
||||
throw new Error("Invalid credentials")
|
||||
}
|
||||
|
||||
// TODO: Implement actual password verification
|
||||
// For demo purposes, we just check if password is provided
|
||||
if (!password) {
|
||||
logger.warn("Login failed - no password", { email })
|
||||
throw new Error("Invalid credentials")
|
||||
}
|
||||
|
||||
// Generate token
|
||||
const token = this.generateToken(user)
|
||||
this.tokens.set(token.token, token)
|
||||
|
||||
logger.info("Login successful", { userId: user.id })
|
||||
return token
|
||||
}
|
||||
|
||||
async logout(tokenString: string): Promise<void> {
|
||||
logger.info("Logout", { token: tokenString.substring(0, 10) + "..." })
|
||||
|
||||
const token = this.tokens.get(tokenString)
|
||||
if (!token) {
|
||||
throw new Error("Invalid token")
|
||||
}
|
||||
|
||||
this.tokens.delete(tokenString)
|
||||
logger.info("Logout successful", { userId: token.userId })
|
||||
}
|
||||
|
||||
async verifyToken(tokenString: string): Promise<User> {
|
||||
logger.debug("Verifying token")
|
||||
|
||||
const token = this.tokens.get(tokenString)
|
||||
if (!token) {
|
||||
throw new Error("Invalid token")
|
||||
}
|
||||
|
||||
if (token.expiresAt < new Date()) {
|
||||
this.tokens.delete(tokenString)
|
||||
throw new Error("Token expired")
|
||||
}
|
||||
|
||||
const user = await this.userService.getUserById(token.userId)
|
||||
if (!user) {
|
||||
throw new Error("User not found")
|
||||
}
|
||||
|
||||
return user
|
||||
}
|
||||
|
||||
private generateToken(user: User): AuthToken {
|
||||
const token = `tok_${Date.now()}_${Math.random().toString(36).substring(7)}`
|
||||
const expiresAt = new Date()
|
||||
expiresAt.setHours(expiresAt.getHours() + 24) // 24 hours
|
||||
|
||||
return {
|
||||
token,
|
||||
expiresAt,
|
||||
userId: user.id,
|
||||
}
|
||||
}
|
||||
}
|
||||
packages/ipuaro/examples/demo-project/src/index.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
|
||||
/**
|
||||
* Demo application entry point
|
||||
*/
|
||||
|
||||
import { UserService } from "./services/user"
|
||||
import { AuthService } from "./auth/service"
|
||||
import { createLogger } from "./utils/logger"
|
||||
|
||||
const logger = createLogger("App")
|
||||
|
||||
async function main(): Promise<void> {
|
||||
logger.info("Starting demo application")
|
||||
|
||||
// Initialize services
|
||||
const userService = new UserService()
|
||||
const authService = new AuthService(userService)
|
||||
|
||||
try {
|
||||
// Create a demo user
|
||||
const user = await userService.createUser({
|
||||
email: "demo@example.com",
|
||||
name: "Demo User",
|
||||
password: "password123",
|
||||
role: "admin",
|
||||
})
|
||||
|
||||
logger.info("Demo user created", { userId: user.id })
|
||||
|
||||
// Login
|
||||
const token = await authService.login("demo@example.com", "password123")
|
||||
logger.info("Login successful", { token: token.token })
|
||||
|
||||
// Verify token
|
||||
const verifiedUser = await authService.verifyToken(token.token)
|
||||
logger.info("Token verified", { userId: verifiedUser.id })
|
||||
|
||||
// Logout
|
||||
await authService.logout(token.token)
|
||||
logger.info("Logout successful")
|
||||
} catch (error) {
|
||||
logger.error("Application error", error as Error)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
logger.info("Demo application finished")
|
||||
}
|
||||
|
||||
main()
|
||||
packages/ipuaro/examples/demo-project/src/services/user.ts (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
/**
|
||||
* User service - handles user-related operations
|
||||
*/
|
||||
|
||||
import type { User, CreateUserDto, UpdateUserDto } from "../types/user"
|
||||
import { isValidEmail, isStrongPassword, ValidationError } from "../utils/validation"
|
||||
import { createLogger } from "../utils/logger"
|
||||
|
||||
const logger = createLogger("UserService")
|
||||
|
||||
export class UserService {
|
||||
private users: Map<string, User> = new Map()
|
||||
|
||||
async createUser(dto: CreateUserDto): Promise<User> {
|
||||
logger.info("Creating user", { email: dto.email })
|
||||
|
||||
// Validate email
|
||||
if (!isValidEmail(dto.email)) {
|
||||
throw new ValidationError("Invalid email address", "email")
|
||||
}
|
||||
|
||||
// Validate password
|
||||
if (!isStrongPassword(dto.password)) {
|
||||
throw new ValidationError("Password must be at least 8 characters", "password")
|
||||
}
|
||||
|
||||
// Check if user already exists
|
||||
const existingUser = Array.from(this.users.values()).find((u) => u.email === dto.email)
|
||||
|
||||
if (existingUser) {
|
||||
throw new Error("User with this email already exists")
|
||||
}
|
||||
|
||||
// Create user
|
||||
const user: User = {
|
||||
id: this.generateId(),
|
||||
email: dto.email,
|
||||
name: dto.name,
|
||||
role: dto.role || "user",
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
}
|
||||
|
||||
this.users.set(user.id, user)
|
||||
logger.info("User created", { userId: user.id })
|
||||
|
||||
return user
|
||||
}
|
||||
|
||||
async getUserById(id: string): Promise<User | null> {
|
||||
logger.debug("Getting user by ID", { userId: id })
|
||||
return this.users.get(id) || null
|
||||
}
|
||||
|
||||
async getUserByEmail(email: string): Promise<User | null> {
|
||||
logger.debug("Getting user by email", { email })
|
||||
return Array.from(this.users.values()).find((u) => u.email === email) || null
|
||||
}
|
||||
|
||||
async updateUser(id: string, dto: UpdateUserDto): Promise<User> {
|
||||
logger.info("Updating user", { userId: id })
|
||||
|
||||
const user = this.users.get(id)
|
||||
if (!user) {
|
||||
throw new Error("User not found")
|
||||
}
|
||||
|
||||
const updated: User = {
|
||||
...user,
|
||||
...(dto.name && { name: dto.name }),
|
||||
...(dto.role && { role: dto.role }),
|
||||
updatedAt: new Date(),
|
||||
}
|
||||
|
||||
this.users.set(id, updated)
|
||||
logger.info("User updated", { userId: id })
|
||||
|
||||
return updated
|
||||
}
|
||||
|
||||
async deleteUser(id: string): Promise<void> {
|
||||
logger.info("Deleting user", { userId: id })
|
||||
|
||||
if (!this.users.has(id)) {
|
||||
throw new Error("User not found")
|
||||
}
|
||||
|
||||
this.users.delete(id)
|
||||
logger.info("User deleted", { userId: id })
|
||||
}
|
||||
|
||||
async listUsers(): Promise<User[]> {
|
||||
logger.debug("Listing all users")
|
||||
return Array.from(this.users.values())
|
||||
}
|
||||
|
||||
private generateId(): string {
|
||||
return `user_${Date.now()}_${Math.random().toString(36).substring(7)}`
|
||||
}
|
||||
}
|
||||
packages/ipuaro/examples/demo-project/src/types/user.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
/**
|
||||
* User-related type definitions
|
||||
*/
|
||||
|
||||
export interface User {
|
||||
id: string
|
||||
email: string
|
||||
name: string
|
||||
role: UserRole
|
||||
createdAt: Date
|
||||
updatedAt: Date
|
||||
}
|
||||
|
||||
export type UserRole = "admin" | "user" | "guest"
|
||||
|
||||
export interface CreateUserDto {
|
||||
email: string
|
||||
name: string
|
||||
password: string
|
||||
role?: UserRole
|
||||
}
|
||||
|
||||
export interface UpdateUserDto {
|
||||
name?: string
|
||||
role?: UserRole
|
||||
}
|
||||
|
||||
export interface AuthToken {
|
||||
token: string
|
||||
expiresAt: Date
|
||||
userId: string
|
||||
}
|
||||
packages/ipuaro/examples/demo-project/src/utils/logger.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
/**
|
||||
* Simple logging utility
|
||||
*/
|
||||
|
||||
export type LogLevel = "debug" | "info" | "warn" | "error"
|
||||
|
||||
export class Logger {
|
||||
constructor(private context: string) {}
|
||||
|
||||
debug(message: string, meta?: Record<string, unknown>): void {
|
||||
this.log("debug", message, meta)
|
||||
}
|
||||
|
||||
info(message: string, meta?: Record<string, unknown>): void {
|
||||
this.log("info", message, meta)
|
||||
}
|
||||
|
||||
warn(message: string, meta?: Record<string, unknown>): void {
|
||||
this.log("warn", message, meta)
|
||||
}
|
||||
|
||||
error(message: string, error?: Error, meta?: Record<string, unknown>): void {
|
||||
this.log("error", message, { ...meta, error: error?.message })
|
||||
}
|
||||
|
||||
private log(level: LogLevel, message: string, meta?: Record<string, unknown>): void {
|
||||
const timestamp = new Date().toISOString()
|
||||
const logEntry = {
|
||||
timestamp,
|
||||
level,
|
||||
context: this.context,
|
||||
message,
|
||||
...(meta && { meta }),
|
||||
}
|
||||
console.log(JSON.stringify(logEntry))
|
||||
}
|
||||
}
|
||||
|
||||
export function createLogger(context: string): Logger {
|
||||
return new Logger(context)
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
/**
|
||||
* Validation utilities
|
||||
*/
|
||||
|
||||
export function isValidEmail(email: string): boolean {
|
||||
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/
|
||||
return emailRegex.test(email)
|
||||
}
|
||||
|
||||
export function isStrongPassword(password: string): boolean {
|
||||
// TODO: Add more sophisticated password validation
|
||||
return password.length >= 8
|
||||
}
|
||||
|
||||
export function sanitizeInput(input: string): string {
|
||||
// FIXME: This is a basic implementation, needs XSS protection
|
||||
return input.trim().replace(/[<>]/g, "")
|
||||
}
|
||||
|
||||
export class ValidationError extends Error {
|
||||
constructor(
|
||||
message: string,
|
||||
public field: string,
|
||||
) {
|
||||
super(message)
|
||||
this.name = "ValidationError"
|
||||
}
|
||||
}
|
||||
packages/ipuaro/examples/demo-project/tests/user.test.ts (new file, 141 lines)
@@ -0,0 +1,141 @@
/**
 * User service tests
 */

import { describe, it, expect, beforeEach } from "vitest"
import { UserService } from "../src/services/user"
import { ValidationError } from "../src/utils/validation"

describe("UserService", () => {
    let userService: UserService

    beforeEach(() => {
        userService = new UserService()
    })

    describe("createUser", () => {
        it("should create a new user", async () => {
            const user = await userService.createUser({
                email: "test@example.com",
                name: "Test User",
                password: "password123",
            })

            expect(user).toBeDefined()
            expect(user.email).toBe("test@example.com")
            expect(user.name).toBe("Test User")
            expect(user.role).toBe("user")
        })

        it("should reject invalid email", async () => {
            await expect(
                userService.createUser({
                    email: "invalid-email",
                    name: "Test User",
                    password: "password123",
                }),
            ).rejects.toThrow(ValidationError)
        })

        it("should reject weak password", async () => {
            await expect(
                userService.createUser({
                    email: "test@example.com",
                    name: "Test User",
                    password: "weak",
                }),
            ).rejects.toThrow(ValidationError)
        })

        it("should prevent duplicate emails", async () => {
            await userService.createUser({
                email: "test@example.com",
                name: "Test User",
                password: "password123",
            })

            await expect(
                userService.createUser({
                    email: "test@example.com",
                    name: "Another User",
                    password: "password123",
                }),
            ).rejects.toThrow("already exists")
        })
    })

    describe("getUserById", () => {
        it("should return user by ID", async () => {
            const created = await userService.createUser({
                email: "test@example.com",
                name: "Test User",
                password: "password123",
            })

            const found = await userService.getUserById(created.id)
            expect(found).toEqual(created)
        })

        it("should return null for non-existent ID", async () => {
            const found = await userService.getUserById("non-existent")
            expect(found).toBeNull()
        })
    })

    describe("updateUser", () => {
        it("should update user name", async () => {
            const user = await userService.createUser({
                email: "test@example.com",
                name: "Test User",
                password: "password123",
            })

            const updated = await userService.updateUser(user.id, {
                name: "Updated Name",
            })

            expect(updated.name).toBe("Updated Name")
            expect(updated.email).toBe(user.email)
        })

        it("should throw error for non-existent user", async () => {
            await expect(userService.updateUser("non-existent", { name: "Test" })).rejects.toThrow(
                "not found",
            )
        })
    })

    describe("deleteUser", () => {
        it("should delete user", async () => {
            const user = await userService.createUser({
                email: "test@example.com",
                name: "Test User",
                password: "password123",
            })

            await userService.deleteUser(user.id)

            const found = await userService.getUserById(user.id)
            expect(found).toBeNull()
        })
    })

    describe("listUsers", () => {
        it("should return all users", async () => {
            await userService.createUser({
                email: "user1@example.com",
                name: "User 1",
                password: "password123",
            })

            await userService.createUser({
                email: "user2@example.com",
                name: "User 2",
                password: "password123",
            })

            const users = await userService.listUsers()
            expect(users).toHaveLength(2)
        })
    })
})
16  packages/ipuaro/examples/demo-project/tsconfig.json  Normal file
@@ -0,0 +1,16 @@
{
    "compilerOptions": {
        "target": "ES2023",
        "module": "ESNext",
        "lib": ["ES2023"],
        "moduleResolution": "Bundler",
        "esModuleInterop": true,
        "strict": true,
        "skipLibCheck": true,
        "resolveJsonModule": true,
        "outDir": "dist",
        "rootDir": "src"
    },
    "include": ["src/**/*"],
    "exclude": ["node_modules", "dist", "tests"]
}
8  packages/ipuaro/examples/demo-project/vitest.config.ts  Normal file
@@ -0,0 +1,8 @@
import { defineConfig } from "vitest/config"

export default defineConfig({
    test: {
        globals: true,
        environment: "node",
    },
})
@@ -1,6 +1,6 @@
{
    "name": "@samiyev/ipuaro",
    "version": "0.1.1",
    "version": "0.22.4",
    "description": "Local AI agent for codebase operations with infinite context feeling",
    "author": "Fozilbek Samiyev <fozilbek.samiyev@gmail.com>",
    "license": "MIT",
@@ -8,7 +8,7 @@
    "main": "./dist/index.js",
    "types": "./dist/index.d.ts",
    "bin": {
        "ipuaro": "./bin/ipuaro.js"
        "ipuaro": "bin/ipuaro.js"
    },
    "exports": {
        ".": {
@@ -33,28 +33,32 @@
        "format": "prettier --write src"
    },
    "dependencies": {
        "ink": "^4.4.1",
        "ink-text-input": "^5.0.1",
        "react": "^18.2.0",
        "ioredis": "^5.4.1",
        "tree-sitter": "^0.21.1",
        "tree-sitter-typescript": "^0.21.2",
        "tree-sitter-javascript": "^0.21.0",
        "ollama": "^0.5.11",
        "simple-git": "^3.27.0",
        "chokidar": "^3.6.0",
        "commander": "^11.1.0",
        "zod": "^3.23.8",
        "ignore": "^5.3.2"
        "globby": "^16.0.0",
        "ink": "^4.4.1",
        "ink-text-input": "^5.0.1",
        "ioredis": "^5.4.1",
        "ollama": "^0.5.11",
        "react": "^18.2.0",
        "simple-git": "^3.27.0",
        "tree-sitter": "^0.21.1",
        "tree-sitter-javascript": "^0.21.0",
        "tree-sitter-typescript": "^0.21.2",
        "zod": "^3.23.8"
    },
    "devDependencies": {
        "@testing-library/react": "^16.3.0",
        "@types/jsdom": "^27.0.0",
        "@types/node": "^22.10.1",
        "@types/react": "^18.2.0",
        "vitest": "^1.6.0",
        "@vitest/coverage-v8": "^1.6.0",
        "@vitest/ui": "^1.6.0",
        "jsdom": "^27.2.0",
        "react-dom": "18.3.1",
        "tsup": "^8.3.5",
        "typescript": "^5.7.2"
        "typescript": "^5.7.2",
        "vitest": "^1.6.0"
    },
    "engines": {
        "node": ">=20.0.0"
@@ -70,7 +74,7 @@
    ],
    "repository": {
        "type": "git",
        "url": "https://github.com/samiyev/puaros.git",
        "url": "git+https://github.com/samiyev/puaros.git",
        "directory": "packages/ipuaro"
    },
    "bugs": {
234  packages/ipuaro/src/application/use-cases/ContextManager.ts  Normal file
@@ -0,0 +1,234 @@
|
||||
import type { ContextState, Session } from "../../domain/entities/Session.js"
|
||||
import type { ILLMClient } from "../../domain/services/ILLMClient.js"
|
||||
import { type ChatMessage, createSystemMessage } from "../../domain/value-objects/ChatMessage.js"
|
||||
import { CONTEXT_COMPRESSION_THRESHOLD, CONTEXT_WINDOW_SIZE } from "../../domain/constants/index.js"
|
||||
import type { ContextConfig } from "../../shared/constants/config.js"
|
||||
|
||||
/**
|
||||
* File in context with token count.
|
||||
*/
|
||||
export interface FileContext {
|
||||
path: string
|
||||
tokens: number
|
||||
addedAt: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Compression result.
|
||||
*/
|
||||
export interface CompressionResult {
|
||||
compressed: boolean
|
||||
removedMessages: number
|
||||
tokensSaved: number
|
||||
summary?: string
|
||||
}
|
||||
|
||||
const COMPRESSION_PROMPT = `Summarize the following conversation history in a concise way,
|
||||
preserving key information about:
|
||||
- What files were discussed or modified
|
||||
- What changes were made
|
||||
- Important decisions or context
|
||||
Keep the summary under 500 tokens.`
|
||||
|
||||
const MESSAGES_TO_KEEP = 5
|
||||
const MIN_MESSAGES_FOR_COMPRESSION = 10
|
||||
|
||||
/**
|
||||
* Manages context window token budget and compression.
|
||||
*/
|
||||
export class ContextManager {
|
||||
private readonly filesInContext = new Map<string, FileContext>()
|
||||
private currentTokens = 0
|
||||
private readonly contextWindowSize: number
|
||||
private readonly compressionThreshold: number
|
||||
private readonly compressionMethod: "llm-summary" | "truncate"
|
||||
|
||||
constructor(contextWindowSize: number = CONTEXT_WINDOW_SIZE, config?: ContextConfig) {
|
||||
this.contextWindowSize = contextWindowSize
|
||||
this.compressionThreshold = config?.autoCompressAt ?? CONTEXT_COMPRESSION_THRESHOLD
|
||||
this.compressionMethod = config?.compressionMethod ?? "llm-summary"
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a file to the context.
|
||||
*/
|
||||
addToContext(file: string, tokens: number): void {
|
||||
const existing = this.filesInContext.get(file)
|
||||
if (existing) {
|
||||
this.currentTokens -= existing.tokens
|
||||
}
|
||||
|
||||
this.filesInContext.set(file, {
|
||||
path: file,
|
||||
tokens,
|
||||
addedAt: Date.now(),
|
||||
})
|
||||
this.currentTokens += tokens
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a file from the context.
|
||||
*/
|
||||
removeFromContext(file: string): void {
|
||||
const existing = this.filesInContext.get(file)
|
||||
if (existing) {
|
||||
this.currentTokens -= existing.tokens
|
||||
this.filesInContext.delete(file)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current token usage ratio (0-1).
|
||||
*/
|
||||
getUsage(): number {
|
||||
return this.currentTokens / this.contextWindowSize
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current token count.
|
||||
*/
|
||||
getTokenCount(): number {
|
||||
return this.currentTokens
|
||||
}
|
||||
|
||||
/**
|
||||
* Get available tokens.
|
||||
*/
|
||||
getAvailableTokens(): number {
|
||||
return this.contextWindowSize - this.currentTokens
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if compression is needed.
|
||||
*/
|
||||
needsCompression(): boolean {
|
||||
return this.getUsage() > this.compressionThreshold
|
||||
}
|
||||
|
||||
/**
|
||||
* Update token count (e.g., after receiving a message).
|
||||
*/
|
||||
addTokens(tokens: number): void {
|
||||
this.currentTokens += tokens
|
||||
}
|
||||
|
||||
/**
|
||||
* Get files in context.
|
||||
*/
|
||||
getFilesInContext(): string[] {
|
||||
return Array.from(this.filesInContext.keys())
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync context state from session.
|
||||
*/
|
||||
syncFromSession(session: Session): void {
|
||||
this.filesInContext.clear()
|
||||
this.currentTokens = 0
|
||||
|
||||
for (const file of session.context.filesInContext) {
|
||||
this.filesInContext.set(file, {
|
||||
path: file,
|
||||
tokens: 0,
|
||||
addedAt: Date.now(),
|
||||
})
|
||||
}
|
||||
|
||||
this.currentTokens = Math.floor(session.context.tokenUsage * this.contextWindowSize)
|
||||
}
|
||||
|
||||
/**
|
||||
* Update session context state.
|
||||
*/
|
||||
updateSession(session: Session): void {
|
||||
session.context.filesInContext = this.getFilesInContext()
|
||||
session.context.tokenUsage = this.getUsage()
|
||||
session.context.needsCompression = this.needsCompression()
|
||||
}
|
||||
|
||||
/**
|
||||
* Compress context using LLM to summarize old messages.
|
||||
*/
|
||||
async compress(session: Session, llm: ILLMClient): Promise<CompressionResult> {
|
||||
const history = session.history
|
||||
if (history.length < MIN_MESSAGES_FOR_COMPRESSION) {
|
||||
return {
|
||||
compressed: false,
|
||||
removedMessages: 0,
|
||||
tokensSaved: 0,
|
||||
}
|
||||
}
|
||||
|
||||
const messagesToCompress = history.slice(0, -MESSAGES_TO_KEEP)
|
||||
const messagesToKeep = history.slice(-MESSAGES_TO_KEEP)
|
||||
|
||||
const tokensBeforeCompression = await this.countHistoryTokens(messagesToCompress, llm)
|
||||
|
||||
const summary = await this.summarizeMessages(messagesToCompress, llm)
|
||||
const summaryTokens = await llm.countTokens(summary)
|
||||
|
||||
const summaryMessage = createSystemMessage(`[Previous conversation summary]\n${summary}`)
|
||||
|
||||
session.history = [summaryMessage, ...messagesToKeep]
|
||||
|
||||
const tokensSaved = tokensBeforeCompression - summaryTokens
|
||||
this.currentTokens -= tokensSaved
|
||||
|
||||
this.updateSession(session)
|
||||
|
||||
return {
|
||||
compressed: true,
|
||||
removedMessages: messagesToCompress.length,
|
||||
tokensSaved,
|
||||
summary,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new context state.
|
||||
*/
|
||||
static createInitialState(): ContextState {
|
||||
return {
|
||||
filesInContext: [],
|
||||
tokenUsage: 0,
|
||||
needsCompression: false,
|
||||
}
|
||||
}
|
||||
|
||||
private async summarizeMessages(messages: ChatMessage[], llm: ILLMClient): Promise<string> {
|
||||
const conversation = this.formatMessagesForSummary(messages)
|
||||
|
||||
const response = await llm.chat([
|
||||
createSystemMessage(COMPRESSION_PROMPT),
|
||||
createSystemMessage(conversation),
|
||||
])
|
||||
|
||||
return response.content
|
||||
}
|
||||
|
||||
private formatMessagesForSummary(messages: ChatMessage[]): string {
|
||||
return messages
|
||||
.filter((m) => m.role !== "tool")
|
||||
.map((m) => {
|
||||
const role = m.role === "user" ? "User" : "Assistant"
|
||||
const content = this.truncateContent(m.content, 500)
|
||||
return `${role}: ${content}`
|
||||
})
|
||||
.join("\n\n")
|
||||
}
|
||||
|
||||
private truncateContent(content: string, maxLength: number): string {
|
||||
if (content.length <= maxLength) {
|
||||
return content
|
||||
}
|
||||
return `${content.slice(0, maxLength)}...`
|
||||
}
|
||||
|
||||
private async countHistoryTokens(messages: ChatMessage[], llm: ILLMClient): Promise<number> {
|
||||
let total = 0
|
||||
for (const message of messages) {
|
||||
total += await llm.countTokens(message.content)
|
||||
}
|
||||
return total
|
||||
}
|
||||
}
|
||||
224  packages/ipuaro/src/application/use-cases/ExecuteTool.ts  Normal file
@@ -0,0 +1,224 @@
|
||||
import { randomUUID } from "node:crypto"
|
||||
import type { Session } from "../../domain/entities/Session.js"
|
||||
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"
|
||||
import type { IStorage } from "../../domain/services/IStorage.js"
|
||||
import type { DiffInfo, ToolContext } from "../../domain/services/ITool.js"
|
||||
import type { ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||
import { createErrorResult, type ToolResult } from "../../domain/value-objects/ToolResult.js"
|
||||
import { createUndoEntry } from "../../domain/value-objects/UndoEntry.js"
|
||||
import type { IToolRegistry } from "../interfaces/IToolRegistry.js"
|
||||
|
||||
/**
|
||||
* Result of confirmation dialog.
|
||||
*/
|
||||
export interface ConfirmationResult {
|
||||
confirmed: boolean
|
||||
editedContent?: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Confirmation handler callback type.
|
||||
* Can return either a boolean (for backward compatibility) or a ConfirmationResult.
|
||||
*/
|
||||
export type ConfirmationHandler = (
|
||||
message: string,
|
||||
diff?: DiffInfo,
|
||||
) => Promise<boolean | ConfirmationResult>
|
||||
|
||||
/**
|
||||
* Progress handler callback type.
|
||||
*/
|
||||
export type ProgressHandler = (message: string) => void
|
||||
|
||||
/**
|
||||
* Options for ExecuteTool.
|
||||
*/
|
||||
export interface ExecuteToolOptions {
|
||||
/** Auto-apply edits without confirmation */
|
||||
autoApply?: boolean
|
||||
/** Confirmation handler */
|
||||
onConfirmation?: ConfirmationHandler
|
||||
/** Progress handler */
|
||||
onProgress?: ProgressHandler
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of tool execution.
|
||||
*/
|
||||
export interface ExecuteToolResult {
|
||||
result: ToolResult
|
||||
undoEntryCreated: boolean
|
||||
undoEntryId?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Use case for executing a single tool.
|
||||
* Orchestrates tool execution with:
|
||||
* - Parameter validation
|
||||
* - Confirmation flow
|
||||
* - Undo stack management
|
||||
* - Storage updates
|
||||
*/
|
||||
export class ExecuteTool {
|
||||
private readonly storage: IStorage
|
||||
private readonly sessionStorage: ISessionStorage
|
||||
private readonly tools: IToolRegistry
|
||||
private readonly projectRoot: string
|
||||
private lastUndoEntryId?: string
|
||||
|
||||
constructor(
|
||||
storage: IStorage,
|
||||
sessionStorage: ISessionStorage,
|
||||
tools: IToolRegistry,
|
||||
projectRoot: string,
|
||||
) {
|
||||
this.storage = storage
|
||||
this.sessionStorage = sessionStorage
|
||||
this.tools = tools
|
||||
this.projectRoot = projectRoot
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a tool call.
|
||||
*
|
||||
* @param toolCall - The tool call to execute
|
||||
* @param session - Current session (for undo stack)
|
||||
* @param options - Execution options
|
||||
* @returns Execution result
|
||||
*/
|
||||
async execute(
|
||||
toolCall: ToolCall,
|
||||
session: Session,
|
||||
options: ExecuteToolOptions = {},
|
||||
): Promise<ExecuteToolResult> {
|
||||
this.lastUndoEntryId = undefined
|
||||
const startTime = Date.now()
|
||||
const tool = this.tools.get(toolCall.name)
|
||||
|
||||
if (!tool) {
|
||||
return {
|
||||
result: createErrorResult(
|
||||
toolCall.id,
|
||||
`Unknown tool: ${toolCall.name}`,
|
||||
Date.now() - startTime,
|
||||
),
|
||||
undoEntryCreated: false,
|
||||
}
|
||||
}
|
||||
|
||||
const validationError = tool.validateParams(toolCall.params)
|
||||
if (validationError) {
|
||||
return {
|
||||
result: createErrorResult(toolCall.id, validationError, Date.now() - startTime),
|
||||
undoEntryCreated: false,
|
||||
}
|
||||
}
|
||||
|
||||
const context = this.buildToolContext(toolCall, session, options)
|
||||
|
||||
try {
|
||||
const result = await tool.execute(toolCall.params, context)
|
||||
|
||||
return {
|
||||
result,
|
||||
undoEntryCreated: this.lastUndoEntryId !== undefined,
|
||||
undoEntryId: this.lastUndoEntryId,
|
||||
}
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
return {
|
||||
result: createErrorResult(toolCall.id, errorMessage, Date.now() - startTime),
|
||||
undoEntryCreated: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build tool context for execution.
|
||||
*/
|
||||
private buildToolContext(
|
||||
toolCall: ToolCall,
|
||||
session: Session,
|
||||
options: ExecuteToolOptions,
|
||||
): ToolContext {
|
||||
return {
|
||||
projectRoot: this.projectRoot,
|
||||
storage: this.storage,
|
||||
requestConfirmation: async (msg: string, diff?: DiffInfo) => {
|
||||
return this.handleConfirmation(msg, diff, toolCall, session, options)
|
||||
},
|
||||
onProgress: (msg: string) => {
|
||||
options.onProgress?.(msg)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle confirmation for tool actions.
|
||||
* Supports edited content from user.
|
||||
*/
|
||||
private async handleConfirmation(
|
||||
msg: string,
|
||||
diff: DiffInfo | undefined,
|
||||
toolCall: ToolCall,
|
||||
session: Session,
|
||||
options: ExecuteToolOptions,
|
||||
): Promise<boolean> {
|
||||
if (options.autoApply) {
|
||||
if (diff) {
|
||||
this.lastUndoEntryId = await this.createUndoEntry(diff, toolCall, session)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
if (options.onConfirmation) {
|
||||
const result = await options.onConfirmation(msg, diff)
|
||||
|
||||
const confirmed = typeof result === "boolean" ? result : result.confirmed
|
||||
const editedContent = typeof result === "boolean" ? undefined : result.editedContent
|
||||
|
||||
if (confirmed && diff) {
|
||||
if (editedContent && editedContent.length > 0) {
|
||||
diff.newLines = editedContent
|
||||
if (toolCall.params.content && typeof toolCall.params.content === "string") {
|
||||
toolCall.params.content = editedContent.join("\n")
|
||||
}
|
||||
}
|
||||
|
||||
this.lastUndoEntryId = await this.createUndoEntry(diff, toolCall, session)
|
||||
}
|
||||
|
||||
return confirmed
|
||||
}
|
||||
|
||||
if (diff) {
|
||||
this.lastUndoEntryId = await this.createUndoEntry(diff, toolCall, session)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Create undo entry from diff.
|
||||
*/
|
||||
private async createUndoEntry(
|
||||
diff: DiffInfo,
|
||||
toolCall: ToolCall,
|
||||
session: Session,
|
||||
): Promise<string> {
|
||||
const entryId = randomUUID()
|
||||
const entry = createUndoEntry(
|
||||
entryId,
|
||||
diff.filePath,
|
||||
diff.oldLines,
|
||||
diff.newLines,
|
||||
`${toolCall.name}: ${diff.filePath}`,
|
||||
toolCall.id,
|
||||
)
|
||||
|
||||
session.addUndoEntry(entry)
|
||||
await this.sessionStorage.pushUndoEntry(session.id, entry)
|
||||
session.stats.editsApplied++
|
||||
|
||||
return entryId
|
||||
}
|
||||
}
|
||||
352  packages/ipuaro/src/application/use-cases/HandleMessage.ts  Normal file
@@ -0,0 +1,352 @@
|
||||
import type { Session } from "../../domain/entities/Session.js"
|
||||
import type { ILLMClient } from "../../domain/services/ILLMClient.js"
|
||||
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"
|
||||
import type { IStorage } from "../../domain/services/IStorage.js"
|
||||
import type { DiffInfo } from "../../domain/services/ITool.js"
|
||||
import {
|
||||
type ChatMessage,
|
||||
createAssistantMessage,
|
||||
createSystemMessage,
|
||||
createToolMessage,
|
||||
createUserMessage,
|
||||
} from "../../domain/value-objects/ChatMessage.js"
|
||||
import type { ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||
import type { ToolResult } from "../../domain/value-objects/ToolResult.js"
|
||||
import type { UndoEntry } from "../../domain/value-objects/UndoEntry.js"
|
||||
import { type ErrorOption, IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||
import {
|
||||
buildInitialContext,
|
||||
type ProjectStructure,
|
||||
SYSTEM_PROMPT,
|
||||
} from "../../infrastructure/llm/prompts.js"
|
||||
import { parseToolCalls } from "../../infrastructure/llm/ResponseParser.js"
|
||||
import type { IToolRegistry } from "../interfaces/IToolRegistry.js"
|
||||
import { ContextManager } from "./ContextManager.js"
|
||||
import { type ConfirmationResult, ExecuteTool } from "./ExecuteTool.js"
|
||||
|
||||
/**
|
||||
* Status during message handling.
|
||||
*/
|
||||
export type HandleMessageStatus =
|
||||
| "ready"
|
||||
| "thinking"
|
||||
| "tool_call"
|
||||
| "awaiting_confirmation"
|
||||
| "error"
|
||||
|
||||
/**
|
||||
* Edit request for confirmation.
|
||||
*/
|
||||
export interface EditRequest {
|
||||
toolCall: ToolCall
|
||||
filePath: string
|
||||
description: string
|
||||
diff?: DiffInfo
|
||||
}
|
||||
|
||||
/**
|
||||
* User's choice for edit confirmation.
|
||||
*/
|
||||
export type EditChoice = "apply" | "skip" | "edit" | "abort"
|
||||
|
||||
/**
|
||||
* Event callbacks for HandleMessage.
|
||||
*/
|
||||
export interface HandleMessageEvents {
|
||||
onMessage?: (message: ChatMessage) => void
|
||||
onToolCall?: (call: ToolCall) => void
|
||||
onToolResult?: (result: ToolResult) => void
|
||||
onConfirmation?: (message: string, diff?: DiffInfo) => Promise<boolean | ConfirmationResult>
|
||||
onError?: (error: IpuaroError) => Promise<ErrorOption>
|
||||
onStatusChange?: (status: HandleMessageStatus) => void
|
||||
onUndoEntry?: (entry: UndoEntry) => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for HandleMessage.
|
||||
*/
|
||||
export interface HandleMessageOptions {
|
||||
autoApply?: boolean
|
||||
maxToolCalls?: number
|
||||
maxHistoryMessages?: number
|
||||
saveInputHistory?: boolean
|
||||
contextConfig?: import("../../shared/constants/config.js").ContextConfig
|
||||
}
|
||||
|
||||
const DEFAULT_MAX_TOOL_CALLS = 20
|
||||
|
||||
/**
|
||||
* Use case for handling a user message.
|
||||
* Main orchestrator for the LLM interaction loop.
|
||||
*/
|
||||
export class HandleMessage {
|
||||
private readonly storage: IStorage
|
||||
private readonly sessionStorage: ISessionStorage
|
||||
private readonly llm: ILLMClient
|
||||
private readonly tools: IToolRegistry
|
||||
private readonly contextManager: ContextManager
|
||||
private readonly executeTool: ExecuteTool
|
||||
private readonly projectRoot: string
|
||||
private projectStructure?: ProjectStructure
|
||||
|
||||
private events: HandleMessageEvents = {}
|
||||
private options: HandleMessageOptions = {}
|
||||
private aborted = false
|
||||
|
||||
constructor(
|
||||
storage: IStorage,
|
||||
sessionStorage: ISessionStorage,
|
||||
llm: ILLMClient,
|
||||
tools: IToolRegistry,
|
||||
projectRoot: string,
|
||||
contextConfig?: import("../../shared/constants/config.js").ContextConfig,
|
||||
) {
|
||||
this.storage = storage
|
||||
this.sessionStorage = sessionStorage
|
||||
this.llm = llm
|
||||
this.tools = tools
|
||||
this.projectRoot = projectRoot
|
||||
this.contextManager = new ContextManager(llm.getContextWindowSize(), contextConfig)
|
||||
this.executeTool = new ExecuteTool(storage, sessionStorage, tools, projectRoot)
|
||||
}
|
||||
|
||||
/**
|
||||
* Set event callbacks.
|
||||
*/
|
||||
setEvents(events: HandleMessageEvents): void {
|
||||
this.events = events
|
||||
}
|
||||
|
||||
/**
|
||||
* Set options.
|
||||
*/
|
||||
setOptions(options: HandleMessageOptions): void {
|
||||
this.options = options
|
||||
}
|
||||
|
||||
/**
|
||||
* Set project structure for context building.
|
||||
*/
|
||||
setProjectStructure(structure: ProjectStructure): void {
|
||||
this.projectStructure = structure
|
||||
}
|
||||
|
||||
/**
|
||||
* Abort current processing.
|
||||
*/
|
||||
abort(): void {
|
||||
this.aborted = true
|
||||
this.llm.abort()
|
||||
}
|
||||
|
||||
/**
|
||||
* Truncate session history if maxHistoryMessages is set.
|
||||
*/
|
||||
private truncateHistoryIfNeeded(session: Session): void {
|
||||
if (this.options.maxHistoryMessages !== undefined) {
|
||||
session.truncateHistory(this.options.maxHistoryMessages)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the message handling flow.
|
||||
*/
|
||||
async execute(session: Session, message: string): Promise<void> {
|
||||
this.aborted = false
|
||||
this.contextManager.syncFromSession(session)
|
||||
|
||||
if (message.trim()) {
|
||||
const userMessage = createUserMessage(message)
|
||||
session.addMessage(userMessage)
|
||||
this.truncateHistoryIfNeeded(session)
|
||||
|
||||
if (this.options.saveInputHistory !== false) {
|
||||
session.addInputToHistory(message)
|
||||
}
|
||||
|
||||
this.emitMessage(userMessage)
|
||||
}
|
||||
|
||||
await this.sessionStorage.saveSession(session)
|
||||
|
||||
this.emitStatus("thinking")
|
||||
|
||||
let toolCallCount = 0
|
||||
const maxToolCalls = this.options.maxToolCalls ?? DEFAULT_MAX_TOOL_CALLS
|
||||
|
||||
while (!this.aborted) {
|
||||
const messages = await this.buildMessages(session)
|
||||
|
||||
const startTime = Date.now()
|
||||
let response
|
||||
|
||||
try {
|
||||
response = await this.llm.chat(messages)
|
||||
} catch (error) {
|
||||
await this.handleLLMError(error, session)
|
||||
return
|
||||
}
|
||||
|
||||
if (this.aborted) {
|
||||
return
|
||||
}
|
||||
|
||||
const parsed = parseToolCalls(response.content)
|
||||
const timeMs = Date.now() - startTime
|
||||
|
||||
if (parsed.toolCalls.length === 0) {
|
||||
const assistantMessage = createAssistantMessage(parsed.content, undefined, {
|
||||
tokens: response.tokens,
|
||||
timeMs,
|
||||
toolCalls: 0,
|
||||
})
|
||||
session.addMessage(assistantMessage)
|
||||
this.truncateHistoryIfNeeded(session)
|
||||
this.emitMessage(assistantMessage)
|
||||
this.contextManager.addTokens(response.tokens)
|
||||
this.contextManager.updateSession(session)
|
||||
await this.sessionStorage.saveSession(session)
|
||||
this.emitStatus("ready")
|
||||
return
|
||||
}
|
||||
|
||||
const assistantMessage = createAssistantMessage(parsed.content, parsed.toolCalls, {
|
||||
tokens: response.tokens,
|
||||
timeMs,
|
||||
toolCalls: parsed.toolCalls.length,
|
||||
})
|
||||
session.addMessage(assistantMessage)
|
||||
this.truncateHistoryIfNeeded(session)
|
||||
this.emitMessage(assistantMessage)
|
||||
|
||||
toolCallCount += parsed.toolCalls.length
|
||||
if (toolCallCount > maxToolCalls) {
|
||||
const errorMsg = `Maximum tool calls (${String(maxToolCalls)}) exceeded`
|
||||
const errorMessage = createSystemMessage(errorMsg)
|
||||
session.addMessage(errorMessage)
|
||||
this.truncateHistoryIfNeeded(session)
|
||||
this.emitMessage(errorMessage)
|
||||
this.emitStatus("ready")
|
||||
return
|
||||
}
|
||||
|
||||
this.emitStatus("tool_call")
|
||||
|
||||
const results: ToolResult[] = []
|
||||
|
||||
for (const toolCall of parsed.toolCalls) {
|
||||
if (this.aborted) {
|
||||
return
|
||||
}
|
||||
|
||||
this.emitToolCall(toolCall)
|
||||
|
||||
const result = await this.executeToolCall(toolCall, session)
|
||||
results.push(result)
|
||||
this.emitToolResult(result)
|
||||
}
|
||||
|
||||
const toolMessage = createToolMessage(results)
|
||||
session.addMessage(toolMessage)
|
||||
this.truncateHistoryIfNeeded(session)
|
||||
|
||||
this.contextManager.addTokens(response.tokens)
|
||||
|
||||
if (this.contextManager.needsCompression()) {
|
||||
await this.contextManager.compress(session, this.llm)
|
||||
}
|
||||
|
||||
this.contextManager.updateSession(session)
|
||||
await this.sessionStorage.saveSession(session)
|
||||
|
||||
this.emitStatus("thinking")
|
||||
}
|
||||
}
|
||||
|
||||
private async buildMessages(session: Session): Promise<ChatMessage[]> {
|
||||
const messages: ChatMessage[] = []
|
||||
|
||||
messages.push(createSystemMessage(SYSTEM_PROMPT))
|
||||
|
||||
if (this.projectStructure) {
|
||||
const asts = await this.storage.getAllASTs()
|
||||
const metas = await this.storage.getAllMetas()
|
||||
const context = buildInitialContext(this.projectStructure, asts, metas)
|
||||
messages.push(createSystemMessage(context))
|
||||
}
|
||||
|
||||
messages.push(...session.history)
|
||||
|
||||
return messages
|
||||
}
|
||||
|
||||
private async executeToolCall(toolCall: ToolCall, session: Session): Promise<ToolResult> {
|
||||
const { result, undoEntryCreated, undoEntryId } = await this.executeTool.execute(
|
||||
toolCall,
|
||||
session,
|
||||
{
|
||||
autoApply: this.options.autoApply,
|
||||
onConfirmation: async (msg: string, diff?: DiffInfo) => {
|
||||
this.emitStatus("awaiting_confirmation")
|
||||
if (this.events.onConfirmation) {
|
||||
return this.events.onConfirmation(msg, diff)
|
||||
}
|
||||
return true
|
||||
},
|
||||
onProgress: (_msg: string) => {
|
||||
this.events.onStatusChange?.("tool_call")
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
if (undoEntryCreated && undoEntryId) {
|
||||
const undoEntry = session.undoStack.find((entry) => entry.id === undoEntryId)
|
||||
if (undoEntry) {
|
||||
this.events.onUndoEntry?.(undoEntry)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
private async handleLLMError(error: unknown, session: Session): Promise<void> {
|
||||
this.emitStatus("error")
|
||||
|
||||
const ipuaroError =
|
||||
error instanceof IpuaroError
|
||||
? error
|
||||
: IpuaroError.llm(error instanceof Error ? error.message : String(error))
|
||||
|
||||
if (this.events.onError) {
|
||||
const choice = await this.events.onError(ipuaroError)
|
||||
|
||||
if (choice === "retry") {
|
||||
this.emitStatus("thinking")
|
||||
return this.execute(session, "")
|
||||
}
|
||||
}
|
||||
|
||||
const errorMessage = createSystemMessage(`Error: ${ipuaroError.message}`)
|
||||
session.addMessage(errorMessage)
|
||||
this.truncateHistoryIfNeeded(session)
|
||||
this.emitMessage(errorMessage)
|
||||
|
||||
this.emitStatus("ready")
|
||||
}
|
||||
|
||||
private emitMessage(message: ChatMessage): void {
|
||||
this.events.onMessage?.(message)
|
||||
}
|
||||
|
||||
private emitToolCall(call: ToolCall): void {
|
||||
this.events.onToolCall?.(call)
|
||||
}
|
||||
|
||||
private emitToolResult(result: ToolResult): void {
|
||||
this.events.onToolResult?.(result)
|
||||
}
|
||||
|
||||
private emitStatus(status: HandleMessageStatus): void {
|
||||
this.events.onStatusChange?.(status)
|
||||
}
|
||||
}
|
||||
184  packages/ipuaro/src/application/use-cases/IndexProject.ts  Normal file
@@ -0,0 +1,184 @@
|
||||
import * as path from "node:path"
|
||||
import type { IStorage } from "../../domain/services/IStorage.js"
|
||||
import type { IndexingStats, IndexProgress } from "../../domain/services/IIndexer.js"
|
||||
import { FileScanner } from "../../infrastructure/indexer/FileScanner.js"
|
||||
import { ASTParser } from "../../infrastructure/indexer/ASTParser.js"
|
||||
import { MetaAnalyzer } from "../../infrastructure/indexer/MetaAnalyzer.js"
|
||||
import { IndexBuilder } from "../../infrastructure/indexer/IndexBuilder.js"
|
||||
import { createFileData, type FileData } from "../../domain/value-objects/FileData.js"
|
||||
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||
import { md5 } from "../../shared/utils/hash.js"
|
||||
|
||||
/**
|
||||
* Options for indexing a project.
|
||||
*/
|
||||
export interface IndexProjectOptions {
|
||||
/** Additional ignore patterns */
|
||||
additionalIgnore?: string[]
|
||||
/** Progress callback */
|
||||
onProgress?: (progress: IndexProgress) => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Use case for indexing a project.
|
||||
* Orchestrates the full indexing pipeline:
|
||||
* 1. Scan files
|
||||
* 2. Parse AST
|
||||
* 3. Analyze metadata
|
||||
* 4. Build indexes
|
||||
* 5. Store in Redis
|
||||
*/
|
||||
export class IndexProject {
|
||||
private readonly storage: IStorage
|
||||
private readonly scanner: FileScanner
|
||||
private readonly parser: ASTParser
|
||||
private readonly metaAnalyzer: MetaAnalyzer
|
||||
private readonly indexBuilder: IndexBuilder
|
||||
|
||||
constructor(storage: IStorage, projectRoot: string) {
|
||||
this.storage = storage
|
||||
this.scanner = new FileScanner()
|
||||
this.parser = new ASTParser()
|
||||
this.metaAnalyzer = new MetaAnalyzer(projectRoot)
|
||||
this.indexBuilder = new IndexBuilder(projectRoot)
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the indexing pipeline.
|
||||
*
|
||||
* @param projectRoot - Absolute path to project root
|
||||
* @param options - Optional configuration
|
||||
* @returns Indexing statistics
|
||||
*/
|
||||
async execute(projectRoot: string, options: IndexProjectOptions = {}): Promise<IndexingStats> {
|
||||
const startTime = Date.now()
|
||||
const stats: IndexingStats = {
|
||||
filesScanned: 0,
|
||||
filesParsed: 0,
|
||||
parseErrors: 0,
|
||||
timeMs: 0,
|
||||
}
|
||||
|
||||
const fileDataMap = new Map<string, FileData>()
|
||||
const astMap = new Map<string, FileAST>()
|
||||
const contentMap = new Map<string, string>()
|
||||
|
||||
// Phase 1: Scanning
|
||||
this.reportProgress(options.onProgress, 0, 0, "", "scanning")
|
||||
|
||||
const scanResults = await this.scanner.scanAll(projectRoot)
|
||||
stats.filesScanned = scanResults.length
|
||||
|
||||
// Phase 2: Parsing
|
||||
let current = 0
|
||||
const total = scanResults.length
|
||||
|
||||
for (const scanResult of scanResults) {
|
||||
current++
|
||||
const fullPath = path.join(projectRoot, scanResult.path)
|
||||
this.reportProgress(options.onProgress, current, total, scanResult.path, "parsing")
|
||||
|
||||
const content = await FileScanner.readFileContent(fullPath)
|
||||
if (!content) {
|
||||
continue
|
||||
}
|
||||
|
||||
contentMap.set(scanResult.path, content)
|
||||
|
||||
const lines = content.split("\n")
|
||||
const hash = md5(content)
|
||||
|
||||
const fileData = createFileData(lines, hash, scanResult.size, scanResult.lastModified)
|
||||
fileDataMap.set(scanResult.path, fileData)
|
||||
|
||||
const language = this.detectLanguage(scanResult.path)
|
||||
if (!language) {
|
||||
continue
|
||||
}
|
||||
|
||||
const ast = this.parser.parse(content, language)
|
||||
astMap.set(scanResult.path, ast)
|
||||
|
||||
stats.filesParsed++
|
||||
if (ast.parseError) {
|
||||
stats.parseErrors++
|
||||
}
|
||||
}
|
||||
|
||||
// Phase 3: Analyzing metadata
|
||||
current = 0
|
||||
for (const [filePath, ast] of astMap) {
|
||||
current++
|
||||
this.reportProgress(options.onProgress, current, astMap.size, filePath, "analyzing")
|
||||
|
||||
const content = contentMap.get(filePath)
|
||||
if (!content) {
|
||||
continue
|
||||
}
|
||||
|
||||
const fullPath = path.join(projectRoot, filePath)
|
||||
const meta = this.metaAnalyzer.analyze(fullPath, ast, content, astMap)
|
||||
|
||||
await this.storage.setMeta(filePath, meta)
|
||||
}
|
||||
|
||||
// Phase 4: Building indexes
|
||||
this.reportProgress(options.onProgress, 1, 1, "Building indexes", "indexing")
|
||||
|
||||
const symbolIndex = this.indexBuilder.buildSymbolIndex(astMap)
|
||||
const depsGraph = this.indexBuilder.buildDepsGraph(astMap)
|
||||
|
||||
// Phase 5: Store everything
|
||||
for (const [filePath, fileData] of fileDataMap) {
|
||||
await this.storage.setFile(filePath, fileData)
|
||||
}
|
||||
|
||||
for (const [filePath, ast] of astMap) {
|
||||
await this.storage.setAST(filePath, ast)
|
||||
}
|
||||
|
||||
await this.storage.setSymbolIndex(symbolIndex)
|
||||
await this.storage.setDepsGraph(depsGraph)
|
||||
|
||||
// Store last indexed timestamp
|
||||
await this.storage.setProjectConfig("last_indexed", Date.now())
|
||||
|
||||
stats.timeMs = Date.now() - startTime
|
||||
|
||||
return stats
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect language from file extension.
|
||||
*/
|
||||
private detectLanguage(filePath: string): "ts" | "tsx" | "js" | "jsx" | null {
|
||||
const ext = path.extname(filePath).toLowerCase()
|
||||
switch (ext) {
|
||||
case ".ts":
|
||||
return "ts"
|
||||
case ".tsx":
|
||||
return "tsx"
|
||||
case ".js":
|
||||
return "js"
|
||||
case ".jsx":
|
||||
return "jsx"
|
||||
default:
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Report progress to callback if provided.
|
||||
*/
|
||||
private reportProgress(
|
||||
callback: ((progress: IndexProgress) => void) | undefined,
|
||||
current: number,
|
||||
total: number,
|
||||
currentFile: string,
|
||||
phase: IndexProgress["phase"],
|
||||
): void {
|
||||
if (callback) {
|
||||
callback({ current, total, currentFile, phase })
|
||||
}
|
||||
}
|
||||
}
|
||||
62  packages/ipuaro/src/application/use-cases/StartSession.ts  Normal file
@@ -0,0 +1,62 @@
import { randomUUID } from "node:crypto"
import { Session } from "../../domain/entities/Session.js"
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"

/**
 * Options for starting a session.
 */
export interface StartSessionOptions {
    /** Force creation of a new session even if one exists */
    forceNew?: boolean
    /** Specific session ID to load */
    sessionId?: string
}

/**
 * Result of starting a session.
 */
export interface StartSessionResult {
    session: Session
    isNew: boolean
}

/**
 * Use case for starting a session.
 * Creates a new session or loads the latest one for a project.
 */
export class StartSession {
    constructor(private readonly sessionStorage: ISessionStorage) {}

    /**
     * Execute the use case.
     *
     * @param projectName - The project name to start a session for
     * @param options - Optional configuration
     * @returns The session and whether it was newly created
     */
    async execute(
        projectName: string,
        options: StartSessionOptions = {},
    ): Promise<StartSessionResult> {
        if (options.sessionId) {
            const session = await this.sessionStorage.loadSession(options.sessionId)
            if (session) {
                await this.sessionStorage.touchSession(session.id)
                return { session, isNew: false }
            }
        }

        if (!options.forceNew) {
            const latestSession = await this.sessionStorage.getLatestSession(projectName)
            if (latestSession) {
                await this.sessionStorage.touchSession(latestSession.id)
                return { session: latestSession, isNew: false }
            }
        }

        const session = new Session(randomUUID(), projectName)
        await this.sessionStorage.saveSession(session)

        return { session, isNew: true }
    }
}
119  packages/ipuaro/src/application/use-cases/UndoChange.ts  Normal file
@@ -0,0 +1,119 @@
|
||||
import { promises as fs } from "node:fs"
|
||||
import type { Session } from "../../domain/entities/Session.js"
|
||||
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"
|
||||
import type { IStorage } from "../../domain/services/IStorage.js"
|
||||
import { canUndo, type UndoEntry } from "../../domain/value-objects/UndoEntry.js"
|
||||
import { md5 } from "../../shared/utils/hash.js"
|
||||
|
||||
/**
|
||||
* Result of undo operation.
|
||||
*/
|
||||
export interface UndoResult {
|
||||
success: boolean
|
||||
entry?: UndoEntry
|
||||
error?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Use case for undoing the last file change.
|
||||
*/
|
||||
export class UndoChange {
|
||||
constructor(
|
||||
private readonly sessionStorage: ISessionStorage,
|
||||
private readonly storage: IStorage,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Execute undo operation.
|
||||
*
|
||||
* @param session - The current session
|
||||
* @returns Result of the undo operation
|
||||
*/
|
||||
async execute(session: Session): Promise<UndoResult> {
|
||||
const entry = await this.sessionStorage.popUndoEntry(session.id)
|
||||
if (!entry) {
|
||||
return {
|
||||
success: false,
|
||||
error: "No changes to undo",
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const currentContent = await this.readCurrentContent(entry.filePath)
|
||||
|
||||
if (!canUndo(entry, currentContent)) {
|
||||
await this.sessionStorage.pushUndoEntry(session.id, entry)
|
||||
return {
|
||||
success: false,
|
||||
entry,
|
||||
error: "File has been modified since the change was made",
|
||||
}
|
||||
}
|
||||
|
||||
await this.restoreContent(entry.filePath, entry.previousContent)
|
||||
|
||||
session.popUndoEntry()
|
||||
session.stats.editsApplied--
|
||||
|
||||
return {
|
||||
success: true,
|
||||
entry,
|
||||
}
|
||||
} catch (error) {
|
||||
await this.sessionStorage.pushUndoEntry(session.id, entry)
|
||||
|
||||
const message = error instanceof Error ? error.message : "Unknown error"
|
||||
return {
|
||||
success: false,
|
||||
entry,
|
||||
error: `Failed to undo: ${message}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if undo is available.
|
||||
*/
|
||||
async canUndo(session: Session): Promise<boolean> {
|
||||
const stack = await this.sessionStorage.getUndoStack(session.id)
|
||||
return stack.length > 0
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the next undo entry without removing it.
|
||||
*/
|
||||
async peekUndoEntry(session: Session): Promise<UndoEntry | null> {
|
||||
const stack = await this.sessionStorage.getUndoStack(session.id)
|
||||
if (stack.length === 0) {
|
||||
return null
|
||||
}
|
||||
return stack[stack.length - 1]
|
||||
}
|
||||
|
||||
private async readCurrentContent(filePath: string): Promise<string[]> {
|
||||
try {
|
||||
const content = await fs.readFile(filePath, "utf-8")
|
||||
return content.split("\n")
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
return []
|
||||
}
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
private async restoreContent(filePath: string, content: string[]): Promise<void> {
|
||||
const fileContent = content.join("\n")
|
||||
await fs.writeFile(filePath, fileContent, "utf-8")
|
||||
|
||||
const hash = md5(fileContent)
|
||||
const stats = await fs.stat(filePath)
|
||||
|
||||
await this.storage.setFile(filePath, {
|
||||
lines: content,
|
||||
hash,
|
||||
size: stats.size,
|
||||
lastModified: stats.mtimeMs,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,8 @@
/*
 * Application Use Cases
 * Will be implemented in version 0.10.0+
 */
// Application Use Cases

export * from "./StartSession.js"
export * from "./HandleMessage.js"
export * from "./UndoChange.js"
export * from "./ContextManager.js"
export * from "./IndexProject.js"
export * from "./ExecuteTool.js"
148  packages/ipuaro/src/cli/commands/index-cmd.ts  Normal file
@@ -0,0 +1,148 @@
|
||||
/**
|
||||
* Index command implementation.
|
||||
* Indexes project without starting TUI.
|
||||
*/
|
||||
|
||||
import * as path from "node:path"
|
||||
import { RedisClient } from "../../infrastructure/storage/RedisClient.js"
|
||||
import { RedisStorage } from "../../infrastructure/storage/RedisStorage.js"
|
||||
import { generateProjectName } from "../../infrastructure/storage/schema.js"
|
||||
import { IndexProject } from "../../application/use-cases/IndexProject.js"
|
||||
import { type Config, DEFAULT_CONFIG } from "../../shared/constants/config.js"
|
||||
import { checkRedis } from "./onboarding.js"
|
||||
|
||||
/**
|
||||
* Result of index command.
|
||||
*/
|
||||
export interface IndexResult {
|
||||
success: boolean
|
||||
filesIndexed: number
|
||||
filesSkipped: number
|
||||
errors: string[]
|
||||
duration: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Progress callback for indexing.
|
||||
*/
|
||||
export type IndexProgressCallback = (
|
||||
phase: "scanning" | "parsing" | "analyzing" | "storing",
|
||||
current: number,
|
||||
total: number,
|
||||
currentFile?: string,
|
||||
) => void
|
||||
|
||||
/**
|
||||
* Execute the index command.
|
||||
*/
|
||||
export async function executeIndex(
|
||||
projectPath: string,
|
||||
config: Config = DEFAULT_CONFIG,
|
||||
onProgress?: IndexProgressCallback,
|
||||
): Promise<IndexResult> {
|
||||
const startTime = Date.now()
|
||||
const resolvedPath = path.resolve(projectPath)
|
||||
const projectName = generateProjectName(resolvedPath)
|
||||
|
||||
console.warn(`📁 Indexing project: ${resolvedPath}`)
|
||||
console.warn(` Project name: ${projectName}\n`)
|
||||
|
||||
const redisResult = await checkRedis(config.redis)
|
||||
if (!redisResult.ok) {
|
||||
console.error(`❌ ${redisResult.error ?? "Redis unavailable"}`)
|
||||
return {
|
||||
success: false,
|
||||
filesIndexed: 0,
|
||||
filesSkipped: 0,
|
||||
errors: [redisResult.error ?? "Redis unavailable"],
|
||||
duration: Date.now() - startTime,
|
||||
}
|
||||
}
|
||||
|
||||
let redisClient: RedisClient | null = null
|
||||
|
||||
try {
|
||||
redisClient = new RedisClient(config.redis)
|
||||
await redisClient.connect()
|
||||
|
||||
const storage = new RedisStorage(redisClient, projectName)
|
||||
const indexProject = new IndexProject(storage, resolvedPath)
|
||||
|
||||
let lastPhase: "scanning" | "parsing" | "analyzing" | "indexing" = "scanning"
|
||||
let lastProgress = 0
|
||||
|
||||
const stats = await indexProject.execute(resolvedPath, {
|
||||
onProgress: (progress) => {
|
||||
if (progress.phase !== lastPhase) {
|
||||
if (lastPhase === "scanning") {
|
||||
console.warn(` Found ${String(progress.total)} files\n`)
|
||||
} else if (lastProgress > 0) {
|
||||
console.warn("")
|
||||
}
|
||||
|
||||
const phaseLabels = {
|
||||
scanning: "🔍 Scanning files...",
|
||||
parsing: "📝 Parsing files...",
|
||||
analyzing: "📊 Analyzing metadata...",
|
||||
indexing: "🏗️ Building indexes...",
|
||||
}
|
||||
console.warn(phaseLabels[progress.phase])
|
||||
lastPhase = progress.phase
|
||||
}
|
||||
|
||||
if (progress.phase === "indexing") {
|
||||
onProgress?.("storing", progress.current, progress.total)
|
||||
} else {
|
||||
onProgress?.(
|
||||
progress.phase,
|
||||
progress.current,
|
||||
progress.total,
|
||||
progress.currentFile,
|
||||
)
|
||||
}
|
||||
|
||||
if (
|
||||
progress.current % 50 === 0 &&
|
||||
progress.phase !== "scanning" &&
|
||||
progress.phase !== "indexing"
|
||||
) {
|
||||
process.stdout.write(
|
||||
`\r ${progress.phase === "parsing" ? "Parsed" : "Analyzed"} ${String(progress.current)}/${String(progress.total)} files...`,
|
||||
)
|
||||
}
|
||||
lastProgress = progress.current
|
||||
},
|
||||
})
|
||||
|
||||
const symbolIndex = await storage.getSymbolIndex()
|
||||
const durationSec = (stats.timeMs / 1000).toFixed(2)
|
||||
|
||||
console.warn(`\n✅ Indexing complete in ${durationSec}s`)
|
||||
console.warn(` Files scanned: ${String(stats.filesScanned)}`)
|
||||
console.warn(` Files parsed: ${String(stats.filesParsed)}`)
|
||||
console.warn(` Parse errors: ${String(stats.parseErrors)}`)
|
||||
console.warn(` Symbols: ${String(symbolIndex.size)}`)
|
||||
|
||||
return {
|
||||
success: true,
|
||||
filesIndexed: stats.filesParsed,
|
||||
filesSkipped: stats.filesScanned - stats.filesParsed,
|
||||
errors: [],
|
||||
duration: stats.timeMs,
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
console.error(`❌ Indexing failed: ${message}`)
|
||||
return {
|
||||
success: false,
|
||||
filesIndexed: 0,
|
||||
filesSkipped: 0,
|
||||
errors: [message],
|
||||
duration: Date.now() - startTime,
|
||||
}
|
||||
} finally {
|
||||
if (redisClient) {
|
||||
await redisClient.disconnect()
|
||||
}
|
||||
}
|
||||
}
|
||||
18  packages/ipuaro/src/cli/commands/index.ts  Normal file
@@ -0,0 +1,18 @@
/**
 * CLI commands module.
 */

export { executeStart, type StartOptions, type StartResult } from "./start.js"
export { executeInit, type InitOptions, type InitResult } from "./init.js"
export { executeIndex, type IndexResult, type IndexProgressCallback } from "./index-cmd.js"
export {
    runOnboarding,
    checkRedis,
    checkOllama,
    checkModel,
    checkProjectSize,
    pullModel,
    type OnboardingResult,
    type OnboardingOptions,
} from "./onboarding.js"
export { registerAllTools } from "./tools-setup.js"
114  packages/ipuaro/src/cli/commands/init.ts  Normal file
@@ -0,0 +1,114 @@
|
||||
/**
|
||||
* Init command implementation.
|
||||
* Creates .ipuaro.json configuration file.
|
||||
*/
|
||||
|
||||
import * as fs from "node:fs/promises"
|
||||
import * as path from "node:path"
|
||||
|
||||
/**
|
||||
* Default configuration template for .ipuaro.json
|
||||
*/
|
||||
const CONFIG_TEMPLATE = {
|
||||
$schema: "https://raw.githubusercontent.com/samiyev/puaros/main/packages/ipuaro/schema.json",
|
||||
redis: {
|
||||
host: "localhost",
|
||||
port: 6379,
|
||||
db: 0,
|
||||
},
|
||||
llm: {
|
||||
model: "qwen2.5-coder:7b-instruct",
|
||||
temperature: 0.1,
|
||||
host: "http://localhost:11434",
|
||||
},
|
||||
project: {
|
||||
ignorePatterns: [],
|
||||
},
|
||||
edit: {
|
||||
autoApply: false,
|
||||
},
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for init command.
|
||||
*/
|
||||
export interface InitOptions {
|
||||
force?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of init command.
|
||||
*/
|
||||
export interface InitResult {
|
||||
success: boolean
|
||||
filePath?: string
|
||||
error?: string
|
||||
skipped?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the init command.
|
||||
* Creates a .ipuaro.json file in the specified directory.
|
||||
*/
|
||||
export async function executeInit(
|
||||
projectPath = ".",
|
||||
options: InitOptions = {},
|
||||
): Promise<InitResult> {
|
||||
const resolvedPath = path.resolve(projectPath)
|
||||
const configPath = path.join(resolvedPath, ".ipuaro.json")
|
||||
|
||||
try {
|
||||
const exists = await fileExists(configPath)
|
||||
|
||||
if (exists && !options.force) {
|
||||
console.warn(`⚠️ Configuration file already exists: ${configPath}`)
|
||||
console.warn(" Use --force to overwrite.")
|
||||
return {
|
||||
success: true,
|
||||
skipped: true,
|
||||
filePath: configPath,
|
||||
}
|
||||
}
|
||||
|
||||
const dirExists = await fileExists(resolvedPath)
|
||||
if (!dirExists) {
|
||||
await fs.mkdir(resolvedPath, { recursive: true })
|
||||
}
|
||||
|
||||
const content = JSON.stringify(CONFIG_TEMPLATE, null, 4)
|
||||
await fs.writeFile(configPath, content, "utf-8")
|
||||
|
||||
console.warn(`✅ Created ${configPath}`)
|
||||
console.warn("\nConfiguration options:")
|
||||
console.warn(" redis.host - Redis server host (default: localhost)")
|
||||
console.warn(" redis.port - Redis server port (default: 6379)")
|
||||
console.warn(" llm.model - Ollama model name (default: qwen2.5-coder:7b-instruct)")
|
||||
console.warn(" llm.temperature - LLM temperature (default: 0.1)")
|
||||
console.warn(" edit.autoApply - Auto-apply edits without confirmation (default: false)")
|
||||
console.warn("\nRun `ipuaro` to start the AI agent.")
|
||||
|
||||
return {
|
||||
success: true,
|
||||
filePath: configPath,
|
||||
}
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
console.error(`❌ Failed to create configuration: ${message}`)
|
||||
return {
|
||||
success: false,
|
||||
error: message,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a file or directory exists.
|
||||
*/
|
||||
async function fileExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(filePath)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
290  packages/ipuaro/src/cli/commands/onboarding.ts  Normal file
@@ -0,0 +1,290 @@
|
||||
/**
|
||||
* Onboarding checks for CLI.
|
||||
* Validates environment before starting ipuaro.
|
||||
*/
|
||||
|
||||
import { RedisClient } from "../../infrastructure/storage/RedisClient.js"
|
||||
import { OllamaClient } from "../../infrastructure/llm/OllamaClient.js"
|
||||
import { FileScanner } from "../../infrastructure/indexer/FileScanner.js"
|
||||
import type { LLMConfig, RedisConfig } from "../../shared/constants/config.js"
|
||||
|
||||
/**
|
||||
* Result of onboarding checks.
|
||||
*/
|
||||
export interface OnboardingResult {
|
||||
success: boolean
|
||||
redisOk: boolean
|
||||
ollamaOk: boolean
|
||||
modelOk: boolean
|
||||
projectOk: boolean
|
||||
fileCount: number
|
||||
errors: string[]
|
||||
warnings: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for onboarding checks.
|
||||
*/
|
||||
export interface OnboardingOptions {
|
||||
redisConfig: RedisConfig
|
||||
llmConfig: LLMConfig
|
||||
projectPath: string
|
||||
maxFiles?: number
|
||||
skipRedis?: boolean
|
||||
skipOllama?: boolean
|
||||
skipModel?: boolean
|
||||
skipProject?: boolean
|
||||
}
|
||||
|
||||
const DEFAULT_MAX_FILES = 10_000
|
||||
|
||||
/**
|
||||
* Check Redis availability.
|
||||
*/
|
||||
export async function checkRedis(config: RedisConfig): Promise<{
|
||||
ok: boolean
|
||||
error?: string
|
||||
}> {
|
||||
const client = new RedisClient(config)
|
||||
|
||||
try {
|
||||
await client.connect()
|
||||
const pingOk = await client.ping()
|
||||
await client.disconnect()
|
||||
|
||||
if (!pingOk) {
|
||||
return {
|
||||
ok: false,
|
||||
error: "Redis ping failed. Server may be overloaded.",
|
||||
}
|
||||
}
|
||||
|
||||
return { ok: true }
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return {
|
||||
ok: false,
|
||||
error: `Cannot connect to Redis: ${message}
|
||||
|
||||
Redis is required for ipuaro to store project indexes and session data.
|
||||
|
||||
Install Redis:
|
||||
macOS: brew install redis && brew services start redis
|
||||
Ubuntu: sudo apt install redis-server && sudo systemctl start redis
|
||||
Docker: docker run -d -p 6379:6379 redis`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check Ollama availability.
|
||||
*/
|
||||
export async function checkOllama(config: LLMConfig): Promise<{
|
||||
ok: boolean
|
||||
error?: string
|
||||
}> {
|
||||
const client = new OllamaClient(config)
|
||||
|
||||
try {
|
||||
const available = await client.isAvailable()
|
||||
|
||||
if (!available) {
|
||||
return {
|
||||
ok: false,
|
||||
error: `Cannot connect to Ollama at ${config.host}
|
||||
|
||||
Ollama is required for ipuaro to process your requests using local LLMs.
|
||||
|
||||
Install Ollama:
|
||||
macOS: brew install ollama && ollama serve
|
||||
Linux: curl -fsSL https://ollama.com/install.sh | sh && ollama serve
|
||||
Manual: https://ollama.com/download
|
||||
|
||||
After installing, ensure Ollama is running with: ollama serve`,
|
||||
}
|
||||
}
|
||||
|
||||
return { ok: true }
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return {
|
||||
ok: false,
|
||||
error: `Ollama check failed: ${message}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check model availability.
|
||||
*/
|
||||
export async function checkModel(config: LLMConfig): Promise<{
|
||||
ok: boolean
|
||||
needsPull: boolean
|
||||
error?: string
|
||||
}> {
|
||||
const client = new OllamaClient(config)
|
||||
|
||||
try {
|
||||
const hasModel = await client.hasModel(config.model)
|
||||
|
||||
if (!hasModel) {
|
||||
return {
|
||||
ok: false,
|
||||
needsPull: true,
|
||||
error: `Model "${config.model}" is not installed.
|
||||
|
||||
Would you like to pull it? This may take a few minutes.
|
||||
Run: ollama pull ${config.model}`,
|
||||
}
|
||||
}
|
||||
|
||||
return { ok: true, needsPull: false }
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return {
|
||||
ok: false,
|
||||
needsPull: false,
|
||||
error: `Model check failed: ${message}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Pull model from Ollama.
|
||||
*/
|
||||
export async function pullModel(
|
||||
config: LLMConfig,
|
||||
onProgress?: (status: string) => void,
|
||||
): Promise<{ ok: boolean; error?: string }> {
|
||||
const client = new OllamaClient(config)
|
||||
|
||||
try {
|
||||
onProgress?.(`Pulling model "${config.model}"...`)
|
||||
await client.pullModel(config.model)
|
||||
onProgress?.(`Model "${config.model}" pulled successfully.`)
|
||||
return { ok: true }
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return {
|
||||
ok: false,
|
||||
error: `Failed to pull model: ${message}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check project size.
|
||||
*/
|
||||
export async function checkProjectSize(
|
||||
projectPath: string,
|
||||
maxFiles: number = DEFAULT_MAX_FILES,
|
||||
): Promise<{
|
||||
ok: boolean
|
||||
fileCount: number
|
||||
warning?: string
|
||||
}> {
|
||||
const scanner = new FileScanner()
|
||||
|
||||
try {
|
||||
const files = await scanner.scanAll(projectPath)
|
||||
const fileCount = files.length
|
||||
|
||||
if (fileCount > maxFiles) {
|
||||
return {
|
||||
ok: true,
|
||||
fileCount,
|
||||
warning: `Project has ${fileCount.toLocaleString()} files (>${maxFiles.toLocaleString()}).
|
||||
This may take a while to index and use more memory.
|
||||
|
||||
Consider:
|
||||
1. Running ipuaro in a subdirectory: ipuaro ./src
|
||||
2. Adding patterns to .gitignore to exclude unnecessary files
|
||||
3. Using a smaller project for better performance`,
|
||||
}
|
||||
}
|
||||
|
||||
if (fileCount === 0) {
|
||||
return {
|
||||
ok: false,
|
||||
fileCount: 0,
|
||||
warning: `No supported files found in "${projectPath}".
|
||||
|
||||
ipuaro supports: .ts, .tsx, .js, .jsx, .json, .yaml, .yml
|
||||
|
||||
Ensure you're running ipuaro in a project directory with source files.`,
|
||||
}
|
||||
}
|
||||
|
||||
return { ok: true, fileCount }
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return {
|
||||
ok: false,
|
||||
fileCount: 0,
|
||||
warning: `Failed to scan project: ${message}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Run all onboarding checks.
|
||||
*/
|
||||
export async function runOnboarding(options: OnboardingOptions): Promise<OnboardingResult> {
|
||||
const errors: string[] = []
|
||||
const warnings: string[] = []
|
||||
const maxFiles = options.maxFiles ?? DEFAULT_MAX_FILES
|
||||
|
||||
let redisOk = true
|
||||
let ollamaOk = true
|
||||
let modelOk = true
|
||||
let projectOk = true
|
||||
let fileCount = 0
|
||||
|
||||
if (!options.skipRedis) {
|
||||
const redisResult = await checkRedis(options.redisConfig)
|
||||
redisOk = redisResult.ok
|
||||
if (!redisOk && redisResult.error) {
|
||||
errors.push(redisResult.error)
|
||||
}
|
||||
}
|
||||
|
||||
if (!options.skipOllama) {
|
||||
const ollamaResult = await checkOllama(options.llmConfig)
|
||||
ollamaOk = ollamaResult.ok
|
||||
if (!ollamaOk && ollamaResult.error) {
|
||||
errors.push(ollamaResult.error)
|
||||
}
|
||||
}
|
||||
|
||||
if (!options.skipModel && ollamaOk) {
|
||||
const modelResult = await checkModel(options.llmConfig)
|
||||
modelOk = modelResult.ok
|
||||
if (!modelOk && modelResult.error) {
|
||||
errors.push(modelResult.error)
|
||||
}
|
||||
}
|
||||
|
||||
if (!options.skipProject) {
|
||||
const projectResult = await checkProjectSize(options.projectPath, maxFiles)
|
||||
projectOk = projectResult.ok
|
||||
fileCount = projectResult.fileCount
|
||||
if (projectResult.warning) {
|
||||
if (projectResult.ok) {
|
||||
warnings.push(projectResult.warning)
|
||||
} else {
|
||||
errors.push(projectResult.warning)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
success: redisOk && ollamaOk && modelOk && projectOk && errors.length === 0,
|
||||
redisOk,
|
||||
ollamaOk,
|
||||
modelOk,
|
||||
projectOk,
|
||||
fileCount,
|
||||
errors,
|
||||
warnings,
|
||||
}
|
||||
}
|
||||
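For reference, a minimal sketch of how these checks can be driven outside of `executeStart`, assuming an ESM context where top-level await is available. The config values come from `DEFAULT_CONFIG`; everything else here is illustrative wiring, not taken from this diff:

```
import { DEFAULT_CONFIG } from "../../shared/constants/config.js"
import { pullModel, runOnboarding } from "./onboarding.js"

const result = await runOnboarding({
    redisConfig: DEFAULT_CONFIG.redis,
    llmConfig: DEFAULT_CONFIG.llm,
    projectPath: process.cwd(),
})

result.warnings.forEach((warning) => console.warn(`⚠️  ${warning}`))

if (!result.success) {
    if (result.ollamaOk && !result.modelOk) {
        // Ollama is reachable but the model is missing: pull it, as the start command does.
        await pullModel(DEFAULT_CONFIG.llm, (status) => console.warn(status))
    } else {
        result.errors.forEach((error) => console.error(`❌ ${error}`))
    }
}
```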
packages/ipuaro/src/cli/commands/start.ts (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
/**
|
||||
* Start command implementation.
|
||||
* Launches the ipuaro TUI.
|
||||
*/
|
||||
|
||||
import * as path from "node:path"
|
||||
import * as readline from "node:readline"
|
||||
import { render } from "ink"
|
||||
import React from "react"
|
||||
import { App, type AppDependencies } from "../../tui/App.js"
|
||||
import { RedisClient } from "../../infrastructure/storage/RedisClient.js"
|
||||
import { RedisStorage } from "../../infrastructure/storage/RedisStorage.js"
|
||||
import { RedisSessionStorage } from "../../infrastructure/storage/RedisSessionStorage.js"
|
||||
import { OllamaClient } from "../../infrastructure/llm/OllamaClient.js"
|
||||
import { ToolRegistry } from "../../infrastructure/tools/registry.js"
|
||||
import { generateProjectName } from "../../infrastructure/storage/schema.js"
|
||||
import { type Config, DEFAULT_CONFIG } from "../../shared/constants/config.js"
|
||||
import { checkModel, pullModel, runOnboarding } from "./onboarding.js"
|
||||
import { registerAllTools } from "./tools-setup.js"
|
||||
|
||||
/**
|
||||
* Options for start command.
|
||||
*/
|
||||
export interface StartOptions {
|
||||
autoApply?: boolean
|
||||
model?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Result of start command.
|
||||
*/
|
||||
export interface StartResult {
|
||||
success: boolean
|
||||
error?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the start command.
|
||||
*/
|
||||
export async function executeStart(
|
||||
projectPath: string,
|
||||
options: StartOptions,
|
||||
config: Config = DEFAULT_CONFIG,
|
||||
): Promise<StartResult> {
|
||||
const resolvedPath = path.resolve(projectPath)
|
||||
const projectName = generateProjectName(resolvedPath)
|
||||
|
||||
const llmConfig = {
|
||||
...config.llm,
|
||||
model: options.model ?? config.llm.model,
|
||||
}
|
||||
|
||||
console.warn("🔍 Running pre-flight checks...\n")
|
||||
|
||||
const onboardingResult = await runOnboarding({
|
||||
redisConfig: config.redis,
|
||||
llmConfig,
|
||||
projectPath: resolvedPath,
|
||||
})
|
||||
|
||||
for (const warning of onboardingResult.warnings) {
|
||||
console.warn(`⚠️ ${warning}\n`)
|
||||
}
|
||||
|
||||
if (!onboardingResult.success) {
|
||||
for (const error of onboardingResult.errors) {
|
||||
console.error(`❌ ${error}\n`)
|
||||
}
|
||||
|
||||
if (!onboardingResult.modelOk && onboardingResult.ollamaOk) {
|
||||
const shouldPull = await promptYesNo(
|
||||
`Would you like to pull "${llmConfig.model}"? (y/n): `,
|
||||
)
|
||||
|
||||
if (shouldPull) {
|
||||
const pullResult = await pullModel(llmConfig, console.warn)
|
||||
if (!pullResult.ok) {
|
||||
console.error(`❌ ${pullResult.error ?? "Unknown error"}`)
|
||||
return { success: false, error: pullResult.error }
|
||||
}
|
||||
|
||||
const recheckModel = await checkModel(llmConfig)
|
||||
if (!recheckModel.ok) {
|
||||
console.error("❌ Model still not available after pull.")
|
||||
return { success: false, error: "Model pull failed" }
|
||||
}
|
||||
} else {
|
||||
return { success: false, error: "Model not available" }
|
||||
}
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
error: onboardingResult.errors.join("\n"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.warn(`✅ All checks passed. Found ${String(onboardingResult.fileCount)} files.\n`)
|
||||
console.warn("🚀 Starting ipuaro...\n")
|
||||
|
||||
const redisClient = new RedisClient(config.redis)
|
||||
|
||||
try {
|
||||
await redisClient.connect()
|
||||
|
||||
const storage = new RedisStorage(redisClient, projectName)
|
||||
const sessionStorage = new RedisSessionStorage(redisClient)
|
||||
const llm = new OllamaClient(llmConfig)
|
||||
const tools = new ToolRegistry()
|
||||
|
||||
registerAllTools(tools)
|
||||
|
||||
const deps: AppDependencies = {
|
||||
storage,
|
||||
sessionStorage,
|
||||
llm,
|
||||
tools,
|
||||
}
|
||||
|
||||
const handleExit = (): void => {
|
||||
void redisClient.disconnect()
|
||||
}
|
||||
|
||||
const { waitUntilExit } = render(
|
||||
React.createElement(App, {
|
||||
projectPath: resolvedPath,
|
||||
autoApply: options.autoApply ?? config.edit.autoApply,
|
||||
deps,
|
||||
onExit: handleExit,
|
||||
}),
|
||||
)
|
||||
|
||||
await waitUntilExit()
|
||||
await redisClient.disconnect()
|
||||
|
||||
return { success: true }
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
console.error(`❌ Failed to start ipuaro: ${message}`)
|
||||
await redisClient.disconnect()
|
||||
return { success: false, error: message }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple yes/no prompt for CLI.
|
||||
*/
|
||||
async function promptYesNo(question: string): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
process.stdout.write(question)
|
||||
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout,
|
||||
})
|
||||
|
||||
rl.once("line", (answer: string) => {
|
||||
rl.close()
|
||||
resolve(answer.toLowerCase() === "y" || answer.toLowerCase() === "yes")
|
||||
})
|
||||
})
|
||||
}
|
||||
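A short sketch of driving the start command programmatically rather than through commander. Here `DEFAULT_CONFIG` stands in for the config the CLI would normally obtain via `loadConfig`; treat it as an assumption for illustration:

```
import { DEFAULT_CONFIG } from "../../shared/constants/config.js"
import { executeStart } from "./start.js"

// Rough equivalent of `ipuaro start . --auto-apply` with the default model.
const result = await executeStart(".", { autoApply: true }, DEFAULT_CONFIG)
if (!result.success) {
    process.exitCode = 1
}
```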
packages/ipuaro/src/cli/commands/tools-setup.ts (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
/**
|
||||
* Tool registration helper for CLI.
|
||||
* Registers all 18 tools with the tool registry.
|
||||
*/
|
||||
|
||||
import type { IToolRegistry } from "../../application/interfaces/IToolRegistry.js"
|
||||
|
||||
import { GetLinesTool } from "../../infrastructure/tools/read/GetLinesTool.js"
|
||||
import { GetFunctionTool } from "../../infrastructure/tools/read/GetFunctionTool.js"
|
||||
import { GetClassTool } from "../../infrastructure/tools/read/GetClassTool.js"
|
||||
import { GetStructureTool } from "../../infrastructure/tools/read/GetStructureTool.js"
|
||||
|
||||
import { EditLinesTool } from "../../infrastructure/tools/edit/EditLinesTool.js"
|
||||
import { CreateFileTool } from "../../infrastructure/tools/edit/CreateFileTool.js"
|
||||
import { DeleteFileTool } from "../../infrastructure/tools/edit/DeleteFileTool.js"
|
||||
|
||||
import { FindReferencesTool } from "../../infrastructure/tools/search/FindReferencesTool.js"
|
||||
import { FindDefinitionTool } from "../../infrastructure/tools/search/FindDefinitionTool.js"
|
||||
|
||||
import { GetDependenciesTool } from "../../infrastructure/tools/analysis/GetDependenciesTool.js"
|
||||
import { GetDependentsTool } from "../../infrastructure/tools/analysis/GetDependentsTool.js"
|
||||
import { GetComplexityTool } from "../../infrastructure/tools/analysis/GetComplexityTool.js"
|
||||
import { GetTodosTool } from "../../infrastructure/tools/analysis/GetTodosTool.js"
|
||||
|
||||
import { GitStatusTool } from "../../infrastructure/tools/git/GitStatusTool.js"
|
||||
import { GitDiffTool } from "../../infrastructure/tools/git/GitDiffTool.js"
|
||||
import { GitCommitTool } from "../../infrastructure/tools/git/GitCommitTool.js"
|
||||
|
||||
import { RunCommandTool } from "../../infrastructure/tools/run/RunCommandTool.js"
|
||||
import { RunTestsTool } from "../../infrastructure/tools/run/RunTestsTool.js"
|
||||
|
||||
/**
|
||||
* Register all 18 tools with the tool registry.
|
||||
*/
|
||||
export function registerAllTools(registry: IToolRegistry): void {
|
||||
registry.register(new GetLinesTool())
|
||||
registry.register(new GetFunctionTool())
|
||||
registry.register(new GetClassTool())
|
||||
registry.register(new GetStructureTool())
|
||||
|
||||
registry.register(new EditLinesTool())
|
||||
registry.register(new CreateFileTool())
|
||||
registry.register(new DeleteFileTool())
|
||||
|
||||
registry.register(new FindReferencesTool())
|
||||
registry.register(new FindDefinitionTool())
|
||||
|
||||
registry.register(new GetDependenciesTool())
|
||||
registry.register(new GetDependentsTool())
|
||||
registry.register(new GetComplexityTool())
|
||||
registry.register(new GetTodosTool())
|
||||
|
||||
registry.register(new GitStatusTool())
|
||||
registry.register(new GitDiffTool())
|
||||
registry.register(new GitCommitTool())
|
||||
|
||||
registry.register(new RunCommandTool())
|
||||
registry.register(new RunTestsTool())
|
||||
}
|
||||
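Keeping registration in one helper means the CLI and any tests can share the same wiring. The sketch below mirrors what `executeStart` does before rendering the TUI:

```
import { ToolRegistry } from "../../infrastructure/tools/registry.js"
import { registerAllTools } from "./tools-setup.js"

const tools = new ToolRegistry()
registerAllTools(tools)
// `tools` now holds all 18 tools and can be passed as part of AppDependencies.
```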
@@ -1,44 +1,63 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* ipuaro CLI entry point.
|
||||
* Local AI agent for codebase operations with infinite context feeling.
|
||||
*/
|
||||
|
||||
import { createRequire } from "node:module"
|
||||
import { Command } from "commander"
|
||||
import { executeStart } from "./commands/start.js"
|
||||
import { executeInit } from "./commands/init.js"
|
||||
import { executeIndex } from "./commands/index-cmd.js"
|
||||
import { loadConfig } from "../shared/config/loader.js"
|
||||
|
||||
const require = createRequire(import.meta.url)
|
||||
const pkg = require("../../package.json") as { version: string }
|
||||
|
||||
const program = new Command()
|
||||
|
||||
program
|
||||
.name("ipuaro")
|
||||
.description("Local AI agent for codebase operations with infinite context feeling")
|
||||
.version("0.1.0")
|
||||
.version(pkg.version)
|
||||
|
||||
program
|
||||
.command("start")
|
||||
.command("start", { isDefault: true })
|
||||
.description("Start ipuaro TUI in the current directory")
|
||||
.argument("[path]", "Project path", ".")
|
||||
.option("--auto-apply", "Enable auto-apply mode for edits")
|
||||
.option("--model <name>", "Override LLM model", "qwen2.5-coder:7b-instruct")
|
||||
.action((path: string, options: { autoApply?: boolean; model?: string }) => {
|
||||
const model = options.model ?? "default"
|
||||
const autoApply = options.autoApply ?? false
|
||||
console.warn(`Starting ipuaro in ${path}...`)
|
||||
console.warn(`Model: ${model}`)
|
||||
console.warn(`Auto-apply: ${autoApply ? "enabled" : "disabled"}`)
|
||||
console.warn("\nNot implemented yet. Coming in version 0.11.0!")
|
||||
.option("--model <name>", "Override LLM model")
|
||||
.action(async (projectPath: string, options: { autoApply?: boolean; model?: string }) => {
|
||||
const config = loadConfig(projectPath)
|
||||
const result = await executeStart(projectPath, options, config)
|
||||
if (!result.success) {
|
||||
process.exit(1)
|
||||
}
|
||||
})
|
||||
|
||||
program
|
||||
.command("init")
|
||||
.description("Create .ipuaro.json config file")
|
||||
.action(() => {
|
||||
console.warn("Creating .ipuaro.json...")
|
||||
console.warn("\nNot implemented yet. Coming in version 0.17.0!")
|
||||
.argument("[path]", "Project path", ".")
|
||||
.option("--force", "Overwrite existing config file")
|
||||
.action(async (projectPath: string, options: { force?: boolean }) => {
|
||||
const result = await executeInit(projectPath, options)
|
||||
if (!result.success) {
|
||||
process.exit(1)
|
||||
}
|
||||
})
|
||||
|
||||
program
|
||||
.command("index")
|
||||
.description("Index project without starting TUI")
|
||||
.argument("[path]", "Project path", ".")
|
||||
.action((path: string) => {
|
||||
console.warn(`Indexing ${path}...`)
|
||||
console.warn("\nNot implemented yet. Coming in version 0.3.0!")
|
||||
.action(async (projectPath: string) => {
|
||||
const config = loadConfig(projectPath)
|
||||
const result = await executeIndex(projectPath, config)
|
||||
if (!result.success) {
|
||||
process.exit(1)
|
||||
}
|
||||
})
|
||||
|
||||
program.parse()
|
||||
|
||||
@@ -94,6 +94,12 @@ export class Session {
|
||||
}
|
||||
}
|
||||
|
||||
truncateHistory(maxMessages: number): void {
|
||||
if (this.history.length > maxMessages) {
|
||||
this.history = this.history.slice(-maxMessages)
|
||||
}
|
||||
}
|
||||
|
||||
clearHistory(): void {
|
||||
this.history = []
|
||||
this.context = {
|
||||
|
||||
@@ -1,26 +1,6 @@
|
||||
import type { ChatMessage } from "../value-objects/ChatMessage.js"
|
||||
import type { ToolCall } from "../value-objects/ToolCall.js"
|
||||
|
||||
/**
|
||||
* Tool parameter definition for LLM.
|
||||
*/
|
||||
export interface ToolParameter {
|
||||
name: string
|
||||
type: "string" | "number" | "boolean" | "array" | "object"
|
||||
description: string
|
||||
required: boolean
|
||||
enum?: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool definition for LLM function calling.
|
||||
*/
|
||||
export interface ToolDef {
|
||||
name: string
|
||||
description: string
|
||||
parameters: ToolParameter[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Response from LLM.
|
||||
*/
|
||||
@@ -42,12 +22,16 @@ export interface LLMResponse {
|
||||
/**
|
||||
* LLM client service interface (port).
|
||||
* Abstracts the LLM provider.
|
||||
*
|
||||
* Tool definitions should be included in the system prompt as XML format,
|
||||
* not passed as a separate parameter.
|
||||
*/
|
||||
export interface ILLMClient {
|
||||
/**
|
||||
* Send messages to LLM and get response.
|
||||
* Tool calls are extracted from the response content using XML parsing.
|
||||
*/
|
||||
chat(messages: ChatMessage[], tools?: ToolDef[]): Promise<LLMResponse>
|
||||
chat(messages: ChatMessage[]): Promise<LLMResponse>
|
||||
|
||||
/**
|
||||
* Count tokens in text.
|
||||
|
||||
packages/ipuaro/src/domain/services/ISessionStorage.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
import type { ContextState, Session, SessionStats } from "../entities/Session.js"
|
||||
import type { ChatMessage } from "../value-objects/ChatMessage.js"
|
||||
import type { UndoEntry } from "../value-objects/UndoEntry.js"
|
||||
|
||||
/**
|
||||
* Session data stored in persistence layer.
|
||||
*/
|
||||
export interface SessionData {
|
||||
id: string
|
||||
projectName: string
|
||||
createdAt: number
|
||||
lastActivityAt: number
|
||||
history: ChatMessage[]
|
||||
context: ContextState
|
||||
stats: SessionStats
|
||||
inputHistory: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Session list item (minimal info for listing).
|
||||
*/
|
||||
export interface SessionListItem {
|
||||
id: string
|
||||
projectName: string
|
||||
createdAt: number
|
||||
lastActivityAt: number
|
||||
messageCount: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Storage service interface for session persistence.
|
||||
*/
|
||||
export interface ISessionStorage {
|
||||
/**
|
||||
* Save a session to storage.
|
||||
*/
|
||||
saveSession(session: Session): Promise<void>
|
||||
|
||||
/**
|
||||
* Load a session by ID.
|
||||
*/
|
||||
loadSession(sessionId: string): Promise<Session | null>
|
||||
|
||||
/**
|
||||
* Delete a session.
|
||||
*/
|
||||
deleteSession(sessionId: string): Promise<void>
|
||||
|
||||
/**
|
||||
* Get list of all sessions for a project.
|
||||
*/
|
||||
listSessions(projectName?: string): Promise<SessionListItem[]>
|
||||
|
||||
/**
|
||||
* Get the latest session for a project.
|
||||
*/
|
||||
getLatestSession(projectName: string): Promise<Session | null>
|
||||
|
||||
/**
|
||||
* Check if a session exists.
|
||||
*/
|
||||
sessionExists(sessionId: string): Promise<boolean>
|
||||
|
||||
/**
|
||||
* Add undo entry to session's undo stack.
|
||||
*/
|
||||
pushUndoEntry(sessionId: string, entry: UndoEntry): Promise<void>
|
||||
|
||||
/**
|
||||
* Pop undo entry from session's undo stack.
|
||||
*/
|
||||
popUndoEntry(sessionId: string): Promise<UndoEntry | null>
|
||||
|
||||
/**
|
||||
* Get undo stack for a session.
|
||||
*/
|
||||
getUndoStack(sessionId: string): Promise<UndoEntry[]>
|
||||
|
||||
/**
|
||||
* Update session's last activity timestamp.
|
||||
*/
|
||||
touchSession(sessionId: string): Promise<void>
|
||||
|
||||
/**
|
||||
* Clear all sessions.
|
||||
*/
|
||||
clearAllSessions(): Promise<void>
|
||||
}
|
||||
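A small consumer sketch of this port. It assumes the `Session` entity exposes an `id` field, as `SessionData` does; that field is not shown in this diff:

```
import type { ISessionStorage } from "./ISessionStorage.js"
import type { Session } from "../entities/Session.js"

// Resume the most recent session for a project, bumping its activity timestamp.
async function resumeLatest(
    storage: ISessionStorage,
    projectName: string,
): Promise<Session | null> {
    const latest = await storage.getLatestSession(projectName)
    if (latest) {
        // `latest.id` assumes the entity mirrors SessionData.id.
        await storage.touchSession(latest.id)
    }
    return latest
}
```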
@@ -1,5 +1,6 @@
|
||||
// Domain Service Interfaces (Ports)
|
||||
export * from "./IStorage.js"
|
||||
export * from "./ISessionStorage.js"
|
||||
export * from "./ILLMClient.js"
|
||||
export * from "./ITool.js"
|
||||
export * from "./IIndexer.js"
|
||||
|
||||
@@ -4,6 +4,11 @@
|
||||
* Main entry point for the library.
|
||||
*/
|
||||
|
||||
import { createRequire } from "node:module"
|
||||
|
||||
const require = createRequire(import.meta.url)
|
||||
const pkg = require("../package.json") as { version: string }
|
||||
|
||||
// Domain exports
|
||||
export * from "./domain/index.js"
|
||||
|
||||
@@ -13,5 +18,11 @@ export * from "./application/index.js"
|
||||
// Shared exports
|
||||
export * from "./shared/index.js"
|
||||
|
||||
// Infrastructure exports
|
||||
export * from "./infrastructure/index.js"
|
||||
|
||||
// TUI exports
|
||||
export * from "./tui/index.js"
|
||||
|
||||
// Version
|
||||
export const VERSION = "0.1.0"
|
||||
export const VERSION = pkg.version
|
||||
|
||||
packages/ipuaro/src/infrastructure/index.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
// Infrastructure layer exports
|
||||
export * from "./storage/index.js"
|
||||
export * from "./indexer/index.js"
|
||||
export * from "./llm/index.js"
|
||||
export * from "./tools/index.js"
|
||||
export * from "./security/index.js"
|
||||
packages/ipuaro/src/infrastructure/indexer/ASTParser.ts (new file, 551 lines)
@@ -0,0 +1,551 @@
|
||||
import { builtinModules } from "node:module"
|
||||
import Parser from "tree-sitter"
|
||||
import TypeScript from "tree-sitter-typescript"
|
||||
import JavaScript from "tree-sitter-javascript"
|
||||
import {
|
||||
createEmptyFileAST,
|
||||
type ExportInfo,
|
||||
type FileAST,
|
||||
type ImportInfo,
|
||||
type MethodInfo,
|
||||
type ParameterInfo,
|
||||
type PropertyInfo,
|
||||
} from "../../domain/value-objects/FileAST.js"
|
||||
import { FieldName, NodeType } from "./tree-sitter-types.js"
|
||||
|
||||
type Language = "ts" | "tsx" | "js" | "jsx"
|
||||
type SyntaxNode = Parser.SyntaxNode
|
||||
|
||||
/**
|
||||
* Parses source code into AST using tree-sitter.
|
||||
*/
|
||||
export class ASTParser {
|
||||
private readonly parsers = new Map<Language, Parser>()
|
||||
|
||||
constructor() {
|
||||
this.initializeParsers()
|
||||
}
|
||||
|
||||
private initializeParsers(): void {
|
||||
const tsParser = new Parser()
|
||||
tsParser.setLanguage(TypeScript.typescript)
|
||||
this.parsers.set("ts", tsParser)
|
||||
|
||||
const tsxParser = new Parser()
|
||||
tsxParser.setLanguage(TypeScript.tsx)
|
||||
this.parsers.set("tsx", tsxParser)
|
||||
|
||||
const jsParser = new Parser()
|
||||
jsParser.setLanguage(JavaScript)
|
||||
this.parsers.set("js", jsParser)
|
||||
this.parsers.set("jsx", jsParser)
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse source code and extract AST information.
|
||||
*/
|
||||
parse(content: string, language: Language): FileAST {
|
||||
const parser = this.parsers.get(language)
|
||||
if (!parser) {
|
||||
return {
|
||||
...createEmptyFileAST(),
|
||||
parseError: true,
|
||||
parseErrorMessage: `Unsupported language: ${language}`,
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const tree = parser.parse(content)
|
||||
const root = tree.rootNode
|
||||
|
||||
if (root.hasError) {
|
||||
const ast = this.extractAST(root, language)
|
||||
ast.parseError = true
|
||||
ast.parseErrorMessage = "Syntax error in source code"
|
||||
return ast
|
||||
}
|
||||
|
||||
return this.extractAST(root, language)
|
||||
} catch (error) {
|
||||
return {
|
||||
...createEmptyFileAST(),
|
||||
parseError: true,
|
||||
parseErrorMessage: error instanceof Error ? error.message : "Unknown parse error",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extractAST(root: SyntaxNode, language: Language): FileAST {
|
||||
const ast = createEmptyFileAST()
|
||||
const isTypeScript = language === "ts" || language === "tsx"
|
||||
|
||||
for (const child of root.children) {
|
||||
this.visitNode(child, ast, isTypeScript)
|
||||
}
|
||||
|
||||
return ast
|
||||
}
|
||||
|
||||
private visitNode(node: SyntaxNode, ast: FileAST, isTypeScript: boolean): void {
|
||||
switch (node.type) {
|
||||
case NodeType.IMPORT_STATEMENT:
|
||||
this.extractImport(node, ast)
|
||||
break
|
||||
case NodeType.EXPORT_STATEMENT:
|
||||
this.extractExport(node, ast)
|
||||
break
|
||||
case NodeType.FUNCTION_DECLARATION:
|
||||
this.extractFunction(node, ast, false)
|
||||
break
|
||||
case NodeType.LEXICAL_DECLARATION:
|
||||
this.extractLexicalDeclaration(node, ast)
|
||||
break
|
||||
case NodeType.CLASS_DECLARATION:
|
||||
this.extractClass(node, ast, false)
|
||||
break
|
||||
case NodeType.INTERFACE_DECLARATION:
|
||||
if (isTypeScript) {
|
||||
this.extractInterface(node, ast, false)
|
||||
}
|
||||
break
|
||||
case NodeType.TYPE_ALIAS_DECLARATION:
|
||||
if (isTypeScript) {
|
||||
this.extractTypeAlias(node, ast, false)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
private extractImport(node: SyntaxNode, ast: FileAST): void {
|
||||
const sourceNode = node.childForFieldName(FieldName.SOURCE)
|
||||
if (!sourceNode) {
|
||||
return
|
||||
}
|
||||
|
||||
const from = this.getStringValue(sourceNode)
|
||||
const line = node.startPosition.row + 1
|
||||
const importType = this.classifyImport(from)
|
||||
|
||||
const importClause = node.children.find((c) => c.type === NodeType.IMPORT_CLAUSE)
|
||||
if (!importClause) {
|
||||
ast.imports.push({
|
||||
name: "*",
|
||||
from,
|
||||
line,
|
||||
type: importType,
|
||||
isDefault: false,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
for (const child of importClause.children) {
|
||||
if (child.type === NodeType.IDENTIFIER) {
|
||||
ast.imports.push({
|
||||
name: child.text,
|
||||
from,
|
||||
line,
|
||||
type: importType,
|
||||
isDefault: true,
|
||||
})
|
||||
} else if (child.type === NodeType.NAMESPACE_IMPORT) {
|
||||
const alias = child.children.find((c) => c.type === NodeType.IDENTIFIER)
|
||||
ast.imports.push({
|
||||
name: alias?.text ?? "*",
|
||||
from,
|
||||
line,
|
||||
type: importType,
|
||||
isDefault: false,
|
||||
})
|
||||
} else if (child.type === NodeType.NAMED_IMPORTS) {
|
||||
for (const specifier of child.children) {
|
||||
if (specifier.type === NodeType.IMPORT_SPECIFIER) {
|
||||
const nameNode = specifier.childForFieldName(FieldName.NAME)
|
||||
const aliasNode = specifier.childForFieldName(FieldName.ALIAS)
|
||||
ast.imports.push({
|
||||
name: aliasNode?.text ?? nameNode?.text ?? "",
|
||||
from,
|
||||
line,
|
||||
type: importType,
|
||||
isDefault: false,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extractExport(node: SyntaxNode, ast: FileAST): void {
|
||||
const isDefault = node.children.some((c) => c.type === NodeType.DEFAULT)
|
||||
const declaration = node.childForFieldName(FieldName.DECLARATION)
|
||||
|
||||
if (declaration) {
|
||||
switch (declaration.type) {
|
||||
case NodeType.FUNCTION_DECLARATION:
|
||||
this.extractFunction(declaration, ast, true)
|
||||
this.addExportInfo(ast, declaration, "function", isDefault)
|
||||
break
|
||||
case NodeType.CLASS_DECLARATION:
|
||||
this.extractClass(declaration, ast, true)
|
||||
this.addExportInfo(ast, declaration, "class", isDefault)
|
||||
break
|
||||
case NodeType.INTERFACE_DECLARATION:
|
||||
this.extractInterface(declaration, ast, true)
|
||||
this.addExportInfo(ast, declaration, "interface", isDefault)
|
||||
break
|
||||
case NodeType.TYPE_ALIAS_DECLARATION:
|
||||
this.extractTypeAlias(declaration, ast, true)
|
||||
this.addExportInfo(ast, declaration, "type", isDefault)
|
||||
break
|
||||
case NodeType.LEXICAL_DECLARATION:
|
||||
this.extractLexicalDeclaration(declaration, ast, true)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
const exportClause = node.children.find((c) => c.type === NodeType.EXPORT_CLAUSE)
|
||||
if (exportClause) {
|
||||
for (const specifier of exportClause.children) {
|
||||
if (specifier.type === NodeType.EXPORT_SPECIFIER) {
|
||||
const nameNode = specifier.childForFieldName(FieldName.NAME)
|
||||
if (nameNode) {
|
||||
ast.exports.push({
|
||||
name: nameNode.text,
|
||||
line: node.startPosition.row + 1,
|
||||
isDefault: false,
|
||||
kind: "variable",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extractFunction(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
if (!nameNode) {
|
||||
return
|
||||
}
|
||||
|
||||
const params = this.extractParameters(node)
|
||||
const isAsync = node.children.some((c) => c.type === NodeType.ASYNC)
|
||||
const returnTypeNode = node.childForFieldName(FieldName.RETURN_TYPE)
|
||||
|
||||
ast.functions.push({
|
||||
name: nameNode.text,
|
||||
lineStart: node.startPosition.row + 1,
|
||||
lineEnd: node.endPosition.row + 1,
|
||||
params,
|
||||
isAsync,
|
||||
isExported,
|
||||
returnType: returnTypeNode?.text?.replace(/^:\s*/, ""),
|
||||
})
|
||||
}
|
||||
|
||||
private extractLexicalDeclaration(node: SyntaxNode, ast: FileAST, isExported = false): void {
|
||||
for (const child of node.children) {
|
||||
if (child.type === NodeType.VARIABLE_DECLARATOR) {
|
||||
const nameNode = child.childForFieldName(FieldName.NAME)
|
||||
const valueNode = child.childForFieldName(FieldName.VALUE)
|
||||
|
||||
if (
|
||||
valueNode?.type === NodeType.ARROW_FUNCTION ||
|
||||
valueNode?.type === NodeType.FUNCTION
|
||||
) {
|
||||
const params = this.extractParameters(valueNode)
|
||||
const isAsync = valueNode.children.some((c) => c.type === NodeType.ASYNC)
|
||||
|
||||
ast.functions.push({
|
||||
name: nameNode?.text ?? "",
|
||||
lineStart: node.startPosition.row + 1,
|
||||
lineEnd: node.endPosition.row + 1,
|
||||
params,
|
||||
isAsync,
|
||||
isExported,
|
||||
})
|
||||
|
||||
if (isExported) {
|
||||
ast.exports.push({
|
||||
name: nameNode?.text ?? "",
|
||||
line: node.startPosition.row + 1,
|
||||
isDefault: false,
|
||||
kind: "function",
|
||||
})
|
||||
}
|
||||
} else if (isExported && nameNode) {
|
||||
ast.exports.push({
|
||||
name: nameNode.text,
|
||||
line: node.startPosition.row + 1,
|
||||
isDefault: false,
|
||||
kind: "variable",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extractClass(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
if (!nameNode) {
|
||||
return
|
||||
}
|
||||
|
||||
const body = node.childForFieldName(FieldName.BODY)
|
||||
const methods: MethodInfo[] = []
|
||||
const properties: PropertyInfo[] = []
|
||||
|
||||
if (body) {
|
||||
for (const member of body.children) {
|
||||
if (member.type === NodeType.METHOD_DEFINITION) {
|
||||
methods.push(this.extractMethod(member))
|
||||
} else if (
|
||||
member.type === NodeType.PUBLIC_FIELD_DEFINITION ||
|
||||
member.type === NodeType.FIELD_DEFINITION
|
||||
) {
|
||||
properties.push(this.extractProperty(member))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const { extendsName, implementsList } = this.extractClassHeritage(node)
|
||||
const isAbstract = node.children.some((c) => c.type === NodeType.ABSTRACT)
|
||||
|
||||
ast.classes.push({
|
||||
name: nameNode.text,
|
||||
lineStart: node.startPosition.row + 1,
|
||||
lineEnd: node.endPosition.row + 1,
|
||||
methods,
|
||||
properties,
|
||||
extends: extendsName,
|
||||
implements: implementsList,
|
||||
isExported,
|
||||
isAbstract,
|
||||
})
|
||||
}
|
||||
|
||||
private extractClassHeritage(node: SyntaxNode): {
|
||||
extendsName: string | undefined
|
||||
implementsList: string[]
|
||||
} {
|
||||
let extendsName: string | undefined
|
||||
const implementsList: string[] = []
|
||||
|
||||
for (const child of node.children) {
|
||||
if (child.type === NodeType.CLASS_HERITAGE) {
|
||||
this.parseHeritageClause(child, (ext) => (extendsName = ext), implementsList)
|
||||
} else if (child.type === NodeType.EXTENDS_CLAUSE) {
|
||||
extendsName = this.findTypeIdentifier(child)
|
||||
}
|
||||
}
|
||||
|
||||
return { extendsName, implementsList }
|
||||
}
|
||||
|
||||
private parseHeritageClause(
|
||||
heritage: SyntaxNode,
|
||||
setExtends: (name: string) => void,
|
||||
implementsList: string[],
|
||||
): void {
|
||||
for (const clause of heritage.children) {
|
||||
if (clause.type === NodeType.EXTENDS_CLAUSE) {
|
||||
const typeId = this.findTypeIdentifier(clause)
|
||||
if (typeId) {
|
||||
setExtends(typeId)
|
||||
}
|
||||
} else if (clause.type === NodeType.IMPLEMENTS_CLAUSE) {
|
||||
this.collectImplements(clause, implementsList)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private findTypeIdentifier(node: SyntaxNode): string | undefined {
|
||||
const typeNode = node.children.find(
|
||||
(c) => c.type === NodeType.TYPE_IDENTIFIER || c.type === NodeType.IDENTIFIER,
|
||||
)
|
||||
return typeNode?.text
|
||||
}
|
||||
|
||||
private collectImplements(clause: SyntaxNode, list: string[]): void {
|
||||
for (const impl of clause.children) {
|
||||
if (impl.type === NodeType.TYPE_IDENTIFIER || impl.type === NodeType.IDENTIFIER) {
|
||||
list.push(impl.text)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private extractMethod(node: SyntaxNode): MethodInfo {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
const params = this.extractParameters(node)
|
||||
const isAsync = node.children.some((c) => c.type === NodeType.ASYNC)
|
||||
const isStatic = node.children.some((c) => c.type === NodeType.STATIC)
|
||||
|
||||
let visibility: "public" | "private" | "protected" = "public"
|
||||
for (const child of node.children) {
|
||||
if (child.type === NodeType.ACCESSIBILITY_MODIFIER) {
|
||||
visibility = child.text as "public" | "private" | "protected"
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
name: nameNode?.text ?? "",
|
||||
lineStart: node.startPosition.row + 1,
|
||||
lineEnd: node.endPosition.row + 1,
|
||||
params,
|
||||
isAsync,
|
||||
visibility,
|
||||
isStatic,
|
||||
}
|
||||
}
|
||||
|
||||
private extractProperty(node: SyntaxNode): PropertyInfo {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
const typeNode = node.childForFieldName(FieldName.TYPE)
|
||||
const isStatic = node.children.some((c) => c.type === NodeType.STATIC)
|
||||
const isReadonly = node.children.some((c) => c.text === NodeType.READONLY)
|
||||
|
||||
let visibility: "public" | "private" | "protected" = "public"
|
||||
for (const child of node.children) {
|
||||
if (child.type === NodeType.ACCESSIBILITY_MODIFIER) {
|
||||
visibility = child.text as "public" | "private" | "protected"
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
name: nameNode?.text ?? "",
|
||||
line: node.startPosition.row + 1,
|
||||
type: typeNode?.text,
|
||||
visibility,
|
||||
isStatic,
|
||||
isReadonly,
|
||||
}
|
||||
}
|
||||
|
||||
private extractInterface(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
if (!nameNode) {
|
||||
return
|
||||
}
|
||||
|
||||
const body = node.childForFieldName(FieldName.BODY)
|
||||
const properties: PropertyInfo[] = []
|
||||
|
||||
if (body) {
|
||||
for (const member of body.children) {
|
||||
if (member.type === NodeType.PROPERTY_SIGNATURE) {
|
||||
const propName = member.childForFieldName(FieldName.NAME)
|
||||
const propType = member.childForFieldName(FieldName.TYPE)
|
||||
properties.push({
|
||||
name: propName?.text ?? "",
|
||||
line: member.startPosition.row + 1,
|
||||
type: propType?.text,
|
||||
visibility: "public",
|
||||
isStatic: false,
|
||||
isReadonly: member.children.some((c) => c.text === NodeType.READONLY),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const extendsList: string[] = []
|
||||
const extendsClause = node.children.find((c) => c.type === NodeType.EXTENDS_TYPE_CLAUSE)
|
||||
if (extendsClause) {
|
||||
for (const child of extendsClause.children) {
|
||||
if (child.type === NodeType.TYPE_IDENTIFIER) {
|
||||
extendsList.push(child.text)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ast.interfaces.push({
|
||||
name: nameNode.text,
|
||||
lineStart: node.startPosition.row + 1,
|
||||
lineEnd: node.endPosition.row + 1,
|
||||
properties,
|
||||
extends: extendsList,
|
||||
isExported,
|
||||
})
|
||||
}
|
||||
|
||||
private extractTypeAlias(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
if (!nameNode) {
|
||||
return
|
||||
}
|
||||
|
||||
ast.typeAliases.push({
|
||||
name: nameNode.text,
|
||||
line: node.startPosition.row + 1,
|
||||
isExported,
|
||||
})
|
||||
}
|
||||
|
||||
private extractParameters(node: SyntaxNode): ParameterInfo[] {
|
||||
const params: ParameterInfo[] = []
|
||||
const paramsNode = node.childForFieldName(FieldName.PARAMETERS)
|
||||
|
||||
if (paramsNode) {
|
||||
for (const param of paramsNode.children) {
|
||||
if (
|
||||
param.type === NodeType.REQUIRED_PARAMETER ||
|
||||
param.type === NodeType.OPTIONAL_PARAMETER ||
|
||||
param.type === NodeType.IDENTIFIER
|
||||
) {
|
||||
const nameNode =
|
||||
param.type === NodeType.IDENTIFIER
|
||||
? param
|
||||
: param.childForFieldName(FieldName.PATTERN)
|
||||
const typeNode = param.childForFieldName(FieldName.TYPE)
|
||||
const defaultValue = param.childForFieldName(FieldName.VALUE)
|
||||
|
||||
params.push({
|
||||
name: nameNode?.text ?? "",
|
||||
type: typeNode?.text,
|
||||
optional: param.type === NodeType.OPTIONAL_PARAMETER,
|
||||
hasDefault: defaultValue !== null,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return params
|
||||
}
|
||||
|
||||
private addExportInfo(
|
||||
ast: FileAST,
|
||||
node: SyntaxNode,
|
||||
kind: ExportInfo["kind"],
|
||||
isDefault: boolean,
|
||||
): void {
|
||||
const nameNode = node.childForFieldName(FieldName.NAME)
|
||||
if (nameNode) {
|
||||
ast.exports.push({
|
||||
name: nameNode.text,
|
||||
line: node.startPosition.row + 1,
|
||||
isDefault,
|
||||
kind,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
private classifyImport(from: string): ImportInfo["type"] {
|
||||
if (from.startsWith(".") || from.startsWith("/")) {
|
||||
return "internal"
|
||||
}
|
||||
if (from.startsWith("node:") || builtinModules.includes(from)) {
|
||||
return "builtin"
|
||||
}
|
||||
return "external"
|
||||
}
|
||||
|
||||
private getStringValue(node: SyntaxNode): string {
|
||||
const text = node.text
|
||||
if (
|
||||
(text.startsWith('"') && text.endsWith('"')) ||
|
||||
(text.startsWith("'") && text.endsWith("'"))
|
||||
) {
|
||||
return text.slice(1, -1)
|
||||
}
|
||||
return text
|
||||
}
|
||||
}
|
||||
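A quick sanity-check sketch of the parser on an inline snippet (the snippet itself is arbitrary):

```
import { ASTParser } from "./ASTParser.js"

const parser = new ASTParser()
const ast = parser.parse(
    "export async function greet(name: string): Promise<string> { return name }",
    "ts",
)

if (!ast.parseError) {
    // Expected: one exported async function "greet" with a single parameter "name".
    for (const fn of ast.functions) {
        console.log(fn.name, fn.isAsync, fn.params.map((p) => p.name))
    }
    console.log(ast.exports.map((e) => `${e.kind}:${e.name}`))
}
```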
packages/ipuaro/src/infrastructure/indexer/FileScanner.ts (new file, 189 lines)
@@ -0,0 +1,189 @@
|
||||
import * as fs from "node:fs/promises"
|
||||
import type { Stats } from "node:fs"
|
||||
import * as path from "node:path"
|
||||
import { globby } from "globby"
|
||||
import {
|
||||
BINARY_EXTENSIONS,
|
||||
DEFAULT_IGNORE_PATTERNS,
|
||||
SUPPORTED_EXTENSIONS,
|
||||
} from "../../domain/constants/index.js"
|
||||
import type { ScanResult } from "../../domain/services/IIndexer.js"
|
||||
|
||||
/**
|
||||
* Progress callback for file scanning.
|
||||
*/
|
||||
export interface ScanProgress {
|
||||
current: number
|
||||
total: number
|
||||
currentFile: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for FileScanner.
|
||||
*/
|
||||
export interface FileScannerOptions {
|
||||
/** Additional ignore patterns (besides .gitignore and defaults) */
|
||||
additionalIgnore?: string[]
|
||||
/** Only include files with these extensions. Defaults to SUPPORTED_EXTENSIONS. */
|
||||
extensions?: readonly string[]
|
||||
/** Callback for progress updates */
|
||||
onProgress?: (progress: ScanProgress) => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Scans project directories recursively using globby.
|
||||
* Respects .gitignore, skips binary files and default ignore patterns.
|
||||
*/
|
||||
export class FileScanner {
|
||||
private readonly extensions: Set<string>
|
||||
private readonly additionalIgnore: string[]
|
||||
private readonly onProgress?: (progress: ScanProgress) => void
|
||||
|
||||
constructor(options: FileScannerOptions = {}) {
|
||||
this.extensions = new Set(options.extensions ?? SUPPORTED_EXTENSIONS)
|
||||
this.additionalIgnore = options.additionalIgnore ?? []
|
||||
this.onProgress = options.onProgress
|
||||
}
|
||||
|
||||
/**
|
||||
* Build glob patterns from extensions.
|
||||
*/
|
||||
private buildGlobPatterns(): string[] {
|
||||
const exts = [...this.extensions].map((ext) => ext.replace(".", ""))
|
||||
if (exts.length === 1) {
|
||||
return [`**/*.${exts[0]}`]
|
||||
}
|
||||
return [`**/*.{${exts.join(",")}}`]
|
||||
}
|
||||
|
||||
/**
|
||||
* Build ignore patterns.
|
||||
*/
|
||||
private buildIgnorePatterns(): string[] {
|
||||
const patterns = [
|
||||
...DEFAULT_IGNORE_PATTERNS,
|
||||
...this.additionalIgnore,
|
||||
...BINARY_EXTENSIONS.map((ext) => `**/*${ext}`),
|
||||
]
|
||||
return patterns
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan directory and yield file results.
|
||||
* @param root - Root directory to scan
|
||||
*/
|
||||
async *scan(root: string): AsyncGenerator<ScanResult> {
|
||||
const globPatterns = this.buildGlobPatterns()
|
||||
const ignorePatterns = this.buildIgnorePatterns()
|
||||
|
||||
const files = await globby(globPatterns, {
|
||||
cwd: root,
|
||||
gitignore: true,
|
||||
ignore: ignorePatterns,
|
||||
absolute: false,
|
||||
onlyFiles: true,
|
||||
followSymbolicLinks: false,
|
||||
})
|
||||
|
||||
const total = files.length
|
||||
let current = 0
|
||||
|
||||
for (const relativePath of files) {
|
||||
current++
|
||||
this.reportProgress(relativePath, current, total)
|
||||
|
||||
const fullPath = path.join(root, relativePath)
|
||||
const stats = await this.safeStats(fullPath)
|
||||
|
||||
if (stats) {
|
||||
yield {
|
||||
path: relativePath,
|
||||
type: "file",
|
||||
size: stats.size,
|
||||
lastModified: stats.mtimeMs,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Scan and return all results as array.
|
||||
*/
|
||||
async scanAll(root: string): Promise<ScanResult[]> {
|
||||
const results: ScanResult[] = []
|
||||
for await (const result of this.scan(root)) {
|
||||
results.push(result)
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if file has supported extension.
|
||||
*/
|
||||
isSupportedExtension(filePath: string): boolean {
|
||||
const ext = path.extname(filePath).toLowerCase()
|
||||
return this.extensions.has(ext)
|
||||
}
|
||||
|
||||
/**
|
||||
* Safely get file stats without throwing.
|
||||
*/
|
||||
private async safeStats(filePath: string): Promise<Stats | null> {
|
||||
try {
|
||||
return await fs.stat(filePath)
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Report progress if callback is set.
|
||||
*/
|
||||
private reportProgress(currentFile: string, current: number, total: number): void {
|
||||
if (this.onProgress) {
|
||||
this.onProgress({ current, total, currentFile })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if file content is likely UTF-8 text.
|
||||
* Reads first 8KB and checks for null bytes.
|
||||
*/
|
||||
static async isTextFile(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
const handle = await fs.open(filePath, "r")
|
||||
try {
|
||||
const buffer = Buffer.alloc(8192)
|
||||
const { bytesRead } = await handle.read(buffer, 0, 8192, 0)
|
||||
if (bytesRead === 0) {
|
||||
return true
|
||||
}
|
||||
for (let i = 0; i < bytesRead; i++) {
|
||||
if (buffer[i] === 0) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
} finally {
|
||||
await handle.close()
|
||||
}
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read file content as string.
|
||||
* Returns null if file is binary or unreadable.
|
||||
*/
|
||||
static async readFileContent(filePath: string): Promise<string | null> {
|
||||
if (!(await FileScanner.isTextFile(filePath))) {
|
||||
return null
|
||||
}
|
||||
try {
|
||||
return await fs.readFile(filePath, "utf-8")
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
}
|
||||
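A usage sketch of the scanner with a progress callback; the extra ignore pattern is purely illustrative:

```
import { FileScanner } from "./FileScanner.js"

const scanner = new FileScanner({
    additionalIgnore: ["**/fixtures/**"],
    onProgress: ({ current, total, currentFile }) => {
        process.stdout.write(`\r${String(current)}/${String(total)} ${currentFile}`)
    },
})

// scanAll() drains the async generator; scan() can be consumed lazily with `for await`.
const files = await scanner.scanAll(process.cwd())
console.warn(`\nFound ${String(files.length)} files`)
```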
packages/ipuaro/src/infrastructure/indexer/IndexBuilder.ts (new file, 406 lines)
@@ -0,0 +1,406 @@
|
||||
import * as path from "node:path"
|
||||
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||
import type { DepsGraph, SymbolIndex, SymbolLocation } from "../../domain/services/IStorage.js"
|
||||
|
||||
/**
|
||||
* Builds searchable indexes from parsed ASTs.
|
||||
*/
|
||||
export class IndexBuilder {
|
||||
private readonly projectRoot: string
|
||||
|
||||
constructor(projectRoot: string) {
|
||||
this.projectRoot = projectRoot
|
||||
}
|
||||
|
||||
/**
|
||||
* Build symbol index from all ASTs.
|
||||
* Maps symbol names to their locations for quick lookup.
|
||||
*/
|
||||
buildSymbolIndex(asts: Map<string, FileAST>): SymbolIndex {
|
||||
const index: SymbolIndex = new Map()
|
||||
|
||||
for (const [filePath, ast] of asts) {
|
||||
this.indexFunctions(filePath, ast, index)
|
||||
this.indexClasses(filePath, ast, index)
|
||||
this.indexInterfaces(filePath, ast, index)
|
||||
this.indexTypeAliases(filePath, ast, index)
|
||||
this.indexExportedVariables(filePath, ast, index)
|
||||
}
|
||||
|
||||
return index
|
||||
}
|
||||
|
||||
/**
|
||||
* Index function declarations.
|
||||
*/
|
||||
private indexFunctions(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||
for (const func of ast.functions) {
|
||||
this.addSymbol(index, func.name, {
|
||||
path: filePath,
|
||||
line: func.lineStart,
|
||||
type: "function",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Index class declarations.
|
||||
*/
|
||||
private indexClasses(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||
for (const cls of ast.classes) {
|
||||
this.addSymbol(index, cls.name, {
|
||||
path: filePath,
|
||||
line: cls.lineStart,
|
||||
type: "class",
|
||||
})
|
||||
|
||||
for (const method of cls.methods) {
|
||||
const qualifiedName = `${cls.name}.${method.name}`
|
||||
this.addSymbol(index, qualifiedName, {
|
||||
path: filePath,
|
||||
line: method.lineStart,
|
||||
type: "function",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Index interface declarations.
|
||||
*/
|
||||
private indexInterfaces(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||
for (const iface of ast.interfaces) {
|
||||
this.addSymbol(index, iface.name, {
|
||||
path: filePath,
|
||||
line: iface.lineStart,
|
||||
type: "interface",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Index type alias declarations.
|
||||
*/
|
||||
private indexTypeAliases(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||
for (const typeAlias of ast.typeAliases) {
|
||||
this.addSymbol(index, typeAlias.name, {
|
||||
path: filePath,
|
||||
line: typeAlias.line,
|
||||
type: "type",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Index exported variables (not functions).
|
||||
*/
|
||||
private indexExportedVariables(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||
const functionNames = new Set(ast.functions.map((f) => f.name))
|
||||
|
||||
for (const exp of ast.exports) {
|
||||
if (exp.kind === "variable" && !functionNames.has(exp.name)) {
|
||||
this.addSymbol(index, exp.name, {
|
||||
path: filePath,
|
||||
line: exp.line,
|
||||
type: "variable",
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a symbol to the index.
|
||||
*/
|
||||
private addSymbol(index: SymbolIndex, name: string, location: SymbolLocation): void {
|
||||
if (!name) {
|
||||
return
|
||||
}
|
||||
|
||||
const existing = index.get(name)
|
||||
if (existing) {
|
||||
const isDuplicate = existing.some(
|
||||
(loc) => loc.path === location.path && loc.line === location.line,
|
||||
)
|
||||
if (!isDuplicate) {
|
||||
existing.push(location)
|
||||
}
|
||||
} else {
|
||||
index.set(name, [location])
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build dependency graph from all ASTs.
|
||||
* Creates bidirectional mapping of imports.
|
||||
*/
|
||||
buildDepsGraph(asts: Map<string, FileAST>): DepsGraph {
|
||||
const imports = new Map<string, string[]>()
|
||||
const importedBy = new Map<string, string[]>()
|
||||
|
||||
for (const filePath of asts.keys()) {
|
||||
imports.set(filePath, [])
|
||||
importedBy.set(filePath, [])
|
||||
}
|
||||
|
||||
for (const [filePath, ast] of asts) {
|
||||
const fileImports = this.resolveFileImports(filePath, ast, asts)
|
||||
imports.set(filePath, fileImports)
|
||||
|
||||
for (const importedFile of fileImports) {
|
||||
const dependents = importedBy.get(importedFile) ?? []
|
||||
if (!dependents.includes(filePath)) {
|
||||
dependents.push(filePath)
|
||||
importedBy.set(importedFile, dependents)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const [filePath, deps] of imports) {
|
||||
imports.set(filePath, deps.sort())
|
||||
}
|
||||
for (const [filePath, deps] of importedBy) {
|
||||
importedBy.set(filePath, deps.sort())
|
||||
}
|
||||
|
||||
return { imports, importedBy }
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve internal imports for a file.
|
||||
*/
|
||||
private resolveFileImports(
|
||||
filePath: string,
|
||||
ast: FileAST,
|
||||
allASTs: Map<string, FileAST>,
|
||||
): string[] {
|
||||
const fileDir = path.dirname(filePath)
|
||||
const resolvedImports: string[] = []
|
||||
|
||||
for (const imp of ast.imports) {
|
||||
if (imp.type !== "internal") {
|
||||
continue
|
||||
}
|
||||
|
||||
const resolved = this.resolveImportPath(fileDir, imp.from, allASTs)
|
||||
if (resolved && !resolvedImports.includes(resolved)) {
|
||||
resolvedImports.push(resolved)
|
||||
}
|
||||
}
|
||||
|
||||
return resolvedImports
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve import path to actual file path.
|
||||
*/
|
||||
private resolveImportPath(
|
||||
fromDir: string,
|
||||
importPath: string,
|
||||
allASTs: Map<string, FileAST>,
|
||||
): string | null {
|
||||
const absolutePath = path.resolve(fromDir, importPath)
|
||||
|
||||
const candidates = this.getImportCandidates(absolutePath)
|
||||
for (const candidate of candidates) {
|
||||
if (allASTs.has(candidate)) {
|
||||
return candidate
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate possible file paths for an import.
|
||||
*/
|
||||
private getImportCandidates(basePath: string): string[] {
|
||||
const candidates: string[] = []
|
||||
|
||||
if (/\.(ts|tsx|js|jsx)$/.test(basePath)) {
|
||||
candidates.push(basePath)
|
||||
|
||||
if (basePath.endsWith(".js")) {
|
||||
candidates.push(`${basePath.slice(0, -3)}.ts`)
|
||||
} else if (basePath.endsWith(".jsx")) {
|
||||
candidates.push(`${basePath.slice(0, -4)}.tsx`)
|
||||
}
|
||||
} else {
|
||||
candidates.push(`${basePath}.ts`)
|
||||
candidates.push(`${basePath}.tsx`)
|
||||
candidates.push(`${basePath}.js`)
|
||||
candidates.push(`${basePath}.jsx`)
|
||||
candidates.push(`${basePath}/index.ts`)
|
||||
candidates.push(`${basePath}/index.tsx`)
|
||||
candidates.push(`${basePath}/index.js`)
|
||||
candidates.push(`${basePath}/index.jsx`)
|
||||
}
|
||||
|
||||
return candidates
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all locations of a symbol by name.
|
||||
*/
|
||||
findSymbol(index: SymbolIndex, name: string): SymbolLocation[] {
|
||||
return index.get(name) ?? []
|
||||
}
|
||||
|
||||
/**
|
||||
* Find symbols matching a pattern.
|
||||
*/
|
||||
searchSymbols(index: SymbolIndex, pattern: string): Map<string, SymbolLocation[]> {
|
||||
const results = new Map<string, SymbolLocation[]>()
|
||||
const regex = new RegExp(pattern, "i")
|
||||
|
||||
for (const [name, locations] of index) {
|
||||
if (regex.test(name)) {
|
||||
results.set(name, locations)
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all files that the given file depends on (imports).
|
||||
*/
|
||||
getDependencies(graph: DepsGraph, filePath: string): string[] {
|
||||
return graph.imports.get(filePath) ?? []
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all files that depend on the given file (import it).
|
||||
*/
|
||||
getDependents(graph: DepsGraph, filePath: string): string[] {
|
||||
return graph.importedBy.get(filePath) ?? []
|
||||
}
|
||||
|
||||
/**
|
||||
* Find circular dependencies in the graph.
|
||||
*/
|
||||
findCircularDependencies(graph: DepsGraph): string[][] {
|
||||
const cycles: string[][] = []
|
||||
const visited = new Set<string>()
|
||||
const recursionStack = new Set<string>()
|
||||
|
||||
const dfs = (node: string, path: string[]): void => {
|
||||
visited.add(node)
|
||||
recursionStack.add(node)
|
||||
path.push(node)
|
||||
|
||||
const deps = graph.imports.get(node) ?? []
|
||||
for (const dep of deps) {
|
||||
if (!visited.has(dep)) {
|
||||
dfs(dep, [...path])
|
||||
} else if (recursionStack.has(dep)) {
|
||||
const cycleStart = path.indexOf(dep)
|
||||
if (cycleStart !== -1) {
|
||||
const cycle = [...path.slice(cycleStart), dep]
|
||||
const normalized = this.normalizeCycle(cycle)
|
||||
if (!this.cycleExists(cycles, normalized)) {
|
||||
cycles.push(normalized)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
recursionStack.delete(node)
|
||||
}
|
||||
|
||||
for (const node of graph.imports.keys()) {
|
||||
if (!visited.has(node)) {
|
||||
dfs(node, [])
|
||||
}
|
||||
}
|
||||
|
||||
return cycles
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize a cycle to start with the smallest path.
|
||||
*/
|
||||
private normalizeCycle(cycle: string[]): string[] {
|
||||
if (cycle.length <= 1) {
|
||||
return cycle
|
||||
}
|
||||
|
||||
const withoutLast = cycle.slice(0, -1)
|
||||
const minIndex = withoutLast.reduce(
|
||||
(minIdx, path, idx) => (path < withoutLast[minIdx] ? idx : minIdx),
|
||||
0,
|
||||
)
|
||||
|
||||
const rotated = [...withoutLast.slice(minIndex), ...withoutLast.slice(0, minIndex)]
|
||||
rotated.push(rotated[0])
|
||||
|
||||
return rotated
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a cycle already exists in the list.
|
||||
*/
|
||||
private cycleExists(cycles: string[][], newCycle: string[]): boolean {
|
||||
const newKey = newCycle.join("→")
|
||||
return cycles.some((cycle) => cycle.join("→") === newKey)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get statistics about the indexes.
|
||||
*/
|
||||
getStats(
|
||||
symbolIndex: SymbolIndex,
|
||||
depsGraph: DepsGraph,
|
||||
): {
|
||||
totalSymbols: number
|
||||
symbolsByType: Record<SymbolLocation["type"], number>
|
||||
totalFiles: number
|
||||
totalDependencies: number
|
||||
averageDependencies: number
|
||||
hubs: string[]
|
||||
orphans: string[]
|
||||
} {
|
||||
const symbolsByType: Record<SymbolLocation["type"], number> = {
|
||||
function: 0,
|
||||
class: 0,
|
||||
interface: 0,
|
||||
type: 0,
|
||||
variable: 0,
|
||||
}
|
||||
|
||||
let totalSymbols = 0
|
||||
for (const locations of symbolIndex.values()) {
|
||||
totalSymbols += locations.length
|
||||
for (const loc of locations) {
|
||||
symbolsByType[loc.type]++
|
||||
}
|
||||
}
|
||||
|
||||
const totalFiles = depsGraph.imports.size
|
||||
let totalDependencies = 0
|
||||
const hubs: string[] = []
|
||||
const orphans: string[] = []
|
||||
|
||||
for (const [_filePath, deps] of depsGraph.imports) {
|
||||
totalDependencies += deps.length
|
||||
}
|
||||
|
||||
for (const [filePath, dependents] of depsGraph.importedBy) {
|
||||
if (dependents.length > 5) {
|
||||
hubs.push(filePath)
|
||||
}
|
||||
if (dependents.length === 0 && (depsGraph.imports.get(filePath)?.length ?? 0) === 0) {
|
||||
orphans.push(filePath)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
totalSymbols,
|
||||
symbolsByType,
|
||||
totalFiles,
|
||||
totalDependencies,
|
||||
averageDependencies: totalFiles > 0 ? totalDependencies / totalFiles : 0,
|
||||
hubs: hubs.sort(),
|
||||
orphans: orphans.sort(),
|
||||
}
|
||||
}
|
||||
}
|
||||
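For reference, a minimal sketch of how the cycle detection above behaves. The `queries` instance name is a placeholder (the enclosing class name is not visible in this hunk), and the object literal assumes a DepsGraph is just the `imports`/`importedBy` pair these methods read.

```
// Three files importing each other in a ring: a -> b -> c -> a
const graph = {
    imports: new Map([
        ["a.ts", ["b.ts"]],
        ["b.ts", ["c.ts"]],
        ["c.ts", ["a.ts"]],
    ]),
    importedBy: new Map([
        ["a.ts", ["c.ts"]],
        ["b.ts", ["a.ts"]],
        ["c.ts", ["b.ts"]],
    ]),
}

// Expected: a single cycle, rotated so it starts at the lexicographically
// smallest path: ["a.ts", "b.ts", "c.ts", "a.ts"]
const cycles = queries.findCircularDependencies(graph)
```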
448
packages/ipuaro/src/infrastructure/indexer/MetaAnalyzer.ts
Normal file
@@ -0,0 +1,448 @@
|
||||
import * as path from "node:path"
|
||||
import {
|
||||
type ComplexityMetrics,
|
||||
createFileMeta,
|
||||
type FileMeta,
|
||||
isHubFile,
|
||||
} from "../../domain/value-objects/FileMeta.js"
|
||||
import type { ClassInfo, FileAST, FunctionInfo } from "../../domain/value-objects/FileAST.js"
|
||||
|
||||
/**
|
||||
* Analyzes file metadata including complexity, dependencies, and classification.
|
||||
*/
|
||||
export class MetaAnalyzer {
|
||||
private readonly projectRoot: string
|
||||
|
||||
constructor(projectRoot: string) {
|
||||
this.projectRoot = projectRoot
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze a file and compute its metadata.
|
||||
* @param filePath - Absolute path to the file
|
||||
* @param ast - Parsed AST for the file
|
||||
* @param content - Raw file content (for LOC calculation)
|
||||
* @param allASTs - Map of all file paths to their ASTs (for dependents)
|
||||
*/
|
||||
analyze(
|
||||
filePath: string,
|
||||
ast: FileAST,
|
||||
content: string,
|
||||
allASTs: Map<string, FileAST>,
|
||||
): FileMeta {
|
||||
const complexity = this.calculateComplexity(ast, content)
|
||||
const dependencies = this.resolveDependencies(filePath, ast)
|
||||
const dependents = this.findDependents(filePath, allASTs)
|
||||
const fileType = this.classifyFileType(filePath)
|
||||
const isEntryPoint = this.isEntryPointFile(filePath, dependents.length)
|
||||
|
||||
return createFileMeta({
|
||||
complexity,
|
||||
dependencies,
|
||||
dependents,
|
||||
isHub: isHubFile(dependents.length),
|
||||
isEntryPoint,
|
||||
fileType,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate complexity metrics for a file.
|
||||
*/
|
||||
calculateComplexity(ast: FileAST, content: string): ComplexityMetrics {
|
||||
const loc = this.countLinesOfCode(content)
|
||||
const nesting = this.calculateMaxNesting(ast)
|
||||
const cyclomaticComplexity = this.calculateCyclomaticComplexity(ast)
|
||||
const score = this.calculateComplexityScore(loc, nesting, cyclomaticComplexity)
|
||||
|
||||
return {
|
||||
loc,
|
||||
nesting,
|
||||
cyclomaticComplexity,
|
||||
score,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Count lines of code (excluding empty lines and comments).
|
||||
*/
|
||||
countLinesOfCode(content: string): number {
|
||||
const lines = content.split("\n")
|
||||
let loc = 0
|
||||
let inBlockComment = false
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim()
|
||||
|
||||
if (inBlockComment) {
|
||||
if (trimmed.includes("*/")) {
|
||||
inBlockComment = false
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (trimmed.startsWith("/*")) {
|
||||
if (!trimmed.includes("*/")) {
|
||||
inBlockComment = true
|
||||
continue
|
||||
}
|
||||
const afterComment = trimmed.substring(trimmed.indexOf("*/") + 2).trim()
|
||||
if (afterComment === "" || afterComment.startsWith("//")) {
|
||||
continue
|
||||
}
|
||||
loc++
|
||||
continue
|
||||
}
|
||||
|
||||
if (trimmed === "" || trimmed.startsWith("//")) {
|
||||
continue
|
||||
}
|
||||
|
||||
loc++
|
||||
}
|
||||
|
||||
return loc
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate maximum nesting depth from AST.
|
||||
*/
|
||||
calculateMaxNesting(ast: FileAST): number {
|
||||
let maxNesting = 0
|
||||
|
||||
for (const func of ast.functions) {
|
||||
const depth = this.estimateFunctionNesting(func)
|
||||
maxNesting = Math.max(maxNesting, depth)
|
||||
}
|
||||
|
||||
for (const cls of ast.classes) {
|
||||
const depth = this.estimateClassNesting(cls)
|
||||
maxNesting = Math.max(maxNesting, depth)
|
||||
}
|
||||
|
||||
return maxNesting
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimate nesting depth for a function based on line count.
|
||||
* More accurate nesting would require full AST traversal.
|
||||
*/
|
||||
private estimateFunctionNesting(func: FunctionInfo): number {
|
||||
const lines = func.lineEnd - func.lineStart + 1
|
||||
if (lines <= 5) {
|
||||
return 1
|
||||
}
|
||||
if (lines <= 15) {
|
||||
return 2
|
||||
}
|
||||
if (lines <= 30) {
|
||||
return 3
|
||||
}
|
||||
if (lines <= 50) {
|
||||
return 4
|
||||
}
|
||||
return 5
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimate nesting depth for a class.
|
||||
*/
|
||||
private estimateClassNesting(cls: ClassInfo): number {
|
||||
let maxMethodNesting = 1
|
||||
|
||||
for (const method of cls.methods) {
|
||||
const lines = method.lineEnd - method.lineStart + 1
|
||||
let depth = 1
|
||||
if (lines > 5) {
|
||||
depth = 2
|
||||
}
|
||||
if (lines > 15) {
|
||||
depth = 3
|
||||
}
|
||||
if (lines > 30) {
|
||||
depth = 4
|
||||
}
|
||||
maxMethodNesting = Math.max(maxMethodNesting, depth)
|
||||
}
|
||||
|
||||
return maxMethodNesting + 1
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate cyclomatic complexity from AST.
|
||||
* Base complexity is 1, +1 for each decision point.
|
||||
*/
|
||||
calculateCyclomaticComplexity(ast: FileAST): number {
|
||||
let complexity = 1
|
||||
|
||||
for (const func of ast.functions) {
|
||||
complexity += this.estimateFunctionComplexity(func)
|
||||
}
|
||||
|
||||
for (const cls of ast.classes) {
|
||||
for (const method of cls.methods) {
|
||||
const lines = method.lineEnd - method.lineStart + 1
|
||||
complexity += Math.max(1, Math.floor(lines / 10))
|
||||
}
|
||||
}
|
||||
|
||||
return complexity
|
||||
}
|
||||
|
||||
/**
|
||||
* Estimate function complexity based on size.
|
||||
*/
|
||||
private estimateFunctionComplexity(func: FunctionInfo): number {
|
||||
const lines = func.lineEnd - func.lineStart + 1
|
||||
return Math.max(1, Math.floor(lines / 8))
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate overall complexity score (0-100).
|
||||
*/
|
||||
calculateComplexityScore(loc: number, nesting: number, cyclomatic: number): number {
|
||||
const locWeight = 0.3
|
||||
const nestingWeight = 0.35
|
||||
const cyclomaticWeight = 0.35
|
||||
|
||||
const locScore = Math.min(100, (loc / 500) * 100)
|
||||
const nestingScore = Math.min(100, (nesting / 6) * 100)
|
||||
const cyclomaticScore = Math.min(100, (cyclomatic / 30) * 100)
|
||||
|
||||
const score =
|
||||
locScore * locWeight + nestingScore * nestingWeight + cyclomaticScore * cyclomaticWeight
|
||||
|
||||
return Math.round(Math.min(100, score))
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve internal imports to absolute file paths.
|
||||
*/
|
||||
resolveDependencies(filePath: string, ast: FileAST): string[] {
|
||||
const dependencies: string[] = []
|
||||
const fileDir = path.dirname(filePath)
|
||||
|
||||
for (const imp of ast.imports) {
|
||||
if (imp.type !== "internal") {
|
||||
continue
|
||||
}
|
||||
|
||||
const resolved = this.resolveImportPath(fileDir, imp.from)
|
||||
if (resolved && !dependencies.includes(resolved)) {
|
||||
dependencies.push(resolved)
|
||||
}
|
||||
}
|
||||
|
||||
return dependencies.sort()
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a relative import path to an absolute path.
|
||||
*/
|
||||
private resolveImportPath(fromDir: string, importPath: string): string | null {
|
||||
const absolutePath = path.resolve(fromDir, importPath)
|
||||
const normalized = this.normalizeImportPath(absolutePath)
|
||||
|
||||
if (normalized.startsWith(this.projectRoot)) {
|
||||
return normalized
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize import path by removing file extension if present
|
||||
* and handling index imports.
|
||||
*/
|
||||
private normalizeImportPath(importPath: string): string {
|
||||
let normalized = importPath
|
||||
|
||||
if (normalized.endsWith(".js")) {
|
||||
normalized = `${normalized.slice(0, -3)}.ts`
|
||||
} else if (normalized.endsWith(".jsx")) {
|
||||
normalized = `${normalized.slice(0, -4)}.tsx`
|
||||
} else if (!/\.(ts|tsx|js|jsx)$/.exec(normalized)) {
|
||||
normalized = `${normalized}.ts`
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all files that import the given file.
|
||||
*/
|
||||
findDependents(filePath: string, allASTs: Map<string, FileAST>): string[] {
|
||||
const dependents: string[] = []
|
||||
const normalizedPath = this.normalizePathForComparison(filePath)
|
||||
|
||||
for (const [otherPath, ast] of allASTs) {
|
||||
if (otherPath === filePath) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (this.fileImportsTarget(otherPath, ast, normalizedPath)) {
|
||||
dependents.push(otherPath)
|
||||
}
|
||||
}
|
||||
|
||||
return dependents.sort()
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a file imports the target path.
|
||||
*/
|
||||
private fileImportsTarget(filePath: string, ast: FileAST, normalizedTarget: string): boolean {
|
||||
const fileDir = path.dirname(filePath)
|
||||
|
||||
for (const imp of ast.imports) {
|
||||
if (imp.type !== "internal") {
|
||||
continue
|
||||
}
|
||||
|
||||
const resolvedImport = this.resolveImportPath(fileDir, imp.from)
|
||||
if (!resolvedImport) {
|
||||
continue
|
||||
}
|
||||
|
||||
const normalizedImport = this.normalizePathForComparison(resolvedImport)
|
||||
if (this.pathsMatch(normalizedTarget, normalizedImport)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize path for comparison (handle index.ts and extensions).
|
||||
*/
|
||||
private normalizePathForComparison(filePath: string): string {
|
||||
let normalized = filePath
|
||||
|
||||
if (normalized.endsWith(".js")) {
|
||||
normalized = normalized.slice(0, -3)
|
||||
} else if (normalized.endsWith(".ts")) {
|
||||
normalized = normalized.slice(0, -3)
|
||||
} else if (normalized.endsWith(".jsx")) {
|
||||
normalized = normalized.slice(0, -4)
|
||||
} else if (normalized.endsWith(".tsx")) {
|
||||
normalized = normalized.slice(0, -4)
|
||||
}
|
||||
|
||||
return normalized
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if two normalized paths match (including index.ts resolution).
|
||||
*/
|
||||
private pathsMatch(path1: string, path2: string): boolean {
|
||||
if (path1 === path2) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (path1.endsWith("/index") && path2 === path1.slice(0, -6)) {
|
||||
return true
|
||||
}
|
||||
if (path2.endsWith("/index") && path1 === path2.slice(0, -6)) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Classify file type based on path and name.
|
||||
*/
|
||||
classifyFileType(filePath: string): FileMeta["fileType"] {
|
||||
const basename = path.basename(filePath)
|
||||
const lowercasePath = filePath.toLowerCase()
|
||||
|
||||
if (basename.includes(".test.") || basename.includes(".spec.")) {
|
||||
return "test"
|
||||
}
|
||||
|
||||
if (lowercasePath.includes("/tests/") || lowercasePath.includes("/__tests__/")) {
|
||||
return "test"
|
||||
}
|
||||
|
||||
if (basename.endsWith(".d.ts")) {
|
||||
return "types"
|
||||
}
|
||||
|
||||
if (lowercasePath.includes("/types/") || basename === "types.ts") {
|
||||
return "types"
|
||||
}
|
||||
|
||||
const configPatterns = [
|
||||
"config",
|
||||
"tsconfig",
|
||||
"eslint",
|
||||
"prettier",
|
||||
"vitest",
|
||||
"jest",
|
||||
"babel",
|
||||
"webpack",
|
||||
"vite",
|
||||
"rollup",
|
||||
]
|
||||
|
||||
for (const pattern of configPatterns) {
|
||||
if (basename.toLowerCase().includes(pattern)) {
|
||||
return "config"
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
filePath.endsWith(".ts") ||
|
||||
filePath.endsWith(".tsx") ||
|
||||
filePath.endsWith(".js") ||
|
||||
filePath.endsWith(".jsx")
|
||||
) {
|
||||
return "source"
|
||||
}
|
||||
|
||||
return "unknown"
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if file is an entry point.
|
||||
*/
|
||||
isEntryPointFile(filePath: string, dependentCount: number): boolean {
|
||||
const basename = path.basename(filePath)
|
||||
|
||||
if (basename.startsWith("index.")) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (dependentCount === 0) {
|
||||
return true
|
||||
}
|
||||
|
||||
const entryPatterns = ["main.", "app.", "cli.", "server.", "index."]
|
||||
for (const pattern of entryPatterns) {
|
||||
if (basename.toLowerCase().startsWith(pattern)) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch analyze multiple files.
|
||||
*/
|
||||
analyzeAll(files: Map<string, { ast: FileAST; content: string }>): Map<string, FileMeta> {
|
||||
const allASTs = new Map<string, FileAST>()
|
||||
for (const [filePath, { ast }] of files) {
|
||||
allASTs.set(filePath, ast)
|
||||
}
|
||||
|
||||
const results = new Map<string, FileMeta>()
|
||||
for (const [filePath, { ast, content }] of files) {
|
||||
const meta = this.analyze(filePath, ast, content, allASTs)
|
||||
results.set(filePath, meta)
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
}
|
||||
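A brief usage sketch for MetaAnalyzer, assuming the ASTs and raw file contents were already produced by the parser stage; the `files` variable and the project path are illustrative.

```
const analyzer = new MetaAnalyzer("/abs/path/to/project")

// files: Map<filePath, { ast: FileAST; content: string }>, produced by the parser stage
const metas = analyzer.analyzeAll(files)

for (const [filePath, meta] of metas) {
    // Surface the files most likely to need attention: hubs or high complexity.
    if (meta.isHub || meta.complexity.score > 70) {
        console.log(`${filePath}: complexity ${String(meta.complexity.score)}`)
    }
}
```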
285
packages/ipuaro/src/infrastructure/indexer/Watchdog.ts
Normal file
@@ -0,0 +1,285 @@
|
||||
import * as chokidar from "chokidar"
|
||||
import * as path from "node:path"
|
||||
import { DEFAULT_IGNORE_PATTERNS, SUPPORTED_EXTENSIONS } from "../../domain/constants/index.js"
|
||||
|
||||
export type FileChangeType = "add" | "change" | "unlink"
|
||||
|
||||
export interface FileChangeEvent {
|
||||
type: FileChangeType
|
||||
path: string
|
||||
timestamp: number
|
||||
}
|
||||
|
||||
export type FileChangeCallback = (event: FileChangeEvent) => void
|
||||
|
||||
export interface WatchdogOptions {
|
||||
/** Debounce delay in milliseconds (default: 500) */
|
||||
debounceMs?: number
|
||||
/** Patterns to ignore (default: DEFAULT_IGNORE_PATTERNS) */
|
||||
ignorePatterns?: readonly string[]
|
||||
/** File extensions to watch (default: SUPPORTED_EXTENSIONS) */
|
||||
extensions?: readonly string[]
|
||||
/** Use polling instead of native events (useful for network drives) */
|
||||
usePolling?: boolean
|
||||
/** Polling interval in milliseconds (default: 1000) */
|
||||
pollInterval?: number
|
||||
}
|
||||
|
||||
interface ResolvedWatchdogOptions {
|
||||
debounceMs: number
|
||||
ignorePatterns: readonly string[]
|
||||
extensions: readonly string[]
|
||||
usePolling: boolean
|
||||
pollInterval: number
|
||||
}
|
||||
|
||||
const DEFAULT_OPTIONS: ResolvedWatchdogOptions = {
|
||||
debounceMs: 500,
|
||||
ignorePatterns: DEFAULT_IGNORE_PATTERNS,
|
||||
extensions: SUPPORTED_EXTENSIONS,
|
||||
usePolling: false,
|
||||
pollInterval: 1000,
|
||||
}
|
||||
|
||||
/**
|
||||
* Watches for file changes in a directory using chokidar.
|
||||
*/
|
||||
export class Watchdog {
|
||||
private watcher: chokidar.FSWatcher | null = null
|
||||
private readonly callbacks: FileChangeCallback[] = []
|
||||
private readonly pendingChanges = new Map<string, FileChangeEvent>()
|
||||
private readonly debounceTimers = new Map<string, NodeJS.Timeout>()
|
||||
private readonly options: ResolvedWatchdogOptions
|
||||
private root = ""
|
||||
private isRunning = false
|
||||
|
||||
constructor(options: WatchdogOptions = {}) {
|
||||
this.options = { ...DEFAULT_OPTIONS, ...options }
|
||||
}
|
||||
|
||||
/**
|
||||
* Start watching a directory for file changes.
|
||||
*/
|
||||
start(root: string): void {
|
||||
if (this.isRunning) {
|
||||
void this.stop()
|
||||
}
|
||||
|
||||
this.root = root
|
||||
this.isRunning = true
|
||||
|
||||
const globPatterns = this.buildGlobPatterns(root)
|
||||
const ignorePatterns = this.buildIgnorePatterns()
|
||||
|
||||
this.watcher = chokidar.watch(globPatterns, {
|
||||
ignored: ignorePatterns,
|
||||
persistent: true,
|
||||
ignoreInitial: true,
|
||||
usePolling: this.options.usePolling,
|
||||
interval: this.options.pollInterval,
|
||||
awaitWriteFinish: {
|
||||
stabilityThreshold: 100,
|
||||
pollInterval: 100,
|
||||
},
|
||||
})
|
||||
|
||||
this.watcher.on("add", (filePath) => {
|
||||
this.handleChange("add", filePath)
|
||||
})
|
||||
this.watcher.on("change", (filePath) => {
|
||||
this.handleChange("change", filePath)
|
||||
})
|
||||
this.watcher.on("unlink", (filePath) => {
|
||||
this.handleChange("unlink", filePath)
|
||||
})
|
||||
this.watcher.on("error", (error) => {
|
||||
this.handleError(error)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop watching for file changes.
|
||||
*/
|
||||
async stop(): Promise<void> {
|
||||
if (!this.isRunning) {
|
||||
return
|
||||
}
|
||||
|
||||
for (const timer of this.debounceTimers.values()) {
|
||||
clearTimeout(timer)
|
||||
}
|
||||
this.debounceTimers.clear()
|
||||
this.pendingChanges.clear()
|
||||
|
||||
if (this.watcher) {
|
||||
await this.watcher.close()
|
||||
this.watcher = null
|
||||
}
|
||||
|
||||
this.isRunning = false
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a callback for file change events.
|
||||
*/
|
||||
onFileChange(callback: FileChangeCallback): void {
|
||||
this.callbacks.push(callback)
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a callback.
|
||||
*/
|
||||
offFileChange(callback: FileChangeCallback): void {
|
||||
const index = this.callbacks.indexOf(callback)
|
||||
if (index !== -1) {
|
||||
this.callbacks.splice(index, 1)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the watchdog is currently running.
|
||||
*/
|
||||
isWatching(): boolean {
|
||||
return this.isRunning
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the root directory being watched.
|
||||
*/
|
||||
getRoot(): string {
|
||||
return this.root
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of pending changes waiting to be processed.
|
||||
*/
|
||||
getPendingCount(): number {
|
||||
return this.pendingChanges.size
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle a file change event with debouncing.
|
||||
*/
|
||||
private handleChange(type: FileChangeType, filePath: string): void {
|
||||
if (!this.isValidFile(filePath)) {
|
||||
return
|
||||
}
|
||||
|
||||
const normalizedPath = path.resolve(filePath)
|
||||
|
||||
const event: FileChangeEvent = {
|
||||
type,
|
||||
path: normalizedPath,
|
||||
timestamp: Date.now(),
|
||||
}
|
||||
|
||||
this.pendingChanges.set(normalizedPath, event)
|
||||
|
||||
const existingTimer = this.debounceTimers.get(normalizedPath)
|
||||
if (existingTimer) {
|
||||
clearTimeout(existingTimer)
|
||||
}
|
||||
|
||||
const timer = setTimeout(() => {
|
||||
this.flushChange(normalizedPath)
|
||||
}, this.options.debounceMs)
|
||||
|
||||
this.debounceTimers.set(normalizedPath, timer)
|
||||
}
|
||||
|
||||
/**
|
||||
* Flush a pending change and notify callbacks.
|
||||
*/
|
||||
private flushChange(filePath: string): void {
|
||||
const event = this.pendingChanges.get(filePath)
|
||||
if (!event) {
|
||||
return
|
||||
}
|
||||
|
||||
this.pendingChanges.delete(filePath)
|
||||
this.debounceTimers.delete(filePath)
|
||||
|
||||
for (const callback of this.callbacks) {
|
||||
try {
|
||||
callback(event)
|
||||
} catch {
|
||||
// Silently ignore callback errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle watcher errors.
|
||||
*/
|
||||
private handleError(error: Error): void {
|
||||
// Log error but don't crash
|
||||
console.error(`[Watchdog] Error: ${error.message}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a file should be watched based on extension.
|
||||
*/
|
||||
private isValidFile(filePath: string): boolean {
|
||||
const ext = path.extname(filePath)
|
||||
return this.options.extensions.includes(ext)
|
||||
}
|
||||
|
||||
/**
|
||||
* Build glob patterns for watching.
|
||||
*/
|
||||
private buildGlobPatterns(root: string): string[] {
|
||||
return this.options.extensions.map((ext) => path.join(root, "**", `*${ext}`))
|
||||
}
|
||||
|
||||
/**
|
||||
* Build ignore patterns for chokidar.
|
||||
*/
|
||||
private buildIgnorePatterns(): (string | RegExp)[] {
|
||||
const patterns: (string | RegExp)[] = []
|
||||
|
||||
for (const pattern of this.options.ignorePatterns) {
|
||||
if (pattern.includes("*")) {
|
||||
// "**" is tokenized before the single-"*" pass so its ".*" expansion
// is not corrupted into ".[^/]*".
const regexPattern = pattern
.replace(/\./g, "\\.")
.replace(/\*\*/g, "\u0000")
.replace(/\*/g, "[^/]*")
.replace(/\u0000/g, ".*")
|
||||
patterns.push(new RegExp(regexPattern))
|
||||
} else {
|
||||
patterns.push(`**/${pattern}/**`)
|
||||
}
|
||||
}
|
||||
|
||||
return patterns
|
||||
}
|
||||
|
||||
/**
|
||||
* Force flush all pending changes immediately.
|
||||
*/
|
||||
flushAll(): void {
|
||||
for (const timer of this.debounceTimers.values()) {
|
||||
clearTimeout(timer)
|
||||
}
|
||||
this.debounceTimers.clear()
|
||||
|
||||
for (const filePath of this.pendingChanges.keys()) {
|
||||
this.flushChange(filePath)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get watched paths (for debugging).
|
||||
*/
|
||||
getWatchedPaths(): string[] {
|
||||
if (!this.watcher) {
|
||||
return []
|
||||
}
|
||||
const watched = this.watcher.getWatched()
|
||||
const paths: string[] = []
|
||||
for (const dir of Object.keys(watched)) {
|
||||
for (const file of watched[dir]) {
|
||||
paths.push(path.join(dir, file))
|
||||
}
|
||||
}
|
||||
return paths.sort()
|
||||
}
|
||||
}
|
||||
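A minimal usage sketch for Watchdog; the root path and callback body are illustrative.

```
const watchdog = new Watchdog({ debounceMs: 300 })

watchdog.onFileChange((event) => {
    // event.type is "add" | "change" | "unlink"
    console.log(`[${event.type}] ${event.path}`)
})

watchdog.start("/abs/path/to/project")

// later, e.g. on shutdown
await watchdog.stop()
```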
6
packages/ipuaro/src/infrastructure/indexer/index.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export * from "./FileScanner.js"
|
||||
export * from "./ASTParser.js"
|
||||
export * from "./MetaAnalyzer.js"
|
||||
export * from "./IndexBuilder.js"
|
||||
export * from "./Watchdog.js"
|
||||
export * from "./tree-sitter-types.js"
|
||||
77
packages/ipuaro/src/infrastructure/indexer/tree-sitter-types.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
/**
|
||||
* Tree-sitter node type constants for TypeScript/JavaScript parsing.
|
||||
* These are infrastructure-level constants, not exposed to domain/application layers.
|
||||
*
|
||||
* Source: tree-sitter-typescript/typescript/src/node-types.json
|
||||
*/
|
||||
|
||||
export const NodeType = {
|
||||
// Statements
|
||||
IMPORT_STATEMENT: "import_statement",
|
||||
EXPORT_STATEMENT: "export_statement",
|
||||
LEXICAL_DECLARATION: "lexical_declaration",
|
||||
|
||||
// Declarations
|
||||
FUNCTION_DECLARATION: "function_declaration",
|
||||
CLASS_DECLARATION: "class_declaration",
|
||||
INTERFACE_DECLARATION: "interface_declaration",
|
||||
TYPE_ALIAS_DECLARATION: "type_alias_declaration",
|
||||
|
||||
// Clauses
|
||||
IMPORT_CLAUSE: "import_clause",
|
||||
EXPORT_CLAUSE: "export_clause",
|
||||
EXTENDS_CLAUSE: "extends_clause",
|
||||
IMPLEMENTS_CLAUSE: "implements_clause",
|
||||
EXTENDS_TYPE_CLAUSE: "extends_type_clause",
|
||||
CLASS_HERITAGE: "class_heritage",
|
||||
|
||||
// Import specifiers
|
||||
NAMESPACE_IMPORT: "namespace_import",
|
||||
NAMED_IMPORTS: "named_imports",
|
||||
IMPORT_SPECIFIER: "import_specifier",
|
||||
EXPORT_SPECIFIER: "export_specifier",
|
||||
|
||||
// Class members
|
||||
METHOD_DEFINITION: "method_definition",
|
||||
PUBLIC_FIELD_DEFINITION: "public_field_definition",
|
||||
FIELD_DEFINITION: "field_definition",
|
||||
PROPERTY_SIGNATURE: "property_signature",
|
||||
|
||||
// Parameters
|
||||
REQUIRED_PARAMETER: "required_parameter",
|
||||
OPTIONAL_PARAMETER: "optional_parameter",
|
||||
|
||||
// Expressions & values
|
||||
ARROW_FUNCTION: "arrow_function",
|
||||
FUNCTION: "function",
|
||||
VARIABLE_DECLARATOR: "variable_declarator",
|
||||
|
||||
// Identifiers & types
|
||||
IDENTIFIER: "identifier",
|
||||
TYPE_IDENTIFIER: "type_identifier",
|
||||
|
||||
// Modifiers
|
||||
ASYNC: "async",
|
||||
STATIC: "static",
|
||||
ABSTRACT: "abstract",
|
||||
DEFAULT: "default",
|
||||
ACCESSIBILITY_MODIFIER: "accessibility_modifier",
|
||||
READONLY: "readonly",
|
||||
} as const
|
||||
|
||||
export type NodeTypeValue = (typeof NodeType)[keyof typeof NodeType]
|
||||
|
||||
export const FieldName = {
|
||||
SOURCE: "source",
|
||||
NAME: "name",
|
||||
ALIAS: "alias",
|
||||
DECLARATION: "declaration",
|
||||
PARAMETERS: "parameters",
|
||||
RETURN_TYPE: "return_type",
|
||||
BODY: "body",
|
||||
TYPE: "type",
|
||||
PATTERN: "pattern",
|
||||
VALUE: "value",
|
||||
} as const
|
||||
|
||||
export type FieldNameValue = (typeof FieldName)[keyof typeof FieldName]
|
||||
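A small sketch of how these constants are typically consumed when walking a parsed tree; the `node` parameter stands in for a tree-sitter syntax node, of which only the string `type` field is assumed here.

```
// Structural typing keeps the sketch independent of the tree-sitter bindings.
function isTypeLikeDeclaration(node: { type: string }): boolean {
    return (
        node.type === NodeType.INTERFACE_DECLARATION ||
        node.type === NodeType.TYPE_ALIAS_DECLARATION
    )
}
```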
239
packages/ipuaro/src/infrastructure/llm/OllamaClient.ts
Normal file
@@ -0,0 +1,239 @@
|
||||
import { type Message, Ollama } from "ollama"
|
||||
import type { ILLMClient, LLMResponse } from "../../domain/services/ILLMClient.js"
|
||||
import type { ChatMessage } from "../../domain/value-objects/ChatMessage.js"
|
||||
import type { LLMConfig } from "../../shared/constants/config.js"
|
||||
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||
import { estimateTokens } from "../../shared/utils/tokens.js"
|
||||
import { parseToolCalls } from "./ResponseParser.js"
|
||||
|
||||
/**
|
||||
* Ollama LLM client implementation.
|
||||
* Wraps the Ollama SDK for chat completions with tool support.
|
||||
*/
|
||||
export class OllamaClient implements ILLMClient {
|
||||
private readonly client: Ollama
|
||||
private readonly host: string
|
||||
private readonly model: string
|
||||
private readonly contextWindow: number
|
||||
private readonly temperature: number
|
||||
private readonly timeout: number
|
||||
private abortController: AbortController | null = null
|
||||
|
||||
constructor(config: LLMConfig) {
|
||||
this.host = config.host
|
||||
this.client = new Ollama({ host: this.host })
|
||||
this.model = config.model
|
||||
this.contextWindow = config.contextWindow
|
||||
this.temperature = config.temperature
|
||||
this.timeout = config.timeout
|
||||
}
|
||||
|
||||
/**
|
||||
* Send messages to LLM and get response.
|
||||
* Tool definitions should be included in the system prompt as XML format.
|
||||
*/
|
||||
async chat(messages: ChatMessage[]): Promise<LLMResponse> {
|
||||
const startTime = Date.now()
|
||||
this.abortController = new AbortController()
|
||||
|
||||
try {
|
||||
const ollamaMessages = this.convertMessages(messages)
|
||||
|
||||
const response = await this.client.chat({
|
||||
model: this.model,
|
||||
messages: ollamaMessages,
|
||||
options: {
|
||||
temperature: this.temperature,
|
||||
},
|
||||
stream: false,
|
||||
})
|
||||
|
||||
const timeMs = Date.now() - startTime
|
||||
const parsed = parseToolCalls(response.message.content)
|
||||
|
||||
return {
|
||||
content: parsed.content,
|
||||
toolCalls: parsed.toolCalls,
|
||||
tokens: response.eval_count ?? estimateTokens(response.message.content),
|
||||
timeMs,
|
||||
truncated: false,
|
||||
stopReason: this.determineStopReason(response, parsed.toolCalls),
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.name === "AbortError") {
|
||||
throw IpuaroError.llm("Request was aborted")
|
||||
}
|
||||
throw this.handleError(error)
|
||||
} finally {
|
||||
this.abortController = null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Count tokens in text.
|
||||
* Uses estimation since Ollama doesn't provide a tokenizer endpoint.
|
||||
*/
|
||||
async countTokens(text: string): Promise<number> {
|
||||
return Promise.resolve(estimateTokens(text))
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if LLM service is available.
|
||||
*/
|
||||
async isAvailable(): Promise<boolean> {
|
||||
try {
|
||||
await this.client.list()
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current model name.
|
||||
*/
|
||||
getModelName(): string {
|
||||
return this.model
|
||||
}
|
||||
|
||||
/**
|
||||
* Get context window size.
|
||||
*/
|
||||
getContextWindowSize(): number {
|
||||
return this.contextWindow
|
||||
}
|
||||
|
||||
/**
|
||||
* Pull/download model if not available locally.
|
||||
*/
|
||||
async pullModel(model: string): Promise<void> {
|
||||
try {
|
||||
await this.client.pull({ model, stream: false })
|
||||
} catch (error) {
|
||||
throw this.handleError(error, `Failed to pull model: ${model}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a specific model is available locally.
|
||||
*/
|
||||
async hasModel(model: string): Promise<boolean> {
|
||||
try {
|
||||
const result = await this.client.list()
|
||||
return result.models.some((m) => m.name === model || m.name.startsWith(`${model}:`))
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List available models.
|
||||
*/
|
||||
async listModels(): Promise<string[]> {
|
||||
try {
|
||||
const result = await this.client.list()
|
||||
return result.models.map((m) => m.name)
|
||||
} catch (error) {
|
||||
throw this.handleError(error, "Failed to list models")
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Abort current generation.
|
||||
*/
|
||||
abort(): void {
|
||||
if (this.abortController) {
|
||||
this.abortController.abort()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert ChatMessage array to Ollama Message format.
|
||||
*/
|
||||
private convertMessages(messages: ChatMessage[]): Message[] {
|
||||
return messages.map((msg): Message => {
|
||||
const role = this.convertRole(msg.role)
|
||||
|
||||
if (msg.role === "tool" && msg.toolResults) {
|
||||
return {
|
||||
role: "tool",
|
||||
content: msg.content,
|
||||
}
|
||||
}
|
||||
|
||||
if (msg.role === "assistant" && msg.toolCalls && msg.toolCalls.length > 0) {
|
||||
return {
|
||||
role: "assistant",
|
||||
content: msg.content,
|
||||
tool_calls: msg.toolCalls.map((tc) => ({
|
||||
function: {
|
||||
name: tc.name,
|
||||
arguments: tc.params,
|
||||
},
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
role,
|
||||
content: msg.content,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert message role to Ollama role.
|
||||
*/
|
||||
private convertRole(role: ChatMessage["role"]): "user" | "assistant" | "system" | "tool" {
|
||||
switch (role) {
|
||||
case "user":
|
||||
return "user"
|
||||
case "assistant":
|
||||
return "assistant"
|
||||
case "system":
|
||||
return "system"
|
||||
case "tool":
|
||||
return "tool"
|
||||
default:
|
||||
return "user"
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine stop reason from response.
|
||||
*/
|
||||
private determineStopReason(
|
||||
response: { done_reason?: string },
|
||||
toolCalls: { name: string; params: Record<string, unknown> }[],
|
||||
): "end" | "length" | "tool_use" {
|
||||
if (toolCalls.length > 0) {
|
||||
return "tool_use"
|
||||
}
|
||||
|
||||
if (response.done_reason === "length") {
|
||||
return "length"
|
||||
}
|
||||
|
||||
return "end"
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle and wrap errors.
|
||||
*/
|
||||
private handleError(error: unknown, context?: string): IpuaroError {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
const fullMessage = context ? `${context}: ${message}` : message
|
||||
|
||||
if (message.includes("ECONNREFUSED") || message.includes("fetch failed")) {
|
||||
return IpuaroError.llm(`Cannot connect to Ollama at ${this.host}`)
|
||||
}
|
||||
|
||||
if (message.includes("model") && message.includes("not found")) {
|
||||
return IpuaroError.llm(
|
||||
`Model "${this.model}" not found. Run: ollama pull ${this.model}`,
|
||||
)
|
||||
}
|
||||
|
||||
return IpuaroError.llm(fullMessage)
|
||||
}
|
||||
}
|
||||
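A hedged sketch of wiring up OllamaClient with an LLMConfig carrying the fields the constructor reads (host, model, contextWindow, temperature, timeout); the concrete values are placeholders, and the sketch assumes a ChatMessage can be built from role and content alone.

```
const client = new OllamaClient({
    host: "http://localhost:11434",
    model: "qwen2.5-coder:7b",
    contextWindow: 32768,
    temperature: 0.2,
    timeout: 60_000,
})

if (!(await client.isAvailable())) {
    throw new Error("Ollama is not reachable")
}
if (!(await client.hasModel(client.getModelName()))) {
    await client.pullModel(client.getModelName())
}

// Assumes a ChatMessage can be constructed from role + content alone.
const reply = await client.chat([{ role: "user", content: "Summarize src/index.ts" }])
```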
265
packages/ipuaro/src/infrastructure/llm/ResponseParser.ts
Normal file
@@ -0,0 +1,265 @@
|
||||
import { createToolCall, type ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||
|
||||
/**
|
||||
* Parsed response from LLM.
|
||||
*/
|
||||
export interface ParsedResponse {
|
||||
/** Text content (excluding tool calls) */
|
||||
content: string
|
||||
/** Extracted tool calls */
|
||||
toolCalls: ToolCall[]
|
||||
/** Whether parsing encountered issues */
|
||||
hasParseErrors: boolean
|
||||
/** Parse error messages */
|
||||
parseErrors: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* XML tool call tag pattern.
|
||||
* Matches: <tool_call name="tool_name">...</tool_call>
|
||||
*/
|
||||
const TOOL_CALL_REGEX = /<tool_call\s+name\s*=\s*"([^"]+)">([\s\S]*?)<\/tool_call>/gi
|
||||
|
||||
/**
|
||||
* XML parameter tag pattern.
|
||||
* Matches: <param name="param_name">value</param> or <param_name>value</param_name>
|
||||
*/
|
||||
const PARAM_REGEX_NAMED = /<param\s+name\s*=\s*"([^"]+)">([\s\S]*?)<\/param>/gi
|
||||
const PARAM_REGEX_ELEMENT = /<([a-z_][a-z0-9_]*)>([\s\S]*?)<\/\1>/gi
|
||||
|
||||
/**
|
||||
* CDATA section pattern.
|
||||
* Matches: <![CDATA[...]]>
|
||||
*/
|
||||
const CDATA_REGEX = /<!\[CDATA\[([\s\S]*?)\]\]>/g
|
||||
|
||||
/**
|
||||
* Valid tool names.
|
||||
* Used for validation to catch typos or hallucinations.
|
||||
*/
|
||||
const VALID_TOOL_NAMES = new Set([
|
||||
"get_lines",
|
||||
"get_function",
|
||||
"get_class",
|
||||
"get_structure",
|
||||
"edit_lines",
|
||||
"create_file",
|
||||
"delete_file",
|
||||
"find_references",
|
||||
"find_definition",
|
||||
"get_dependencies",
|
||||
"get_dependents",
|
||||
"get_complexity",
|
||||
"get_todos",
|
||||
"git_status",
|
||||
"git_diff",
|
||||
"git_commit",
|
||||
"run_command",
|
||||
"run_tests",
|
||||
])
|
||||
|
||||
/**
|
||||
* Parse tool calls from LLM response text.
|
||||
* Supports XML format: <tool_call name="get_lines"><path>src/index.ts</path></tool_call>
|
||||
* Validates tool names and provides helpful error messages.
|
||||
*/
|
||||
export function parseToolCalls(response: string): ParsedResponse {
|
||||
const toolCalls: ToolCall[] = []
|
||||
const parseErrors: string[] = []
|
||||
let content = response
|
||||
|
||||
const matches = [...response.matchAll(TOOL_CALL_REGEX)]
|
||||
|
||||
for (const match of matches) {
|
||||
const [fullMatch, toolName, paramsXml] = match
|
||||
|
||||
if (!VALID_TOOL_NAMES.has(toolName)) {
|
||||
parseErrors.push(
|
||||
`Unknown tool "${toolName}". Valid tools: ${[...VALID_TOOL_NAMES].join(", ")}`,
|
||||
)
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
const params = parseParameters(paramsXml)
|
||||
const toolCall = createToolCall(
|
||||
`xml_${String(Date.now())}_${String(toolCalls.length)}`,
|
||||
toolName,
|
||||
params,
|
||||
)
|
||||
toolCalls.push(toolCall)
|
||||
content = content.replace(fullMatch, "")
|
||||
} catch (error) {
|
||||
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||
parseErrors.push(`Failed to parse tool call "${toolName}": ${errorMsg}`)
|
||||
}
|
||||
}
|
||||
|
||||
content = content.trim()
|
||||
|
||||
return {
|
||||
content,
|
||||
toolCalls,
|
||||
hasParseErrors: parseErrors.length > 0,
|
||||
parseErrors,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse parameters from XML content.
|
||||
*/
|
||||
function parseParameters(xml: string): Record<string, unknown> {
|
||||
const params: Record<string, unknown> = {}
|
||||
|
||||
const namedMatches = [...xml.matchAll(PARAM_REGEX_NAMED)]
|
||||
for (const match of namedMatches) {
|
||||
const [, name, value] = match
|
||||
params[name] = parseValue(value)
|
||||
}
|
||||
|
||||
if (namedMatches.length === 0) {
|
||||
const elementMatches = [...xml.matchAll(PARAM_REGEX_ELEMENT)]
|
||||
for (const match of elementMatches) {
|
||||
const [, name, value] = match
|
||||
params[name] = parseValue(value)
|
||||
}
|
||||
}
|
||||
|
||||
return params
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a value string to appropriate type.
|
||||
* Supports CDATA sections for multiline content.
|
||||
*/
|
||||
function parseValue(value: string): unknown {
|
||||
const trimmed = value.trim()
|
||||
|
||||
const cdataMatches = [...trimmed.matchAll(CDATA_REGEX)]
|
||||
if (cdataMatches.length > 0 && cdataMatches[0][1] !== undefined) {
|
||||
return cdataMatches[0][1]
|
||||
}
|
||||
|
||||
if (trimmed === "true") {
|
||||
return true
|
||||
}
|
||||
|
||||
if (trimmed === "false") {
|
||||
return false
|
||||
}
|
||||
|
||||
if (trimmed === "null") {
|
||||
return null
|
||||
}
|
||||
|
||||
const num = Number(trimmed)
|
||||
if (!isNaN(num) && trimmed !== "") {
|
||||
return num
|
||||
}
|
||||
|
||||
if (
|
||||
(trimmed.startsWith("[") && trimmed.endsWith("]")) ||
|
||||
(trimmed.startsWith("{") && trimmed.endsWith("}"))
|
||||
) {
|
||||
try {
|
||||
return JSON.parse(trimmed)
|
||||
} catch {
|
||||
return trimmed
|
||||
}
|
||||
}
|
||||
|
||||
return trimmed
|
||||
}
|
||||
|
||||
/**
|
||||
* Format tool calls to XML for prompt injection.
|
||||
* Useful when you need to show the LLM the expected format.
|
||||
*/
|
||||
export function formatToolCallsAsXml(toolCalls: ToolCall[]): string {
|
||||
return toolCalls
|
||||
.map((tc) => {
|
||||
const params = Object.entries(tc.params)
|
||||
.map(([key, value]) => ` <${key}>${formatValueForXml(value)}</${key}>`)
|
||||
.join("\n")
|
||||
return `<tool_call name="${tc.name}">\n${params}\n</tool_call>`
|
||||
})
|
||||
.join("\n\n")
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a value for XML output.
|
||||
*/
|
||||
function formatValueForXml(value: unknown): string {
|
||||
if (value === null || value === undefined) {
|
||||
return ""
|
||||
}
|
||||
|
||||
if (typeof value === "object") {
|
||||
return JSON.stringify(value)
|
||||
}
|
||||
|
||||
if (typeof value === "string") {
|
||||
return value
|
||||
}
|
||||
|
||||
if (typeof value === "number" || typeof value === "boolean") {
|
||||
return String(value)
|
||||
}
|
||||
|
||||
return JSON.stringify(value)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract thinking/reasoning from response.
|
||||
* Matches content between <thinking>...</thinking> tags.
|
||||
*/
|
||||
export function extractThinking(response: string): { thinking: string; content: string } {
|
||||
const thinkingRegex = /<thinking>([\s\S]*?)<\/thinking>/gi
|
||||
const matches = [...response.matchAll(thinkingRegex)]
|
||||
|
||||
if (matches.length === 0) {
|
||||
return { thinking: "", content: response }
|
||||
}
|
||||
|
||||
let content = response
|
||||
const thoughts: string[] = []
|
||||
|
||||
for (const match of matches) {
|
||||
thoughts.push(match[1].trim())
|
||||
content = content.replace(match[0], "")
|
||||
}
|
||||
|
||||
return {
|
||||
thinking: thoughts.join("\n\n"),
|
||||
content: content.trim(),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if response contains tool calls.
|
||||
*/
|
||||
export function hasToolCalls(response: string): boolean {
|
||||
// Reset lastIndex so the shared global regex does not carry state between calls.
TOOL_CALL_REGEX.lastIndex = 0
return TOOL_CALL_REGEX.test(response)
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate tool call parameters against expected schema.
|
||||
*/
|
||||
export function validateToolCallParams(
|
||||
toolName: string,
|
||||
params: Record<string, unknown>,
|
||||
requiredParams: string[],
|
||||
): { valid: boolean; errors: string[] } {
|
||||
const errors: string[] = []
|
||||
|
||||
for (const param of requiredParams) {
|
||||
if (!(param in params) || params[param] === undefined || params[param] === null) {
|
||||
errors.push(`Missing required parameter: ${param}`)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
valid: errors.length === 0,
|
||||
errors,
|
||||
}
|
||||
}
|
||||
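An illustration of the XML format parseToolCalls accepts, matching the regexes above; the response string is fabricated for the example.

```
const response = [
    "I will read the file first.",
    '<tool_call name="get_lines">',
    "  <path>src/index.ts</path>",
    "  <start>1</start>",
    "  <end>40</end>",
    "</tool_call>",
].join("\n")

const parsed = parseToolCalls(response)
// parsed.content holds "I will read the file first."
// parsed.toolCalls holds one call named "get_lines" with
// params { path: "src/index.ts", start: 1, end: 40 } (numbers coerced by parseValue).
// An unknown tool name would land in parsed.parseErrors instead of producing a call.
```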
48
packages/ipuaro/src/infrastructure/llm/index.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
// LLM infrastructure exports
|
||||
export { OllamaClient } from "./OllamaClient.js"
|
||||
export {
|
||||
SYSTEM_PROMPT,
|
||||
buildInitialContext,
|
||||
buildFileContext,
|
||||
truncateContext,
|
||||
type ProjectStructure,
|
||||
} from "./prompts.js"
|
||||
export {
|
||||
ALL_TOOLS,
|
||||
READ_TOOLS,
|
||||
EDIT_TOOLS,
|
||||
SEARCH_TOOLS,
|
||||
ANALYSIS_TOOLS,
|
||||
GIT_TOOLS,
|
||||
RUN_TOOLS,
|
||||
CONFIRMATION_TOOLS,
|
||||
requiresConfirmation,
|
||||
getToolDef,
|
||||
getToolsByCategory,
|
||||
GET_LINES_TOOL,
|
||||
GET_FUNCTION_TOOL,
|
||||
GET_CLASS_TOOL,
|
||||
GET_STRUCTURE_TOOL,
|
||||
EDIT_LINES_TOOL,
|
||||
CREATE_FILE_TOOL,
|
||||
DELETE_FILE_TOOL,
|
||||
FIND_REFERENCES_TOOL,
|
||||
FIND_DEFINITION_TOOL,
|
||||
GET_DEPENDENCIES_TOOL,
|
||||
GET_DEPENDENTS_TOOL,
|
||||
GET_COMPLEXITY_TOOL,
|
||||
GET_TODOS_TOOL,
|
||||
GIT_STATUS_TOOL,
|
||||
GIT_DIFF_TOOL,
|
||||
GIT_COMMIT_TOOL,
|
||||
RUN_COMMAND_TOOL,
|
||||
RUN_TESTS_TOOL,
|
||||
} from "./toolDefs.js"
|
||||
export {
|
||||
parseToolCalls,
|
||||
formatToolCallsAsXml,
|
||||
extractThinking,
|
||||
hasToolCalls,
|
||||
validateToolCallParams,
|
||||
type ParsedResponse,
|
||||
} from "./ResponseParser.js"
|
||||
365
packages/ipuaro/src/infrastructure/llm/prompts.ts
Normal file
@@ -0,0 +1,365 @@
|
||||
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||
import type { FileMeta } from "../../domain/value-objects/FileMeta.js"
|
||||
|
||||
/**
|
||||
* Project structure for context building.
|
||||
*/
|
||||
export interface ProjectStructure {
|
||||
name: string
|
||||
rootPath: string
|
||||
files: string[]
|
||||
directories: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* System prompt for the ipuaro AI agent.
|
||||
*/
|
||||
export const SYSTEM_PROMPT = `You are ipuaro, a local AI code assistant specialized in helping developers understand and modify their codebase. You operate within a single project directory and have access to powerful tools for reading, searching, analyzing, and editing code.
|
||||
|
||||
## Core Principles
|
||||
|
||||
1. **Lazy Loading**: You don't have the full code in context. Use tools to fetch exactly what you need.
|
||||
2. **Precision**: Always verify file paths and line numbers before making changes.
|
||||
3. **Safety**: Confirm destructive operations. Never execute dangerous commands.
|
||||
4. **Efficiency**: Minimize context usage. Request only necessary code sections.
|
||||
|
||||
## Tool Calling Format
|
||||
|
||||
When you need to use a tool, format your call as XML:
|
||||
|
||||
<tool_call name="tool_name">
|
||||
<param_name>value</param_name>
|
||||
<another_param>value</another_param>
|
||||
</tool_call>
|
||||
|
||||
You can call multiple tools in one response. Always wait for tool results before making conclusions.
|
||||
|
||||
**Examples:**
|
||||
|
||||
<tool_call name="get_lines">
|
||||
<path>src/index.ts</path>
|
||||
<start>1</start>
|
||||
<end>50</end>
|
||||
</tool_call>
|
||||
|
||||
<tool_call name="edit_lines">
|
||||
<path>src/utils.ts</path>
|
||||
<start>10</start>
|
||||
<end>15</end>
|
||||
<content>const newCode = "hello";</content>
|
||||
</tool_call>
|
||||
|
||||
<tool_call name="find_references">
|
||||
<symbol>getUserById</symbol>
|
||||
</tool_call>
|
||||
|
||||
## Available Tools
|
||||
|
||||
### Reading Tools
|
||||
- \`get_lines(path, start?, end?)\`: Get specific lines from a file
|
||||
- \`get_function(path, name)\`: Get a function by name
|
||||
- \`get_class(path, name)\`: Get a class by name
|
||||
- \`get_structure(path?, depth?)\`: Get project directory structure
|
||||
|
||||
### Editing Tools (require confirmation)
|
||||
- \`edit_lines(path, start, end, content)\`: Replace specific lines in a file
|
||||
- \`create_file(path, content)\`: Create a new file
|
||||
- \`delete_file(path)\`: Delete a file
|
||||
|
||||
### Search Tools
|
||||
- \`find_references(symbol, path?)\`: Find all usages of a symbol
|
||||
- \`find_definition(symbol)\`: Find where a symbol is defined
|
||||
|
||||
### Analysis Tools
|
||||
- \`get_dependencies(path)\`: Get files this file imports
|
||||
- \`get_dependents(path)\`: Get files that import this file
|
||||
- \`get_complexity(path?, limit?)\`: Get complexity metrics
|
||||
- \`get_todos(path?, type?)\`: Find TODO/FIXME comments
|
||||
|
||||
### Git Tools
|
||||
- \`git_status()\`: Get repository status
|
||||
- \`git_diff(path?, staged?)\`: Get uncommitted changes
|
||||
- \`git_commit(message, files?)\`: Create a commit (requires confirmation)
|
||||
|
||||
### Run Tools
|
||||
- \`run_command(command, timeout?)\`: Execute a shell command (security checked)
|
||||
- \`run_tests(path?, filter?, watch?)\`: Run the test suite
|
||||
|
||||
## Response Guidelines
|
||||
|
||||
1. **Be concise**: Don't repeat information already in context.
|
||||
2. **Show your work**: Explain what tools you're using and why.
|
||||
3. **Verify before editing**: Always read the target code before modifying it.
|
||||
4. **Handle errors gracefully**: If a tool fails, explain what went wrong and suggest alternatives.
|
||||
|
||||
## Code Editing Rules
|
||||
|
||||
1. Always use \`get_lines\` or \`get_function\` before \`edit_lines\`.
|
||||
2. Provide exact line numbers for edits.
|
||||
3. For large changes, break into multiple small edits.
|
||||
4. After editing, suggest running tests if available.
|
||||
|
||||
## Safety Rules
|
||||
|
||||
1. Never execute commands that could harm the system.
|
||||
2. Never expose sensitive data (API keys, passwords).
|
||||
3. Always confirm file deletions and destructive git operations.
|
||||
4. Stay within the project directory.
|
||||
|
||||
When you need to perform an action, use the appropriate tool. Think step by step about what information you need and which tools will provide it most efficiently.`
|
||||
|
||||
/**
|
||||
* Build initial context from project structure and AST metadata.
|
||||
* Returns a compact representation without actual code.
|
||||
*/
|
||||
export function buildInitialContext(
|
||||
structure: ProjectStructure,
|
||||
asts: Map<string, FileAST>,
|
||||
metas?: Map<string, FileMeta>,
|
||||
): string {
|
||||
const sections: string[] = []
|
||||
|
||||
sections.push(formatProjectHeader(structure))
|
||||
sections.push(formatDirectoryTree(structure))
|
||||
sections.push(formatFileOverview(asts, metas))
|
||||
|
||||
return sections.join("\n\n")
|
||||
}
|
||||
|
||||
/**
|
||||
* Format project header section.
|
||||
*/
|
||||
function formatProjectHeader(structure: ProjectStructure): string {
|
||||
const fileCount = String(structure.files.length)
|
||||
const dirCount = String(structure.directories.length)
|
||||
return `# Project: ${structure.name}
|
||||
Root: ${structure.rootPath}
|
||||
Files: ${fileCount} | Directories: ${dirCount}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Format directory tree.
|
||||
*/
|
||||
function formatDirectoryTree(structure: ProjectStructure): string {
|
||||
const lines: string[] = ["## Structure", ""]
|
||||
|
||||
const sortedDirs = [...structure.directories].sort()
|
||||
for (const dir of sortedDirs) {
|
||||
const depth = dir.split("/").length - 1
|
||||
const indent = " ".repeat(depth)
|
||||
const name = dir.split("/").pop() ?? dir
|
||||
lines.push(`${indent}${name}/`)
|
||||
}
|
||||
|
||||
return lines.join("\n")
|
||||
}
|
||||
|
||||
/**
|
||||
* Format file overview with AST summaries.
|
||||
*/
|
||||
function formatFileOverview(asts: Map<string, FileAST>, metas?: Map<string, FileMeta>): string {
|
||||
const lines: string[] = ["## Files", ""]
|
||||
|
||||
const sortedPaths = [...asts.keys()].sort()
|
||||
for (const path of sortedPaths) {
|
||||
const ast = asts.get(path)
|
||||
if (!ast) {
|
||||
continue
|
||||
}
|
||||
|
||||
const meta = metas?.get(path)
|
||||
lines.push(formatFileSummary(path, ast, meta))
|
||||
}
|
||||
|
||||
return lines.join("\n")
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a single file's AST summary.
|
||||
*/
|
||||
function formatFileSummary(path: string, ast: FileAST, meta?: FileMeta): string {
|
||||
const parts: string[] = []
|
||||
|
||||
if (ast.functions.length > 0) {
|
||||
const names = ast.functions.map((f) => f.name).join(", ")
|
||||
parts.push(`fn: ${names}`)
|
||||
}
|
||||
|
||||
if (ast.classes.length > 0) {
|
||||
const names = ast.classes.map((c) => c.name).join(", ")
|
||||
parts.push(`class: ${names}`)
|
||||
}
|
||||
|
||||
if (ast.interfaces.length > 0) {
|
||||
const names = ast.interfaces.map((i) => i.name).join(", ")
|
||||
parts.push(`interface: ${names}`)
|
||||
}
|
||||
|
||||
if (ast.typeAliases.length > 0) {
|
||||
const names = ast.typeAliases.map((t) => t.name).join(", ")
|
||||
parts.push(`type: ${names}`)
|
||||
}
|
||||
|
||||
const summary = parts.length > 0 ? ` [${parts.join(" | ")}]` : ""
|
||||
const flags = formatFileFlags(meta)
|
||||
|
||||
return `- ${path}${summary}${flags}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Format file metadata flags.
|
||||
*/
|
||||
function formatFileFlags(meta?: FileMeta): string {
|
||||
if (!meta) {
|
||||
return ""
|
||||
}
|
||||
|
||||
const flags: string[] = []
|
||||
|
||||
if (meta.isHub) {
|
||||
flags.push("hub")
|
||||
}
|
||||
|
||||
if (meta.isEntryPoint) {
|
||||
flags.push("entry")
|
||||
}
|
||||
|
||||
if (meta.complexity.score > 70) {
|
||||
flags.push("complex")
|
||||
}
|
||||
|
||||
return flags.length > 0 ? ` (${flags.join(", ")})` : ""
|
||||
}
|
||||
|
||||
/**
|
||||
* Format line range for display.
|
||||
*/
|
||||
function formatLineRange(start: number, end: number): string {
|
||||
return `[${String(start)}-${String(end)}]`
|
||||
}
|
||||
|
||||
/**
|
||||
* Format imports section.
|
||||
*/
|
||||
function formatImportsSection(ast: FileAST): string[] {
|
||||
if (ast.imports.length === 0) {
|
||||
return []
|
||||
}
|
||||
const lines = ["### Imports"]
|
||||
for (const imp of ast.imports) {
|
||||
lines.push(`- ${imp.name} from "${imp.from}" (${imp.type})`)
|
||||
}
|
||||
lines.push("")
|
||||
return lines
|
||||
}
|
||||
|
||||
/**
|
||||
* Format exports section.
|
||||
*/
|
||||
function formatExportsSection(ast: FileAST): string[] {
|
||||
if (ast.exports.length === 0) {
|
||||
return []
|
||||
}
|
||||
const lines = ["### Exports"]
|
||||
for (const exp of ast.exports) {
|
||||
const defaultMark = exp.isDefault ? " (default)" : ""
|
||||
lines.push(`- ${exp.kind} ${exp.name}${defaultMark}`)
|
||||
}
|
||||
lines.push("")
|
||||
return lines
|
||||
}
|
||||
|
||||
/**
|
||||
* Format functions section.
|
||||
*/
|
||||
function formatFunctionsSection(ast: FileAST): string[] {
|
||||
if (ast.functions.length === 0) {
|
||||
return []
|
||||
}
|
||||
const lines = ["### Functions"]
|
||||
for (const fn of ast.functions) {
|
||||
const params = fn.params.map((p) => p.name).join(", ")
|
||||
const asyncMark = fn.isAsync ? "async " : ""
|
||||
const range = formatLineRange(fn.lineStart, fn.lineEnd)
|
||||
lines.push(`- ${asyncMark}${fn.name}(${params}) ${range}`)
|
||||
}
|
||||
lines.push("")
|
||||
return lines
|
||||
}
|
||||
|
||||
/**
|
||||
* Format classes section.
|
||||
*/
|
||||
function formatClassesSection(ast: FileAST): string[] {
|
||||
if (ast.classes.length === 0) {
|
||||
return []
|
||||
}
|
||||
const lines = ["### Classes"]
|
||||
for (const cls of ast.classes) {
|
||||
const ext = cls.extends ? ` extends ${cls.extends}` : ""
|
||||
const impl = cls.implements.length > 0 ? ` implements ${cls.implements.join(", ")}` : ""
|
||||
const range = formatLineRange(cls.lineStart, cls.lineEnd)
|
||||
lines.push(`- ${cls.name}${ext}${impl} ${range}`)
|
||||
|
||||
for (const method of cls.methods) {
|
||||
const vis = method.visibility === "public" ? "" : `${method.visibility} `
|
||||
const methodRange = formatLineRange(method.lineStart, method.lineEnd)
|
||||
lines.push(` - ${vis}${method.name}() ${methodRange}`)
|
||||
}
|
||||
}
|
||||
lines.push("")
|
||||
return lines
|
||||
}
|
||||
|
||||
/**
|
||||
* Format metadata section.
|
||||
*/
|
||||
function formatMetadataSection(meta: FileMeta): string[] {
|
||||
const loc = String(meta.complexity.loc)
|
||||
const score = String(meta.complexity.score)
|
||||
const deps = String(meta.dependencies.length)
|
||||
const dependents = String(meta.dependents.length)
|
||||
return [
|
||||
"### Metadata",
|
||||
`- LOC: ${loc}`,
|
||||
`- Complexity: ${score}/100`,
|
||||
`- Dependencies: ${deps}`,
|
||||
`- Dependents: ${dependents}`,
|
||||
]
|
||||
}
|
||||
|
||||
/**
|
||||
* Build context for a specific file request.
|
||||
*/
|
||||
export function buildFileContext(path: string, ast: FileAST, meta?: FileMeta): string {
|
||||
const lines: string[] = [`## ${path}`, ""]
|
||||
|
||||
lines.push(...formatImportsSection(ast))
|
||||
lines.push(...formatExportsSection(ast))
|
||||
lines.push(...formatFunctionsSection(ast))
|
||||
lines.push(...formatClassesSection(ast))
|
||||
|
||||
if (meta) {
|
||||
lines.push(...formatMetadataSection(meta))
|
||||
}
|
||||
|
||||
return lines.join("\n")
|
||||
}
|
||||
|
||||
/**
|
||||
* Truncate context to fit within token budget.
|
||||
*/
|
||||
export function truncateContext(context: string, maxTokens: number): string {
|
||||
const charsPerToken = 4
|
||||
const maxChars = maxTokens * charsPerToken
|
||||
|
||||
if (context.length <= maxChars) {
|
||||
return context
|
||||
}
|
||||
|
||||
const truncated = context.slice(0, maxChars - 100)
|
||||
const lastNewline = truncated.lastIndexOf("\n")
|
||||
const remaining = String(context.length - lastNewline)
|
||||
|
||||
return `${truncated.slice(0, lastNewline)}\n\n... (truncated, ${remaining} chars remaining)`
|
||||
}
|
||||
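A quick illustration of the truncation budget: truncateContext assumes roughly 4 characters per token, so a context survives untouched only while it fits in maxTokens * 4 characters; the `structure`, `asts`, and `metas` variables are illustrative.

```
const context = buildInitialContext(structure, asts, metas)

// With a 2000-token budget the context is kept whole only if it fits in
// ~8000 characters; otherwise it is cut at the last newline before
// character 7900 and a "... (truncated, N chars remaining)" marker is appended.
const prompt = truncateContext(context, 2000)
```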
511
packages/ipuaro/src/infrastructure/llm/toolDefs.ts
Normal file
@@ -0,0 +1,511 @@
|
||||
import type { ToolDef } from "../../shared/types/tool-definitions.js"
|
||||
|
||||
/**
|
||||
* Tool definitions for ipuaro LLM.
|
||||
* 18 tools across 6 categories: read, edit, search, analysis, git, run.
|
||||
*/
|
||||
|
||||
/*
|
||||
* =============================================================================
|
||||
* Read Tools (4)
|
||||
* =============================================================================
|
||||
*/
|
||||
|
||||
export const GET_LINES_TOOL: ToolDef = {
|
||||
name: "get_lines",
|
||||
description:
|
||||
"Get specific lines from a file. Returns the content with line numbers. " +
|
||||
"If no range is specified, returns the entire file.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "start",
|
||||
type: "number",
|
||||
description: "Start line number (1-based, inclusive)",
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
name: "end",
|
||||
type: "number",
|
||||
description: "End line number (1-based, inclusive)",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const GET_FUNCTION_TOOL: ToolDef = {
|
||||
name: "get_function",
|
||||
description:
|
||||
"Get a function's source code by name. Uses AST to find exact line range. " +
|
||||
"Returns the function code with line numbers.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "name",
|
||||
type: "string",
|
||||
description: "Function name to retrieve",
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const GET_CLASS_TOOL: ToolDef = {
|
||||
name: "get_class",
|
||||
description:
|
||||
"Get a class's source code by name. Uses AST to find exact line range. " +
|
||||
"Returns the class code with line numbers.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "name",
|
||||
type: "string",
|
||||
description: "Class name to retrieve",
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const GET_STRUCTURE_TOOL: ToolDef = {
|
||||
name: "get_structure",
|
||||
description:
|
||||
"Get project directory structure as a tree. " +
|
||||
"If path is specified, shows structure of that subdirectory only.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "Subdirectory path relative to project root (optional, defaults to root)",
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
name: "depth",
|
||||
type: "number",
|
||||
description: "Maximum depth to traverse (default: unlimited)",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
/*
|
||||
* =============================================================================
|
||||
* Edit Tools (3) - All require confirmation
|
||||
* =============================================================================
|
||||
*/
|
||||
|
||||
export const EDIT_LINES_TOOL: ToolDef = {
|
||||
name: "edit_lines",
|
||||
description:
|
||||
"Replace lines in a file with new content. Requires reading the file first. " +
|
||||
"Will show diff and ask for confirmation before applying.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "start",
|
||||
type: "number",
|
||||
description: "Start line number (1-based, inclusive) to replace",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "end",
|
||||
type: "number",
|
||||
description: "End line number (1-based, inclusive) to replace",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "content",
|
||||
type: "string",
|
||||
description: "New content to insert (can be multiple lines)",
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const CREATE_FILE_TOOL: ToolDef = {
|
||||
name: "create_file",
|
||||
description:
|
||||
"Create a new file with specified content. " +
|
||||
"Will fail if file already exists. Will ask for confirmation.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "content",
|
||||
type: "string",
|
||||
description: "File content",
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const DELETE_FILE_TOOL: ToolDef = {
|
||||
name: "delete_file",
|
||||
description:
|
||||
"Delete a file from the project. " +
|
||||
"Will ask for confirmation. Previous content is saved to undo stack.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
/*
|
||||
* =============================================================================
|
||||
* Search Tools (2)
|
||||
* =============================================================================
|
||||
*/
|
||||
|
||||
export const FIND_REFERENCES_TOOL: ToolDef = {
|
||||
name: "find_references",
|
||||
description:
|
||||
"Find all usages of a symbol across the codebase. " +
|
||||
"Returns list of file paths, line numbers, and context.",
|
||||
parameters: [
|
||||
{
|
||||
name: "symbol",
|
||||
type: "string",
|
||||
description: "Symbol name to search for (function, class, variable, etc.)",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "Limit search to specific file or directory",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const FIND_DEFINITION_TOOL: ToolDef = {
|
||||
name: "find_definition",
|
||||
description:
|
||||
"Find where a symbol is defined. " + "Returns file path, line number, and symbol type.",
|
||||
parameters: [
|
||||
{
|
||||
name: "symbol",
|
||||
type: "string",
|
||||
description: "Symbol name to find definition for",
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
/*
|
||||
* =============================================================================
|
||||
* Analysis Tools (4)
|
||||
* =============================================================================
|
||||
*/
|
||||
|
||||
export const GET_DEPENDENCIES_TOOL: ToolDef = {
|
||||
name: "get_dependencies",
|
||||
description:
|
||||
"Get files that this file imports (internal dependencies). " +
|
||||
"Returns list of imported file paths.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const GET_DEPENDENTS_TOOL: ToolDef = {
|
||||
name: "get_dependents",
|
||||
description:
|
||||
"Get files that import this file (reverse dependencies). " +
|
||||
"Returns list of file paths that depend on this file.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const GET_COMPLEXITY_TOOL: ToolDef = {
|
||||
name: "get_complexity",
|
||||
description:
|
||||
"Get complexity metrics for a file or the entire project. " +
|
||||
"Returns LOC, nesting depth, cyclomatic complexity, and overall score.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path (optional, defaults to all files sorted by complexity)",
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
name: "limit",
|
||||
type: "number",
|
||||
description: "Max files to return when showing all (default: 10)",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const GET_TODOS_TOOL: ToolDef = {
|
||||
name: "get_todos",
|
||||
description:
|
||||
"Find TODO, FIXME, HACK, and XXX comments in the codebase. " +
|
||||
"Returns list with file paths, line numbers, and comment text.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "Limit search to specific file or directory",
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
name: "type",
|
||||
type: "string",
|
||||
description: "Filter by comment type",
|
||||
required: false,
|
||||
enum: ["TODO", "FIXME", "HACK", "XXX"],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
/*
|
||||
* =============================================================================
|
||||
* Git Tools (3)
|
||||
* =============================================================================
|
||||
*/
|
||||
|
||||
export const GIT_STATUS_TOOL: ToolDef = {
|
||||
name: "git_status",
|
||||
description:
|
||||
"Get current git repository status. " +
|
||||
"Returns branch name, staged files, modified files, and untracked files.",
|
||||
parameters: [],
|
||||
}
|
||||
|
||||
export const GIT_DIFF_TOOL: ToolDef = {
|
||||
name: "git_diff",
|
||||
description:
|
||||
"Get uncommitted changes (diff). " + "Shows what has changed but not yet committed.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "Limit diff to specific file or directory",
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
name: "staged",
|
||||
type: "boolean",
|
||||
description: "Show only staged changes (default: false, shows all)",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const GIT_COMMIT_TOOL: ToolDef = {
|
||||
name: "git_commit",
|
||||
description:
|
||||
"Create a git commit with the specified message. " +
|
||||
"Will ask for confirmation. Optionally stage specific files first.",
|
||||
parameters: [
|
||||
{
|
||||
name: "message",
|
||||
type: "string",
|
||||
description: "Commit message",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "files",
|
||||
type: "array",
|
||||
description: "Files to stage before commit (optional, defaults to all staged)",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
/*
|
||||
* =============================================================================
|
||||
* Run Tools (2)
|
||||
* =============================================================================
|
||||
*/
|
||||
|
||||
export const RUN_COMMAND_TOOL: ToolDef = {
|
||||
name: "run_command",
|
||||
description:
|
||||
"Execute a shell command in the project directory. " +
|
||||
"Commands are checked against blacklist/whitelist for security. " +
|
||||
"Unknown commands require user confirmation.",
|
||||
parameters: [
|
||||
{
|
||||
name: "command",
|
||||
type: "string",
|
||||
description: "Shell command to execute",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "timeout",
|
||||
type: "number",
|
||||
description: "Timeout in milliseconds (default: 30000)",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
export const RUN_TESTS_TOOL: ToolDef = {
|
||||
name: "run_tests",
|
||||
description:
|
||||
"Run the project's test suite. Auto-detects test runner (vitest, jest, npm test). " +
|
||||
"Returns test results summary.",
|
||||
parameters: [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "Run tests for specific file or directory",
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
name: "filter",
|
||||
type: "string",
|
||||
description: "Filter tests by name pattern",
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
name: "watch",
|
||||
type: "boolean",
|
||||
description: "Run in watch mode (default: false)",
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
/*
|
||||
* =============================================================================
|
||||
* Tool Collection
|
||||
* =============================================================================
|
||||
*/
|
||||
|
||||
/**
|
||||
* All read tools (no confirmation required).
|
||||
*/
|
||||
export const READ_TOOLS: ToolDef[] = [
|
||||
GET_LINES_TOOL,
|
||||
GET_FUNCTION_TOOL,
|
||||
GET_CLASS_TOOL,
|
||||
GET_STRUCTURE_TOOL,
|
||||
]
|
||||
|
||||
/**
|
||||
* All edit tools (require confirmation).
|
||||
*/
|
||||
export const EDIT_TOOLS: ToolDef[] = [EDIT_LINES_TOOL, CREATE_FILE_TOOL, DELETE_FILE_TOOL]
|
||||
|
||||
/**
|
||||
* All search tools (no confirmation required).
|
||||
*/
|
||||
export const SEARCH_TOOLS: ToolDef[] = [FIND_REFERENCES_TOOL, FIND_DEFINITION_TOOL]
|
||||
|
||||
/**
|
||||
* All analysis tools (no confirmation required).
|
||||
*/
|
||||
export const ANALYSIS_TOOLS: ToolDef[] = [
|
||||
GET_DEPENDENCIES_TOOL,
|
||||
GET_DEPENDENTS_TOOL,
|
||||
GET_COMPLEXITY_TOOL,
|
||||
GET_TODOS_TOOL,
|
||||
]
|
||||
|
||||
/**
|
||||
* All git tools (git_commit requires confirmation).
|
||||
*/
|
||||
export const GIT_TOOLS: ToolDef[] = [GIT_STATUS_TOOL, GIT_DIFF_TOOL, GIT_COMMIT_TOOL]
|
||||
|
||||
/**
|
||||
* All run tools (run_command may require confirmation).
|
||||
*/
|
||||
export const RUN_TOOLS: ToolDef[] = [RUN_COMMAND_TOOL, RUN_TESTS_TOOL]
|
||||
|
||||
/**
|
||||
* All 18 tool definitions.
|
||||
*/
|
||||
export const ALL_TOOLS: ToolDef[] = [
|
||||
...READ_TOOLS,
|
||||
...EDIT_TOOLS,
|
||||
...SEARCH_TOOLS,
|
||||
...ANALYSIS_TOOLS,
|
||||
...GIT_TOOLS,
|
||||
...RUN_TOOLS,
|
||||
]
|
||||
|
||||
/**
|
||||
* Tools that require user confirmation before execution.
|
||||
*/
|
||||
export const CONFIRMATION_TOOLS = new Set([
|
||||
"edit_lines",
|
||||
"create_file",
|
||||
"delete_file",
|
||||
"git_commit",
|
||||
])
|
||||
|
||||
/**
|
||||
* Check if a tool requires confirmation.
|
||||
*/
|
||||
export function requiresConfirmation(toolName: string): boolean {
|
||||
return CONFIRMATION_TOOLS.has(toolName)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get tool definition by name.
|
||||
*/
|
||||
export function getToolDef(name: string): ToolDef | undefined {
|
||||
return ALL_TOOLS.find((t) => t.name === name)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get tool definitions by category.
|
||||
*/
|
||||
export function getToolsByCategory(category: string): ToolDef[] {
|
||||
switch (category) {
|
||||
case "read":
|
||||
return READ_TOOLS
|
||||
case "edit":
|
||||
return EDIT_TOOLS
|
||||
case "search":
|
||||
return SEARCH_TOOLS
|
||||
case "analysis":
|
||||
return ANALYSIS_TOOLS
|
||||
case "git":
|
||||
return GIT_TOOLS
|
||||
case "run":
|
||||
return RUN_TOOLS
|
||||
default:
|
||||
return []
|
||||
}
|
||||
}
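/*
 * Illustrative usage sketch (not part of the diff): routing a hypothetical tool
 * call through the helpers above. The toolCall object is example data only.
 */
const toolCall = { name: "edit_lines", params: { path: "src/index.ts", start: 1, end: 3, content: "// new" } }
const def = getToolDef(toolCall.name)
if (def && requiresConfirmation(toolCall.name)) {
    // edit_lines is in CONFIRMATION_TOOLS, so show a diff and ask the user before executing
}
console.log(getToolsByCategory("git").map((t) => t.name)) // ["git_status", "git_diff", "git_commit"]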
packages/ipuaro/src/infrastructure/security/PathValidator.ts (293 lines, new file)
@@ -0,0 +1,293 @@
|
||||
import * as path from "node:path"
|
||||
import { promises as fs } from "node:fs"
|
||||
|
||||
/**
|
||||
* Path validation result classification.
|
||||
*/
|
||||
export type PathValidationStatus = "valid" | "invalid" | "outside_project"
|
||||
|
||||
/**
|
||||
* Result of path validation.
|
||||
*/
|
||||
export interface PathValidationResult {
|
||||
/** Validation status */
|
||||
status: PathValidationStatus
|
||||
/** Reason for the status */
|
||||
reason: string
|
||||
/** Normalized absolute path (only if valid) */
|
||||
absolutePath?: string
|
||||
/** Normalized relative path (only if valid) */
|
||||
relativePath?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Options for path validation.
|
||||
*/
|
||||
export interface PathValidatorOptions {
|
||||
/** Allow paths that don't exist yet (for create operations) */
|
||||
allowNonExistent?: boolean
|
||||
/** Check if path is a directory */
|
||||
requireDirectory?: boolean
|
||||
/** Check if path is a file */
|
||||
requireFile?: boolean
|
||||
/** Follow symlinks when checking existence */
|
||||
followSymlinks?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Path validator for ensuring file operations stay within project boundaries.
|
||||
* Prevents path traversal attacks and unauthorized file access.
|
||||
*/
|
||||
export class PathValidator {
|
||||
private readonly projectRoot: string
|
||||
|
||||
constructor(projectRoot: string) {
|
||||
this.projectRoot = path.resolve(projectRoot)
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a path and return detailed result.
|
||||
* @param inputPath - Path to validate (relative or absolute)
|
||||
* @param options - Validation options
|
||||
*/
|
||||
async validate(
|
||||
inputPath: string,
|
||||
options: PathValidatorOptions = {},
|
||||
): Promise<PathValidationResult> {
|
||||
if (!inputPath || inputPath.trim() === "") {
|
||||
return {
|
||||
status: "invalid",
|
||||
reason: "Path is empty",
|
||||
}
|
||||
}
|
||||
|
||||
const normalizedInput = inputPath.trim()
|
||||
|
||||
if (this.containsTraversalPatterns(normalizedInput)) {
|
||||
return {
|
||||
status: "invalid",
|
||||
reason: "Path contains traversal patterns",
|
||||
}
|
||||
}
|
||||
|
||||
const absolutePath = path.resolve(this.projectRoot, normalizedInput)
|
||||
|
||||
if (!this.isWithinProject(absolutePath)) {
|
||||
return {
|
||||
status: "outside_project",
|
||||
reason: "Path is outside project root",
|
||||
}
|
||||
}
|
||||
|
||||
const relativePath = path.relative(this.projectRoot, absolutePath)
|
||||
|
||||
if (!options.allowNonExistent) {
|
||||
const existsResult = await this.checkExists(absolutePath, options)
|
||||
if (existsResult) {
|
||||
return existsResult
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
status: "valid",
|
||||
reason: "Path is valid",
|
||||
absolutePath,
|
||||
relativePath,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronous validation for simple checks.
|
||||
* Does not check file existence or type.
|
||||
* @param inputPath - Path to validate (relative or absolute)
|
||||
*/
|
||||
validateSync(inputPath: string): PathValidationResult {
|
||||
if (!inputPath || inputPath.trim() === "") {
|
||||
return {
|
||||
status: "invalid",
|
||||
reason: "Path is empty",
|
||||
}
|
||||
}
|
||||
|
||||
const normalizedInput = inputPath.trim()
|
||||
|
||||
if (this.containsTraversalPatterns(normalizedInput)) {
|
||||
return {
|
||||
status: "invalid",
|
||||
reason: "Path contains traversal patterns",
|
||||
}
|
||||
}
|
||||
|
||||
const absolutePath = path.resolve(this.projectRoot, normalizedInput)
|
||||
|
||||
if (!this.isWithinProject(absolutePath)) {
|
||||
return {
|
||||
status: "outside_project",
|
||||
reason: "Path is outside project root",
|
||||
}
|
||||
}
|
||||
|
||||
const relativePath = path.relative(this.projectRoot, absolutePath)
|
||||
|
||||
return {
|
||||
status: "valid",
|
||||
reason: "Path is valid",
|
||||
absolutePath,
|
||||
relativePath,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick check if path is within project.
|
||||
* @param inputPath - Path to check (relative or absolute)
|
||||
*/
|
||||
isWithin(inputPath: string): boolean {
|
||||
if (!inputPath || inputPath.trim() === "") {
|
||||
return false
|
||||
}
|
||||
|
||||
const normalizedInput = inputPath.trim()
|
||||
|
||||
if (this.containsTraversalPatterns(normalizedInput)) {
|
||||
return false
|
||||
}
|
||||
|
||||
const absolutePath = path.resolve(this.projectRoot, normalizedInput)
|
||||
return this.isWithinProject(absolutePath)
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a path relative to project root.
|
||||
* Returns null if path would be outside project.
|
||||
* @param inputPath - Path to resolve
|
||||
*/
|
||||
resolve(inputPath: string): string | null {
|
||||
const result = this.validateSync(inputPath)
|
||||
return result.status === "valid" ? (result.absolutePath ?? null) : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a path or throw an error if invalid.
|
||||
* @param inputPath - Path to resolve
|
||||
* @returns Tuple of [absolutePath, relativePath]
|
||||
* @throws Error if path is invalid
|
||||
*/
|
||||
resolveOrThrow(inputPath: string): [absolutePath: string, relativePath: string] {
|
||||
const result = this.validateSync(inputPath)
|
||||
if (result.status !== "valid" || result.absolutePath === undefined) {
|
||||
throw new Error(result.reason)
|
||||
}
|
||||
return [result.absolutePath, result.relativePath ?? ""]
|
||||
}
|
||||
|
||||
/**
|
||||
* Get relative path from project root.
|
||||
* Returns null if path would be outside project.
|
||||
* @param inputPath - Path to make relative
|
||||
*/
|
||||
relativize(inputPath: string): string | null {
|
||||
const result = this.validateSync(inputPath)
|
||||
return result.status === "valid" ? (result.relativePath ?? null) : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the project root path.
|
||||
*/
|
||||
getProjectRoot(): string {
|
||||
return this.projectRoot
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if path contains directory traversal patterns.
|
||||
*/
|
||||
private containsTraversalPatterns(inputPath: string): boolean {
|
||||
const normalized = inputPath.replace(/\\/g, "/")
|
||||
|
||||
if (normalized.includes("..")) {
|
||||
return true
|
||||
}
|
||||
|
||||
if (normalized.startsWith("~")) {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if absolute path is within project root.
|
||||
*/
|
||||
private isWithinProject(absolutePath: string): boolean {
|
||||
const normalizedProject = this.projectRoot.replace(/\\/g, "/")
|
||||
const normalizedPath = absolutePath.replace(/\\/g, "/")
|
||||
|
||||
if (normalizedPath === normalizedProject) {
|
||||
return true
|
||||
}
|
||||
|
||||
const projectWithSep = normalizedProject.endsWith("/")
|
||||
? normalizedProject
|
||||
: `${normalizedProject}/`
|
||||
|
||||
return normalizedPath.startsWith(projectWithSep)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check file existence and type.
|
||||
*/
|
||||
private async checkExists(
|
||||
absolutePath: string,
|
||||
options: PathValidatorOptions,
|
||||
): Promise<PathValidationResult | null> {
|
||||
try {
|
||||
const statFn = options.followSymlinks ? fs.stat : fs.lstat
|
||||
const stats = await statFn(absolutePath)
|
||||
|
||||
if (options.requireDirectory && !stats.isDirectory()) {
|
||||
return {
|
||||
status: "invalid",
|
||||
reason: "Path is not a directory",
|
||||
}
|
||||
}
|
||||
|
||||
if (options.requireFile && !stats.isFile()) {
|
||||
return {
|
||||
status: "invalid",
|
||||
reason: "Path is not a file",
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
return {
|
||||
status: "invalid",
|
||||
reason: "Path does not exist",
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
status: "invalid",
|
||||
reason: `Cannot access path: ${(error as Error).message}`,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a path validator for a project.
|
||||
* @param projectRoot - Root directory of the project
|
||||
*/
|
||||
export function createPathValidator(projectRoot: string): PathValidator {
|
||||
return new PathValidator(projectRoot)
|
||||
}
|
||||
|
||||
/**
|
||||
* Standalone function for quick path validation.
|
||||
* @param inputPath - Path to validate
|
||||
* @param projectRoot - Project root directory
|
||||
*/
|
||||
export function validatePath(inputPath: string, projectRoot: string): boolean {
|
||||
const validator = new PathValidator(projectRoot)
|
||||
return validator.isWithin(inputPath)
|
||||
}
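/*
 * Illustrative usage sketch (not part of the diff): typical checks against a
 * hypothetical project root.
 */
const validator = createPathValidator("/home/user/projects/myapp")
console.log(validator.isWithin("src/index.ts")) // true
console.log(validator.isWithin("../other/secret.env")) // false - contains ".."
const checked = validator.validateSync("src/index.ts")
if (checked.status === "valid") {
    console.log(checked.absolutePath) // "/home/user/projects/myapp/src/index.ts"
}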
packages/ipuaro/src/infrastructure/security/index.ts (9 lines, new file)
@@ -0,0 +1,9 @@
// Security module exports
export {
    PathValidator,
    createPathValidator,
    validatePath,
    type PathValidationResult,
    type PathValidationStatus,
    type PathValidatorOptions,
} from "./PathValidator.js"
packages/ipuaro/src/infrastructure/storage/RedisClient.ts (119 lines, new file)
@@ -0,0 +1,119 @@
|
||||
import { Redis } from "ioredis"
|
||||
import type { RedisConfig } from "../../shared/constants/config.js"
|
||||
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||
|
||||
/**
|
||||
* Redis client wrapper with connection management.
|
||||
* Handles connection lifecycle and AOF configuration.
|
||||
*/
|
||||
export class RedisClient {
|
||||
private client: Redis | null = null
|
||||
private readonly config: RedisConfig
|
||||
private connected = false
|
||||
|
||||
constructor(config: RedisConfig) {
|
||||
this.config = config
|
||||
}
|
||||
|
||||
/**
|
||||
* Connect to Redis server.
|
||||
* Configures AOF persistence on successful connection.
|
||||
*/
|
||||
async connect(): Promise<void> {
|
||||
if (this.connected && this.client) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
this.client = new Redis({
|
||||
host: this.config.host,
|
||||
port: this.config.port,
|
||||
db: this.config.db,
|
||||
password: this.config.password,
|
||||
keyPrefix: this.config.keyPrefix,
|
||||
lazyConnect: true,
|
||||
retryStrategy: (times: number): number | null => {
|
||||
if (times > 3) {
|
||||
return null
|
||||
}
|
||||
return Math.min(times * 200, 1000)
|
||||
},
|
||||
maxRetriesPerRequest: 3,
|
||||
enableReadyCheck: true,
|
||||
})
|
||||
|
||||
await this.client.connect()
|
||||
await this.configureAOF()
|
||||
this.connected = true
|
||||
} catch (error) {
|
||||
this.connected = false
|
||||
this.client = null
|
||||
const message = error instanceof Error ? error.message : "Unknown error"
|
||||
throw IpuaroError.redis(`Failed to connect to Redis: ${message}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Disconnect from Redis server.
|
||||
*/
|
||||
async disconnect(): Promise<void> {
|
||||
if (this.client) {
|
||||
await this.client.quit()
|
||||
this.client = null
|
||||
this.connected = false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if connected to Redis.
|
||||
*/
|
||||
isConnected(): boolean {
|
||||
return this.connected && this.client !== null && this.client.status === "ready"
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the underlying Redis client.
|
||||
* @throws IpuaroError if not connected
|
||||
*/
|
||||
getClient(): Redis {
|
||||
if (!this.client || !this.connected) {
|
||||
throw IpuaroError.redis("Redis client is not connected")
|
||||
}
|
||||
return this.client
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a health check ping.
|
||||
*/
|
||||
async ping(): Promise<boolean> {
|
||||
if (!this.client) {
|
||||
return false
|
||||
}
|
||||
try {
|
||||
const result = await this.client.ping()
|
||||
return result === "PONG"
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure AOF (Append Only File) persistence.
|
||||
* AOF provides better durability by logging every write operation.
|
||||
*/
|
||||
private async configureAOF(): Promise<void> {
|
||||
if (!this.client) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
await this.client.config("SET", "appendonly", "yes")
|
||||
await this.client.config("SET", "appendfsync", "everysec")
|
||||
} catch {
|
||||
/*
|
||||
* AOF config may fail if Redis doesn't allow CONFIG SET.
|
||||
* This is non-fatal - persistence will still work with default settings.
|
||||
*/
|
||||
}
|
||||
}
|
||||
}
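/*
 * Illustrative usage sketch (not part of the diff): connecting and health-checking.
 * The config values are assumptions for the example; the real RedisConfig shape
 * lives in shared/constants/config. Run inside an async function.
 */
const redisClient = new RedisClient({
    host: "127.0.0.1",
    port: 6379,
    db: 0,
    password: undefined,
    keyPrefix: "ipuaro:",
})
await redisClient.connect() // throws IpuaroError.redis(...) if the server is unreachable
console.log(await redisClient.ping()) // true when the server answers PONG
await redisClient.disconnect()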
packages/ipuaro/src/infrastructure/storage/RedisSessionStorage.ts (225 lines, new file)
@@ -0,0 +1,225 @@
|
||||
import type { ISessionStorage, SessionListItem } from "../../domain/services/ISessionStorage.js"
|
||||
import { type ContextState, Session, type SessionStats } from "../../domain/entities/Session.js"
|
||||
import type { ChatMessage } from "../../domain/value-objects/ChatMessage.js"
|
||||
import type { UndoEntry } from "../../domain/value-objects/UndoEntry.js"
|
||||
import { MAX_UNDO_STACK_SIZE } from "../../domain/constants/index.js"
|
||||
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||
import { RedisClient } from "./RedisClient.js"
|
||||
import { SessionFields, SessionKeys } from "./schema.js"
|
||||
|
||||
/**
|
||||
* Redis implementation of ISessionStorage.
|
||||
* Stores session data in Redis hashes and lists.
|
||||
*/
|
||||
export class RedisSessionStorage implements ISessionStorage {
|
||||
private readonly client: RedisClient
|
||||
|
||||
constructor(client: RedisClient) {
|
||||
this.client = client
|
||||
}
|
||||
|
||||
async saveSession(session: Session): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
const dataKey = SessionKeys.data(session.id)
|
||||
|
||||
const pipeline = redis.pipeline()
|
||||
|
||||
pipeline.hset(dataKey, SessionFields.projectName, session.projectName)
|
||||
pipeline.hset(dataKey, SessionFields.createdAt, String(session.createdAt))
|
||||
pipeline.hset(dataKey, SessionFields.lastActivityAt, String(session.lastActivityAt))
|
||||
pipeline.hset(dataKey, SessionFields.history, JSON.stringify(session.history))
|
||||
pipeline.hset(dataKey, SessionFields.context, JSON.stringify(session.context))
|
||||
pipeline.hset(dataKey, SessionFields.stats, JSON.stringify(session.stats))
|
||||
pipeline.hset(dataKey, SessionFields.inputHistory, JSON.stringify(session.inputHistory))
|
||||
|
||||
await this.addToSessionsList(session.id)
|
||||
|
||||
await pipeline.exec()
|
||||
}
|
||||
|
||||
async loadSession(sessionId: string): Promise<Session | null> {
|
||||
const redis = this.getRedis()
|
||||
const dataKey = SessionKeys.data(sessionId)
|
||||
|
||||
const data = await redis.hgetall(dataKey)
|
||||
if (!data || Object.keys(data).length === 0) {
|
||||
return null
|
||||
}
|
||||
|
||||
const session = new Session(
|
||||
sessionId,
|
||||
data[SessionFields.projectName],
|
||||
Number(data[SessionFields.createdAt]),
|
||||
)
|
||||
|
||||
session.lastActivityAt = Number(data[SessionFields.lastActivityAt])
|
||||
session.history = this.parseJSON(data[SessionFields.history], "history") as ChatMessage[]
|
||||
session.context = this.parseJSON(data[SessionFields.context], "context") as ContextState
|
||||
session.stats = this.parseJSON(data[SessionFields.stats], "stats") as SessionStats
|
||||
session.inputHistory = this.parseJSON(
|
||||
data[SessionFields.inputHistory],
|
||||
"inputHistory",
|
||||
) as string[]
|
||||
|
||||
const undoStack = await this.getUndoStack(sessionId)
|
||||
for (const entry of undoStack) {
|
||||
session.undoStack.push(entry)
|
||||
}
|
||||
|
||||
return session
|
||||
}
|
||||
|
||||
async deleteSession(sessionId: string): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
|
||||
await Promise.all([
|
||||
redis.del(SessionKeys.data(sessionId)),
|
||||
redis.del(SessionKeys.undo(sessionId)),
|
||||
redis.lrem(SessionKeys.list, 0, sessionId),
|
||||
])
|
||||
}
|
||||
|
||||
async listSessions(projectName?: string): Promise<SessionListItem[]> {
|
||||
const redis = this.getRedis()
|
||||
const sessionIds = await redis.lrange(SessionKeys.list, 0, -1)
|
||||
|
||||
const sessions: SessionListItem[] = []
|
||||
|
||||
for (const id of sessionIds) {
|
||||
const data = await redis.hgetall(SessionKeys.data(id))
|
||||
if (!data || Object.keys(data).length === 0) {
|
||||
continue
|
||||
}
|
||||
|
||||
const sessionProjectName = data[SessionFields.projectName]
|
||||
if (projectName && sessionProjectName !== projectName) {
|
||||
continue
|
||||
}
|
||||
|
||||
const history = this.parseJSON(data[SessionFields.history], "history") as ChatMessage[]
|
||||
|
||||
sessions.push({
|
||||
id,
|
||||
projectName: sessionProjectName,
|
||||
createdAt: Number(data[SessionFields.createdAt]),
|
||||
lastActivityAt: Number(data[SessionFields.lastActivityAt]),
|
||||
messageCount: history.length,
|
||||
})
|
||||
}
|
||||
|
||||
sessions.sort((a, b) => b.lastActivityAt - a.lastActivityAt)
|
||||
|
||||
return sessions
|
||||
}
|
||||
|
||||
async getLatestSession(projectName: string): Promise<Session | null> {
|
||||
const sessions = await this.listSessions(projectName)
|
||||
if (sessions.length === 0) {
|
||||
return null
|
||||
}
|
||||
|
||||
return this.loadSession(sessions[0].id)
|
||||
}
|
||||
|
||||
async sessionExists(sessionId: string): Promise<boolean> {
|
||||
const redis = this.getRedis()
|
||||
const exists = await redis.exists(SessionKeys.data(sessionId))
|
||||
return exists === 1
|
||||
}
|
||||
|
||||
async pushUndoEntry(sessionId: string, entry: UndoEntry): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
const undoKey = SessionKeys.undo(sessionId)
|
||||
|
||||
await redis.rpush(undoKey, JSON.stringify(entry))
|
||||
|
||||
const length = await redis.llen(undoKey)
|
||||
if (length > MAX_UNDO_STACK_SIZE) {
|
||||
await redis.lpop(undoKey)
|
||||
}
|
||||
}
|
||||
|
||||
async popUndoEntry(sessionId: string): Promise<UndoEntry | null> {
|
||||
const redis = this.getRedis()
|
||||
const undoKey = SessionKeys.undo(sessionId)
|
||||
|
||||
const data = await redis.rpop(undoKey)
|
||||
if (!data) {
|
||||
return null
|
||||
}
|
||||
|
||||
return this.parseJSON(data, "UndoEntry") as UndoEntry
|
||||
}
|
||||
|
||||
async getUndoStack(sessionId: string): Promise<UndoEntry[]> {
|
||||
const redis = this.getRedis()
|
||||
const undoKey = SessionKeys.undo(sessionId)
|
||||
|
||||
const entries = await redis.lrange(undoKey, 0, -1)
|
||||
return entries.map((entry) => this.parseJSON(entry, "UndoEntry") as UndoEntry)
|
||||
}
|
||||
|
||||
async touchSession(sessionId: string): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hset(
|
||||
SessionKeys.data(sessionId),
|
||||
SessionFields.lastActivityAt,
|
||||
String(Date.now()),
|
||||
)
|
||||
}
|
||||
|
||||
async clearAllSessions(): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
const sessionIds = await redis.lrange(SessionKeys.list, 0, -1)
|
||||
|
||||
const pipeline = redis.pipeline()
|
||||
for (const id of sessionIds) {
|
||||
pipeline.del(SessionKeys.data(id))
|
||||
pipeline.del(SessionKeys.undo(id))
|
||||
}
|
||||
pipeline.del(SessionKeys.list)
|
||||
|
||||
await pipeline.exec()
|
||||
}
|
||||
|
||||
private async addToSessionsList(sessionId: string): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
|
||||
const exists = await redis.lpos(SessionKeys.list, sessionId)
|
||||
if (exists === null) {
|
||||
await redis.lpush(SessionKeys.list, sessionId)
|
||||
}
|
||||
}
|
||||
|
||||
private getRedis(): ReturnType<RedisClient["getClient"]> {
|
||||
return this.client.getClient()
|
||||
}
|
||||
|
||||
private parseJSON(data: string | undefined, type: string): unknown {
|
||||
if (!data) {
|
||||
if (type === "history" || type === "inputHistory") {
|
||||
return []
|
||||
}
|
||||
if (type === "context") {
|
||||
return { filesInContext: [], tokenUsage: 0, needsCompression: false }
|
||||
}
|
||||
if (type === "stats") {
|
||||
return {
|
||||
totalTokens: 0,
|
||||
totalTimeMs: 0,
|
||||
toolCalls: 0,
|
||||
editsApplied: 0,
|
||||
editsRejected: 0,
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
try {
|
||||
return JSON.parse(data) as unknown
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error"
|
||||
throw IpuaroError.parse(`Failed to parse ${type}: ${message}`)
|
||||
}
|
||||
}
|
||||
}
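/*
 * Illustrative usage sketch (not part of the diff): saving and reloading a session.
 * Assumes the connected redisClient from the sketch above; the Session constructor
 * signature (id, projectName, createdAt) is taken from loadSession(). Run inside
 * an async function.
 */
const sessionStorage = new RedisSessionStorage(redisClient)
const session = new Session("session-123", "projects-myapp", Date.now())
await sessionStorage.saveSession(session)
const restored = await sessionStorage.loadSession("session-123")
console.log(restored?.projectName) // "projects-myapp"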
packages/ipuaro/src/infrastructure/storage/RedisStorage.ts (236 lines, new file)
@@ -0,0 +1,236 @@
|
||||
import type { DepsGraph, IStorage, SymbolIndex } from "../../domain/services/IStorage.js"
|
||||
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||
import type { FileData } from "../../domain/value-objects/FileData.js"
|
||||
import type { FileMeta } from "../../domain/value-objects/FileMeta.js"
|
||||
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||
import { RedisClient } from "./RedisClient.js"
|
||||
import { IndexFields, ProjectKeys } from "./schema.js"
|
||||
|
||||
/**
|
||||
* Redis implementation of IStorage.
|
||||
* Stores project data (files, AST, meta, indexes) in Redis hashes.
|
||||
*/
|
||||
export class RedisStorage implements IStorage {
|
||||
private readonly client: RedisClient
|
||||
private readonly projectName: string
|
||||
|
||||
constructor(client: RedisClient, projectName: string) {
|
||||
this.client = client
|
||||
this.projectName = projectName
|
||||
}
|
||||
|
||||
async getFile(path: string): Promise<FileData | null> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.files(this.projectName), path)
|
||||
if (!data) {
|
||||
return null
|
||||
}
|
||||
return this.parseJSON(data, "FileData") as FileData
|
||||
}
|
||||
|
||||
async setFile(path: string, data: FileData): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hset(ProjectKeys.files(this.projectName), path, JSON.stringify(data))
|
||||
}
|
||||
|
||||
async deleteFile(path: string): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hdel(ProjectKeys.files(this.projectName), path)
|
||||
}
|
||||
|
||||
async getAllFiles(): Promise<Map<string, FileData>> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hgetall(ProjectKeys.files(this.projectName))
|
||||
const result = new Map<string, FileData>()
|
||||
|
||||
for (const [path, value] of Object.entries(data)) {
|
||||
const parsed = this.parseJSON(value, "FileData") as FileData | null
|
||||
if (parsed) {
|
||||
result.set(path, parsed)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
async getFileCount(): Promise<number> {
|
||||
const redis = this.getRedis()
|
||||
return redis.hlen(ProjectKeys.files(this.projectName))
|
||||
}
|
||||
|
||||
async getAST(path: string): Promise<FileAST | null> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.ast(this.projectName), path)
|
||||
if (!data) {
|
||||
return null
|
||||
}
|
||||
return this.parseJSON(data, "FileAST") as FileAST
|
||||
}
|
||||
|
||||
async setAST(path: string, ast: FileAST): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hset(ProjectKeys.ast(this.projectName), path, JSON.stringify(ast))
|
||||
}
|
||||
|
||||
async deleteAST(path: string): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hdel(ProjectKeys.ast(this.projectName), path)
|
||||
}
|
||||
|
||||
async getAllASTs(): Promise<Map<string, FileAST>> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hgetall(ProjectKeys.ast(this.projectName))
|
||||
const result = new Map<string, FileAST>()
|
||||
|
||||
for (const [path, value] of Object.entries(data)) {
|
||||
const parsed = this.parseJSON(value, "FileAST") as FileAST | null
|
||||
if (parsed) {
|
||||
result.set(path, parsed)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
async getMeta(path: string): Promise<FileMeta | null> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.meta(this.projectName), path)
|
||||
if (!data) {
|
||||
return null
|
||||
}
|
||||
return this.parseJSON(data, "FileMeta") as FileMeta
|
||||
}
|
||||
|
||||
async setMeta(path: string, meta: FileMeta): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hset(ProjectKeys.meta(this.projectName), path, JSON.stringify(meta))
|
||||
}
|
||||
|
||||
async deleteMeta(path: string): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hdel(ProjectKeys.meta(this.projectName), path)
|
||||
}
|
||||
|
||||
async getAllMetas(): Promise<Map<string, FileMeta>> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hgetall(ProjectKeys.meta(this.projectName))
|
||||
const result = new Map<string, FileMeta>()
|
||||
|
||||
for (const [path, value] of Object.entries(data)) {
|
||||
const parsed = this.parseJSON(value, "FileMeta") as FileMeta | null
|
||||
if (parsed) {
|
||||
result.set(path, parsed)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
async getSymbolIndex(): Promise<SymbolIndex> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.indexes(this.projectName), IndexFields.symbols)
|
||||
if (!data) {
|
||||
return new Map()
|
||||
}
|
||||
|
||||
const parsed = this.parseJSON(data, "SymbolIndex") as [string, unknown[]][] | null
|
||||
if (!parsed) {
|
||||
return new Map()
|
||||
}
|
||||
|
||||
return new Map(parsed) as SymbolIndex
|
||||
}
|
||||
|
||||
async setSymbolIndex(index: SymbolIndex): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
const serialized = JSON.stringify([...index.entries()])
|
||||
await redis.hset(ProjectKeys.indexes(this.projectName), IndexFields.symbols, serialized)
|
||||
}
|
||||
|
||||
async getDepsGraph(): Promise<DepsGraph> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.indexes(this.projectName), IndexFields.depsGraph)
|
||||
if (!data) {
|
||||
return {
|
||||
imports: new Map(),
|
||||
importedBy: new Map(),
|
||||
}
|
||||
}
|
||||
|
||||
const parsed = this.parseJSON(data, "DepsGraph") as {
|
||||
imports: [string, string[]][]
|
||||
importedBy: [string, string[]][]
|
||||
} | null
|
||||
|
||||
if (!parsed) {
|
||||
return {
|
||||
imports: new Map(),
|
||||
importedBy: new Map(),
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
imports: new Map(parsed.imports),
|
||||
importedBy: new Map(parsed.importedBy),
|
||||
}
|
||||
}
|
||||
|
||||
async setDepsGraph(graph: DepsGraph): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
const serialized = JSON.stringify({
|
||||
imports: [...graph.imports.entries()],
|
||||
importedBy: [...graph.importedBy.entries()],
|
||||
})
|
||||
await redis.hset(ProjectKeys.indexes(this.projectName), IndexFields.depsGraph, serialized)
|
||||
}
|
||||
|
||||
async getProjectConfig(key: string): Promise<unknown> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.config(this.projectName), key)
|
||||
if (!data) {
|
||||
return null
|
||||
}
|
||||
return this.parseJSON(data, "ProjectConfig")
|
||||
}
|
||||
|
||||
async setProjectConfig(key: string, value: unknown): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hset(ProjectKeys.config(this.projectName), key, JSON.stringify(value))
|
||||
}
|
||||
|
||||
async connect(): Promise<void> {
|
||||
await this.client.connect()
|
||||
}
|
||||
|
||||
async disconnect(): Promise<void> {
|
||||
await this.client.disconnect()
|
||||
}
|
||||
|
||||
isConnected(): boolean {
|
||||
return this.client.isConnected()
|
||||
}
|
||||
|
||||
async clear(): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await Promise.all([
|
||||
redis.del(ProjectKeys.files(this.projectName)),
|
||||
redis.del(ProjectKeys.ast(this.projectName)),
|
||||
redis.del(ProjectKeys.meta(this.projectName)),
|
||||
redis.del(ProjectKeys.indexes(this.projectName)),
|
||||
redis.del(ProjectKeys.config(this.projectName)),
|
||||
])
|
||||
}
|
||||
|
||||
private getRedis(): ReturnType<RedisClient["getClient"]> {
|
||||
return this.client.getClient()
|
||||
}
|
||||
|
||||
private parseJSON(data: string, type: string): unknown {
|
||||
try {
|
||||
return JSON.parse(data) as unknown
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error"
|
||||
throw IpuaroError.parse(`Failed to parse ${type}: ${message}`)
|
||||
}
|
||||
}
|
||||
}
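/*
 * Illustrative usage sketch (not part of the diff): project-scoped reads and writes.
 * Assumes the connected redisClient from the RedisClient sketch; the project name
 * is example data. Run inside an async function.
 */
const storage = new RedisStorage(redisClient, "projects-myapp")
await storage.setProjectConfig("indexedAt", Date.now())
console.log(await storage.getFileCount()) // entries in the project:projects-myapp:files hash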
packages/ipuaro/src/infrastructure/storage/index.ts (11 lines, new file)
@@ -0,0 +1,11 @@
// Storage module exports
export { RedisClient } from "./RedisClient.js"
export { RedisStorage } from "./RedisStorage.js"
export { RedisSessionStorage } from "./RedisSessionStorage.js"
export {
    ProjectKeys,
    SessionKeys,
    IndexFields,
    SessionFields,
    generateProjectName,
} from "./schema.js"
packages/ipuaro/src/infrastructure/storage/schema.ts (95 lines, new file)
@@ -0,0 +1,95 @@
|
||||
/**
|
||||
* Redis key schema for ipuaro data storage.
|
||||
*
|
||||
* Key structure:
|
||||
* - project:{name}:files # Hash<path, FileData>
|
||||
* - project:{name}:ast # Hash<path, FileAST>
|
||||
* - project:{name}:meta # Hash<path, FileMeta>
|
||||
* - project:{name}:indexes # Hash<name, JSON> (symbols, deps_graph)
|
||||
* - project:{name}:config # Hash<key, JSON>
|
||||
*
|
||||
* - session:{id}:data # Hash<field, JSON> (history, context, stats)
|
||||
* - session:{id}:undo # List<UndoEntry> (max 10)
|
||||
* - sessions:list # List<session_id>
|
||||
*
|
||||
* Project name format: {parent-folder}-{project-folder}
|
||||
*/
|
||||
|
||||
/**
|
||||
* Project-related Redis keys.
|
||||
*/
|
||||
export const ProjectKeys = {
|
||||
files: (projectName: string): string => `project:${projectName}:files`,
|
||||
ast: (projectName: string): string => `project:${projectName}:ast`,
|
||||
meta: (projectName: string): string => `project:${projectName}:meta`,
|
||||
indexes: (projectName: string): string => `project:${projectName}:indexes`,
|
||||
config: (projectName: string): string => `project:${projectName}:config`,
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Session-related Redis keys.
|
||||
*/
|
||||
export const SessionKeys = {
|
||||
data: (sessionId: string): string => `session:${sessionId}:data`,
|
||||
undo: (sessionId: string): string => `session:${sessionId}:undo`,
|
||||
list: "sessions:list",
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Index field names within project:indexes hash.
|
||||
*/
|
||||
export const IndexFields = {
|
||||
symbols: "symbols",
|
||||
depsGraph: "deps_graph",
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Session data field names within session:data hash.
|
||||
*/
|
||||
export const SessionFields = {
|
||||
history: "history",
|
||||
context: "context",
|
||||
stats: "stats",
|
||||
inputHistory: "input_history",
|
||||
createdAt: "created_at",
|
||||
lastActivityAt: "last_activity_at",
|
||||
projectName: "project_name",
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Generate project name from path.
|
||||
* Format: {parent-folder}-{project-folder}
|
||||
*
|
||||
* @example
|
||||
* generateProjectName("/home/user/projects/myapp") -> "projects-myapp"
|
||||
* generateProjectName("/app") -> "app"
|
||||
*/
|
||||
export function generateProjectName(projectPath: string): string {
|
||||
const normalized = projectPath.replace(/\\/g, "/").replace(/\/+$/, "")
|
||||
const parts = normalized.split("/").filter(Boolean)
|
||||
|
||||
if (parts.length === 0) {
|
||||
return "root"
|
||||
}
|
||||
|
||||
if (parts.length === 1) {
|
||||
return sanitizeName(parts[0])
|
||||
}
|
||||
|
||||
const projectFolder = sanitizeName(parts[parts.length - 1])
|
||||
const parentFolder = sanitizeName(parts[parts.length - 2])
|
||||
|
||||
return `${parentFolder}-${projectFolder}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize a name for use in Redis keys.
|
||||
* Replaces non-alphanumeric characters with hyphens.
|
||||
*/
|
||||
function sanitizeName(name: string): string {
|
||||
return name
|
||||
.toLowerCase()
|
||||
.replace(/[^a-z0-9-]/g, "-")
|
||||
.replace(/-+/g, "-")
|
||||
.replace(/^-|-$/g, "")
|
||||
}
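/*
 * Illustrative usage sketch (not part of the diff): how a project path maps onto
 * the Redis keys defined above. The path and session id are example values.
 */
const projectName = generateProjectName("/home/user/projects/My App") // "projects-my-app"
console.log(ProjectKeys.files(projectName)) // "project:projects-my-app:files"
console.log(SessionKeys.undo("session-123")) // "session:session-123:undo"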
|
||||
@@ -0,0 +1,232 @@
|
||||
import * as path from "node:path"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import type { ComplexityMetrics, FileMeta } from "../../../domain/value-objects/FileMeta.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
|
||||
/**
|
||||
* Complexity entry for a single file.
|
||||
*/
|
||||
export interface ComplexityEntry {
|
||||
/** Relative path to the file */
|
||||
path: string
|
||||
/** Complexity metrics */
|
||||
metrics: ComplexityMetrics
|
||||
/** File type classification */
|
||||
fileType: "source" | "test" | "config" | "types" | "unknown"
|
||||
/** Whether the file is a hub */
|
||||
isHub: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Result data from get_complexity tool.
|
||||
*/
|
||||
export interface GetComplexityResult {
|
||||
/** The path that was analyzed (file or directory) */
|
||||
analyzedPath: string | null
|
||||
/** Total files analyzed */
|
||||
totalFiles: number
|
||||
/** Average complexity score */
|
||||
averageScore: number
|
||||
/** Files sorted by complexity score (descending) */
|
||||
files: ComplexityEntry[]
|
||||
/** Summary statistics */
|
||||
summary: {
|
||||
highComplexity: number
|
||||
mediumComplexity: number
|
||||
lowComplexity: number
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Complexity thresholds for classification.
|
||||
*/
|
||||
const COMPLEXITY_THRESHOLDS = {
|
||||
high: 60,
|
||||
medium: 30,
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool for getting complexity metrics for files.
|
||||
* Can analyze a single file or all files in the project.
|
||||
*/
|
||||
export class GetComplexityTool implements ITool {
|
||||
readonly name = "get_complexity"
|
||||
readonly description =
|
||||
"Get complexity metrics for files. " +
|
||||
"Returns LOC, nesting depth, cyclomatic complexity, and overall score. " +
|
||||
"Without path, returns all files sorted by complexity."
|
||||
readonly parameters: ToolParameterSchema[] = [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File or directory path to analyze (optional, defaults to entire project)",
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
name: "limit",
|
||||
type: "number",
|
||||
description: "Maximum number of files to return (default: 20)",
|
||||
required: false,
|
||||
default: 20,
|
||||
},
|
||||
]
|
||||
readonly requiresConfirmation = false
|
||||
readonly category = "analysis" as const
|
||||
|
||||
validateParams(params: Record<string, unknown>): string | null {
|
||||
if (params.path !== undefined && typeof params.path !== "string") {
|
||||
return "Parameter 'path' must be a string"
|
||||
}
|
||||
if (params.limit !== undefined) {
|
||||
if (typeof params.limit !== "number" || !Number.isInteger(params.limit)) {
|
||||
return "Parameter 'limit' must be an integer"
|
||||
}
|
||||
if (params.limit < 1) {
|
||||
return "Parameter 'limit' must be at least 1"
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||
const startTime = Date.now()
|
||||
const callId = `${this.name}-${String(startTime)}`
|
||||
|
||||
const inputPath = params.path as string | undefined
|
||||
const limit = (params.limit as number | undefined) ?? 20
|
||||
|
||||
try {
|
||||
const allMetas = await ctx.storage.getAllMetas()
|
||||
|
||||
if (allMetas.size === 0) {
|
||||
return createSuccessResult(
|
||||
callId,
|
||||
{
|
||||
analyzedPath: inputPath ?? null,
|
||||
totalFiles: 0,
|
||||
averageScore: 0,
|
||||
files: [],
|
||||
summary: { highComplexity: 0, mediumComplexity: 0, lowComplexity: 0 },
|
||||
} satisfies GetComplexityResult,
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
let filteredMetas = allMetas
|
||||
let analyzedPath: string | null = null
|
||||
|
||||
if (inputPath) {
|
||||
const relativePath = this.normalizePathToRelative(inputPath, ctx.projectRoot)
|
||||
analyzedPath = relativePath
|
||||
filteredMetas = this.filterByPath(allMetas, relativePath)
|
||||
|
||||
if (filteredMetas.size === 0) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
`No files found at path: ${relativePath}`,
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const entries: ComplexityEntry[] = []
|
||||
for (const [filePath, meta] of filteredMetas) {
|
||||
entries.push({
|
||||
path: filePath,
|
||||
metrics: meta.complexity,
|
||||
fileType: meta.fileType,
|
||||
isHub: meta.isHub,
|
||||
})
|
||||
}
|
||||
|
||||
entries.sort((a, b) => b.metrics.score - a.metrics.score)
|
||||
|
||||
const summary = this.calculateSummary(entries)
|
||||
const averageScore = this.calculateAverageScore(entries)
|
||||
|
||||
const limitedEntries = entries.slice(0, limit)
|
||||
|
||||
const result: GetComplexityResult = {
|
||||
analyzedPath,
|
||||
totalFiles: entries.length,
|
||||
averageScore,
|
||||
files: limitedEntries,
|
||||
summary,
|
||||
}
|
||||
|
||||
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize input path to relative path from project root.
|
||||
*/
|
||||
private normalizePathToRelative(inputPath: string, projectRoot: string): string {
|
||||
if (path.isAbsolute(inputPath)) {
|
||||
return path.relative(projectRoot, inputPath)
|
||||
}
|
||||
return inputPath
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter metas by path prefix (file or directory).
|
||||
*/
|
||||
private filterByPath(
|
||||
allMetas: Map<string, FileMeta>,
|
||||
targetPath: string,
|
||||
): Map<string, FileMeta> {
|
||||
const filtered = new Map<string, FileMeta>()
|
||||
|
||||
for (const [filePath, meta] of allMetas) {
|
||||
if (filePath === targetPath || filePath.startsWith(`${targetPath}/`)) {
|
||||
filtered.set(filePath, meta)
|
||||
}
|
||||
}
|
||||
|
||||
return filtered
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate summary statistics for complexity entries.
|
||||
*/
|
||||
private calculateSummary(entries: ComplexityEntry[]): {
|
||||
highComplexity: number
|
||||
mediumComplexity: number
|
||||
lowComplexity: number
|
||||
} {
|
||||
let high = 0
|
||||
let medium = 0
|
||||
let low = 0
|
||||
|
||||
for (const entry of entries) {
|
||||
const score = entry.metrics.score
|
||||
if (score >= COMPLEXITY_THRESHOLDS.high) {
|
||||
high++
|
||||
} else if (score >= COMPLEXITY_THRESHOLDS.medium) {
|
||||
medium++
|
||||
} else {
|
||||
low++
|
||||
}
|
||||
}
|
||||
|
||||
return { highComplexity: high, mediumComplexity: medium, lowComplexity: low }
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate average complexity score.
|
||||
*/
|
||||
private calculateAverageScore(entries: ComplexityEntry[]): number {
|
||||
if (entries.length === 0) {
|
||||
return 0
|
||||
}
|
||||
const total = entries.reduce((sum, entry) => sum + entry.metrics.score, 0)
|
||||
return Math.round((total / entries.length) * 100) / 100
|
||||
}
|
||||
}
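/*
 * Illustrative usage sketch (not part of the diff): invoking the tool directly.
 * The ToolContext here is a partial stand-in built from the earlier storage sketch
 * and a hypothetical project root; in the real flow the tool executor supplies it.
 * Run inside an async function.
 */
const complexityTool = new GetComplexityTool()
const complexityParams = { path: "src", limit: 5 }
if (complexityTool.validateParams(complexityParams) === null) {
    const ctx = { storage, projectRoot: "/home/user/projects/myapp" } as ToolContext
    const result = await complexityTool.execute(complexityParams, ctx)
    console.log(result)
}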
|
||||
@@ -0,0 +1,121 @@
|
||||
import * as path from "node:path"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
|
||||
/**
|
||||
* Single dependency entry with metadata.
|
||||
*/
|
||||
export interface DependencyEntry {
|
||||
/** Relative path to the dependency */
|
||||
path: string
|
||||
/** Whether the file exists in the project */
|
||||
exists: boolean
|
||||
/** Whether it's an entry point */
|
||||
isEntryPoint: boolean
|
||||
/** Whether it's a hub file */
|
||||
isHub: boolean
|
||||
/** File type classification */
|
||||
fileType: "source" | "test" | "config" | "types" | "unknown"
|
||||
}
|
||||
|
||||
/**
|
||||
* Result data from get_dependencies tool.
|
||||
*/
|
||||
export interface GetDependenciesResult {
|
||||
/** The file being analyzed */
|
||||
file: string
|
||||
/** Total number of dependencies */
|
||||
totalDependencies: number
|
||||
/** List of dependencies with metadata */
|
||||
dependencies: DependencyEntry[]
|
||||
/** File type of the source file */
|
||||
fileType: "source" | "test" | "config" | "types" | "unknown"
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool for getting files that a specific file imports.
|
||||
* Returns the list of internal dependencies from FileMeta.
|
||||
*/
|
||||
export class GetDependenciesTool implements ITool {
|
||||
readonly name = "get_dependencies"
|
||||
readonly description =
|
||||
"Get files that a specific file imports. " +
|
||||
"Returns internal dependencies resolved to file paths."
|
||||
readonly parameters: ToolParameterSchema[] = [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path to analyze (relative to project root or absolute)",
|
||||
required: true,
|
||||
},
|
||||
]
|
||||
readonly requiresConfirmation = false
|
||||
readonly category = "analysis" as const
|
||||
|
||||
validateParams(params: Record<string, unknown>): string | null {
|
||||
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||
return "Parameter 'path' is required and must be a non-empty string"
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||
const startTime = Date.now()
|
||||
const callId = `${this.name}-${String(startTime)}`
|
||||
|
||||
const inputPath = (params.path as string).trim()
|
||||
|
||||
try {
|
||||
const relativePath = this.normalizePathToRelative(inputPath, ctx.projectRoot)
|
||||
|
||||
const meta = await ctx.storage.getMeta(relativePath)
|
||||
if (!meta) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
`File not found or not indexed: ${relativePath}`,
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
const dependencies: DependencyEntry[] = []
|
||||
for (const depPath of meta.dependencies) {
|
||||
const depMeta = await ctx.storage.getMeta(depPath)
|
||||
dependencies.push({
|
||||
path: depPath,
|
||||
exists: depMeta !== null,
|
||||
isEntryPoint: depMeta?.isEntryPoint ?? false,
|
||||
isHub: depMeta?.isHub ?? false,
|
||||
fileType: depMeta?.fileType ?? "unknown",
|
||||
})
|
||||
}
|
||||
|
||||
dependencies.sort((a, b) => a.path.localeCompare(b.path))
|
||||
|
||||
const result: GetDependenciesResult = {
|
||||
file: relativePath,
|
||||
totalDependencies: dependencies.length,
|
||||
dependencies,
|
||||
fileType: meta.fileType,
|
||||
}
|
||||
|
||||
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize input path to relative path from project root.
|
||||
*/
|
||||
private normalizePathToRelative(inputPath: string, projectRoot: string): string {
|
||||
if (path.isAbsolute(inputPath)) {
|
||||
return path.relative(projectRoot, inputPath)
|
||||
}
|
||||
return inputPath
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/tools/analysis/GetDependentsTool.ts (new file, 124 lines)
@@ -0,0 +1,124 @@
import * as path from "node:path"
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
import {
    createErrorResult,
    createSuccessResult,
    type ToolResult,
} from "../../../domain/value-objects/ToolResult.js"

/**
 * Single dependent entry with metadata.
 */
export interface DependentEntry {
    /** Relative path to the dependent file */
    path: string
    /** Whether the file is an entry point */
    isEntryPoint: boolean
    /** Whether the file is a hub */
    isHub: boolean
    /** File type classification */
    fileType: "source" | "test" | "config" | "types" | "unknown"
    /** Complexity score of the dependent */
    complexityScore: number
}

/**
 * Result data from get_dependents tool.
 */
export interface GetDependentsResult {
    /** The file being analyzed */
    file: string
    /** Total number of dependents */
    totalDependents: number
    /** Whether this file is a hub (>5 dependents) */
    isHub: boolean
    /** List of files that import this file */
    dependents: DependentEntry[]
    /** File type of the source file */
    fileType: "source" | "test" | "config" | "types" | "unknown"
}

/**
 * Tool for getting files that import a specific file.
 * Returns the list of files that depend on the target file.
 */
export class GetDependentsTool implements ITool {
    readonly name = "get_dependents"
    readonly description =
        "Get files that import a specific file. " +
        "Returns list of files that depend on the target."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "path",
            type: "string",
            description: "File path to analyze (relative to project root or absolute)",
            required: true,
        },
    ]
    readonly requiresConfirmation = false
    readonly category = "analysis" as const

    validateParams(params: Record<string, unknown>): string | null {
        if (typeof params.path !== "string" || params.path.trim() === "") {
            return "Parameter 'path' is required and must be a non-empty string"
        }
        return null
    }

    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const inputPath = (params.path as string).trim()

        try {
            const relativePath = this.normalizePathToRelative(inputPath, ctx.projectRoot)

            const meta = await ctx.storage.getMeta(relativePath)
            if (!meta) {
                return createErrorResult(
                    callId,
                    `File not found or not indexed: ${relativePath}`,
                    Date.now() - startTime,
                )
            }

            const dependents: DependentEntry[] = []
            for (const depPath of meta.dependents) {
                const depMeta = await ctx.storage.getMeta(depPath)
                dependents.push({
                    path: depPath,
                    isEntryPoint: depMeta?.isEntryPoint ?? false,
                    isHub: depMeta?.isHub ?? false,
                    fileType: depMeta?.fileType ?? "unknown",
                    complexityScore: depMeta?.complexity.score ?? 0,
                })
            }

            dependents.sort((a, b) => a.path.localeCompare(b.path))

            const result: GetDependentsResult = {
                file: relativePath,
                totalDependents: dependents.length,
                isHub: meta.isHub,
                dependents,
                fileType: meta.fileType,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Normalize input path to relative path from project root.
     */
    private normalizePathToRelative(inputPath: string, projectRoot: string): string {
        if (path.isAbsolute(inputPath)) {
            return path.relative(projectRoot, inputPath)
        }
        return inputPath
    }
}
@@ -0,0 +1,276 @@
|
||||
import * as path from "node:path"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import type { FileData } from "../../../domain/value-objects/FileData.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
|
||||
/**
|
||||
* Types of TODO markers to search for.
|
||||
*/
|
||||
export type TodoType = "TODO" | "FIXME" | "HACK" | "XXX" | "BUG" | "NOTE"
|
||||
|
||||
/**
|
||||
* A single TODO entry found in the codebase.
|
||||
*/
|
||||
export interface TodoEntry {
|
||||
/** Relative path to the file */
|
||||
path: string
|
||||
/** Line number where the TODO is found */
|
||||
line: number
|
||||
/** Type of TODO marker (TODO, FIXME, etc.) */
|
||||
type: TodoType
|
||||
/** The TODO text content */
|
||||
text: string
|
||||
/** Full line content for context */
|
||||
context: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Result data from get_todos tool.
|
||||
*/
|
||||
export interface GetTodosResult {
|
||||
/** The path that was searched (file or directory) */
|
||||
searchedPath: string | null
|
||||
/** Total number of TODOs found */
|
||||
totalTodos: number
|
||||
/** Number of files with TODOs */
|
||||
filesWithTodos: number
|
||||
/** TODOs grouped by type */
|
||||
byType: Record<TodoType, number>
|
||||
/** List of TODO entries */
|
||||
todos: TodoEntry[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Supported TODO marker patterns.
|
||||
*/
|
||||
const TODO_MARKERS: TodoType[] = ["TODO", "FIXME", "HACK", "XXX", "BUG", "NOTE"]
|
||||
|
||||
/**
|
||||
* Regex pattern for matching TODO markers in comments.
|
||||
*/
|
||||
const TODO_PATTERN = new RegExp(
|
||||
`(?://|/\\*|\\*|#)\\s*(${TODO_MARKERS.join("|")})(?:\\([^)]*\\))?:?\\s*(.*)`,
|
||||
"i",
|
||||
)
|
||||
|
||||
/**
|
||||
* Tool for finding TODO/FIXME/HACK comments in the codebase.
|
||||
* Searches through indexed files for common task markers.
|
||||
*/
|
||||
export class GetTodosTool implements ITool {
|
||||
readonly name = "get_todos"
|
||||
readonly description =
|
||||
"Find TODO, FIXME, HACK, XXX, BUG, and NOTE comments in the codebase. " +
|
||||
"Returns list of locations with context."
|
||||
readonly parameters: ToolParameterSchema[] = [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File or directory to search (optional, defaults to entire project)",
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
name: "type",
|
||||
type: "string",
|
||||
description:
|
||||
"Filter by TODO type: TODO, FIXME, HACK, XXX, BUG, NOTE (optional, defaults to all)",
|
||||
required: false,
|
||||
},
|
||||
]
|
||||
readonly requiresConfirmation = false
|
||||
readonly category = "analysis" as const
|
||||
|
||||
validateParams(params: Record<string, unknown>): string | null {
|
||||
if (params.path !== undefined && typeof params.path !== "string") {
|
||||
return "Parameter 'path' must be a string"
|
||||
}
|
||||
if (params.type !== undefined) {
|
||||
if (typeof params.type !== "string") {
|
||||
return "Parameter 'type' must be a string"
|
||||
}
|
||||
const upperType = params.type.toUpperCase()
|
||||
if (!TODO_MARKERS.includes(upperType as TodoType)) {
|
||||
return `Parameter 'type' must be one of: ${TODO_MARKERS.join(", ")}`
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||
const startTime = Date.now()
|
||||
const callId = `${this.name}-${String(startTime)}`
|
||||
|
||||
const inputPath = params.path as string | undefined
|
||||
const filterType = params.type ? ((params.type as string).toUpperCase() as TodoType) : null
|
||||
|
||||
try {
|
||||
const allFiles = await ctx.storage.getAllFiles()
|
||||
|
||||
if (allFiles.size === 0) {
|
||||
return createSuccessResult(
|
||||
callId,
|
||||
this.createEmptyResult(inputPath ?? null),
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
let filesToSearch = allFiles
|
||||
let searchedPath: string | null = null
|
||||
|
||||
if (inputPath) {
|
||||
const relativePath = this.normalizePathToRelative(inputPath, ctx.projectRoot)
|
||||
searchedPath = relativePath
|
||||
filesToSearch = this.filterByPath(allFiles, relativePath)
|
||||
|
||||
if (filesToSearch.size === 0) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
`No files found at path: ${relativePath}`,
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const todos: TodoEntry[] = []
|
||||
const filesWithTodos = new Set<string>()
|
||||
|
||||
for (const [filePath, fileData] of filesToSearch) {
|
||||
const fileTodos = this.findTodosInFile(filePath, fileData.lines, filterType)
|
||||
if (fileTodos.length > 0) {
|
||||
filesWithTodos.add(filePath)
|
||||
todos.push(...fileTodos)
|
||||
}
|
||||
}
|
||||
|
||||
todos.sort((a, b) => {
|
||||
const pathCompare = a.path.localeCompare(b.path)
|
||||
if (pathCompare !== 0) {
|
||||
return pathCompare
|
||||
}
|
||||
return a.line - b.line
|
||||
})
|
||||
|
||||
const byType = this.countByType(todos)
|
||||
|
||||
const result: GetTodosResult = {
|
||||
searchedPath,
|
||||
totalTodos: todos.length,
|
||||
filesWithTodos: filesWithTodos.size,
|
||||
byType,
|
||||
todos,
|
||||
}
|
||||
|
||||
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize input path to relative path from project root.
|
||||
*/
|
||||
private normalizePathToRelative(inputPath: string, projectRoot: string): string {
|
||||
if (path.isAbsolute(inputPath)) {
|
||||
return path.relative(projectRoot, inputPath)
|
||||
}
|
||||
return inputPath
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter files by path prefix.
|
||||
*/
|
||||
private filterByPath(
|
||||
allFiles: Map<string, FileData>,
|
||||
targetPath: string,
|
||||
): Map<string, FileData> {
|
||||
const filtered = new Map<string, FileData>()
|
||||
|
||||
for (const [filePath, fileData] of allFiles) {
|
||||
if (filePath === targetPath || filePath.startsWith(`${targetPath}/`)) {
|
||||
filtered.set(filePath, fileData)
|
||||
}
|
||||
}
|
||||
|
||||
return filtered
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all TODOs in a file.
|
||||
*/
|
||||
private findTodosInFile(
|
||||
filePath: string,
|
||||
lines: string[],
|
||||
filterType: TodoType | null,
|
||||
): TodoEntry[] {
|
||||
const todos: TodoEntry[] = []
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i]
|
||||
const match = TODO_PATTERN.exec(line)
|
||||
|
||||
if (match) {
|
||||
const type = match[1].toUpperCase() as TodoType
|
||||
const text = match[2].trim()
|
||||
|
||||
if (filterType && type !== filterType) {
|
||||
continue
|
||||
}
|
||||
|
||||
todos.push({
|
||||
path: filePath,
|
||||
line: i + 1,
|
||||
type,
|
||||
text: text || "(no description)",
|
||||
context: line.trim(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return todos
|
||||
}
|
||||
|
||||
/**
|
||||
* Count TODOs by type.
|
||||
*/
|
||||
private countByType(todos: TodoEntry[]): Record<TodoType, number> {
|
||||
const counts: Record<TodoType, number> = {
|
||||
TODO: 0,
|
||||
FIXME: 0,
|
||||
HACK: 0,
|
||||
XXX: 0,
|
||||
BUG: 0,
|
||||
NOTE: 0,
|
||||
}
|
||||
|
||||
for (const todo of todos) {
|
||||
counts[todo.type]++
|
||||
}
|
||||
|
||||
return counts
|
||||
}
|
||||
|
||||
/**
|
||||
* Create empty result structure.
|
||||
*/
|
||||
private createEmptyResult(searchedPath: string | null): GetTodosResult {
|
||||
return {
|
||||
searchedPath,
|
||||
totalTodos: 0,
|
||||
filesWithTodos: 0,
|
||||
byType: {
|
||||
TODO: 0,
|
||||
FIXME: 0,
|
||||
HACK: 0,
|
||||
XXX: 0,
|
||||
BUG: 0,
|
||||
NOTE: 0,
|
||||
},
|
||||
todos: [],
|
||||
}
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/tools/analysis/index.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
// Analysis tools module exports
export {
    GetDependenciesTool,
    type GetDependenciesResult,
    type DependencyEntry,
} from "./GetDependenciesTool.js"

export {
    GetDependentsTool,
    type GetDependentsResult,
    type DependentEntry,
} from "./GetDependentsTool.js"

export {
    GetComplexityTool,
    type GetComplexityResult,
    type ComplexityEntry,
} from "./GetComplexityTool.js"

export { GetTodosTool, type GetTodosResult, type TodoEntry, type TodoType } from "./GetTodosTool.js"
packages/ipuaro/src/infrastructure/tools/edit/CreateFileTool.ts (new file, 142 lines)
@@ -0,0 +1,142 @@
|
||||
import { promises as fs } from "node:fs"
|
||||
import * as path from "node:path"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import { createFileData } from "../../../domain/value-objects/FileData.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
import { hashLines } from "../../../shared/utils/hash.js"
|
||||
import { PathValidator } from "../../security/PathValidator.js"
|
||||
|
||||
/**
|
||||
* Result data from create_file tool.
|
||||
*/
|
||||
export interface CreateFileResult {
|
||||
path: string
|
||||
lines: number
|
||||
size: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool for creating new files.
|
||||
* Creates a new file with the specified content.
|
||||
* Requires user confirmation before creating.
|
||||
*/
|
||||
export class CreateFileTool implements ITool {
|
||||
readonly name = "create_file"
|
||||
readonly description =
|
||||
"Create a new file with the specified content. " +
|
||||
"The file path must be within the project root. " +
|
||||
"Requires confirmation before creating."
|
||||
readonly parameters: ToolParameterSchema[] = [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "content",
|
||||
type: "string",
|
||||
description: "File content",
|
||||
required: true,
|
||||
},
|
||||
]
|
||||
readonly requiresConfirmation = true
|
||||
readonly category = "edit" as const
|
||||
|
||||
validateParams(params: Record<string, unknown>): string | null {
|
||||
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||
return "Parameter 'path' is required and must be a non-empty string"
|
||||
}
|
||||
|
||||
if (typeof params.content !== "string") {
|
||||
return "Parameter 'content' is required and must be a string"
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||
const startTime = Date.now()
|
||||
const callId = `${this.name}-${String(startTime)}`
|
||||
|
||||
const inputPath = params.path as string
|
||||
const content = params.content as string
|
||||
|
||||
const pathValidator = new PathValidator(ctx.projectRoot)
|
||||
|
||||
let absolutePath: string
|
||||
let relativePath: string
|
||||
try {
|
||||
;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
|
||||
try {
|
||||
const exists = await this.fileExists(absolutePath)
|
||||
if (exists) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
`File already exists: ${relativePath}`,
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
const lines = content.split("\n")
|
||||
|
||||
const confirmed = await ctx.requestConfirmation(
|
||||
`Create new file: ${relativePath} (${String(lines.length)} lines)`,
|
||||
{
|
||||
filePath: relativePath,
|
||||
oldLines: [],
|
||||
newLines: lines,
|
||||
startLine: 1,
|
||||
},
|
||||
)
|
||||
|
||||
if (!confirmed) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
"File creation cancelled by user",
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
const dirPath = path.dirname(absolutePath)
|
||||
await fs.mkdir(dirPath, { recursive: true })
|
||||
await fs.writeFile(absolutePath, content, "utf-8")
|
||||
|
||||
const stats = await fs.stat(absolutePath)
|
||||
const fileData = createFileData(lines, hashLines(lines), stats.size, stats.mtimeMs)
|
||||
await ctx.storage.setFile(relativePath, fileData)
|
||||
|
||||
const result: CreateFileResult = {
|
||||
path: relativePath,
|
||||
lines: lines.length,
|
||||
size: stats.size,
|
||||
}
|
||||
|
||||
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if file exists.
|
||||
*/
|
||||
private async fileExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(filePath)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/tools/edit/DeleteFileTool.ts (new file, 137 lines)
@@ -0,0 +1,137 @@
|
||||
import { promises as fs } from "node:fs"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
import { PathValidator } from "../../security/PathValidator.js"
|
||||
|
||||
/**
|
||||
* Result data from delete_file tool.
|
||||
*/
|
||||
export interface DeleteFileResult {
|
||||
path: string
|
||||
deleted: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool for deleting files.
|
||||
* Deletes a file from the filesystem and storage.
|
||||
* Requires user confirmation before deleting.
|
||||
*/
|
||||
export class DeleteFileTool implements ITool {
|
||||
readonly name = "delete_file"
|
||||
readonly description =
|
||||
"Delete a file from the project. " +
|
||||
"The file path must be within the project root. " +
|
||||
"Requires confirmation before deleting."
|
||||
readonly parameters: ToolParameterSchema[] = [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
]
|
||||
readonly requiresConfirmation = true
|
||||
readonly category = "edit" as const
|
||||
|
||||
validateParams(params: Record<string, unknown>): string | null {
|
||||
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||
return "Parameter 'path' is required and must be a non-empty string"
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||
const startTime = Date.now()
|
||||
const callId = `${this.name}-${String(startTime)}`
|
||||
|
||||
const inputPath = params.path as string
|
||||
const pathValidator = new PathValidator(ctx.projectRoot)
|
||||
|
||||
let absolutePath: string
|
||||
let relativePath: string
|
||||
try {
|
||||
;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
|
||||
try {
|
||||
const exists = await this.fileExists(absolutePath)
|
||||
if (!exists) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
`File not found: ${relativePath}`,
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
const fileContent = await this.getFileContent(absolutePath, relativePath, ctx)
|
||||
|
||||
const confirmed = await ctx.requestConfirmation(`Delete file: ${relativePath}`, {
|
||||
filePath: relativePath,
|
||||
oldLines: fileContent,
|
||||
newLines: [],
|
||||
startLine: 1,
|
||||
})
|
||||
|
||||
if (!confirmed) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
"File deletion cancelled by user",
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
await fs.unlink(absolutePath)
|
||||
|
||||
await ctx.storage.deleteFile(relativePath)
|
||||
await ctx.storage.deleteAST(relativePath)
|
||||
await ctx.storage.deleteMeta(relativePath)
|
||||
|
||||
const result: DeleteFileResult = {
|
||||
path: relativePath,
|
||||
deleted: true,
|
||||
}
|
||||
|
||||
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if file exists.
|
||||
*/
|
||||
private async fileExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
const stats = await fs.stat(filePath)
|
||||
return stats.isFile()
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file content for diff display.
|
||||
*/
|
||||
private async getFileContent(
|
||||
absolutePath: string,
|
||||
relativePath: string,
|
||||
ctx: ToolContext,
|
||||
): Promise<string[]> {
|
||||
const fileData = await ctx.storage.getFile(relativePath)
|
||||
if (fileData) {
|
||||
return fileData.lines
|
||||
}
|
||||
|
||||
const content = await fs.readFile(absolutePath, "utf-8")
|
||||
return content.split("\n")
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/tools/edit/EditLinesTool.ts (new file, 227 lines)
@@ -0,0 +1,227 @@
|
||||
import { promises as fs } from "node:fs"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import { createFileData } from "../../../domain/value-objects/FileData.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
import { hashLines } from "../../../shared/utils/hash.js"
|
||||
import { PathValidator } from "../../security/PathValidator.js"
|
||||
|
||||
/**
|
||||
* Result data from edit_lines tool.
|
||||
*/
|
||||
export interface EditLinesResult {
|
||||
path: string
|
||||
startLine: number
|
||||
endLine: number
|
||||
linesReplaced: number
|
||||
linesInserted: number
|
||||
totalLines: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool for editing specific lines in a file.
|
||||
* Replaces lines from start to end with new content.
|
||||
* Requires user confirmation before applying changes.
|
||||
*/
|
||||
export class EditLinesTool implements ITool {
|
||||
readonly name = "edit_lines"
|
||||
readonly description =
|
||||
"Replace lines in a file. Replaces lines from start to end (inclusive) with new content. " +
|
||||
"Requires confirmation before applying changes."
|
||||
readonly parameters: ToolParameterSchema[] = [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "start",
|
||||
type: "number",
|
||||
description: "Start line number (1-based, inclusive)",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "end",
|
||||
type: "number",
|
||||
description: "End line number (1-based, inclusive)",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "content",
|
||||
type: "string",
|
||||
description: "New content to insert (can be multi-line)",
|
||||
required: true,
|
||||
},
|
||||
]
|
||||
readonly requiresConfirmation = true
|
||||
readonly category = "edit" as const
|
||||
|
||||
validateParams(params: Record<string, unknown>): string | null {
|
||||
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||
return "Parameter 'path' is required and must be a non-empty string"
|
||||
}
|
||||
|
||||
if (typeof params.start !== "number" || !Number.isInteger(params.start)) {
|
||||
return "Parameter 'start' is required and must be an integer"
|
||||
}
|
||||
if (params.start < 1) {
|
||||
return "Parameter 'start' must be >= 1"
|
||||
}
|
||||
|
||||
if (typeof params.end !== "number" || !Number.isInteger(params.end)) {
|
||||
return "Parameter 'end' is required and must be an integer"
|
||||
}
|
||||
if (params.end < 1) {
|
||||
return "Parameter 'end' must be >= 1"
|
||||
}
|
||||
|
||||
if (params.start > params.end) {
|
||||
return "Parameter 'start' must be <= 'end'"
|
||||
}
|
||||
|
||||
if (typeof params.content !== "string") {
|
||||
return "Parameter 'content' is required and must be a string"
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||
const startTime = Date.now()
|
||||
const callId = `${this.name}-${String(startTime)}`
|
||||
|
||||
const inputPath = params.path as string
|
||||
const startLine = params.start as number
|
||||
const endLine = params.end as number
|
||||
const newContent = params.content as string
|
||||
|
||||
const pathValidator = new PathValidator(ctx.projectRoot)
|
||||
|
||||
let absolutePath: string
|
||||
let relativePath: string
|
||||
try {
|
||||
;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
|
||||
try {
|
||||
const currentLines = await this.getCurrentLines(absolutePath, relativePath, ctx)
|
||||
const totalLines = currentLines.length
|
||||
|
||||
if (startLine > totalLines) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
`Start line ${String(startLine)} exceeds file length (${String(totalLines)} lines)`,
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
const adjustedEnd = Math.min(endLine, totalLines)
|
||||
const conflictCheck = await this.checkHashConflict(relativePath, currentLines, ctx)
|
||||
if (conflictCheck) {
|
||||
return createErrorResult(callId, conflictCheck, Date.now() - startTime)
|
||||
}
|
||||
|
||||
const oldLines = currentLines.slice(startLine - 1, adjustedEnd)
|
||||
const newLines = newContent.split("\n")
|
||||
|
||||
const confirmed = await ctx.requestConfirmation(
|
||||
`Replace lines ${String(startLine)}-${String(adjustedEnd)} in ${relativePath}`,
|
||||
{
|
||||
filePath: relativePath,
|
||||
oldLines,
|
||||
newLines,
|
||||
startLine,
|
||||
},
|
||||
)
|
||||
|
||||
if (!confirmed) {
|
||||
return createErrorResult(callId, "Edit cancelled by user", Date.now() - startTime)
|
||||
}
|
||||
|
||||
const updatedLines = [
|
||||
...currentLines.slice(0, startLine - 1),
|
||||
...newLines,
|
||||
...currentLines.slice(adjustedEnd),
|
||||
]
|
||||
|
||||
await this.applyChanges(absolutePath, relativePath, updatedLines, ctx)
|
||||
|
||||
const result: EditLinesResult = {
|
||||
path: relativePath,
|
||||
startLine,
|
||||
endLine: adjustedEnd,
|
||||
linesReplaced: adjustedEnd - startLine + 1,
|
||||
linesInserted: newLines.length,
|
||||
totalLines: updatedLines.length,
|
||||
}
|
||||
|
||||
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current file lines from storage or filesystem.
|
||||
*/
|
||||
private async getCurrentLines(
|
||||
absolutePath: string,
|
||||
relativePath: string,
|
||||
ctx: ToolContext,
|
||||
): Promise<string[]> {
|
||||
const fileData = await ctx.storage.getFile(relativePath)
|
||||
if (fileData) {
|
||||
return fileData.lines
|
||||
}
|
||||
|
||||
const content = await fs.readFile(absolutePath, "utf-8")
|
||||
return content.split("\n")
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if file has changed since it was indexed.
|
||||
* Returns error message if conflict detected, null otherwise.
|
||||
*/
|
||||
private async checkHashConflict(
|
||||
relativePath: string,
|
||||
currentLines: string[],
|
||||
ctx: ToolContext,
|
||||
): Promise<string | null> {
|
||||
const storedFile = await ctx.storage.getFile(relativePath)
|
||||
if (!storedFile) {
|
||||
return null
|
||||
}
|
||||
|
||||
const currentHash = hashLines(currentLines)
|
||||
if (storedFile.hash !== currentHash) {
|
||||
return "File has been modified externally. Please refresh the file before editing."
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply changes to filesystem and storage.
|
||||
*/
|
||||
private async applyChanges(
|
||||
absolutePath: string,
|
||||
relativePath: string,
|
||||
lines: string[],
|
||||
ctx: ToolContext,
|
||||
): Promise<void> {
|
||||
const content = lines.join("\n")
|
||||
await fs.writeFile(absolutePath, content, "utf-8")
|
||||
|
||||
const stats = await fs.stat(absolutePath)
|
||||
const fileData = createFileData(lines, hashLines(lines), stats.size, stats.mtimeMs)
|
||||
await ctx.storage.setFile(relativePath, fileData)
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/tools/edit/index.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
// Edit tools exports
export { EditLinesTool, type EditLinesResult } from "./EditLinesTool.js"
export { CreateFileTool, type CreateFileResult } from "./CreateFileTool.js"
export { DeleteFileTool, type DeleteFileResult } from "./DeleteFileTool.js"
packages/ipuaro/src/infrastructure/tools/git/GitCommitTool.ts (new file, 155 lines)
@@ -0,0 +1,155 @@
|
||||
import { type CommitResult, type SimpleGit, simpleGit } from "simple-git"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
|
||||
/**
|
||||
* Author information.
|
||||
*/
|
||||
export interface CommitAuthor {
|
||||
name: string
|
||||
email: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Result data from git_commit tool.
|
||||
*/
|
||||
export interface GitCommitResult {
|
||||
/** Commit hash */
|
||||
hash: string
|
||||
/** Current branch */
|
||||
branch: string
|
||||
/** Commit message */
|
||||
message: string
|
||||
/** Number of files changed */
|
||||
filesChanged: number
|
||||
/** Number of insertions */
|
||||
insertions: number
|
||||
/** Number of deletions */
|
||||
deletions: number
|
||||
/** Author information */
|
||||
author: CommitAuthor | null
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool for creating git commits.
|
||||
* Requires confirmation before execution.
|
||||
*/
|
||||
export class GitCommitTool implements ITool {
|
||||
readonly name = "git_commit"
|
||||
readonly description =
|
||||
"Create a git commit with the specified message. " +
|
||||
"Will ask for confirmation. Optionally stage specific files first."
|
||||
readonly parameters: ToolParameterSchema[] = [
|
||||
{
|
||||
name: "message",
|
||||
type: "string",
|
||||
description: "Commit message",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "files",
|
||||
type: "array",
|
||||
description: "Files to stage before commit (optional, defaults to all staged)",
|
||||
required: false,
|
||||
},
|
||||
]
|
||||
readonly requiresConfirmation = true
|
||||
readonly category = "git" as const
|
||||
|
||||
private readonly gitFactory: (basePath: string) => SimpleGit
|
||||
|
||||
constructor(gitFactory?: (basePath: string) => SimpleGit) {
|
||||
this.gitFactory = gitFactory ?? ((basePath: string) => simpleGit(basePath))
|
||||
}
|
||||
|
||||
validateParams(params: Record<string, unknown>): string | null {
|
||||
if (params.message === undefined) {
|
||||
return "Parameter 'message' is required"
|
||||
}
|
||||
if (typeof params.message !== "string") {
|
||||
return "Parameter 'message' must be a string"
|
||||
}
|
||||
if (params.message.trim() === "") {
|
||||
return "Parameter 'message' cannot be empty"
|
||||
}
|
||||
if (params.files !== undefined) {
|
||||
if (!Array.isArray(params.files)) {
|
||||
return "Parameter 'files' must be an array"
|
||||
}
|
||||
for (const file of params.files) {
|
||||
if (typeof file !== "string") {
|
||||
return "Parameter 'files' must be an array of strings"
|
||||
}
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||
const startTime = Date.now()
|
||||
const callId = `${this.name}-${String(startTime)}`
|
||||
|
||||
const message = params.message as string
|
||||
const files = params.files as string[] | undefined
|
||||
|
||||
try {
|
||||
const git = this.gitFactory(ctx.projectRoot)
|
||||
|
||||
const isRepo = await git.checkIsRepo()
|
||||
if (!isRepo) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
"Not a git repository. Initialize with 'git init' first.",
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
if (files && files.length > 0) {
|
||||
await git.add(files)
|
||||
}
|
||||
|
||||
const status = await git.status()
|
||||
if (status.staged.length === 0 && (!files || files.length === 0)) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
"Nothing to commit. Stage files first with 'git add' or provide 'files' parameter.",
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
const commitSummary = `Committing ${String(status.staged.length)} file(s): ${message}`
|
||||
const confirmed = await ctx.requestConfirmation(commitSummary)
|
||||
|
||||
if (!confirmed) {
|
||||
return createErrorResult(callId, "Commit cancelled by user", Date.now() - startTime)
|
||||
}
|
||||
|
||||
const commitResult = await git.commit(message)
|
||||
const result = this.formatCommitResult(commitResult, message)
|
||||
|
||||
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||
} catch (error) {
|
||||
const message_ = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message_, Date.now() - startTime)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Format simple-git CommitResult into our result structure.
|
||||
*/
|
||||
private formatCommitResult(commit: CommitResult, message: string): GitCommitResult {
|
||||
return {
|
||||
hash: commit.commit,
|
||||
branch: commit.branch,
|
||||
message,
|
||||
filesChanged: commit.summary.changes,
|
||||
insertions: commit.summary.insertions,
|
||||
deletions: commit.summary.deletions,
|
||||
author: commit.author ?? null,
|
||||
}
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/tools/git/GitDiffTool.ts (new file, 155 lines)
@@ -0,0 +1,155 @@
|
||||
import { simpleGit, type SimpleGit } from "simple-git"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
|
||||
/**
|
||||
* A single file diff entry.
|
||||
*/
|
||||
export interface DiffEntry {
|
||||
/** File path */
|
||||
file: string
|
||||
/** Number of insertions */
|
||||
insertions: number
|
||||
/** Number of deletions */
|
||||
deletions: number
|
||||
/** Whether the file is binary */
|
||||
binary: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Result data from git_diff tool.
|
||||
*/
|
||||
export interface GitDiffResult {
|
||||
/** Whether showing staged or all changes */
|
||||
staged: boolean
|
||||
/** Path filter applied (null if all files) */
|
||||
pathFilter: string | null
|
||||
/** Whether there are any changes */
|
||||
hasChanges: boolean
|
||||
/** Summary of changes */
|
||||
summary: {
|
||||
/** Number of files changed */
|
||||
filesChanged: number
|
||||
/** Total insertions */
|
||||
insertions: number
|
||||
/** Total deletions */
|
||||
deletions: number
|
||||
}
|
||||
/** List of changed files */
|
||||
files: DiffEntry[]
|
||||
/** Full diff text */
|
||||
diff: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool for getting uncommitted git changes (diff).
|
||||
* Shows what has changed but not yet committed.
|
||||
*/
|
||||
export class GitDiffTool implements ITool {
|
||||
readonly name = "git_diff"
|
||||
readonly description =
|
||||
"Get uncommitted changes (diff). " + "Shows what has changed but not yet committed."
|
||||
readonly parameters: ToolParameterSchema[] = [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "Limit diff to specific file or directory",
|
||||
required: false,
|
||||
},
|
||||
{
|
||||
name: "staged",
|
||||
type: "boolean",
|
||||
description: "Show only staged changes (default: false, shows all)",
|
||||
required: false,
|
||||
},
|
||||
]
|
||||
readonly requiresConfirmation = false
|
||||
readonly category = "git" as const
|
||||
|
||||
private readonly gitFactory: (basePath: string) => SimpleGit
|
||||
|
||||
constructor(gitFactory?: (basePath: string) => SimpleGit) {
|
||||
this.gitFactory = gitFactory ?? ((basePath: string) => simpleGit(basePath))
|
||||
}
|
||||
|
||||
validateParams(params: Record<string, unknown>): string | null {
|
||||
if (params.path !== undefined && typeof params.path !== "string") {
|
||||
return "Parameter 'path' must be a string"
|
||||
}
|
||||
if (params.staged !== undefined && typeof params.staged !== "boolean") {
|
||||
return "Parameter 'staged' must be a boolean"
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||
const startTime = Date.now()
|
||||
const callId = `${this.name}-${String(startTime)}`
|
||||
|
||||
const pathFilter = (params.path as string) ?? null
|
||||
const staged = (params.staged as boolean) ?? false
|
||||
|
||||
try {
|
||||
const git = this.gitFactory(ctx.projectRoot)
|
||||
|
||||
const isRepo = await git.checkIsRepo()
|
||||
if (!isRepo) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
"Not a git repository. Initialize with 'git init' first.",
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
const diffArgs = this.buildDiffArgs(staged, pathFilter)
|
||||
const diffSummary = await git.diffSummary(diffArgs)
|
||||
const diffText = await git.diff(diffArgs)
|
||||
|
||||
const files: DiffEntry[] = diffSummary.files.map((f) => ({
|
||||
file: f.file,
|
||||
insertions: "insertions" in f ? f.insertions : 0,
|
||||
deletions: "deletions" in f ? f.deletions : 0,
|
||||
binary: f.binary,
|
||||
}))
|
||||
|
||||
const result: GitDiffResult = {
|
||||
staged,
|
||||
pathFilter,
|
||||
hasChanges: diffSummary.files.length > 0,
|
||||
summary: {
|
||||
filesChanged: diffSummary.files.length,
|
||||
insertions: diffSummary.insertions,
|
||||
deletions: diffSummary.deletions,
|
||||
},
|
||||
files,
|
||||
diff: diffText,
|
||||
}
|
||||
|
||||
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build diff arguments array.
|
||||
*/
|
||||
private buildDiffArgs(staged: boolean, pathFilter: string | null): string[] {
|
||||
const args: string[] = []
|
||||
|
||||
if (staged) {
|
||||
args.push("--cached")
|
||||
}
|
||||
|
||||
if (pathFilter) {
|
||||
args.push("--", pathFilter)
|
||||
}
|
||||
|
||||
return args
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/tools/git/GitStatusTool.ts (new file, 129 lines)
@@ -0,0 +1,129 @@
|
||||
import { simpleGit, type SimpleGit, type StatusResult } from "simple-git"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
|
||||
/**
|
||||
* File status entry in git status.
|
||||
*/
|
||||
export interface FileStatusEntry {
|
||||
/** Relative file path */
|
||||
path: string
|
||||
/** Working directory status (modified, deleted, etc.) */
|
||||
workingDir: string
|
||||
/** Index/staging status */
|
||||
index: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Result data from git_status tool.
|
||||
*/
|
||||
export interface GitStatusResult {
|
||||
/** Current branch name */
|
||||
branch: string
|
||||
/** Tracking branch (e.g., origin/main) */
|
||||
tracking: string | null
|
||||
/** Number of commits ahead of tracking */
|
||||
ahead: number
|
||||
/** Number of commits behind tracking */
|
||||
behind: number
|
||||
/** Files staged for commit */
|
||||
staged: FileStatusEntry[]
|
||||
/** Modified files not staged */
|
||||
modified: FileStatusEntry[]
|
||||
/** Untracked files */
|
||||
untracked: string[]
|
||||
/** Files with merge conflicts */
|
||||
conflicted: string[]
|
||||
/** Whether working directory is clean */
|
||||
isClean: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool for getting git repository status.
|
||||
* Returns branch info, staged/modified/untracked files.
|
||||
*/
|
||||
export class GitStatusTool implements ITool {
|
||||
readonly name = "git_status"
|
||||
readonly description =
|
||||
"Get current git repository status. " +
|
||||
"Returns branch name, staged files, modified files, and untracked files."
|
||||
readonly parameters: ToolParameterSchema[] = []
|
||||
readonly requiresConfirmation = false
|
||||
readonly category = "git" as const
|
||||
|
||||
private readonly gitFactory: (basePath: string) => SimpleGit
|
||||
|
||||
constructor(gitFactory?: (basePath: string) => SimpleGit) {
|
||||
this.gitFactory = gitFactory ?? ((basePath: string) => simpleGit(basePath))
|
||||
}
|
||||
|
||||
validateParams(_params: Record<string, unknown>): string | null {
|
||||
return null
|
||||
}
|
||||
|
||||
async execute(_params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||
const startTime = Date.now()
|
||||
const callId = `${this.name}-${String(startTime)}`
|
||||
|
||||
try {
|
||||
const git = this.gitFactory(ctx.projectRoot)
|
||||
|
||||
const isRepo = await git.checkIsRepo()
|
||||
if (!isRepo) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
"Not a git repository. Initialize with 'git init' first.",
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
const status = await git.status()
|
||||
const result = this.formatStatus(status)
|
||||
|
||||
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Format simple-git StatusResult into our result structure.
|
||||
*/
|
||||
private formatStatus(status: StatusResult): GitStatusResult {
|
||||
const staged: FileStatusEntry[] = []
|
||||
const modified: FileStatusEntry[] = []
|
||||
|
||||
for (const file of status.files) {
|
||||
const entry: FileStatusEntry = {
|
||||
path: file.path,
|
||||
workingDir: file.working_dir,
|
||||
index: file.index,
|
||||
}
|
||||
|
||||
if (file.index !== " " && file.index !== "?") {
|
||||
staged.push(entry)
|
||||
}
|
||||
|
||||
if (file.working_dir !== " " && file.working_dir !== "?") {
|
||||
modified.push(entry)
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
branch: status.current ?? "HEAD (detached)",
|
||||
tracking: status.tracking ?? null,
|
||||
ahead: status.ahead,
|
||||
behind: status.behind,
|
||||
staged,
|
||||
modified,
|
||||
untracked: status.not_added,
|
||||
conflicted: status.conflicted,
|
||||
isClean: status.isClean(),
|
||||
}
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/tools/git/index.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
// Git tools exports
export { GitStatusTool, type GitStatusResult, type FileStatusEntry } from "./GitStatusTool.js"

export { GitDiffTool, type GitDiffResult, type DiffEntry } from "./GitDiffTool.js"

export { GitCommitTool, type GitCommitResult, type CommitAuthor } from "./GitCommitTool.js"
packages/ipuaro/src/infrastructure/tools/index.ts (new file, 75 lines)
@@ -0,0 +1,75 @@
// Tools module exports
export { ToolRegistry } from "./registry.js"

// Read tools
export { GetLinesTool, type GetLinesResult } from "./read/GetLinesTool.js"
export { GetFunctionTool, type GetFunctionResult } from "./read/GetFunctionTool.js"
export { GetClassTool, type GetClassResult } from "./read/GetClassTool.js"
export {
    GetStructureTool,
    type GetStructureResult,
    type TreeNode,
} from "./read/GetStructureTool.js"

// Edit tools
export { EditLinesTool, type EditLinesResult } from "./edit/EditLinesTool.js"
export { CreateFileTool, type CreateFileResult } from "./edit/CreateFileTool.js"
export { DeleteFileTool, type DeleteFileResult } from "./edit/DeleteFileTool.js"

// Search tools
export {
    FindReferencesTool,
    type FindReferencesResult,
    type SymbolReference,
} from "./search/FindReferencesTool.js"
export {
    FindDefinitionTool,
    type FindDefinitionResult,
    type DefinitionLocation,
} from "./search/FindDefinitionTool.js"

// Analysis tools
export {
    GetDependenciesTool,
    type GetDependenciesResult,
    type DependencyEntry,
} from "./analysis/GetDependenciesTool.js"

export {
    GetDependentsTool,
    type GetDependentsResult,
    type DependentEntry,
} from "./analysis/GetDependentsTool.js"

export {
    GetComplexityTool,
    type GetComplexityResult,
    type ComplexityEntry,
} from "./analysis/GetComplexityTool.js"

export {
    GetTodosTool,
    type GetTodosResult,
    type TodoEntry,
    type TodoType,
} from "./analysis/GetTodosTool.js"

// Git tools
export { GitStatusTool, type GitStatusResult, type FileStatusEntry } from "./git/GitStatusTool.js"

export { GitDiffTool, type GitDiffResult, type DiffEntry } from "./git/GitDiffTool.js"

export { GitCommitTool, type GitCommitResult, type CommitAuthor } from "./git/GitCommitTool.js"

// Run tools
export {
    CommandSecurity,
    DEFAULT_BLACKLIST,
    DEFAULT_WHITELIST,
    type CommandClassification,
    type SecurityCheckResult,
} from "./run/CommandSecurity.js"

export { RunCommandTool, type RunCommandResult } from "./run/RunCommandTool.js"

export { RunTestsTool, type RunTestsResult, type TestRunner } from "./run/RunTestsTool.js"
packages/ipuaro/src/infrastructure/tools/read/GetClassTool.ts (new file, 166 lines)
@@ -0,0 +1,166 @@
|
||||
import { promises as fs } from "node:fs"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import type { ClassInfo } from "../../../domain/value-objects/FileAST.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
import { PathValidator } from "../../security/PathValidator.js"
|
||||
|
||||
/**
|
||||
* Result data from get_class tool.
|
||||
*/
|
||||
export interface GetClassResult {
|
||||
path: string
|
||||
name: string
|
||||
startLine: number
|
||||
endLine: number
|
||||
isExported: boolean
|
||||
isAbstract: boolean
|
||||
extends?: string
|
||||
implements: string[]
|
||||
methods: string[]
|
||||
properties: string[]
|
||||
content: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool for retrieving a class's source code by name.
|
||||
* Uses AST to find exact line range.
|
||||
*/
|
||||
export class GetClassTool implements ITool {
|
||||
readonly name = "get_class"
|
||||
readonly description =
|
||||
"Get a class's source code by name. Uses AST to find exact line range. " +
|
||||
"Returns the class code with line numbers."
|
||||
readonly parameters: ToolParameterSchema[] = [
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "File path relative to project root",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "name",
|
||||
type: "string",
|
||||
description: "Class name to retrieve",
|
||||
required: true,
|
||||
},
|
||||
]
|
||||
readonly requiresConfirmation = false
|
||||
readonly category = "read" as const
|
||||
|
||||
validateParams(params: Record<string, unknown>): string | null {
|
||||
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||
return "Parameter 'path' is required and must be a non-empty string"
|
||||
}
|
||||
|
||||
if (typeof params.name !== "string" || params.name.trim() === "") {
|
||||
return "Parameter 'name' is required and must be a non-empty string"
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||
const startTime = Date.now()
|
||||
const callId = `${this.name}-${String(startTime)}`
|
||||
|
||||
const inputPath = params.path as string
|
||||
const className = params.name as string
|
||||
const pathValidator = new PathValidator(ctx.projectRoot)
|
||||
|
||||
let absolutePath: string
|
||||
let relativePath: string
|
||||
try {
|
||||
;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
|
||||
try {
|
||||
const ast = await ctx.storage.getAST(relativePath)
|
||||
if (!ast) {
|
||||
return createErrorResult(
|
||||
callId,
|
||||
`AST not found for "${relativePath}". File may not be indexed.`,
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
const classInfo = this.findClass(ast.classes, className)
|
||||
if (!classInfo) {
|
||||
const available = ast.classes.map((c) => c.name).join(", ") || "none"
|
||||
return createErrorResult(
|
||||
callId,
|
||||
`Class "${className}" not found in "${relativePath}". Available: ${available}`,
|
||||
Date.now() - startTime,
|
||||
)
|
||||
}
|
||||
|
||||
const lines = await this.getFileLines(absolutePath, relativePath, ctx)
|
||||
const classLines = lines.slice(classInfo.lineStart - 1, classInfo.lineEnd)
|
||||
const content = this.formatLinesWithNumbers(classLines, classInfo.lineStart)
|
||||
|
||||
const result: GetClassResult = {
|
||||
path: relativePath,
|
||||
name: classInfo.name,
|
||||
startLine: classInfo.lineStart,
|
||||
endLine: classInfo.lineEnd,
|
||||
isExported: classInfo.isExported,
|
||||
isAbstract: classInfo.isAbstract,
|
||||
extends: classInfo.extends,
|
||||
implements: classInfo.implements,
|
||||
methods: classInfo.methods.map((m) => m.name),
|
||||
properties: classInfo.properties.map((p) => p.name),
|
||||
content,
|
||||
}
|
||||
|
||||
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error)
|
||||
return createErrorResult(callId, message, Date.now() - startTime)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find class by name in AST.
|
||||
*/
|
||||
private findClass(classes: ClassInfo[], name: string): ClassInfo | undefined {
|
||||
return classes.find((c) => c.name === name)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file lines from storage or filesystem.
|
||||
*/
|
||||
private async getFileLines(
|
||||
absolutePath: string,
|
||||
relativePath: string,
|
||||
ctx: ToolContext,
|
||||
): Promise<string[]> {
|
||||
const fileData = await ctx.storage.getFile(relativePath)
|
||||
if (fileData) {
|
||||
return fileData.lines
|
||||
}
|
||||
|
||||
const content = await fs.readFile(absolutePath, "utf-8")
|
||||
return content.split("\n")
|
||||
}
|
||||
|
||||
/**
|
||||
* Format lines with line numbers.
|
||||
*/
|
||||
private formatLinesWithNumbers(lines: string[], startLine: number): string {
|
||||
const maxLineNum = startLine + lines.length - 1
|
||||
const padWidth = String(maxLineNum).length
|
||||
|
||||
return lines
|
||||
.map((line, index) => {
|
||||
const lineNum = String(startLine + index).padStart(padWidth, " ")
|
||||
return `${lineNum}│${line}`
|
||||
})
|
||||
.join("\n")
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/tools/read/GetFunctionTool.ts (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
import { promises as fs } from "node:fs"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import type { FunctionInfo } from "../../../domain/value-objects/FileAST.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
import { PathValidator } from "../../security/PathValidator.js"
/**
 * Result data from get_function tool.
 */
export interface GetFunctionResult {
    path: string
    name: string
    startLine: number
    endLine: number
    isAsync: boolean
    isExported: boolean
    params: string[]
    returnType?: string
    content: string
}

/**
 * Tool for retrieving a function's source code by name.
 * Uses AST to find exact line range.
 */
export class GetFunctionTool implements ITool {
    readonly name = "get_function"
    readonly description =
        "Get a function's source code by name. Uses AST to find exact line range. " +
        "Returns the function code with line numbers."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "name",
            type: "string",
            description: "Function name to retrieve",
            required: true,
        },
    ]
    readonly requiresConfirmation = false
    readonly category = "read" as const

    validateParams(params: Record<string, unknown>): string | null {
        if (typeof params.path !== "string" || params.path.trim() === "") {
            return "Parameter 'path' is required and must be a non-empty string"
        }

        if (typeof params.name !== "string" || params.name.trim() === "") {
            return "Parameter 'name' is required and must be a non-empty string"
        }

        return null
    }

    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const inputPath = params.path as string
        const functionName = params.name as string
        const pathValidator = new PathValidator(ctx.projectRoot)

        let absolutePath: string
        let relativePath: string
        try {
            ;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }

        try {
            const ast = await ctx.storage.getAST(relativePath)
            if (!ast) {
                return createErrorResult(
                    callId,
                    `AST not found for "${relativePath}". File may not be indexed.`,
                    Date.now() - startTime,
                )
            }

            const functionInfo = this.findFunction(ast.functions, functionName)
            if (!functionInfo) {
                const available = ast.functions.map((f) => f.name).join(", ") || "none"
                return createErrorResult(
                    callId,
                    `Function "${functionName}" not found in "${relativePath}". Available: ${available}`,
                    Date.now() - startTime,
                )
            }

            const lines = await this.getFileLines(absolutePath, relativePath, ctx)
            const functionLines = lines.slice(functionInfo.lineStart - 1, functionInfo.lineEnd)
            const content = this.formatLinesWithNumbers(functionLines, functionInfo.lineStart)

            const result: GetFunctionResult = {
                path: relativePath,
                name: functionInfo.name,
                startLine: functionInfo.lineStart,
                endLine: functionInfo.lineEnd,
                isAsync: functionInfo.isAsync,
                isExported: functionInfo.isExported,
                params: functionInfo.params.map((p) => p.name),
                returnType: functionInfo.returnType,
                content,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Find function by name in AST.
     */
    private findFunction(functions: FunctionInfo[], name: string): FunctionInfo | undefined {
        return functions.find((f) => f.name === name)
    }

    /**
     * Get file lines from storage or filesystem.
     */
    private async getFileLines(
        absolutePath: string,
        relativePath: string,
        ctx: ToolContext,
    ): Promise<string[]> {
        const fileData = await ctx.storage.getFile(relativePath)
        if (fileData) {
            return fileData.lines
        }

        const content = await fs.readFile(absolutePath, "utf-8")
        return content.split("\n")
    }

    /**
     * Format lines with line numbers.
     */
    private formatLinesWithNumbers(lines: string[], startLine: number): string {
        const maxLineNum = startLine + lines.length - 1
        const padWidth = String(maxLineNum).length

        return lines
            .map((line, index) => {
                const lineNum = String(startLine + index).padStart(padWidth, " ")
                return `${lineNum}│${line}`
            })
            .join("\n")
    }
}
159 packages/ipuaro/src/infrastructure/tools/read/GetLinesTool.ts Normal file
@@ -0,0 +1,159 @@
import { promises as fs } from "node:fs"
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
import {
    createErrorResult,
    createSuccessResult,
    type ToolResult,
} from "../../../domain/value-objects/ToolResult.js"
import { PathValidator } from "../../security/PathValidator.js"

/**
 * Result data from get_lines tool.
 */
export interface GetLinesResult {
    path: string
    startLine: number
    endLine: number
    totalLines: number
    content: string
}

/**
 * Tool for reading specific lines from a file.
 * Returns content with line numbers.
 */
export class GetLinesTool implements ITool {
    readonly name = "get_lines"
    readonly description =
        "Get specific lines from a file. Returns the content with line numbers. " +
        "If no range is specified, returns the entire file."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "start",
            type: "number",
            description: "Start line number (1-based, inclusive)",
            required: false,
        },
        {
            name: "end",
            type: "number",
            description: "End line number (1-based, inclusive)",
            required: false,
        },
    ]
    readonly requiresConfirmation = false
    readonly category = "read" as const

    validateParams(params: Record<string, unknown>): string | null {
        if (typeof params.path !== "string" || params.path.trim() === "") {
            return "Parameter 'path' is required and must be a non-empty string"
        }

        if (params.start !== undefined) {
            if (typeof params.start !== "number" || !Number.isInteger(params.start)) {
                return "Parameter 'start' must be an integer"
            }
            if (params.start < 1) {
                return "Parameter 'start' must be >= 1"
            }
        }

        if (params.end !== undefined) {
            if (typeof params.end !== "number" || !Number.isInteger(params.end)) {
                return "Parameter 'end' must be an integer"
            }
            if (params.end < 1) {
                return "Parameter 'end' must be >= 1"
            }
        }

        if (params.start !== undefined && params.end !== undefined && params.start > params.end) {
            return "Parameter 'start' must be <= 'end'"
        }

        return null
    }

    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const inputPath = params.path as string
        const pathValidator = new PathValidator(ctx.projectRoot)

        let absolutePath: string
        let relativePath: string
        try {
            ;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }

        try {
            const lines = await this.getFileLines(absolutePath, relativePath, ctx)
            const totalLines = lines.length

            let startLine = (params.start as number | undefined) ?? 1
            let endLine = (params.end as number | undefined) ?? totalLines

            startLine = Math.max(1, Math.min(startLine, totalLines))
            endLine = Math.max(startLine, Math.min(endLine, totalLines))

            const selectedLines = lines.slice(startLine - 1, endLine)
            const content = this.formatLinesWithNumbers(selectedLines, startLine)

            const result: GetLinesResult = {
                path: relativePath,
                startLine,
                endLine,
                totalLines,
                content,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Get file lines from storage or filesystem.
     */
    private async getFileLines(
        absolutePath: string,
        relativePath: string,
        ctx: ToolContext,
    ): Promise<string[]> {
        const fileData = await ctx.storage.getFile(relativePath)
        if (fileData) {
            return fileData.lines
        }

        const content = await fs.readFile(absolutePath, "utf-8")
        return content.split("\n")
    }

    /**
     * Format lines with line numbers.
     * Example: " 1│const x = 1"
     */
    private formatLinesWithNumbers(lines: string[], startLine: number): string {
        const maxLineNum = startLine + lines.length - 1
        const padWidth = String(maxLineNum).length

        return lines
            .map((line, index) => {
                const lineNum = String(startLine + index).padStart(padWidth, " ")
                return `${lineNum}│${line}`
            })
            .join("\n")
    }
}
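A minimal usage sketch for the read tools (not part of this commit): the stubbed `ctx` object and the demo file contents are assumptions for illustration, and it further assumes `PathValidator.resolveOrThrow` accepts the given path; only the tool API itself comes from the file above.

```ts
import type { ToolContext } from "../../../domain/services/ITool.js"
import { GetLinesTool } from "./GetLinesTool.js"

// Hypothetical minimal context: only the pieces get_lines actually touches.
const ctx = {
    projectRoot: "/tmp/demo-project",
    storage: {
        getFile: async () => ({ lines: ["const a = 1", "const b = 2", "const c = 3"] }),
    },
    requestConfirmation: async () => true,
} as unknown as ToolContext

const tool = new GetLinesTool()
if (tool.validateParams({ path: "src/demo.ts", start: 2 }) === null) {
    // On success the result carries content such as "2│const b = 2\n3│const c = 3".
    const result = await tool.execute({ path: "src/demo.ts", start: 2 }, ctx)
    console.log(result)
}
```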
@@ -0,0 +1,207 @@
import { promises as fs } from "node:fs"
import * as path from "node:path"
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
import {
    createErrorResult,
    createSuccessResult,
    type ToolResult,
} from "../../../domain/value-objects/ToolResult.js"
import { DEFAULT_IGNORE_PATTERNS } from "../../../domain/constants/index.js"
import { PathValidator } from "../../security/PathValidator.js"

/**
 * Tree node representing a file or directory.
 */
export interface TreeNode {
    name: string
    type: "file" | "directory"
    children?: TreeNode[]
}

/**
 * Result data from get_structure tool.
 */
export interface GetStructureResult {
    path: string
    tree: TreeNode
    content: string
    stats: {
        directories: number
        files: number
    }
}

/**
 * Tool for getting project directory structure as a tree.
 */
export class GetStructureTool implements ITool {
    readonly name = "get_structure"
    readonly description =
        "Get project directory structure as a tree. " +
        "If path is specified, shows structure of that subdirectory only."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "path",
            type: "string",
            description: "Subdirectory path relative to project root (optional, defaults to root)",
            required: false,
        },
        {
            name: "depth",
            type: "number",
            description: "Maximum depth to traverse (default: unlimited)",
            required: false,
        },
    ]
    readonly requiresConfirmation = false
    readonly category = "read" as const

    private readonly defaultIgnorePatterns = new Set([
        ...DEFAULT_IGNORE_PATTERNS,
        ".git",
        ".idea",
        ".vscode",
        "__pycache__",
        ".pytest_cache",
        ".nyc_output",
        "coverage",
    ])

    validateParams(params: Record<string, unknown>): string | null {
        if (params.path !== undefined) {
            if (typeof params.path !== "string") {
                return "Parameter 'path' must be a string"
            }
        }

        if (params.depth !== undefined) {
            if (typeof params.depth !== "number" || !Number.isInteger(params.depth)) {
                return "Parameter 'depth' must be an integer"
            }
            if (params.depth < 1) {
                return "Parameter 'depth' must be >= 1"
            }
        }

        return null
    }

    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const inputPath = (params.path as string | undefined) ?? "."
        const maxDepth = params.depth as number | undefined
        const pathValidator = new PathValidator(ctx.projectRoot)

        let absolutePath: string
        let relativePath: string
        try {
            ;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }

        try {
            const stat = await fs.stat(absolutePath)
            if (!stat.isDirectory()) {
                return createErrorResult(
                    callId,
                    `Path "${relativePath}" is not a directory`,
                    Date.now() - startTime,
                )
            }

            const stats = { directories: 0, files: 0 }
            const tree = await this.buildTree(absolutePath, maxDepth, 0, stats)
            const content = this.formatTree(tree)

            const result: GetStructureResult = {
                path: relativePath || ".",
                tree,
                content,
                stats,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Build tree structure recursively.
     */
    private async buildTree(
        dirPath: string,
        maxDepth: number | undefined,
        currentDepth: number,
        stats: { directories: number; files: number },
    ): Promise<TreeNode> {
        const name = path.basename(dirPath) || dirPath
        const node: TreeNode = { name, type: "directory", children: [] }
        stats.directories++

        if (maxDepth !== undefined && currentDepth >= maxDepth) {
            return node
        }

        const entries = await fs.readdir(dirPath, { withFileTypes: true })
        const sortedEntries = entries
            .filter((e) => !this.shouldIgnore(e.name))
            .sort((a, b) => {
                if (a.isDirectory() && !b.isDirectory()) {
                    return -1
                }
                if (!a.isDirectory() && b.isDirectory()) {
                    return 1
                }
                return a.name.localeCompare(b.name)
            })

        for (const entry of sortedEntries) {
            const entryPath = path.join(dirPath, entry.name)

            if (entry.isDirectory()) {
                const childNode = await this.buildTree(entryPath, maxDepth, currentDepth + 1, stats)
                node.children?.push(childNode)
            } else if (entry.isFile()) {
                node.children?.push({ name: entry.name, type: "file" })
                stats.files++
            }
        }

        return node
    }

    /**
     * Check if entry should be ignored.
     */
    private shouldIgnore(name: string): boolean {
        return this.defaultIgnorePatterns.has(name)
    }

    /**
     * Format tree as ASCII art.
     */
    private formatTree(node: TreeNode, prefix = "", isLast = true): string {
        const lines: string[] = []
        const connector = isLast ? "└── " : "├── "
        const icon = node.type === "directory" ? "📁 " : "📄 "

        lines.push(`${prefix}${connector}${icon}${node.name}`)

        if (node.children) {
            const childPrefix = prefix + (isLast ? "    " : "│   ")
            const childCount = node.children.length
            node.children.forEach((child, index) => {
                const childIsLast = index === childCount - 1
                lines.push(this.formatTree(child, childPrefix, childIsLast))
            })
        }

        return lines.join("\n")
    }
}
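For orientation, a sketch of what `formatTree` renders (not part of this commit; the directory names are made up and the exact indentation widths are reconstructed, since the viewer collapsed the whitespace in the prefix strings):

```ts
// Hypothetical tree, mirroring the TreeNode shape defined above.
const tree = {
    name: "src",
    type: "directory",
    children: [
        { name: "tools", type: "directory", children: [{ name: "registry.ts", type: "file" }] },
        { name: "index.ts", type: "file" },
    ],
}

// get_structure's `content` field would render it roughly as:
// └── 📁 src
//     ├── 📁 tools
//     │   └── 📄 registry.ts
//     └── 📄 index.ts
```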
185 packages/ipuaro/src/infrastructure/tools/registry.ts Normal file
@@ -0,0 +1,185 @@
import type { IToolRegistry } from "../../application/interfaces/IToolRegistry.js"
import type { ITool, ToolContext, ToolParameterSchema } from "../../domain/services/ITool.js"
import { createErrorResult, type ToolResult } from "../../domain/value-objects/ToolResult.js"
import { IpuaroError } from "../../shared/errors/IpuaroError.js"

/**
 * Tool registry implementation.
 * Manages registration and execution of tools.
 */
export class ToolRegistry implements IToolRegistry {
    private readonly tools = new Map<string, ITool>()

    /**
     * Register a tool.
     * @throws IpuaroError if tool with same name already registered
     */
    register(tool: ITool): void {
        if (this.tools.has(tool.name)) {
            throw IpuaroError.validation(`Tool "${tool.name}" is already registered`)
        }
        this.tools.set(tool.name, tool)
    }

    /**
     * Unregister a tool by name.
     * @returns true if tool was removed, false if not found
     */
    unregister(name: string): boolean {
        return this.tools.delete(name)
    }

    /**
     * Get tool by name.
     */
    get(name: string): ITool | undefined {
        return this.tools.get(name)
    }

    /**
     * Get all registered tools.
     */
    getAll(): ITool[] {
        return Array.from(this.tools.values())
    }

    /**
     * Get tools by category.
     */
    getByCategory(category: ITool["category"]): ITool[] {
        return this.getAll().filter((tool) => tool.category === category)
    }

    /**
     * Check if tool exists.
     */
    has(name: string): boolean {
        return this.tools.has(name)
    }

    /**
     * Get number of registered tools.
     */
    get size(): number {
        return this.tools.size
    }

    /**
     * Execute tool by name.
     * Returns an error result if the tool is not found, validation fails,
     * or the user declines a required confirmation.
     */
    async execute(
        name: string,
        params: Record<string, unknown>,
        ctx: ToolContext,
    ): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${name}-${String(startTime)}`

        const tool = this.tools.get(name)
        if (!tool) {
            return createErrorResult(callId, `Tool "${name}" not found`, Date.now() - startTime)
        }

        const validationError = tool.validateParams(params)
        if (validationError) {
            return createErrorResult(callId, validationError, Date.now() - startTime)
        }

        if (tool.requiresConfirmation) {
            const confirmed = await ctx.requestConfirmation(
                `Execute "${name}" with params: ${JSON.stringify(params)}`,
            )
            if (!confirmed) {
                return createErrorResult(callId, "User cancelled operation", Date.now() - startTime)
            }
        }

        try {
            const result = await tool.execute(params, ctx)
            return {
                ...result,
                callId,
            }
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Get tool definitions for LLM.
     * Converts ITool[] to LLM-compatible format.
     */
    getToolDefinitions(): {
        name: string
        description: string
        parameters: {
            type: "object"
            properties: Record<string, { type: string; description: string }>
            required: string[]
        }
    }[] {
        return this.getAll().map((tool) => ({
            name: tool.name,
            description: tool.description,
            parameters: this.convertParametersToSchema(tool.parameters),
        }))
    }

    /**
     * Convert tool parameters to JSON Schema format.
     */
    private convertParametersToSchema(params: ToolParameterSchema[]): {
        type: "object"
        properties: Record<string, { type: string; description: string }>
        required: string[]
    } {
        const properties: Record<string, { type: string; description: string }> = {}
        const required: string[] = []

        for (const param of params) {
            properties[param.name] = {
                type: param.type,
                description: param.description,
            }
            if (param.required) {
                required.push(param.name)
            }
        }

        return {
            type: "object",
            properties,
            required,
        }
    }

    /**
     * Clear all registered tools.
     */
    clear(): void {
        this.tools.clear()
    }

    /**
     * Get tool names.
     */
    getNames(): string[] {
        return Array.from(this.tools.keys())
    }

    /**
     * Get tools that require confirmation.
     */
    getConfirmationTools(): ITool[] {
        return this.getAll().filter((tool) => tool.requiresConfirmation)
    }

    /**
     * Get tools that don't require confirmation.
     */
    getSafeTools(): ITool[] {
        return this.getAll().filter((tool) => !tool.requiresConfirmation)
    }
}
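A minimal sketch of the registry in use (not part of this commit): register a tool, then inspect the JSON-Schema-shaped definition that `getToolDefinitions` prepares for the LLM. The relative import paths assume the file layout shown in this diff.

```ts
import { ToolRegistry } from "./registry.js"
import { GetLinesTool } from "./read/GetLinesTool.js"

const registry = new ToolRegistry()
registry.register(new GetLinesTool())

console.log(registry.getNames()) // ["get_lines"]
console.log(registry.getToolDefinitions()[0].parameters)
// {
//     type: "object",
//     properties: { path: {...}, start: {...}, end: {...} },
//     required: ["path"],
// }
```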
257 packages/ipuaro/src/infrastructure/tools/run/CommandSecurity.ts Normal file
@@ -0,0 +1,257 @@
/**
 * Command security classification.
 */
export type CommandClassification = "allowed" | "blocked" | "requires_confirmation"

/**
 * Result of command security check.
 */
export interface SecurityCheckResult {
    /** Classification of the command */
    classification: CommandClassification
    /** Reason for the classification */
    reason: string
}

/**
 * Dangerous commands that are always blocked.
 * These commands can cause data loss or security issues.
 */
export const DEFAULT_BLACKLIST: string[] = [
    // Destructive file operations
    "rm -rf",
    "rm -r",
    "rm -fr",
    "rmdir",
    // Dangerous git operations
    "git push --force",
    "git push -f",
    "git reset --hard",
    "git clean -fd",
    "git clean -f",
    // Publishing/deployment
    "npm publish",
    "yarn publish",
    "pnpm publish",
    // System commands
    "sudo",
    "su ",
    "chmod",
    "chown",
    // Network/download commands that could be dangerous
    "| sh",
    "| bash",
    // Environment manipulation
    "export ",
    "unset ",
    // Process control
    "kill -9",
    "killall",
    "pkill",
    // Disk operations (require exact command start)
    "mkfs",
    "fdisk",
    // Other dangerous
    ":(){ :|:& };:",
    "eval ",
]

/**
 * Safe commands that don't require confirmation.
 * Matched by first word (command name).
 */
export const DEFAULT_WHITELIST: string[] = [
    // Package managers
    "npm",
    "pnpm",
    "yarn",
    "npx",
    "bun",
    // Node.js
    "node",
    "tsx",
    "ts-node",
    // Git (read operations)
    "git",
    // Build tools
    "tsc",
    "tsup",
    "esbuild",
    "vite",
    "webpack",
    "rollup",
    // Testing
    "vitest",
    "jest",
    "mocha",
    "playwright",
    "cypress",
    // Linting/formatting
    "eslint",
    "prettier",
    "biome",
    // Utilities
    "echo",
    "cat",
    "ls",
    "pwd",
    "which",
    "head",
    "tail",
    "grep",
    "find",
    "wc",
    "sort",
    "diff",
]

/**
 * Git subcommands that are safe and don't need confirmation.
 */
const SAFE_GIT_SUBCOMMANDS: string[] = [
    "status",
    "log",
    "diff",
    "show",
    "branch",
    "remote",
    "fetch",
    "pull",
    "stash",
    "tag",
    "blame",
    "ls-files",
    "ls-tree",
    "rev-parse",
    "describe",
]

/**
 * Command security checker.
 * Determines if a command is safe to execute, blocked, or requires confirmation.
 */
export class CommandSecurity {
    private readonly blacklist: string[]
    private readonly whitelist: string[]

    constructor(blacklist: string[] = DEFAULT_BLACKLIST, whitelist: string[] = DEFAULT_WHITELIST) {
        this.blacklist = blacklist.map((cmd) => cmd.toLowerCase())
        this.whitelist = whitelist.map((cmd) => cmd.toLowerCase())
    }

    /**
     * Check if a command is safe to execute.
     */
    check(command: string): SecurityCheckResult {
        const normalized = command.trim().toLowerCase()

        const blacklistMatch = this.isBlacklisted(normalized)
        if (blacklistMatch) {
            return {
                classification: "blocked",
                reason: `Command contains blocked pattern: '${blacklistMatch}'`,
            }
        }

        if (this.isWhitelisted(normalized)) {
            return {
                classification: "allowed",
                reason: "Command is in the whitelist",
            }
        }

        return {
            classification: "requires_confirmation",
            reason: "Command is not in the whitelist and requires user confirmation",
        }
    }

    /**
     * Check if command matches any blacklist pattern.
     * Returns the matched pattern or null.
     */
    private isBlacklisted(command: string): string | null {
        for (const pattern of this.blacklist) {
            if (command.includes(pattern)) {
                return pattern
            }
        }
        return null
    }

    /**
     * Check if command's first word is in the whitelist.
     */
    private isWhitelisted(command: string): boolean {
        const firstWord = this.getFirstWord(command)

        if (!this.whitelist.includes(firstWord)) {
            return false
        }

        if (firstWord === "git") {
            return this.isGitCommandSafe(command)
        }

        return true
    }

    /**
     * Check if git command is safe (read-only operations).
     */
    private isGitCommandSafe(command: string): boolean {
        const parts = command.split(/\s+/)
        if (parts.length < 2) {
            return false
        }

        const subcommand = parts[1]
        return SAFE_GIT_SUBCOMMANDS.includes(subcommand)
    }

    /**
     * Get first word from command.
     */
    private getFirstWord(command: string): string {
        const match = /^(\S+)/.exec(command)
        return match ? match[1] : ""
    }

    /**
     * Add patterns to the blacklist.
     */
    addToBlacklist(patterns: string[]): void {
        for (const pattern of patterns) {
            const normalized = pattern.toLowerCase()
            if (!this.blacklist.includes(normalized)) {
                this.blacklist.push(normalized)
            }
        }
    }

    /**
     * Add commands to the whitelist.
     */
    addToWhitelist(commands: string[]): void {
        for (const cmd of commands) {
            const normalized = cmd.toLowerCase()
            if (!this.whitelist.includes(normalized)) {
                this.whitelist.push(normalized)
            }
        }
    }

    /**
     * Get current blacklist.
     */
    getBlacklist(): string[] {
        return [...this.blacklist]
    }

    /**
     * Get current whitelist.
     */
    getWhitelist(): string[] {
        return [...this.whitelist]
    }
}
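A short sketch of the three classifications `check()` produces with the default lists (not part of this commit; `terraform` is just an example of a command outside the whitelist):

```ts
import { CommandSecurity } from "./CommandSecurity.js"

const security = new CommandSecurity()

security.check("git status")       // allowed: whitelisted command, safe git subcommand
security.check("git push --force") // blocked: matches a blacklist pattern
security.check("terraform apply")  // requires_confirmation: first word not whitelisted

// Project-specific commands can be promoted to the whitelist at runtime.
security.addToWhitelist(["terraform"])
security.check("terraform apply")  // now allowed
```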
230 packages/ipuaro/src/infrastructure/tools/run/RunCommandTool.ts Normal file
@@ -0,0 +1,230 @@
import { exec } from "node:child_process"
import { promisify } from "node:util"
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
import {
    createErrorResult,
    createSuccessResult,
    type ToolResult,
} from "../../../domain/value-objects/ToolResult.js"
import type { CommandsConfig } from "../../../shared/constants/config.js"
import { CommandSecurity } from "./CommandSecurity.js"

const execAsync = promisify(exec)

/**
 * Result data from run_command tool.
 */
export interface RunCommandResult {
    /** The command that was executed */
    command: string
    /** Exit code (0 = success) */
    exitCode: number
    /** Standard output */
    stdout: string
    /** Standard error output */
    stderr: string
    /** Whether command was successful (exit code 0) */
    success: boolean
    /** Execution time in milliseconds */
    durationMs: number
    /** Whether user confirmation was required */
    requiredConfirmation: boolean
}

/**
 * Default command timeout in milliseconds.
 */
const DEFAULT_TIMEOUT = 30000

/**
 * Maximum output size in characters.
 */
const MAX_OUTPUT_SIZE = 100000

/**
 * Tool for executing shell commands.
 * Commands are checked against blacklist/whitelist for security.
 */
export class RunCommandTool implements ITool {
    readonly name = "run_command"
    readonly description =
        "Execute a shell command in the project directory. " +
        "Commands are checked against blacklist/whitelist for security. " +
        "Unknown commands require user confirmation."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "command",
            type: "string",
            description: "Shell command to execute",
            required: true,
        },
        {
            name: "timeout",
            type: "number",
            description: "Timeout in milliseconds (default: from config or 30000, max: 600000)",
            required: false,
        },
    ]
    readonly requiresConfirmation = false
    readonly category = "run" as const

    private readonly security: CommandSecurity
    private readonly execFn: typeof execAsync
    private readonly configTimeout: number | null

    constructor(security?: CommandSecurity, execFn?: typeof execAsync, config?: CommandsConfig) {
        this.security = security ?? new CommandSecurity()
        this.execFn = execFn ?? execAsync
        this.configTimeout = config?.timeout ?? null
    }

    validateParams(params: Record<string, unknown>): string | null {
        if (params.command === undefined) {
            return "Parameter 'command' is required"
        }
        if (typeof params.command !== "string") {
            return "Parameter 'command' must be a string"
        }
        if (params.command.trim() === "") {
            return "Parameter 'command' cannot be empty"
        }
        if (params.timeout !== undefined) {
            if (typeof params.timeout !== "number") {
                return "Parameter 'timeout' must be a number"
            }
            if (params.timeout <= 0) {
                return "Parameter 'timeout' must be positive"
            }
            if (params.timeout > 600000) {
                return "Parameter 'timeout' cannot exceed 600000ms (10 minutes)"
            }
        }
        return null
    }

    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const command = params.command as string
        const timeout = (params.timeout as number) ?? this.configTimeout ?? DEFAULT_TIMEOUT

        const securityCheck = this.security.check(command)

        if (securityCheck.classification === "blocked") {
            return createErrorResult(
                callId,
                `Command blocked for security: ${securityCheck.reason}`,
                Date.now() - startTime,
            )
        }

        let requiredConfirmation = false

        if (securityCheck.classification === "requires_confirmation") {
            requiredConfirmation = true
            const confirmed = await ctx.requestConfirmation(
                `Execute command: ${command}\n\nReason: ${securityCheck.reason}`,
            )

            if (!confirmed) {
                return createErrorResult(
                    callId,
                    "Command execution cancelled by user",
                    Date.now() - startTime,
                )
            }
        }

        try {
            const execStartTime = Date.now()

            const { stdout, stderr } = await this.execFn(command, {
                cwd: ctx.projectRoot,
                timeout,
                maxBuffer: MAX_OUTPUT_SIZE,
                env: { ...process.env, FORCE_COLOR: "0" },
            })

            const durationMs = Date.now() - execStartTime

            const result: RunCommandResult = {
                command,
                exitCode: 0,
                stdout: this.truncateOutput(stdout),
                stderr: this.truncateOutput(stderr),
                success: true,
                durationMs,
                requiredConfirmation,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            return this.handleExecError(callId, command, error, requiredConfirmation, startTime)
        }
    }

    /**
     * Handle exec errors and return appropriate result.
     */
    private handleExecError(
        callId: string,
        command: string,
        error: unknown,
        requiredConfirmation: boolean,
        startTime: number,
    ): ToolResult {
        if (this.isExecError(error)) {
            const result: RunCommandResult = {
                command,
                exitCode: error.code ?? 1,
                stdout: this.truncateOutput(error.stdout ?? ""),
                stderr: this.truncateOutput(error.stderr ?? error.message),
                success: false,
                durationMs: Date.now() - startTime,
                requiredConfirmation,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        }

        if (error instanceof Error) {
            if (error.message.includes("ETIMEDOUT") || error.message.includes("timed out")) {
                return createErrorResult(
                    callId,
                    `Command timed out: ${command}`,
                    Date.now() - startTime,
                )
            }
            return createErrorResult(callId, error.message, Date.now() - startTime)
        }

        return createErrorResult(callId, String(error), Date.now() - startTime)
    }

    /**
     * Type guard for exec error.
     */
    private isExecError(
        error: unknown,
    ): error is Error & { code?: number; stdout?: string; stderr?: string } {
        return error instanceof Error && "code" in error
    }

    /**
     * Truncate output if too large.
     */
    private truncateOutput(output: string): string {
        if (output.length <= MAX_OUTPUT_SIZE) {
            return output
        }
        return `${output.slice(0, MAX_OUTPUT_SIZE)}\n... (output truncated)`
    }

    /**
     * Get the security checker instance.
     */
    getSecurity(): CommandSecurity {
        return this.security
    }
}
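A hedged sketch of the confirmation flow (not part of this commit): the stubbed context auto-denies confirmations, so anything outside the whitelist is rejected before the shell is ever invoked; the cast is only because the sketch does not implement the full ToolContext.

```ts
import type { ToolContext } from "../../../domain/services/ITool.js"
import { RunCommandTool } from "./RunCommandTool.js"

// Hypothetical context: confirmation requests are always declined.
const ctx = {
    projectRoot: process.cwd(),
    requestConfirmation: async () => false,
} as unknown as ToolContext

const tool = new RunCommandTool()
await tool.execute({ command: "rm -rf /" }, ctx)        // error result: blocked by blacklist
await tool.execute({ command: "terraform apply" }, ctx) // error result: cancelled, no confirmation
await tool.execute({ command: "echo hi" }, ctx)         // whitelisted: runs via child_process
```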
365 packages/ipuaro/src/infrastructure/tools/run/RunTestsTool.ts Normal file
@@ -0,0 +1,365 @@
import { exec } from "node:child_process"
import { promisify } from "node:util"
import * as path from "node:path"
import * as fs from "node:fs/promises"
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
import {
    createErrorResult,
    createSuccessResult,
    type ToolResult,
} from "../../../domain/value-objects/ToolResult.js"

const execAsync = promisify(exec)

/**
 * Supported test runners.
 */
export type TestRunner = "vitest" | "jest" | "mocha" | "npm"

/**
 * Result data from run_tests tool.
 */
export interface RunTestsResult {
    /** Test runner that was used */
    runner: TestRunner
    /** Command that was executed */
    command: string
    /** Whether all tests passed */
    passed: boolean
    /** Exit code */
    exitCode: number
    /** Standard output */
    stdout: string
    /** Standard error output */
    stderr: string
    /** Execution time in milliseconds */
    durationMs: number
}

/**
 * Default test timeout in milliseconds (5 minutes).
 */
const DEFAULT_TIMEOUT = 300000

/**
 * Maximum output size in characters.
 */
const MAX_OUTPUT_SIZE = 200000

/**
 * Tool for running project tests.
 * Auto-detects test runner (vitest, jest, mocha, npm test).
 */
export class RunTestsTool implements ITool {
    readonly name = "run_tests"
    readonly description =
        "Run the project's test suite. Auto-detects test runner (vitest, jest, npm test). " +
        "Returns test results summary."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "path",
            type: "string",
            description: "Run tests for specific file or directory",
            required: false,
        },
        {
            name: "filter",
            type: "string",
            description: "Filter tests by name pattern",
            required: false,
        },
        {
            name: "watch",
            type: "boolean",
            description: "Run in watch mode (default: false)",
            required: false,
        },
    ]
    readonly requiresConfirmation = false
    readonly category = "run" as const

    private readonly execFn: typeof execAsync
    private readonly fsAccess: typeof fs.access
    private readonly fsReadFile: typeof fs.readFile

    constructor(
        execFn?: typeof execAsync,
        fsAccess?: typeof fs.access,
        fsReadFile?: typeof fs.readFile,
    ) {
        this.execFn = execFn ?? execAsync
        this.fsAccess = fsAccess ?? fs.access
        this.fsReadFile = fsReadFile ?? fs.readFile
    }

    validateParams(params: Record<string, unknown>): string | null {
        if (params.path !== undefined && typeof params.path !== "string") {
            return "Parameter 'path' must be a string"
        }
        if (params.filter !== undefined && typeof params.filter !== "string") {
            return "Parameter 'filter' must be a string"
        }
        if (params.watch !== undefined && typeof params.watch !== "boolean") {
            return "Parameter 'watch' must be a boolean"
        }
        return null
    }

    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const testPath = params.path as string | undefined
        const filter = params.filter as string | undefined
        const watch = (params.watch as boolean) ?? false

        try {
            const runner = await this.detectTestRunner(ctx.projectRoot)

            if (!runner) {
                return createErrorResult(
                    callId,
                    "No test runner detected. Ensure vitest, jest, or mocha is installed, or 'test' script exists in package.json.",
                    Date.now() - startTime,
                )
            }

            const command = this.buildCommand(runner, testPath, filter, watch)
            const execStartTime = Date.now()

            try {
                const { stdout, stderr } = await this.execFn(command, {
                    cwd: ctx.projectRoot,
                    timeout: DEFAULT_TIMEOUT,
                    maxBuffer: MAX_OUTPUT_SIZE,
                    env: { ...process.env, FORCE_COLOR: "0", CI: "true" },
                })

                const durationMs = Date.now() - execStartTime

                const result: RunTestsResult = {
                    runner,
                    command,
                    passed: true,
                    exitCode: 0,
                    stdout: this.truncateOutput(stdout),
                    stderr: this.truncateOutput(stderr),
                    durationMs,
                }

                return createSuccessResult(callId, result, Date.now() - startTime)
            } catch (error) {
                return this.handleExecError(
                    { callId, runner, command, startTime },
                    error,
                    execStartTime,
                )
            }
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Detect which test runner is available in the project.
     */
    async detectTestRunner(projectRoot: string): Promise<TestRunner | null> {
        const configRunner = await this.detectByConfigFile(projectRoot)
        if (configRunner) {
            return configRunner
        }

        return this.detectByPackageJson(projectRoot)
    }

    private async detectByConfigFile(projectRoot: string): Promise<TestRunner | null> {
        const configFiles: { files: string[]; runner: TestRunner }[] = [
            {
                files: ["vitest.config.ts", "vitest.config.js", "vitest.config.mts"],
                runner: "vitest",
            },
            {
                files: ["jest.config.js", "jest.config.ts", "jest.config.json"],
                runner: "jest",
            },
        ]

        for (const { files, runner } of configFiles) {
            for (const file of files) {
                if (await this.hasFile(projectRoot, file)) {
                    return runner
                }
            }
        }
        return null
    }

    private async detectByPackageJson(projectRoot: string): Promise<TestRunner | null> {
        const packageJsonPath = path.join(projectRoot, "package.json")
        try {
            const content = await this.fsReadFile(packageJsonPath, "utf-8")
            const pkg = JSON.parse(content) as {
                scripts?: Record<string, string>
                devDependencies?: Record<string, string>
                dependencies?: Record<string, string>
            }

            const deps = { ...pkg.devDependencies, ...pkg.dependencies }
            if (deps.vitest) {
                return "vitest"
            }
            if (deps.jest) {
                return "jest"
            }
            if (deps.mocha) {
                return "mocha"
            }
            if (pkg.scripts?.test) {
                return "npm"
            }
        } catch {
            // package.json doesn't exist or is invalid
        }
        return null
    }

    /**
     * Build the test command based on runner and options.
     */
    buildCommand(runner: TestRunner, testPath?: string, filter?: string, watch?: boolean): string {
        const builders: Record<TestRunner, () => string[]> = {
            vitest: () => this.buildVitestCommand(testPath, filter, watch),
            jest: () => this.buildJestCommand(testPath, filter, watch),
            mocha: () => this.buildMochaCommand(testPath, filter, watch),
            npm: () => this.buildNpmCommand(testPath, filter),
        }
        return builders[runner]().join(" ")
    }

    private buildVitestCommand(testPath?: string, filter?: string, watch?: boolean): string[] {
        const parts = ["npx vitest"]
        if (!watch) {
            parts.push("run")
        }
        if (testPath) {
            parts.push(testPath)
        }
        if (filter) {
            parts.push("-t", `"${filter}"`)
        }
        return parts
    }

    private buildJestCommand(testPath?: string, filter?: string, watch?: boolean): string[] {
        const parts = ["npx jest"]
        if (testPath) {
            parts.push(testPath)
        }
        if (filter) {
            parts.push("-t", `"${filter}"`)
        }
        if (watch) {
            parts.push("--watch")
        }
        return parts
    }

    private buildMochaCommand(testPath?: string, filter?: string, watch?: boolean): string[] {
        const parts = ["npx mocha"]
        if (testPath) {
            parts.push(testPath)
        }
        if (filter) {
            parts.push("--grep", `"${filter}"`)
        }
        if (watch) {
            parts.push("--watch")
        }
        return parts
    }

    private buildNpmCommand(testPath?: string, filter?: string): string[] {
        const parts = ["npm test"]
        if (testPath || filter) {
            parts.push("--")
            if (testPath) {
                parts.push(testPath)
            }
            if (filter) {
                parts.push(`"${filter}"`)
            }
        }
        return parts
    }

    /**
     * Check if a file exists.
     */
    private async hasFile(projectRoot: string, filename: string): Promise<boolean> {
        try {
            await this.fsAccess(path.join(projectRoot, filename))
            return true
        } catch {
            return false
        }
    }

    /**
     * Handle exec errors and return appropriate result.
     */
    private handleExecError(
        ctx: { callId: string; runner: TestRunner; command: string; startTime: number },
        error: unknown,
        execStartTime: number,
    ): ToolResult {
        const { callId, runner, command, startTime } = ctx
        const durationMs = Date.now() - execStartTime

        if (this.isExecError(error)) {
            const result: RunTestsResult = {
                runner,
                command,
                passed: false,
                exitCode: error.code ?? 1,
                stdout: this.truncateOutput(error.stdout ?? ""),
                stderr: this.truncateOutput(error.stderr ?? error.message),
                durationMs,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        }

        if (error instanceof Error) {
            if (error.message.includes("ETIMEDOUT") || error.message.includes("timed out")) {
                return createErrorResult(
                    callId,
                    `Tests timed out after ${String(DEFAULT_TIMEOUT / 1000)} seconds`,
                    Date.now() - startTime,
                )
            }
            return createErrorResult(callId, error.message, Date.now() - startTime)
        }

        return createErrorResult(callId, String(error), Date.now() - startTime)
    }

    /**
     * Type guard for exec error.
     */
    private isExecError(
        error: unknown,
    ): error is Error & { code?: number; stdout?: string; stderr?: string } {
        return error instanceof Error && "code" in error
    }

    /**
     * Truncate output if too large.
     */
    private truncateOutput(output: string): string {
        if (output.length <= MAX_OUTPUT_SIZE) {
            return output
        }
        return `${output.slice(0, MAX_OUTPUT_SIZE)}\n... (output truncated)`
    }
}
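Since `buildCommand` is public, the command shapes can be checked directly; a short sketch (not part of this commit, file paths and filter text are invented):

```ts
import { RunTestsTool } from "./RunTestsTool.js"

const tool = new RunTestsTool()

tool.buildCommand("vitest", "src/core", "parses config")
// => 'npx vitest run src/core -t "parses config"'
tool.buildCommand("jest", undefined, undefined, true)
// => 'npx jest --watch'
tool.buildCommand("npm", "src/core")
// => 'npm test -- src/core'
```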
12 packages/ipuaro/src/infrastructure/tools/run/index.ts Normal file
@@ -0,0 +1,12 @@
// Run tools exports
export {
    CommandSecurity,
    DEFAULT_BLACKLIST,
    DEFAULT_WHITELIST,
    type CommandClassification,
    type SecurityCheckResult,
} from "./CommandSecurity.js"

export { RunCommandTool, type RunCommandResult } from "./RunCommandTool.js"

export { RunTestsTool, type RunTestsResult, type TestRunner } from "./RunTestsTool.js"
@@ -0,0 +1,221 @@
import { promises as fs } from "node:fs"
import * as path from "node:path"
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
import type { SymbolLocation } from "../../../domain/services/IStorage.js"
import {
    createErrorResult,
    createSuccessResult,
    type ToolResult,
} from "../../../domain/value-objects/ToolResult.js"

/**
 * A single definition location with context.
 */
export interface DefinitionLocation {
    path: string
    line: number
    type: SymbolLocation["type"]
    context: string
}

/**
 * Result data from find_definition tool.
 */
export interface FindDefinitionResult {
    symbol: string
    found: boolean
    definitions: DefinitionLocation[]
    suggestions?: string[]
}

/**
 * Tool for finding where a symbol is defined.
 * Uses the SymbolIndex to locate definitions.
 */
export class FindDefinitionTool implements ITool {
    readonly name = "find_definition"
    readonly description =
        "Find where a symbol is defined. " +
        "Returns file path, line number, and symbol type."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "symbol",
            type: "string",
            description: "Symbol name to find definition for",
            required: true,
        },
    ]
    readonly requiresConfirmation = false
    readonly category = "search" as const

    private readonly contextLines = 2

    validateParams(params: Record<string, unknown>): string | null {
        if (typeof params.symbol !== "string" || params.symbol.trim() === "") {
            return "Parameter 'symbol' is required and must be a non-empty string"
        }

        return null
    }

    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const symbol = (params.symbol as string).trim()

        try {
            const symbolIndex = await ctx.storage.getSymbolIndex()
            const locations = symbolIndex.get(symbol)

            if (!locations || locations.length === 0) {
                const suggestions = this.findSimilarSymbols(symbol, symbolIndex)
                return createSuccessResult(
                    callId,
                    {
                        symbol,
                        found: false,
                        definitions: [],
                        suggestions: suggestions.length > 0 ? suggestions : undefined,
                    } satisfies FindDefinitionResult,
                    Date.now() - startTime,
                )
            }

            const definitions: DefinitionLocation[] = []
            for (const loc of locations) {
                const context = await this.getContext(loc, ctx)
                definitions.push({
                    path: loc.path,
                    line: loc.line,
                    type: loc.type,
                    context,
                })
            }

            definitions.sort((a, b) => {
                const pathCompare = a.path.localeCompare(b.path)
                if (pathCompare !== 0) {
                    return pathCompare
                }
                return a.line - b.line
            })

            const result: FindDefinitionResult = {
                symbol,
                found: true,
                definitions,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Get context lines around the definition.
     */
    private async getContext(loc: SymbolLocation, ctx: ToolContext): Promise<string> {
        try {
            const lines = await this.getFileLines(loc.path, ctx)
            if (lines.length === 0) {
                return ""
            }

            const lineIndex = loc.line - 1
            const startIndex = Math.max(0, lineIndex - this.contextLines)
            const endIndex = Math.min(lines.length - 1, lineIndex + this.contextLines)

            const contextLines: string[] = []
            for (let i = startIndex; i <= endIndex; i++) {
                const lineNum = i + 1
                const prefix = i === lineIndex ? ">" : " "
                contextLines.push(`${prefix}${String(lineNum).padStart(4)}│${lines[i]}`)
            }

            return contextLines.join("\n")
        } catch {
            return ""
        }
    }

    /**
     * Get file lines from storage or filesystem.
     */
    private async getFileLines(relativePath: string, ctx: ToolContext): Promise<string[]> {
        const fileData = await ctx.storage.getFile(relativePath)
        if (fileData) {
            return fileData.lines
        }

        const absolutePath = path.resolve(ctx.projectRoot, relativePath)
        try {
            const content = await fs.readFile(absolutePath, "utf-8")
            return content.split("\n")
        } catch {
            return []
        }
    }

    /**
     * Find similar symbol names for suggestions.
     */
    private findSimilarSymbols(symbol: string, symbolIndex: Map<string, unknown>): string[] {
        const suggestions: string[] = []
        const lowerSymbol = symbol.toLowerCase()
        const maxSuggestions = 5

        for (const name of symbolIndex.keys()) {
            if (suggestions.length >= maxSuggestions) {
                break
            }

            const lowerName = name.toLowerCase()
            if (lowerName.includes(lowerSymbol) || lowerSymbol.includes(lowerName)) {
                suggestions.push(name)
            } else if (this.levenshteinDistance(lowerSymbol, lowerName) <= 2) {
                suggestions.push(name)
            }
        }

        return suggestions.sort()
    }

    /**
     * Calculate Levenshtein distance between two strings.
     */
    private levenshteinDistance(a: string, b: string): number {
        if (a.length === 0) {
            return b.length
        }
        if (b.length === 0) {
            return a.length
        }

        const matrix: number[][] = []

        for (let i = 0; i <= b.length; i++) {
            matrix[i] = [i]
        }
        for (let j = 0; j <= a.length; j++) {
            matrix[0][j] = j
        }

        for (let i = 1; i <= b.length; i++) {
            for (let j = 1; j <= a.length; j++) {
                if (b.charAt(i - 1) === a.charAt(j - 1)) {
                    matrix[i][j] = matrix[i - 1][j - 1]
                } else {
                    matrix[i][j] = Math.min(
                        matrix[i - 1][j - 1] + 1,
                        matrix[i][j - 1] + 1,
                        matrix[i - 1][j] + 1,
                    )
                }
            }
        }

        return matrix[b.length][a.length]
    }
}
@@ -0,0 +1,260 @@
|
||||
import * as path from "node:path"
|
||||
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||
import {
|
||||
createErrorResult,
|
||||
createSuccessResult,
|
||||
type ToolResult,
|
||||
} from "../../../domain/value-objects/ToolResult.js"
|
||||
|
||||
/**
|
||||
* A single reference to a symbol.
|
||||
*/
|
||||
export interface SymbolReference {
|
||||
path: string
|
||||
line: number
|
||||
column: number
|
||||
context: string
|
||||
isDefinition: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Result data from find_references tool.
|
||||
*/
|
||||
export interface FindReferencesResult {
|
||||
symbol: string
|
||||
totalReferences: number
|
||||
files: number
|
||||
references: SymbolReference[]
|
||||
definitionLocations: {
|
||||
path: string
|
||||
line: number
|
||||
type: string
|
||||
}[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool for finding all usages of a symbol across the codebase.
|
||||
* Searches through indexed files for symbol references.
|
||||
*/
|
||||
export class FindReferencesTool implements ITool {
|
||||
readonly name = "find_references"
|
||||
readonly description =
|
||||
"Find all usages of a symbol across the codebase. " +
|
||||
"Returns list of file paths, line numbers, and context."
|
||||
readonly parameters: ToolParameterSchema[] = [
|
||||
{
|
||||
name: "symbol",
|
||||
type: "string",
|
||||
description: "Symbol name to search for (function, class, variable, etc.)",
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
name: "path",
|
||||
type: "string",
|
||||
description: "Limit search to specific file or directory",
|
||||
required: false,
|
||||
},
|
||||
]
|
||||
readonly requiresConfirmation = false
|
||||
readonly category = "search" as const
|
||||
|
||||
private readonly contextLines = 1
|
||||
|
||||
validateParams(params: Record<string, unknown>): string | null {
|
||||
        if (typeof params.symbol !== "string" || params.symbol.trim() === "") {
            return "Parameter 'symbol' is required and must be a non-empty string"
        }

        if (params.path !== undefined && typeof params.path !== "string") {
            return "Parameter 'path' must be a string"
        }

        return null
    }

    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const symbol = (params.symbol as string).trim()
        const filterPath = params.path as string | undefined

        try {
            const symbolIndex = await ctx.storage.getSymbolIndex()
            const definitionLocations = symbolIndex.get(symbol) ?? []

            const allFiles = await ctx.storage.getAllFiles()
            const filesToSearch = this.filterFiles(allFiles, filterPath, ctx.projectRoot)

            if (filesToSearch.size === 0) {
                return createSuccessResult(
                    callId,
                    {
                        symbol,
                        totalReferences: 0,
                        files: 0,
                        references: [],
                        definitionLocations: definitionLocations.map((loc) => ({
                            path: loc.path,
                            line: loc.line,
                            type: loc.type,
                        })),
                    } satisfies FindReferencesResult,
                    Date.now() - startTime,
                )
            }

            const references: SymbolReference[] = []
            const filesWithReferences = new Set<string>()

            for (const [filePath, fileData] of filesToSearch) {
                const fileRefs = this.findReferencesInFile(
                    filePath,
                    fileData.lines,
                    symbol,
                    definitionLocations,
                )

                if (fileRefs.length > 0) {
                    filesWithReferences.add(filePath)
                    references.push(...fileRefs)
                }
            }

            references.sort((a, b) => {
                const pathCompare = a.path.localeCompare(b.path)
                if (pathCompare !== 0) {
                    return pathCompare
                }
                return a.line - b.line
            })

            const result: FindReferencesResult = {
                symbol,
                totalReferences: references.length,
                files: filesWithReferences.size,
                references,
                definitionLocations: definitionLocations.map((loc) => ({
                    path: loc.path,
                    line: loc.line,
                    type: loc.type,
                })),
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Filter files by path prefix if specified.
     */
    private filterFiles(
        allFiles: Map<string, { lines: string[] }>,
        filterPath: string | undefined,
        projectRoot: string,
    ): Map<string, { lines: string[] }> {
        if (!filterPath) {
            return allFiles
        }

        const normalizedFilter = filterPath.startsWith("/")
            ? path.relative(projectRoot, filterPath)
            : filterPath

        const filtered = new Map<string, { lines: string[] }>()
        for (const [filePath, fileData] of allFiles) {
            if (filePath === normalizedFilter || filePath.startsWith(`${normalizedFilter}/`)) {
                filtered.set(filePath, fileData)
            }
        }

        return filtered
    }

    /**
     * Find all references to the symbol in a file.
     */
    private findReferencesInFile(
        filePath: string,
        lines: string[],
        symbol: string,
        definitionLocations: { path: string; line: number }[],
    ): SymbolReference[] {
        const references: SymbolReference[] = []
        const symbolRegex = this.createSymbolRegex(symbol)

        for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) {
            const line = lines[lineIndex]
            const lineNumber = lineIndex + 1
            let match: RegExpExecArray | null

            symbolRegex.lastIndex = 0
            while ((match = symbolRegex.exec(line)) !== null) {
                const column = match.index + 1
                const context = this.buildContext(lines, lineIndex)
                const isDefinition = this.isDefinitionLine(
                    filePath,
                    lineNumber,
                    definitionLocations,
                )

                references.push({
                    path: filePath,
                    line: lineNumber,
                    column,
                    context,
                    isDefinition,
                })
            }
        }

        return references
    }

    /**
     * Create a regex for matching the symbol with appropriate boundaries.
     * Handles symbols that start or end with non-word characters (like $value).
     */
    private createSymbolRegex(symbol: string): RegExp {
        const escaped = symbol.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")

        const startsWithWordChar = /^\w/.test(symbol)
        const endsWithWordChar = /\w$/.test(symbol)

        const prefix = startsWithWordChar ? "\\b" : "(?<![\\w$])"
        const suffix = endsWithWordChar ? "\\b" : "(?![\\w$])"

        return new RegExp(`${prefix}${escaped}${suffix}`, "g")
    }

    /**
     * Build context string with surrounding lines.
     */
    private buildContext(lines: string[], currentIndex: number): string {
        const startIndex = Math.max(0, currentIndex - this.contextLines)
        const endIndex = Math.min(lines.length - 1, currentIndex + this.contextLines)

        const contextLines: string[] = []
        for (let i = startIndex; i <= endIndex; i++) {
            const lineNum = i + 1
            const prefix = i === currentIndex ? ">" : " "
            contextLines.push(`${prefix}${String(lineNum).padStart(4)}│${lines[i]}`)
        }

        return contextLines.join("\n")
    }

    /**
     * Check if this line is a definition location.
     */
    private isDefinitionLine(
        filePath: string,
        lineNumber: number,
        definitionLocations: { path: string; line: number }[],
    ): boolean {
        return definitionLocations.some((loc) => loc.path === filePath && loc.line === lineNumber)
    }
}
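The boundary handling in createSymbolRegex is the subtle part of this tool: plain identifiers get `\b` word boundaries, while symbols that start or end with non-word characters (such as `$value`) fall back to lookarounds so that `x$value` does not count as a reference. A minimal standalone sketch of that logic for illustration; the helper name `buildSymbolRegex` is not part of the package API:

```typescript
// Mirrors the boundary logic of createSymbolRegex above, outside the class.
function buildSymbolRegex(symbol: string): RegExp {
    const escaped = symbol.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
    const prefix = /^\w/.test(symbol) ? "\\b" : "(?<![\\w$])"
    const suffix = /\w$/.test(symbol) ? "\\b" : "(?![\\w$])"
    return new RegExp(`${prefix}${escaped}${suffix}`, "g")
}

// Word-character symbols get plain \b boundaries:
console.log(buildSymbolRegex("value").test("const value = 1"))   // true
console.log(buildSymbolRegex("value").test("const valueOf = 1")) // false (no partial match)

// Symbols starting with "$" use lookaround boundaries instead:
console.log(buildSymbolRegex("$value").test("let $value = 2"))   // true
console.log(buildSymbolRegex("$value").test("let x$value = 2"))  // false
```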
packages/ipuaro/src/infrastructure/tools/search/index.ts (12 lines, new file)
@@ -0,0 +1,12 @@
// Search tools exports
export {
    FindReferencesTool,
    type FindReferencesResult,
    type SymbolReference,
} from "./FindReferencesTool.js"

export {
    FindDefinitionTool,
    type FindDefinitionResult,
    type DefinitionLocation,
} from "./FindDefinitionTool.js"
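Consumers are expected to go through this barrel rather than the individual tool files. A small type-level sketch of such a consumer; the relative import path is illustrative and depends on where the caller lives:

```typescript
import type {
    FindReferencesResult,
    SymbolReference,
} from "./infrastructure/tools/search/index.js" // illustrative path

// Counts how many of the returned references point at definition sites.
function countDefinitions(result: FindReferencesResult): number {
    return result.references.filter((ref: SymbolReference) => ref.isDefinition).length
}
```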
@@ -76,6 +76,60 @@ export const UndoConfigSchema = z.object({
 */
export const EditConfigSchema = z.object({
    autoApply: z.boolean().default(false),
    syntaxHighlight: z.boolean().default(true),
})

/**
 * Input configuration schema.
 */
export const InputConfigSchema = z.object({
    multiline: z.union([z.boolean(), z.literal("auto")]).default(false),
})

/**
 * Display configuration schema.
 */
export const DisplayConfigSchema = z.object({
    showStats: z.boolean().default(true),
    showToolCalls: z.boolean().default(true),
    theme: z.enum(["dark", "light"]).default("dark"),
    bellOnComplete: z.boolean().default(false),
    progressBar: z.boolean().default(true),
})

/**
 * Session configuration schema.
 */
export const SessionConfigSchema = z.object({
    persistIndefinitely: z.boolean().default(true),
    maxHistoryMessages: z.number().int().positive().default(100),
    saveInputHistory: z.boolean().default(true),
})

/**
 * Context configuration schema.
 */
export const ContextConfigSchema = z.object({
    systemPromptTokens: z.number().int().positive().default(2000),
    maxContextUsage: z.number().min(0).max(1).default(0.8),
    autoCompressAt: z.number().min(0).max(1).default(0.8),
    compressionMethod: z.enum(["llm-summary", "truncate"]).default("llm-summary"),
})

/**
 * Autocomplete configuration schema.
 */
export const AutocompleteConfigSchema = z.object({
    enabled: z.boolean().default(true),
    source: z.enum(["redis-index", "filesystem", "both"]).default("redis-index"),
    maxSuggestions: z.number().int().positive().default(10),
})

/**
 * Commands configuration schema.
 */
export const CommandsConfigSchema = z.object({
    timeout: z.number().int().positive().nullable().default(null),
})

/**
@@ -88,6 +142,12 @@ export const ConfigSchema = z.object({
    watchdog: WatchdogConfigSchema.default({}),
    undo: UndoConfigSchema.default({}),
    edit: EditConfigSchema.default({}),
    input: InputConfigSchema.default({}),
    display: DisplayConfigSchema.default({}),
    session: SessionConfigSchema.default({}),
    context: ContextConfigSchema.default({}),
    autocomplete: AutocompleteConfigSchema.default({}),
    commands: CommandsConfigSchema.default({}),
})

/**
@@ -100,6 +160,12 @@ export type ProjectConfig = z.infer<typeof ProjectConfigSchema>
export type WatchdogConfig = z.infer<typeof WatchdogConfigSchema>
export type UndoConfig = z.infer<typeof UndoConfigSchema>
export type EditConfig = z.infer<typeof EditConfigSchema>
export type InputConfig = z.infer<typeof InputConfigSchema>
export type DisplayConfig = z.infer<typeof DisplayConfigSchema>
export type SessionConfig = z.infer<typeof SessionConfigSchema>
export type ContextConfig = z.infer<typeof ContextConfigSchema>
export type AutocompleteConfig = z.infer<typeof AutocompleteConfigSchema>
export type CommandsConfig = z.infer<typeof CommandsConfigSchema>

/**
 * Default configuration.
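Because every field carries a zod `.default(...)`, each section schema can be parsed from an empty object and still yield a fully populated config, while out-of-range values are rejected. A short sketch, assuming the schemas are imported from this config module (the import path is illustrative):

```typescript
import { DisplayConfigSchema, ContextConfigSchema } from "./config.js" // illustrative path

// Each sub-schema fills in its defaults, so an empty object parses to a full section:
const display = DisplayConfigSchema.parse({})
// { showStats: true, showToolCalls: true, theme: "dark", bellOnComplete: false, progressBar: true }
console.log(display.theme) // "dark"

// Out-of-range values are rejected, e.g. maxContextUsage must lie within [0, 1]:
const check = ContextConfigSchema.safeParse({ maxContextUsage: 1.5 })
console.log(check.success) // false
```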
packages/ipuaro/src/shared/errors/ErrorHandler.ts (295 lines, new file)
@@ -0,0 +1,295 @@
/**
 * ErrorHandler service for handling errors with user interaction.
 * Implements the error handling matrix from ROADMAP.md.
 */

import { ERROR_MATRIX, type ErrorOption, type ErrorType, IpuaroError } from "./IpuaroError.js"

/**
 * Result of error handling.
 */
export interface ErrorHandlingResult {
    action: ErrorOption
    shouldContinue: boolean
    retryCount?: number
}

/**
 * Callback for requesting user choice on error.
 */
export type ErrorChoiceCallback = (
    error: IpuaroError,
    availableOptions: ErrorOption[],
    defaultOption: ErrorOption,
) => Promise<ErrorOption>

/**
 * Options for ErrorHandler.
 */
export interface ErrorHandlerOptions {
    maxRetries?: number
    autoSkipParseErrors?: boolean
    autoRetryLLMErrors?: boolean
    onError?: ErrorChoiceCallback
}

const DEFAULT_MAX_RETRIES = 3

/**
 * Error handler service with matrix-based logic.
 */
export class ErrorHandler {
    private readonly maxRetries: number
    private readonly autoSkipParseErrors: boolean
    private readonly autoRetryLLMErrors: boolean
    private readonly onError?: ErrorChoiceCallback

    private readonly retryCounters = new Map<string, number>()

    constructor(options: ErrorHandlerOptions = {}) {
        this.maxRetries = options.maxRetries ?? DEFAULT_MAX_RETRIES
        this.autoSkipParseErrors = options.autoSkipParseErrors ?? true
        this.autoRetryLLMErrors = options.autoRetryLLMErrors ?? false
        this.onError = options.onError
    }

    /**
     * Handle an error and determine the action to take.
     */
    async handle(error: IpuaroError, contextKey?: string): Promise<ErrorHandlingResult> {
        const key = contextKey ?? error.message
        const currentRetries = this.retryCounters.get(key) ?? 0

        if (this.shouldAutoHandle(error)) {
            const autoAction = this.getAutoAction(error, currentRetries)
            if (autoAction) {
                return this.createResult(autoAction, key, currentRetries)
            }
        }

        if (!error.recoverable) {
            return {
                action: "abort",
                shouldContinue: false,
            }
        }

        if (this.onError) {
            const choice = await this.onError(error, error.options, error.defaultOption)
            return this.createResult(choice, key, currentRetries)
        }

        return this.createResult(error.defaultOption, key, currentRetries)
    }

    /**
     * Handle an error synchronously with default behavior.
     */
    handleSync(error: IpuaroError, contextKey?: string): ErrorHandlingResult {
        const key = contextKey ?? error.message
        const currentRetries = this.retryCounters.get(key) ?? 0

        if (this.shouldAutoHandle(error)) {
            const autoAction = this.getAutoAction(error, currentRetries)
            if (autoAction) {
                return this.createResult(autoAction, key, currentRetries)
            }
        }

        if (!error.recoverable) {
            return {
                action: "abort",
                shouldContinue: false,
            }
        }

        return this.createResult(error.defaultOption, key, currentRetries)
    }

    /**
     * Reset retry counters.
     */
    resetRetries(contextKey?: string): void {
        if (contextKey) {
            this.retryCounters.delete(contextKey)
        } else {
            this.retryCounters.clear()
        }
    }

    /**
     * Get retry count for a context.
     */
    getRetryCount(contextKey: string): number {
        return this.retryCounters.get(contextKey) ?? 0
    }

    /**
     * Check if max retries exceeded for a context.
     */
    isMaxRetriesExceeded(contextKey: string): boolean {
        return this.getRetryCount(contextKey) >= this.maxRetries
    }

    /**
     * Wrap a function with error handling.
     */
    async wrap<T>(
        fn: () => Promise<T>,
        errorType: ErrorType,
        contextKey?: string,
    ): Promise<{ success: true; data: T } | { success: false; result: ErrorHandlingResult }> {
        try {
            const data = await fn()
            if (contextKey) {
                this.resetRetries(contextKey)
            }
            return { success: true, data }
        } catch (err) {
            const error =
                err instanceof IpuaroError
                    ? err
                    : new IpuaroError(errorType, err instanceof Error ? err.message : String(err))

            const result = await this.handle(error, contextKey)
            return { success: false, result }
        }
    }

    /**
     * Wrap a function with retry logic.
     */
    async withRetry<T>(fn: () => Promise<T>, errorType: ErrorType, contextKey: string): Promise<T> {
        const key = contextKey

        while (!this.isMaxRetriesExceeded(key)) {
            try {
                const result = await fn()
                this.resetRetries(key)
                return result
            } catch (err) {
                const error =
                    err instanceof IpuaroError
                        ? err
                        : new IpuaroError(
                              errorType,
                              err instanceof Error ? err.message : String(err),
                          )

                const handlingResult = await this.handle(error, key)

                if (handlingResult.action !== "retry" || !handlingResult.shouldContinue) {
                    throw error
                }
            }
        }

        throw new IpuaroError(
            errorType,
            `Max retries (${String(this.maxRetries)}) exceeded for: ${key}`,
        )
    }

    private shouldAutoHandle(error: IpuaroError): boolean {
        if (error.type === "parse" && this.autoSkipParseErrors) {
            return true
        }
        if ((error.type === "llm" || error.type === "timeout") && this.autoRetryLLMErrors) {
            return true
        }
        return false
    }

    private getAutoAction(error: IpuaroError, currentRetries: number): ErrorOption | null {
        if (error.type === "parse" && this.autoSkipParseErrors) {
            return "skip"
        }

        if ((error.type === "llm" || error.type === "timeout") && this.autoRetryLLMErrors) {
            if (currentRetries < this.maxRetries) {
                return "retry"
            }
            return "abort"
        }

        return null
    }

    private createResult(
        action: ErrorOption,
        key: string,
        currentRetries: number,
    ): ErrorHandlingResult {
        if (action === "retry") {
            this.retryCounters.set(key, currentRetries + 1)
            const newRetryCount = currentRetries + 1

            if (newRetryCount > this.maxRetries) {
                return {
                    action: "abort",
                    shouldContinue: false,
                    retryCount: newRetryCount,
                }
            }

            return {
                action: "retry",
                shouldContinue: true,
                retryCount: newRetryCount,
            }
        }

        this.retryCounters.delete(key)

        return {
            action,
            shouldContinue: action === "skip" || action === "confirm" || action === "regenerate",
            retryCount: currentRetries,
        }
    }
}

/**
 * Get available options for an error type.
 */
export function getErrorOptions(errorType: ErrorType): ErrorOption[] {
    return ERROR_MATRIX[errorType].options
}

/**
 * Get default option for an error type.
 */
export function getDefaultErrorOption(errorType: ErrorType): ErrorOption {
    return ERROR_MATRIX[errorType].defaultOption
}

/**
 * Check if an error type is recoverable by default.
 */
export function isRecoverableError(errorType: ErrorType): boolean {
    return ERROR_MATRIX[errorType].recoverable
}

/**
 * Convert any error to IpuaroError.
 */
export function toIpuaroError(error: unknown, defaultType: ErrorType = "unknown"): IpuaroError {
    if (error instanceof IpuaroError) {
        return error
    }

    if (error instanceof Error) {
        return new IpuaroError(defaultType, error.message, {
            context: { originalError: error.name },
        })
    }

    return new IpuaroError(defaultType, String(error))
}

/**
 * Create a default ErrorHandler instance.
 */
export function createErrorHandler(options?: ErrorHandlerOptions): ErrorHandler {
    return new ErrorHandler(options)
}
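A minimal sketch of how the handler could be wired up for LLM calls; `callLLM`, the `"generate"` context key, and the import paths are placeholders for illustration, not part of the package:

```typescript
import { ErrorHandler } from "./shared/errors/ErrorHandler.js" // illustrative path
import { IpuaroError } from "./shared/errors/IpuaroError.js"   // illustrative path

// Hypothetical LLM call, used only to exercise the retry path.
async function callLLM(prompt: string): Promise<string> {
    throw IpuaroError.llm(`Ollama is not reachable (prompt: ${prompt.slice(0, 20)})`)
}

const handler = new ErrorHandler({
    maxRetries: 2,
    autoRetryLLMErrors: true, // "llm"/"timeout" errors are retried automatically up to maxRetries
})

async function main(): Promise<void> {
    try {
        // withRetry re-invokes the function while the handler decides "retry",
        // then rethrows once the budget for the "generate" context key is spent.
        const answer = await handler.withRetry(() => callLLM("hello"), "llm", "generate")
        console.log(answer)
    } catch (err) {
        console.error(err instanceof IpuaroError ? err.toDisplayString() : err)
    }
}

void main()
```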
@@ -12,6 +12,72 @@ export type ErrorType =
    | "timeout"
    | "unknown"

/**
 * Available options for error recovery.
 */
export type ErrorOption = "retry" | "skip" | "abort" | "confirm" | "regenerate"

/**
 * Error metadata with available options.
 */
export interface ErrorMeta {
    type: ErrorType
    recoverable: boolean
    options: ErrorOption[]
    defaultOption: ErrorOption
}

/**
 * Error handling matrix - defines behavior for each error type.
 */
export const ERROR_MATRIX: Record<ErrorType, Omit<ErrorMeta, "type">> = {
    redis: {
        recoverable: false,
        options: ["retry", "abort"],
        defaultOption: "abort",
    },
    parse: {
        recoverable: true,
        options: ["skip", "abort"],
        defaultOption: "skip",
    },
    llm: {
        recoverable: true,
        options: ["retry", "skip", "abort"],
        defaultOption: "retry",
    },
    file: {
        recoverable: true,
        options: ["skip", "abort"],
        defaultOption: "skip",
    },
    command: {
        recoverable: true,
        options: ["confirm", "skip", "abort"],
        defaultOption: "confirm",
    },
    conflict: {
        recoverable: true,
        options: ["skip", "regenerate", "abort"],
        defaultOption: "skip",
    },
    validation: {
        recoverable: true,
        options: ["skip", "abort"],
        defaultOption: "skip",
    },
    timeout: {
        recoverable: true,
        options: ["retry", "skip", "abort"],
        defaultOption: "retry",
    },
    unknown: {
        recoverable: false,
        options: ["abort"],
        defaultOption: "abort",
    },
}

/**
 * Base error class for ipuaro.
 */
@@ -19,60 +85,142 @@ export class IpuaroError extends Error {
    readonly type: ErrorType
    readonly recoverable: boolean
    readonly suggestion?: string
    readonly options: ErrorOption[]
    readonly defaultOption: ErrorOption
    readonly context?: Record<string, unknown>

    constructor(type: ErrorType, message: string, recoverable = true, suggestion?: string) {
    constructor(
        type: ErrorType,
        message: string,
        options?: {
            recoverable?: boolean
            suggestion?: string
            context?: Record<string, unknown>
        },
    ) {
        super(message)
        this.name = "IpuaroError"
        this.type = type
        this.recoverable = recoverable
        this.suggestion = suggestion

        const meta = ERROR_MATRIX[type]
        this.recoverable = options?.recoverable ?? meta.recoverable
        this.options = meta.options
        this.defaultOption = meta.defaultOption
        this.suggestion = options?.suggestion
        this.context = options?.context
    }

    static redis(message: string): IpuaroError {
        return new IpuaroError(
            "redis",
            message,
            false,
            "Please ensure Redis is running: redis-server",
        )
    /**
     * Get error metadata.
     */
    getMeta(): ErrorMeta {
        return {
            type: this.type,
            recoverable: this.recoverable,
            options: this.options,
            defaultOption: this.defaultOption,
        }
    }

    /**
     * Check if an option is available for this error.
     */
    hasOption(option: ErrorOption): boolean {
        return this.options.includes(option)
    }

    /**
     * Create a formatted error message with suggestion.
     */
    toDisplayString(): string {
        let result = `[${this.type}] ${this.message}`
        if (this.suggestion) {
            result += `\n Suggestion: ${this.suggestion}`
        }
        return result
    }

    static redis(message: string, context?: Record<string, unknown>): IpuaroError {
        return new IpuaroError("redis", message, {
            suggestion: "Please ensure Redis is running: redis-server",
            context,
        })
    }

    static parse(message: string, filePath?: string): IpuaroError {
        const msg = filePath ? `${message} in ${filePath}` : message
        return new IpuaroError("parse", msg, true, "File will be skipped")
        return new IpuaroError("parse", msg, {
            suggestion: "File will be skipped during indexing",
            context: filePath ? { filePath } : undefined,
        })
    }

    static llm(message: string): IpuaroError {
        return new IpuaroError(
            "llm",
            message,
            true,
            "Please ensure Ollama is running and model is available",
        )
    static llm(message: string, context?: Record<string, unknown>): IpuaroError {
        return new IpuaroError("llm", message, {
            suggestion: "Please ensure Ollama is running and model is available",
            context,
        })
    }

    static file(message: string): IpuaroError {
        return new IpuaroError("file", message, true)
    static llmTimeout(message: string): IpuaroError {
        return new IpuaroError("timeout", message, {
            suggestion: "The LLM request timed out. Try again or check Ollama status.",
        })
    }

    static command(message: string): IpuaroError {
        return new IpuaroError("command", message, true)
    static file(message: string, filePath?: string): IpuaroError {
        return new IpuaroError("file", message, {
            suggestion: "Check if the file exists and you have permission to access it",
            context: filePath ? { filePath } : undefined,
        })
    }

    static conflict(message: string): IpuaroError {
        return new IpuaroError(
            "conflict",
            message,
            true,
            "File was modified externally. Regenerate or skip.",
        )
    static fileNotFound(filePath: string): IpuaroError {
        return new IpuaroError("file", `File not found: ${filePath}`, {
            suggestion: "Check the file path and try again",
            context: { filePath },
        })
    }

    static validation(message: string): IpuaroError {
        return new IpuaroError("validation", message, true)
    static command(message: string, command?: string): IpuaroError {
        return new IpuaroError("command", message, {
            suggestion: "Command requires confirmation or is not in whitelist",
            context: command ? { command } : undefined,
        })
    }

    static timeout(message: string): IpuaroError {
        return new IpuaroError("timeout", message, true, "Try again or increase timeout")
    static commandBlacklisted(command: string): IpuaroError {
        return new IpuaroError("command", `Command is blacklisted: ${command}`, {
            recoverable: false,
            suggestion: "This command is not allowed for security reasons",
            context: { command },
        })
    }

    static conflict(message: string, filePath?: string): IpuaroError {
        return new IpuaroError("conflict", message, {
            suggestion: "File was modified externally. Regenerate or skip the change.",
            context: filePath ? { filePath } : undefined,
        })
    }

    static validation(message: string, field?: string): IpuaroError {
        return new IpuaroError("validation", message, {
            suggestion: "Please check the input and try again",
            context: field ? { field } : undefined,
        })
    }

    static timeout(message: string, timeoutMs?: number): IpuaroError {
        return new IpuaroError("timeout", message, {
            suggestion: "Try again or increase the timeout value",
            context: timeoutMs ? { timeoutMs } : undefined,
        })
    }

    static unknown(message: string, originalError?: Error): IpuaroError {
        return new IpuaroError("unknown", message, {
            context: originalError ? { originalError: originalError.message } : undefined,
        })
    }
}
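A short sketch of how the matrix-driven metadata surfaces on constructed errors; the import path is illustrative (both modules are re-exported through the errors barrel):

```typescript
import { IpuaroError, getErrorOptions } from "./shared/errors/index.js" // illustrative path

// Factory methods pick up recovery options from ERROR_MATRIX for their error type:
const err = IpuaroError.commandBlacklisted("rm -rf /")
console.log(err.recoverable)             // false (overridden by the factory)
console.log(err.options)                 // ["confirm", "skip", "abort"] for the "command" type
console.log(err.hasOption("regenerate")) // false
console.log(err.toDisplayString())
// [command] Command is blacklisted: rm -rf /
//  Suggestion: This command is not allowed for security reasons

console.log(getErrorOptions("conflict")) // ["skip", "regenerate", "abort"]
```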
@@ -1,2 +1,3 @@
// Shared errors
export * from "./IpuaroError.js"
export * from "./ErrorHandler.js"
Some files were not shown because too many files have changed in this diff.