mirror of
https://github.com/samiyev/puaros.git
synced 2025-12-28 07:16:53 +05:00
Compare commits
66 Commits
ipuaro-v0.
...
ipuaro-v0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
17d75dbd54 | ||
|
|
fac5966678 | ||
|
|
92ba3fd9ba | ||
|
|
e9aaa708fe | ||
|
|
d6d15dd271 | ||
|
|
d63d85d850 | ||
|
|
41cfc21f20 | ||
|
|
eeaa223436 | ||
|
|
36768c06d1 | ||
|
|
5a22cd5c9b | ||
|
|
806c9281b0 | ||
|
|
12197a9624 | ||
|
|
1489b69e69 | ||
|
|
2dcb22812c | ||
|
|
7d7c99fe4d | ||
|
|
a3f0ba948f | ||
|
|
141888bf59 | ||
|
|
b0f1778f3a | ||
|
|
9c94335729 | ||
|
|
c34d57c231 | ||
|
|
60052c0db9 | ||
|
|
fa647c41aa | ||
|
|
98b365bd94 | ||
|
|
a7669f8947 | ||
|
|
7f0ec49c90 | ||
|
|
077d160343 | ||
|
|
b5ee77d8b8 | ||
|
|
a589b0dfc4 | ||
|
|
908c2f50d7 | ||
|
|
510c42241a | ||
|
|
357cf27765 | ||
|
|
6695cb73d4 | ||
|
|
5a9470929c | ||
|
|
137c77cc53 | ||
|
|
0433ef102c | ||
|
|
902d1db831 | ||
|
|
c843b780a8 | ||
|
|
0dff0e87d0 | ||
|
|
ab2d5d40a5 | ||
|
|
baccfd53c0 | ||
|
|
8f995fc596 | ||
|
|
f947c6d157 | ||
|
|
33d52bc7ca | ||
|
|
2c6eb6ce9b | ||
|
|
7d18e87423 | ||
|
|
fd1e6ad86e | ||
|
|
259ecc181a | ||
|
|
0f2ed5b301 | ||
|
|
56643d903f | ||
|
|
f5f904a847 | ||
|
|
2ae1ac13f5 | ||
|
|
caf7aac116 | ||
|
|
4ad5a209c4 | ||
|
|
25146003cc | ||
|
|
68f927d906 | ||
|
|
b3e04a411c | ||
|
|
294d085ad4 | ||
|
|
958e4daed5 | ||
|
|
6234fbce92 | ||
|
|
af9c2377a0 | ||
|
|
d0c1ddc22e | ||
|
|
225480c806 | ||
|
|
fd8e97af0e | ||
|
|
d36f9a6e21 | ||
|
|
4267938dcd | ||
|
|
127c7e2185 |
29
CLAUDE.md
29
CLAUDE.md
@@ -447,6 +447,35 @@ Copy and use for each release:
|
|||||||
- [ ] Published to npm (if public release)
|
- [ ] Published to npm (if public release)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Working with Roadmap
|
||||||
|
|
||||||
|
When the user points to `ROADMAP.md` or asks about the roadmap/next steps:
|
||||||
|
|
||||||
|
1. **Read both files together:**
|
||||||
|
- `packages/<package>/ROADMAP.md` - to understand the planned features and milestones
|
||||||
|
- `packages/<package>/CHANGELOG.md` - to see what's already implemented
|
||||||
|
|
||||||
|
2. **Determine current position:**
|
||||||
|
- Check the latest version in CHANGELOG.md
|
||||||
|
- Cross-reference with ROADMAP.md milestones
|
||||||
|
- Identify which roadmap items are already completed (present in CHANGELOG)
|
||||||
|
|
||||||
|
3. **Suggest next steps:**
|
||||||
|
- Find the first uncompleted item in the current milestone
|
||||||
|
- Or identify the next milestone if current one is complete
|
||||||
|
- Present clear "start here" recommendation
|
||||||
|
|
||||||
|
**Example workflow:**
|
||||||
|
```
|
||||||
|
User: "Let's work on the roadmap" or points to ROADMAP.md
|
||||||
|
|
||||||
|
Claude should:
|
||||||
|
1. Read ROADMAP.md → See milestones v0.1.0, v0.2.0, v0.3.0...
|
||||||
|
2. Read CHANGELOG.md → See latest release is v0.1.1
|
||||||
|
3. Compare → v0.1.0 milestone complete, v0.2.0 in progress
|
||||||
|
4. Report → "v0.1.0 is complete. For v0.2.0, next item is: <feature>"
|
||||||
|
```
|
||||||
|
|
||||||
## Common Workflows
|
## Common Workflows
|
||||||
|
|
||||||
### Adding a new CLI option
|
### Adding a new CLI option
|
||||||
|
|||||||
@@ -74,6 +74,7 @@ export default tseslint.config(
|
|||||||
'@typescript-eslint/require-await': 'warn',
|
'@typescript-eslint/require-await': 'warn',
|
||||||
'@typescript-eslint/no-unnecessary-condition': 'off', // Sometimes useful for defensive coding
|
'@typescript-eslint/no-unnecessary-condition': 'off', // Sometimes useful for defensive coding
|
||||||
'@typescript-eslint/no-non-null-assertion': 'warn',
|
'@typescript-eslint/no-non-null-assertion': 'warn',
|
||||||
|
'@typescript-eslint/no-unnecessary-type-parameters': 'warn', // Allow generic JSON parsers
|
||||||
|
|
||||||
// ========================================
|
// ========================================
|
||||||
// Code Quality & Best Practices
|
// Code Quality & Best Practices
|
||||||
|
|||||||
@@ -5,6 +5,26 @@ All notable changes to @samiyev/guardian will be documented in this file.
|
|||||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [0.9.4] - 2025-11-30
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **VERSION export** - Package version is now exported from index.ts, automatically read from package.json
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- 🔄 **Refactored SecretDetector** - Reduced cyclomatic complexity from 24 to <15:
|
||||||
|
- Extracted helper methods: `extractByRuleId`, `extractAwsType`, `extractGithubType`, `extractSshType`, `extractSlackType`, `extractByMessage`
|
||||||
|
- Used lookup arrays for SSH and message type mappings
|
||||||
|
- 🔄 **Refactored AstNamingTraverser** - Reduced cyclomatic complexity from 17 to <15:
|
||||||
|
- Replaced if-else chain with Map-based node handlers
|
||||||
|
- Added `buildNodeHandlers()` method for cleaner architecture
|
||||||
|
|
||||||
|
### Quality
|
||||||
|
|
||||||
|
- ✅ **Zero lint warnings** - All ESLint warnings resolved
|
||||||
|
- ✅ **All 616 tests pass**
|
||||||
|
|
||||||
## [0.9.2] - 2025-11-27
|
## [0.9.2] - 2025-11-27
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
|
|||||||
@@ -325,17 +325,6 @@ await reportMetrics({
|
|||||||
| **AI Enablement** | Safely adopt AI coding tools at scale |
|
| **AI Enablement** | Safely adopt AI coding tools at scale |
|
||||||
| **Technical Debt Visibility** | Metrics and trends for data-driven decisions |
|
| **Technical Debt Visibility** | Metrics and trends for data-driven decisions |
|
||||||
|
|
||||||
### Enterprise Success Stories
|
|
||||||
|
|
||||||
**Fortune 500 Financial Services** 🏦
|
|
||||||
> "We have 200+ developers and were struggling with architectural consistency. Guardian reduced our code review cycle time by 35% and caught 12 hardcoded API keys before they hit production. ROI in first month." - VP Engineering
|
|
||||||
|
|
||||||
**Scale-up SaaS (Series B)** 📈
|
|
||||||
> "Guardian allowed us to confidently adopt GitHub Copilot across our team. AI writes code 3x faster, Guardian ensures quality. We ship more features without increasing tech debt." - CTO
|
|
||||||
|
|
||||||
**Consulting Firm** 💼
|
|
||||||
> "We use Guardian on every client project. It enforces our standards automatically, and clients love the quality metrics reports. Saved us from a major security incident when it caught hardcoded AWS credentials." - Lead Architect
|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -970,36 +959,6 @@ Guardian follows Clean Architecture principles:
|
|||||||
- Node.js >= 18.0.0
|
- Node.js >= 18.0.0
|
||||||
- TypeScript >= 5.0.0 (for TypeScript projects)
|
- TypeScript >= 5.0.0 (for TypeScript projects)
|
||||||
|
|
||||||
## Real-World Vibe Coding Stats
|
|
||||||
|
|
||||||
Based on testing Guardian with AI-generated codebases:
|
|
||||||
|
|
||||||
| Metric | Typical AI Code | After Guardian |
|
|
||||||
|--------|----------------|----------------|
|
|
||||||
| Hardcoded values | 15-30 per 1000 LOC | 0-2 per 1000 LOC |
|
|
||||||
| Circular deps | 2-5 per project | 0 per project |
|
|
||||||
| Architecture violations | 10-20% of files | <1% of files |
|
|
||||||
| Time to fix issues | Manual review: 2-4 hours | Guardian + AI: 5-10 minutes |
|
|
||||||
|
|
||||||
**Common Issues Guardian Finds in AI Code:**
|
|
||||||
- 🔐 Hardcoded secrets and API keys (CRITICAL)
|
|
||||||
- ⏱️ Magic timeouts and retry counts
|
|
||||||
- 🌐 Hardcoded URLs and endpoints
|
|
||||||
- 🔄 Accidental circular imports
|
|
||||||
- 📁 Files in wrong architectural layers
|
|
||||||
- 🏷️ Inconsistent naming patterns
|
|
||||||
|
|
||||||
## Success Stories
|
|
||||||
|
|
||||||
**Prototype to Production** ⚡
|
|
||||||
> "Built a SaaS MVP with Claude in 3 days. Guardian caught 47 hardcoded values before first deploy. Saved us from production disasters." - Indie Hacker
|
|
||||||
|
|
||||||
**Learning Clean Architecture** 📚
|
|
||||||
> "Guardian taught me Clean Architecture better than any tutorial. Every violation is a mini lesson with suggestions." - Junior Dev
|
|
||||||
|
|
||||||
**AI-First Startup** 🚀
|
|
||||||
> "We ship 5+ features daily using Claude + Guardian. No human code reviews needed for AI-generated code anymore." - Tech Lead
|
|
||||||
|
|
||||||
## FAQ for Vibe Coders
|
## FAQ for Vibe Coders
|
||||||
|
|
||||||
**Q: Will Guardian slow down my AI workflow?**
|
**Q: Will Guardian slow down my AI workflow?**
|
||||||
|
|||||||
@@ -20,6 +20,21 @@ This document provides authoritative sources, academic papers, industry standard
|
|||||||
12. [Aggregate Boundary Validation (DDD Tactical Patterns)](#12-aggregate-boundary-validation-ddd-tactical-patterns)
|
12. [Aggregate Boundary Validation (DDD Tactical Patterns)](#12-aggregate-boundary-validation-ddd-tactical-patterns)
|
||||||
13. [Secret Detection & Security](#13-secret-detection--security)
|
13. [Secret Detection & Security](#13-secret-detection--security)
|
||||||
14. [Severity-Based Prioritization & Technical Debt](#14-severity-based-prioritization--technical-debt)
|
14. [Severity-Based Prioritization & Technical Debt](#14-severity-based-prioritization--technical-debt)
|
||||||
|
15. [Domain Event Usage Validation](#15-domain-event-usage-validation)
|
||||||
|
16. [Value Object Immutability](#16-value-object-immutability)
|
||||||
|
17. [Command Query Separation (CQS/CQRS)](#17-command-query-separation-cqscqrs)
|
||||||
|
18. [Factory Pattern](#18-factory-pattern)
|
||||||
|
19. [Specification Pattern](#19-specification-pattern)
|
||||||
|
20. [Bounded Context](#20-bounded-context)
|
||||||
|
21. [Persistence Ignorance](#21-persistence-ignorance)
|
||||||
|
22. [Null Object Pattern](#22-null-object-pattern)
|
||||||
|
23. [Primitive Obsession](#23-primitive-obsession)
|
||||||
|
24. [Service Locator Anti-pattern](#24-service-locator-anti-pattern)
|
||||||
|
25. [Double Dispatch and Visitor Pattern](#25-double-dispatch-and-visitor-pattern)
|
||||||
|
26. [Entity Identity](#26-entity-identity)
|
||||||
|
27. [Saga Pattern](#27-saga-pattern)
|
||||||
|
28. [Anti-Corruption Layer](#28-anti-corruption-layer)
|
||||||
|
29. [Ubiquitous Language](#29-ubiquitous-language)
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -801,22 +816,840 @@ This document provides authoritative sources, academic papers, industry standard
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## 15. Domain Event Usage Validation
|
||||||
|
|
||||||
|
### Eric Evans: Domain-Driven Design (2003)
|
||||||
|
|
||||||
|
**Original Definition:**
|
||||||
|
- Domain Events: "Something happened that domain experts care about"
|
||||||
|
- Events capture facts about the domain that have already occurred
|
||||||
|
- Distinct from system events - they model business-relevant occurrences
|
||||||
|
- Reference: [Martin Fowler - Domain Event](https://martinfowler.com/eaaDev/DomainEvent.html)
|
||||||
|
|
||||||
|
**Book: Domain-Driven Design** (2003)
|
||||||
|
- Author: Eric Evans
|
||||||
|
- Publisher: Addison-Wesley Professional
|
||||||
|
- ISBN: 978-0321125217
|
||||||
|
- Domain Events were not explicitly defined in the original book but evolved later within the DDD community
|
||||||
|
- Reference: [DDD Community - Domain Events](https://www.domainlanguage.com/)
|
||||||
|
|
||||||
|
### Vaughn Vernon: Implementing Domain-Driven Design (2013)
|
||||||
|
|
||||||
|
**Chapter 8: Domain Events**
|
||||||
|
- Author: Vaughn Vernon
|
||||||
|
- Comprehensive coverage of Domain Events implementation
|
||||||
|
- "Model information about activity in the domain as a series of discrete events"
|
||||||
|
- Reference: [Amazon - Implementing DDD](https://www.amazon.com/Implementing-Domain-Driven-Design-Vaughn-Vernon/dp/0321834577)
|
||||||
|
|
||||||
|
**Key Principles:**
|
||||||
|
- Events should be immutable
|
||||||
|
- Named in past tense (OrderPlaced, UserRegistered)
|
||||||
|
- Contain all data needed by handlers
|
||||||
|
- Enable loose coupling between aggregates
|
||||||
|
|
||||||
|
### Martin Fowler's Event Patterns
|
||||||
|
|
||||||
|
**Event Sourcing:**
|
||||||
|
- "Capture all changes to an application state as a sequence of events"
|
||||||
|
- Events become the primary source of truth
|
||||||
|
- Reference: [Martin Fowler - Event Sourcing](https://martinfowler.com/eaaDev/EventSourcing.html)
|
||||||
|
|
||||||
|
**Event-Driven Architecture:**
|
||||||
|
- Promotes loose coupling between components
|
||||||
|
- Enables asynchronous processing
|
||||||
|
- Reference: [Martin Fowler - Event-Driven](https://martinfowler.com/articles/201701-event-driven.html)
|
||||||
|
|
||||||
|
### Why Direct Infrastructure Calls Are Bad
|
||||||
|
|
||||||
|
**Coupling Issues:**
|
||||||
|
- Direct calls create tight coupling between domain and infrastructure
|
||||||
|
- Makes testing difficult (need to mock infrastructure)
|
||||||
|
- Violates Single Responsibility Principle
|
||||||
|
- Reference: [Microsoft - Domain Events Design](https://learn.microsoft.com/en-us/dotnet/architecture/microservices/microservice-ddd-cqrs-patterns/domain-events-design-implementation)
|
||||||
|
|
||||||
|
**Benefits of Domain Events:**
|
||||||
|
- Decouples domain from side effects
|
||||||
|
- Enables eventual consistency
|
||||||
|
- Improves testability
|
||||||
|
- Supports audit logging naturally
|
||||||
|
- Reference: [Jimmy Bogard - Domain Events](https://lostechies.com/jimmybogard/2010/04/08/strengthening-your-domain-domain-events/)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 16. Value Object Immutability
|
||||||
|
|
||||||
|
### Eric Evans: Domain-Driven Design (2003)
|
||||||
|
|
||||||
|
**Value Object Definition:**
|
||||||
|
- "An object that describes some characteristic or attribute but carries no concept of identity"
|
||||||
|
- "Value Objects should be immutable"
|
||||||
|
- When you care only about the attributes of an element, classify it as a Value Object
|
||||||
|
- Reference: [Martin Fowler - Value Object](https://martinfowler.com/bliki/ValueObject.html)
|
||||||
|
|
||||||
|
**Immutability Requirement:**
|
||||||
|
- "Treat the Value Object as immutable"
|
||||||
|
- "Don't give it any identity and avoid the design complexities necessary to maintain Entities"
|
||||||
|
- Reference: [DDD Reference - Value Objects](https://www.domainlanguage.com/wp-content/uploads/2016/05/DDD_Reference_2015-03.pdf)
|
||||||
|
|
||||||
|
### Martin Fowler on Value Objects
|
||||||
|
|
||||||
|
**Blog Post: Value Object** (2016)
|
||||||
|
- "A small simple object, like money or a date range, whose equality isn't based on identity"
|
||||||
|
- "I consider value objects to be one of the most important building blocks of good domain models"
|
||||||
|
- Reference: [Martin Fowler - Value Object](https://martinfowler.com/bliki/ValueObject.html)
|
||||||
|
|
||||||
|
**Key Properties:**
|
||||||
|
- Equality based on attribute values, not identity
|
||||||
|
- Should be immutable (once created, cannot be changed)
|
||||||
|
- Side-effect free behavior
|
||||||
|
- Self-validating (validate in constructor)
|
||||||
|
|
||||||
|
### Vaughn Vernon: Implementing DDD
|
||||||
|
|
||||||
|
**Chapter 6: Value Objects**
|
||||||
|
- Detailed implementation guidance
|
||||||
|
- "Measures, quantifies, or describes a thing in the domain"
|
||||||
|
- "Can be compared with other Value Objects using value equality"
|
||||||
|
- "Completely replaceable when the measurement changes"
|
||||||
|
- Reference: [Vaughn Vernon - Implementing DDD](https://www.amazon.com/Implementing-Domain-Driven-Design-Vaughn-Vernon/dp/0321834577)
|
||||||
|
|
||||||
|
### Why Immutability Matters
|
||||||
|
|
||||||
|
**Thread Safety:**
|
||||||
|
- Immutable objects are inherently thread-safe
|
||||||
|
- No synchronization needed for concurrent access
|
||||||
|
- Reference: [Effective Java - Item 17](https://www.amazon.com/Effective-Java-Joshua-Bloch/dp/0134685997)
|
||||||
|
|
||||||
|
**Reasoning About Code:**
|
||||||
|
- Easier to understand code when objects don't change
|
||||||
|
- No defensive copying needed
|
||||||
|
- Simplifies caching and optimization
|
||||||
|
- Reference: [Oracle Java Tutorials - Immutable Objects](https://docs.oracle.com/javase/tutorial/essential/concurrency/immutable.html)
|
||||||
|
|
||||||
|
**Functional Programming Influence:**
|
||||||
|
- Immutability is a core principle of functional programming
|
||||||
|
- Reduces side effects and makes code more predictable
|
||||||
|
- Reference: [Wikipedia - Immutable Object](https://en.wikipedia.org/wiki/Immutable_object)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 17. Command Query Separation (CQS/CQRS)
|
||||||
|
|
||||||
|
### Bertrand Meyer: Original CQS Principle
|
||||||
|
|
||||||
|
**Book: Object-Oriented Software Construction** (1988, 2nd Ed. 1997)
|
||||||
|
- Author: Bertrand Meyer
|
||||||
|
- Publisher: Prentice Hall
|
||||||
|
- ISBN: 978-0136291558
|
||||||
|
- Introduced Command Query Separation principle
|
||||||
|
- Reference: [Wikipedia - CQS](https://en.wikipedia.org/wiki/Command%E2%80%93query_separation)
|
||||||
|
|
||||||
|
**CQS Principle:**
|
||||||
|
- "Every method should either be a command that performs an action, or a query that returns data to the caller, but not both"
|
||||||
|
- Commands: change state, return nothing (void)
|
||||||
|
- Queries: return data, change nothing (side-effect free)
|
||||||
|
- Reference: [Martin Fowler - CommandQuerySeparation](https://martinfowler.com/bliki/CommandQuerySeparation.html)
|
||||||
|
|
||||||
|
### Greg Young: CQRS Pattern
|
||||||
|
|
||||||
|
**CQRS Documents** (2010)
|
||||||
|
- Author: Greg Young
|
||||||
|
- Extended CQS to architectural pattern
|
||||||
|
- "CQRS is simply the creation of two objects where there was previously only one"
|
||||||
|
- Reference: [Greg Young - CQRS Documents](https://cqrs.files.wordpress.com/2010/11/cqrs_documents.pdf)
|
||||||
|
|
||||||
|
**Key Concepts:**
|
||||||
|
- Separate models for reading and writing
|
||||||
|
- Write model (commands) optimized for business logic
|
||||||
|
- Read model (queries) optimized for display/reporting
|
||||||
|
- Reference: [Microsoft - CQRS Pattern](https://learn.microsoft.com/en-us/azure/architecture/patterns/cqrs)
|
||||||
|
|
||||||
|
### Martin Fowler on CQRS
|
||||||
|
|
||||||
|
**Blog Post: CQRS** (2011)
|
||||||
|
- "At its heart is the notion that you can use a different model to update information than the model you use to read information"
|
||||||
|
- Warns against overuse: "CQRS is a significant mental leap for all concerned"
|
||||||
|
- Reference: [Martin Fowler - CQRS](https://martinfowler.com/bliki/CQRS.html)
|
||||||
|
|
||||||
|
### Benefits and Trade-offs
|
||||||
|
|
||||||
|
**Benefits:**
|
||||||
|
- Independent scaling of read and write workloads
|
||||||
|
- Optimized data schemas for each side
|
||||||
|
- Improved security (separate read/write permissions)
|
||||||
|
- Reference: [AWS - CQRS Pattern](https://docs.aws.amazon.com/prescriptive-guidance/latest/modernization-data-persistence/cqrs-pattern.html)
|
||||||
|
|
||||||
|
**Trade-offs:**
|
||||||
|
- Increased complexity
|
||||||
|
- Eventual consistency challenges
|
||||||
|
- More code to maintain
|
||||||
|
- Reference: [Microsoft - CQRS Considerations](https://learn.microsoft.com/en-us/azure/architecture/patterns/cqrs#issues-and-considerations)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 18. Factory Pattern
|
||||||
|
|
||||||
|
### Gang of Four: Design Patterns (1994)
|
||||||
|
|
||||||
|
**Book: Design Patterns: Elements of Reusable Object-Oriented Software**
|
||||||
|
- Authors: Erich Gamma, Richard Helm, Ralph Johnson, John Vlissides (Gang of Four)
|
||||||
|
- Publisher: Addison-Wesley
|
||||||
|
- ISBN: 978-0201633610
|
||||||
|
- Defines Factory Method and Abstract Factory patterns
|
||||||
|
- Reference: [Wikipedia - Design Patterns](https://en.wikipedia.org/wiki/Design_Patterns)
|
||||||
|
|
||||||
|
**Factory Method Pattern:**
|
||||||
|
- "Define an interface for creating an object, but let subclasses decide which class to instantiate"
|
||||||
|
- Lets a class defer instantiation to subclasses
|
||||||
|
- Reference: [Refactoring Guru - Factory Method](https://refactoring.guru/design-patterns/factory-method)
|
||||||
|
|
||||||
|
**Abstract Factory Pattern:**
|
||||||
|
- "Provide an interface for creating families of related or dependent objects without specifying their concrete classes"
|
||||||
|
- Reference: [Refactoring Guru - Abstract Factory](https://refactoring.guru/design-patterns/abstract-factory)
|
||||||
|
|
||||||
|
### Eric Evans: Factory in DDD Context
|
||||||
|
|
||||||
|
**Domain-Driven Design** (2003)
|
||||||
|
- Chapter 6: "The Life Cycle of a Domain Object"
|
||||||
|
- Factories encapsulate complex object creation
|
||||||
|
- "Shift the responsibility for creating instances of complex objects and Aggregates to a separate object"
|
||||||
|
- Reference: [DDD Reference](https://www.domainlanguage.com/wp-content/uploads/2016/05/DDD_Reference_2015-03.pdf)
|
||||||
|
|
||||||
|
**DDD Factory Guidelines:**
|
||||||
|
- Factory should create valid objects (invariants satisfied)
|
||||||
|
- Two types: Factories that create new objects, and Factories that reconstitute existing objects from storage
|
||||||
|
- Keep creation logic out of the entity itself
|
||||||
|
- Reference: Already in Section 10 - Domain-Driven Design
|
||||||
|
|
||||||
|
### Why Factories Matter in DDD
|
||||||
|
|
||||||
|
**Encapsulation of Creation Logic:**
|
||||||
|
- Complex aggregates need coordinated creation
|
||||||
|
- Business rules should be enforced at creation time
|
||||||
|
- Clients shouldn't know construction details
|
||||||
|
- Reference: [Vaughn Vernon - Implementing DDD, Chapter 11](https://www.amazon.com/Implementing-Domain-Driven-Design-Vaughn-Vernon/dp/0321834577)
|
||||||
|
|
||||||
|
**Factory vs Constructor:**
|
||||||
|
- Constructors should be simple (assign values)
|
||||||
|
- Factories handle complex creation logic
|
||||||
|
- Factories can return different types
|
||||||
|
- Reference: [Effective Java - Item 1: Static Factory Methods](https://www.amazon.com/Effective-Java-Joshua-Bloch/dp/0134685997)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 19. Specification Pattern
|
||||||
|
|
||||||
|
### Eric Evans & Martin Fowler
|
||||||
|
|
||||||
|
**Original Paper: Specifications** (1997)
|
||||||
|
- Authors: Eric Evans and Martin Fowler
|
||||||
|
- Introduced the Specification pattern
|
||||||
|
- "A Specification states a constraint on the state of another object"
|
||||||
|
- Reference: [Martin Fowler - Specification](https://martinfowler.com/apsupp/spec.pdf)
|
||||||
|
|
||||||
|
**Domain-Driven Design** (2003)
|
||||||
|
- Chapter 9: "Making Implicit Concepts Explicit"
|
||||||
|
- Specifications make business rules explicit and reusable
|
||||||
|
- "Create explicit predicate-like Value Objects for specialized purposes"
|
||||||
|
- Reference: [DDD Reference](https://www.domainlanguage.com/wp-content/uploads/2016/05/DDD_Reference_2015-03.pdf)
|
||||||
|
|
||||||
|
### Pattern Definition
|
||||||
|
|
||||||
|
**Core Concept:**
|
||||||
|
- Specification is a predicate that determines if an object satisfies some criteria
|
||||||
|
- Encapsulates business rules that can be reused and combined
|
||||||
|
- Reference: [Wikipedia - Specification Pattern](https://en.wikipedia.org/wiki/Specification_pattern)
|
||||||
|
|
||||||
|
**Three Main Uses:**
|
||||||
|
1. **Selection**: Finding objects that match criteria
|
||||||
|
2. **Validation**: Checking if object satisfies rules
|
||||||
|
3. **Construction**: Describing what needs to be created
|
||||||
|
- Reference: [Martin Fowler - Specification](https://martinfowler.com/apsupp/spec.pdf)
|
||||||
|
|
||||||
|
### Composite Specifications
|
||||||
|
|
||||||
|
**Combining Specifications:**
|
||||||
|
- AND: Both specifications must be satisfied
|
||||||
|
- OR: Either specification must be satisfied
|
||||||
|
- NOT: Specification must not be satisfied
|
||||||
|
- Reference: [Refactoring Guru - Specification Pattern](https://refactoring.guru/design-patterns/specification)
|
||||||
|
|
||||||
|
**Benefits:**
|
||||||
|
- Reusable business rules
|
||||||
|
- Testable in isolation
|
||||||
|
- Readable domain language
|
||||||
|
- Composable for complex rules
|
||||||
|
- Reference: [Enterprise Craftsmanship - Specification Pattern](https://enterprisecraftsmanship.com/posts/specification-pattern-c-implementation/)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 20. Bounded Context
|
||||||
|
|
||||||
|
### Eric Evans: Domain-Driven Design (2003)
|
||||||
|
|
||||||
|
**Original Definition:**
|
||||||
|
- "A Bounded Context delimits the applicability of a particular model"
|
||||||
|
- "Explicitly define the context within which a model applies"
|
||||||
|
- Chapter 14: "Maintaining Model Integrity"
|
||||||
|
- Reference: [Martin Fowler - Bounded Context](https://martinfowler.com/bliki/BoundedContext.html)
|
||||||
|
|
||||||
|
**Key Principles:**
|
||||||
|
- Each Bounded Context has its own Ubiquitous Language
|
||||||
|
- Same term can mean different things in different contexts
|
||||||
|
- Models should not be shared across context boundaries
|
||||||
|
- Reference: [DDD Reference](https://www.domainlanguage.com/wp-content/uploads/2016/05/DDD_Reference_2015-03.pdf)
|
||||||
|
|
||||||
|
### Vaughn Vernon: Strategic Design
|
||||||
|
|
||||||
|
**Implementing Domain-Driven Design** (2013)
|
||||||
|
- Chapter 2: "Domains, Subdomains, and Bounded Contexts"
|
||||||
|
- Detailed guidance on identifying and mapping contexts
|
||||||
|
- Reference: [Vaughn Vernon - Implementing DDD](https://www.amazon.com/Implementing-Domain-Driven-Design-Vaughn-Vernon/dp/0321834577)
|
||||||
|
|
||||||
|
**Context Mapping Patterns:**
|
||||||
|
- Shared Kernel
|
||||||
|
- Customer/Supplier
|
||||||
|
- Conformist
|
||||||
|
- Anti-Corruption Layer
|
||||||
|
- Open Host Service / Published Language
|
||||||
|
- Reference: [Context Mapping Patterns](https://www.infoq.com/articles/ddd-contextmapping/)
|
||||||
|
|
||||||
|
### Why Bounded Contexts Matter
|
||||||
|
|
||||||
|
**Avoiding Big Ball of Mud:**
|
||||||
|
- Without explicit boundaries, models become entangled
|
||||||
|
- Different teams step on each other's models
|
||||||
|
- Reference: [Wikipedia - Big Ball of Mud](https://en.wikipedia.org/wiki/Big_ball_of_mud)
|
||||||
|
|
||||||
|
**Microservices and Bounded Contexts:**
|
||||||
|
- "Microservices should be designed around business capabilities, aligned with bounded contexts"
|
||||||
|
- Each microservice typically represents one bounded context
|
||||||
|
- Reference: [Microsoft - Microservices and Bounded Contexts](https://learn.microsoft.com/en-us/azure/architecture/microservices/model/domain-analysis)
|
||||||
|
|
||||||
|
### Cross-Context Communication
|
||||||
|
|
||||||
|
**Integration Patterns:**
|
||||||
|
- Never share domain models across contexts
|
||||||
|
- Use integration events or APIs
|
||||||
|
- Translate between context languages
|
||||||
|
- Reference: [Microsoft - Tactical DDD](https://learn.microsoft.com/en-us/azure/architecture/microservices/model/tactical-ddd)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 21. Persistence Ignorance
|
||||||
|
|
||||||
|
### Definition and Principles
|
||||||
|
|
||||||
|
**Core Concept:**
|
||||||
|
- Domain objects should have no knowledge of how they are persisted
|
||||||
|
- Business logic remains pure and testable
|
||||||
|
- Infrastructure concerns are separated from domain
|
||||||
|
- Reference: [Microsoft - Persistence Ignorance](https://learn.microsoft.com/en-us/dotnet/architecture/microservices/microservice-ddd-cqrs-patterns/infrastructure-persistence-layer-design#the-persistence-ignorance-principle)
|
||||||
|
|
||||||
|
**Wikipedia Definition:**
|
||||||
|
- "Persistence ignorance is the ability of a class to be used without any underlying persistence mechanism"
|
||||||
|
- Objects don't know if/how they'll be stored
|
||||||
|
- Reference: [Wikipedia - Persistence Ignorance](https://en.wikipedia.org/wiki/Persistence_ignorance)
|
||||||
|
|
||||||
|
### Eric Evans: DDD and Persistence
|
||||||
|
|
||||||
|
**Domain-Driven Design** (2003)
|
||||||
|
- Repositories abstract away persistence details
|
||||||
|
- Domain model should not reference ORM or database concepts
|
||||||
|
- Reference: Already covered in Section 6 - Repository Pattern
|
||||||
|
|
||||||
|
**Key Quote:**
|
||||||
|
- "The domain layer should be kept clean of all technical concerns"
|
||||||
|
- ORM annotations violate this principle
|
||||||
|
- Reference: [Clean Architecture and DDD](https://herbertograca.com/2017/11/16/explicit-architecture-01-ddd-hexagonal-onion-clean-cqrs-how-i-put-it-all-together/)
|
||||||
|
|
||||||
|
### Clean Architecture Alignment
|
||||||
|
|
||||||
|
**Robert C. Martin:**
|
||||||
|
- "The database is a detail"
|
||||||
|
- Domain entities should not depend on persistence frameworks
|
||||||
|
- Use Repository interfaces to abstract persistence
|
||||||
|
- Reference: [Clean Architecture Book](https://www.amazon.com/Clean-Architecture-Craftsmans-Software-Structure/dp/0134494164)
|
||||||
|
|
||||||
|
### Practical Implementation
|
||||||
|
|
||||||
|
**Two-Model Approach:**
|
||||||
|
- Domain Model: Pure business objects
|
||||||
|
- Persistence Model: ORM-annotated entities
|
||||||
|
- Mappers translate between them
|
||||||
|
- Reference: [Microsoft - Infrastructure Layer](https://learn.microsoft.com/en-us/dotnet/architecture/microservices/microservice-ddd-cqrs-patterns/infrastructure-persistence-layer-design)
|
||||||
|
|
||||||
|
**Benefits:**
|
||||||
|
- Domain model can evolve independently of database schema
|
||||||
|
- Easier testing (no ORM required)
|
||||||
|
- Database can be changed without affecting domain
|
||||||
|
- Reference: [Enterprise Craftsmanship - Persistence Ignorance](https://enterprisecraftsmanship.com/posts/persistence-ignorance/)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 22. Null Object Pattern
|
||||||
|
|
||||||
|
### Original Pattern
|
||||||
|
|
||||||
|
**Pattern Languages of Program Design 3** (1997)
|
||||||
|
- Author: Bobby Woolf
|
||||||
|
- Chapter: "Null Object"
|
||||||
|
- Publisher: Addison-Wesley
|
||||||
|
- ISBN: 978-0201310115
|
||||||
|
- Reference: [Wikipedia - Null Object Pattern](https://en.wikipedia.org/wiki/Null_object_pattern)
|
||||||
|
|
||||||
|
**Definition:**
|
||||||
|
- "A Null Object provides a 'do nothing' behavior, hiding the details from its collaborators"
|
||||||
|
- Replaces null checks with polymorphism
|
||||||
|
- Reference: [Refactoring Guru - Null Object](https://refactoring.guru/introduce-null-object)
|
||||||
|
|
||||||
|
### Martin Fowler's Coverage
|
||||||
|
|
||||||
|
**Refactoring Book** (1999, 2018)
|
||||||
|
- "Introduce Null Object" refactoring
|
||||||
|
- "Replace conditional logic that checks for null with a null object"
|
||||||
|
- Reference: [Refactoring Catalog](https://refactoring.com/catalog/introduceNullObject.html)
|
||||||
|
|
||||||
|
**Special Case Pattern:**
|
||||||
|
- More general pattern that includes Null Object
|
||||||
|
- "A subclass that provides special behavior for particular cases"
|
||||||
|
- Reference: [Martin Fowler - Special Case](https://martinfowler.com/eaaCatalog/specialCase.html)
|
||||||
|
|
||||||
|
### Benefits
|
||||||
|
|
||||||
|
**Eliminates Null Checks:**
|
||||||
|
- Reduces cyclomatic complexity
|
||||||
|
- Cleaner, more readable code
|
||||||
|
- Follows "Tell, Don't Ask" principle
|
||||||
|
- Reference: [SourceMaking - Null Object](https://sourcemaking.com/design_patterns/null_object)
|
||||||
|
|
||||||
|
**Polymorphism Over Conditionals:**
|
||||||
|
- Null Object responds to same interface as real object
|
||||||
|
- Default/neutral behavior instead of null checks
|
||||||
|
- Reference: [C2 Wiki - Null Object](https://wiki.c2.com/?NullObject)
|
||||||
|
|
||||||
|
### When to Use
|
||||||
|
|
||||||
|
**Good Candidates:**
|
||||||
|
- Objects frequently checked for null
|
||||||
|
- Null represents "absence" with sensible default behavior
|
||||||
|
- Reference: [Baeldung - Null Object Pattern](https://www.baeldung.com/java-null-object-pattern)
|
||||||
|
|
||||||
|
**Cautions:**
|
||||||
|
- Don't use when null has semantic meaning
|
||||||
|
- Can hide bugs if misapplied
|
||||||
|
- Reference: [Stack Overflow - Null Object Considerations](https://stackoverflow.com/questions/1274792/is-the-null-object-pattern-a-bad-practice)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 23. Primitive Obsession
|
||||||
|
|
||||||
|
### Code Smell Definition
|
||||||
|
|
||||||
|
**Martin Fowler: Refactoring** (1999, 2018)
|
||||||
|
- Primitive Obsession is a code smell
|
||||||
|
- "Using primitives instead of small objects for simple tasks"
|
||||||
|
- Reference: [Refactoring Catalog](https://refactoring.com/catalog/)
|
||||||
|
|
||||||
|
**Wikipedia Definition:**
|
||||||
|
- "Using primitive data types to represent domain ideas"
|
||||||
|
- Example: Using string for email, int for money
|
||||||
|
- Reference: [Wikipedia - Code Smell](https://en.wikipedia.org/wiki/Code_smell)
|
||||||
|
|
||||||
|
### Why It's a Problem
|
||||||
|
|
||||||
|
**Lost Type Safety:**
|
||||||
|
- String can contain anything, Email cannot
|
||||||
|
- Compiler can't catch domain errors
|
||||||
|
- Reference: [Refactoring Guru - Primitive Obsession](https://refactoring.guru/smells/primitive-obsession)
|
||||||
|
|
||||||
|
**Scattered Validation:**
|
||||||
|
- Same validation repeated in multiple places
|
||||||
|
- Violates DRY principle
|
||||||
|
- Reference: [SourceMaking - Primitive Obsession](https://sourcemaking.com/refactoring/smells/primitive-obsession)
|
||||||
|
|
||||||
|
**Missing Behavior:**
|
||||||
|
- Primitives can't have domain-specific methods
|
||||||
|
- Logic lives in services instead of objects
|
||||||
|
- Reference: [Enterprise Craftsmanship - Primitive Obsession](https://enterprisecraftsmanship.com/posts/functional-c-primitive-obsession/)
|
||||||
|
|
||||||
|
### Solutions
|
||||||
|
|
||||||
|
**Replace with Value Objects:**
|
||||||
|
- Money instead of decimal
|
||||||
|
- Email instead of string
|
||||||
|
- PhoneNumber instead of string
|
||||||
|
- Reference: Already covered in Section 16 - Value Object Immutability
|
||||||
|
|
||||||
|
**Replace Data Value with Object:**
|
||||||
|
- Refactoring: "Replace Data Value with Object"
|
||||||
|
- Introduce Parameter Object for related primitives
|
||||||
|
- Reference: [Refactoring - Replace Data Value with Object](https://refactoring.com/catalog/replaceDataValueWithObject.html)
|
||||||
|
|
||||||
|
### Common Primitive Obsession Examples
|
||||||
|
|
||||||
|
**Frequently Misused Primitives:**
|
||||||
|
- string for: email, phone, URL, currency code, country code
|
||||||
|
- int/decimal for: money, percentage, age, quantity
|
||||||
|
- DateTime for: date ranges, business dates
|
||||||
|
- Reference: [DDD - Value Objects](https://martinfowler.com/bliki/ValueObject.html)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 24. Service Locator Anti-pattern
|
||||||
|
|
||||||
|
### Martin Fowler's Analysis
|
||||||
|
|
||||||
|
**Blog Post: Inversion of Control Containers and the Dependency Injection pattern** (2004)
|
||||||
|
- Compares Service Locator with Dependency Injection
|
||||||
|
- "With service locator the application class asks for it explicitly by a message to the locator"
|
||||||
|
- Reference: [Martin Fowler - Inversion of Control](https://martinfowler.com/articles/injection.html)
|
||||||
|
|
||||||
|
**Service Locator Definition:**
|
||||||
|
- "The basic idea behind a service locator is to have an object that knows how to get hold of all of the services that an application might need"
|
||||||
|
- Acts as a registry that provides dependencies on demand
|
||||||
|
- Reference: [Martin Fowler - Service Locator](https://martinfowler.com/articles/injection.html#UsingAServiceLocator)
|
||||||
|
|
||||||
|
### Why It's Considered an Anti-pattern
|
||||||
|
|
||||||
|
**Mark Seemann: Dependency Injection in .NET** (2011, 2nd Ed. 2019)
|
||||||
|
- Author: Mark Seemann
|
||||||
|
- Extensively covers why Service Locator is problematic
|
||||||
|
- "Service Locator is an anti-pattern"
|
||||||
|
- Reference: [Mark Seemann - Service Locator is an Anti-Pattern](https://blog.ploeh.dk/2010/02/03/ServiceLocatorisanAnti-Pattern/)
|
||||||
|
|
||||||
|
**Hidden Dependencies:**
|
||||||
|
- Dependencies are not visible in constructor
|
||||||
|
- Makes code harder to understand and test
|
||||||
|
- Violates Explicit Dependencies Principle
|
||||||
|
- Reference: [DevIQ - Explicit Dependencies](https://deviq.com/principles/explicit-dependencies-principle)
|
||||||
|
|
||||||
|
**Testing Difficulties:**
|
||||||
|
- Need to set up global locator for tests
|
||||||
|
- Tests become coupled to locator setup
|
||||||
|
- Reference: [Stack Overflow - Service Locator Testing](https://stackoverflow.com/questions/1557781/is-service-locator-an-anti-pattern)
|
||||||
|
|
||||||
|
### Dependency Injection Alternative
|
||||||
|
|
||||||
|
**Constructor Injection:**
|
||||||
|
- Dependencies declared in constructor
|
||||||
|
- Compiler enforces dependency provision
|
||||||
|
- Clear, testable code
|
||||||
|
- Reference: Already covered in Section 6 - Repository Pattern
|
||||||
|
|
||||||
|
**Benefits over Service Locator:**
|
||||||
|
- Explicit dependencies
|
||||||
|
- Easier testing (just pass mocks)
|
||||||
|
- IDE support for navigation
|
||||||
|
- Compile-time checking
|
||||||
|
- Reference: [Martin Fowler - Constructor Injection](https://martinfowler.com/articles/injection.html#ConstructorInjectionWithPicocontainer)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 25. Double Dispatch and Visitor Pattern
|
||||||
|
|
||||||
|
### Gang of Four: Visitor Pattern
|
||||||
|
|
||||||
|
**Design Patterns** (1994)
|
||||||
|
- Authors: Gang of Four
|
||||||
|
- Visitor Pattern chapter
|
||||||
|
- "Represent an operation to be performed on the elements of an object structure"
|
||||||
|
- Reference: [Wikipedia - Visitor Pattern](https://en.wikipedia.org/wiki/Visitor_pattern)
|
||||||
|
|
||||||
|
**Intent:**
|
||||||
|
- "Lets you define a new operation without changing the classes of the elements on which it operates"
|
||||||
|
- Separates algorithms from object structure
|
||||||
|
- Reference: [Refactoring Guru - Visitor](https://refactoring.guru/design-patterns/visitor)
|
||||||
|
|
||||||
|
### Double Dispatch Mechanism
|
||||||
|
|
||||||
|
**Definition:**
|
||||||
|
- "A mechanism that dispatches a function call to different concrete functions depending on the runtime types of two objects involved in the call"
|
||||||
|
- Visitor pattern uses double dispatch
|
||||||
|
- Reference: [Wikipedia - Double Dispatch](https://en.wikipedia.org/wiki/Double_dispatch)
|
||||||
|
|
||||||
|
**How It Works:**
|
||||||
|
1. Client calls element.accept(visitor)
|
||||||
|
2. Element calls visitor.visit(this) - first dispatch
|
||||||
|
3. Correct visit() overload selected - second dispatch
|
||||||
|
- Reference: [SourceMaking - Visitor](https://sourcemaking.com/design_patterns/visitor)
|
||||||
|
|
||||||
|
### When to Use
|
||||||
|
|
||||||
|
**Good Use Cases:**
|
||||||
|
- Operations on complex object structures
|
||||||
|
- Many distinct operations needed
|
||||||
|
- Object structure rarely changes but operations change often
|
||||||
|
- Reference: [Refactoring Guru - Visitor Use Cases](https://refactoring.guru/design-patterns/visitor)
|
||||||
|
|
||||||
|
**Alternative to Type Checking:**
|
||||||
|
- Replace instanceof/typeof checks with polymorphism
|
||||||
|
- More maintainable and extensible
|
||||||
|
- Reference: [Replace Conditional with Polymorphism](https://refactoring.guru/replace-conditional-with-polymorphism)
|
||||||
|
|
||||||
|
### Trade-offs
|
||||||
|
|
||||||
|
**Advantages:**
|
||||||
|
- Open/Closed Principle for new operations
|
||||||
|
- Related operations grouped in one class
|
||||||
|
- Accumulate state while traversing
|
||||||
|
- Reference: [GoF Design Patterns](https://www.amazon.com/Design-Patterns-Elements-Reusable-Object-Oriented/dp/0201633612)
|
||||||
|
|
||||||
|
**Disadvantages:**
|
||||||
|
- Adding new element types requires changing all visitors
|
||||||
|
- May break encapsulation (visitors need access to element internals)
|
||||||
|
- Reference: [C2 Wiki - Visitor Pattern](https://wiki.c2.com/?VisitorPattern)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 26. Entity Identity
|
||||||
|
|
||||||
|
### Eric Evans: Domain-Driven Design (2003)
|
||||||
|
|
||||||
|
**Entity Definition:**
|
||||||
|
- "An object that is not defined by its attributes, but rather by a thread of continuity and its identity"
|
||||||
|
- "Some objects are not defined primarily by their attributes. They represent a thread of identity"
|
||||||
|
- Reference: [Martin Fowler - Evans Classification](https://martinfowler.com/bliki/EvansClassification.html)
|
||||||
|
|
||||||
|
**Identity Characteristics:**
|
||||||
|
- Unique within the system
|
||||||
|
- Stable over time (doesn't change)
|
||||||
|
- Survives state changes
|
||||||
|
- Reference: [DDD Reference](https://www.domainlanguage.com/wp-content/uploads/2016/05/DDD_Reference_2015-03.pdf)
|
||||||
|
|
||||||
|
### Vaughn Vernon: Identity Implementation
|
||||||
|
|
||||||
|
**Implementing Domain-Driven Design** (2013)
|
||||||
|
- Chapter 5: "Entities"
|
||||||
|
- Detailed coverage of identity strategies
|
||||||
|
- "The primary characteristic of an Entity is that it has a unique identity"
|
||||||
|
- Reference: [Vaughn Vernon - Implementing DDD](https://www.amazon.com/Implementing-Domain-Driven-Design-Vaughn-Vernon/dp/0321834577)
|
||||||
|
|
||||||
|
**Identity Types:**
|
||||||
|
- Natural keys (SSN, email)
|
||||||
|
- Surrogate keys (UUID, auto-increment)
|
||||||
|
- Domain-generated IDs
|
||||||
|
- Reference: [Microsoft - Entity Keys](https://learn.microsoft.com/en-us/ef/core/modeling/keys)
|
||||||
|
|
||||||
|
### Identity Best Practices
|
||||||
|
|
||||||
|
**Immutability of Identity:**
|
||||||
|
- Identity should never change after creation
|
||||||
|
- Use readonly/final fields
|
||||||
|
- Reference: [StackExchange - Mutable Entity ID](https://softwareengineering.stackexchange.com/questions/375765/is-it-bad-practice-to-have-mutable-entity-ids)
|
||||||
|
|
||||||
|
**Value Object for Identity:**
|
||||||
|
- Wrap identity in Value Object (UserId, OrderId)
|
||||||
|
- Type safety prevents mixing IDs
|
||||||
|
- Can include validation logic
|
||||||
|
- Reference: [Enterprise Craftsmanship - Strongly Typed IDs](https://enterprisecraftsmanship.com/posts/strongly-typed-ids/)
|
||||||
|
|
||||||
|
**Equality Based on Identity:**
|
||||||
|
- Entity equality should compare only identity
|
||||||
|
- Not all attributes
|
||||||
|
- Reference: [Vaughn Vernon - Entity Equality](https://www.amazon.com/Implementing-Domain-Driven-Design-Vaughn-Vernon/dp/0321834577)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 27. Saga Pattern
|
||||||
|
|
||||||
|
### Original Research
|
||||||
|
|
||||||
|
**Paper: Sagas** (1987)
|
||||||
|
- Authors: Hector Garcia-Molina and Kenneth Salem
|
||||||
|
- Published: ACM SIGMOD Conference
|
||||||
|
- Introduced Sagas for long-lived transactions
|
||||||
|
- Reference: [ACM Digital Library - Sagas](https://dl.acm.org/doi/10.1145/38713.38742)
|
||||||
|
|
||||||
|
**Definition:**
|
||||||
|
- "A saga is a sequence of local transactions where each transaction updates data within a single service"
|
||||||
|
- Alternative to distributed transactions
|
||||||
|
- Reference: [Microsoft - Saga Pattern](https://learn.microsoft.com/en-us/azure/architecture/reference-architectures/saga/saga)
|
||||||
|
|
||||||
|
### Chris Richardson: Microservices Patterns
|
||||||
|
|
||||||
|
**Book: Microservices Patterns** (2018)
|
||||||
|
- Author: Chris Richardson
|
||||||
|
- Publisher: Manning
|
||||||
|
- ISBN: 978-1617294549
|
||||||
|
- Chapter 4: "Managing Transactions with Sagas"
|
||||||
|
- Reference: [Manning - Microservices Patterns](https://www.manning.com/books/microservices-patterns)
|
||||||
|
|
||||||
|
**Saga Types:**
|
||||||
|
1. **Choreography**: Each service publishes events that trigger next steps
|
||||||
|
2. **Orchestration**: Central coordinator tells services what to do
|
||||||
|
- Reference: [Microservices.io - Saga](https://microservices.io/patterns/data/saga.html)
|
||||||
|
|
||||||
|
### Compensating Transactions
|
||||||
|
|
||||||
|
**Core Concept:**
|
||||||
|
- Each step has a compensating action to undo it
|
||||||
|
- If step N fails, compensate steps N-1, N-2, ..., 1
|
||||||
|
- Reference: [AWS - Saga Pattern](https://docs.aws.amazon.com/prescriptive-guidance/latest/modernization-data-persistence/saga-pattern.html)
|
||||||
|
|
||||||
|
**Compensation Examples:**
|
||||||
|
- CreateOrder → DeleteOrder
|
||||||
|
- ReserveInventory → ReleaseInventory
|
||||||
|
- ChargePayment → RefundPayment
|
||||||
|
- Reference: [Microsoft - Compensating Transactions](https://learn.microsoft.com/en-us/azure/architecture/patterns/compensating-transaction)
|
||||||
|
|
||||||
|
### Trade-offs
|
||||||
|
|
||||||
|
**Advantages:**
|
||||||
|
- Works across service boundaries
|
||||||
|
- No distributed locks
|
||||||
|
- Services remain autonomous
|
||||||
|
- Reference: [Chris Richardson - Saga](https://chrisrichardson.net/post/microservices/patterns/data/2019/07/22/design-sagas.html)
|
||||||
|
|
||||||
|
**Challenges:**
|
||||||
|
- Complexity of compensation logic
|
||||||
|
- Eventual consistency
|
||||||
|
- Debugging distributed sagas
|
||||||
|
- Reference: [Microsoft - Saga Considerations](https://learn.microsoft.com/en-us/azure/architecture/reference-architectures/saga/saga#issues-and-considerations)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 28. Anti-Corruption Layer
|
||||||
|
|
||||||
|
### Eric Evans: Domain-Driven Design (2003)
|
||||||
|
|
||||||
|
**Original Definition:**
|
||||||
|
- Chapter 14: "Maintaining Model Integrity"
|
||||||
|
- "Create an isolating layer to provide clients with functionality in terms of their own domain model"
|
||||||
|
- Protects your model from external/legacy models
|
||||||
|
- Reference: [DDD Reference](https://www.domainlanguage.com/wp-content/uploads/2016/05/DDD_Reference_2015-03.pdf)
|
||||||
|
|
||||||
|
**Purpose:**
|
||||||
|
- "The translation layer between a new system and an external system"
|
||||||
|
- Prevents external model concepts from leaking in
|
||||||
|
- Reference: [Martin Fowler - Anti-Corruption Layer](https://martinfowler.com/bliki/AntiCorruptionLayer.html)
|
||||||
|
|
||||||
|
### Microsoft Guidance
|
||||||
|
|
||||||
|
**Azure Architecture Center:**
|
||||||
|
- "Implement a facade or adapter layer between different subsystems that don't share the same semantics"
|
||||||
|
- Isolate subsystems by placing an anti-corruption layer between them
|
||||||
|
- Reference: [Microsoft - ACL Pattern](https://learn.microsoft.com/en-us/azure/architecture/patterns/anti-corruption-layer)
|
||||||
|
|
||||||
|
**When to Use:**
|
||||||
|
- Integrating with legacy systems
|
||||||
|
- Migrating from monolith to microservices
|
||||||
|
- Working with third-party APIs
|
||||||
|
- Reference: [Microsoft - ACL When to Use](https://learn.microsoft.com/en-us/azure/architecture/patterns/anti-corruption-layer#when-to-use-this-pattern)
|
||||||
|
|
||||||
|
### Components of ACL
|
||||||
|
|
||||||
|
**Facade:**
|
||||||
|
- Simplified interface to external system
|
||||||
|
- Hides complexity from domain
|
||||||
|
- Reference: [Refactoring Guru - Facade](https://refactoring.guru/design-patterns/facade)
|
||||||
|
|
||||||
|
**Adapter:**
|
||||||
|
- Translates between interfaces
|
||||||
|
- Maps external model to domain model
|
||||||
|
- Reference: [Refactoring Guru - Adapter](https://refactoring.guru/design-patterns/adapter)
|
||||||
|
|
||||||
|
**Translator:**
|
||||||
|
- Converts data structures
|
||||||
|
- Maps field names and types
|
||||||
|
- Handles semantic differences
|
||||||
|
- Reference: [Evans DDD - Model Translation](https://www.domainlanguage.com/)
|
||||||
|
|
||||||
|
### Benefits
|
||||||
|
|
||||||
|
**Isolation:**
|
||||||
|
- Changes to external system don't ripple through domain
|
||||||
|
- Domain model remains pure
|
||||||
|
- Reference: [Microsoft - ACL Benefits](https://learn.microsoft.com/en-us/azure/architecture/patterns/anti-corruption-layer)
|
||||||
|
|
||||||
|
**Gradual Migration:**
|
||||||
|
- Replace legacy components incrementally
|
||||||
|
- Strangler Fig pattern compatibility
|
||||||
|
- Reference: [Martin Fowler - Strangler Fig](https://martinfowler.com/bliki/StranglerFigApplication.html)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 29. Ubiquitous Language
|
||||||
|
|
||||||
|
### Eric Evans: Domain-Driven Design (2003)
|
||||||
|
|
||||||
|
**Original Definition:**
|
||||||
|
- Chapter 2: "Communication and the Use of Language"
|
||||||
|
- "A language structured around the domain model and used by all team members"
|
||||||
|
- "The vocabulary of that Ubiquitous Language includes the names of classes and prominent operations"
|
||||||
|
- Reference: [Martin Fowler - Ubiquitous Language](https://martinfowler.com/bliki/UbiquitousLanguage.html)
|
||||||
|
|
||||||
|
**Key Principles:**
|
||||||
|
- Shared by developers and domain experts
|
||||||
|
- Used in code, conversations, and documentation
|
||||||
|
- Changes to language reflect model changes
|
||||||
|
- Reference: [DDD Reference](https://www.domainlanguage.com/wp-content/uploads/2016/05/DDD_Reference_2015-03.pdf)
|
||||||
|
|
||||||
|
### Why It Matters
|
||||||
|
|
||||||
|
**Communication Benefits:**
|
||||||
|
- Reduces translation between business and tech
|
||||||
|
- Catches misunderstandings early
|
||||||
|
- Domain experts can read code names
|
||||||
|
- Reference: [InfoQ - Ubiquitous Language](https://www.infoq.com/articles/ddd-ubiquitous-language/)
|
||||||
|
|
||||||
|
**Design Benefits:**
|
||||||
|
- Model reflects real domain concepts
|
||||||
|
- Code becomes self-documenting
|
||||||
|
- Easier onboarding for new team members
|
||||||
|
- Reference: [Vaughn Vernon - Implementing DDD](https://www.amazon.com/Implementing-Domain-Driven-Design-Vaughn-Vernon/dp/0321834577)
|
||||||
|
|
||||||
|
### Building Ubiquitous Language
|
||||||
|
|
||||||
|
**Glossary:**
|
||||||
|
- Document key terms and definitions
|
||||||
|
- Keep updated as understanding evolves
|
||||||
|
- Reference: [DDD Community - Glossary](https://thedomaindrivendesign.io/glossary/)
|
||||||
|
|
||||||
|
**Event Storming:**
|
||||||
|
- Collaborative workshop technique
|
||||||
|
- Discover domain events and concepts
|
||||||
|
- Build shared understanding and language
|
||||||
|
- Reference: [Alberto Brandolini - Event Storming](https://www.eventstorming.com/)
|
||||||
|
|
||||||
|
### Common Pitfalls
|
||||||
|
|
||||||
|
**Inconsistent Terminology:**
|
||||||
|
- Same concept with different names (Customer/Client/User)
|
||||||
|
- Different concepts with same name
|
||||||
|
- Reference: [Domain Language - Building UL](https://www.domainlanguage.com/)
|
||||||
|
|
||||||
|
**Technical Terms in Domain:**
|
||||||
|
- "DTO", "Entity", "Repository" are technical
|
||||||
|
- Domain should use business terms
|
||||||
|
- Reference: [Evans DDD - Model-Driven Design](https://www.domainlanguage.com/)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## Conclusion
|
## Conclusion
|
||||||
|
|
||||||
The code quality detection rules implemented in Guardian are firmly grounded in:
|
The code quality detection rules implemented in Guardian are firmly grounded in:
|
||||||
|
|
||||||
1. **Academic Research**: Peer-reviewed papers on software maintainability, complexity metrics, code quality, technical debt prioritization, and severity classification
|
1. **Academic Research**: Peer-reviewed papers on software maintainability, complexity metrics, code quality, technical debt prioritization, severity classification, and distributed systems (Sagas)
|
||||||
2. **Industry Standards**: ISO/IEC 25010, SonarQube rules, OWASP security guidelines, Google and Airbnb style guides
|
2. **Industry Standards**: ISO/IEC 25010, SonarQube rules, OWASP security guidelines, Google and Airbnb style guides
|
||||||
3. **Authoritative Books**:
|
3. **Authoritative Books**:
|
||||||
|
- Gang of Four's "Design Patterns" (1994)
|
||||||
|
- Bertrand Meyer's "Object-Oriented Software Construction" (1988, 1997)
|
||||||
- Robert C. Martin's "Clean Architecture" (2017)
|
- Robert C. Martin's "Clean Architecture" (2017)
|
||||||
- Vaughn Vernon's "Implementing Domain-Driven Design" (2013)
|
- Vaughn Vernon's "Implementing Domain-Driven Design" (2013)
|
||||||
|
- Chris Richardson's "Microservices Patterns" (2018)
|
||||||
- Eric Evans' "Domain-Driven Design" (2003)
|
- Eric Evans' "Domain-Driven Design" (2003)
|
||||||
- Martin Fowler's "Patterns of Enterprise Application Architecture" (2002)
|
- Martin Fowler's "Patterns of Enterprise Application Architecture" (2002)
|
||||||
- Martin Fowler's "Refactoring" (1999, 2018)
|
- Martin Fowler's "Refactoring" (1999, 2018)
|
||||||
- Steve McConnell's "Code Complete" (1993, 2004)
|
- Steve McConnell's "Code Complete" (1993, 2004)
|
||||||
4. **Expert Guidance**: Martin Fowler, Robert C. Martin (Uncle Bob), Eric Evans, Vaughn Vernon, Alistair Cockburn, Kent Beck
|
- Joshua Bloch's "Effective Java" (2001, 2018)
|
||||||
|
- Mark Seemann's "Dependency Injection in .NET" (2011, 2019)
|
||||||
|
- Bobby Woolf's "Null Object" in PLoPD3 (1997)
|
||||||
|
4. **Expert Guidance**: Martin Fowler, Robert C. Martin (Uncle Bob), Eric Evans, Vaughn Vernon, Alistair Cockburn, Kent Beck, Greg Young, Bertrand Meyer, Mark Seemann, Chris Richardson, Alberto Brandolini
|
||||||
5. **Security Standards**: OWASP Secrets Management, GitHub Secret Scanning, GitGuardian best practices
|
5. **Security Standards**: OWASP Secrets Management, GitHub Secret Scanning, GitGuardian best practices
|
||||||
6. **Open Source Tools**: ArchUnit, SonarQube, ESLint, Secretlint - widely adopted in enterprise environments
|
6. **Open Source Tools**: ArchUnit, SonarQube, ESLint, Secretlint - widely adopted in enterprise environments
|
||||||
|
7. **DDD Tactical & Strategic Patterns**: Domain Events, Value Objects, Entities, Aggregates, Bounded Contexts, Anti-Corruption Layer, Ubiquitous Language, Specifications, Factories
|
||||||
|
8. **Architectural Patterns**: CQS/CQRS, Saga, Visitor/Double Dispatch, Null Object, Persistence Ignorance
|
||||||
|
|
||||||
These rules represent decades of software engineering wisdom, empirical research, security best practices, and battle-tested practices from the world's leading software organizations and thought leaders.
|
These rules represent decades of software engineering wisdom, empirical research, security best practices, and battle-tested practices from the world's leading software organizations and thought leaders.
|
||||||
|
|
||||||
@@ -845,9 +1678,9 @@ These rules represent decades of software engineering wisdom, empirical research
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
**Document Version**: 1.1
|
**Document Version**: 2.0
|
||||||
**Last Updated**: 2025-11-26
|
**Last Updated**: 2025-12-04
|
||||||
**Questions or want to contribute research?**
|
**Questions or want to contribute research?**
|
||||||
- 📧 Email: fozilbek.samiyev@gmail.com
|
- 📧 Email: fozilbek.samiyev@gmail.com
|
||||||
- 🐙 GitHub: https://github.com/samiyev/puaros/issues
|
- 🐙 GitHub: https://github.com/samiyev/puaros/issues
|
||||||
**Based on research as of**: November 2025
|
**Based on research as of**: December 2025
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@samiyev/guardian",
|
"name": "@samiyev/guardian",
|
||||||
"version": "0.9.2",
|
"version": "0.9.4",
|
||||||
"description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
|
"description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"puaros",
|
"puaros",
|
||||||
@@ -40,7 +40,7 @@
|
|||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/samiyev/puaros.git",
|
"url": "git+https://github.com/samiyev/puaros.git",
|
||||||
"directory": "packages/guardian"
|
"directory": "packages/guardian"
|
||||||
},
|
},
|
||||||
"bugs": {
|
"bugs": {
|
||||||
|
|||||||
@@ -215,6 +215,7 @@ export class AnalyzeProject extends UseCase<
|
|||||||
private readonly detectionPipeline: ExecuteDetection
|
private readonly detectionPipeline: ExecuteDetection
|
||||||
private readonly resultAggregator: AggregateResults
|
private readonly resultAggregator: AggregateResults
|
||||||
|
|
||||||
|
// eslint-disable-next-line max-params
|
||||||
constructor(
|
constructor(
|
||||||
fileScanner: IFileScanner,
|
fileScanner: IFileScanner,
|
||||||
codeParser: ICodeParser,
|
codeParser: ICodeParser,
|
||||||
|
|||||||
@@ -56,6 +56,7 @@ export interface DetectionResult {
|
|||||||
* Pipeline step responsible for running all detectors
|
* Pipeline step responsible for running all detectors
|
||||||
*/
|
*/
|
||||||
export class ExecuteDetection {
|
export class ExecuteDetection {
|
||||||
|
// eslint-disable-next-line max-params
|
||||||
constructor(
|
constructor(
|
||||||
private readonly hardcodeDetector: IHardcodeDetector,
|
private readonly hardcodeDetector: IHardcodeDetector,
|
||||||
private readonly namingConventionDetector: INamingConventionDetector,
|
private readonly namingConventionDetector: INamingConventionDetector,
|
||||||
|
|||||||
@@ -171,6 +171,7 @@ export class HardcodedValue extends ValueObject<HardcodedValueProps> {
|
|||||||
return `${CONSTANT_NAMES.MAGIC_NUMBER}_${String(value)}`
|
return `${CONSTANT_NAMES.MAGIC_NUMBER}_${String(value)}`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// eslint-disable-next-line complexity, max-lines-per-function
|
||||||
private suggestStringConstantName(): string {
|
private suggestStringConstantName(): string {
|
||||||
const value = String(this.props.value)
|
const value = String(this.props.value)
|
||||||
const context = this.props.context.toLowerCase()
|
const context = this.props.context.toLowerCase()
|
||||||
|
|||||||
@@ -1,3 +1,7 @@
|
|||||||
|
import pkg from "../package.json"
|
||||||
|
|
||||||
|
export const VERSION = pkg.version
|
||||||
|
|
||||||
export * from "./domain"
|
export * from "./domain"
|
||||||
export * from "./application"
|
export * from "./application"
|
||||||
export * from "./infrastructure"
|
export * from "./infrastructure"
|
||||||
|
|||||||
@@ -90,80 +90,98 @@ export class SecretDetector implements ISecretDetector {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private extractSecretType(message: string, ruleId: string): string {
|
private extractSecretType(message: string, ruleId: string): string {
|
||||||
|
const lowerMessage = message.toLowerCase()
|
||||||
|
|
||||||
|
const ruleBasedType = this.extractByRuleId(ruleId, lowerMessage)
|
||||||
|
if (ruleBasedType) {
|
||||||
|
return ruleBasedType
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.extractByMessage(lowerMessage)
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractByRuleId(ruleId: string, lowerMessage: string): string | null {
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.AWS)) {
|
if (ruleId.includes(SECRET_KEYWORDS.AWS)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ACCESS_KEY)) {
|
return this.extractAwsType(lowerMessage)
|
||||||
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
|
|
||||||
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
|
|
||||||
}
|
|
||||||
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.GITHUB)) {
|
if (ruleId.includes(SECRET_KEYWORDS.GITHUB)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
|
return this.extractGithubType(lowerMessage)
|
||||||
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.OAUTH)) {
|
|
||||||
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
|
|
||||||
}
|
|
||||||
return SECRET_TYPE_NAMES.GITHUB_TOKEN
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.NPM)) {
|
if (ruleId.includes(SECRET_KEYWORDS.NPM)) {
|
||||||
return SECRET_TYPE_NAMES.NPM_TOKEN
|
return SECRET_TYPE_NAMES.NPM_TOKEN
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.GCP) || ruleId.includes(SECRET_KEYWORDS.GOOGLE)) {
|
if (ruleId.includes(SECRET_KEYWORDS.GCP) || ruleId.includes(SECRET_KEYWORDS.GOOGLE)) {
|
||||||
return SECRET_TYPE_NAMES.GCP_SERVICE_ACCOUNT_KEY
|
return SECRET_TYPE_NAMES.GCP_SERVICE_ACCOUNT_KEY
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.PRIVATEKEY) || ruleId.includes(SECRET_KEYWORDS.SSH)) {
|
if (ruleId.includes(SECRET_KEYWORDS.PRIVATEKEY) || ruleId.includes(SECRET_KEYWORDS.SSH)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.RSA)) {
|
return this.extractSshType(lowerMessage)
|
||||||
return SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.DSA)) {
|
|
||||||
return SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ECDSA)) {
|
|
||||||
return SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ED25519)) {
|
|
||||||
return SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.SLACK)) {
|
if (ruleId.includes(SECRET_KEYWORDS.SLACK)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.BOT)) {
|
return this.extractSlackType(lowerMessage)
|
||||||
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.USER)) {
|
|
||||||
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
|
|
||||||
}
|
|
||||||
return SECRET_TYPE_NAMES.SLACK_TOKEN
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.BASICAUTH)) {
|
if (ruleId.includes(SECRET_KEYWORDS.BASICAUTH)) {
|
||||||
return SECRET_TYPE_NAMES.BASIC_AUTH_CREDENTIALS
|
return SECRET_TYPE_NAMES.BASIC_AUTH_CREDENTIALS
|
||||||
}
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.API_KEY)) {
|
private extractAwsType(lowerMessage: string): string {
|
||||||
return SECRET_TYPE_NAMES.API_KEY
|
if (lowerMessage.includes(SECRET_KEYWORDS.ACCESS_KEY)) {
|
||||||
|
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
|
||||||
}
|
}
|
||||||
|
if (lowerMessage.includes(SECRET_KEYWORDS.SECRET)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.TOKEN)) {
|
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
|
||||||
return SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN
|
|
||||||
}
|
}
|
||||||
|
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
|
||||||
|
}
|
||||||
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.PASSWORD)) {
|
private extractGithubType(lowerMessage: string): string {
|
||||||
return SECRET_TYPE_NAMES.PASSWORD
|
if (lowerMessage.includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
|
||||||
|
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
|
||||||
}
|
}
|
||||||
|
if (lowerMessage.includes(SECRET_KEYWORDS.OAUTH)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
|
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
|
||||||
return SECRET_TYPE_NAMES.SECRET
|
|
||||||
}
|
}
|
||||||
|
return SECRET_TYPE_NAMES.GITHUB_TOKEN
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractSshType(lowerMessage: string): string {
|
||||||
|
const sshTypeMap: [string, string][] = [
|
||||||
|
[SECRET_KEYWORDS.RSA, SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY],
|
||||||
|
[SECRET_KEYWORDS.DSA, SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY],
|
||||||
|
[SECRET_KEYWORDS.ECDSA, SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY],
|
||||||
|
[SECRET_KEYWORDS.ED25519, SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY],
|
||||||
|
]
|
||||||
|
for (const [keyword, typeName] of sshTypeMap) {
|
||||||
|
if (lowerMessage.includes(keyword)) {
|
||||||
|
return typeName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractSlackType(lowerMessage: string): string {
|
||||||
|
if (lowerMessage.includes(SECRET_KEYWORDS.BOT)) {
|
||||||
|
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
|
||||||
|
}
|
||||||
|
if (lowerMessage.includes(SECRET_KEYWORDS.USER)) {
|
||||||
|
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
|
||||||
|
}
|
||||||
|
return SECRET_TYPE_NAMES.SLACK_TOKEN
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractByMessage(lowerMessage: string): string {
|
||||||
|
const messageTypeMap: [string, string][] = [
|
||||||
|
[SECRET_KEYWORDS.API_KEY, SECRET_TYPE_NAMES.API_KEY],
|
||||||
|
[SECRET_KEYWORDS.TOKEN, SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN],
|
||||||
|
[SECRET_KEYWORDS.PASSWORD, SECRET_TYPE_NAMES.PASSWORD],
|
||||||
|
[SECRET_KEYWORDS.SECRET, SECRET_TYPE_NAMES.SECRET],
|
||||||
|
]
|
||||||
|
for (const [keyword, typeName] of messageTypeMap) {
|
||||||
|
if (lowerMessage.includes(keyword)) {
|
||||||
|
return typeName
|
||||||
|
}
|
||||||
|
}
|
||||||
return SECRET_TYPE_NAMES.SENSITIVE_DATA
|
return SECRET_TYPE_NAMES.SENSITIVE_DATA
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,6 +6,13 @@ import { AstFunctionNameAnalyzer } from "./AstFunctionNameAnalyzer"
|
|||||||
import { AstInterfaceNameAnalyzer } from "./AstInterfaceNameAnalyzer"
|
import { AstInterfaceNameAnalyzer } from "./AstInterfaceNameAnalyzer"
|
||||||
import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
|
import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
|
||||||
|
|
||||||
|
type NodeAnalyzer = (
|
||||||
|
node: Parser.SyntaxNode,
|
||||||
|
layer: string,
|
||||||
|
filePath: string,
|
||||||
|
lines: string[],
|
||||||
|
) => NamingViolation | null
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* AST tree traverser for detecting naming convention violations
|
* AST tree traverser for detecting naming convention violations
|
||||||
*
|
*
|
||||||
@@ -13,12 +20,16 @@ import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
|
|||||||
* to detect naming violations in classes, interfaces, functions, and variables.
|
* to detect naming violations in classes, interfaces, functions, and variables.
|
||||||
*/
|
*/
|
||||||
export class AstNamingTraverser {
|
export class AstNamingTraverser {
|
||||||
|
private readonly nodeHandlers: Map<string, NodeAnalyzer>
|
||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
private readonly classAnalyzer: AstClassNameAnalyzer,
|
private readonly classAnalyzer: AstClassNameAnalyzer,
|
||||||
private readonly interfaceAnalyzer: AstInterfaceNameAnalyzer,
|
private readonly interfaceAnalyzer: AstInterfaceNameAnalyzer,
|
||||||
private readonly functionAnalyzer: AstFunctionNameAnalyzer,
|
private readonly functionAnalyzer: AstFunctionNameAnalyzer,
|
||||||
private readonly variableAnalyzer: AstVariableNameAnalyzer,
|
private readonly variableAnalyzer: AstVariableNameAnalyzer,
|
||||||
) {}
|
) {
|
||||||
|
this.nodeHandlers = this.buildNodeHandlers()
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Traverses the AST tree and collects naming violations
|
* Traverses the AST tree and collects naming violations
|
||||||
@@ -38,6 +49,33 @@ export class AstNamingTraverser {
|
|||||||
return results
|
return results
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private buildNodeHandlers(): Map<string, NodeAnalyzer> {
|
||||||
|
const handlers = new Map<string, NodeAnalyzer>()
|
||||||
|
|
||||||
|
handlers.set(AST_CLASS_TYPES.CLASS_DECLARATION, (node, layer, filePath, lines) =>
|
||||||
|
this.classAnalyzer.analyze(node, layer, filePath, lines),
|
||||||
|
)
|
||||||
|
handlers.set(AST_CLASS_TYPES.INTERFACE_DECLARATION, (node, layer, filePath, lines) =>
|
||||||
|
this.interfaceAnalyzer.analyze(node, layer, filePath, lines),
|
||||||
|
)
|
||||||
|
|
||||||
|
const functionHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
|
||||||
|
this.functionAnalyzer.analyze(node, layer, filePath, lines)
|
||||||
|
handlers.set(AST_FUNCTION_TYPES.FUNCTION_DECLARATION, functionHandler)
|
||||||
|
handlers.set(AST_FUNCTION_TYPES.METHOD_DEFINITION, functionHandler)
|
||||||
|
handlers.set(AST_FUNCTION_TYPES.FUNCTION_SIGNATURE, functionHandler)
|
||||||
|
|
||||||
|
const variableHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
|
||||||
|
this.variableAnalyzer.analyze(node, layer, filePath, lines)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.VARIABLE_DECLARATOR, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.REQUIRED_PARAMETER, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.OPTIONAL_PARAMETER, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.PROPERTY_SIGNATURE, variableHandler)
|
||||||
|
|
||||||
|
return handlers
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Recursively visits AST nodes
|
* Recursively visits AST nodes
|
||||||
*/
|
*/
|
||||||
@@ -49,34 +87,10 @@ export class AstNamingTraverser {
|
|||||||
results: NamingViolation[],
|
results: NamingViolation[],
|
||||||
): void {
|
): void {
|
||||||
const node = cursor.currentNode
|
const node = cursor.currentNode
|
||||||
|
const handler = this.nodeHandlers.get(node.type)
|
||||||
|
|
||||||
if (node.type === AST_CLASS_TYPES.CLASS_DECLARATION) {
|
if (handler) {
|
||||||
const violation = this.classAnalyzer.analyze(node, layer, filePath, lines)
|
const violation = handler(node, layer, filePath, lines)
|
||||||
if (violation) {
|
|
||||||
results.push(violation)
|
|
||||||
}
|
|
||||||
} else if (node.type === AST_CLASS_TYPES.INTERFACE_DECLARATION) {
|
|
||||||
const violation = this.interfaceAnalyzer.analyze(node, layer, filePath, lines)
|
|
||||||
if (violation) {
|
|
||||||
results.push(violation)
|
|
||||||
}
|
|
||||||
} else if (
|
|
||||||
node.type === AST_FUNCTION_TYPES.FUNCTION_DECLARATION ||
|
|
||||||
node.type === AST_FUNCTION_TYPES.METHOD_DEFINITION ||
|
|
||||||
node.type === AST_FUNCTION_TYPES.FUNCTION_SIGNATURE
|
|
||||||
) {
|
|
||||||
const violation = this.functionAnalyzer.analyze(node, layer, filePath, lines)
|
|
||||||
if (violation) {
|
|
||||||
results.push(violation)
|
|
||||||
}
|
|
||||||
} else if (
|
|
||||||
node.type === AST_VARIABLE_TYPES.VARIABLE_DECLARATOR ||
|
|
||||||
node.type === AST_VARIABLE_TYPES.REQUIRED_PARAMETER ||
|
|
||||||
node.type === AST_VARIABLE_TYPES.OPTIONAL_PARAMETER ||
|
|
||||||
node.type === AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION ||
|
|
||||||
node.type === AST_VARIABLE_TYPES.PROPERTY_SIGNATURE
|
|
||||||
) {
|
|
||||||
const violation = this.variableAnalyzer.analyze(node, layer, filePath, lines)
|
|
||||||
if (violation) {
|
if (violation) {
|
||||||
results.push(violation)
|
results.push(violation)
|
||||||
}
|
}
|
||||||
|
|||||||
566
packages/ipuaro/ARCHITECTURE.md
Normal file
566
packages/ipuaro/ARCHITECTURE.md
Normal file
@@ -0,0 +1,566 @@
|
|||||||
|
# ipuaro Architecture
|
||||||
|
|
||||||
|
This document describes the architecture, design decisions, and implementation details of ipuaro.
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
- [Overview](#overview)
|
||||||
|
- [Clean Architecture](#clean-architecture)
|
||||||
|
- [Layer Details](#layer-details)
|
||||||
|
- [Data Flow](#data-flow)
|
||||||
|
- [Key Design Decisions](#key-design-decisions)
|
||||||
|
- [Tech Stack](#tech-stack)
|
||||||
|
- [Performance Considerations](#performance-considerations)
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
ipuaro is a local AI agent for codebase operations built on Clean Architecture principles. It enables "infinite" context feeling through lazy loading and AST-based code understanding.
|
||||||
|
|
||||||
|
### Core Concepts
|
||||||
|
|
||||||
|
1. **Lazy Loading**: Load code on-demand via tools, not all at once
|
||||||
|
2. **AST-Based Understanding**: Parse and index code structure for fast lookups
|
||||||
|
3. **100% Local**: Ollama LLM + Redis storage, no cloud dependencies
|
||||||
|
4. **Session Persistence**: Resume conversations across restarts
|
||||||
|
5. **Tool-Based Interface**: LLM accesses code through 18 specialized tools
|
||||||
|
|
||||||
|
## Clean Architecture
|
||||||
|
|
||||||
|
The project follows Clean Architecture with strict dependency rules:
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────┐
|
||||||
|
│ TUI Layer │ ← Ink/React components
|
||||||
|
│ (Framework) │
|
||||||
|
├─────────────────────────────────────────────────┤
|
||||||
|
│ CLI Layer │ ← Commander.js entry
|
||||||
|
│ (Interface) │
|
||||||
|
├─────────────────────────────────────────────────┤
|
||||||
|
│ Infrastructure Layer │ ← External adapters
|
||||||
|
│ (Storage, LLM, Indexer, Tools, Security) │
|
||||||
|
├─────────────────────────────────────────────────┤
|
||||||
|
│ Application Layer │ ← Use cases & DTOs
|
||||||
|
│ (StartSession, HandleMessage, etc.) │
|
||||||
|
├─────────────────────────────────────────────────┤
|
||||||
|
│ Domain Layer │ ← Business logic
|
||||||
|
│ (Entities, Value Objects, Service Interfaces) │
|
||||||
|
└─────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
**Dependency Rule**: Outer layers depend on inner layers, never the reverse.
|
||||||
|
|
||||||
|
## Layer Details
|
||||||
|
|
||||||
|
### Domain Layer (Core Business Logic)
|
||||||
|
|
||||||
|
**Location**: `src/domain/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Define business entities and value objects
|
||||||
|
- Declare service interfaces (ports)
|
||||||
|
- No external dependencies (pure TypeScript)
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
domain/
|
||||||
|
├── entities/
|
||||||
|
│ ├── Session.ts # Session entity with history and stats
|
||||||
|
│ └── Project.ts # Project entity with metadata
|
||||||
|
├── value-objects/
|
||||||
|
│ ├── FileData.ts # File content with hash and size
|
||||||
|
│ ├── FileAST.ts # Parsed AST structure
|
||||||
|
│ ├── FileMeta.ts # Complexity, dependencies, hub detection
|
||||||
|
│ ├── ChatMessage.ts # Message with role, content, tool calls
|
||||||
|
│ ├── ToolCall.ts # Tool invocation with parameters
|
||||||
|
│ ├── ToolResult.ts # Tool execution result
|
||||||
|
│ └── UndoEntry.ts # File change for undo stack
|
||||||
|
├── services/
|
||||||
|
│ ├── IStorage.ts # Storage interface (port)
|
||||||
|
│ ├── ILLMClient.ts # LLM interface (port)
|
||||||
|
│ ├── ITool.ts # Tool interface (port)
|
||||||
|
│ └── IIndexer.ts # Indexer interface (port)
|
||||||
|
└── constants/
|
||||||
|
└── index.ts # Domain constants
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key Design**:
|
||||||
|
- Value objects are immutable
|
||||||
|
- Entities have identity and lifecycle
|
||||||
|
- Interfaces define contracts, not implementations
|
||||||
|
|
||||||
|
### Application Layer (Use Cases)
|
||||||
|
|
||||||
|
**Location**: `src/application/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Orchestrate domain logic
|
||||||
|
- Implement use cases (application-specific business rules)
|
||||||
|
- Define DTOs for data transfer
|
||||||
|
- Coordinate between domain and infrastructure
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
application/
|
||||||
|
├── use-cases/
|
||||||
|
│ ├── StartSession.ts # Initialize or load session
|
||||||
|
│ ├── HandleMessage.ts # Main message orchestrator
|
||||||
|
│ ├── IndexProject.ts # Project indexing workflow
|
||||||
|
│ ├── ExecuteTool.ts # Tool execution with validation
|
||||||
|
│ └── UndoChange.ts # Revert file changes
|
||||||
|
├── dtos/
|
||||||
|
│ ├── SessionDto.ts # Session data transfer object
|
||||||
|
│ ├── MessageDto.ts # Message DTO
|
||||||
|
│ └── ToolCallDto.ts # Tool call DTO
|
||||||
|
├── mappers/
|
||||||
|
│ └── SessionMapper.ts # Domain ↔ DTO conversion
|
||||||
|
└── interfaces/
|
||||||
|
└── IToolRegistry.ts # Tool registry interface
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key Use Cases**:
|
||||||
|
|
||||||
|
1. **StartSession**: Creates new session or loads latest
|
||||||
|
2. **HandleMessage**: Main flow (LLM → Tools → Response)
|
||||||
|
3. **IndexProject**: Scan → Parse → Analyze → Store
|
||||||
|
4. **UndoChange**: Restore file from undo stack
|
||||||
|
|
||||||
|
### Infrastructure Layer (External Implementations)
|
||||||
|
|
||||||
|
**Location**: `src/infrastructure/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Implement domain interfaces
|
||||||
|
- Handle external systems (Redis, Ollama, filesystem)
|
||||||
|
- Provide concrete tool implementations
|
||||||
|
- Security and validation
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
infrastructure/
|
||||||
|
├── storage/
|
||||||
|
│ ├── RedisClient.ts # Redis connection wrapper
|
||||||
|
│ ├── RedisStorage.ts # IStorage implementation
|
||||||
|
│ └── schema.ts # Redis key schema
|
||||||
|
├── llm/
|
||||||
|
│ ├── OllamaClient.ts # ILLMClient implementation
|
||||||
|
│ ├── prompts.ts # System prompts
|
||||||
|
│ └── ResponseParser.ts # Parse XML tool calls
|
||||||
|
├── indexer/
|
||||||
|
│ ├── FileScanner.ts # Recursive file scanning
|
||||||
|
│ ├── ASTParser.ts # tree-sitter parsing
|
||||||
|
│ ├── MetaAnalyzer.ts # Complexity and dependencies
|
||||||
|
│ ├── IndexBuilder.ts # Symbol index + deps graph
|
||||||
|
│ └── Watchdog.ts # File watching (chokidar)
|
||||||
|
├── tools/ # 18 tool implementations
|
||||||
|
│ ├── registry.ts
|
||||||
|
│ ├── read/ # GetLines, GetFunction, GetClass, GetStructure
|
||||||
|
│ ├── edit/ # EditLines, CreateFile, DeleteFile
|
||||||
|
│ ├── search/ # FindReferences, FindDefinition
|
||||||
|
│ ├── analysis/ # GetDependencies, GetDependents, GetComplexity, GetTodos
|
||||||
|
│ ├── git/ # GitStatus, GitDiff, GitCommit
|
||||||
|
│ └── run/ # RunCommand, RunTests
|
||||||
|
└── security/
|
||||||
|
├── Blacklist.ts # Dangerous commands
|
||||||
|
├── Whitelist.ts # Safe commands
|
||||||
|
└── PathValidator.ts # Path traversal prevention
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key Implementations**:
|
||||||
|
|
||||||
|
1. **RedisStorage**: Uses Redis hashes for files/AST/meta, lists for undo
|
||||||
|
2. **OllamaClient**: HTTP API client with tool calling support
|
||||||
|
3. **ASTParser**: tree-sitter for TS/JS/TSX/JSX parsing
|
||||||
|
4. **ToolRegistry**: Manages tool lifecycle and execution
|
||||||
|
|
||||||
|
### TUI Layer (Terminal UI)
|
||||||
|
|
||||||
|
**Location**: `src/tui/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Render terminal UI with Ink (React for terminal)
|
||||||
|
- Handle user input and hotkeys
|
||||||
|
- Display chat history and status
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
tui/
|
||||||
|
├── App.tsx # Main app shell
|
||||||
|
├── components/
|
||||||
|
│ ├── StatusBar.tsx # Top status bar
|
||||||
|
│ ├── Chat.tsx # Message history display
|
||||||
|
│ ├── Input.tsx # User input with history
|
||||||
|
│ ├── DiffView.tsx # Inline diff display
|
||||||
|
│ ├── ConfirmDialog.tsx # Edit confirmation
|
||||||
|
│ ├── ErrorDialog.tsx # Error handling
|
||||||
|
│ └── Progress.tsx # Progress bar (indexing)
|
||||||
|
└── hooks/
|
||||||
|
├── useSession.ts # Session state management
|
||||||
|
├── useHotkeys.ts # Keyboard shortcuts
|
||||||
|
└── useCommands.ts # Slash command handling
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key Features**:
|
||||||
|
|
||||||
|
- Real-time status updates (context usage, session time)
|
||||||
|
- Input history with ↑/↓ navigation
|
||||||
|
- Hotkeys: Ctrl+C (interrupt), Ctrl+D (exit), Ctrl+Z (undo)
|
||||||
|
- Diff preview for edits with confirmation
|
||||||
|
- Error recovery with retry/skip/abort options
|
||||||
|
|
||||||
|
### CLI Layer (Entry Point)
|
||||||
|
|
||||||
|
**Location**: `src/cli/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Command-line interface with Commander.js
|
||||||
|
- Dependency injection and initialization
|
||||||
|
- Onboarding checks (Redis, Ollama, model)
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
cli/
|
||||||
|
├── index.ts # Commander.js setup
|
||||||
|
└── commands/
|
||||||
|
├── start.ts # Start TUI (default command)
|
||||||
|
├── init.ts # Create .ipuaro.json config
|
||||||
|
└── index-cmd.ts # Index-only command
|
||||||
|
```
|
||||||
|
|
||||||
|
**Commands**:
|
||||||
|
|
||||||
|
1. `ipuaro [path]` - Start TUI in directory
|
||||||
|
2. `ipuaro init` - Create config file
|
||||||
|
3. `ipuaro index` - Index without TUI
|
||||||
|
|
||||||
|
### Shared Module
|
||||||
|
|
||||||
|
**Location**: `src/shared/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Cross-cutting concerns
|
||||||
|
- Configuration management
|
||||||
|
- Error handling
|
||||||
|
- Utility functions
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
shared/
|
||||||
|
├── types/
|
||||||
|
│ └── index.ts # Shared TypeScript types
|
||||||
|
├── constants/
|
||||||
|
│ ├── config.ts # Config schema and loader
|
||||||
|
│ └── messages.ts # User-facing messages
|
||||||
|
├── utils/
|
||||||
|
│ ├── hash.ts # MD5 hashing
|
||||||
|
│ └── tokens.ts # Token estimation
|
||||||
|
└── errors/
|
||||||
|
├── IpuaroError.ts # Custom error class
|
||||||
|
└── ErrorHandler.ts # Error handling service
|
||||||
|
```
|
||||||
|
|
||||||
|
## Data Flow
|
||||||
|
|
||||||
|
### 1. Startup Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
CLI Entry (bin/ipuaro.js)
|
||||||
|
↓
|
||||||
|
Commander.js parses arguments
|
||||||
|
↓
|
||||||
|
Onboarding checks (Redis, Ollama, Model)
|
||||||
|
↓
|
||||||
|
Initialize dependencies:
|
||||||
|
- RedisClient connects
|
||||||
|
- RedisStorage initialized
|
||||||
|
- OllamaClient created
|
||||||
|
- ToolRegistry with 18 tools
|
||||||
|
↓
|
||||||
|
StartSession use case:
|
||||||
|
- Load latest session or create new
|
||||||
|
- Initialize ContextManager
|
||||||
|
↓
|
||||||
|
Launch TUI (App.tsx)
|
||||||
|
- Render StatusBar, Chat, Input
|
||||||
|
- Set up hotkeys
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Message Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
User types message in Input.tsx
|
||||||
|
↓
|
||||||
|
useSession.handleMessage()
|
||||||
|
↓
|
||||||
|
HandleMessage use case:
|
||||||
|
1. Add user message to history
|
||||||
|
2. Build context (system prompt + structure + AST)
|
||||||
|
3. Send to OllamaClient.chat()
|
||||||
|
4. Parse tool calls from response
|
||||||
|
5. For each tool call:
|
||||||
|
- If requiresConfirmation: show ConfirmDialog
|
||||||
|
- Execute tool via ToolRegistry
|
||||||
|
- Collect results
|
||||||
|
6. If tool results: goto step 3 (continue loop)
|
||||||
|
7. Add assistant response to history
|
||||||
|
8. Update session in Redis
|
||||||
|
↓
|
||||||
|
Display response in Chat.tsx
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Edit Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
LLM calls edit_lines tool
|
||||||
|
↓
|
||||||
|
ToolRegistry.execute()
|
||||||
|
↓
|
||||||
|
EditLinesTool.execute():
|
||||||
|
1. Validate path (PathValidator)
|
||||||
|
2. Check hash conflict
|
||||||
|
3. Build diff
|
||||||
|
↓
|
||||||
|
ConfirmDialog shows diff
|
||||||
|
↓
|
||||||
|
User chooses:
|
||||||
|
- Apply: Continue
|
||||||
|
- Cancel: Return error to LLM
|
||||||
|
- Edit: Manual edit (future)
|
||||||
|
↓
|
||||||
|
If Apply:
|
||||||
|
1. Create UndoEntry
|
||||||
|
2. Push to undo stack (Redis list)
|
||||||
|
3. Write to filesystem
|
||||||
|
4. Update RedisStorage (lines, hash, AST, meta)
|
||||||
|
↓
|
||||||
|
Return success to LLM
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Indexing Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
FileScanner.scan()
|
||||||
|
- Recursively walk directory
|
||||||
|
- Filter via .gitignore + ignore patterns
|
||||||
|
- Detect binary files (skip)
|
||||||
|
↓
|
||||||
|
For each file:
|
||||||
|
ASTParser.parse()
|
||||||
|
- tree-sitter parse
|
||||||
|
- Extract imports, exports, functions, classes
|
||||||
|
↓
|
||||||
|
MetaAnalyzer.analyze()
|
||||||
|
- Calculate complexity (LOC, nesting, cyclomatic)
|
||||||
|
- Resolve dependencies (imports → file paths)
|
||||||
|
- Detect hubs (>5 dependents)
|
||||||
|
↓
|
||||||
|
RedisStorage.setFile(), .setAST(), .setMeta()
|
||||||
|
↓
|
||||||
|
IndexBuilder.buildSymbolIndex()
|
||||||
|
- Map symbol names → locations
|
||||||
|
↓
|
||||||
|
IndexBuilder.buildDepsGraph()
|
||||||
|
- Build bidirectional import graph
|
||||||
|
↓
|
||||||
|
Store indexes in Redis
|
||||||
|
↓
|
||||||
|
Watchdog.start()
|
||||||
|
- Watch for file changes
|
||||||
|
- On change: Re-parse and update indexes
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Design Decisions
|
||||||
|
|
||||||
|
### 1. Why Redis?
|
||||||
|
|
||||||
|
**Pros**:
|
||||||
|
- Fast in-memory access for frequent reads
|
||||||
|
- AOF persistence (append-only file) for durability
|
||||||
|
- Native support for hashes, lists, sets
|
||||||
|
- Simple key-value model fits our needs
|
||||||
|
- Excellent for session data
|
||||||
|
|
||||||
|
**Alternatives considered**:
|
||||||
|
- SQLite: Slower, overkill for our use case
|
||||||
|
- JSON files: No concurrent access, slow for large data
|
||||||
|
- PostgreSQL: Too heavy, we don't need relational features
|
||||||
|
|
||||||
|
### 2. Why tree-sitter?
|
||||||
|
|
||||||
|
**Pros**:
|
||||||
|
- Incremental parsing (fast re-parsing)
|
||||||
|
- Error-tolerant (works with syntax errors)
|
||||||
|
- Multi-language support
|
||||||
|
- Used by GitHub, Neovim, Atom
|
||||||
|
|
||||||
|
**Alternatives considered**:
|
||||||
|
- TypeScript Compiler API: TS-only, not error-tolerant
|
||||||
|
- Babel: JS-focused, heavy dependencies
|
||||||
|
- Regex: Fragile, inaccurate
|
||||||
|
|
||||||
|
### 3. Why Ollama?
|
||||||
|
|
||||||
|
**Pros**:
|
||||||
|
- 100% local, no API keys
|
||||||
|
- Easy installation (brew install ollama)
|
||||||
|
- Good model selection (qwen2.5-coder, deepseek-coder)
|
||||||
|
- Tool calling support
|
||||||
|
|
||||||
|
**Alternatives considered**:
|
||||||
|
- OpenAI: Costs money, sends code to cloud
|
||||||
|
- Anthropic Claude: Same concerns as OpenAI
|
||||||
|
- llama.cpp: Lower level, requires more setup
|
||||||
|
|
||||||
|
Planned: Support for OpenAI/Anthropic in v1.2.0 as optional providers.
|
||||||
|
|
||||||
|
### 4. Why XML for Tool Calls?
|
||||||
|
|
||||||
|
**Pros**:
|
||||||
|
- LLMs trained on XML (very common format)
|
||||||
|
- Self-describing (parameter names in tags)
|
||||||
|
- Easy to parse with regex
|
||||||
|
- More reliable than JSON for smaller models
|
||||||
|
|
||||||
|
**Alternatives considered**:
|
||||||
|
- JSON: Smaller models struggle with exact JSON syntax
|
||||||
|
- Function calling API: Not all models support it
|
||||||
|
|
||||||
|
### 5. Why Clean Architecture?
|
||||||
|
|
||||||
|
**Pros**:
|
||||||
|
- Testability (domain has no external dependencies)
|
||||||
|
- Flexibility (easy to swap Redis for SQLite)
|
||||||
|
- Maintainability (clear separation of concerns)
|
||||||
|
- Scalability (layers can evolve independently)
|
||||||
|
|
||||||
|
**Cost**: More files and indirection, but worth it for long-term maintenance.
|
||||||
|
|
||||||
|
### 6. Why Lazy Loading Instead of RAG?
|
||||||
|
|
||||||
|
**RAG (Retrieval Augmented Generation)**:
|
||||||
|
- Pre-computes embeddings
|
||||||
|
- Searches embeddings for relevant chunks
|
||||||
|
- Adds chunks to context
|
||||||
|
|
||||||
|
**Lazy Loading (our approach)**:
|
||||||
|
- Agent requests specific code via tools
|
||||||
|
- More precise control over what's loaded
|
||||||
|
- Simpler implementation (no embeddings)
|
||||||
|
- Works with any LLM (no embedding model needed)
|
||||||
|
|
||||||
|
**Trade-off**: RAG might be better for semantic search ("find error handling code"), but tool-based approach gives agent explicit control.
|
||||||
|
|
||||||
|
## Tech Stack
|
||||||
|
|
||||||
|
### Core Dependencies
|
||||||
|
|
||||||
|
| Package | Purpose | Why? |
|
||||||
|
|---------|---------|------|
|
||||||
|
| `ioredis` | Redis client | Most popular, excellent TypeScript support |
|
||||||
|
| `ollama` | LLM client | Official SDK, simple API |
|
||||||
|
| `tree-sitter` | AST parsing | Fast, error-tolerant, multi-language |
|
||||||
|
| `tree-sitter-typescript` | TS/TSX parser | Official TypeScript grammar |
|
||||||
|
| `tree-sitter-javascript` | JS/JSX parser | Official JavaScript grammar |
|
||||||
|
| `ink` | Terminal UI | React for terminal, declarative |
|
||||||
|
| `ink-text-input` | Input component | Maintained ink component |
|
||||||
|
| `react` | UI framework | Required by Ink |
|
||||||
|
| `simple-git` | Git operations | Simple API, well-tested |
|
||||||
|
| `chokidar` | File watching | Cross-platform, reliable |
|
||||||
|
| `commander` | CLI framework | Industry standard |
|
||||||
|
| `zod` | Validation | Type-safe validation |
|
||||||
|
| `globby` | File globbing | ESM-native, .gitignore support |
|
||||||
|
|
||||||
|
### Development Dependencies
|
||||||
|
|
||||||
|
| Package | Purpose |
|
||||||
|
|---------|---------|
|
||||||
|
| `vitest` | Testing framework |
|
||||||
|
| `@vitest/coverage-v8` | Coverage reporting |
|
||||||
|
| `@vitest/ui` | Interactive test UI |
|
||||||
|
| `tsup` | TypeScript bundler |
|
||||||
|
| `typescript` | Type checking |
|
||||||
|
|
||||||
|
## Performance Considerations
|
||||||
|
|
||||||
|
### 1. Indexing Performance
|
||||||
|
|
||||||
|
**Problem**: Large projects (10k+ files) take time to index.
|
||||||
|
|
||||||
|
**Optimizations**:
|
||||||
|
- Incremental parsing with tree-sitter (only changed files)
|
||||||
|
- Parallel parsing (planned for v1.1.0)
|
||||||
|
- Ignore patterns (.gitignore, node_modules, dist)
|
||||||
|
- Skip binary files early
|
||||||
|
|
||||||
|
**Current**: ~1000 files/second on M1 Mac
|
||||||
|
|
||||||
|
### 2. Memory Usage
|
||||||
|
|
||||||
|
**Problem**: Entire AST in memory could be 100s of MB.
|
||||||
|
|
||||||
|
**Optimizations**:
|
||||||
|
- Store ASTs in Redis (out of Node.js heap)
|
||||||
|
- Load ASTs on-demand from Redis
|
||||||
|
- Lazy-load file content (not stored in session)
|
||||||
|
|
||||||
|
**Current**: ~200MB for 5000 files indexed
|
||||||
|
|
||||||
|
### 3. Context Window Management
|
||||||
|
|
||||||
|
**Problem**: 128k token context window fills up.
|
||||||
|
|
||||||
|
**Optimizations**:
|
||||||
|
- Auto-compression at 80% usage
|
||||||
|
- LLM summarizes old messages
|
||||||
|
- Remove tool results older than 5 messages
|
||||||
|
- Only load structure + metadata initially (~10k tokens)
|
||||||
|
|
||||||
|
### 4. Redis Performance
|
||||||
|
|
||||||
|
**Problem**: Redis is single-threaded.
|
||||||
|
|
||||||
|
**Optimizations**:
|
||||||
|
- Pipeline commands where possible
|
||||||
|
- Use hashes for related data (fewer keys)
|
||||||
|
- AOF every second (not every command)
|
||||||
|
- Keep undo stack limited (10 entries)
|
||||||
|
|
||||||
|
**Current**: <1ms latency for most operations
|
||||||
|
|
||||||
|
### 5. Tool Execution
|
||||||
|
|
||||||
|
**Problem**: Tool execution could block LLM.
|
||||||
|
|
||||||
|
**Current**: Synchronous execution (simpler)
|
||||||
|
|
||||||
|
**Future**: Async tool execution with progress callbacks (v1.1.0)
|
||||||
|
|
||||||
|
## Future Improvements
|
||||||
|
|
||||||
|
### v1.1.0 - Performance
|
||||||
|
- Parallel AST parsing
|
||||||
|
- Incremental indexing (only changed files)
|
||||||
|
- Response caching
|
||||||
|
- Stream LLM responses
|
||||||
|
|
||||||
|
### v1.2.0 - Features
|
||||||
|
- Multiple file edits in one operation
|
||||||
|
- Batch operations
|
||||||
|
- Custom prompt templates
|
||||||
|
- OpenAI/Anthropic provider support
|
||||||
|
|
||||||
|
### v1.3.0 - Extensibility
|
||||||
|
- Plugin system for custom tools
|
||||||
|
- LSP integration
|
||||||
|
- Multi-language support (Python, Go, Rust)
|
||||||
|
- Custom indexing rules
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Last Updated**: 2025-12-01
|
||||||
|
**Version**: 0.16.0
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,17 +1,102 @@
|
|||||||
# @samiyev/ipuaro
|
# @samiyev/ipuaro 🎩
|
||||||
|
|
||||||
Local AI agent for codebase operations with "infinite" context feeling through lazy loading.
|
**Local AI Agent for Codebase Operations**
|
||||||
|
|
||||||
|
"Infinite" context feeling through lazy loading - work with your entire codebase using local LLM.
|
||||||
|
|
||||||
|
[](https://www.npmjs.com/package/@samiyev/ipuaro)
|
||||||
|
[](https://opensource.org/licenses/MIT)
|
||||||
|
|
||||||
|
> **Status:** 🎉 Release Candidate (v0.16.0 → v1.0.0)
|
||||||
|
>
|
||||||
|
> All core features complete. Production-ready release coming soon.
|
||||||
|
|
||||||
|
## Vision
|
||||||
|
|
||||||
|
Work with codebases of any size using local AI:
|
||||||
|
- 📂 **Lazy Loading**: Load code on-demand, not all at once
|
||||||
|
- 🧠 **Smart Context**: AST-based understanding of your code structure
|
||||||
|
- 🔒 **100% Local**: Your code never leaves your machine
|
||||||
|
- ⚡ **Fast**: Redis persistence + tree-sitter parsing
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
- 18 LLM tools for code operations (read, edit, search, analysis, git, run)
|
### 18 LLM Tools (All Implemented ✅)
|
||||||
- Redis persistence with AOF for durability
|
|
||||||
|
| Category | Tools | Description |
|
||||||
|
|----------|-------|-------------|
|
||||||
|
| **Read** | `get_lines`, `get_function`, `get_class`, `get_structure` | Read code without loading everything into context |
|
||||||
|
| **Edit** | `edit_lines`, `create_file`, `delete_file` | Make changes with confirmation and undo support |
|
||||||
|
| **Search** | `find_references`, `find_definition` | Find symbol definitions and usages across codebase |
|
||||||
|
| **Analysis** | `get_dependencies`, `get_dependents`, `get_complexity`, `get_todos` | Analyze code structure, complexity, and TODOs |
|
||||||
|
| **Git** | `git_status`, `git_diff`, `git_commit` | Git operations with safety checks |
|
||||||
|
| **Run** | `run_command`, `run_tests` | Execute commands and tests with security validation |
|
||||||
|
|
||||||
|
See [Tools Documentation](#tools-reference) below for detailed usage examples.
|
||||||
|
|
||||||
|
### Terminal UI
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─ ipuaro ──────────────────────────────────────────────────┐
|
||||||
|
│ [ctx: 12%] [project: myapp] [main] [47m] ✓ Ready │
|
||||||
|
├───────────────────────────────────────────────────────────┤
|
||||||
|
│ You: How does the authentication flow work? │
|
||||||
|
│ │
|
||||||
|
│ Assistant: Let me analyze the auth module... │
|
||||||
|
│ [get_structure src/auth/] │
|
||||||
|
│ [get_function src/auth/service.ts login] │
|
||||||
|
│ │
|
||||||
|
│ The authentication flow works as follows: │
|
||||||
|
│ 1. User calls POST /auth/login │
|
||||||
|
│ 2. AuthService.login() validates credentials... │
|
||||||
|
│ │
|
||||||
|
│ ⏱ 3.2s │ 1,247 tokens │ 2 tool calls │
|
||||||
|
├───────────────────────────────────────────────────────────┤
|
||||||
|
│ > _ │
|
||||||
|
└───────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### Slash Commands
|
||||||
|
|
||||||
|
Control your session with built-in commands:
|
||||||
|
|
||||||
|
| Command | Description |
|
||||||
|
|---------|-------------|
|
||||||
|
| `/help` | Show all commands and hotkeys |
|
||||||
|
| `/clear` | Clear chat history (keeps session) |
|
||||||
|
| `/undo` | Revert last file change from undo stack |
|
||||||
|
| `/sessions [list\|load\|delete] [id]` | Manage sessions |
|
||||||
|
| `/status` | Show system status (LLM, context, stats) |
|
||||||
|
| `/reindex` | Force full project reindexation |
|
||||||
|
| `/eval` | LLM self-check for hallucinations |
|
||||||
|
| `/auto-apply [on\|off]` | Toggle auto-apply mode for edits |
|
||||||
|
|
||||||
|
### Hotkeys
|
||||||
|
|
||||||
|
| Hotkey | Action |
|
||||||
|
|--------|--------|
|
||||||
|
| `Ctrl+C` | Interrupt generation (1st press) / Exit (2nd press within 1s) |
|
||||||
|
| `Ctrl+D` | Exit and save session |
|
||||||
|
| `Ctrl+Z` | Undo last file change |
|
||||||
|
| `↑` / `↓` | Navigate input history |
|
||||||
|
| `Tab` | Path autocomplete (coming soon) |
|
||||||
|
|
||||||
|
### Key Capabilities
|
||||||
|
|
||||||
|
🔍 **Smart Code Understanding**
|
||||||
- tree-sitter AST parsing (TypeScript, JavaScript)
|
- tree-sitter AST parsing (TypeScript, JavaScript)
|
||||||
- Ollama LLM integration (local, private)
|
- Symbol index for fast lookups
|
||||||
- File watching for live index updates
|
- Dependency graph analysis
|
||||||
- Session and undo management
|
|
||||||
- Security (blacklist/whitelist for shell commands)
|
💾 **Persistent Sessions**
|
||||||
- Terminal UI with Ink/React
|
- Redis storage with AOF persistence
|
||||||
|
- Session history across restarts
|
||||||
|
- Undo stack for file changes
|
||||||
|
|
||||||
|
🛡️ **Security**
|
||||||
|
- Command blacklist (dangerous operations blocked)
|
||||||
|
- Command whitelist (safe commands auto-approved)
|
||||||
|
- Path validation (no access outside project)
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
@@ -23,32 +108,72 @@ pnpm add @samiyev/ipuaro
|
|||||||
|
|
||||||
## Requirements
|
## Requirements
|
||||||
|
|
||||||
- Node.js >= 20.0.0
|
- **Node.js** >= 20.0.0
|
||||||
- Redis server (for persistence)
|
- **Redis** (for persistence)
|
||||||
- Ollama (for LLM inference)
|
- **Ollama** (for local LLM inference)
|
||||||
|
|
||||||
## Quick Start
|
### Setup Ollama
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Start in current directory
|
# Install Ollama (macOS)
|
||||||
|
brew install ollama
|
||||||
|
|
||||||
|
# Start Ollama
|
||||||
|
ollama serve
|
||||||
|
|
||||||
|
# Pull recommended model
|
||||||
|
ollama pull qwen2.5-coder:7b-instruct
|
||||||
|
```
|
||||||
|
|
||||||
|
### Setup Redis
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install Redis (macOS)
|
||||||
|
brew install redis
|
||||||
|
|
||||||
|
# Start Redis with persistence
|
||||||
|
redis-server --appendonly yes
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start ipuaro in current directory
|
||||||
ipuaro
|
ipuaro
|
||||||
|
|
||||||
# Start in specific directory
|
# Start in specific directory
|
||||||
ipuaro /path/to/project
|
ipuaro /path/to/project
|
||||||
|
|
||||||
# With auto-apply mode
|
|
||||||
ipuaro --auto-apply
|
|
||||||
|
|
||||||
# With custom model
|
# With custom model
|
||||||
ipuaro --model qwen2.5-coder:32b-instruct
|
ipuaro --model qwen2.5-coder:32b-instruct
|
||||||
|
|
||||||
|
# With auto-apply mode (skip edit confirmations)
|
||||||
|
ipuaro --auto-apply
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
Try ipuaro with our demo project:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Navigate to demo project
|
||||||
|
cd examples/demo-project
|
||||||
|
|
||||||
|
# Install dependencies
|
||||||
|
npm install
|
||||||
|
|
||||||
|
# Start ipuaro
|
||||||
|
npx @samiyev/ipuaro
|
||||||
|
```
|
||||||
|
|
||||||
|
See [examples/demo-project](./examples/demo-project) for a detailed usage guide and example conversations.
|
||||||
|
|
||||||
## Commands
|
## Commands
|
||||||
|
|
||||||
| Command | Description |
|
| Command | Description |
|
||||||
|---------|-------------|
|
|---------|-------------|
|
||||||
| `ipuaro [path]` | Start TUI in directory |
|
| `ipuaro [path]` | Start TUI in directory |
|
||||||
| `ipuaro init` | Create .ipuaro.json config |
|
| `ipuaro init` | Create `.ipuaro.json` config |
|
||||||
| `ipuaro index` | Index project without TUI |
|
| `ipuaro index` | Index project without TUI |
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
@@ -65,6 +190,9 @@ Create `.ipuaro.json` in your project root:
|
|||||||
"model": "qwen2.5-coder:7b-instruct",
|
"model": "qwen2.5-coder:7b-instruct",
|
||||||
"temperature": 0.1
|
"temperature": 0.1
|
||||||
},
|
},
|
||||||
|
"project": {
|
||||||
|
"ignorePatterns": ["node_modules", "dist", ".git"]
|
||||||
|
},
|
||||||
"edit": {
|
"edit": {
|
||||||
"autoApply": false
|
"autoApply": false
|
||||||
}
|
}
|
||||||
@@ -76,55 +204,476 @@ Create `.ipuaro.json` in your project root:
|
|||||||
Clean Architecture with clear separation:
|
Clean Architecture with clear separation:
|
||||||
|
|
||||||
```
|
```
|
||||||
src/
|
@samiyev/ipuaro/
|
||||||
├── domain/ # Business logic (entities, value objects, interfaces)
|
├── domain/ # Business logic (no dependencies)
|
||||||
├── application/ # Use cases, DTOs, orchestration
|
│ ├── entities/ # Session, Project
|
||||||
├── infrastructure/ # External implementations (Redis, Ollama, tools)
|
│ ├── value-objects/ # FileData, FileAST, ChatMessage, etc.
|
||||||
├── tui/ # Terminal UI (Ink/React components)
|
│ └── services/ # IStorage, ILLMClient, ITool, IIndexer
|
||||||
├── cli/ # CLI commands
|
├── application/ # Use cases & orchestration
|
||||||
└── shared/ # Cross-cutting concerns
|
│ ├── use-cases/ # StartSession, HandleMessage, etc.
|
||||||
|
│ └── interfaces/ # IToolRegistry
|
||||||
|
├── infrastructure/ # External implementations
|
||||||
|
│ ├── storage/ # Redis client & storage
|
||||||
|
│ ├── llm/ # Ollama client & prompts
|
||||||
|
│ ├── indexer/ # File scanner, AST parser
|
||||||
|
│ └── tools/ # 18 tool implementations
|
||||||
|
├── tui/ # Terminal UI (Ink/React)
|
||||||
|
│ └── components/ # StatusBar, Chat, Input, etc.
|
||||||
|
├── cli/ # CLI entry point
|
||||||
|
└── shared/ # Config, errors, utils
|
||||||
```
|
```
|
||||||
|
|
||||||
## Tools (18 total)
|
|
||||||
|
|
||||||
| Category | Tool | Description |
|
|
||||||
|----------|------|-------------|
|
|
||||||
| **Read** | `get_lines` | Get file lines |
|
|
||||||
| | `get_function` | Get function by name |
|
|
||||||
| | `get_class` | Get class by name |
|
|
||||||
| | `get_structure` | Get project tree |
|
|
||||||
| **Edit** | `edit_lines` | Replace lines |
|
|
||||||
| | `create_file` | Create new file |
|
|
||||||
| | `delete_file` | Delete file |
|
|
||||||
| **Search** | `find_references` | Find symbol usages |
|
|
||||||
| | `find_definition` | Find symbol definition |
|
|
||||||
| **Analysis** | `get_dependencies` | File imports |
|
|
||||||
| | `get_dependents` | Files importing this |
|
|
||||||
| | `get_complexity` | Complexity metrics |
|
|
||||||
| | `get_todos` | Find TODO/FIXME |
|
|
||||||
| **Git** | `git_status` | Repository status |
|
|
||||||
| | `git_diff` | Uncommitted changes |
|
|
||||||
| | `git_commit` | Create commit |
|
|
||||||
| **Run** | `run_command` | Execute shell command |
|
|
||||||
| | `run_tests` | Run test suite |
|
|
||||||
|
|
||||||
## Development Status
|
## Development Status
|
||||||
|
|
||||||
Currently at version **0.1.0** (Foundation). See [ROADMAP.md](./ROADMAP.md) for full development plan.
|
### ✅ Completed (v0.1.0 - v0.16.0)
|
||||||
|
|
||||||
### Completed
|
- [x] **v0.1.0 - v0.4.0**: Foundation (domain, storage, indexer, LLM integration)
|
||||||
|
- [x] **v0.5.0 - v0.9.0**: All 18 tools implemented
|
||||||
|
- [x] **v0.10.0**: Session management with undo support
|
||||||
|
- [x] **v0.11.0 - v0.12.0**: Full TUI with all components
|
||||||
|
- [x] **v0.13.0**: Security (PathValidator, command validation)
|
||||||
|
- [x] **v0.14.0**: 8 slash commands
|
||||||
|
- [x] **v0.15.0**: CLI entry point with onboarding
|
||||||
|
- [x] **v0.16.0**: Comprehensive error handling system
|
||||||
|
- [x] **1420 tests, 98% coverage**
|
||||||
|
|
||||||
- [x] 0.1.1 Project Setup
|
### 🔜 v1.0.0 - Production Ready
|
||||||
- [x] 0.1.2 Domain Value Objects
|
|
||||||
- [x] 0.1.3 Domain Services Interfaces
|
|
||||||
- [x] 0.1.4 Shared Config
|
|
||||||
|
|
||||||
### Next
|
- [ ] Performance optimizations
|
||||||
|
- [ ] Complete documentation
|
||||||
|
- [ ] Working examples
|
||||||
|
|
||||||
- [ ] 0.2.0 Redis Storage
|
See [ROADMAP.md](./ROADMAP.md) for detailed development plan and [CHANGELOG.md](./CHANGELOG.md) for release history.
|
||||||
- [ ] 0.3.0 Indexer
|
|
||||||
- [ ] 0.4.0 LLM Integration
|
## Tools Reference
|
||||||
|
|
||||||
|
The AI agent has access to 18 tools for working with your codebase. Here are the most commonly used ones:
|
||||||
|
|
||||||
|
### Read Tools
|
||||||
|
|
||||||
|
**`get_lines(path, start?, end?)`**
|
||||||
|
Read specific lines from a file.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Show me the authentication logic
|
||||||
|
Assistant: [get_lines src/auth/service.ts 45 67]
|
||||||
|
# Returns lines 45-67 with line numbers
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_function(path, name)`**
|
||||||
|
Get a specific function's source code and metadata.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: How does the login function work?
|
||||||
|
Assistant: [get_function src/auth/service.ts login]
|
||||||
|
# Returns function code, params, return type, and metadata
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_class(path, name)`**
|
||||||
|
Get a specific class's source code and metadata.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Show me the UserService class
|
||||||
|
Assistant: [get_class src/services/user.ts UserService]
|
||||||
|
# Returns class code, methods, properties, and inheritance info
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_structure(path?, depth?)`**
|
||||||
|
Get directory tree structure.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What's in the src/auth directory?
|
||||||
|
Assistant: [get_structure src/auth]
|
||||||
|
# Returns ASCII tree with files and folders
|
||||||
|
```
|
||||||
|
|
||||||
|
### Edit Tools
|
||||||
|
|
||||||
|
**`edit_lines(path, start, end, content)`**
|
||||||
|
Replace lines in a file (requires confirmation).
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Update the timeout to 5000ms
|
||||||
|
Assistant: [edit_lines src/config.ts 23 23 " timeout: 5000,"]
|
||||||
|
# Shows diff, asks for confirmation
|
||||||
|
```
|
||||||
|
|
||||||
|
**`create_file(path, content)`**
|
||||||
|
Create a new file (requires confirmation).
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Create a new utility for date formatting
|
||||||
|
Assistant: [create_file src/utils/date.ts "export function formatDate..."]
|
||||||
|
# Creates file after confirmation
|
||||||
|
```
|
||||||
|
|
||||||
|
**`delete_file(path)`**
|
||||||
|
Delete a file (requires confirmation).
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Remove the old test file
|
||||||
|
Assistant: [delete_file tests/old-test.test.ts]
|
||||||
|
# Deletes after confirmation
|
||||||
|
```
|
||||||
|
|
||||||
|
### Search Tools
|
||||||
|
|
||||||
|
**`find_references(symbol, path?)`**
|
||||||
|
Find all usages of a symbol across the codebase.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Where is getUserById used?
|
||||||
|
Assistant: [find_references getUserById]
|
||||||
|
# Returns all files/lines where it's called
|
||||||
|
```
|
||||||
|
|
||||||
|
**`find_definition(symbol)`**
|
||||||
|
Find where a symbol is defined.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Where is ApiClient defined?
|
||||||
|
Assistant: [find_definition ApiClient]
|
||||||
|
# Returns file, line, and context
|
||||||
|
```
|
||||||
|
|
||||||
|
### Analysis Tools
|
||||||
|
|
||||||
|
**`get_dependencies(path)`**
|
||||||
|
Get files that a specific file imports.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What does auth.ts depend on?
|
||||||
|
Assistant: [get_dependencies src/auth/service.ts]
|
||||||
|
# Returns list of imported files
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_dependents(path)`**
|
||||||
|
Get files that import a specific file.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What files use the database module?
|
||||||
|
Assistant: [get_dependents src/db/index.ts]
|
||||||
|
# Returns list of files importing this
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_complexity(path?, limit?)`**
|
||||||
|
Get complexity metrics for files.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Which files are most complex?
|
||||||
|
Assistant: [get_complexity null 10]
|
||||||
|
# Returns top 10 most complex files with metrics
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_todos(path?, type?)`**
|
||||||
|
Find TODO/FIXME/HACK comments.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What TODOs are there?
|
||||||
|
Assistant: [get_todos]
|
||||||
|
# Returns all TODO comments with locations
|
||||||
|
```
|
||||||
|
|
||||||
|
### Git Tools
|
||||||
|
|
||||||
|
**`git_status()`**
|
||||||
|
Get current git repository status.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What files have changed?
|
||||||
|
Assistant: [git_status]
|
||||||
|
# Returns branch, staged, modified, untracked files
|
||||||
|
```
|
||||||
|
|
||||||
|
**`git_diff(path?, staged?)`**
|
||||||
|
Get uncommitted changes.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Show me what changed in auth.ts
|
||||||
|
Assistant: [git_diff src/auth/service.ts]
|
||||||
|
# Returns diff output
|
||||||
|
```
|
||||||
|
|
||||||
|
**`git_commit(message, files?)`**
|
||||||
|
Create a git commit (requires confirmation).
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Commit these auth changes
|
||||||
|
Assistant: [git_commit "feat: add password reset flow" ["src/auth/service.ts"]]
|
||||||
|
# Creates commit after confirmation
|
||||||
|
```
|
||||||
|
|
||||||
|
### Run Tools
|
||||||
|
|
||||||
|
**`run_command(command, timeout?)`**
|
||||||
|
Execute shell commands (with security validation).
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Run the build
|
||||||
|
Assistant: [run_command "npm run build"]
|
||||||
|
# Checks security, then executes
|
||||||
|
```
|
||||||
|
|
||||||
|
**`run_tests(path?, filter?, watch?)`**
|
||||||
|
Run project tests.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Test the auth module
|
||||||
|
Assistant: [run_tests "tests/auth" null false]
|
||||||
|
# Auto-detects test runner and executes
|
||||||
|
```
|
||||||
|
|
||||||
|
For complete tool documentation with all parameters and options, see [TOOLS.md](./TOOLS.md).
|
||||||
|
|
||||||
|
## Programmatic API
|
||||||
|
|
||||||
|
You can use ipuaro as a library in your own Node.js applications:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
createRedisClient,
|
||||||
|
RedisStorage,
|
||||||
|
OllamaClient,
|
||||||
|
ToolRegistry,
|
||||||
|
StartSession,
|
||||||
|
HandleMessage,
GetLinesTool
|
||||||
|
} from "@samiyev/ipuaro"
|
||||||
|
|
||||||
|
// Initialize dependencies
|
||||||
|
const redis = await createRedisClient({ host: "localhost", port: 6379 })
|
||||||
|
const storage = new RedisStorage(redis, "my-project")
|
||||||
|
const llm = new OllamaClient({
|
||||||
|
model: "qwen2.5-coder:7b-instruct",
|
||||||
|
contextWindow: 128000,
|
||||||
|
temperature: 0.1
|
||||||
|
})
|
||||||
|
const tools = new ToolRegistry()
|
||||||
|
|
||||||
|
// Register tools
|
||||||
|
tools.register(new GetLinesTool(storage, "/path/to/project"))
|
||||||
|
// ... register other tools
|
||||||
|
|
||||||
|
// Start a session
|
||||||
|
const startSession = new StartSession(storage)
|
||||||
|
const session = await startSession.execute("my-project")
|
||||||
|
|
||||||
|
// Handle a message
|
||||||
|
const handleMessage = new HandleMessage(storage, llm, tools)
|
||||||
|
await handleMessage.execute(session, "Show me the auth flow")
|
||||||
|
|
||||||
|
// Session is automatically updated in Redis
|
||||||
|
```
|
||||||
|
|
||||||
|
For full API documentation, see the TypeScript definitions in `src/` or explore the [source code](./src/).
|
||||||
|
|
||||||
|
## How It Works
|
||||||
|
|
||||||
|
### 1. Project Indexing
|
||||||
|
|
||||||
|
When you start ipuaro, it scans your project and builds an index:
|
||||||
|
|
||||||
|
```
|
||||||
|
1. File Scanner → Recursively scans files (.ts, .js, .tsx, .jsx)
|
||||||
|
2. AST Parser → Parses with tree-sitter (extracts functions, classes, imports)
|
||||||
|
3. Meta Analyzer → Calculates complexity, dependencies, hub detection
|
||||||
|
4. Index Builder → Creates symbol index and dependency graph
|
||||||
|
5. Redis Storage → Persists everything for instant startup next time
|
||||||
|
6. Watchdog → Watches files for changes and updates index in background
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Lazy Loading Context
|
||||||
|
|
||||||
|
Instead of loading entire codebase into context:
|
||||||
|
|
||||||
|
```
|
||||||
|
Traditional approach:
|
||||||
|
├── Load all files → 500k tokens → ❌ Exceeds context window
|
||||||
|
|
||||||
|
ipuaro approach:
|
||||||
|
├── Load project structure → ~2k tokens
|
||||||
|
├── Load AST metadata → ~10k tokens
|
||||||
|
├── On demand: get_function("auth.ts", "login") → ~200 tokens
|
||||||
|
├── Total: ~12k tokens → ✅ Fits in 128k context window
|
||||||
|
```
|
||||||
|
|
||||||
|
Context automatically compresses when usage exceeds 80% by summarizing old messages.
|
||||||
|
|
||||||
|
### 3. Tool-Based Code Access
|
||||||
|
|
||||||
|
The LLM doesn't see your code initially. It only sees structure and metadata. When it needs code, it uses tools:
|
||||||
|
|
||||||
|
```
|
||||||
|
You: "How does user creation work?"
|
||||||
|
|
||||||
|
Agent reasoning:
|
||||||
|
1. [get_structure src/] → sees user/ folder exists
|
||||||
|
2. [get_function src/user/service.ts createUser] → loads specific function
|
||||||
|
3. [find_references createUser] → finds all usages
|
||||||
|
4. Synthesizes answer with only relevant code loaded
|
||||||
|
|
||||||
|
Total tokens used: ~2k (vs loading entire src/ which could be 50k+)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Session Persistence
|
||||||
|
|
||||||
|
Everything is saved to Redis:
|
||||||
|
- Chat history and context state
|
||||||
|
- Undo stack (last 10 file changes)
|
||||||
|
- Session metadata and statistics
|
||||||
|
|
||||||
|
Resume your session anytime with `/sessions load <id>`.
|
||||||
|
|
||||||
|
### 5. Security Model
|
||||||
|
|
||||||
|
Three-layer security:
|
||||||
|
1. **Blacklist**: Dangerous commands always blocked (rm -rf, sudo, etc.)
|
||||||
|
2. **Whitelist**: Safe commands auto-approved (npm, git status, etc.)
|
||||||
|
3. **Confirmation**: Unknown commands require user approval
|
||||||
|
|
||||||
|
File operations are restricted to project directory only (path traversal prevention).
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Redis Connection Errors
|
||||||
|
|
||||||
|
**Error**: `Redis connection failed`
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
```bash
|
||||||
|
# Check if Redis is running
|
||||||
|
redis-cli ping # Should return "PONG"
|
||||||
|
|
||||||
|
# Start Redis with AOF persistence
|
||||||
|
redis-server --appendonly yes
|
||||||
|
|
||||||
|
# Check Redis logs
|
||||||
|
tail -f /usr/local/var/log/redis.log # macOS
|
||||||
|
```
|
||||||
|
|
||||||
|
### Ollama Model Not Found
|
||||||
|
|
||||||
|
**Error**: `Model qwen2.5-coder:7b-instruct not found`
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
```bash
|
||||||
|
# Pull the model
|
||||||
|
ollama pull qwen2.5-coder:7b-instruct
|
||||||
|
|
||||||
|
# List installed models
|
||||||
|
ollama list
|
||||||
|
|
||||||
|
# Check Ollama is running
|
||||||
|
ollama serve
|
||||||
|
```
|
||||||
|
|
||||||
|
### Large Project Performance
|
||||||
|
|
||||||
|
**Issue**: Indexing takes too long or uses too much memory
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
```bash
|
||||||
|
# Index only a subdirectory
|
||||||
|
ipuaro ./src
|
||||||
|
|
||||||
|
# Add more ignore patterns to .ipuaro.json
|
||||||
|
{
|
||||||
|
"project": {
|
||||||
|
"ignorePatterns": ["node_modules", "dist", ".git", "coverage", "build"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Increase Node.js memory limit
|
||||||
|
NODE_OPTIONS="--max-old-space-size=4096" ipuaro
|
||||||
|
```
|
||||||
|
|
||||||
|
### Context Window Exceeded
|
||||||
|
|
||||||
|
**Issue**: `Context window exceeded` errors
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
- Context auto-compresses at 80%, but you can manually `/clear` history
|
||||||
|
- Use more targeted questions instead of asking about entire codebase
|
||||||
|
- The agent will automatically use tools to load only what's needed
|
||||||
|
|
||||||
|
### File Changes Not Detected
|
||||||
|
|
||||||
|
**Issue**: Made changes but agent doesn't see them
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
```bash
|
||||||
|
# Force reindex
|
||||||
|
/reindex
|
||||||
|
|
||||||
|
# Or restart with fresh index
|
||||||
|
rm -rf ~/.ipuaro/cache
|
||||||
|
ipuaro
|
||||||
|
```
|
||||||
|
|
||||||
|
### Undo Not Working
|
||||||
|
|
||||||
|
**Issue**: `/undo` says no changes to undo
|
||||||
|
|
||||||
|
**Explanation**: Undo stack only tracks the last 10 file edits made through ipuaro. Manual file edits outside ipuaro cannot be undone.
|
||||||
|
|
||||||
|
## FAQ
|
||||||
|
|
||||||
|
**Q: Does ipuaro send my code to any external servers?**
|
||||||
|
|
||||||
|
A: No. Everything runs locally. Ollama runs on your machine, Redis stores data locally, and no network requests are made except to your local Ollama instance.
|
||||||
|
|
||||||
|
**Q: What languages are supported?**
|
||||||
|
|
||||||
|
A: Currently TypeScript and JavaScript (including TSX/JSX) are supported. More languages are planned for future versions.
|
||||||
|
|
||||||
|
**Q: Can I use OpenAI/Anthropic/other LLM providers?**
|
||||||
|
|
||||||
|
A: Currently only Ollama is supported. OpenAI/Anthropic support is planned for v1.2.0.
|
||||||
|
|
||||||
|
**Q: How much disk space does Redis use?**
|
||||||
|
|
||||||
|
A: Depends on project size. A typical mid-size project (1000 files) uses ~50-100MB. Redis uses AOF persistence, so data survives restarts.
|
||||||
|
|
||||||
|
**Q: Can I use ipuaro in a CI/CD pipeline?**
|
||||||
|
|
||||||
|
A: Yes, but it's designed for interactive use. For automated code analysis, consider the programmatic API.
|
||||||
|
|
||||||
|
**Q: What's the difference between ipuaro and GitHub Copilot?**
|
||||||
|
|
||||||
|
A: Copilot is an autocomplete tool. ipuaro is a conversational agent that can read, analyze, and modify files, run commands, and understand your full codebase through AST parsing.
|
||||||
|
|
||||||
|
**Q: Why Redis instead of SQLite or JSON files?**
|
||||||
|
|
||||||
|
A: Redis provides fast in-memory access, AOF persistence, and handles concurrent access well. The session model fits Redis's data structures perfectly.
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Contributions welcome! This project is under active development.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Clone
|
||||||
|
git clone https://github.com/samiyev/puaros.git
|
||||||
|
cd puaros/packages/ipuaro
|
||||||
|
|
||||||
|
# Install
|
||||||
|
pnpm install
|
||||||
|
|
||||||
|
# Build
|
||||||
|
pnpm build
|
||||||
|
|
||||||
|
# Test
|
||||||
|
pnpm test:run
|
||||||
|
|
||||||
|
# Coverage
|
||||||
|
pnpm test:coverage
|
||||||
|
```
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
MIT
|
MIT © Fozilbek Samiyev
|
||||||
|
|
||||||
|
## Links
|
||||||
|
|
||||||
|
- [GitHub Repository](https://github.com/samiyev/puaros/tree/main/packages/ipuaro)
|
||||||
|
- [Issues](https://github.com/samiyev/puaros/issues)
|
||||||
|
- [Changelog](./CHANGELOG.md)
|
||||||
|
- [Roadmap](./ROADMAP.md)
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,40 +1,95 @@
|
|||||||
# ipuaro TODO
|
# ipuaro TODO
|
||||||
|
|
||||||
|
## Completed
|
||||||
|
|
||||||
|
### Version 0.1.0 - Foundation
|
||||||
|
- [x] Project setup (package.json, tsconfig, vitest)
|
||||||
|
- [x] Domain entities (Session, Project)
|
||||||
|
- [x] Domain value objects (FileData, FileAST, FileMeta, ChatMessage, etc.)
|
||||||
|
- [x] Domain service interfaces (IStorage, ILLMClient, ITool, IIndexer)
|
||||||
|
- [x] Shared config loader with Zod validation
|
||||||
|
- [x] IpuaroError class
|
||||||
|
|
||||||
|
### Version 0.2.0 - Redis Storage
|
||||||
|
- [x] RedisClient with AOF config
|
||||||
|
- [x] Redis schema implementation
|
||||||
|
- [x] RedisStorage class
|
||||||
|
|
||||||
|
### Version 0.3.0 - Indexer
|
||||||
|
- [x] FileScanner with gitignore support
|
||||||
|
- [x] ASTParser with tree-sitter
|
||||||
|
- [x] MetaAnalyzer for complexity
|
||||||
|
- [x] IndexBuilder for symbols
|
||||||
|
- [x] Watchdog for file changes
|
||||||
|
|
||||||
|
### Version 0.4.0 - LLM Integration
|
||||||
|
- [x] OllamaClient implementation
|
||||||
|
- [x] System prompt design
|
||||||
|
- [x] Tool definitions (18 tools)
|
||||||
|
- [x] Response parser (XML format)
|
||||||
|
|
||||||
|
### Version 0.5.0 - Read Tools
|
||||||
|
- [x] ToolRegistry implementation
|
||||||
|
- [x] get_lines tool
|
||||||
|
- [x] get_function tool
|
||||||
|
- [x] get_class tool
|
||||||
|
- [x] get_structure tool
|
||||||
|
|
||||||
|
### Version 0.6.0 - Edit Tools
|
||||||
|
- [x] edit_lines tool
|
||||||
|
- [x] create_file tool
|
||||||
|
- [x] delete_file tool
|
||||||
|
|
||||||
|
### Version 0.7.0 - Search Tools
|
||||||
|
- [x] find_references tool
|
||||||
|
- [x] find_definition tool
|
||||||
|
|
||||||
|
### Version 0.8.0 - Analysis Tools
|
||||||
|
- [x] get_dependencies tool
|
||||||
|
- [x] get_dependents tool
|
||||||
|
- [x] get_complexity tool
|
||||||
|
- [x] get_todos tool
|
||||||
|
|
||||||
|
### Version 0.9.0 - Git & Run Tools
|
||||||
|
- [x] git_status tool
|
||||||
|
- [x] git_diff tool
|
||||||
|
- [x] git_commit tool
|
||||||
|
- [x] CommandSecurity (blacklist/whitelist)
|
||||||
|
- [x] run_command tool
|
||||||
|
- [x] run_tests tool
|
||||||
|
|
||||||
|
### Version 0.10.0 - Session Management
|
||||||
|
- [x] ISessionStorage interface
|
||||||
|
- [x] RedisSessionStorage implementation
|
||||||
|
- [x] ContextManager use case
|
||||||
|
- [x] StartSession use case
|
||||||
|
- [x] HandleMessage use case
|
||||||
|
- [x] UndoChange use case
|
||||||
|
|
||||||
## In Progress
|
## In Progress
|
||||||
|
|
||||||
### Version 0.2.0 - Redis Storage
|
### Version 0.11.0 - TUI Basic
|
||||||
- [ ] RedisClient with AOF config
|
- [ ] App shell (Ink/React)
|
||||||
- [ ] Redis schema implementation
|
- [ ] StatusBar component
|
||||||
- [ ] RedisStorage class
|
- [ ] Chat component
|
||||||
|
- [ ] Input component
|
||||||
|
|
||||||
## Planned
|
## Planned
|
||||||
|
|
||||||
### Version 0.3.0 - Indexer
|
### Version 0.12.0 - TUI Advanced
|
||||||
- [ ] FileScanner with gitignore support
|
- [ ] DiffView component
|
||||||
- [ ] ASTParser with tree-sitter
|
- [ ] ConfirmDialog component
|
||||||
- [ ] MetaAnalyzer for complexity
|
- [ ] ErrorDialog component
|
||||||
- [ ] IndexBuilder for symbols
|
- [ ] Progress component
|
||||||
- [ ] Watchdog for file changes
|
|
||||||
|
|
||||||
### Version 0.4.0 - LLM Integration
|
### Version 0.13.0+ - Commands & Polish
|
||||||
- [ ] OllamaClient implementation
|
- [ ] Slash commands (/help, /clear, /undo, /sessions, /status)
|
||||||
- [ ] System prompt design
|
- [ ] Hotkeys (Ctrl+C, Ctrl+D, Ctrl+Z)
|
||||||
- [ ] Tool definitions (XML format)
|
- [ ] Auto-compression at 80% context
|
||||||
- [ ] Response parser
|
|
||||||
|
|
||||||
### Version 0.5.0+ - Tools
|
### Version 0.14.0 - CLI Entry Point
|
||||||
- [ ] Read tools (get_lines, get_function, get_class, get_structure)
|
- [ ] Full CLI commands (start, init, index)
|
||||||
- [ ] Edit tools (edit_lines, create_file, delete_file)
|
- [ ] Onboarding flow (Redis check, Ollama check, model pull)
|
||||||
- [ ] Search tools (find_references, find_definition)
|
|
||||||
- [ ] Analysis tools (get_dependencies, get_dependents, get_complexity, get_todos)
|
|
||||||
- [ ] Git tools (git_status, git_diff, git_commit)
|
|
||||||
- [ ] Run tools (run_command, run_tests)
|
|
||||||
|
|
||||||
### Version 0.10.0+ - Session & TUI
|
|
||||||
- [ ] Session management
|
|
||||||
- [ ] Context compression
|
|
||||||
- [ ] TUI components (StatusBar, Chat, Input, DiffView)
|
|
||||||
- [ ] Slash commands (/help, /clear, /undo, etc.)
|
|
||||||
|
|
||||||
## Technical Debt
|
## Technical Debt
|
||||||
|
|
||||||
@@ -51,4 +106,4 @@ _None at this time._
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
**Last Updated:** 2025-01-29
|
**Last Updated:** 2025-12-01
|
||||||
1605
packages/ipuaro/TOOLS.md
Normal file
1605
packages/ipuaro/TOOLS.md
Normal file
File diff suppressed because it is too large
Load Diff
4
packages/ipuaro/examples/demo-project/.gitignore
vendored
Normal file
4
packages/ipuaro/examples/demo-project/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
node_modules/
|
||||||
|
dist/
|
||||||
|
*.log
|
||||||
|
.DS_Store
|
||||||
21
packages/ipuaro/examples/demo-project/.ipuaro.json
Normal file
21
packages/ipuaro/examples/demo-project/.ipuaro.json
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"redis": {
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 6379
|
||||||
|
},
|
||||||
|
"llm": {
|
||||||
|
"model": "qwen2.5-coder:7b-instruct",
|
||||||
|
"temperature": 0.1
|
||||||
|
},
|
||||||
|
"project": {
|
||||||
|
"ignorePatterns": [
|
||||||
|
"node_modules",
|
||||||
|
"dist",
|
||||||
|
".git",
|
||||||
|
"*.log"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"edit": {
|
||||||
|
"autoApply": false
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
# Example Conversations with ipuaro
|
||||||
|
|
||||||
|
This document shows realistic conversations you can have with ipuaro when working with the demo project.
|
||||||
|
|
||||||
|
## Conversation 1: Understanding the Codebase
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What does this project do?
|
||||||
406
packages/ipuaro/examples/demo-project/README.md
Normal file
406
packages/ipuaro/examples/demo-project/README.md
Normal file
@@ -0,0 +1,406 @@
|
|||||||
|
# ipuaro Demo Project
|
||||||
|
|
||||||
|
This is a demo project showcasing ipuaro's capabilities as a local AI agent for codebase operations.
|
||||||
|
|
||||||
|
## Project Overview
|
||||||
|
|
||||||
|
A simple TypeScript application demonstrating:
|
||||||
|
- User management service
|
||||||
|
- Authentication service
|
||||||
|
- Validation utilities
|
||||||
|
- Logging utilities
|
||||||
|
- Unit tests
|
||||||
|
|
||||||
|
The code intentionally includes various patterns (TODOs, FIXMEs, complex functions, dependencies) to demonstrate ipuaro's analysis tools.
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
|
||||||
|
1. **Redis** - Running locally
|
||||||
|
```bash
|
||||||
|
# macOS
|
||||||
|
brew install redis
|
||||||
|
redis-server --appendonly yes
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Ollama** - With qwen2.5-coder model
|
||||||
|
```bash
|
||||||
|
brew install ollama
|
||||||
|
ollama serve
|
||||||
|
ollama pull qwen2.5-coder:7b-instruct
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Node.js** - v20 or higher
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install dependencies
|
||||||
|
npm install
|
||||||
|
|
||||||
|
# Or with pnpm
|
||||||
|
pnpm install
|
||||||
|
```
|
||||||
|
|
||||||
|
## Using ipuaro with Demo Project
|
||||||
|
|
||||||
|
### Start ipuaro
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# From this directory
|
||||||
|
npx @samiyev/ipuaro
|
||||||
|
|
||||||
|
# Or if installed globally
|
||||||
|
ipuaro
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example Queries
|
||||||
|
|
||||||
|
Try these queries to explore ipuaro's capabilities:
|
||||||
|
|
||||||
|
#### 1. Understanding the Codebase
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What is the structure of this project?
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `get_structure` to show the directory tree.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: How does user creation work?
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will:
|
||||||
|
1. Use `get_structure` to find relevant files
|
||||||
|
2. Use `get_function` to read the `createUser` function
|
||||||
|
3. Use `find_references` to see where it's called
|
||||||
|
4. Explain the flow
|
||||||
|
|
||||||
|
#### 2. Finding Issues
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What TODOs and FIXMEs are in the codebase?
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `get_todos` to list all TODO/FIXME comments.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Which files are most complex?
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `get_complexity` to analyze and rank files by complexity.
|
||||||
|
|
||||||
|
#### 3. Understanding Dependencies
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What does the UserService depend on?
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `get_dependencies` to show imported modules.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What files use the validation utilities?
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `get_dependents` to show files importing validation.ts.
|
||||||
|
|
||||||
|
#### 4. Code Analysis
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Find all references to the ValidationError class
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `find_references` to locate all usages.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Where is the Logger class defined?
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `find_definition` to locate the definition.
|
||||||
|
|
||||||
|
#### 5. Making Changes
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Add a method to UserService to count total users
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will:
|
||||||
|
1. Read UserService class with `get_class`
|
||||||
|
2. Generate the new method
|
||||||
|
3. Use `edit_lines` to add it
|
||||||
|
4. Show diff and ask for confirmation
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Fix the TODO in validation.ts about password validation
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will:
|
||||||
|
1. Find the TODO with `get_todos`
|
||||||
|
2. Read the function with `get_function`
|
||||||
|
3. Implement stronger password validation
|
||||||
|
4. Use `edit_lines` to apply changes
|
||||||
|
|
||||||
|
#### 6. Testing
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Run the tests
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `run_tests` to execute the test suite.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Add a test for the getUserByEmail method
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will:
|
||||||
|
1. Read existing tests with `get_lines`
|
||||||
|
2. Generate new test following the pattern
|
||||||
|
3. Use `edit_lines` to add it
|
||||||
|
|
||||||
|
#### 7. Git Operations
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What files have I changed?
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `git_status` to show modified files.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Show me the diff for UserService
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `git_diff` with the file path.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Commit these changes with message "feat: add user count method"
|
||||||
|
```
|
||||||
|
|
||||||
|
ipuaro will use `git_commit` after confirmation.
|
||||||
|
|
||||||
|
## Tool Demonstration Scenarios
|
||||||
|
|
||||||
|
### Scenario 1: Bug Fix Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
You: There's a bug - we need to sanitize user input before storing. Fix this in UserService.
|
||||||
|
|
||||||
|
Agent will:
|
||||||
|
1. get_function("src/services/user.ts", "createUser")
|
||||||
|
2. See that sanitization is missing
|
||||||
|
3. find_definition("sanitizeInput") to locate the utility
|
||||||
|
4. edit_lines to add sanitization call
|
||||||
|
5. run_tests to verify the fix
|
||||||
|
```
|
||||||
|
|
||||||
|
### Scenario 2: Refactoring Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Extract the ID generation logic into a separate utility function
|
||||||
|
|
||||||
|
Agent will:
|
||||||
|
1. get_class("src/services/user.ts", "UserService")
|
||||||
|
2. Find generateId private method
|
||||||
|
3. create_file("src/utils/id.ts") with the utility
|
||||||
|
4. edit_lines to replace private method with import
|
||||||
|
5. find_references("generateId") to check no other usages
|
||||||
|
6. run_tests to ensure nothing broke
|
||||||
|
```
|
||||||
|
|
||||||
|
### Scenario 3: Feature Addition
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Add password reset functionality to AuthService
|
||||||
|
|
||||||
|
Agent will:
|
||||||
|
1. get_class("src/auth/service.ts", "AuthService")
|
||||||
|
2. get_dependencies to see what's available
|
||||||
|
3. Design the resetPassword method
|
||||||
|
4. edit_lines to add the method
|
||||||
|
5. Suggest creating a test
|
||||||
|
6. create_file("tests/auth.test.ts") if needed
|
||||||
|
```
|
||||||
|
|
||||||
|
### Scenario 4: Code Review
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Review the code for security issues
|
||||||
|
|
||||||
|
Agent will:
|
||||||
|
1. get_todos to find FIXME about XSS
|
||||||
|
2. get_complexity to find complex functions
|
||||||
|
3. get_function for suspicious functions
|
||||||
|
4. Suggest improvements
|
||||||
|
5. Optionally edit_lines to fix issues
|
||||||
|
```
|
||||||
|
|
||||||
|
## Slash Commands
|
||||||
|
|
||||||
|
While exploring, you can use these commands:
|
||||||
|
|
||||||
|
```
|
||||||
|
/help # Show all commands and hotkeys
|
||||||
|
/status # Show system status (LLM, Redis, context)
|
||||||
|
/sessions list # List all sessions
|
||||||
|
/undo # Undo last file change
|
||||||
|
/clear # Clear chat history
|
||||||
|
/reindex # Force project reindexation
|
||||||
|
/auto-apply on # Enable auto-apply mode (skip confirmations)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Hotkeys
|
||||||
|
|
||||||
|
- `Ctrl+C` - Interrupt generation (1st) / Exit (2nd within 1s)
|
||||||
|
- `Ctrl+D` - Exit and save session
|
||||||
|
- `Ctrl+Z` - Undo last change
|
||||||
|
- `↑` / `↓` - Navigate input history
|
||||||
|
|
||||||
|
## Project Files Overview
|
||||||
|
|
||||||
|
```
|
||||||
|
demo-project/
|
||||||
|
├── src/
|
||||||
|
│ ├── auth/
|
||||||
|
│ │ └── service.ts # Authentication logic (login, logout, verify)
|
||||||
|
│ ├── services/
|
||||||
|
│ │ └── user.ts # User CRUD operations
|
||||||
|
│ ├── utils/
|
||||||
|
│ │ ├── logger.ts # Logging utility (multiple methods)
|
||||||
|
│ │ └── validation.ts # Input validation (with TODOs/FIXMEs)
|
||||||
|
│ ├── types/
|
||||||
|
│ │ └── user.ts # TypeScript type definitions
|
||||||
|
│ └── index.ts # Application entry point
|
||||||
|
├── tests/
|
||||||
|
│ └── user.test.ts # User service tests (vitest)
|
||||||
|
├── package.json # Project configuration
|
||||||
|
├── tsconfig.json # TypeScript configuration
|
||||||
|
├── vitest.config.ts # Test configuration
|
||||||
|
└── .ipuaro.json # ipuaro configuration
|
||||||
|
```
|
||||||
|
|
||||||
|
## What ipuaro Can Do With This Project
|
||||||
|
|
||||||
|
### Read Tools ✅
|
||||||
|
- **get_lines**: Read any file or specific line ranges
|
||||||
|
- **get_function**: Extract specific functions (login, createUser, etc.)
|
||||||
|
- **get_class**: Extract classes (UserService, AuthService, Logger, etc.)
|
||||||
|
- **get_structure**: See directory tree
|
||||||
|
|
||||||
|
### Edit Tools ✅
|
||||||
|
- **edit_lines**: Modify functions, fix bugs, add features
|
||||||
|
- **create_file**: Add new utilities, tests, services
|
||||||
|
- **delete_file**: Remove unused files
|
||||||
|
|
||||||
|
### Search Tools ✅
|
||||||
|
- **find_references**: Find all usages of ValidationError, User, etc.
|
||||||
|
- **find_definition**: Locate where Logger, UserService are defined
|
||||||
|
|
||||||
|
### Analysis Tools ✅
|
||||||
|
- **get_dependencies**: See what UserService imports
|
||||||
|
- **get_dependents**: See what imports validation.ts (multiple files!)
|
||||||
|
- **get_complexity**: Identify complex functions (createUser has moderate complexity)
|
||||||
|
- **get_todos**: Find 2 TODOs and 1 FIXME in the project
|
||||||
|
|
||||||
|
### Git Tools ✅
|
||||||
|
- **git_status**: Check working tree
|
||||||
|
- **git_diff**: See changes
|
||||||
|
- **git_commit**: Commit with AI-generated messages
|
||||||
|
|
||||||
|
### Run Tools ✅
|
||||||
|
- **run_command**: Execute npm scripts
|
||||||
|
- **run_tests**: Run vitest tests
|
||||||
|
|
||||||
|
## Tips for Best Experience
|
||||||
|
|
||||||
|
1. **Start Small**: Ask about structure first, then dive into specific files
|
||||||
|
2. **Be Specific**: "Show me the createUser function" vs "How does this work?"
|
||||||
|
3. **Use Tools Implicitly**: Just ask questions, let ipuaro choose the right tools
|
||||||
|
4. **Review Changes**: Always review diffs before applying edits
|
||||||
|
5. **Test Often**: Ask ipuaro to run tests after making changes
|
||||||
|
6. **Commit Incrementally**: Use git_commit for each logical change
|
||||||
|
|
||||||
|
## Advanced Workflows
|
||||||
|
|
||||||
|
### Workflow 1: Add New Feature
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Add email verification to the authentication flow
|
||||||
|
|
||||||
|
Agent will:
|
||||||
|
1. Analyze current auth flow
|
||||||
|
2. Propose design (new fields, methods)
|
||||||
|
3. Edit AuthService to add verification
|
||||||
|
4. Edit User types to add verified field
|
||||||
|
5. Create tests for verification
|
||||||
|
6. Run tests
|
||||||
|
7. Offer to commit
|
||||||
|
```
|
||||||
|
|
||||||
|
### Workflow 2: Performance Optimization
|
||||||
|
|
||||||
|
```
|
||||||
|
You: The user lookup is slow when we have many users. Optimize it.
|
||||||
|
|
||||||
|
Agent will:
|
||||||
|
1. Analyze UserService.getUserByEmail
|
||||||
|
2. See it's using Array.find (O(n))
|
||||||
|
3. Suggest adding an email index
|
||||||
|
4. Edit to add private emailIndex: Map<string, User>
|
||||||
|
5. Update createUser to populate index
|
||||||
|
6. Update deleteUser to maintain index
|
||||||
|
7. Run tests to verify
|
||||||
|
```
|
||||||
|
|
||||||
|
### Workflow 3: Security Audit
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Audit the code for security vulnerabilities
|
||||||
|
|
||||||
|
Agent will:
|
||||||
|
1. get_todos to find FIXME about XSS
|
||||||
|
2. Review sanitizeInput implementation
|
||||||
|
3. Check password validation strength
|
||||||
|
4. Look for SQL injection risks (none here)
|
||||||
|
5. Suggest improvements
|
||||||
|
6. Optionally implement fixes
|
||||||
|
```
|
||||||
|
|
||||||
|
## Next Steps
|
||||||
|
|
||||||
|
After exploring the demo project, try:
|
||||||
|
|
||||||
|
1. **Your Own Project**: Run `ipuaro` in your real codebase
|
||||||
|
2. **Customize Config**: Edit `.ipuaro.json` to fit your needs
|
||||||
|
3. **Different Model**: Try `--model qwen2.5-coder:32b-instruct` for better results
|
||||||
|
4. **Auto-Apply Mode**: Use `--auto-apply` for faster iterations (with caution!)
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Redis Not Connected
|
||||||
|
```bash
|
||||||
|
# Start Redis with persistence
|
||||||
|
redis-server --appendonly yes
|
||||||
|
```
|
||||||
|
|
||||||
|
### Ollama Model Not Found
|
||||||
|
```bash
|
||||||
|
# Pull the model
|
||||||
|
ollama pull qwen2.5-coder:7b-instruct
|
||||||
|
|
||||||
|
# Check it's installed
|
||||||
|
ollama list
|
||||||
|
```
|
||||||
|
|
||||||
|
### Indexing Takes Long
|
||||||
|
The project is small (~10 files) so indexing should be instant. For larger projects, use ignore patterns in `.ipuaro.json`.
|
||||||
|
|
||||||
|
## Learn More
|
||||||
|
|
||||||
|
- [ipuaro Documentation](../../README.md)
|
||||||
|
- [Architecture Guide](../../ARCHITECTURE.md)
|
||||||
|
- [Tools Reference](../../TOOLS.md)
|
||||||
|
- [GitHub Repository](https://github.com/samiyev/puaros)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Happy coding with ipuaro!** 🎩✨
|
||||||
20
packages/ipuaro/examples/demo-project/package.json
Normal file
20
packages/ipuaro/examples/demo-project/package.json
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
"name": "ipuaro-demo-project",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Demo project for ipuaro - showcasing AI agent capabilities",
|
||||||
|
"private": true,
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"dev": "tsx src/index.ts",
|
||||||
|
"test": "vitest",
|
||||||
|
"test:run": "vitest run",
|
||||||
|
"build": "tsc"
|
||||||
|
},
|
||||||
|
"dependencies": {},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.10.1",
|
||||||
|
"tsx": "^4.19.2",
|
||||||
|
"typescript": "^5.7.2",
|
||||||
|
"vitest": "^1.6.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
85
packages/ipuaro/examples/demo-project/src/auth/service.ts
Normal file
85
packages/ipuaro/examples/demo-project/src/auth/service.ts
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
/**
|
||||||
|
* Authentication service
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { User, AuthToken } from "../types/user"
|
||||||
|
import { UserService } from "../services/user"
|
||||||
|
import { createLogger } from "../utils/logger"
|
||||||
|
|
||||||
|
const logger = createLogger("AuthService")
|
||||||
|
|
||||||
|
export class AuthService {
|
||||||
|
private tokens: Map<string, AuthToken> = new Map()
|
||||||
|
|
||||||
|
constructor(private userService: UserService) {}
|
||||||
|
|
||||||
|
async login(email: string, password: string): Promise<AuthToken> {
|
||||||
|
logger.info("Login attempt", { email })
|
||||||
|
|
||||||
|
// Get user
|
||||||
|
const user = await this.userService.getUserByEmail(email)
|
||||||
|
if (!user) {
|
||||||
|
logger.warn("Login failed - user not found", { email })
|
||||||
|
throw new Error("Invalid credentials")
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Implement actual password verification
|
||||||
|
// For demo purposes, we just check if password is provided
|
||||||
|
if (!password) {
|
||||||
|
logger.warn("Login failed - no password", { email })
|
||||||
|
throw new Error("Invalid credentials")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate token
|
||||||
|
const token = this.generateToken(user)
|
||||||
|
this.tokens.set(token.token, token)
|
||||||
|
|
||||||
|
logger.info("Login successful", { userId: user.id })
|
||||||
|
return token
|
||||||
|
}
|
||||||
|
|
||||||
|
async logout(tokenString: string): Promise<void> {
|
||||||
|
logger.info("Logout", { token: tokenString.substring(0, 10) + "..." })
|
||||||
|
|
||||||
|
const token = this.tokens.get(tokenString)
|
||||||
|
if (!token) {
|
||||||
|
throw new Error("Invalid token")
|
||||||
|
}
|
||||||
|
|
||||||
|
this.tokens.delete(tokenString)
|
||||||
|
logger.info("Logout successful", { userId: token.userId })
|
||||||
|
}
|
||||||
|
|
||||||
|
async verifyToken(tokenString: string): Promise<User> {
|
||||||
|
logger.debug("Verifying token")
|
||||||
|
|
||||||
|
const token = this.tokens.get(tokenString)
|
||||||
|
if (!token) {
|
||||||
|
throw new Error("Invalid token")
|
||||||
|
}
|
||||||
|
|
||||||
|
if (token.expiresAt < new Date()) {
|
||||||
|
this.tokens.delete(tokenString)
|
||||||
|
throw new Error("Token expired")
|
||||||
|
}
|
||||||
|
|
||||||
|
const user = await this.userService.getUserById(token.userId)
|
||||||
|
if (!user) {
|
||||||
|
throw new Error("User not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
return user
|
||||||
|
}
|
||||||
|
|
||||||
|
private generateToken(user: User): AuthToken {
|
||||||
|
const token = `tok_${Date.now()}_${Math.random().toString(36).substring(7)}`
|
||||||
|
const expiresAt = new Date()
|
||||||
|
expiresAt.setHours(expiresAt.getHours() + 24) // 24 hours
|
||||||
|
|
||||||
|
return {
|
||||||
|
token,
|
||||||
|
expiresAt,
|
||||||
|
userId: user.id,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
48
packages/ipuaro/examples/demo-project/src/index.ts
Normal file
48
packages/ipuaro/examples/demo-project/src/index.ts
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
/**
|
||||||
|
* Demo application entry point
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { UserService } from "./services/user"
|
||||||
|
import { AuthService } from "./auth/service"
|
||||||
|
import { createLogger } from "./utils/logger"
|
||||||
|
|
||||||
|
const logger = createLogger("App")
|
||||||
|
|
||||||
|
async function main(): Promise<void> {
|
||||||
|
logger.info("Starting demo application")
|
||||||
|
|
||||||
|
// Initialize services
|
||||||
|
const userService = new UserService()
|
||||||
|
const authService = new AuthService(userService)
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Create a demo user
|
||||||
|
const user = await userService.createUser({
|
||||||
|
email: "demo@example.com",
|
||||||
|
name: "Demo User",
|
||||||
|
password: "password123",
|
||||||
|
role: "admin",
|
||||||
|
})
|
||||||
|
|
||||||
|
logger.info("Demo user created", { userId: user.id })
|
||||||
|
|
||||||
|
// Login
|
||||||
|
const token = await authService.login("demo@example.com", "password123")
|
||||||
|
logger.info("Login successful", { token: token.token })
|
||||||
|
|
||||||
|
// Verify token
|
||||||
|
const verifiedUser = await authService.verifyToken(token.token)
|
||||||
|
logger.info("Token verified", { userId: verifiedUser.id })
|
||||||
|
|
||||||
|
// Logout
|
||||||
|
await authService.logout(token.token)
|
||||||
|
logger.info("Logout successful")
|
||||||
|
} catch (error) {
|
||||||
|
logger.error("Application error", error as Error)
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info("Demo application finished")
|
||||||
|
}
|
||||||
|
|
||||||
|
main()
|
||||||
100
packages/ipuaro/examples/demo-project/src/services/user.ts
Normal file
100
packages/ipuaro/examples/demo-project/src/services/user.ts
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
/**
|
||||||
|
* User service - handles user-related operations
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { User, CreateUserDto, UpdateUserDto } from "../types/user"
|
||||||
|
import { isValidEmail, isStrongPassword, ValidationError } from "../utils/validation"
|
||||||
|
import { createLogger } from "../utils/logger"
|
||||||
|
|
||||||
|
const logger = createLogger("UserService")
|
||||||
|
|
||||||
|
export class UserService {
|
||||||
|
private users: Map<string, User> = new Map()
|
||||||
|
|
||||||
|
async createUser(dto: CreateUserDto): Promise<User> {
|
||||||
|
logger.info("Creating user", { email: dto.email })
|
||||||
|
|
||||||
|
// Validate email
|
||||||
|
if (!isValidEmail(dto.email)) {
|
||||||
|
throw new ValidationError("Invalid email address", "email")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate password
|
||||||
|
if (!isStrongPassword(dto.password)) {
|
||||||
|
throw new ValidationError("Password must be at least 8 characters", "password")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if user already exists
|
||||||
|
const existingUser = Array.from(this.users.values()).find((u) => u.email === dto.email)
|
||||||
|
|
||||||
|
if (existingUser) {
|
||||||
|
throw new Error("User with this email already exists")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create user
|
||||||
|
const user: User = {
|
||||||
|
id: this.generateId(),
|
||||||
|
email: dto.email,
|
||||||
|
name: dto.name,
|
||||||
|
role: dto.role || "user",
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
}
|
||||||
|
|
||||||
|
this.users.set(user.id, user)
|
||||||
|
logger.info("User created", { userId: user.id })
|
||||||
|
|
||||||
|
return user
|
||||||
|
}
|
||||||
|
|
||||||
|
async getUserById(id: string): Promise<User | null> {
|
||||||
|
logger.debug("Getting user by ID", { userId: id })
|
||||||
|
return this.users.get(id) || null
|
||||||
|
}
|
||||||
|
|
||||||
|
async getUserByEmail(email: string): Promise<User | null> {
|
||||||
|
logger.debug("Getting user by email", { email })
|
||||||
|
return Array.from(this.users.values()).find((u) => u.email === email) || null
|
||||||
|
}
|
||||||
|
|
||||||
|
async updateUser(id: string, dto: UpdateUserDto): Promise<User> {
|
||||||
|
logger.info("Updating user", { userId: id })
|
||||||
|
|
||||||
|
const user = this.users.get(id)
|
||||||
|
if (!user) {
|
||||||
|
throw new Error("User not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
const updated: User = {
|
||||||
|
...user,
|
||||||
|
...(dto.name && { name: dto.name }),
|
||||||
|
...(dto.role && { role: dto.role }),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
}
|
||||||
|
|
||||||
|
this.users.set(id, updated)
|
||||||
|
logger.info("User updated", { userId: id })
|
||||||
|
|
||||||
|
return updated
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteUser(id: string): Promise<void> {
|
||||||
|
logger.info("Deleting user", { userId: id })
|
||||||
|
|
||||||
|
if (!this.users.has(id)) {
|
||||||
|
throw new Error("User not found")
|
||||||
|
}
|
||||||
|
|
||||||
|
this.users.delete(id)
|
||||||
|
logger.info("User deleted", { userId: id })
|
||||||
|
}
|
||||||
|
|
||||||
|
async listUsers(): Promise<User[]> {
|
||||||
|
logger.debug("Listing all users")
|
||||||
|
return Array.from(this.users.values())
|
||||||
|
}
|
||||||
|
|
||||||
|
private generateId(): string {
|
||||||
|
return `user_${Date.now()}_${Math.random().toString(36).substring(7)}`
|
||||||
|
}
|
||||||
|
}
|
||||||
32
packages/ipuaro/examples/demo-project/src/types/user.ts
Normal file
32
packages/ipuaro/examples/demo-project/src/types/user.ts
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
/**
|
||||||
|
* User-related type definitions
|
||||||
|
*/
|
||||||
|
|
||||||
|
export interface User {
|
||||||
|
id: string
|
||||||
|
email: string
|
||||||
|
name: string
|
||||||
|
role: UserRole
|
||||||
|
createdAt: Date
|
||||||
|
updatedAt: Date
|
||||||
|
}
|
||||||
|
|
||||||
|
export type UserRole = "admin" | "user" | "guest"
|
||||||
|
|
||||||
|
export interface CreateUserDto {
|
||||||
|
email: string
|
||||||
|
name: string
|
||||||
|
password: string
|
||||||
|
role?: UserRole
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface UpdateUserDto {
|
||||||
|
name?: string
|
||||||
|
role?: UserRole
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AuthToken {
|
||||||
|
token: string
|
||||||
|
expiresAt: Date
|
||||||
|
userId: string
|
||||||
|
}
|
||||||
41
packages/ipuaro/examples/demo-project/src/utils/logger.ts
Normal file
41
packages/ipuaro/examples/demo-project/src/utils/logger.ts
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
/**
|
||||||
|
* Simple logging utility
|
||||||
|
*/
|
||||||
|
|
||||||
|
export type LogLevel = "debug" | "info" | "warn" | "error"
|
||||||
|
|
||||||
|
export class Logger {
|
||||||
|
constructor(private context: string) {}
|
||||||
|
|
||||||
|
debug(message: string, meta?: Record<string, unknown>): void {
|
||||||
|
this.log("debug", message, meta)
|
||||||
|
}
|
||||||
|
|
||||||
|
info(message: string, meta?: Record<string, unknown>): void {
|
||||||
|
this.log("info", message, meta)
|
||||||
|
}
|
||||||
|
|
||||||
|
warn(message: string, meta?: Record<string, unknown>): void {
|
||||||
|
this.log("warn", message, meta)
|
||||||
|
}
|
||||||
|
|
||||||
|
error(message: string, error?: Error, meta?: Record<string, unknown>): void {
|
||||||
|
this.log("error", message, { ...meta, error: error?.message })
|
||||||
|
}
|
||||||
|
|
||||||
|
private log(level: LogLevel, message: string, meta?: Record<string, unknown>): void {
|
||||||
|
const timestamp = new Date().toISOString()
|
||||||
|
const logEntry = {
|
||||||
|
timestamp,
|
||||||
|
level,
|
||||||
|
context: this.context,
|
||||||
|
message,
|
||||||
|
...(meta && { meta }),
|
||||||
|
}
|
||||||
|
console.log(JSON.stringify(logEntry))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createLogger(context: string): Logger {
|
||||||
|
return new Logger(context)
|
||||||
|
}
|
||||||
@@ -0,0 +1,28 @@
|
|||||||
|
/**
|
||||||
|
* Validation utilities
|
||||||
|
*/
|
||||||
|
|
||||||
|
export function isValidEmail(email: string): boolean {
|
||||||
|
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/
|
||||||
|
return emailRegex.test(email)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isStrongPassword(password: string): boolean {
|
||||||
|
// TODO: Add more sophisticated password validation
|
||||||
|
return password.length >= 8
|
||||||
|
}
|
||||||
|
|
||||||
|
export function sanitizeInput(input: string): string {
|
||||||
|
// FIXME: This is a basic implementation, needs XSS protection
|
||||||
|
return input.trim().replace(/[<>]/g, "")
|
||||||
|
}
|
||||||
|
|
||||||
|
export class ValidationError extends Error {
|
||||||
|
constructor(
|
||||||
|
message: string,
|
||||||
|
public field: string,
|
||||||
|
) {
|
||||||
|
super(message)
|
||||||
|
this.name = "ValidationError"
|
||||||
|
}
|
||||||
|
}
|
||||||
141
packages/ipuaro/examples/demo-project/tests/user.test.ts
Normal file
141
packages/ipuaro/examples/demo-project/tests/user.test.ts
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
/**
|
||||||
|
* User service tests
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, expect, beforeEach } from "vitest"
|
||||||
|
import { UserService } from "../src/services/user"
|
||||||
|
import { ValidationError } from "../src/utils/validation"
|
||||||
|
|
||||||
|
describe("UserService", () => {
|
||||||
|
let userService: UserService
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
userService = new UserService()
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("createUser", () => {
|
||||||
|
it("should create a new user", async () => {
|
||||||
|
const user = await userService.createUser({
|
||||||
|
email: "test@example.com",
|
||||||
|
name: "Test User",
|
||||||
|
password: "password123",
|
||||||
|
})
|
||||||
|
|
||||||
|
expect(user).toBeDefined()
|
||||||
|
expect(user.email).toBe("test@example.com")
|
||||||
|
expect(user.name).toBe("Test User")
|
||||||
|
expect(user.role).toBe("user")
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should reject invalid email", async () => {
|
||||||
|
await expect(
|
||||||
|
userService.createUser({
|
||||||
|
email: "invalid-email",
|
||||||
|
name: "Test User",
|
||||||
|
password: "password123",
|
||||||
|
}),
|
||||||
|
).rejects.toThrow(ValidationError)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should reject weak password", async () => {
|
||||||
|
await expect(
|
||||||
|
userService.createUser({
|
||||||
|
email: "test@example.com",
|
||||||
|
name: "Test User",
|
||||||
|
password: "weak",
|
||||||
|
}),
|
||||||
|
).rejects.toThrow(ValidationError)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should prevent duplicate emails", async () => {
|
||||||
|
await userService.createUser({
|
||||||
|
email: "test@example.com",
|
||||||
|
name: "Test User",
|
||||||
|
password: "password123",
|
||||||
|
})
|
||||||
|
|
||||||
|
await expect(
|
||||||
|
userService.createUser({
|
||||||
|
email: "test@example.com",
|
||||||
|
name: "Another User",
|
||||||
|
password: "password123",
|
||||||
|
}),
|
||||||
|
).rejects.toThrow("already exists")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("getUserById", () => {
|
||||||
|
it("should return user by ID", async () => {
|
||||||
|
const created = await userService.createUser({
|
||||||
|
email: "test@example.com",
|
||||||
|
name: "Test User",
|
||||||
|
password: "password123",
|
||||||
|
})
|
||||||
|
|
||||||
|
const found = await userService.getUserById(created.id)
|
||||||
|
expect(found).toEqual(created)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should return null for non-existent ID", async () => {
|
||||||
|
const found = await userService.getUserById("non-existent")
|
||||||
|
expect(found).toBeNull()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("updateUser", () => {
|
||||||
|
it("should update user name", async () => {
|
||||||
|
const user = await userService.createUser({
|
||||||
|
email: "test@example.com",
|
||||||
|
name: "Test User",
|
||||||
|
password: "password123",
|
||||||
|
})
|
||||||
|
|
||||||
|
const updated = await userService.updateUser(user.id, {
|
||||||
|
name: "Updated Name",
|
||||||
|
})
|
||||||
|
|
||||||
|
expect(updated.name).toBe("Updated Name")
|
||||||
|
expect(updated.email).toBe(user.email)
|
||||||
|
})
|
||||||
|
|
||||||
|
it("should throw error for non-existent user", async () => {
|
||||||
|
await expect(userService.updateUser("non-existent", { name: "Test" })).rejects.toThrow(
|
||||||
|
"not found",
|
||||||
|
)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("deleteUser", () => {
|
||||||
|
it("should delete user", async () => {
|
||||||
|
const user = await userService.createUser({
|
||||||
|
email: "test@example.com",
|
||||||
|
name: "Test User",
|
||||||
|
password: "password123",
|
||||||
|
})
|
||||||
|
|
||||||
|
await userService.deleteUser(user.id)
|
||||||
|
|
||||||
|
const found = await userService.getUserById(user.id)
|
||||||
|
expect(found).toBeNull()
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
describe("listUsers", () => {
|
||||||
|
it("should return all users", async () => {
|
||||||
|
await userService.createUser({
|
||||||
|
email: "user1@example.com",
|
||||||
|
name: "User 1",
|
||||||
|
password: "password123",
|
||||||
|
})
|
||||||
|
|
||||||
|
await userService.createUser({
|
||||||
|
email: "user2@example.com",
|
||||||
|
name: "User 2",
|
||||||
|
password: "password123",
|
||||||
|
})
|
||||||
|
|
||||||
|
const users = await userService.listUsers()
|
||||||
|
expect(users).toHaveLength(2)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
})
|
||||||
16
packages/ipuaro/examples/demo-project/tsconfig.json
Normal file
16
packages/ipuaro/examples/demo-project/tsconfig.json
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "ES2023",
|
||||||
|
"module": "ESNext",
|
||||||
|
"lib": ["ES2023"],
|
||||||
|
"moduleResolution": "Bundler",
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"strict": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"resolveJsonModule": true,
|
||||||
|
"outDir": "dist",
|
||||||
|
"rootDir": "src"
|
||||||
|
},
|
||||||
|
"include": ["src/**/*"],
|
||||||
|
"exclude": ["node_modules", "dist", "tests"]
|
||||||
|
}
|
||||||
8
packages/ipuaro/examples/demo-project/vitest.config.ts
Normal file
8
packages/ipuaro/examples/demo-project/vitest.config.ts
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
import { defineConfig } from "vitest/config"
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
test: {
|
||||||
|
globals: true,
|
||||||
|
environment: "node",
|
||||||
|
},
|
||||||
|
})
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@samiyev/ipuaro",
|
"name": "@samiyev/ipuaro",
|
||||||
"version": "0.1.0",
|
"version": "0.30.0",
|
||||||
"description": "Local AI agent for codebase operations with infinite context feeling",
|
"description": "Local AI agent for codebase operations with infinite context feeling",
|
||||||
"author": "Fozilbek Samiyev <fozilbek.samiyev@gmail.com>",
|
"author": "Fozilbek Samiyev <fozilbek.samiyev@gmail.com>",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
@@ -8,7 +8,7 @@
|
|||||||
"main": "./dist/index.js",
|
"main": "./dist/index.js",
|
||||||
"types": "./dist/index.d.ts",
|
"types": "./dist/index.d.ts",
|
||||||
"bin": {
|
"bin": {
|
||||||
"ipuaro": "./bin/ipuaro.js"
|
"ipuaro": "bin/ipuaro.js"
|
||||||
},
|
},
|
||||||
"exports": {
|
"exports": {
|
||||||
".": {
|
".": {
|
||||||
@@ -33,28 +33,34 @@
|
|||||||
"format": "prettier --write src"
|
"format": "prettier --write src"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"ink": "^4.4.1",
|
|
||||||
"ink-text-input": "^5.0.1",
|
|
||||||
"react": "^18.2.0",
|
|
||||||
"ioredis": "^5.4.1",
|
|
||||||
"tree-sitter": "^0.21.1",
|
|
||||||
"tree-sitter-typescript": "^0.21.2",
|
|
||||||
"tree-sitter-javascript": "^0.21.0",
|
|
||||||
"ollama": "^0.5.11",
|
|
||||||
"simple-git": "^3.27.0",
|
|
||||||
"chokidar": "^3.6.0",
|
"chokidar": "^3.6.0",
|
||||||
"commander": "^11.1.0",
|
"commander": "^11.1.0",
|
||||||
"zod": "^3.23.8",
|
"globby": "^16.0.0",
|
||||||
"ignore": "^5.3.2"
|
"ink": "^4.4.1",
|
||||||
|
"ink-text-input": "^5.0.1",
|
||||||
|
"ioredis": "^5.4.1",
|
||||||
|
"ollama": "^0.5.11",
|
||||||
|
"react": "^18.2.0",
|
||||||
|
"simple-git": "^3.27.0",
|
||||||
|
"tree-sitter": "^0.21.1",
|
||||||
|
"tree-sitter-javascript": "^0.21.0",
|
||||||
|
"tree-sitter-json": "^0.24.8",
|
||||||
|
"tree-sitter-typescript": "^0.21.2",
|
||||||
|
"yaml": "^2.8.2",
|
||||||
|
"zod": "^3.23.8"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"@testing-library/react": "^16.3.0",
|
||||||
|
"@types/jsdom": "^27.0.0",
|
||||||
"@types/node": "^22.10.1",
|
"@types/node": "^22.10.1",
|
||||||
"@types/react": "^18.2.0",
|
"@types/react": "^18.2.0",
|
||||||
"vitest": "^1.6.0",
|
|
||||||
"@vitest/coverage-v8": "^1.6.0",
|
"@vitest/coverage-v8": "^1.6.0",
|
||||||
"@vitest/ui": "^1.6.0",
|
"@vitest/ui": "^1.6.0",
|
||||||
|
"jsdom": "^27.2.0",
|
||||||
|
"react-dom": "18.3.1",
|
||||||
"tsup": "^8.3.5",
|
"tsup": "^8.3.5",
|
||||||
"typescript": "^5.7.2"
|
"typescript": "^5.7.2",
|
||||||
|
"vitest": "^1.6.0"
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=20.0.0"
|
"node": ">=20.0.0"
|
||||||
@@ -70,11 +76,11 @@
|
|||||||
],
|
],
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/samiyev/puaros.git",
|
"url": "git+https://github.com/samiyev/puaros.git",
|
||||||
"directory": "packages/ipuaro"
|
"directory": "packages/ipuaro"
|
||||||
},
|
},
|
||||||
"bugs": {
|
"bugs": {
|
||||||
"url": "https://github.com/samiyev/puaros/issues"
|
"url": "https://github.com/samiyev/puaros/issues"
|
||||||
},
|
},
|
||||||
"homepage": "https://github.com/samiyev/puaros/tree/main/packages/ipuaro#readme"
|
"homepage": "https://github.com/samiyev/puaros/tree/main/packages/ipuaro#readme"
|
||||||
}
|
}
|
||||||
|
|||||||
234
packages/ipuaro/src/application/use-cases/ContextManager.ts
Normal file
234
packages/ipuaro/src/application/use-cases/ContextManager.ts
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
import type { ContextState, Session } from "../../domain/entities/Session.js"
|
||||||
|
import type { ILLMClient } from "../../domain/services/ILLMClient.js"
|
||||||
|
import { type ChatMessage, createSystemMessage } from "../../domain/value-objects/ChatMessage.js"
|
||||||
|
import { CONTEXT_COMPRESSION_THRESHOLD, CONTEXT_WINDOW_SIZE } from "../../domain/constants/index.js"
|
||||||
|
import type { ContextConfig } from "../../shared/constants/config.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* File in context with token count.
|
||||||
|
*/
|
||||||
|
export interface FileContext {
|
||||||
|
path: string
|
||||||
|
tokens: number
|
||||||
|
addedAt: number
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compression result.
|
||||||
|
*/
|
||||||
|
export interface CompressionResult {
|
||||||
|
compressed: boolean
|
||||||
|
removedMessages: number
|
||||||
|
tokensSaved: number
|
||||||
|
summary?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const COMPRESSION_PROMPT = `Summarize the following conversation history in a concise way,
|
||||||
|
preserving key information about:
|
||||||
|
- What files were discussed or modified
|
||||||
|
- What changes were made
|
||||||
|
- Important decisions or context
|
||||||
|
Keep the summary under 500 tokens.`
|
||||||
|
|
||||||
|
const MESSAGES_TO_KEEP = 5
|
||||||
|
const MIN_MESSAGES_FOR_COMPRESSION = 10
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Manages context window token budget and compression.
|
||||||
|
*/
|
||||||
|
export class ContextManager {
|
||||||
|
private readonly filesInContext = new Map<string, FileContext>()
|
||||||
|
private currentTokens = 0
|
||||||
|
private readonly contextWindowSize: number
|
||||||
|
private readonly compressionThreshold: number
|
||||||
|
private readonly compressionMethod: "llm-summary" | "truncate"
|
||||||
|
|
||||||
|
constructor(contextWindowSize: number = CONTEXT_WINDOW_SIZE, config?: ContextConfig) {
|
||||||
|
this.contextWindowSize = contextWindowSize
|
||||||
|
this.compressionThreshold = config?.autoCompressAt ?? CONTEXT_COMPRESSION_THRESHOLD
|
||||||
|
this.compressionMethod = config?.compressionMethod ?? "llm-summary"
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a file to the context.
|
||||||
|
*/
|
||||||
|
addToContext(file: string, tokens: number): void {
|
||||||
|
const existing = this.filesInContext.get(file)
|
||||||
|
if (existing) {
|
||||||
|
this.currentTokens -= existing.tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
this.filesInContext.set(file, {
|
||||||
|
path: file,
|
||||||
|
tokens,
|
||||||
|
addedAt: Date.now(),
|
||||||
|
})
|
||||||
|
this.currentTokens += tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a file from the context.
|
||||||
|
*/
|
||||||
|
removeFromContext(file: string): void {
|
||||||
|
const existing = this.filesInContext.get(file)
|
||||||
|
if (existing) {
|
||||||
|
this.currentTokens -= existing.tokens
|
||||||
|
this.filesInContext.delete(file)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current token usage ratio (0-1).
|
||||||
|
*/
|
||||||
|
getUsage(): number {
|
||||||
|
return this.currentTokens / this.contextWindowSize
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current token count.
|
||||||
|
*/
|
||||||
|
getTokenCount(): number {
|
||||||
|
return this.currentTokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get available tokens.
|
||||||
|
*/
|
||||||
|
getAvailableTokens(): number {
|
||||||
|
return this.contextWindowSize - this.currentTokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if compression is needed.
|
||||||
|
*/
|
||||||
|
needsCompression(): boolean {
|
||||||
|
return this.getUsage() > this.compressionThreshold
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update token count (e.g., after receiving a message).
|
||||||
|
*/
|
||||||
|
addTokens(tokens: number): void {
|
||||||
|
this.currentTokens += tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get files in context.
|
||||||
|
*/
|
||||||
|
getFilesInContext(): string[] {
|
||||||
|
return Array.from(this.filesInContext.keys())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sync context state from session.
|
||||||
|
*/
|
||||||
|
syncFromSession(session: Session): void {
|
||||||
|
this.filesInContext.clear()
|
||||||
|
this.currentTokens = 0
|
||||||
|
|
||||||
|
for (const file of session.context.filesInContext) {
|
||||||
|
this.filesInContext.set(file, {
|
||||||
|
path: file,
|
||||||
|
tokens: 0,
|
||||||
|
addedAt: Date.now(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
this.currentTokens = Math.floor(session.context.tokenUsage * this.contextWindowSize)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update session context state.
|
||||||
|
*/
|
||||||
|
updateSession(session: Session): void {
|
||||||
|
session.context.filesInContext = this.getFilesInContext()
|
||||||
|
session.context.tokenUsage = this.getUsage()
|
||||||
|
session.context.needsCompression = this.needsCompression()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compress context using LLM to summarize old messages.
|
||||||
|
*/
|
||||||
|
async compress(session: Session, llm: ILLMClient): Promise<CompressionResult> {
|
||||||
|
const history = session.history
|
||||||
|
if (history.length < MIN_MESSAGES_FOR_COMPRESSION) {
|
||||||
|
return {
|
||||||
|
compressed: false,
|
||||||
|
removedMessages: 0,
|
||||||
|
tokensSaved: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const messagesToCompress = history.slice(0, -MESSAGES_TO_KEEP)
|
||||||
|
const messagesToKeep = history.slice(-MESSAGES_TO_KEEP)
|
||||||
|
|
||||||
|
const tokensBeforeCompression = await this.countHistoryTokens(messagesToCompress, llm)
|
||||||
|
|
||||||
|
const summary = await this.summarizeMessages(messagesToCompress, llm)
|
||||||
|
const summaryTokens = await llm.countTokens(summary)
|
||||||
|
|
||||||
|
const summaryMessage = createSystemMessage(`[Previous conversation summary]\n${summary}`)
|
||||||
|
|
||||||
|
session.history = [summaryMessage, ...messagesToKeep]
|
||||||
|
|
||||||
|
const tokensSaved = tokensBeforeCompression - summaryTokens
|
||||||
|
this.currentTokens -= tokensSaved
|
||||||
|
|
||||||
|
this.updateSession(session)
|
||||||
|
|
||||||
|
return {
|
||||||
|
compressed: true,
|
||||||
|
removedMessages: messagesToCompress.length,
|
||||||
|
tokensSaved,
|
||||||
|
summary,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new context state.
|
||||||
|
*/
|
||||||
|
static createInitialState(): ContextState {
|
||||||
|
return {
|
||||||
|
filesInContext: [],
|
||||||
|
tokenUsage: 0,
|
||||||
|
needsCompression: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async summarizeMessages(messages: ChatMessage[], llm: ILLMClient): Promise<string> {
|
||||||
|
const conversation = this.formatMessagesForSummary(messages)
|
||||||
|
|
||||||
|
const response = await llm.chat([
|
||||||
|
createSystemMessage(COMPRESSION_PROMPT),
|
||||||
|
createSystemMessage(conversation),
|
||||||
|
])
|
||||||
|
|
||||||
|
return response.content
|
||||||
|
}
|
||||||
|
|
||||||
|
private formatMessagesForSummary(messages: ChatMessage[]): string {
|
||||||
|
return messages
|
||||||
|
.filter((m) => m.role !== "tool")
|
||||||
|
.map((m) => {
|
||||||
|
const role = m.role === "user" ? "User" : "Assistant"
|
||||||
|
const content = this.truncateContent(m.content, 500)
|
||||||
|
return `${role}: ${content}`
|
||||||
|
})
|
||||||
|
.join("\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
private truncateContent(content: string, maxLength: number): string {
|
||||||
|
if (content.length <= maxLength) {
|
||||||
|
return content
|
||||||
|
}
|
||||||
|
return `${content.slice(0, maxLength)}...`
|
||||||
|
}
|
||||||
|
|
||||||
|
private async countHistoryTokens(messages: ChatMessage[], llm: ILLMClient): Promise<number> {
|
||||||
|
let total = 0
|
||||||
|
for (const message of messages) {
|
||||||
|
total += await llm.countTokens(message.content)
|
||||||
|
}
|
||||||
|
return total
|
||||||
|
}
|
||||||
|
}
|
||||||
224
packages/ipuaro/src/application/use-cases/ExecuteTool.ts
Normal file
224
packages/ipuaro/src/application/use-cases/ExecuteTool.ts
Normal file
@@ -0,0 +1,224 @@
|
|||||||
|
import { randomUUID } from "node:crypto"
|
||||||
|
import type { Session } from "../../domain/entities/Session.js"
|
||||||
|
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"
|
||||||
|
import type { IStorage } from "../../domain/services/IStorage.js"
|
||||||
|
import type { DiffInfo, ToolContext } from "../../domain/services/ITool.js"
|
||||||
|
import type { ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||||
|
import { createErrorResult, type ToolResult } from "../../domain/value-objects/ToolResult.js"
|
||||||
|
import { createUndoEntry } from "../../domain/value-objects/UndoEntry.js"
|
||||||
|
import type { IToolRegistry } from "../interfaces/IToolRegistry.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of confirmation dialog.
|
||||||
|
*/
|
||||||
|
export interface ConfirmationResult {
|
||||||
|
confirmed: boolean
|
||||||
|
editedContent?: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Confirmation handler callback type.
|
||||||
|
* Can return either a boolean (for backward compatibility) or a ConfirmationResult.
|
||||||
|
*/
|
||||||
|
export type ConfirmationHandler = (
|
||||||
|
message: string,
|
||||||
|
diff?: DiffInfo,
|
||||||
|
) => Promise<boolean | ConfirmationResult>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Progress handler callback type.
|
||||||
|
*/
|
||||||
|
export type ProgressHandler = (message: string) => void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for ExecuteTool.
|
||||||
|
*/
|
||||||
|
export interface ExecuteToolOptions {
|
||||||
|
/** Auto-apply edits without confirmation */
|
||||||
|
autoApply?: boolean
|
||||||
|
/** Confirmation handler */
|
||||||
|
onConfirmation?: ConfirmationHandler
|
||||||
|
/** Progress handler */
|
||||||
|
onProgress?: ProgressHandler
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of tool execution.
|
||||||
|
*/
|
||||||
|
export interface ExecuteToolResult {
|
||||||
|
result: ToolResult
|
||||||
|
undoEntryCreated: boolean
|
||||||
|
undoEntryId?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use case for executing a single tool.
|
||||||
|
* Orchestrates tool execution with:
|
||||||
|
* - Parameter validation
|
||||||
|
* - Confirmation flow
|
||||||
|
* - Undo stack management
|
||||||
|
* - Storage updates
|
||||||
|
*/
|
||||||
|
export class ExecuteTool {
|
||||||
|
private readonly storage: IStorage
|
||||||
|
private readonly sessionStorage: ISessionStorage
|
||||||
|
private readonly tools: IToolRegistry
|
||||||
|
private readonly projectRoot: string
|
||||||
|
private lastUndoEntryId?: string
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
storage: IStorage,
|
||||||
|
sessionStorage: ISessionStorage,
|
||||||
|
tools: IToolRegistry,
|
||||||
|
projectRoot: string,
|
||||||
|
) {
|
||||||
|
this.storage = storage
|
||||||
|
this.sessionStorage = sessionStorage
|
||||||
|
this.tools = tools
|
||||||
|
this.projectRoot = projectRoot
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a tool call.
|
||||||
|
*
|
||||||
|
* @param toolCall - The tool call to execute
|
||||||
|
* @param session - Current session (for undo stack)
|
||||||
|
* @param options - Execution options
|
||||||
|
* @returns Execution result
|
||||||
|
*/
|
||||||
|
async execute(
|
||||||
|
toolCall: ToolCall,
|
||||||
|
session: Session,
|
||||||
|
options: ExecuteToolOptions = {},
|
||||||
|
): Promise<ExecuteToolResult> {
|
||||||
|
this.lastUndoEntryId = undefined
|
||||||
|
const startTime = Date.now()
|
||||||
|
const tool = this.tools.get(toolCall.name)
|
||||||
|
|
||||||
|
if (!tool) {
|
||||||
|
return {
|
||||||
|
result: createErrorResult(
|
||||||
|
toolCall.id,
|
||||||
|
`Unknown tool: ${toolCall.name}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
),
|
||||||
|
undoEntryCreated: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const validationError = tool.validateParams(toolCall.params)
|
||||||
|
if (validationError) {
|
||||||
|
return {
|
||||||
|
result: createErrorResult(toolCall.id, validationError, Date.now() - startTime),
|
||||||
|
undoEntryCreated: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const context = this.buildToolContext(toolCall, session, options)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await tool.execute(toolCall.params, context)
|
||||||
|
|
||||||
|
return {
|
||||||
|
result,
|
||||||
|
undoEntryCreated: this.lastUndoEntryId !== undefined,
|
||||||
|
undoEntryId: this.lastUndoEntryId,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
result: createErrorResult(toolCall.id, errorMessage, Date.now() - startTime),
|
||||||
|
undoEntryCreated: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build tool context for execution.
|
||||||
|
*/
|
||||||
|
private buildToolContext(
|
||||||
|
toolCall: ToolCall,
|
||||||
|
session: Session,
|
||||||
|
options: ExecuteToolOptions,
|
||||||
|
): ToolContext {
|
||||||
|
return {
|
||||||
|
projectRoot: this.projectRoot,
|
||||||
|
storage: this.storage,
|
||||||
|
requestConfirmation: async (msg: string, diff?: DiffInfo) => {
|
||||||
|
return this.handleConfirmation(msg, diff, toolCall, session, options)
|
||||||
|
},
|
||||||
|
onProgress: (msg: string) => {
|
||||||
|
options.onProgress?.(msg)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle confirmation for tool actions.
|
||||||
|
* Supports edited content from user.
|
||||||
|
*/
|
||||||
|
private async handleConfirmation(
|
||||||
|
msg: string,
|
||||||
|
diff: DiffInfo | undefined,
|
||||||
|
toolCall: ToolCall,
|
||||||
|
session: Session,
|
||||||
|
options: ExecuteToolOptions,
|
||||||
|
): Promise<boolean> {
|
||||||
|
if (options.autoApply) {
|
||||||
|
if (diff) {
|
||||||
|
this.lastUndoEntryId = await this.createUndoEntry(diff, toolCall, session)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options.onConfirmation) {
|
||||||
|
const result = await options.onConfirmation(msg, diff)
|
||||||
|
|
||||||
|
const confirmed = typeof result === "boolean" ? result : result.confirmed
|
||||||
|
const editedContent = typeof result === "boolean" ? undefined : result.editedContent
|
||||||
|
|
||||||
|
if (confirmed && diff) {
|
||||||
|
if (editedContent && editedContent.length > 0) {
|
||||||
|
diff.newLines = editedContent
|
||||||
|
if (toolCall.params.content && typeof toolCall.params.content === "string") {
|
||||||
|
toolCall.params.content = editedContent.join("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.lastUndoEntryId = await this.createUndoEntry(diff, toolCall, session)
|
||||||
|
}
|
||||||
|
|
||||||
|
return confirmed
|
||||||
|
}
|
||||||
|
|
||||||
|
if (diff) {
|
||||||
|
this.lastUndoEntryId = await this.createUndoEntry(diff, toolCall, session)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create undo entry from diff.
|
||||||
|
*/
|
||||||
|
private async createUndoEntry(
|
||||||
|
diff: DiffInfo,
|
||||||
|
toolCall: ToolCall,
|
||||||
|
session: Session,
|
||||||
|
): Promise<string> {
|
||||||
|
const entryId = randomUUID()
|
||||||
|
const entry = createUndoEntry(
|
||||||
|
entryId,
|
||||||
|
diff.filePath,
|
||||||
|
diff.oldLines,
|
||||||
|
diff.newLines,
|
||||||
|
`${toolCall.name}: ${diff.filePath}`,
|
||||||
|
toolCall.id,
|
||||||
|
)
|
||||||
|
|
||||||
|
session.addUndoEntry(entry)
|
||||||
|
await this.sessionStorage.pushUndoEntry(session.id, entry)
|
||||||
|
session.stats.editsApplied++
|
||||||
|
|
||||||
|
return entryId
|
||||||
|
}
|
||||||
|
}
|
||||||
352
packages/ipuaro/src/application/use-cases/HandleMessage.ts
Normal file
352
packages/ipuaro/src/application/use-cases/HandleMessage.ts
Normal file
@@ -0,0 +1,352 @@
|
|||||||
|
import type { Session } from "../../domain/entities/Session.js"
|
||||||
|
import type { ILLMClient } from "../../domain/services/ILLMClient.js"
|
||||||
|
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"
|
||||||
|
import type { IStorage } from "../../domain/services/IStorage.js"
|
||||||
|
import type { DiffInfo } from "../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
type ChatMessage,
|
||||||
|
createAssistantMessage,
|
||||||
|
createSystemMessage,
|
||||||
|
createToolMessage,
|
||||||
|
createUserMessage,
|
||||||
|
} from "../../domain/value-objects/ChatMessage.js"
|
||||||
|
import type { ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||||
|
import type { ToolResult } from "../../domain/value-objects/ToolResult.js"
|
||||||
|
import type { UndoEntry } from "../../domain/value-objects/UndoEntry.js"
|
||||||
|
import { type ErrorOption, IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
import {
|
||||||
|
buildInitialContext,
|
||||||
|
type ProjectStructure,
|
||||||
|
SYSTEM_PROMPT,
|
||||||
|
} from "../../infrastructure/llm/prompts.js"
|
||||||
|
import { parseToolCalls } from "../../infrastructure/llm/ResponseParser.js"
|
||||||
|
import type { IToolRegistry } from "../interfaces/IToolRegistry.js"
|
||||||
|
import { ContextManager } from "./ContextManager.js"
|
||||||
|
import { type ConfirmationResult, ExecuteTool } from "./ExecuteTool.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Status during message handling.
|
||||||
|
*/
|
||||||
|
export type HandleMessageStatus =
|
||||||
|
| "ready"
|
||||||
|
| "thinking"
|
||||||
|
| "tool_call"
|
||||||
|
| "awaiting_confirmation"
|
||||||
|
| "error"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Edit request for confirmation.
|
||||||
|
*/
|
||||||
|
export interface EditRequest {
|
||||||
|
toolCall: ToolCall
|
||||||
|
filePath: string
|
||||||
|
description: string
|
||||||
|
diff?: DiffInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* User's choice for edit confirmation.
|
||||||
|
*/
|
||||||
|
export type EditChoice = "apply" | "skip" | "edit" | "abort"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Event callbacks for HandleMessage.
|
||||||
|
*/
|
||||||
|
export interface HandleMessageEvents {
|
||||||
|
onMessage?: (message: ChatMessage) => void
|
||||||
|
onToolCall?: (call: ToolCall) => void
|
||||||
|
onToolResult?: (result: ToolResult) => void
|
||||||
|
onConfirmation?: (message: string, diff?: DiffInfo) => Promise<boolean | ConfirmationResult>
|
||||||
|
onError?: (error: IpuaroError) => Promise<ErrorOption>
|
||||||
|
onStatusChange?: (status: HandleMessageStatus) => void
|
||||||
|
onUndoEntry?: (entry: UndoEntry) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for HandleMessage.
|
||||||
|
*/
|
||||||
|
export interface HandleMessageOptions {
|
||||||
|
autoApply?: boolean
|
||||||
|
maxToolCalls?: number
|
||||||
|
maxHistoryMessages?: number
|
||||||
|
saveInputHistory?: boolean
|
||||||
|
contextConfig?: import("../../shared/constants/config.js").ContextConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
const DEFAULT_MAX_TOOL_CALLS = 20
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use case for handling a user message.
|
||||||
|
* Main orchestrator for the LLM interaction loop.
|
||||||
|
*/
|
||||||
|
export class HandleMessage {
|
||||||
|
private readonly storage: IStorage
|
||||||
|
private readonly sessionStorage: ISessionStorage
|
||||||
|
private readonly llm: ILLMClient
|
||||||
|
private readonly tools: IToolRegistry
|
||||||
|
private readonly contextManager: ContextManager
|
||||||
|
private readonly executeTool: ExecuteTool
|
||||||
|
private readonly projectRoot: string
|
||||||
|
private projectStructure?: ProjectStructure
|
||||||
|
|
||||||
|
private events: HandleMessageEvents = {}
|
||||||
|
private options: HandleMessageOptions = {}
|
||||||
|
private aborted = false
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
storage: IStorage,
|
||||||
|
sessionStorage: ISessionStorage,
|
||||||
|
llm: ILLMClient,
|
||||||
|
tools: IToolRegistry,
|
||||||
|
projectRoot: string,
|
||||||
|
contextConfig?: import("../../shared/constants/config.js").ContextConfig,
|
||||||
|
) {
|
||||||
|
this.storage = storage
|
||||||
|
this.sessionStorage = sessionStorage
|
||||||
|
this.llm = llm
|
||||||
|
this.tools = tools
|
||||||
|
this.projectRoot = projectRoot
|
||||||
|
this.contextManager = new ContextManager(llm.getContextWindowSize(), contextConfig)
|
||||||
|
this.executeTool = new ExecuteTool(storage, sessionStorage, tools, projectRoot)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set event callbacks.
|
||||||
|
*/
|
||||||
|
setEvents(events: HandleMessageEvents): void {
|
||||||
|
this.events = events
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set options.
|
||||||
|
*/
|
||||||
|
setOptions(options: HandleMessageOptions): void {
|
||||||
|
this.options = options
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set project structure for context building.
|
||||||
|
*/
|
||||||
|
setProjectStructure(structure: ProjectStructure): void {
|
||||||
|
this.projectStructure = structure
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Abort current processing.
|
||||||
|
*/
|
||||||
|
abort(): void {
|
||||||
|
this.aborted = true
|
||||||
|
this.llm.abort()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Truncate session history if maxHistoryMessages is set.
|
||||||
|
*/
|
||||||
|
private truncateHistoryIfNeeded(session: Session): void {
|
||||||
|
if (this.options.maxHistoryMessages !== undefined) {
|
||||||
|
session.truncateHistory(this.options.maxHistoryMessages)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the message handling flow.
|
||||||
|
*/
|
||||||
|
async execute(session: Session, message: string): Promise<void> {
|
||||||
|
this.aborted = false
|
||||||
|
this.contextManager.syncFromSession(session)
|
||||||
|
|
||||||
|
if (message.trim()) {
|
||||||
|
const userMessage = createUserMessage(message)
|
||||||
|
session.addMessage(userMessage)
|
||||||
|
this.truncateHistoryIfNeeded(session)
|
||||||
|
|
||||||
|
if (this.options.saveInputHistory !== false) {
|
||||||
|
session.addInputToHistory(message)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.emitMessage(userMessage)
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.sessionStorage.saveSession(session)
|
||||||
|
|
||||||
|
this.emitStatus("thinking")
|
||||||
|
|
||||||
|
let toolCallCount = 0
|
||||||
|
const maxToolCalls = this.options.maxToolCalls ?? DEFAULT_MAX_TOOL_CALLS
|
||||||
|
|
||||||
|
while (!this.aborted) {
|
||||||
|
const messages = await this.buildMessages(session)
|
||||||
|
|
||||||
|
const startTime = Date.now()
|
||||||
|
let response
|
||||||
|
|
||||||
|
try {
|
||||||
|
response = await this.llm.chat(messages)
|
||||||
|
} catch (error) {
|
||||||
|
await this.handleLLMError(error, session)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.aborted) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = parseToolCalls(response.content)
|
||||||
|
const timeMs = Date.now() - startTime
|
||||||
|
|
||||||
|
if (parsed.toolCalls.length === 0) {
|
||||||
|
const assistantMessage = createAssistantMessage(parsed.content, undefined, {
|
||||||
|
tokens: response.tokens,
|
||||||
|
timeMs,
|
||||||
|
toolCalls: 0,
|
||||||
|
})
|
||||||
|
session.addMessage(assistantMessage)
|
||||||
|
this.truncateHistoryIfNeeded(session)
|
||||||
|
this.emitMessage(assistantMessage)
|
||||||
|
this.contextManager.addTokens(response.tokens)
|
||||||
|
this.contextManager.updateSession(session)
|
||||||
|
await this.sessionStorage.saveSession(session)
|
||||||
|
this.emitStatus("ready")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const assistantMessage = createAssistantMessage(parsed.content, parsed.toolCalls, {
|
||||||
|
tokens: response.tokens,
|
||||||
|
timeMs,
|
||||||
|
toolCalls: parsed.toolCalls.length,
|
||||||
|
})
|
||||||
|
session.addMessage(assistantMessage)
|
||||||
|
this.truncateHistoryIfNeeded(session)
|
||||||
|
this.emitMessage(assistantMessage)
|
||||||
|
|
||||||
|
toolCallCount += parsed.toolCalls.length
|
||||||
|
if (toolCallCount > maxToolCalls) {
|
||||||
|
const errorMsg = `Maximum tool calls (${String(maxToolCalls)}) exceeded`
|
||||||
|
const errorMessage = createSystemMessage(errorMsg)
|
||||||
|
session.addMessage(errorMessage)
|
||||||
|
this.truncateHistoryIfNeeded(session)
|
||||||
|
this.emitMessage(errorMessage)
|
||||||
|
this.emitStatus("ready")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.emitStatus("tool_call")
|
||||||
|
|
||||||
|
const results: ToolResult[] = []
|
||||||
|
|
||||||
|
for (const toolCall of parsed.toolCalls) {
|
||||||
|
if (this.aborted) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.emitToolCall(toolCall)
|
||||||
|
|
||||||
|
const result = await this.executeToolCall(toolCall, session)
|
||||||
|
results.push(result)
|
||||||
|
this.emitToolResult(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
const toolMessage = createToolMessage(results)
|
||||||
|
session.addMessage(toolMessage)
|
||||||
|
this.truncateHistoryIfNeeded(session)
|
||||||
|
|
||||||
|
this.contextManager.addTokens(response.tokens)
|
||||||
|
|
||||||
|
if (this.contextManager.needsCompression()) {
|
||||||
|
await this.contextManager.compress(session, this.llm)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.contextManager.updateSession(session)
|
||||||
|
await this.sessionStorage.saveSession(session)
|
||||||
|
|
||||||
|
this.emitStatus("thinking")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async buildMessages(session: Session): Promise<ChatMessage[]> {
|
||||||
|
const messages: ChatMessage[] = []
|
||||||
|
|
||||||
|
messages.push(createSystemMessage(SYSTEM_PROMPT))
|
||||||
|
|
||||||
|
if (this.projectStructure) {
|
||||||
|
const asts = await this.storage.getAllASTs()
|
||||||
|
const metas = await this.storage.getAllMetas()
|
||||||
|
const context = buildInitialContext(this.projectStructure, asts, metas)
|
||||||
|
messages.push(createSystemMessage(context))
|
||||||
|
}
|
||||||
|
|
||||||
|
messages.push(...session.history)
|
||||||
|
|
||||||
|
return messages
|
||||||
|
}
|
||||||
|
|
||||||
|
private async executeToolCall(toolCall: ToolCall, session: Session): Promise<ToolResult> {
|
||||||
|
const { result, undoEntryCreated, undoEntryId } = await this.executeTool.execute(
|
||||||
|
toolCall,
|
||||||
|
session,
|
||||||
|
{
|
||||||
|
autoApply: this.options.autoApply,
|
||||||
|
onConfirmation: async (msg: string, diff?: DiffInfo) => {
|
||||||
|
this.emitStatus("awaiting_confirmation")
|
||||||
|
if (this.events.onConfirmation) {
|
||||||
|
return this.events.onConfirmation(msg, diff)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
},
|
||||||
|
onProgress: (_msg: string) => {
|
||||||
|
this.events.onStatusChange?.("tool_call")
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
if (undoEntryCreated && undoEntryId) {
|
||||||
|
const undoEntry = session.undoStack.find((entry) => entry.id === undoEntryId)
|
||||||
|
if (undoEntry) {
|
||||||
|
this.events.onUndoEntry?.(undoEntry)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
private async handleLLMError(error: unknown, session: Session): Promise<void> {
|
||||||
|
this.emitStatus("error")
|
||||||
|
|
||||||
|
const ipuaroError =
|
||||||
|
error instanceof IpuaroError
|
||||||
|
? error
|
||||||
|
: IpuaroError.llm(error instanceof Error ? error.message : String(error))
|
||||||
|
|
||||||
|
if (this.events.onError) {
|
||||||
|
const choice = await this.events.onError(ipuaroError)
|
||||||
|
|
||||||
|
if (choice === "retry") {
|
||||||
|
this.emitStatus("thinking")
|
||||||
|
return this.execute(session, "")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const errorMessage = createSystemMessage(`Error: ${ipuaroError.message}`)
|
||||||
|
session.addMessage(errorMessage)
|
||||||
|
this.truncateHistoryIfNeeded(session)
|
||||||
|
this.emitMessage(errorMessage)
|
||||||
|
|
||||||
|
this.emitStatus("ready")
|
||||||
|
}
|
||||||
|
|
||||||
|
private emitMessage(message: ChatMessage): void {
|
||||||
|
this.events.onMessage?.(message)
|
||||||
|
}
|
||||||
|
|
||||||
|
private emitToolCall(call: ToolCall): void {
|
||||||
|
this.events.onToolCall?.(call)
|
||||||
|
}
|
||||||
|
|
||||||
|
private emitToolResult(result: ToolResult): void {
|
||||||
|
this.events.onToolResult?.(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
private emitStatus(status: HandleMessageStatus): void {
|
||||||
|
this.events.onStatusChange?.(status)
|
||||||
|
}
|
||||||
|
}
|
||||||
184
packages/ipuaro/src/application/use-cases/IndexProject.ts
Normal file
184
packages/ipuaro/src/application/use-cases/IndexProject.ts
Normal file
@@ -0,0 +1,184 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import type { IStorage } from "../../domain/services/IStorage.js"
|
||||||
|
import type { IndexingStats, IndexProgress } from "../../domain/services/IIndexer.js"
|
||||||
|
import { FileScanner } from "../../infrastructure/indexer/FileScanner.js"
|
||||||
|
import { ASTParser } from "../../infrastructure/indexer/ASTParser.js"
|
||||||
|
import { MetaAnalyzer } from "../../infrastructure/indexer/MetaAnalyzer.js"
|
||||||
|
import { IndexBuilder } from "../../infrastructure/indexer/IndexBuilder.js"
|
||||||
|
import { createFileData, type FileData } from "../../domain/value-objects/FileData.js"
|
||||||
|
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||||
|
import { md5 } from "../../shared/utils/hash.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for indexing a project.
|
||||||
|
*/
|
||||||
|
export interface IndexProjectOptions {
|
||||||
|
/** Additional ignore patterns */
|
||||||
|
additionalIgnore?: string[]
|
||||||
|
/** Progress callback */
|
||||||
|
onProgress?: (progress: IndexProgress) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use case for indexing a project.
|
||||||
|
* Orchestrates the full indexing pipeline:
|
||||||
|
* 1. Scan files
|
||||||
|
* 2. Parse AST
|
||||||
|
* 3. Analyze metadata
|
||||||
|
* 4. Build indexes
|
||||||
|
* 5. Store in Redis
|
||||||
|
*/
|
||||||
|
export class IndexProject {
|
||||||
|
private readonly storage: IStorage
|
||||||
|
private readonly scanner: FileScanner
|
||||||
|
private readonly parser: ASTParser
|
||||||
|
private readonly metaAnalyzer: MetaAnalyzer
|
||||||
|
private readonly indexBuilder: IndexBuilder
|
||||||
|
|
||||||
|
constructor(storage: IStorage, projectRoot: string) {
|
||||||
|
this.storage = storage
|
||||||
|
this.scanner = new FileScanner()
|
||||||
|
this.parser = new ASTParser()
|
||||||
|
this.metaAnalyzer = new MetaAnalyzer(projectRoot)
|
||||||
|
this.indexBuilder = new IndexBuilder(projectRoot)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the indexing pipeline.
|
||||||
|
*
|
||||||
|
* @param projectRoot - Absolute path to project root
|
||||||
|
* @param options - Optional configuration
|
||||||
|
* @returns Indexing statistics
|
||||||
|
*/
|
||||||
|
async execute(projectRoot: string, options: IndexProjectOptions = {}): Promise<IndexingStats> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const stats: IndexingStats = {
|
||||||
|
filesScanned: 0,
|
||||||
|
filesParsed: 0,
|
||||||
|
parseErrors: 0,
|
||||||
|
timeMs: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
const fileDataMap = new Map<string, FileData>()
|
||||||
|
const astMap = new Map<string, FileAST>()
|
||||||
|
const contentMap = new Map<string, string>()
|
||||||
|
|
||||||
|
// Phase 1: Scanning
|
||||||
|
this.reportProgress(options.onProgress, 0, 0, "", "scanning")
|
||||||
|
|
||||||
|
const scanResults = await this.scanner.scanAll(projectRoot)
|
||||||
|
stats.filesScanned = scanResults.length
|
||||||
|
|
||||||
|
// Phase 2: Parsing
|
||||||
|
let current = 0
|
||||||
|
const total = scanResults.length
|
||||||
|
|
||||||
|
for (const scanResult of scanResults) {
|
||||||
|
current++
|
||||||
|
const fullPath = path.join(projectRoot, scanResult.path)
|
||||||
|
this.reportProgress(options.onProgress, current, total, scanResult.path, "parsing")
|
||||||
|
|
||||||
|
const content = await FileScanner.readFileContent(fullPath)
|
||||||
|
if (!content) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
contentMap.set(scanResult.path, content)
|
||||||
|
|
||||||
|
const lines = content.split("\n")
|
||||||
|
const hash = md5(content)
|
||||||
|
|
||||||
|
const fileData = createFileData(lines, hash, scanResult.size, scanResult.lastModified)
|
||||||
|
fileDataMap.set(scanResult.path, fileData)
|
||||||
|
|
||||||
|
const language = this.detectLanguage(scanResult.path)
|
||||||
|
if (!language) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const ast = this.parser.parse(content, language)
|
||||||
|
astMap.set(scanResult.path, ast)
|
||||||
|
|
||||||
|
stats.filesParsed++
|
||||||
|
if (ast.parseError) {
|
||||||
|
stats.parseErrors++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 3: Analyzing metadata
|
||||||
|
current = 0
|
||||||
|
for (const [filePath, ast] of astMap) {
|
||||||
|
current++
|
||||||
|
this.reportProgress(options.onProgress, current, astMap.size, filePath, "analyzing")
|
||||||
|
|
||||||
|
const content = contentMap.get(filePath)
|
||||||
|
if (!content) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const fullPath = path.join(projectRoot, filePath)
|
||||||
|
const meta = this.metaAnalyzer.analyze(fullPath, ast, content, astMap)
|
||||||
|
|
||||||
|
await this.storage.setMeta(filePath, meta)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Phase 4: Building indexes
|
||||||
|
this.reportProgress(options.onProgress, 1, 1, "Building indexes", "indexing")
|
||||||
|
|
||||||
|
const symbolIndex = this.indexBuilder.buildSymbolIndex(astMap)
|
||||||
|
const depsGraph = this.indexBuilder.buildDepsGraph(astMap)
|
||||||
|
|
||||||
|
// Phase 5: Store everything
|
||||||
|
for (const [filePath, fileData] of fileDataMap) {
|
||||||
|
await this.storage.setFile(filePath, fileData)
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [filePath, ast] of astMap) {
|
||||||
|
await this.storage.setAST(filePath, ast)
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.storage.setSymbolIndex(symbolIndex)
|
||||||
|
await this.storage.setDepsGraph(depsGraph)
|
||||||
|
|
||||||
|
// Store last indexed timestamp
|
||||||
|
await this.storage.setProjectConfig("last_indexed", Date.now())
|
||||||
|
|
||||||
|
stats.timeMs = Date.now() - startTime
|
||||||
|
|
||||||
|
return stats
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Detect language from file extension.
|
||||||
|
*/
|
||||||
|
private detectLanguage(filePath: string): "ts" | "tsx" | "js" | "jsx" | null {
|
||||||
|
const ext = path.extname(filePath).toLowerCase()
|
||||||
|
switch (ext) {
|
||||||
|
case ".ts":
|
||||||
|
return "ts"
|
||||||
|
case ".tsx":
|
||||||
|
return "tsx"
|
||||||
|
case ".js":
|
||||||
|
return "js"
|
||||||
|
case ".jsx":
|
||||||
|
return "jsx"
|
||||||
|
default:
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Report progress to callback if provided.
|
||||||
|
*/
|
||||||
|
private reportProgress(
|
||||||
|
callback: ((progress: IndexProgress) => void) | undefined,
|
||||||
|
current: number,
|
||||||
|
total: number,
|
||||||
|
currentFile: string,
|
||||||
|
phase: IndexProgress["phase"],
|
||||||
|
): void {
|
||||||
|
if (callback) {
|
||||||
|
callback({ current, total, currentFile, phase })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
62
packages/ipuaro/src/application/use-cases/StartSession.ts
Normal file
62
packages/ipuaro/src/application/use-cases/StartSession.ts
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
import { randomUUID } from "node:crypto"
|
||||||
|
import { Session } from "../../domain/entities/Session.js"
|
||||||
|
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for starting a session.
|
||||||
|
*/
|
||||||
|
export interface StartSessionOptions {
|
||||||
|
/** Force creation of a new session even if one exists */
|
||||||
|
forceNew?: boolean
|
||||||
|
/** Specific session ID to load */
|
||||||
|
sessionId?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of starting a session.
|
||||||
|
*/
|
||||||
|
export interface StartSessionResult {
|
||||||
|
session: Session
|
||||||
|
isNew: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use case for starting a session.
|
||||||
|
* Creates a new session or loads the latest one for a project.
|
||||||
|
*/
|
||||||
|
export class StartSession {
|
||||||
|
constructor(private readonly sessionStorage: ISessionStorage) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the use case.
|
||||||
|
*
|
||||||
|
* @param projectName - The project name to start a session for
|
||||||
|
* @param options - Optional configuration
|
||||||
|
* @returns The session and whether it was newly created
|
||||||
|
*/
|
||||||
|
async execute(
|
||||||
|
projectName: string,
|
||||||
|
options: StartSessionOptions = {},
|
||||||
|
): Promise<StartSessionResult> {
|
||||||
|
if (options.sessionId) {
|
||||||
|
const session = await this.sessionStorage.loadSession(options.sessionId)
|
||||||
|
if (session) {
|
||||||
|
await this.sessionStorage.touchSession(session.id)
|
||||||
|
return { session, isNew: false }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.forceNew) {
|
||||||
|
const latestSession = await this.sessionStorage.getLatestSession(projectName)
|
||||||
|
if (latestSession) {
|
||||||
|
await this.sessionStorage.touchSession(latestSession.id)
|
||||||
|
return { session: latestSession, isNew: false }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const session = new Session(randomUUID(), projectName)
|
||||||
|
await this.sessionStorage.saveSession(session)
|
||||||
|
|
||||||
|
return { session, isNew: true }
|
||||||
|
}
|
||||||
|
}
|
||||||
119
packages/ipuaro/src/application/use-cases/UndoChange.ts
Normal file
119
packages/ipuaro/src/application/use-cases/UndoChange.ts
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import type { Session } from "../../domain/entities/Session.js"
|
||||||
|
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"
|
||||||
|
import type { IStorage } from "../../domain/services/IStorage.js"
|
||||||
|
import { canUndo, type UndoEntry } from "../../domain/value-objects/UndoEntry.js"
|
||||||
|
import { md5 } from "../../shared/utils/hash.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of undo operation.
|
||||||
|
*/
|
||||||
|
export interface UndoResult {
|
||||||
|
success: boolean
|
||||||
|
entry?: UndoEntry
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use case for undoing the last file change.
|
||||||
|
*/
|
||||||
|
export class UndoChange {
|
||||||
|
constructor(
|
||||||
|
private readonly sessionStorage: ISessionStorage,
|
||||||
|
private readonly storage: IStorage,
|
||||||
|
) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute undo operation.
|
||||||
|
*
|
||||||
|
* @param session - The current session
|
||||||
|
* @returns Result of the undo operation
|
||||||
|
*/
|
||||||
|
async execute(session: Session): Promise<UndoResult> {
|
||||||
|
const entry = await this.sessionStorage.popUndoEntry(session.id)
|
||||||
|
if (!entry) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: "No changes to undo",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const currentContent = await this.readCurrentContent(entry.filePath)
|
||||||
|
|
||||||
|
if (!canUndo(entry, currentContent)) {
|
||||||
|
await this.sessionStorage.pushUndoEntry(session.id, entry)
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
entry,
|
||||||
|
error: "File has been modified since the change was made",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.restoreContent(entry.filePath, entry.previousContent)
|
||||||
|
|
||||||
|
session.popUndoEntry()
|
||||||
|
session.stats.editsApplied--
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
entry,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
await this.sessionStorage.pushUndoEntry(session.id, entry)
|
||||||
|
|
||||||
|
const message = error instanceof Error ? error.message : "Unknown error"
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
entry,
|
||||||
|
error: `Failed to undo: ${message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if undo is available.
|
||||||
|
*/
|
||||||
|
async canUndo(session: Session): Promise<boolean> {
|
||||||
|
const stack = await this.sessionStorage.getUndoStack(session.id)
|
||||||
|
return stack.length > 0
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the next undo entry without removing it.
|
||||||
|
*/
|
||||||
|
async peekUndoEntry(session: Session): Promise<UndoEntry | null> {
|
||||||
|
const stack = await this.sessionStorage.getUndoStack(session.id)
|
||||||
|
if (stack.length === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return stack[stack.length - 1]
|
||||||
|
}
|
||||||
|
|
||||||
|
private async readCurrentContent(filePath: string): Promise<string[]> {
|
||||||
|
try {
|
||||||
|
const content = await fs.readFile(filePath, "utf-8")
|
||||||
|
return content.split("\n")
|
||||||
|
} catch (error) {
|
||||||
|
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async restoreContent(filePath: string, content: string[]): Promise<void> {
|
||||||
|
const fileContent = content.join("\n")
|
||||||
|
await fs.writeFile(filePath, fileContent, "utf-8")
|
||||||
|
|
||||||
|
const hash = md5(fileContent)
|
||||||
|
const stats = await fs.stat(filePath)
|
||||||
|
|
||||||
|
await this.storage.setFile(filePath, {
|
||||||
|
lines: content,
|
||||||
|
hash,
|
||||||
|
size: stats.size,
|
||||||
|
lastModified: stats.mtimeMs,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,4 +1,8 @@
|
|||||||
/*
|
// Application Use Cases
|
||||||
* Application Use Cases
|
|
||||||
* Will be implemented in version 0.10.0+
|
export * from "./StartSession.js"
|
||||||
*/
|
export * from "./HandleMessage.js"
|
||||||
|
export * from "./UndoChange.js"
|
||||||
|
export * from "./ContextManager.js"
|
||||||
|
export * from "./IndexProject.js"
|
||||||
|
export * from "./ExecuteTool.js"
|
||||||
|
|||||||
148
packages/ipuaro/src/cli/commands/index-cmd.ts
Normal file
148
packages/ipuaro/src/cli/commands/index-cmd.ts
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
/**
|
||||||
|
* Index command implementation.
|
||||||
|
* Indexes project without starting TUI.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as path from "node:path"
|
||||||
|
import { RedisClient } from "../../infrastructure/storage/RedisClient.js"
|
||||||
|
import { RedisStorage } from "../../infrastructure/storage/RedisStorage.js"
|
||||||
|
import { generateProjectName } from "../../infrastructure/storage/schema.js"
|
||||||
|
import { IndexProject } from "../../application/use-cases/IndexProject.js"
|
||||||
|
import { type Config, DEFAULT_CONFIG } from "../../shared/constants/config.js"
|
||||||
|
import { checkRedis } from "./onboarding.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of index command.
|
||||||
|
*/
|
||||||
|
export interface IndexResult {
|
||||||
|
success: boolean
|
||||||
|
filesIndexed: number
|
||||||
|
filesSkipped: number
|
||||||
|
errors: string[]
|
||||||
|
duration: number
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Progress callback for indexing.
|
||||||
|
*/
|
||||||
|
export type IndexProgressCallback = (
|
||||||
|
phase: "scanning" | "parsing" | "analyzing" | "storing",
|
||||||
|
current: number,
|
||||||
|
total: number,
|
||||||
|
currentFile?: string,
|
||||||
|
) => void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the index command.
|
||||||
|
*/
|
||||||
|
export async function executeIndex(
|
||||||
|
projectPath: string,
|
||||||
|
config: Config = DEFAULT_CONFIG,
|
||||||
|
onProgress?: IndexProgressCallback,
|
||||||
|
): Promise<IndexResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const resolvedPath = path.resolve(projectPath)
|
||||||
|
const projectName = generateProjectName(resolvedPath)
|
||||||
|
|
||||||
|
console.warn(`📁 Indexing project: ${resolvedPath}`)
|
||||||
|
console.warn(` Project name: ${projectName}\n`)
|
||||||
|
|
||||||
|
const redisResult = await checkRedis(config.redis)
|
||||||
|
if (!redisResult.ok) {
|
||||||
|
console.error(`❌ ${redisResult.error ?? "Redis unavailable"}`)
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
filesIndexed: 0,
|
||||||
|
filesSkipped: 0,
|
||||||
|
errors: [redisResult.error ?? "Redis unavailable"],
|
||||||
|
duration: Date.now() - startTime,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let redisClient: RedisClient | null = null
|
||||||
|
|
||||||
|
try {
|
||||||
|
redisClient = new RedisClient(config.redis)
|
||||||
|
await redisClient.connect()
|
||||||
|
|
||||||
|
const storage = new RedisStorage(redisClient, projectName)
|
||||||
|
const indexProject = new IndexProject(storage, resolvedPath)
|
||||||
|
|
||||||
|
let lastPhase: "scanning" | "parsing" | "analyzing" | "indexing" = "scanning"
|
||||||
|
let lastProgress = 0
|
||||||
|
|
||||||
|
const stats = await indexProject.execute(resolvedPath, {
|
||||||
|
onProgress: (progress) => {
|
||||||
|
if (progress.phase !== lastPhase) {
|
||||||
|
if (lastPhase === "scanning") {
|
||||||
|
console.warn(` Found ${String(progress.total)} files\n`)
|
||||||
|
} else if (lastProgress > 0) {
|
||||||
|
console.warn("")
|
||||||
|
}
|
||||||
|
|
||||||
|
const phaseLabels = {
|
||||||
|
scanning: "🔍 Scanning files...",
|
||||||
|
parsing: "📝 Parsing files...",
|
||||||
|
analyzing: "📊 Analyzing metadata...",
|
||||||
|
indexing: "🏗️ Building indexes...",
|
||||||
|
}
|
||||||
|
console.warn(phaseLabels[progress.phase])
|
||||||
|
lastPhase = progress.phase
|
||||||
|
}
|
||||||
|
|
||||||
|
if (progress.phase === "indexing") {
|
||||||
|
onProgress?.("storing", progress.current, progress.total)
|
||||||
|
} else {
|
||||||
|
onProgress?.(
|
||||||
|
progress.phase,
|
||||||
|
progress.current,
|
||||||
|
progress.total,
|
||||||
|
progress.currentFile,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
progress.current % 50 === 0 &&
|
||||||
|
progress.phase !== "scanning" &&
|
||||||
|
progress.phase !== "indexing"
|
||||||
|
) {
|
||||||
|
process.stdout.write(
|
||||||
|
`\r ${progress.phase === "parsing" ? "Parsed" : "Analyzed"} ${String(progress.current)}/${String(progress.total)} files...`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
lastProgress = progress.current
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
const symbolIndex = await storage.getSymbolIndex()
|
||||||
|
const durationSec = (stats.timeMs / 1000).toFixed(2)
|
||||||
|
|
||||||
|
console.warn(`\n✅ Indexing complete in ${durationSec}s`)
|
||||||
|
console.warn(` Files scanned: ${String(stats.filesScanned)}`)
|
||||||
|
console.warn(` Files parsed: ${String(stats.filesParsed)}`)
|
||||||
|
console.warn(` Parse errors: ${String(stats.parseErrors)}`)
|
||||||
|
console.warn(` Symbols: ${String(symbolIndex.size)}`)
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
filesIndexed: stats.filesParsed,
|
||||||
|
filesSkipped: stats.filesScanned - stats.filesParsed,
|
||||||
|
errors: [],
|
||||||
|
duration: stats.timeMs,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
console.error(`❌ Indexing failed: ${message}`)
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
filesIndexed: 0,
|
||||||
|
filesSkipped: 0,
|
||||||
|
errors: [message],
|
||||||
|
duration: Date.now() - startTime,
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
if (redisClient) {
|
||||||
|
await redisClient.disconnect()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
18
packages/ipuaro/src/cli/commands/index.ts
Normal file
18
packages/ipuaro/src/cli/commands/index.ts
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
/**
|
||||||
|
* CLI commands module.
|
||||||
|
*/
|
||||||
|
|
||||||
|
export { executeStart, type StartOptions, type StartResult } from "./start.js"
|
||||||
|
export { executeInit, type InitOptions, type InitResult } from "./init.js"
|
||||||
|
export { executeIndex, type IndexResult, type IndexProgressCallback } from "./index-cmd.js"
|
||||||
|
export {
|
||||||
|
runOnboarding,
|
||||||
|
checkRedis,
|
||||||
|
checkOllama,
|
||||||
|
checkModel,
|
||||||
|
checkProjectSize,
|
||||||
|
pullModel,
|
||||||
|
type OnboardingResult,
|
||||||
|
type OnboardingOptions,
|
||||||
|
} from "./onboarding.js"
|
||||||
|
export { registerAllTools } from "./tools-setup.js"
|
||||||
114
packages/ipuaro/src/cli/commands/init.ts
Normal file
114
packages/ipuaro/src/cli/commands/init.ts
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
/**
|
||||||
|
* Init command implementation.
|
||||||
|
* Creates .ipuaro.json configuration file.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as fs from "node:fs/promises"
|
||||||
|
import * as path from "node:path"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Default configuration template for .ipuaro.json
|
||||||
|
*/
|
||||||
|
const CONFIG_TEMPLATE = {
|
||||||
|
$schema: "https://raw.githubusercontent.com/samiyev/puaros/main/packages/ipuaro/schema.json",
|
||||||
|
redis: {
|
||||||
|
host: "localhost",
|
||||||
|
port: 6379,
|
||||||
|
db: 0,
|
||||||
|
},
|
||||||
|
llm: {
|
||||||
|
model: "qwen2.5-coder:7b-instruct",
|
||||||
|
temperature: 0.1,
|
||||||
|
host: "http://localhost:11434",
|
||||||
|
},
|
||||||
|
project: {
|
||||||
|
ignorePatterns: [],
|
||||||
|
},
|
||||||
|
edit: {
|
||||||
|
autoApply: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for init command.
|
||||||
|
*/
|
||||||
|
export interface InitOptions {
|
||||||
|
force?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of init command.
|
||||||
|
*/
|
||||||
|
export interface InitResult {
|
||||||
|
success: boolean
|
||||||
|
filePath?: string
|
||||||
|
error?: string
|
||||||
|
skipped?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the init command.
|
||||||
|
* Creates a .ipuaro.json file in the specified directory.
|
||||||
|
*/
|
||||||
|
export async function executeInit(
|
||||||
|
projectPath = ".",
|
||||||
|
options: InitOptions = {},
|
||||||
|
): Promise<InitResult> {
|
||||||
|
const resolvedPath = path.resolve(projectPath)
|
||||||
|
const configPath = path.join(resolvedPath, ".ipuaro.json")
|
||||||
|
|
||||||
|
try {
|
||||||
|
const exists = await fileExists(configPath)
|
||||||
|
|
||||||
|
if (exists && !options.force) {
|
||||||
|
console.warn(`⚠️ Configuration file already exists: ${configPath}`)
|
||||||
|
console.warn(" Use --force to overwrite.")
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
skipped: true,
|
||||||
|
filePath: configPath,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const dirExists = await fileExists(resolvedPath)
|
||||||
|
if (!dirExists) {
|
||||||
|
await fs.mkdir(resolvedPath, { recursive: true })
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = JSON.stringify(CONFIG_TEMPLATE, null, 4)
|
||||||
|
await fs.writeFile(configPath, content, "utf-8")
|
||||||
|
|
||||||
|
console.warn(`✅ Created ${configPath}`)
|
||||||
|
console.warn("\nConfiguration options:")
|
||||||
|
console.warn(" redis.host - Redis server host (default: localhost)")
|
||||||
|
console.warn(" redis.port - Redis server port (default: 6379)")
|
||||||
|
console.warn(" llm.model - Ollama model name (default: qwen2.5-coder:7b-instruct)")
|
||||||
|
console.warn(" llm.temperature - LLM temperature (default: 0.1)")
|
||||||
|
console.warn(" edit.autoApply - Auto-apply edits without confirmation (default: false)")
|
||||||
|
console.warn("\nRun `ipuaro` to start the AI agent.")
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
filePath: configPath,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
console.error(`❌ Failed to create configuration: ${message}`)
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: message,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a file or directory exists.
|
||||||
|
*/
|
||||||
|
async function fileExists(filePath: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await fs.access(filePath)
|
||||||
|
return true
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
290
packages/ipuaro/src/cli/commands/onboarding.ts
Normal file
290
packages/ipuaro/src/cli/commands/onboarding.ts
Normal file
@@ -0,0 +1,290 @@
|
|||||||
|
/**
|
||||||
|
* Onboarding checks for CLI.
|
||||||
|
* Validates environment before starting ipuaro.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { RedisClient } from "../../infrastructure/storage/RedisClient.js"
|
||||||
|
import { OllamaClient } from "../../infrastructure/llm/OllamaClient.js"
|
||||||
|
import { FileScanner } from "../../infrastructure/indexer/FileScanner.js"
|
||||||
|
import type { LLMConfig, RedisConfig } from "../../shared/constants/config.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of onboarding checks.
|
||||||
|
*/
|
||||||
|
export interface OnboardingResult {
|
||||||
|
success: boolean
|
||||||
|
redisOk: boolean
|
||||||
|
ollamaOk: boolean
|
||||||
|
modelOk: boolean
|
||||||
|
projectOk: boolean
|
||||||
|
fileCount: number
|
||||||
|
errors: string[]
|
||||||
|
warnings: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for onboarding checks.
|
||||||
|
*/
|
||||||
|
export interface OnboardingOptions {
|
||||||
|
redisConfig: RedisConfig
|
||||||
|
llmConfig: LLMConfig
|
||||||
|
projectPath: string
|
||||||
|
maxFiles?: number
|
||||||
|
skipRedis?: boolean
|
||||||
|
skipOllama?: boolean
|
||||||
|
skipModel?: boolean
|
||||||
|
skipProject?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
const DEFAULT_MAX_FILES = 10_000
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check Redis availability.
|
||||||
|
*/
|
||||||
|
export async function checkRedis(config: RedisConfig): Promise<{
|
||||||
|
ok: boolean
|
||||||
|
error?: string
|
||||||
|
}> {
|
||||||
|
const client = new RedisClient(config)
|
||||||
|
|
||||||
|
try {
|
||||||
|
await client.connect()
|
||||||
|
const pingOk = await client.ping()
|
||||||
|
await client.disconnect()
|
||||||
|
|
||||||
|
if (!pingOk) {
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
error: "Redis ping failed. Server may be overloaded.",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: true }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
error: `Cannot connect to Redis: ${message}
|
||||||
|
|
||||||
|
Redis is required for ipuaro to store project indexes and session data.
|
||||||
|
|
||||||
|
Install Redis:
|
||||||
|
macOS: brew install redis && brew services start redis
|
||||||
|
Ubuntu: sudo apt install redis-server && sudo systemctl start redis
|
||||||
|
Docker: docker run -d -p 6379:6379 redis`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check Ollama availability.
|
||||||
|
*/
|
||||||
|
export async function checkOllama(config: LLMConfig): Promise<{
|
||||||
|
ok: boolean
|
||||||
|
error?: string
|
||||||
|
}> {
|
||||||
|
const client = new OllamaClient(config)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const available = await client.isAvailable()
|
||||||
|
|
||||||
|
if (!available) {
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
error: `Cannot connect to Ollama at ${config.host}
|
||||||
|
|
||||||
|
Ollama is required for ipuaro to process your requests using local LLMs.
|
||||||
|
|
||||||
|
Install Ollama:
|
||||||
|
macOS: brew install ollama && ollama serve
|
||||||
|
Linux: curl -fsSL https://ollama.com/install.sh | sh && ollama serve
|
||||||
|
Manual: https://ollama.com/download
|
||||||
|
|
||||||
|
After installing, ensure Ollama is running with: ollama serve`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: true }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
error: `Ollama check failed: ${message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check model availability.
|
||||||
|
*/
|
||||||
|
export async function checkModel(config: LLMConfig): Promise<{
|
||||||
|
ok: boolean
|
||||||
|
needsPull: boolean
|
||||||
|
error?: string
|
||||||
|
}> {
|
||||||
|
const client = new OllamaClient(config)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const hasModel = await client.hasModel(config.model)
|
||||||
|
|
||||||
|
if (!hasModel) {
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
needsPull: true,
|
||||||
|
error: `Model "${config.model}" is not installed.
|
||||||
|
|
||||||
|
Would you like to pull it? This may take a few minutes.
|
||||||
|
Run: ollama pull ${config.model}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: true, needsPull: false }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
needsPull: false,
|
||||||
|
error: `Model check failed: ${message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pull model from Ollama.
|
||||||
|
*/
|
||||||
|
export async function pullModel(
|
||||||
|
config: LLMConfig,
|
||||||
|
onProgress?: (status: string) => void,
|
||||||
|
): Promise<{ ok: boolean; error?: string }> {
|
||||||
|
const client = new OllamaClient(config)
|
||||||
|
|
||||||
|
try {
|
||||||
|
onProgress?.(`Pulling model "${config.model}"...`)
|
||||||
|
await client.pullModel(config.model)
|
||||||
|
onProgress?.(`Model "${config.model}" pulled successfully.`)
|
||||||
|
return { ok: true }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
error: `Failed to pull model: ${message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check project size.
|
||||||
|
*/
|
||||||
|
export async function checkProjectSize(
|
||||||
|
projectPath: string,
|
||||||
|
maxFiles: number = DEFAULT_MAX_FILES,
|
||||||
|
): Promise<{
|
||||||
|
ok: boolean
|
||||||
|
fileCount: number
|
||||||
|
warning?: string
|
||||||
|
}> {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const files = await scanner.scanAll(projectPath)
|
||||||
|
const fileCount = files.length
|
||||||
|
|
||||||
|
if (fileCount > maxFiles) {
|
||||||
|
return {
|
||||||
|
ok: true,
|
||||||
|
fileCount,
|
||||||
|
warning: `Project has ${fileCount.toLocaleString()} files (>${maxFiles.toLocaleString()}).
|
||||||
|
This may take a while to index and use more memory.
|
||||||
|
|
||||||
|
Consider:
|
||||||
|
1. Running ipuaro in a subdirectory: ipuaro ./src
|
||||||
|
2. Adding patterns to .gitignore to exclude unnecessary files
|
||||||
|
3. Using a smaller project for better performance`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fileCount === 0) {
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
fileCount: 0,
|
||||||
|
warning: `No supported files found in "${projectPath}".
|
||||||
|
|
||||||
|
ipuaro supports: .ts, .tsx, .js, .jsx, .json, .yaml, .yml
|
||||||
|
|
||||||
|
Ensure you're running ipuaro in a project directory with source files.`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: true, fileCount }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
fileCount: 0,
|
||||||
|
warning: `Failed to scan project: ${message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run all onboarding checks.
|
||||||
|
*/
|
||||||
|
export async function runOnboarding(options: OnboardingOptions): Promise<OnboardingResult> {
|
||||||
|
const errors: string[] = []
|
||||||
|
const warnings: string[] = []
|
||||||
|
const maxFiles = options.maxFiles ?? DEFAULT_MAX_FILES
|
||||||
|
|
||||||
|
let redisOk = true
|
||||||
|
let ollamaOk = true
|
||||||
|
let modelOk = true
|
||||||
|
let projectOk = true
|
||||||
|
let fileCount = 0
|
||||||
|
|
||||||
|
if (!options.skipRedis) {
|
||||||
|
const redisResult = await checkRedis(options.redisConfig)
|
||||||
|
redisOk = redisResult.ok
|
||||||
|
if (!redisOk && redisResult.error) {
|
||||||
|
errors.push(redisResult.error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.skipOllama) {
|
||||||
|
const ollamaResult = await checkOllama(options.llmConfig)
|
||||||
|
ollamaOk = ollamaResult.ok
|
||||||
|
if (!ollamaOk && ollamaResult.error) {
|
||||||
|
errors.push(ollamaResult.error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.skipModel && ollamaOk) {
|
||||||
|
const modelResult = await checkModel(options.llmConfig)
|
||||||
|
modelOk = modelResult.ok
|
||||||
|
if (!modelOk && modelResult.error) {
|
||||||
|
errors.push(modelResult.error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.skipProject) {
|
||||||
|
const projectResult = await checkProjectSize(options.projectPath, maxFiles)
|
||||||
|
projectOk = projectResult.ok
|
||||||
|
fileCount = projectResult.fileCount
|
||||||
|
if (projectResult.warning) {
|
||||||
|
if (projectResult.ok) {
|
||||||
|
warnings.push(projectResult.warning)
|
||||||
|
} else {
|
||||||
|
errors.push(projectResult.warning)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: redisOk && ollamaOk && modelOk && projectOk && errors.length === 0,
|
||||||
|
redisOk,
|
||||||
|
ollamaOk,
|
||||||
|
modelOk,
|
||||||
|
projectOk,
|
||||||
|
fileCount,
|
||||||
|
errors,
|
||||||
|
warnings,
|
||||||
|
}
|
||||||
|
}
|
||||||
162
packages/ipuaro/src/cli/commands/start.ts
Normal file
162
packages/ipuaro/src/cli/commands/start.ts
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
/**
|
||||||
|
* Start command implementation.
|
||||||
|
* Launches the ipuaro TUI.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as path from "node:path"
|
||||||
|
import * as readline from "node:readline"
|
||||||
|
import { render } from "ink"
|
||||||
|
import React from "react"
|
||||||
|
import { App, type AppDependencies } from "../../tui/App.js"
|
||||||
|
import { RedisClient } from "../../infrastructure/storage/RedisClient.js"
|
||||||
|
import { RedisStorage } from "../../infrastructure/storage/RedisStorage.js"
|
||||||
|
import { RedisSessionStorage } from "../../infrastructure/storage/RedisSessionStorage.js"
|
||||||
|
import { OllamaClient } from "../../infrastructure/llm/OllamaClient.js"
|
||||||
|
import { ToolRegistry } from "../../infrastructure/tools/registry.js"
|
||||||
|
import { generateProjectName } from "../../infrastructure/storage/schema.js"
|
||||||
|
import { type Config, DEFAULT_CONFIG } from "../../shared/constants/config.js"
|
||||||
|
import { checkModel, pullModel, runOnboarding } from "./onboarding.js"
|
||||||
|
import { registerAllTools } from "./tools-setup.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for start command.
|
||||||
|
*/
|
||||||
|
export interface StartOptions {
|
||||||
|
autoApply?: boolean
|
||||||
|
model?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of start command.
|
||||||
|
*/
|
||||||
|
export interface StartResult {
|
||||||
|
success: boolean
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the start command.
|
||||||
|
*/
|
||||||
|
export async function executeStart(
|
||||||
|
projectPath: string,
|
||||||
|
options: StartOptions,
|
||||||
|
config: Config = DEFAULT_CONFIG,
|
||||||
|
): Promise<StartResult> {
|
||||||
|
const resolvedPath = path.resolve(projectPath)
|
||||||
|
const projectName = generateProjectName(resolvedPath)
|
||||||
|
|
||||||
|
const llmConfig = {
|
||||||
|
...config.llm,
|
||||||
|
model: options.model ?? config.llm.model,
|
||||||
|
}
|
||||||
|
|
||||||
|
console.warn("🔍 Running pre-flight checks...\n")
|
||||||
|
|
||||||
|
const onboardingResult = await runOnboarding({
|
||||||
|
redisConfig: config.redis,
|
||||||
|
llmConfig,
|
||||||
|
projectPath: resolvedPath,
|
||||||
|
})
|
||||||
|
|
||||||
|
for (const warning of onboardingResult.warnings) {
|
||||||
|
console.warn(`⚠️ ${warning}\n`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!onboardingResult.success) {
|
||||||
|
for (const error of onboardingResult.errors) {
|
||||||
|
console.error(`❌ ${error}\n`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!onboardingResult.modelOk && onboardingResult.ollamaOk) {
|
||||||
|
const shouldPull = await promptYesNo(
|
||||||
|
`Would you like to pull "${llmConfig.model}"? (y/n): `,
|
||||||
|
)
|
||||||
|
|
||||||
|
if (shouldPull) {
|
||||||
|
const pullResult = await pullModel(llmConfig, console.warn)
|
||||||
|
if (!pullResult.ok) {
|
||||||
|
console.error(`❌ ${pullResult.error ?? "Unknown error"}`)
|
||||||
|
return { success: false, error: pullResult.error }
|
||||||
|
}
|
||||||
|
|
||||||
|
const recheckModel = await checkModel(llmConfig)
|
||||||
|
if (!recheckModel.ok) {
|
||||||
|
console.error("❌ Model still not available after pull.")
|
||||||
|
return { success: false, error: "Model pull failed" }
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return { success: false, error: "Model not available" }
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: onboardingResult.errors.join("\n"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.warn(`✅ All checks passed. Found ${String(onboardingResult.fileCount)} files.\n`)
|
||||||
|
console.warn("🚀 Starting ipuaro...\n")
|
||||||
|
|
||||||
|
const redisClient = new RedisClient(config.redis)
|
||||||
|
|
||||||
|
try {
|
||||||
|
await redisClient.connect()
|
||||||
|
|
||||||
|
const storage = new RedisStorage(redisClient, projectName)
|
||||||
|
const sessionStorage = new RedisSessionStorage(redisClient)
|
||||||
|
const llm = new OllamaClient(llmConfig)
|
||||||
|
const tools = new ToolRegistry()
|
||||||
|
|
||||||
|
registerAllTools(tools)
|
||||||
|
|
||||||
|
const deps: AppDependencies = {
|
||||||
|
storage,
|
||||||
|
sessionStorage,
|
||||||
|
llm,
|
||||||
|
tools,
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleExit = (): void => {
|
||||||
|
void redisClient.disconnect()
|
||||||
|
}
|
||||||
|
|
||||||
|
const { waitUntilExit } = render(
|
||||||
|
React.createElement(App, {
|
||||||
|
projectPath: resolvedPath,
|
||||||
|
autoApply: options.autoApply ?? config.edit.autoApply,
|
||||||
|
deps,
|
||||||
|
onExit: handleExit,
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
await waitUntilExit()
|
||||||
|
await redisClient.disconnect()
|
||||||
|
|
||||||
|
return { success: true }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
console.error(`❌ Failed to start ipuaro: ${message}`)
|
||||||
|
await redisClient.disconnect()
|
||||||
|
return { success: false, error: message }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simple yes/no prompt for CLI.
|
||||||
|
*/
|
||||||
|
async function promptYesNo(question: string): Promise<boolean> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
process.stdout.write(question)
|
||||||
|
|
||||||
|
const rl = readline.createInterface({
|
||||||
|
input: process.stdin,
|
||||||
|
output: process.stdout,
|
||||||
|
})
|
||||||
|
|
||||||
|
rl.once("line", (answer: string) => {
|
||||||
|
rl.close()
|
||||||
|
resolve(answer.toLowerCase() === "y" || answer.toLowerCase() === "yes")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
59
packages/ipuaro/src/cli/commands/tools-setup.ts
Normal file
59
packages/ipuaro/src/cli/commands/tools-setup.ts
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
/**
|
||||||
|
* Tool registration helper for CLI.
|
||||||
|
* Registers all 18 tools with the tool registry.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { IToolRegistry } from "../../application/interfaces/IToolRegistry.js"
|
||||||
|
|
||||||
|
import { GetLinesTool } from "../../infrastructure/tools/read/GetLinesTool.js"
|
||||||
|
import { GetFunctionTool } from "../../infrastructure/tools/read/GetFunctionTool.js"
|
||||||
|
import { GetClassTool } from "../../infrastructure/tools/read/GetClassTool.js"
|
||||||
|
import { GetStructureTool } from "../../infrastructure/tools/read/GetStructureTool.js"
|
||||||
|
|
||||||
|
import { EditLinesTool } from "../../infrastructure/tools/edit/EditLinesTool.js"
|
||||||
|
import { CreateFileTool } from "../../infrastructure/tools/edit/CreateFileTool.js"
|
||||||
|
import { DeleteFileTool } from "../../infrastructure/tools/edit/DeleteFileTool.js"
|
||||||
|
|
||||||
|
import { FindReferencesTool } from "../../infrastructure/tools/search/FindReferencesTool.js"
|
||||||
|
import { FindDefinitionTool } from "../../infrastructure/tools/search/FindDefinitionTool.js"
|
||||||
|
|
||||||
|
import { GetDependenciesTool } from "../../infrastructure/tools/analysis/GetDependenciesTool.js"
|
||||||
|
import { GetDependentsTool } from "../../infrastructure/tools/analysis/GetDependentsTool.js"
|
||||||
|
import { GetComplexityTool } from "../../infrastructure/tools/analysis/GetComplexityTool.js"
|
||||||
|
import { GetTodosTool } from "../../infrastructure/tools/analysis/GetTodosTool.js"
|
||||||
|
|
||||||
|
import { GitStatusTool } from "../../infrastructure/tools/git/GitStatusTool.js"
|
||||||
|
import { GitDiffTool } from "../../infrastructure/tools/git/GitDiffTool.js"
|
||||||
|
import { GitCommitTool } from "../../infrastructure/tools/git/GitCommitTool.js"
|
||||||
|
|
||||||
|
import { RunCommandTool } from "../../infrastructure/tools/run/RunCommandTool.js"
|
||||||
|
import { RunTestsTool } from "../../infrastructure/tools/run/RunTestsTool.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register all 18 tools with the tool registry.
|
||||||
|
*/
|
||||||
|
export function registerAllTools(registry: IToolRegistry): void {
|
||||||
|
registry.register(new GetLinesTool())
|
||||||
|
registry.register(new GetFunctionTool())
|
||||||
|
registry.register(new GetClassTool())
|
||||||
|
registry.register(new GetStructureTool())
|
||||||
|
|
||||||
|
registry.register(new EditLinesTool())
|
||||||
|
registry.register(new CreateFileTool())
|
||||||
|
registry.register(new DeleteFileTool())
|
||||||
|
|
||||||
|
registry.register(new FindReferencesTool())
|
||||||
|
registry.register(new FindDefinitionTool())
|
||||||
|
|
||||||
|
registry.register(new GetDependenciesTool())
|
||||||
|
registry.register(new GetDependentsTool())
|
||||||
|
registry.register(new GetComplexityTool())
|
||||||
|
registry.register(new GetTodosTool())
|
||||||
|
|
||||||
|
registry.register(new GitStatusTool())
|
||||||
|
registry.register(new GitDiffTool())
|
||||||
|
registry.register(new GitCommitTool())
|
||||||
|
|
||||||
|
registry.register(new RunCommandTool())
|
||||||
|
registry.register(new RunTestsTool())
|
||||||
|
}
|
||||||
@@ -1,44 +1,63 @@
|
|||||||
#!/usr/bin/env node
|
#!/usr/bin/env node
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ipuaro CLI entry point.
|
||||||
|
* Local AI agent for codebase operations with infinite context feeling.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { createRequire } from "node:module"
|
||||||
import { Command } from "commander"
|
import { Command } from "commander"
|
||||||
|
import { executeStart } from "./commands/start.js"
|
||||||
|
import { executeInit } from "./commands/init.js"
|
||||||
|
import { executeIndex } from "./commands/index-cmd.js"
|
||||||
|
import { loadConfig } from "../shared/config/loader.js"
|
||||||
|
|
||||||
|
const require = createRequire(import.meta.url)
|
||||||
|
const pkg = require("../../package.json") as { version: string }
|
||||||
|
|
||||||
const program = new Command()
|
const program = new Command()
|
||||||
|
|
||||||
program
|
program
|
||||||
.name("ipuaro")
|
.name("ipuaro")
|
||||||
.description("Local AI agent for codebase operations with infinite context feeling")
|
.description("Local AI agent for codebase operations with infinite context feeling")
|
||||||
.version("0.1.0")
|
.version(pkg.version)
|
||||||
|
|
||||||
program
|
program
|
||||||
.command("start")
|
.command("start", { isDefault: true })
|
||||||
.description("Start ipuaro TUI in the current directory")
|
.description("Start ipuaro TUI in the current directory")
|
||||||
.argument("[path]", "Project path", ".")
|
.argument("[path]", "Project path", ".")
|
||||||
.option("--auto-apply", "Enable auto-apply mode for edits")
|
.option("--auto-apply", "Enable auto-apply mode for edits")
|
||||||
.option("--model <name>", "Override LLM model", "qwen2.5-coder:7b-instruct")
|
.option("--model <name>", "Override LLM model")
|
||||||
.action((path: string, options: { autoApply?: boolean; model?: string }) => {
|
.action(async (projectPath: string, options: { autoApply?: boolean; model?: string }) => {
|
||||||
const model = options.model ?? "default"
|
const config = loadConfig(projectPath)
|
||||||
const autoApply = options.autoApply ?? false
|
const result = await executeStart(projectPath, options, config)
|
||||||
console.warn(`Starting ipuaro in ${path}...`)
|
if (!result.success) {
|
||||||
console.warn(`Model: ${model}`)
|
process.exit(1)
|
||||||
console.warn(`Auto-apply: ${autoApply ? "enabled" : "disabled"}`)
|
}
|
||||||
console.warn("\nNot implemented yet. Coming in version 0.11.0!")
|
|
||||||
})
|
})
|
||||||
|
|
||||||
program
|
program
|
||||||
.command("init")
|
.command("init")
|
||||||
.description("Create .ipuaro.json config file")
|
.description("Create .ipuaro.json config file")
|
||||||
.action(() => {
|
.argument("[path]", "Project path", ".")
|
||||||
console.warn("Creating .ipuaro.json...")
|
.option("--force", "Overwrite existing config file")
|
||||||
console.warn("\nNot implemented yet. Coming in version 0.17.0!")
|
.action(async (projectPath: string, options: { force?: boolean }) => {
|
||||||
|
const result = await executeInit(projectPath, options)
|
||||||
|
if (!result.success) {
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
program
|
program
|
||||||
.command("index")
|
.command("index")
|
||||||
.description("Index project without starting TUI")
|
.description("Index project without starting TUI")
|
||||||
.argument("[path]", "Project path", ".")
|
.argument("[path]", "Project path", ".")
|
||||||
.action((path: string) => {
|
.action(async (projectPath: string) => {
|
||||||
console.warn(`Indexing ${path}...`)
|
const config = loadConfig(projectPath)
|
||||||
console.warn("\nNot implemented yet. Coming in version 0.3.0!")
|
const result = await executeIndex(projectPath, config)
|
||||||
|
if (!result.success) {
|
||||||
|
process.exit(1)
|
||||||
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
program.parse()
|
program.parse()
|
||||||
|
|||||||
@@ -94,6 +94,12 @@ export class Session {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
truncateHistory(maxMessages: number): void {
|
||||||
|
if (this.history.length > maxMessages) {
|
||||||
|
this.history = this.history.slice(-maxMessages)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
clearHistory(): void {
|
clearHistory(): void {
|
||||||
this.history = []
|
this.history = []
|
||||||
this.context = {
|
this.context = {
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ export interface ScanResult {
|
|||||||
type: "file" | "directory" | "symlink"
|
type: "file" | "directory" | "symlink"
|
||||||
size: number
|
size: number
|
||||||
lastModified: number
|
lastModified: number
|
||||||
|
symlinkTarget?: string
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -46,7 +47,7 @@ export interface IIndexer {
|
|||||||
/**
|
/**
|
||||||
* Parse file content into AST.
|
* Parse file content into AST.
|
||||||
*/
|
*/
|
||||||
parseFile(content: string, language: "ts" | "tsx" | "js" | "jsx"): FileAST
|
parseFile(content: string, language: "ts" | "tsx" | "js" | "jsx" | "json" | "yaml"): FileAST
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Analyze file and compute metadata.
|
* Analyze file and compute metadata.
|
||||||
|
|||||||
@@ -1,26 +1,6 @@
|
|||||||
import type { ChatMessage } from "../value-objects/ChatMessage.js"
|
import type { ChatMessage } from "../value-objects/ChatMessage.js"
|
||||||
import type { ToolCall } from "../value-objects/ToolCall.js"
|
import type { ToolCall } from "../value-objects/ToolCall.js"
|
||||||
|
|
||||||
/**
|
|
||||||
* Tool parameter definition for LLM.
|
|
||||||
*/
|
|
||||||
export interface ToolParameter {
|
|
||||||
name: string
|
|
||||||
type: "string" | "number" | "boolean" | "array" | "object"
|
|
||||||
description: string
|
|
||||||
required: boolean
|
|
||||||
enum?: string[]
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Tool definition for LLM function calling.
|
|
||||||
*/
|
|
||||||
export interface ToolDef {
|
|
||||||
name: string
|
|
||||||
description: string
|
|
||||||
parameters: ToolParameter[]
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Response from LLM.
|
* Response from LLM.
|
||||||
*/
|
*/
|
||||||
@@ -42,12 +22,16 @@ export interface LLMResponse {
|
|||||||
/**
|
/**
|
||||||
* LLM client service interface (port).
|
* LLM client service interface (port).
|
||||||
* Abstracts the LLM provider.
|
* Abstracts the LLM provider.
|
||||||
|
*
|
||||||
|
* Tool definitions should be included in the system prompt as XML format,
|
||||||
|
* not passed as a separate parameter.
|
||||||
*/
|
*/
|
||||||
export interface ILLMClient {
|
export interface ILLMClient {
|
||||||
/**
|
/**
|
||||||
* Send messages to LLM and get response.
|
* Send messages to LLM and get response.
|
||||||
|
* Tool calls are extracted from the response content using XML parsing.
|
||||||
*/
|
*/
|
||||||
chat(messages: ChatMessage[], tools?: ToolDef[]): Promise<LLMResponse>
|
chat(messages: ChatMessage[]): Promise<LLMResponse>
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Count tokens in text.
|
* Count tokens in text.
|
||||||
|
|||||||
88
packages/ipuaro/src/domain/services/ISessionStorage.ts
Normal file
88
packages/ipuaro/src/domain/services/ISessionStorage.ts
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
import type { ContextState, Session, SessionStats } from "../entities/Session.js"
|
||||||
|
import type { ChatMessage } from "../value-objects/ChatMessage.js"
|
||||||
|
import type { UndoEntry } from "../value-objects/UndoEntry.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session data stored in persistence layer.
|
||||||
|
*/
|
||||||
|
export interface SessionData {
|
||||||
|
id: string
|
||||||
|
projectName: string
|
||||||
|
createdAt: number
|
||||||
|
lastActivityAt: number
|
||||||
|
history: ChatMessage[]
|
||||||
|
context: ContextState
|
||||||
|
stats: SessionStats
|
||||||
|
inputHistory: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session list item (minimal info for listing).
|
||||||
|
*/
|
||||||
|
export interface SessionListItem {
|
||||||
|
id: string
|
||||||
|
projectName: string
|
||||||
|
createdAt: number
|
||||||
|
lastActivityAt: number
|
||||||
|
messageCount: number
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Storage service interface for session persistence.
|
||||||
|
*/
|
||||||
|
export interface ISessionStorage {
|
||||||
|
/**
|
||||||
|
* Save a session to storage.
|
||||||
|
*/
|
||||||
|
saveSession(session: Session): Promise<void>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load a session by ID.
|
||||||
|
*/
|
||||||
|
loadSession(sessionId: string): Promise<Session | null>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete a session.
|
||||||
|
*/
|
||||||
|
deleteSession(sessionId: string): Promise<void>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get list of all sessions for a project.
|
||||||
|
*/
|
||||||
|
listSessions(projectName?: string): Promise<SessionListItem[]>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the latest session for a project.
|
||||||
|
*/
|
||||||
|
getLatestSession(projectName: string): Promise<Session | null>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a session exists.
|
||||||
|
*/
|
||||||
|
sessionExists(sessionId: string): Promise<boolean>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add undo entry to session's undo stack.
|
||||||
|
*/
|
||||||
|
pushUndoEntry(sessionId: string, entry: UndoEntry): Promise<void>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pop undo entry from session's undo stack.
|
||||||
|
*/
|
||||||
|
popUndoEntry(sessionId: string): Promise<UndoEntry | null>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get undo stack for a session.
|
||||||
|
*/
|
||||||
|
getUndoStack(sessionId: string): Promise<UndoEntry[]>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update session's last activity timestamp.
|
||||||
|
*/
|
||||||
|
touchSession(sessionId: string): Promise<void>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear all sessions.
|
||||||
|
*/
|
||||||
|
clearAllSessions(): Promise<void>
|
||||||
|
}
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
// Domain Service Interfaces (Ports)
|
// Domain Service Interfaces (Ports)
|
||||||
export * from "./IStorage.js"
|
export * from "./IStorage.js"
|
||||||
|
export * from "./ISessionStorage.js"
|
||||||
export * from "./ILLMClient.js"
|
export * from "./ILLMClient.js"
|
||||||
export * from "./ITool.js"
|
export * from "./ITool.js"
|
||||||
export * from "./IIndexer.js"
|
export * from "./IIndexer.js"
|
||||||
|
|||||||
@@ -52,6 +52,8 @@ export interface FunctionInfo {
|
|||||||
isExported: boolean
|
isExported: boolean
|
||||||
/** Return type (if available) */
|
/** Return type (if available) */
|
||||||
returnType?: string
|
returnType?: string
|
||||||
|
/** Decorators applied to the function (e.g., ["@Get(':id')", "@Auth()"]) */
|
||||||
|
decorators?: string[]
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface MethodInfo {
|
export interface MethodInfo {
|
||||||
@@ -69,6 +71,8 @@ export interface MethodInfo {
|
|||||||
visibility: "public" | "private" | "protected"
|
visibility: "public" | "private" | "protected"
|
||||||
/** Whether it's static */
|
/** Whether it's static */
|
||||||
isStatic: boolean
|
isStatic: boolean
|
||||||
|
/** Decorators applied to the method (e.g., ["@Get(':id')", "@UseGuards(AuthGuard)"]) */
|
||||||
|
decorators?: string[]
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface PropertyInfo {
|
export interface PropertyInfo {
|
||||||
@@ -105,6 +109,8 @@ export interface ClassInfo {
|
|||||||
isExported: boolean
|
isExported: boolean
|
||||||
/** Whether class is abstract */
|
/** Whether class is abstract */
|
||||||
isAbstract: boolean
|
isAbstract: boolean
|
||||||
|
/** Decorators applied to the class (e.g., ["@Controller('users')", "@Injectable()"]) */
|
||||||
|
decorators?: string[]
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface InterfaceInfo {
|
export interface InterfaceInfo {
|
||||||
@@ -129,6 +135,30 @@ export interface TypeAliasInfo {
|
|||||||
line: number
|
line: number
|
||||||
/** Whether it's exported */
|
/** Whether it's exported */
|
||||||
isExported: boolean
|
isExported: boolean
|
||||||
|
/** Type definition (e.g., "string", "User & Admin", "{ id: string }") */
|
||||||
|
definition?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface EnumMemberInfo {
|
||||||
|
/** Member name */
|
||||||
|
name: string
|
||||||
|
/** Member value (string or number, if specified) */
|
||||||
|
value?: string | number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface EnumInfo {
|
||||||
|
/** Enum name */
|
||||||
|
name: string
|
||||||
|
/** Start line number */
|
||||||
|
lineStart: number
|
||||||
|
/** End line number */
|
||||||
|
lineEnd: number
|
||||||
|
/** Enum members with values */
|
||||||
|
members: EnumMemberInfo[]
|
||||||
|
/** Whether it's exported */
|
||||||
|
isExported: boolean
|
||||||
|
/** Whether it's a const enum */
|
||||||
|
isConst: boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface FileAST {
|
export interface FileAST {
|
||||||
@@ -144,6 +174,8 @@ export interface FileAST {
|
|||||||
interfaces: InterfaceInfo[]
|
interfaces: InterfaceInfo[]
|
||||||
/** Type alias declarations */
|
/** Type alias declarations */
|
||||||
typeAliases: TypeAliasInfo[]
|
typeAliases: TypeAliasInfo[]
|
||||||
|
/** Enum declarations */
|
||||||
|
enums: EnumInfo[]
|
||||||
/** Whether parsing encountered errors */
|
/** Whether parsing encountered errors */
|
||||||
parseError: boolean
|
parseError: boolean
|
||||||
/** Parse error message if any */
|
/** Parse error message if any */
|
||||||
@@ -158,6 +190,7 @@ export function createEmptyFileAST(): FileAST {
|
|||||||
classes: [],
|
classes: [],
|
||||||
interfaces: [],
|
interfaces: [],
|
||||||
typeAliases: [],
|
typeAliases: [],
|
||||||
|
enums: [],
|
||||||
parseError: false,
|
parseError: false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -26,6 +26,12 @@ export interface FileMeta {
|
|||||||
isEntryPoint: boolean
|
isEntryPoint: boolean
|
||||||
/** File type classification */
|
/** File type classification */
|
||||||
fileType: "source" | "test" | "config" | "types" | "unknown"
|
fileType: "source" | "test" | "config" | "types" | "unknown"
|
||||||
|
/** Impact score (0-100): percentage of codebase that depends on this file */
|
||||||
|
impactScore: number
|
||||||
|
/** Count of files that depend on this file transitively (including indirect dependents) */
|
||||||
|
transitiveDepCount: number
|
||||||
|
/** Count of files this file depends on transitively (including indirect dependencies) */
|
||||||
|
transitiveDepByCount: number
|
||||||
}
|
}
|
||||||
|
|
||||||
export function createFileMeta(partial: Partial<FileMeta> = {}): FileMeta {
|
export function createFileMeta(partial: Partial<FileMeta> = {}): FileMeta {
|
||||||
@@ -41,6 +47,9 @@ export function createFileMeta(partial: Partial<FileMeta> = {}): FileMeta {
|
|||||||
isHub: false,
|
isHub: false,
|
||||||
isEntryPoint: false,
|
isEntryPoint: false,
|
||||||
fileType: "unknown",
|
fileType: "unknown",
|
||||||
|
impactScore: 0,
|
||||||
|
transitiveDepCount: 0,
|
||||||
|
transitiveDepByCount: 0,
|
||||||
...partial,
|
...partial,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -48,3 +57,20 @@ export function createFileMeta(partial: Partial<FileMeta> = {}): FileMeta {
|
|||||||
export function isHubFile(dependentCount: number): boolean {
|
export function isHubFile(dependentCount: number): boolean {
|
||||||
return dependentCount > 5
|
return dependentCount > 5
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate impact score based on number of dependents and total files.
|
||||||
|
* Impact score represents what percentage of the codebase depends on this file.
|
||||||
|
* @param dependentCount - Number of files that depend on this file
|
||||||
|
* @param totalFiles - Total number of files in the project
|
||||||
|
* @returns Impact score from 0 to 100
|
||||||
|
*/
|
||||||
|
export function calculateImpactScore(dependentCount: number, totalFiles: number): number {
|
||||||
|
if (totalFiles <= 1) {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
// Exclude the file itself from the total
|
||||||
|
const maxPossibleDependents = totalFiles - 1
|
||||||
|
const score = (dependentCount / maxPossibleDependents) * 100
|
||||||
|
return Math.round(Math.min(100, score))
|
||||||
|
}
|
||||||
|
|||||||
@@ -4,6 +4,11 @@
|
|||||||
* Main entry point for the library.
|
* Main entry point for the library.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import { createRequire } from "node:module"
|
||||||
|
|
||||||
|
const require = createRequire(import.meta.url)
|
||||||
|
const pkg = require("../package.json") as { version: string }
|
||||||
|
|
||||||
// Domain exports
|
// Domain exports
|
||||||
export * from "./domain/index.js"
|
export * from "./domain/index.js"
|
||||||
|
|
||||||
@@ -13,5 +18,11 @@ export * from "./application/index.js"
|
|||||||
// Shared exports
|
// Shared exports
|
||||||
export * from "./shared/index.js"
|
export * from "./shared/index.js"
|
||||||
|
|
||||||
|
// Infrastructure exports
|
||||||
|
export * from "./infrastructure/index.js"
|
||||||
|
|
||||||
|
// TUI exports
|
||||||
|
export * from "./tui/index.js"
|
||||||
|
|
||||||
// Version
|
// Version
|
||||||
export const VERSION = "0.1.0"
|
export const VERSION = pkg.version
|
||||||
|
|||||||
6
packages/ipuaro/src/infrastructure/index.ts
Normal file
6
packages/ipuaro/src/infrastructure/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// Infrastructure layer exports
|
||||||
|
export * from "./storage/index.js"
|
||||||
|
export * from "./indexer/index.js"
|
||||||
|
export * from "./llm/index.js"
|
||||||
|
export * from "./tools/index.js"
|
||||||
|
export * from "./security/index.js"
|
||||||
818
packages/ipuaro/src/infrastructure/indexer/ASTParser.ts
Normal file
818
packages/ipuaro/src/infrastructure/indexer/ASTParser.ts
Normal file
@@ -0,0 +1,818 @@
|
|||||||
|
import { builtinModules } from "node:module"
|
||||||
|
import Parser from "tree-sitter"
|
||||||
|
import TypeScript from "tree-sitter-typescript"
|
||||||
|
import JavaScript from "tree-sitter-javascript"
|
||||||
|
import JSON from "tree-sitter-json"
|
||||||
|
import * as yamlParser from "yaml"
|
||||||
|
import {
|
||||||
|
createEmptyFileAST,
|
||||||
|
type EnumMemberInfo,
|
||||||
|
type ExportInfo,
|
||||||
|
type FileAST,
|
||||||
|
type ImportInfo,
|
||||||
|
type MethodInfo,
|
||||||
|
type ParameterInfo,
|
||||||
|
type PropertyInfo,
|
||||||
|
} from "../../domain/value-objects/FileAST.js"
|
||||||
|
import { FieldName, NodeType } from "./tree-sitter-types.js"
|
||||||
|
|
||||||
|
type Language = "ts" | "tsx" | "js" | "jsx" | "json" | "yaml"
|
||||||
|
type SyntaxNode = Parser.SyntaxNode
|
||||||
|
|
||||||
|
/**
 * Parses source code into AST using tree-sitter.
 *
 * Supports TypeScript, TSX, JavaScript/JSX and JSON via tree-sitter
 * grammars, and YAML via the `yaml` package. All extracted line numbers
 * are 1-based (tree-sitter positions are 0-based rows; `+ 1` throughout).
 * Parse failures never throw: they are reported through
 * `FileAST.parseError` / `parseErrorMessage`.
 */
export class ASTParser {
    // One parser instance per supported language, built once in the constructor.
    private readonly parsers = new Map<Language, Parser>()

    constructor() {
        this.initializeParsers()
    }

    /**
     * Create and register tree-sitter parsers for each grammar.
     * Note: "js" and "jsx" share a single JavaScript parser instance.
     */
    private initializeParsers(): void {
        const tsParser = new Parser()
        tsParser.setLanguage(TypeScript.typescript)
        this.parsers.set("ts", tsParser)

        const tsxParser = new Parser()
        tsxParser.setLanguage(TypeScript.tsx)
        this.parsers.set("tsx", tsxParser)

        const jsParser = new Parser()
        jsParser.setLanguage(JavaScript)
        this.parsers.set("js", jsParser)
        this.parsers.set("jsx", jsParser)

        const jsonParser = new Parser()
        // NOTE: `JSON` here is the tree-sitter-json grammar import, which
        // shadows the global JSON object within this module.
        jsonParser.setLanguage(JSON)
        this.parsers.set("json", jsonParser)
    }

    /**
     * Parse source code and extract AST information.
     *
     * YAML is delegated to parseYAML(); unknown languages and parser
     * exceptions produce an empty FileAST with parseError set. A tree
     * with syntax errors is still extracted (best effort) but flagged.
     */
    parse(content: string, language: Language): FileAST {
        if (language === "yaml") {
            return this.parseYAML(content)
        }

        const parser = this.parsers.get(language)
        if (!parser) {
            return {
                ...createEmptyFileAST(),
                parseError: true,
                parseErrorMessage: `Unsupported language: ${language}`,
            }
        }

        try {
            const tree = parser.parse(content)
            const root = tree.rootNode

            if (root.hasError) {
                // Partial extraction: keep whatever nodes parsed correctly.
                const ast = this.extractAST(root, language)
                ast.parseError = true
                ast.parseErrorMessage = "Syntax error in source code"
                return ast
            }

            return this.extractAST(root, language)
        } catch (error) {
            return {
                ...createEmptyFileAST(),
                parseError: true,
                parseErrorMessage: error instanceof Error ? error.message : "Unknown parse error",
            }
        }
    }

    /**
     * Parse YAML content using yaml package.
     *
     * Top-level map keys (or a single "(array)" placeholder for a
     * sequence document) are surfaced as exports of kind "variable".
     */
    private parseYAML(content: string): FileAST {
        const ast = createEmptyFileAST()

        try {
            const doc = yamlParser.parseDocument(content)

            if (doc.errors.length > 0) {
                return {
                    ...createEmptyFileAST(),
                    parseError: true,
                    parseErrorMessage: doc.errors[0].message,
                }
            }

            const contents = doc.contents

            if (yamlParser.isSeq(contents)) {
                ast.exports.push({
                    name: "(array)",
                    line: 1,
                    isDefault: false,
                    kind: "variable",
                })
            } else if (yamlParser.isMap(contents)) {
                for (const item of contents.items) {
                    if (yamlParser.isPair(item) && yamlParser.isScalar(item.key)) {
                        // yaml gives character offsets, not line numbers;
                        // convert the key's start offset to a 1-based line.
                        const keyRange = item.key.range
                        const line = keyRange ? this.getLineFromOffset(content, keyRange[0]) : 1
                        ast.exports.push({
                            name: String(item.key.value),
                            line,
                            isDefault: false,
                            kind: "variable",
                        })
                    }
                }
            }

            return ast
        } catch (error) {
            return {
                ...createEmptyFileAST(),
                parseError: true,
                parseErrorMessage: error instanceof Error ? error.message : "YAML parse error",
            }
        }
    }

    /**
     * Get line number from character offset.
     * Counts newlines before `offset`; O(offset) per call.
     */
    private getLineFromOffset(content: string, offset: number): number {
        let line = 1
        for (let i = 0; i < offset && i < content.length; i++) {
            if (content[i] === "\n") {
                line++
            }
        }
        return line
    }

    /**
     * Walk the root node's children and populate a FileAST.
     * JSON gets its own key-extraction path; TS-only constructs
     * (interfaces, type aliases, enums) are gated on isTypeScript.
     */
    private extractAST(root: SyntaxNode, language: Language): FileAST {
        const ast = createEmptyFileAST()

        if (language === "json") {
            return this.extractJSONStructure(root, ast)
        }

        const isTypeScript = language === "ts" || language === "tsx"

        for (const child of root.children) {
            this.visitNode(child, ast, isTypeScript)
        }

        return ast
    }

    /**
     * Dispatch a top-level node to the matching extractor.
     * Only top-level declarations are visited; nested declarations
     * (e.g. a function inside a function) are not descended into here.
     */
    private visitNode(node: SyntaxNode, ast: FileAST, isTypeScript: boolean): void {
        switch (node.type) {
            case NodeType.IMPORT_STATEMENT:
                this.extractImport(node, ast)
                break
            case NodeType.EXPORT_STATEMENT:
                this.extractExport(node, ast)
                break
            case NodeType.FUNCTION_DECLARATION:
                this.extractFunction(node, ast, false)
                break
            case NodeType.LEXICAL_DECLARATION:
                this.extractLexicalDeclaration(node, ast)
                break
            case NodeType.CLASS_DECLARATION:
                this.extractClass(node, ast, false)
                break
            case NodeType.INTERFACE_DECLARATION:
                if (isTypeScript) {
                    this.extractInterface(node, ast, false)
                }
                break
            case NodeType.TYPE_ALIAS_DECLARATION:
                if (isTypeScript) {
                    this.extractTypeAlias(node, ast, false)
                }
                break
            case NodeType.ENUM_DECLARATION:
                if (isTypeScript) {
                    this.extractEnum(node, ast, false)
                }
                break
        }
    }

    /**
     * Extract one import statement into ast.imports.
     *
     * Handles default imports, namespace imports (`* as x`), and named
     * imports (with aliases). A statement without an import clause
     * (side-effect import like `import "./x"`) is recorded as name "*".
     */
    private extractImport(node: SyntaxNode, ast: FileAST): void {
        const sourceNode = node.childForFieldName(FieldName.SOURCE)
        if (!sourceNode) {
            return
        }

        const from = this.getStringValue(sourceNode)
        const line = node.startPosition.row + 1
        const importType = this.classifyImport(from)

        const importClause = node.children.find((c) => c.type === NodeType.IMPORT_CLAUSE)
        if (!importClause) {
            ast.imports.push({
                name: "*",
                from,
                line,
                type: importType,
                isDefault: false,
            })
            return
        }

        for (const child of importClause.children) {
            if (child.type === NodeType.IDENTIFIER) {
                // Bare identifier directly under the clause = default import.
                ast.imports.push({
                    name: child.text,
                    from,
                    line,
                    type: importType,
                    isDefault: true,
                })
            } else if (child.type === NodeType.NAMESPACE_IMPORT) {
                // `import * as alias from "..."` — record the alias.
                const alias = child.children.find((c) => c.type === NodeType.IDENTIFIER)
                ast.imports.push({
                    name: alias?.text ?? "*",
                    from,
                    line,
                    type: importType,
                    isDefault: false,
                })
            } else if (child.type === NodeType.NAMED_IMPORTS) {
                for (const specifier of child.children) {
                    if (specifier.type === NodeType.IMPORT_SPECIFIER) {
                        const nameNode = specifier.childForFieldName(FieldName.NAME)
                        const aliasNode = specifier.childForFieldName(FieldName.ALIAS)
                        // The local binding (alias if present) wins.
                        ast.imports.push({
                            name: aliasNode?.text ?? nameNode?.text ?? "",
                            from,
                            line,
                            type: importType,
                            isDefault: false,
                        })
                    }
                }
            }
        }
    }

    /**
     * Extract an export statement: either an exported declaration
     * (function/class/interface/type/enum/const) or a re-export clause
     * (`export { a, b }`). Enums are recorded with export kind "type".
     */
    private extractExport(node: SyntaxNode, ast: FileAST): void {
        const isDefault = node.children.some((c) => c.type === NodeType.DEFAULT)
        const declaration = node.childForFieldName(FieldName.DECLARATION)

        if (declaration) {
            // Decorators sit as siblings before the declaration inside
            // the export statement; collect them for classes/functions.
            const decorators = this.extractDecoratorsFromSiblings(declaration)

            switch (declaration.type) {
                case NodeType.FUNCTION_DECLARATION:
                    this.extractFunction(declaration, ast, true, decorators)
                    this.addExportInfo(ast, declaration, "function", isDefault)
                    break
                case NodeType.CLASS_DECLARATION:
                    this.extractClass(declaration, ast, true, decorators)
                    this.addExportInfo(ast, declaration, "class", isDefault)
                    break
                case NodeType.INTERFACE_DECLARATION:
                    this.extractInterface(declaration, ast, true)
                    this.addExportInfo(ast, declaration, "interface", isDefault)
                    break
                case NodeType.TYPE_ALIAS_DECLARATION:
                    this.extractTypeAlias(declaration, ast, true)
                    this.addExportInfo(ast, declaration, "type", isDefault)
                    break
                case NodeType.ENUM_DECLARATION:
                    this.extractEnum(declaration, ast, true)
                    this.addExportInfo(ast, declaration, "type", isDefault)
                    break
                case NodeType.LEXICAL_DECLARATION:
                    // Export info is pushed inside extractLexicalDeclaration.
                    this.extractLexicalDeclaration(declaration, ast, true)
                    break
            }
        }

        const exportClause = node.children.find((c) => c.type === NodeType.EXPORT_CLAUSE)
        if (exportClause) {
            for (const specifier of exportClause.children) {
                if (specifier.type === NodeType.EXPORT_SPECIFIER) {
                    const nameNode = specifier.childForFieldName(FieldName.NAME)
                    if (nameNode) {
                        // Re-exported names: kind is unknown here, so they
                        // are recorded as "variable".
                        ast.exports.push({
                            name: nameNode.text,
                            line: node.startPosition.row + 1,
                            isDefault: false,
                            kind: "variable",
                        })
                    }
                }
            }
        }
    }

    /**
     * Extract a `function` declaration into ast.functions.
     *
     * @param externalDecorators - decorators collected outside the node
     *   (e.g. from an enclosing export statement); merged before the
     *   node's own decorator children.
     */
    private extractFunction(
        node: SyntaxNode,
        ast: FileAST,
        isExported: boolean,
        externalDecorators: string[] = [],
    ): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (!nameNode) {
            // Anonymous functions (e.g. `export default function () {}`)
            // are skipped.
            return
        }

        const params = this.extractParameters(node)
        const isAsync = node.children.some((c) => c.type === NodeType.ASYNC)
        const returnTypeNode = node.childForFieldName(FieldName.RETURN_TYPE)

        const nodeDecorators = this.extractNodeDecorators(node)
        const decorators = [...externalDecorators, ...nodeDecorators]

        ast.functions.push({
            name: nameNode.text,
            lineStart: node.startPosition.row + 1,
            lineEnd: node.endPosition.row + 1,
            params,
            isAsync,
            isExported,
            // The grammar includes the leading ": " in the return type text.
            returnType: returnTypeNode?.text?.replace(/^:\s*/, ""),
            decorators,
        })
    }

    /**
     * Extract `const`/`let` declarations. Declarators whose value is an
     * arrow function or function expression are recorded as functions;
     * other exported declarators become "variable" exports.
     */
    private extractLexicalDeclaration(node: SyntaxNode, ast: FileAST, isExported = false): void {
        for (const child of node.children) {
            if (child.type === NodeType.VARIABLE_DECLARATOR) {
                const nameNode = child.childForFieldName(FieldName.NAME)
                const valueNode = child.childForFieldName(FieldName.VALUE)

                if (
                    valueNode?.type === NodeType.ARROW_FUNCTION ||
                    valueNode?.type === NodeType.FUNCTION
                ) {
                    const params = this.extractParameters(valueNode)
                    const isAsync = valueNode.children.some((c) => c.type === NodeType.ASYNC)
                    const returnTypeNode = valueNode.childForFieldName(FieldName.RETURN_TYPE)

                    ast.functions.push({
                        name: nameNode?.text ?? "",
                        // Line span covers the whole declaration statement,
                        // not just the function expression.
                        lineStart: node.startPosition.row + 1,
                        lineEnd: node.endPosition.row + 1,
                        params,
                        isAsync,
                        isExported,
                        returnType: returnTypeNode?.text?.replace(/^:\s*/, ""),
                        decorators: [],
                    })

                    if (isExported) {
                        ast.exports.push({
                            name: nameNode?.text ?? "",
                            line: node.startPosition.row + 1,
                            isDefault: false,
                            kind: "function",
                        })
                    }
                } else if (isExported && nameNode) {
                    // NOTE(review): for a destructuring declarator the name
                    // text is the whole pattern — presumably acceptable here.
                    ast.exports.push({
                        name: nameNode.text,
                        line: node.startPosition.row + 1,
                        isDefault: false,
                        kind: "variable",
                    })
                }
            }
        }
    }

    /**
     * Extract a class declaration: methods, properties, heritage
     * (extends/implements), abstract flag and decorators.
     */
    private extractClass(
        node: SyntaxNode,
        ast: FileAST,
        isExported: boolean,
        externalDecorators: string[] = [],
    ): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (!nameNode) {
            return
        }

        const body = node.childForFieldName(FieldName.BODY)
        const methods: MethodInfo[] = []
        const properties: PropertyInfo[] = []

        if (body) {
            // Decorators precede the member they annotate in the class
            // body, so accumulate them until the next method/field.
            let pendingDecorators: string[] = []
            for (const member of body.children) {
                if (member.type === NodeType.DECORATOR) {
                    pendingDecorators.push(this.formatDecorator(member))
                } else if (member.type === NodeType.METHOD_DEFINITION) {
                    methods.push(this.extractMethod(member, pendingDecorators))
                    pendingDecorators = []
                } else if (
                    member.type === NodeType.PUBLIC_FIELD_DEFINITION ||
                    member.type === NodeType.FIELD_DEFINITION
                ) {
                    // Field decorators are discarded: PropertyInfo carries
                    // no decorator slot.
                    properties.push(this.extractProperty(member))
                    pendingDecorators = []
                }
            }
        }

        const { extendsName, implementsList } = this.extractClassHeritage(node)
        const isAbstract = node.children.some((c) => c.type === NodeType.ABSTRACT)

        const nodeDecorators = this.extractNodeDecorators(node)
        const decorators = [...externalDecorators, ...nodeDecorators]

        ast.classes.push({
            name: nameNode.text,
            lineStart: node.startPosition.row + 1,
            lineEnd: node.endPosition.row + 1,
            methods,
            properties,
            extends: extendsName,
            implements: implementsList,
            isExported,
            isAbstract,
            decorators,
        })
    }

    /**
     * Resolve a class's extends/implements clauses. TS wraps them in a
     * class_heritage node; plain JS may expose extends_clause directly.
     */
    private extractClassHeritage(node: SyntaxNode): {
        extendsName: string | undefined
        implementsList: string[]
    } {
        let extendsName: string | undefined
        const implementsList: string[] = []

        for (const child of node.children) {
            if (child.type === NodeType.CLASS_HERITAGE) {
                this.parseHeritageClause(child, (ext) => (extendsName = ext), implementsList)
            } else if (child.type === NodeType.EXTENDS_CLAUSE) {
                extendsName = this.findTypeIdentifier(child)
            }
        }

        return { extendsName, implementsList }
    }

    /**
     * Walk a class_heritage node, setting the extends name via callback
     * and appending implemented interface names to implementsList.
     */
    private parseHeritageClause(
        heritage: SyntaxNode,
        setExtends: (name: string) => void,
        implementsList: string[],
    ): void {
        for (const clause of heritage.children) {
            if (clause.type === NodeType.EXTENDS_CLAUSE) {
                const typeId = this.findTypeIdentifier(clause)
                if (typeId) {
                    setExtends(typeId)
                }
            } else if (clause.type === NodeType.IMPLEMENTS_CLAUSE) {
                this.collectImplements(clause, implementsList)
            }
        }
    }

    /** Find the first type_identifier or identifier child's text. */
    private findTypeIdentifier(node: SyntaxNode): string | undefined {
        const typeNode = node.children.find(
            (c) => c.type === NodeType.TYPE_IDENTIFIER || c.type === NodeType.IDENTIFIER,
        )
        return typeNode?.text
    }

    /** Collect all identifier names from an implements clause into list. */
    private collectImplements(clause: SyntaxNode, list: string[]): void {
        for (const impl of clause.children) {
            if (impl.type === NodeType.TYPE_IDENTIFIER || impl.type === NodeType.IDENTIFIER) {
                list.push(impl.text)
            }
        }
    }

    /**
     * Extract one class method: name, line span, params, async/static
     * flags and accessibility (defaults to "public" when no modifier).
     */
    private extractMethod(node: SyntaxNode, decorators: string[] = []): MethodInfo {
        const nameNode = node.childForFieldName(FieldName.NAME)
        const params = this.extractParameters(node)
        const isAsync = node.children.some((c) => c.type === NodeType.ASYNC)
        const isStatic = node.children.some((c) => c.type === NodeType.STATIC)

        let visibility: "public" | "private" | "protected" = "public"
        for (const child of node.children) {
            if (child.type === NodeType.ACCESSIBILITY_MODIFIER) {
                visibility = child.text as "public" | "private" | "protected"
                break
            }
        }

        return {
            name: nameNode?.text ?? "",
            lineStart: node.startPosition.row + 1,
            lineEnd: node.endPosition.row + 1,
            params,
            isAsync,
            visibility,
            isStatic,
            decorators,
        }
    }

    /**
     * Extract one class field: name, declared type, accessibility and
     * static/readonly flags. Note readonly is matched by token *text*,
     * since the grammar exposes it as a keyword token.
     */
    private extractProperty(node: SyntaxNode): PropertyInfo {
        const nameNode = node.childForFieldName(FieldName.NAME)
        const typeNode = node.childForFieldName(FieldName.TYPE)
        const isStatic = node.children.some((c) => c.type === NodeType.STATIC)
        const isReadonly = node.children.some((c) => c.text === NodeType.READONLY)

        let visibility: "public" | "private" | "protected" = "public"
        for (const child of node.children) {
            if (child.type === NodeType.ACCESSIBILITY_MODIFIER) {
                visibility = child.text as "public" | "private" | "protected"
                break
            }
        }

        return {
            name: nameNode?.text ?? "",
            line: node.startPosition.row + 1,
            type: typeNode?.text,
            visibility,
            isStatic,
            isReadonly,
        }
    }

    /**
     * Extract an interface declaration: property signatures (methods and
     * index/call signatures are ignored) and extended interface names.
     */
    private extractInterface(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (!nameNode) {
            return
        }

        const body = node.childForFieldName(FieldName.BODY)
        const properties: PropertyInfo[] = []

        if (body) {
            for (const member of body.children) {
                if (member.type === NodeType.PROPERTY_SIGNATURE) {
                    const propName = member.childForFieldName(FieldName.NAME)
                    const propType = member.childForFieldName(FieldName.TYPE)
                    properties.push({
                        name: propName?.text ?? "",
                        line: member.startPosition.row + 1,
                        type: propType?.text,
                        // Interface members are always public, never static.
                        visibility: "public",
                        isStatic: false,
                        isReadonly: member.children.some((c) => c.text === NodeType.READONLY),
                    })
                }
            }
        }

        const extendsList: string[] = []
        const extendsClause = node.children.find((c) => c.type === NodeType.EXTENDS_TYPE_CLAUSE)
        if (extendsClause) {
            for (const child of extendsClause.children) {
                if (child.type === NodeType.TYPE_IDENTIFIER) {
                    extendsList.push(child.text)
                }
            }
        }

        ast.interfaces.push({
            name: nameNode.text,
            lineStart: node.startPosition.row + 1,
            lineEnd: node.endPosition.row + 1,
            properties,
            extends: extendsList,
            isExported,
        })
    }

    /**
     * Extract a type alias; `definition` is the raw source text of the
     * right-hand side.
     */
    private extractTypeAlias(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (!nameNode) {
            return
        }

        const valueNode = node.childForFieldName(FieldName.VALUE)
        const definition = valueNode?.text

        ast.typeAliases.push({
            name: nameNode.text,
            line: node.startPosition.row + 1,
            isExported,
            definition,
        })
    }

    /**
     * Extract an enum declaration with its members. Members with an
     * initializer appear as enum_assignment nodes; bare members appear
     * as identifier/property_identifier nodes with value undefined.
     */
    private extractEnum(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (!nameNode) {
            return
        }

        const body = node.childForFieldName(FieldName.BODY)
        const members: EnumMemberInfo[] = []

        if (body) {
            for (const child of body.children) {
                if (child.type === NodeType.ENUM_ASSIGNMENT) {
                    const memberName = child.childForFieldName(FieldName.NAME)
                    const memberValue = child.childForFieldName(FieldName.VALUE)
                    if (memberName) {
                        members.push({
                            name: memberName.text,
                            value: this.parseEnumValue(memberValue),
                        })
                    }
                } else if (
                    child.type === NodeType.IDENTIFIER ||
                    child.type === NodeType.PROPERTY_IDENTIFIER
                ) {
                    members.push({
                        name: child.text,
                        value: undefined,
                    })
                }
            }
        }

        // "const" has no dedicated NodeType constant; match the raw token.
        const isConst = node.children.some((c) => c.text === "const")

        ast.enums.push({
            name: nameNode.text,
            lineStart: node.startPosition.row + 1,
            lineEnd: node.endPosition.row + 1,
            members,
            isExported,
            isConst,
        })
    }

    /**
     * Convert an enum member initializer node to a number, unquoted
     * string, or (for anything else, e.g. computed values) its raw text.
     */
    private parseEnumValue(valueNode: SyntaxNode | null): string | number | undefined {
        if (!valueNode) {
            return undefined
        }

        const text = valueNode.text

        if (valueNode.type === "number") {
            return Number(text)
        }

        if (valueNode.type === "string") {
            return this.getStringValue(valueNode)
        }

        // Negative numeric literals parse as unary expressions.
        if (valueNode.type === "unary_expression" && text.startsWith("-")) {
            const num = Number(text)
            if (!isNaN(num)) {
                return num
            }
        }

        return text
    }

    /**
     * Extract the parameter list of a function/method node.
     * Handles required/optional parameters and bare identifiers
     * (JS arrow functions with a single unparenthesized param).
     */
    private extractParameters(node: SyntaxNode): ParameterInfo[] {
        const params: ParameterInfo[] = []
        const paramsNode = node.childForFieldName(FieldName.PARAMETERS)

        if (paramsNode) {
            for (const param of paramsNode.children) {
                if (
                    param.type === NodeType.REQUIRED_PARAMETER ||
                    param.type === NodeType.OPTIONAL_PARAMETER ||
                    param.type === NodeType.IDENTIFIER
                ) {
                    const nameNode =
                        param.type === NodeType.IDENTIFIER
                            ? param
                            : param.childForFieldName(FieldName.PATTERN)
                    const typeNode = param.childForFieldName(FieldName.TYPE)
                    const defaultValue = param.childForFieldName(FieldName.VALUE)

                    params.push({
                        name: nameNode?.text ?? "",
                        type: typeNode?.text,
                        optional: param.type === NodeType.OPTIONAL_PARAMETER,
                        hasDefault: defaultValue !== null,
                    })
                }
            }
        }

        return params
    }

    /**
     * Push an ExportInfo for a named declaration node; anonymous
     * declarations are skipped.
     */
    private addExportInfo(
        ast: FileAST,
        node: SyntaxNode,
        kind: ExportInfo["kind"],
        isDefault: boolean,
    ): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (nameNode) {
            ast.exports.push({
                name: nameNode.text,
                line: node.startPosition.row + 1,
                isDefault,
                kind,
            })
        }
    }

    /**
     * Format a decorator node to a string like "@Get(':id')" or "@Injectable()".
     * Collapses internal whitespace so multi-line decorators fit one line.
     */
    private formatDecorator(node: SyntaxNode): string {
        return node.text.replace(/\s+/g, " ").trim()
    }

    /**
     * Extract decorators that are direct children of a node.
     * In tree-sitter, decorators are children of the class/function declaration.
     */
    private extractNodeDecorators(node: SyntaxNode): string[] {
        const decorators: string[] = []
        for (const child of node.children) {
            if (child.type === NodeType.DECORATOR) {
                decorators.push(this.formatDecorator(child))
            }
        }
        return decorators
    }

    /**
     * Extract decorators from sibling nodes before the current node.
     * Decorators appear as children before the declaration in export statements.
     * Stops collecting once the declaration itself is reached.
     */
    private extractDecoratorsFromSiblings(node: SyntaxNode): string[] {
        const decorators: string[] = []
        const parent = node.parent
        if (!parent) {
            return decorators
        }

        for (const sibling of parent.children) {
            if (sibling.type === NodeType.DECORATOR) {
                decorators.push(this.formatDecorator(sibling))
            } else if (sibling === node) {
                break
            }
        }

        return decorators
    }

    /**
     * Classify an import specifier: relative/absolute paths are
     * "internal", node builtins (with or without the "node:" prefix)
     * are "builtin", everything else is "external".
     */
    private classifyImport(from: string): ImportInfo["type"] {
        if (from.startsWith(".") || from.startsWith("/")) {
            return "internal"
        }
        if (from.startsWith("node:") || builtinModules.includes(from)) {
            return "builtin"
        }
        return "external"
    }

    /**
     * Strip matching surrounding quotes from a string node's text.
     * Template literals and unquoted text are returned as-is.
     */
    private getStringValue(node: SyntaxNode): string {
        const text = node.text
        if (
            (text.startsWith('"') && text.endsWith('"')) ||
            (text.startsWith("'") && text.endsWith("'"))
        ) {
            return text.slice(1, -1)
        }
        return text
    }

    /**
     * Extract structure from JSON file.
     * For JSON files, we extract top-level keys from objects.
     */
    private extractJSONStructure(root: SyntaxNode, ast: FileAST): FileAST {
        for (const child of root.children) {
            if (child.type === "object") {
                this.extractJSONKeys(child, ast)
            }
        }
        return ast
    }

    /**
     * Extract keys from JSON object.
     * Each top-level key becomes an export of kind "variable".
     */
    private extractJSONKeys(node: SyntaxNode, ast: FileAST): void {
        for (const child of node.children) {
            if (child.type === "pair") {
                const keyNode = child.childForFieldName("key")
                if (keyNode) {
                    const keyName = this.getStringValue(keyNode)
                    ast.exports.push({
                        name: keyName,
                        line: keyNode.startPosition.row + 1,
                        isDefault: false,
                        kind: "variable",
                    })
                }
            }
        }
    }
}
|
||||||
216
packages/ipuaro/src/infrastructure/indexer/FileScanner.ts
Normal file
216
packages/ipuaro/src/infrastructure/indexer/FileScanner.ts
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
import * as fs from "node:fs/promises"
|
||||||
|
import type { Stats } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import { globby } from "globby"
|
||||||
|
import {
|
||||||
|
BINARY_EXTENSIONS,
|
||||||
|
DEFAULT_IGNORE_PATTERNS,
|
||||||
|
SUPPORTED_EXTENSIONS,
|
||||||
|
} from "../../domain/constants/index.js"
|
||||||
|
import type { ScanResult } from "../../domain/services/IIndexer.js"
|
||||||
|
|
||||||
|
/**
 * Progress callback for file scanning.
 * Reported once per discovered file, before it is stat'ed.
 */
export interface ScanProgress {
    // 1-based index of the file currently being processed
    current: number
    // total number of files matched by the glob
    total: number
    // path of the current file, relative to the scan root
    currentFile: string
}
|
||||||
|
|
||||||
|
/**
 * Options for FileScanner.
 */
export interface FileScannerOptions {
    /** Additional ignore patterns (besides .gitignore and defaults) */
    additionalIgnore?: string[]
    /** Only include files with these extensions. Defaults to SUPPORTED_EXTENSIONS. */
    extensions?: readonly string[]
    /** Callback for progress updates, invoked once per matched file */
    onProgress?: (progress: ScanProgress) => void
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scans project directories recursively using globby.
|
||||||
|
* Respects .gitignore, skips binary files and default ignore patterns.
|
||||||
|
*/
|
||||||
|
export class FileScanner {
|
||||||
|
    // Allowed file extensions (with leading dot), lowercase lookup set.
    private readonly extensions: Set<string>
    // Extra ignore globs supplied by the caller.
    private readonly additionalIgnore: string[]
    // Optional per-file progress callback.
    private readonly onProgress?: (progress: ScanProgress) => void

    constructor(options: FileScannerOptions = {}) {
        this.extensions = new Set(options.extensions ?? SUPPORTED_EXTENSIONS)
        this.additionalIgnore = options.additionalIgnore ?? []
        this.onProgress = options.onProgress
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build glob patterns from extensions.
|
||||||
|
*/
|
||||||
|
private buildGlobPatterns(): string[] {
|
||||||
|
const exts = [...this.extensions].map((ext) => ext.replace(".", ""))
|
||||||
|
if (exts.length === 1) {
|
||||||
|
return [`**/*.${exts[0]}`]
|
||||||
|
}
|
||||||
|
return [`**/*.{${exts.join(",")}}`]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build ignore patterns.
|
||||||
|
*/
|
||||||
|
private buildIgnorePatterns(): string[] {
|
||||||
|
const patterns = [
|
||||||
|
...DEFAULT_IGNORE_PATTERNS,
|
||||||
|
...this.additionalIgnore,
|
||||||
|
...BINARY_EXTENSIONS.map((ext) => `**/*${ext}`),
|
||||||
|
]
|
||||||
|
return patterns
|
||||||
|
}
|
||||||
|
|
||||||
|
    /**
     * Scan directory and yield file results.
     *
     * Matching is done up front by globby (respecting .gitignore and the
     * ignore patterns); each match is then lstat'ed lazily as the
     * generator is consumed. Files that disappear between globbing and
     * stat'ing are silently skipped (safeStats returns null).
     *
     * @param root - Root directory to scan
     */
    async *scan(root: string): AsyncGenerator<ScanResult> {
        const globPatterns = this.buildGlobPatterns()
        const ignorePatterns = this.buildIgnorePatterns()

        const files = await globby(globPatterns, {
            cwd: root,
            gitignore: true,
            ignore: ignorePatterns,
            absolute: false,
            onlyFiles: true,
            // Do not resolve symlinks; they are classified below via lstat.
            followSymbolicLinks: false,
        })

        const total = files.length
        let current = 0

        for (const relativePath of files) {
            current++
            // Progress is reported before stat'ing, so the callback fires
            // even for entries that are later skipped.
            this.reportProgress(relativePath, current, total)

            const fullPath = path.join(root, relativePath)
            const stats = await this.safeStats(fullPath)

            if (stats) {
                // lstat reports symlinks as themselves, so this can
                // distinguish all three kinds.
                const type = stats.isSymbolicLink()
                    ? "symlink"
                    : stats.isDirectory()
                        ? "directory"
                        : "file"

                const result: ScanResult = {
                    path: relativePath,
                    type,
                    size: stats.size,
                    lastModified: stats.mtimeMs,
                }

                if (type === "symlink") {
                    // Record the link target; unreadable links just omit it.
                    const target = await this.safeReadlink(fullPath)
                    if (target) {
                        result.symlinkTarget = target
                    }
                }

                yield result
            }
        }
    }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Scan and return all results as array.
|
||||||
|
*/
|
||||||
|
async scanAll(root: string): Promise<ScanResult[]> {
|
||||||
|
const results: ScanResult[] = []
|
||||||
|
for await (const result of this.scan(root)) {
|
||||||
|
results.push(result)
|
||||||
|
}
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * True when the file's extension (lowercased) is in the configured set.
 */
isSupportedExtension(filePath: string): boolean {
    return this.extensions.has(path.extname(filePath).toLowerCase())
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Safely get file stats without throwing.
|
||||||
|
* Uses lstat to get information about symlinks themselves.
|
||||||
|
*/
|
||||||
|
private async safeStats(filePath: string): Promise<Stats | null> {
|
||||||
|
try {
|
||||||
|
return await fs.lstat(filePath)
|
||||||
|
} catch {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Safely read symlink target without throwing.
|
||||||
|
*/
|
||||||
|
private async safeReadlink(filePath: string): Promise<string | null> {
|
||||||
|
try {
|
||||||
|
return await fs.readlink(filePath)
|
||||||
|
} catch {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Report progress if callback is set.
|
||||||
|
*/
|
||||||
|
private reportProgress(currentFile: string, current: number, total: number): void {
|
||||||
|
if (this.onProgress) {
|
||||||
|
this.onProgress({ current, total, currentFile })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if file content is likely UTF-8 text.
|
||||||
|
* Reads first 8KB and checks for null bytes.
|
||||||
|
*/
|
||||||
|
static async isTextFile(filePath: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const handle = await fs.open(filePath, "r")
|
||||||
|
try {
|
||||||
|
const buffer = Buffer.alloc(8192)
|
||||||
|
const { bytesRead } = await handle.read(buffer, 0, 8192, 0)
|
||||||
|
if (bytesRead === 0) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
for (let i = 0; i < bytesRead; i++) {
|
||||||
|
if (buffer[i] === 0) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
} finally {
|
||||||
|
await handle.close()
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read file content as string.
|
||||||
|
* Returns null if file is binary or unreadable.
|
||||||
|
*/
|
||||||
|
static async readFileContent(filePath: string): Promise<string | null> {
|
||||||
|
if (!(await FileScanner.isTextFile(filePath))) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
return await fs.readFile(filePath, "utf-8")
|
||||||
|
} catch {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
406
packages/ipuaro/src/infrastructure/indexer/IndexBuilder.ts
Normal file
406
packages/ipuaro/src/infrastructure/indexer/IndexBuilder.ts
Normal file
@@ -0,0 +1,406 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||||
|
import type { DepsGraph, SymbolIndex, SymbolLocation } from "../../domain/services/IStorage.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builds searchable indexes from parsed ASTs.
|
||||||
|
*/
|
||||||
|
export class IndexBuilder {
|
||||||
|
private readonly projectRoot: string
|
||||||
|
|
||||||
|
constructor(projectRoot: string) {
|
||||||
|
this.projectRoot = projectRoot
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build symbol index from all ASTs.
|
||||||
|
* Maps symbol names to their locations for quick lookup.
|
||||||
|
*/
|
||||||
|
buildSymbolIndex(asts: Map<string, FileAST>): SymbolIndex {
|
||||||
|
const index: SymbolIndex = new Map()
|
||||||
|
|
||||||
|
for (const [filePath, ast] of asts) {
|
||||||
|
this.indexFunctions(filePath, ast, index)
|
||||||
|
this.indexClasses(filePath, ast, index)
|
||||||
|
this.indexInterfaces(filePath, ast, index)
|
||||||
|
this.indexTypeAliases(filePath, ast, index)
|
||||||
|
this.indexExportedVariables(filePath, ast, index)
|
||||||
|
}
|
||||||
|
|
||||||
|
return index
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index function declarations.
|
||||||
|
*/
|
||||||
|
private indexFunctions(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||||
|
for (const func of ast.functions) {
|
||||||
|
this.addSymbol(index, func.name, {
|
||||||
|
path: filePath,
|
||||||
|
line: func.lineStart,
|
||||||
|
type: "function",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index class declarations.
|
||||||
|
*/
|
||||||
|
private indexClasses(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||||
|
for (const cls of ast.classes) {
|
||||||
|
this.addSymbol(index, cls.name, {
|
||||||
|
path: filePath,
|
||||||
|
line: cls.lineStart,
|
||||||
|
type: "class",
|
||||||
|
})
|
||||||
|
|
||||||
|
for (const method of cls.methods) {
|
||||||
|
const qualifiedName = `${cls.name}.${method.name}`
|
||||||
|
this.addSymbol(index, qualifiedName, {
|
||||||
|
path: filePath,
|
||||||
|
line: method.lineStart,
|
||||||
|
type: "function",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index interface declarations.
|
||||||
|
*/
|
||||||
|
private indexInterfaces(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||||
|
for (const iface of ast.interfaces) {
|
||||||
|
this.addSymbol(index, iface.name, {
|
||||||
|
path: filePath,
|
||||||
|
line: iface.lineStart,
|
||||||
|
type: "interface",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index type alias declarations.
|
||||||
|
*/
|
||||||
|
private indexTypeAliases(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||||
|
for (const typeAlias of ast.typeAliases) {
|
||||||
|
this.addSymbol(index, typeAlias.name, {
|
||||||
|
path: filePath,
|
||||||
|
line: typeAlias.line,
|
||||||
|
type: "type",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index exported variables (not functions).
|
||||||
|
*/
|
||||||
|
private indexExportedVariables(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||||
|
const functionNames = new Set(ast.functions.map((f) => f.name))
|
||||||
|
|
||||||
|
for (const exp of ast.exports) {
|
||||||
|
if (exp.kind === "variable" && !functionNames.has(exp.name)) {
|
||||||
|
this.addSymbol(index, exp.name, {
|
||||||
|
path: filePath,
|
||||||
|
line: exp.line,
|
||||||
|
type: "variable",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a symbol to the index.
|
||||||
|
*/
|
||||||
|
private addSymbol(index: SymbolIndex, name: string, location: SymbolLocation): void {
|
||||||
|
if (!name) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const existing = index.get(name)
|
||||||
|
if (existing) {
|
||||||
|
const isDuplicate = existing.some(
|
||||||
|
(loc) => loc.path === location.path && loc.line === location.line,
|
||||||
|
)
|
||||||
|
if (!isDuplicate) {
|
||||||
|
existing.push(location)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
index.set(name, [location])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build dependency graph from all ASTs.
|
||||||
|
* Creates bidirectional mapping of imports.
|
||||||
|
*/
|
||||||
|
buildDepsGraph(asts: Map<string, FileAST>): DepsGraph {
|
||||||
|
const imports = new Map<string, string[]>()
|
||||||
|
const importedBy = new Map<string, string[]>()
|
||||||
|
|
||||||
|
for (const filePath of asts.keys()) {
|
||||||
|
imports.set(filePath, [])
|
||||||
|
importedBy.set(filePath, [])
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [filePath, ast] of asts) {
|
||||||
|
const fileImports = this.resolveFileImports(filePath, ast, asts)
|
||||||
|
imports.set(filePath, fileImports)
|
||||||
|
|
||||||
|
for (const importedFile of fileImports) {
|
||||||
|
const dependents = importedBy.get(importedFile) ?? []
|
||||||
|
if (!dependents.includes(filePath)) {
|
||||||
|
dependents.push(filePath)
|
||||||
|
importedBy.set(importedFile, dependents)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [filePath, deps] of imports) {
|
||||||
|
imports.set(filePath, deps.sort())
|
||||||
|
}
|
||||||
|
for (const [filePath, deps] of importedBy) {
|
||||||
|
importedBy.set(filePath, deps.sort())
|
||||||
|
}
|
||||||
|
|
||||||
|
return { imports, importedBy }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve internal imports for a file.
|
||||||
|
*/
|
||||||
|
private resolveFileImports(
|
||||||
|
filePath: string,
|
||||||
|
ast: FileAST,
|
||||||
|
allASTs: Map<string, FileAST>,
|
||||||
|
): string[] {
|
||||||
|
const fileDir = path.dirname(filePath)
|
||||||
|
const resolvedImports: string[] = []
|
||||||
|
|
||||||
|
for (const imp of ast.imports) {
|
||||||
|
if (imp.type !== "internal") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolved = this.resolveImportPath(fileDir, imp.from, allASTs)
|
||||||
|
if (resolved && !resolvedImports.includes(resolved)) {
|
||||||
|
resolvedImports.push(resolved)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolvedImports
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve import path to actual file path.
|
||||||
|
*/
|
||||||
|
private resolveImportPath(
|
||||||
|
fromDir: string,
|
||||||
|
importPath: string,
|
||||||
|
allASTs: Map<string, FileAST>,
|
||||||
|
): string | null {
|
||||||
|
const absolutePath = path.resolve(fromDir, importPath)
|
||||||
|
|
||||||
|
const candidates = this.getImportCandidates(absolutePath)
|
||||||
|
for (const candidate of candidates) {
|
||||||
|
if (allASTs.has(candidate)) {
|
||||||
|
return candidate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate possible file paths for an import.
|
||||||
|
*/
|
||||||
|
private getImportCandidates(basePath: string): string[] {
|
||||||
|
const candidates: string[] = []
|
||||||
|
|
||||||
|
if (/\.(ts|tsx|js|jsx)$/.test(basePath)) {
|
||||||
|
candidates.push(basePath)
|
||||||
|
|
||||||
|
if (basePath.endsWith(".js")) {
|
||||||
|
candidates.push(`${basePath.slice(0, -3)}.ts`)
|
||||||
|
} else if (basePath.endsWith(".jsx")) {
|
||||||
|
candidates.push(`${basePath.slice(0, -4)}.tsx`)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
candidates.push(`${basePath}.ts`)
|
||||||
|
candidates.push(`${basePath}.tsx`)
|
||||||
|
candidates.push(`${basePath}.js`)
|
||||||
|
candidates.push(`${basePath}.jsx`)
|
||||||
|
candidates.push(`${basePath}/index.ts`)
|
||||||
|
candidates.push(`${basePath}/index.tsx`)
|
||||||
|
candidates.push(`${basePath}/index.js`)
|
||||||
|
candidates.push(`${basePath}/index.jsx`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return candidates
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find all locations of a symbol by name.
|
||||||
|
*/
|
||||||
|
findSymbol(index: SymbolIndex, name: string): SymbolLocation[] {
|
||||||
|
return index.get(name) ?? []
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find symbols matching a pattern.
|
||||||
|
*/
|
||||||
|
searchSymbols(index: SymbolIndex, pattern: string): Map<string, SymbolLocation[]> {
|
||||||
|
const results = new Map<string, SymbolLocation[]>()
|
||||||
|
const regex = new RegExp(pattern, "i")
|
||||||
|
|
||||||
|
for (const [name, locations] of index) {
|
||||||
|
if (regex.test(name)) {
|
||||||
|
results.set(name, locations)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all files that the given file depends on (imports).
|
||||||
|
*/
|
||||||
|
getDependencies(graph: DepsGraph, filePath: string): string[] {
|
||||||
|
return graph.imports.get(filePath) ?? []
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all files that depend on the given file (import it).
|
||||||
|
*/
|
||||||
|
getDependents(graph: DepsGraph, filePath: string): string[] {
|
||||||
|
return graph.importedBy.get(filePath) ?? []
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find circular dependencies in the graph.
|
||||||
|
*/
|
||||||
|
findCircularDependencies(graph: DepsGraph): string[][] {
|
||||||
|
const cycles: string[][] = []
|
||||||
|
const visited = new Set<string>()
|
||||||
|
const recursionStack = new Set<string>()
|
||||||
|
|
||||||
|
const dfs = (node: string, path: string[]): void => {
|
||||||
|
visited.add(node)
|
||||||
|
recursionStack.add(node)
|
||||||
|
path.push(node)
|
||||||
|
|
||||||
|
const deps = graph.imports.get(node) ?? []
|
||||||
|
for (const dep of deps) {
|
||||||
|
if (!visited.has(dep)) {
|
||||||
|
dfs(dep, [...path])
|
||||||
|
} else if (recursionStack.has(dep)) {
|
||||||
|
const cycleStart = path.indexOf(dep)
|
||||||
|
if (cycleStart !== -1) {
|
||||||
|
const cycle = [...path.slice(cycleStart), dep]
|
||||||
|
const normalized = this.normalizeCycle(cycle)
|
||||||
|
if (!this.cycleExists(cycles, normalized)) {
|
||||||
|
cycles.push(normalized)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
recursionStack.delete(node)
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const node of graph.imports.keys()) {
|
||||||
|
if (!visited.has(node)) {
|
||||||
|
dfs(node, [])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return cycles
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize a cycle to start with the smallest path.
|
||||||
|
*/
|
||||||
|
private normalizeCycle(cycle: string[]): string[] {
|
||||||
|
if (cycle.length <= 1) {
|
||||||
|
return cycle
|
||||||
|
}
|
||||||
|
|
||||||
|
const withoutLast = cycle.slice(0, -1)
|
||||||
|
const minIndex = withoutLast.reduce(
|
||||||
|
(minIdx, path, idx) => (path < withoutLast[minIdx] ? idx : minIdx),
|
||||||
|
0,
|
||||||
|
)
|
||||||
|
|
||||||
|
const rotated = [...withoutLast.slice(minIndex), ...withoutLast.slice(0, minIndex)]
|
||||||
|
rotated.push(rotated[0])
|
||||||
|
|
||||||
|
return rotated
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a cycle already exists in the list.
|
||||||
|
*/
|
||||||
|
private cycleExists(cycles: string[][], newCycle: string[]): boolean {
|
||||||
|
const newKey = newCycle.join("→")
|
||||||
|
return cycles.some((cycle) => cycle.join("→") === newKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get statistics about the indexes.
|
||||||
|
*/
|
||||||
|
getStats(
|
||||||
|
symbolIndex: SymbolIndex,
|
||||||
|
depsGraph: DepsGraph,
|
||||||
|
): {
|
||||||
|
totalSymbols: number
|
||||||
|
symbolsByType: Record<SymbolLocation["type"], number>
|
||||||
|
totalFiles: number
|
||||||
|
totalDependencies: number
|
||||||
|
averageDependencies: number
|
||||||
|
hubs: string[]
|
||||||
|
orphans: string[]
|
||||||
|
} {
|
||||||
|
const symbolsByType: Record<SymbolLocation["type"], number> = {
|
||||||
|
function: 0,
|
||||||
|
class: 0,
|
||||||
|
interface: 0,
|
||||||
|
type: 0,
|
||||||
|
variable: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
let totalSymbols = 0
|
||||||
|
for (const locations of symbolIndex.values()) {
|
||||||
|
totalSymbols += locations.length
|
||||||
|
for (const loc of locations) {
|
||||||
|
symbolsByType[loc.type]++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const totalFiles = depsGraph.imports.size
|
||||||
|
let totalDependencies = 0
|
||||||
|
const hubs: string[] = []
|
||||||
|
const orphans: string[] = []
|
||||||
|
|
||||||
|
for (const [_filePath, deps] of depsGraph.imports) {
|
||||||
|
totalDependencies += deps.length
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [filePath, dependents] of depsGraph.importedBy) {
|
||||||
|
if (dependents.length > 5) {
|
||||||
|
hubs.push(filePath)
|
||||||
|
}
|
||||||
|
if (dependents.length === 0 && (depsGraph.imports.get(filePath)?.length ?? 0) === 0) {
|
||||||
|
orphans.push(filePath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
totalSymbols,
|
||||||
|
symbolsByType,
|
||||||
|
totalFiles,
|
||||||
|
totalDependencies,
|
||||||
|
averageDependencies: totalFiles > 0 ? totalDependencies / totalFiles : 0,
|
||||||
|
hubs: hubs.sort(),
|
||||||
|
orphans: orphans.sort(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
615
packages/ipuaro/src/infrastructure/indexer/MetaAnalyzer.ts
Normal file
615
packages/ipuaro/src/infrastructure/indexer/MetaAnalyzer.ts
Normal file
@@ -0,0 +1,615 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import {
|
||||||
|
calculateImpactScore,
|
||||||
|
type ComplexityMetrics,
|
||||||
|
createFileMeta,
|
||||||
|
type FileMeta,
|
||||||
|
isHubFile,
|
||||||
|
} from "../../domain/value-objects/FileMeta.js"
|
||||||
|
import type { ClassInfo, FileAST, FunctionInfo } from "../../domain/value-objects/FileAST.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Analyzes file metadata including complexity, dependencies, and classification.
|
||||||
|
*/
|
||||||
|
export class MetaAnalyzer {
|
||||||
|
/** Root of the analyzed project; used to reject imports that resolve outside it. */
private readonly projectRoot: string

constructor(projectRoot: string) {
    this.projectRoot = projectRoot
}
|
||||||
|
|
||||||
|
/**
 * Compute the full metadata record for one file: complexity metrics,
 * resolved dependencies, dependents, hub/entry-point flags and file type.
 * @param filePath - Absolute path to the file
 * @param ast - Parsed AST for the file
 * @param content - Raw file content (for LOC calculation)
 * @param allASTs - Map of all file paths to their ASTs (for dependents)
 */
analyze(
    filePath: string,
    ast: FileAST,
    content: string,
    allASTs: Map<string, FileAST>,
): FileMeta {
    const dependents = this.findDependents(filePath, allASTs)

    return createFileMeta({
        complexity: this.calculateComplexity(ast, content),
        dependencies: this.resolveDependencies(filePath, ast),
        dependents,
        isHub: isHubFile(dependents.length),
        isEntryPoint: this.isEntryPointFile(filePath, dependents.length),
        fileType: this.classifyFileType(filePath),
    })
}
|
||||||
|
|
||||||
|
/**
 * Derive all complexity metrics for a file from its AST and raw text.
 */
calculateComplexity(ast: FileAST, content: string): ComplexityMetrics {
    const loc = this.countLinesOfCode(content)
    const nesting = this.calculateMaxNesting(ast)
    const cyclomaticComplexity = this.calculateCyclomaticComplexity(ast)

    return {
        loc,
        nesting,
        cyclomaticComplexity,
        score: this.calculateComplexityScore(loc, nesting, cyclomaticComplexity),
    }
}
|
||||||
|
|
||||||
|
/**
 * Count lines of code, skipping blank lines, "//" comment lines, and
 * block comments that open at the start of a (trimmed) line.
 * NOTE(review): a block comment opened mid-line, after code, is not
 * tracked — its continuation lines are counted as code. This mirrors
 * the original heuristic.
 */
countLinesOfCode(content: string): number {
    let count = 0
    let insideBlock = false

    for (const rawLine of content.split("\n")) {
        const line = rawLine.trim()

        if (insideBlock) {
            // Stay inside until the closing marker; the remainder of the
            // closing line is not counted.
            insideBlock = !line.includes("*/")
            continue
        }

        if (line.startsWith("/*")) {
            const closeAt = line.indexOf("*/")
            if (closeAt === -1) {
                insideBlock = true
                continue
            }
            // Single-line block comment: count only if real code follows it.
            const rest = line.slice(closeAt + 2).trim()
            if (rest !== "" && !rest.startsWith("//")) {
                count++
            }
            continue
        }

        if (line !== "" && !line.startsWith("//")) {
            count++
        }
    }

    return count
}
|
||||||
|
|
||||||
|
/**
 * Maximum estimated nesting depth across all functions and classes.
 * Returns 0 for files with neither.
 */
calculateMaxNesting(ast: FileAST): number {
    const depths = [
        0,
        ...ast.functions.map((fn) => this.estimateFunctionNesting(fn)),
        ...ast.classes.map((cls) => this.estimateClassNesting(cls)),
    ]
    return Math.max(...depths)
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Estimate nesting depth for a function based on line count.
|
||||||
|
* More accurate nesting would require full AST traversal.
|
||||||
|
*/
|
||||||
|
private estimateFunctionNesting(func: FunctionInfo): number {
|
||||||
|
const lines = func.lineEnd - func.lineStart + 1
|
||||||
|
if (lines <= 5) {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
if (lines <= 15) {
|
||||||
|
return 2
|
||||||
|
}
|
||||||
|
if (lines <= 30) {
|
||||||
|
return 3
|
||||||
|
}
|
||||||
|
if (lines <= 50) {
|
||||||
|
return 4
|
||||||
|
}
|
||||||
|
return 5
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Estimate nesting depth for a class.
|
||||||
|
*/
|
||||||
|
private estimateClassNesting(cls: ClassInfo): number {
|
||||||
|
let maxMethodNesting = 1
|
||||||
|
|
||||||
|
for (const method of cls.methods) {
|
||||||
|
const lines = method.lineEnd - method.lineStart + 1
|
||||||
|
let depth = 1
|
||||||
|
if (lines > 5) {
|
||||||
|
depth = 2
|
||||||
|
}
|
||||||
|
if (lines > 15) {
|
||||||
|
depth = 3
|
||||||
|
}
|
||||||
|
if (lines > 30) {
|
||||||
|
depth = 4
|
||||||
|
}
|
||||||
|
maxMethodNesting = Math.max(maxMethodNesting, depth)
|
||||||
|
}
|
||||||
|
|
||||||
|
return maxMethodNesting + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Size-based cyclomatic estimate: base 1, plus roughly one point per
 * 8 lines of free function and one per 10 lines of class method.
 */
calculateCyclomaticComplexity(ast: FileAST): number {
    let total = 1

    for (const fn of ast.functions) {
        total += this.estimateFunctionComplexity(fn)
    }

    for (const cls of ast.classes) {
        for (const method of cls.methods) {
            const lines = method.lineEnd - method.lineStart + 1
            total += Math.max(1, Math.floor(lines / 10))
        }
    }

    return total
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Estimate function complexity based on size.
|
||||||
|
*/
|
||||||
|
private estimateFunctionComplexity(func: FunctionInfo): number {
|
||||||
|
const lines = func.lineEnd - func.lineStart + 1
|
||||||
|
return Math.max(1, Math.floor(lines / 8))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Weighted 0-100 complexity score. Each component is scaled against its
 * saturation cap (500 LOC, nesting 6, cyclomatic 30), clamped to 100,
 * then combined with weights 0.3 / 0.35 / 0.35.
 */
calculateComplexityScore(loc: number, nesting: number, cyclomatic: number): number {
    const components: Array<[number, number, number]> = [
        [loc, 500, 0.3],
        [nesting, 6, 0.35],
        [cyclomatic, 30, 0.35],
    ]
    const score = components.reduce(
        (sum, [value, cap, weight]) => sum + Math.min(100, (value / cap) * 100) * weight,
        0,
    )
    return Math.round(Math.min(100, score))
}
|
||||||
|
|
||||||
|
/**
 * Resolve the file's internal imports to absolute in-project paths.
 * External/package imports are ignored; the result is deduplicated
 * and sorted.
 */
resolveDependencies(filePath: string, ast: FileAST): string[] {
    const fileDir = path.dirname(filePath)
    const resolved = new Set<string>()

    for (const imp of ast.imports) {
        if (imp.type === "internal") {
            const dep = this.resolveImportPath(fileDir, imp.from)
            if (dep) {
                resolved.add(dep)
            }
        }
    }

    return [...resolved].sort()
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve a relative import path to an absolute path.
|
||||||
|
*/
|
||||||
|
private resolveImportPath(fromDir: string, importPath: string): string | null {
|
||||||
|
const absolutePath = path.resolve(fromDir, importPath)
|
||||||
|
const normalized = this.normalizeImportPath(absolutePath)
|
||||||
|
|
||||||
|
if (normalized.startsWith(this.projectRoot)) {
|
||||||
|
return normalized
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize import path by removing file extension if present
|
||||||
|
* and handling index imports.
|
||||||
|
*/
|
||||||
|
private normalizeImportPath(importPath: string): string {
|
||||||
|
let normalized = importPath
|
||||||
|
|
||||||
|
if (normalized.endsWith(".js")) {
|
||||||
|
normalized = `${normalized.slice(0, -3)}.ts`
|
||||||
|
} else if (normalized.endsWith(".jsx")) {
|
||||||
|
normalized = `${normalized.slice(0, -4)}.tsx`
|
||||||
|
} else if (!/\.(ts|tsx|js|jsx)$/.exec(normalized)) {
|
||||||
|
normalized = `${normalized}.ts`
|
||||||
|
}
|
||||||
|
|
||||||
|
return normalized
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * List every other file whose internal imports resolve to this file.
 * Returns a sorted array of file paths.
 */
findDependents(filePath: string, allASTs: Map<string, FileAST>): string[] {
    const target = this.normalizePathForComparison(filePath)

    const dependents = [...allASTs.entries()]
        .filter(([otherPath]) => otherPath !== filePath)
        .filter(([otherPath, ast]) => this.fileImportsTarget(otherPath, ast, target))
        .map(([otherPath]) => otherPath)

    return dependents.sort()
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a file imports the target path.
|
||||||
|
*/
|
||||||
|
private fileImportsTarget(filePath: string, ast: FileAST, normalizedTarget: string): boolean {
|
||||||
|
const fileDir = path.dirname(filePath)
|
||||||
|
|
||||||
|
for (const imp of ast.imports) {
|
||||||
|
if (imp.type !== "internal") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolvedImport = this.resolveImportPath(fileDir, imp.from)
|
||||||
|
if (!resolvedImport) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedImport = this.normalizePathForComparison(resolvedImport)
|
||||||
|
if (this.pathsMatch(normalizedTarget, normalizedImport)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize path for comparison (handle index.ts and extensions).
|
||||||
|
*/
|
||||||
|
private normalizePathForComparison(filePath: string): string {
|
||||||
|
let normalized = filePath
|
||||||
|
|
||||||
|
if (normalized.endsWith(".js")) {
|
||||||
|
normalized = normalized.slice(0, -3)
|
||||||
|
} else if (normalized.endsWith(".ts")) {
|
||||||
|
normalized = normalized.slice(0, -3)
|
||||||
|
} else if (normalized.endsWith(".jsx")) {
|
||||||
|
normalized = normalized.slice(0, -4)
|
||||||
|
} else if (normalized.endsWith(".tsx")) {
|
||||||
|
normalized = normalized.slice(0, -4)
|
||||||
|
}
|
||||||
|
|
||||||
|
return normalized
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if two normalized paths match (including index.ts resolution).
|
||||||
|
*/
|
||||||
|
private pathsMatch(path1: string, path2: string): boolean {
|
||||||
|
if (path1 === path2) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (path1.endsWith("/index") && path2 === path1.slice(0, -6)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if (path2.endsWith("/index") && path1 === path2.slice(0, -6)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Classify a file by filename/path conventions, checked in priority
 * order: test, type declarations, tool configuration, source, unknown.
 */
classifyFileType(filePath: string): FileMeta["fileType"] {
    const basename = path.basename(filePath)
    const lowerPath = filePath.toLowerCase()
    const lowerName = basename.toLowerCase()

    const isTest =
        basename.includes(".test.") ||
        basename.includes(".spec.") ||
        lowerPath.includes("/tests/") ||
        lowerPath.includes("/__tests__/")
    if (isTest) {
        return "test"
    }

    if (basename.endsWith(".d.ts") || lowerPath.includes("/types/") || basename === "types.ts") {
        return "types"
    }

    const configMarkers = [
        "config",
        "tsconfig",
        "eslint",
        "prettier",
        "vitest",
        "jest",
        "babel",
        "webpack",
        "vite",
        "rollup",
    ]
    if (configMarkers.some((marker) => lowerName.includes(marker))) {
        return "config"
    }

    if (/\.(ts|tsx|js|jsx)$/.test(filePath)) {
        return "source"
    }

    return "unknown"
}
|
||||||
|
|
||||||
|
/**
 * Heuristic entry-point detection. A file counts as an entry point when:
 * - its name starts with "index." (barrel / package entry), or
 * - nothing in the project imports it (dependentCount === 0), or
 * - its name starts with a conventional entry prefix.
 *
 * Fix: "index." appeared both as a dedicated startsWith check and again
 * inside entryPatterns; the duplicate is removed. The dedicated check is
 * now lowercased so the overall match stays case-insensitive, exactly as
 * the (case-insensitive) pattern loop previously guaranteed.
 */
isEntryPointFile(filePath: string, dependentCount: number): boolean {
    const lowerName = path.basename(filePath).toLowerCase()

    if (lowerName.startsWith("index.")) {
        return true
    }

    if (dependentCount === 0) {
        return true
    }

    const entryPatterns = ["main.", "app.", "cli.", "server."]
    return entryPatterns.some((pattern) => lowerName.startsWith(pattern))
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Batch analyze multiple files.
|
||||||
|
* Computes impact scores and transitive dependencies after all files are analyzed.
|
||||||
|
*/
|
||||||
|
analyzeAll(files: Map<string, { ast: FileAST; content: string }>): Map<string, FileMeta> {
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
for (const [filePath, { ast }] of files) {
|
||||||
|
allASTs.set(filePath, ast)
|
||||||
|
}
|
||||||
|
|
||||||
|
const results = new Map<string, FileMeta>()
|
||||||
|
for (const [filePath, { ast, content }] of files) {
|
||||||
|
const meta = this.analyze(filePath, ast, content, allASTs)
|
||||||
|
results.set(filePath, meta)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compute impact scores now that we know total file count
|
||||||
|
const totalFiles = results.size
|
||||||
|
for (const [, meta] of results) {
|
||||||
|
meta.impactScore = calculateImpactScore(meta.dependents.length, totalFiles)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compute transitive dependency counts
|
||||||
|
this.computeTransitiveCounts(results)
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compute transitive dependency counts for all files.
|
||||||
|
* Uses DFS with memoization for efficiency.
|
||||||
|
*/
|
||||||
|
computeTransitiveCounts(metas: Map<string, FileMeta>): void {
|
||||||
|
// Memoization caches
|
||||||
|
const transitiveDepCache = new Map<string, Set<string>>()
|
||||||
|
const transitiveDepByCache = new Map<string, Set<string>>()
|
||||||
|
|
||||||
|
// Compute transitive dependents (files that depend on this file, directly or transitively)
|
||||||
|
for (const [filePath, meta] of metas) {
|
||||||
|
const transitiveDeps = this.getTransitiveDependents(filePath, metas, transitiveDepCache)
|
||||||
|
// Exclude the file itself from count (can happen in cycles)
|
||||||
|
meta.transitiveDepCount = transitiveDeps.has(filePath)
|
||||||
|
? transitiveDeps.size - 1
|
||||||
|
: transitiveDeps.size
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compute transitive dependencies (files this file depends on, directly or transitively)
|
||||||
|
for (const [filePath, meta] of metas) {
|
||||||
|
const transitiveDepsBy = this.getTransitiveDependencies(
|
||||||
|
filePath,
|
||||||
|
metas,
|
||||||
|
transitiveDepByCache,
|
||||||
|
)
|
||||||
|
// Exclude the file itself from count (can happen in cycles)
|
||||||
|
meta.transitiveDepByCount = transitiveDepsBy.has(filePath)
|
||||||
|
? transitiveDepsBy.size - 1
|
||||||
|
: transitiveDepsBy.size
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all files that depend on the given file transitively.
|
||||||
|
* Uses DFS with cycle detection. Caching only at the top level.
|
||||||
|
*/
|
||||||
|
getTransitiveDependents(
|
||||||
|
filePath: string,
|
||||||
|
metas: Map<string, FileMeta>,
|
||||||
|
cache: Map<string, Set<string>>,
|
||||||
|
visited?: Set<string>,
|
||||||
|
): Set<string> {
|
||||||
|
// Return cached result if available (only valid for top-level calls)
|
||||||
|
if (!visited) {
|
||||||
|
const cached = cache.get(filePath)
|
||||||
|
if (cached) {
|
||||||
|
return cached
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const isTopLevel = !visited
|
||||||
|
if (!visited) {
|
||||||
|
visited = new Set()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect cycles
|
||||||
|
if (visited.has(filePath)) {
|
||||||
|
return new Set()
|
||||||
|
}
|
||||||
|
|
||||||
|
visited.add(filePath)
|
||||||
|
const result = new Set<string>()
|
||||||
|
|
||||||
|
const meta = metas.get(filePath)
|
||||||
|
if (!meta) {
|
||||||
|
if (isTopLevel) {
|
||||||
|
cache.set(filePath, result)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add direct dependents
|
||||||
|
for (const dependent of meta.dependents) {
|
||||||
|
result.add(dependent)
|
||||||
|
|
||||||
|
// Recursively add transitive dependents
|
||||||
|
const transitive = this.getTransitiveDependents(
|
||||||
|
dependent,
|
||||||
|
metas,
|
||||||
|
cache,
|
||||||
|
new Set(visited),
|
||||||
|
)
|
||||||
|
for (const t of transitive) {
|
||||||
|
result.add(t)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only cache top-level results (not intermediate results during recursion)
|
||||||
|
if (isTopLevel) {
|
||||||
|
cache.set(filePath, result)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all files that the given file depends on transitively.
|
||||||
|
* Uses DFS with cycle detection. Caching only at the top level.
|
||||||
|
*/
|
||||||
|
getTransitiveDependencies(
|
||||||
|
filePath: string,
|
||||||
|
metas: Map<string, FileMeta>,
|
||||||
|
cache: Map<string, Set<string>>,
|
||||||
|
visited?: Set<string>,
|
||||||
|
): Set<string> {
|
||||||
|
// Return cached result if available (only valid for top-level calls)
|
||||||
|
if (!visited) {
|
||||||
|
const cached = cache.get(filePath)
|
||||||
|
if (cached) {
|
||||||
|
return cached
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const isTopLevel = !visited
|
||||||
|
if (!visited) {
|
||||||
|
visited = new Set()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect cycles
|
||||||
|
if (visited.has(filePath)) {
|
||||||
|
return new Set()
|
||||||
|
}
|
||||||
|
|
||||||
|
visited.add(filePath)
|
||||||
|
const result = new Set<string>()
|
||||||
|
|
||||||
|
const meta = metas.get(filePath)
|
||||||
|
if (!meta) {
|
||||||
|
if (isTopLevel) {
|
||||||
|
cache.set(filePath, result)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add direct dependencies
|
||||||
|
for (const dependency of meta.dependencies) {
|
||||||
|
result.add(dependency)
|
||||||
|
|
||||||
|
// Recursively add transitive dependencies
|
||||||
|
const transitive = this.getTransitiveDependencies(
|
||||||
|
dependency,
|
||||||
|
metas,
|
||||||
|
cache,
|
||||||
|
new Set(visited),
|
||||||
|
)
|
||||||
|
for (const t of transitive) {
|
||||||
|
result.add(t)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only cache top-level results (not intermediate results during recursion)
|
||||||
|
if (isTopLevel) {
|
||||||
|
cache.set(filePath, result)
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
}
|
||||||
285
packages/ipuaro/src/infrastructure/indexer/Watchdog.ts
Normal file
285
packages/ipuaro/src/infrastructure/indexer/Watchdog.ts
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
import * as chokidar from "chokidar"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import { DEFAULT_IGNORE_PATTERNS, SUPPORTED_EXTENSIONS } from "../../domain/constants/index.js"
|
||||||
|
|
||||||
|
/** Kind of filesystem change reported by the watcher. */
export type FileChangeType = "add" | "change" | "unlink"

/** A single debounced file-change notification delivered to callbacks. */
export interface FileChangeEvent {
    /** What happened to the file. */
    type: FileChangeType
    /** Resolved path of the changed file. */
    path: string
    /** Epoch milliseconds when the change was observed. */
    timestamp: number
}

/** Callback invoked once per flushed (debounced) file-change event. */
export type FileChangeCallback = (event: FileChangeEvent) => void

/** User-facing options; every field is optional and falls back to defaults. */
export interface WatchdogOptions {
    /** Debounce delay in milliseconds (default: 500) */
    debounceMs?: number
    /** Patterns to ignore (default: DEFAULT_IGNORE_PATTERNS) */
    ignorePatterns?: readonly string[]
    /** File extensions to watch (default: SUPPORTED_EXTENSIONS) */
    extensions?: readonly string[]
    /** Use polling instead of native events (useful for network drives) */
    usePolling?: boolean
    /** Polling interval in milliseconds (default: 1000) */
    pollInterval?: number
}

/** WatchdogOptions with every field filled in from defaults. */
interface ResolvedWatchdogOptions {
    debounceMs: number
    ignorePatterns: readonly string[]
    extensions: readonly string[]
    usePolling: boolean
    pollInterval: number
}

/** Fallback values merged under any user-supplied options. */
const DEFAULT_OPTIONS: ResolvedWatchdogOptions = {
    debounceMs: 500,
    ignorePatterns: DEFAULT_IGNORE_PATTERNS,
    extensions: SUPPORTED_EXTENSIONS,
    usePolling: false,
    pollInterval: 1000,
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Watches for file changes in a directory using chokidar.
|
||||||
|
*/
|
||||||
|
export class Watchdog {
|
||||||
|
private watcher: chokidar.FSWatcher | null = null
|
||||||
|
private readonly callbacks: FileChangeCallback[] = []
|
||||||
|
private readonly pendingChanges = new Map<string, FileChangeEvent>()
|
||||||
|
private readonly debounceTimers = new Map<string, NodeJS.Timeout>()
|
||||||
|
private readonly options: ResolvedWatchdogOptions
|
||||||
|
private root = ""
|
||||||
|
private isRunning = false
|
||||||
|
|
||||||
|
constructor(options: WatchdogOptions = {}) {
|
||||||
|
this.options = { ...DEFAULT_OPTIONS, ...options }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start watching a directory for file changes.
|
||||||
|
*/
|
||||||
|
start(root: string): void {
|
||||||
|
if (this.isRunning) {
|
||||||
|
void this.stop()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.root = root
|
||||||
|
this.isRunning = true
|
||||||
|
|
||||||
|
const globPatterns = this.buildGlobPatterns(root)
|
||||||
|
const ignorePatterns = this.buildIgnorePatterns()
|
||||||
|
|
||||||
|
this.watcher = chokidar.watch(globPatterns, {
|
||||||
|
ignored: ignorePatterns,
|
||||||
|
persistent: true,
|
||||||
|
ignoreInitial: true,
|
||||||
|
usePolling: this.options.usePolling,
|
||||||
|
interval: this.options.pollInterval,
|
||||||
|
awaitWriteFinish: {
|
||||||
|
stabilityThreshold: 100,
|
||||||
|
pollInterval: 100,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
this.watcher.on("add", (filePath) => {
|
||||||
|
this.handleChange("add", filePath)
|
||||||
|
})
|
||||||
|
this.watcher.on("change", (filePath) => {
|
||||||
|
this.handleChange("change", filePath)
|
||||||
|
})
|
||||||
|
this.watcher.on("unlink", (filePath) => {
|
||||||
|
this.handleChange("unlink", filePath)
|
||||||
|
})
|
||||||
|
this.watcher.on("error", (error) => {
|
||||||
|
this.handleError(error)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop watching for file changes.
|
||||||
|
*/
|
||||||
|
async stop(): Promise<void> {
|
||||||
|
if (!this.isRunning) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const timer of this.debounceTimers.values()) {
|
||||||
|
clearTimeout(timer)
|
||||||
|
}
|
||||||
|
this.debounceTimers.clear()
|
||||||
|
this.pendingChanges.clear()
|
||||||
|
|
||||||
|
if (this.watcher) {
|
||||||
|
await this.watcher.close()
|
||||||
|
this.watcher = null
|
||||||
|
}
|
||||||
|
|
||||||
|
this.isRunning = false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a callback for file change events.
|
||||||
|
*/
|
||||||
|
onFileChange(callback: FileChangeCallback): void {
|
||||||
|
this.callbacks.push(callback)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a callback.
|
||||||
|
*/
|
||||||
|
offFileChange(callback: FileChangeCallback): void {
|
||||||
|
const index = this.callbacks.indexOf(callback)
|
||||||
|
if (index !== -1) {
|
||||||
|
this.callbacks.splice(index, 1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the watchdog is currently running.
|
||||||
|
*/
|
||||||
|
isWatching(): boolean {
|
||||||
|
return this.isRunning
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the root directory being watched.
|
||||||
|
*/
|
||||||
|
getRoot(): string {
|
||||||
|
return this.root
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the number of pending changes waiting to be processed.
|
||||||
|
*/
|
||||||
|
getPendingCount(): number {
|
||||||
|
return this.pendingChanges.size
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle a file change event with debouncing.
|
||||||
|
*/
|
||||||
|
private handleChange(type: FileChangeType, filePath: string): void {
|
||||||
|
if (!this.isValidFile(filePath)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedPath = path.resolve(filePath)
|
||||||
|
|
||||||
|
const event: FileChangeEvent = {
|
||||||
|
type,
|
||||||
|
path: normalizedPath,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
this.pendingChanges.set(normalizedPath, event)
|
||||||
|
|
||||||
|
const existingTimer = this.debounceTimers.get(normalizedPath)
|
||||||
|
if (existingTimer) {
|
||||||
|
clearTimeout(existingTimer)
|
||||||
|
}
|
||||||
|
|
||||||
|
const timer = setTimeout(() => {
|
||||||
|
this.flushChange(normalizedPath)
|
||||||
|
}, this.options.debounceMs)
|
||||||
|
|
||||||
|
this.debounceTimers.set(normalizedPath, timer)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Flush a pending change and notify callbacks.
|
||||||
|
*/
|
||||||
|
private flushChange(filePath: string): void {
|
||||||
|
const event = this.pendingChanges.get(filePath)
|
||||||
|
if (!event) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.pendingChanges.delete(filePath)
|
||||||
|
this.debounceTimers.delete(filePath)
|
||||||
|
|
||||||
|
for (const callback of this.callbacks) {
|
||||||
|
try {
|
||||||
|
callback(event)
|
||||||
|
} catch {
|
||||||
|
// Silently ignore callback errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle watcher errors.
|
||||||
|
*/
|
||||||
|
private handleError(error: Error): void {
|
||||||
|
// Log error but don't crash
|
||||||
|
console.error(`[Watchdog] Error: ${error.message}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a file should be watched based on extension.
|
||||||
|
*/
|
||||||
|
private isValidFile(filePath: string): boolean {
|
||||||
|
const ext = path.extname(filePath)
|
||||||
|
return this.options.extensions.includes(ext)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build glob patterns for watching.
|
||||||
|
*/
|
||||||
|
private buildGlobPatterns(root: string): string[] {
|
||||||
|
return this.options.extensions.map((ext) => path.join(root, "**", `*${ext}`))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build ignore patterns for chokidar.
|
||||||
|
*/
|
||||||
|
private buildIgnorePatterns(): (string | RegExp)[] {
|
||||||
|
const patterns: (string | RegExp)[] = []
|
||||||
|
|
||||||
|
for (const pattern of this.options.ignorePatterns) {
|
||||||
|
if (pattern.includes("*")) {
|
||||||
|
const regexPattern = pattern
|
||||||
|
.replace(/\./g, "\\.")
|
||||||
|
.replace(/\*\*/g, ".*")
|
||||||
|
.replace(/\*/g, "[^/]*")
|
||||||
|
patterns.push(new RegExp(regexPattern))
|
||||||
|
} else {
|
||||||
|
patterns.push(`**/${pattern}/**`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return patterns
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Force flush all pending changes immediately.
|
||||||
|
*/
|
||||||
|
flushAll(): void {
|
||||||
|
for (const timer of this.debounceTimers.values()) {
|
||||||
|
clearTimeout(timer)
|
||||||
|
}
|
||||||
|
this.debounceTimers.clear()
|
||||||
|
|
||||||
|
for (const filePath of this.pendingChanges.keys()) {
|
||||||
|
this.flushChange(filePath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get watched paths (for debugging).
|
||||||
|
*/
|
||||||
|
getWatchedPaths(): string[] {
|
||||||
|
if (!this.watcher) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const watched = this.watcher.getWatched()
|
||||||
|
const paths: string[] = []
|
||||||
|
for (const dir of Object.keys(watched)) {
|
||||||
|
for (const file of watched[dir]) {
|
||||||
|
paths.push(path.join(dir, file))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return paths.sort()
|
||||||
|
}
|
||||||
|
}
|
||||||
6
packages/ipuaro/src/infrastructure/indexer/index.ts
Normal file
6
packages/ipuaro/src/infrastructure/indexer/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
export * from "./FileScanner.js"
|
||||||
|
export * from "./ASTParser.js"
|
||||||
|
export * from "./MetaAnalyzer.js"
|
||||||
|
export * from "./IndexBuilder.js"
|
||||||
|
export * from "./Watchdog.js"
|
||||||
|
export * from "./tree-sitter-types.js"
|
||||||
@@ -0,0 +1,86 @@
|
|||||||
|
/**
|
||||||
|
* Tree-sitter node type constants for TypeScript/JavaScript parsing.
|
||||||
|
* These are infrastructure-level constants, not exposed to domain/application layers.
|
||||||
|
*
|
||||||
|
* Source: tree-sitter-typescript/typescript/src/node-types.json
|
||||||
|
*/
|
||||||
|
|
||||||
|
/** Tree-sitter grammar node-type strings, grouped by syntactic category. */
export const NodeType = {
    // Statements
    IMPORT_STATEMENT: "import_statement",
    EXPORT_STATEMENT: "export_statement",
    LEXICAL_DECLARATION: "lexical_declaration",

    // Declarations
    FUNCTION_DECLARATION: "function_declaration",
    CLASS_DECLARATION: "class_declaration",
    INTERFACE_DECLARATION: "interface_declaration",
    TYPE_ALIAS_DECLARATION: "type_alias_declaration",
    ENUM_DECLARATION: "enum_declaration",

    // Clauses
    IMPORT_CLAUSE: "import_clause",
    EXPORT_CLAUSE: "export_clause",
    EXTENDS_CLAUSE: "extends_clause",
    IMPLEMENTS_CLAUSE: "implements_clause",
    EXTENDS_TYPE_CLAUSE: "extends_type_clause",
    CLASS_HERITAGE: "class_heritage",

    // Import specifiers
    NAMESPACE_IMPORT: "namespace_import",
    NAMED_IMPORTS: "named_imports",
    IMPORT_SPECIFIER: "import_specifier",
    EXPORT_SPECIFIER: "export_specifier",

    // Class members
    METHOD_DEFINITION: "method_definition",
    PUBLIC_FIELD_DEFINITION: "public_field_definition",
    FIELD_DEFINITION: "field_definition",
    PROPERTY_SIGNATURE: "property_signature",

    // Enum members
    ENUM_BODY: "enum_body",
    ENUM_ASSIGNMENT: "enum_assignment",
    PROPERTY_IDENTIFIER: "property_identifier",

    // Parameters
    REQUIRED_PARAMETER: "required_parameter",
    OPTIONAL_PARAMETER: "optional_parameter",

    // Expressions & values
    ARROW_FUNCTION: "arrow_function",
    FUNCTION: "function",
    VARIABLE_DECLARATOR: "variable_declarator",

    // Identifiers & types
    IDENTIFIER: "identifier",
    TYPE_IDENTIFIER: "type_identifier",

    // Modifiers
    ASYNC: "async",
    STATIC: "static",
    ABSTRACT: "abstract",
    DEFAULT: "default",
    ACCESSIBILITY_MODIFIER: "accessibility_modifier",
    READONLY: "readonly",

    // Decorators
    DECORATOR: "decorator",
} as const

/** Union of every node-type string literal above. */
export type NodeTypeValue = (typeof NodeType)[keyof typeof NodeType]

/** Tree-sitter field-name strings used to address named children of a node. */
export const FieldName = {
    SOURCE: "source",
    NAME: "name",
    ALIAS: "alias",
    DECLARATION: "declaration",
    PARAMETERS: "parameters",
    RETURN_TYPE: "return_type",
    BODY: "body",
    TYPE: "type",
    PATTERN: "pattern",
    VALUE: "value",
} as const

/** Union of every field-name string literal above. */
export type FieldNameValue = (typeof FieldName)[keyof typeof FieldName]
|
||||||
239
packages/ipuaro/src/infrastructure/llm/OllamaClient.ts
Normal file
239
packages/ipuaro/src/infrastructure/llm/OllamaClient.ts
Normal file
@@ -0,0 +1,239 @@
|
|||||||
|
import { type Message, Ollama } from "ollama"
|
||||||
|
import type { ILLMClient, LLMResponse } from "../../domain/services/ILLMClient.js"
|
||||||
|
import type { ChatMessage } from "../../domain/value-objects/ChatMessage.js"
|
||||||
|
import type { LLMConfig } from "../../shared/constants/config.js"
|
||||||
|
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
import { estimateTokens } from "../../shared/utils/tokens.js"
|
||||||
|
import { parseToolCalls } from "./ResponseParser.js"
|
||||||
|
|
||||||
|
/**
 * Ollama LLM client implementation.
 * Wraps the Ollama SDK for chat completions with tool support.
 *
 * Tool calls are not sent via the SDK's structured tool API on the request
 * side; they are parsed out of the response text by parseToolCalls().
 */
export class OllamaClient implements ILLMClient {
    private readonly client: Ollama
    private readonly host: string
    private readonly model: string
    private readonly contextWindow: number
    private readonly temperature: number
    // NOTE(review): `timeout` is stored from config but never read in this
    // class — confirm whether a request timeout was meant to be enforced.
    private readonly timeout: number
    private abortController: AbortController | null = null

    constructor(config: LLMConfig) {
        this.host = config.host
        this.client = new Ollama({ host: this.host })
        this.model = config.model
        this.contextWindow = config.contextWindow
        this.temperature = config.temperature
        this.timeout = config.timeout
    }

    /**
     * Send messages to LLM and get response.
     * Tool definitions should be included in the system prompt as XML format.
     *
     * NOTE(review): an AbortController is created here and abort() triggers
     * it, but its signal is never passed to `client.chat` — so abort() likely
     * does not cancel the in-flight request. Confirm against the ollama SDK's
     * cancellation API.
     */
    async chat(messages: ChatMessage[]): Promise<LLMResponse> {
        const startTime = Date.now()
        this.abortController = new AbortController()

        try {
            const ollamaMessages = this.convertMessages(messages)

            const response = await this.client.chat({
                model: this.model,
                messages: ollamaMessages,
                options: {
                    temperature: this.temperature,
                },
                stream: false,
            })

            const timeMs = Date.now() - startTime
            // Tool calls come back embedded as XML in the message text.
            const parsed = parseToolCalls(response.message.content)

            return {
                content: parsed.content,
                toolCalls: parsed.toolCalls,
                // Prefer the server-reported token count; estimate otherwise.
                tokens: response.eval_count ?? estimateTokens(response.message.content),
                timeMs,
                truncated: false,
                stopReason: this.determineStopReason(response, parsed.toolCalls),
            }
        } catch (error) {
            if (error instanceof Error && error.name === "AbortError") {
                throw IpuaroError.llm("Request was aborted")
            }
            throw this.handleError(error)
        } finally {
            this.abortController = null
        }
    }

    /**
     * Count tokens in text.
     * Uses estimation since Ollama doesn't provide a tokenizer endpoint.
     */
    async countTokens(text: string): Promise<number> {
        return Promise.resolve(estimateTokens(text))
    }

    /**
     * Check if LLM service is available (any response from /api/tags).
     */
    async isAvailable(): Promise<boolean> {
        try {
            await this.client.list()
            return true
        } catch {
            return false
        }
    }

    /**
     * Get current model name.
     */
    getModelName(): string {
        return this.model
    }

    /**
     * Get context window size (from config, not queried from the server).
     */
    getContextWindowSize(): number {
        return this.contextWindow
    }

    /**
     * Pull/download model if not available locally.
     */
    async pullModel(model: string): Promise<void> {
        try {
            await this.client.pull({ model, stream: false })
        } catch (error) {
            throw this.handleError(error, `Failed to pull model: ${model}`)
        }
    }

    /**
     * Check if a specific model is available locally.
     * Matches either the exact name or any tag of it ("name:tag").
     */
    async hasModel(model: string): Promise<boolean> {
        try {
            const result = await this.client.list()
            return result.models.some((m) => m.name === model || m.name.startsWith(`${model}:`))
        } catch {
            return false
        }
    }

    /**
     * List available models.
     */
    async listModels(): Promise<string[]> {
        try {
            const result = await this.client.list()
            return result.models.map((m) => m.name)
        } catch (error) {
            throw this.handleError(error, "Failed to list models")
        }
    }

    /**
     * Abort current generation.
     * See NOTE(review) on chat(): the signal is not wired to the request.
     */
    abort(): void {
        if (this.abortController) {
            this.abortController.abort()
        }
    }

    /**
     * Convert ChatMessage array to Ollama Message format.
     * Tool and assistant-with-tool-calls messages get special shapes.
     */
    private convertMessages(messages: ChatMessage[]): Message[] {
        return messages.map((msg): Message => {
            const role = this.convertRole(msg.role)

            if (msg.role === "tool" && msg.toolResults) {
                // Tool results are carried in the plain content field.
                return {
                    role: "tool",
                    content: msg.content,
                }
            }

            if (msg.role === "assistant" && msg.toolCalls && msg.toolCalls.length > 0) {
                return {
                    role: "assistant",
                    content: msg.content,
                    tool_calls: msg.toolCalls.map((tc) => ({
                        function: {
                            name: tc.name,
                            arguments: tc.params,
                        },
                    })),
                }
            }

            return {
                role,
                content: msg.content,
            }
        })
    }

    /**
     * Convert message role to Ollama role. Unknown roles fall back to "user".
     */
    private convertRole(role: ChatMessage["role"]): "user" | "assistant" | "system" | "tool" {
        switch (role) {
            case "user":
                return "user"
            case "assistant":
                return "assistant"
            case "system":
                return "system"
            case "tool":
                return "tool"
            default:
                return "user"
        }
    }

    /**
     * Determine stop reason from response.
     * Parsed tool calls take precedence over the server's done_reason.
     */
    private determineStopReason(
        response: { done_reason?: string },
        toolCalls: { name: string; params: Record<string, unknown> }[],
    ): "end" | "length" | "tool_use" {
        if (toolCalls.length > 0) {
            return "tool_use"
        }

        if (response.done_reason === "length") {
            return "length"
        }

        return "end"
    }

    /**
     * Handle and wrap errors into IpuaroError with actionable messages.
     */
    private handleError(error: unknown, context?: string): IpuaroError {
        const message = error instanceof Error ? error.message : String(error)
        const fullMessage = context ? `${context}: ${message}` : message

        // Connection-level failures: point the user at the configured host.
        if (message.includes("ECONNREFUSED") || message.includes("fetch failed")) {
            return IpuaroError.llm(`Cannot connect to Ollama at ${this.host}`)
        }

        // Missing-model failures: suggest the pull command.
        if (message.includes("model") && message.includes("not found")) {
            return IpuaroError.llm(
                `Model "${this.model}" not found. Run: ollama pull ${this.model}`,
            )
        }

        return IpuaroError.llm(fullMessage)
    }
}
|
||||||
265
packages/ipuaro/src/infrastructure/llm/ResponseParser.ts
Normal file
265
packages/ipuaro/src/infrastructure/llm/ResponseParser.ts
Normal file
@@ -0,0 +1,265 @@
|
|||||||
|
import { createToolCall, type ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||||
|
|
||||||
|
/**
 * Parsed response from LLM.
 */
export interface ParsedResponse {
    /** Text content (excluding tool calls) */
    content: string
    /** Extracted tool calls */
    toolCalls: ToolCall[]
    /** Whether parsing encountered issues */
    hasParseErrors: boolean
    /** Parse error messages */
    parseErrors: string[]
}

/**
 * XML tool call tag pattern.
 * Matches: <tool_call name="tool_name">...</tool_call>
 *
 * NOTE(review): this is a shared `g`-flagged RegExp — `test`/`exec` advance
 * its `lastIndex`, leaking state between call sites; prefer matchAll on a
 * fresh scan or a non-global copy for boolean checks.
 */
const TOOL_CALL_REGEX = /<tool_call\s+name\s*=\s*"([^"]+)">([\s\S]*?)<\/tool_call>/gi

/**
 * XML parameter tag pattern.
 * Matches: <param name="param_name">value</param> or <param_name>value</param_name>
 */
const PARAM_REGEX_NAMED = /<param\s+name\s*=\s*"([^"]+)">([\s\S]*?)<\/param>/gi
const PARAM_REGEX_ELEMENT = /<([a-z_][a-z0-9_]*)>([\s\S]*?)<\/\1>/gi

/**
 * CDATA section pattern.
 * Matches: <![CDATA[...]]>
 */
const CDATA_REGEX = /<!\[CDATA\[([\s\S]*?)\]\]>/g

/**
 * Valid tool names.
 * Used for validation to catch typos or hallucinations.
 */
const VALID_TOOL_NAMES = new Set([
    "get_lines",
    "get_function",
    "get_class",
    "get_structure",
    "edit_lines",
    "create_file",
    "delete_file",
    "find_references",
    "find_definition",
    "get_dependencies",
    "get_dependents",
    "get_complexity",
    "get_todos",
    "git_status",
    "git_diff",
    "git_commit",
    "run_command",
    "run_tests",
])
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse tool calls from LLM response text.
|
||||||
|
* Supports XML format: <tool_call name="get_lines"><path>src/index.ts</path></tool_call>
|
||||||
|
* Validates tool names and provides helpful error messages.
|
||||||
|
*/
|
||||||
|
export function parseToolCalls(response: string): ParsedResponse {
|
||||||
|
const toolCalls: ToolCall[] = []
|
||||||
|
const parseErrors: string[] = []
|
||||||
|
let content = response
|
||||||
|
|
||||||
|
const matches = [...response.matchAll(TOOL_CALL_REGEX)]
|
||||||
|
|
||||||
|
for (const match of matches) {
|
||||||
|
const [fullMatch, toolName, paramsXml] = match
|
||||||
|
|
||||||
|
if (!VALID_TOOL_NAMES.has(toolName)) {
|
||||||
|
parseErrors.push(
|
||||||
|
`Unknown tool "${toolName}". Valid tools: ${[...VALID_TOOL_NAMES].join(", ")}`,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const params = parseParameters(paramsXml)
|
||||||
|
const toolCall = createToolCall(
|
||||||
|
`xml_${String(Date.now())}_${String(toolCalls.length)}`,
|
||||||
|
toolName,
|
||||||
|
params,
|
||||||
|
)
|
||||||
|
toolCalls.push(toolCall)
|
||||||
|
content = content.replace(fullMatch, "")
|
||||||
|
} catch (error) {
|
||||||
|
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||||
|
parseErrors.push(`Failed to parse tool call "${toolName}": ${errorMsg}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
content = content.trim()
|
||||||
|
|
||||||
|
return {
|
||||||
|
content,
|
||||||
|
toolCalls,
|
||||||
|
hasParseErrors: parseErrors.length > 0,
|
||||||
|
parseErrors,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse parameters from XML content.
|
||||||
|
*/
|
||||||
|
function parseParameters(xml: string): Record<string, unknown> {
|
||||||
|
const params: Record<string, unknown> = {}
|
||||||
|
|
||||||
|
const namedMatches = [...xml.matchAll(PARAM_REGEX_NAMED)]
|
||||||
|
for (const match of namedMatches) {
|
||||||
|
const [, name, value] = match
|
||||||
|
params[name] = parseValue(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (namedMatches.length === 0) {
|
||||||
|
const elementMatches = [...xml.matchAll(PARAM_REGEX_ELEMENT)]
|
||||||
|
for (const match of elementMatches) {
|
||||||
|
const [, name, value] = match
|
||||||
|
params[name] = parseValue(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return params
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a value string to appropriate type.
|
||||||
|
* Supports CDATA sections for multiline content.
|
||||||
|
*/
|
||||||
|
function parseValue(value: string): unknown {
|
||||||
|
const trimmed = value.trim()
|
||||||
|
|
||||||
|
const cdataMatches = [...trimmed.matchAll(CDATA_REGEX)]
|
||||||
|
if (cdataMatches.length > 0 && cdataMatches[0][1] !== undefined) {
|
||||||
|
return cdataMatches[0][1]
|
||||||
|
}
|
||||||
|
|
||||||
|
if (trimmed === "true") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (trimmed === "false") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (trimmed === "null") {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const num = Number(trimmed)
|
||||||
|
if (!isNaN(num) && trimmed !== "") {
|
||||||
|
return num
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
(trimmed.startsWith("[") && trimmed.endsWith("]")) ||
|
||||||
|
(trimmed.startsWith("{") && trimmed.endsWith("}"))
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
return JSON.parse(trimmed)
|
||||||
|
} catch {
|
||||||
|
return trimmed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return trimmed
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format tool calls to XML for prompt injection.
|
||||||
|
* Useful when you need to show the LLM the expected format.
|
||||||
|
*/
|
||||||
|
export function formatToolCallsAsXml(toolCalls: ToolCall[]): string {
|
||||||
|
return toolCalls
|
||||||
|
.map((tc) => {
|
||||||
|
const params = Object.entries(tc.params)
|
||||||
|
.map(([key, value]) => ` <${key}>${formatValueForXml(value)}</${key}>`)
|
||||||
|
.join("\n")
|
||||||
|
return `<tool_call name="${tc.name}">\n${params}\n</tool_call>`
|
||||||
|
})
|
||||||
|
.join("\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format a value for XML output.
|
||||||
|
*/
|
||||||
|
function formatValueForXml(value: unknown): string {
|
||||||
|
if (value === null || value === undefined) {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof value === "object") {
|
||||||
|
return JSON.stringify(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof value === "string") {
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof value === "number" || typeof value === "boolean") {
|
||||||
|
return String(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
return JSON.stringify(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract thinking/reasoning from response.
|
||||||
|
* Matches content between <thinking>...</thinking> tags.
|
||||||
|
*/
|
||||||
|
export function extractThinking(response: string): { thinking: string; content: string } {
|
||||||
|
const thinkingRegex = /<thinking>([\s\S]*?)<\/thinking>/gi
|
||||||
|
const matches = [...response.matchAll(thinkingRegex)]
|
||||||
|
|
||||||
|
if (matches.length === 0) {
|
||||||
|
return { thinking: "", content: response }
|
||||||
|
}
|
||||||
|
|
||||||
|
let content = response
|
||||||
|
const thoughts: string[] = []
|
||||||
|
|
||||||
|
for (const match of matches) {
|
||||||
|
thoughts.push(match[1].trim())
|
||||||
|
content = content.replace(match[0], "")
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
thinking: thoughts.join("\n\n"),
|
||||||
|
content: content.trim(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if response contains tool calls.
|
||||||
|
*/
|
||||||
|
export function hasToolCalls(response: string): boolean {
|
||||||
|
return TOOL_CALL_REGEX.test(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate tool call parameters against expected schema.
|
||||||
|
*/
|
||||||
|
export function validateToolCallParams(
|
||||||
|
toolName: string,
|
||||||
|
params: Record<string, unknown>,
|
||||||
|
requiredParams: string[],
|
||||||
|
): { valid: boolean; errors: string[] } {
|
||||||
|
const errors: string[] = []
|
||||||
|
|
||||||
|
for (const param of requiredParams) {
|
||||||
|
if (!(param in params) || params[param] === undefined || params[param] === null) {
|
||||||
|
errors.push(`Missing required parameter: ${param}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
valid: errors.length === 0,
|
||||||
|
errors,
|
||||||
|
}
|
||||||
|
}
|
||||||
48
packages/ipuaro/src/infrastructure/llm/index.ts
Normal file
48
packages/ipuaro/src/infrastructure/llm/index.ts
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
// LLM infrastructure exports
|
||||||
|
export { OllamaClient } from "./OllamaClient.js"
|
||||||
|
export {
|
||||||
|
SYSTEM_PROMPT,
|
||||||
|
buildInitialContext,
|
||||||
|
buildFileContext,
|
||||||
|
truncateContext,
|
||||||
|
type ProjectStructure,
|
||||||
|
} from "./prompts.js"
|
||||||
|
export {
|
||||||
|
ALL_TOOLS,
|
||||||
|
READ_TOOLS,
|
||||||
|
EDIT_TOOLS,
|
||||||
|
SEARCH_TOOLS,
|
||||||
|
ANALYSIS_TOOLS,
|
||||||
|
GIT_TOOLS,
|
||||||
|
RUN_TOOLS,
|
||||||
|
CONFIRMATION_TOOLS,
|
||||||
|
requiresConfirmation,
|
||||||
|
getToolDef,
|
||||||
|
getToolsByCategory,
|
||||||
|
GET_LINES_TOOL,
|
||||||
|
GET_FUNCTION_TOOL,
|
||||||
|
GET_CLASS_TOOL,
|
||||||
|
GET_STRUCTURE_TOOL,
|
||||||
|
EDIT_LINES_TOOL,
|
||||||
|
CREATE_FILE_TOOL,
|
||||||
|
DELETE_FILE_TOOL,
|
||||||
|
FIND_REFERENCES_TOOL,
|
||||||
|
FIND_DEFINITION_TOOL,
|
||||||
|
GET_DEPENDENCIES_TOOL,
|
||||||
|
GET_DEPENDENTS_TOOL,
|
||||||
|
GET_COMPLEXITY_TOOL,
|
||||||
|
GET_TODOS_TOOL,
|
||||||
|
GIT_STATUS_TOOL,
|
||||||
|
GIT_DIFF_TOOL,
|
||||||
|
GIT_COMMIT_TOOL,
|
||||||
|
RUN_COMMAND_TOOL,
|
||||||
|
RUN_TESTS_TOOL,
|
||||||
|
} from "./toolDefs.js"
|
||||||
|
export {
|
||||||
|
parseToolCalls,
|
||||||
|
formatToolCallsAsXml,
|
||||||
|
extractThinking,
|
||||||
|
hasToolCalls,
|
||||||
|
validateToolCallParams,
|
||||||
|
type ParsedResponse,
|
||||||
|
} from "./ResponseParser.js"
|
||||||
779
packages/ipuaro/src/infrastructure/llm/prompts.ts
Normal file
779
packages/ipuaro/src/infrastructure/llm/prompts.ts
Normal file
@@ -0,0 +1,779 @@
|
|||||||
|
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||||
|
import type { FileMeta } from "../../domain/value-objects/FileMeta.js"
|
||||||
|
|
||||||
|
/**
 * Project structure for context building.
 *
 * A pre-computed snapshot of the project tree used to render the
 * "# Project" header and "## Structure" sections of the initial context.
 * `files` and `directories` hold path strings (presumably relative to
 * `rootPath` — TODO confirm against the scanner that produces this).
 */
export interface ProjectStructure {
    name: string
    rootPath: string
    files: string[]
    directories: string[]
}
|
||||||
|
|
||||||
|
/**
 * Options for building initial context.
 *
 * Every `include*` flag defaults to `true` in `buildInitialContext`.
 * `circularDeps` is a pre-computed list of dependency cycles (each cycle
 * a chain of file paths); it is rendered only when `includeCircularDeps`
 * is enabled and the list is non-empty.
 */
export interface BuildContextOptions {
    includeSignatures?: boolean
    includeDepsGraph?: boolean
    includeCircularDeps?: boolean
    includeHighImpactFiles?: boolean
    circularDeps?: string[][]
}
|
||||||
|
|
||||||
|
/**
 * System prompt for the ipuaro AI agent.
 *
 * Injected as the system message for the LLM. It documents the XML
 * `<tool_call>` format that the response parser extracts, lists the
 * available tools by category, and states the safety/editing rules.
 * Keep the tool list here in sync with `toolDefs.ts`.
 */
export const SYSTEM_PROMPT = `You are ipuaro, a local AI code assistant specialized in helping developers understand and modify their codebase. You operate within a single project directory and have access to powerful tools for reading, searching, analyzing, and editing code.

## Core Principles

1. **Lazy Loading**: You don't have the full code in context. Use tools to fetch exactly what you need.
2. **Precision**: Always verify file paths and line numbers before making changes.
3. **Safety**: Confirm destructive operations. Never execute dangerous commands.
4. **Efficiency**: Minimize context usage. Request only necessary code sections.

## Tool Calling Format

When you need to use a tool, format your call as XML:

<tool_call name="tool_name">
<param_name>value</param_name>
<another_param>value</another_param>
</tool_call>

You can call multiple tools in one response. Always wait for tool results before making conclusions.

**Examples:**

<tool_call name="get_lines">
<path>src/index.ts</path>
<start>1</start>
<end>50</end>
</tool_call>

<tool_call name="edit_lines">
<path>src/utils.ts</path>
<start>10</start>
<end>15</end>
<content>const newCode = "hello";</content>
</tool_call>

<tool_call name="find_references">
<symbol>getUserById</symbol>
</tool_call>

## Available Tools

### Reading Tools
- \`get_lines(path, start?, end?)\`: Get specific lines from a file
- \`get_function(path, name)\`: Get a function by name
- \`get_class(path, name)\`: Get a class by name
- \`get_structure(path?, depth?)\`: Get project directory structure

### Editing Tools (require confirmation)
- \`edit_lines(path, start, end, content)\`: Replace specific lines in a file
- \`create_file(path, content)\`: Create a new file
- \`delete_file(path)\`: Delete a file

### Search Tools
- \`find_references(symbol, path?)\`: Find all usages of a symbol
- \`find_definition(symbol)\`: Find where a symbol is defined

### Analysis Tools
- \`get_dependencies(path)\`: Get files this file imports
- \`get_dependents(path)\`: Get files that import this file
- \`get_complexity(path?, limit?)\`: Get complexity metrics
- \`get_todos(path?, type?)\`: Find TODO/FIXME comments

### Git Tools
- \`git_status()\`: Get repository status
- \`git_diff(path?, staged?)\`: Get uncommitted changes
- \`git_commit(message, files?)\`: Create a commit (requires confirmation)

### Run Tools
- \`run_command(command, timeout?)\`: Execute a shell command (security checked)
- \`run_tests(path?, filter?, watch?)\`: Run the test suite

## Response Guidelines

1. **Be concise**: Don't repeat information already in context.
2. **Show your work**: Explain what tools you're using and why.
3. **Verify before editing**: Always read the target code before modifying it.
4. **Handle errors gracefully**: If a tool fails, explain what went wrong and suggest alternatives.

## Code Editing Rules

1. Always use \`get_lines\` or \`get_function\` before \`edit_lines\`.
2. Provide exact line numbers for edits.
3. For large changes, break into multiple small edits.
4. After editing, suggest running tests if available.

## Safety Rules

1. Never execute commands that could harm the system.
2. Never expose sensitive data (API keys, passwords).
3. Always confirm file deletions and destructive git operations.
4. Stay within the project directory.

When you need to perform an action, use the appropriate tool. Think step by step about what information you need and which tools will provide it most efficiently.`
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build initial context from project structure and AST metadata.
|
||||||
|
* Returns a compact representation without actual code.
|
||||||
|
*/
|
||||||
|
export function buildInitialContext(
|
||||||
|
structure: ProjectStructure,
|
||||||
|
asts: Map<string, FileAST>,
|
||||||
|
metas?: Map<string, FileMeta>,
|
||||||
|
options?: BuildContextOptions,
|
||||||
|
): string {
|
||||||
|
const sections: string[] = []
|
||||||
|
const includeSignatures = options?.includeSignatures ?? true
|
||||||
|
const includeDepsGraph = options?.includeDepsGraph ?? true
|
||||||
|
const includeCircularDeps = options?.includeCircularDeps ?? true
|
||||||
|
const includeHighImpactFiles = options?.includeHighImpactFiles ?? true
|
||||||
|
|
||||||
|
sections.push(formatProjectHeader(structure))
|
||||||
|
sections.push(formatDirectoryTree(structure))
|
||||||
|
sections.push(formatFileOverview(asts, metas, includeSignatures))
|
||||||
|
|
||||||
|
if (includeDepsGraph && metas && metas.size > 0) {
|
||||||
|
const depsGraph = formatDependencyGraph(metas)
|
||||||
|
if (depsGraph) {
|
||||||
|
sections.push(depsGraph)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (includeHighImpactFiles && metas && metas.size > 0) {
|
||||||
|
const highImpactSection = formatHighImpactFiles(metas)
|
||||||
|
if (highImpactSection) {
|
||||||
|
sections.push(highImpactSection)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (includeCircularDeps && options?.circularDeps && options.circularDeps.length > 0) {
|
||||||
|
const circularDepsSection = formatCircularDeps(options.circularDeps)
|
||||||
|
if (circularDepsSection) {
|
||||||
|
sections.push(circularDepsSection)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return sections.join("\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format project header section.
|
||||||
|
*/
|
||||||
|
function formatProjectHeader(structure: ProjectStructure): string {
|
||||||
|
const fileCount = String(structure.files.length)
|
||||||
|
const dirCount = String(structure.directories.length)
|
||||||
|
return `# Project: ${structure.name}
|
||||||
|
Root: ${structure.rootPath}
|
||||||
|
Files: ${fileCount} | Directories: ${dirCount}`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format directory tree.
|
||||||
|
*/
|
||||||
|
function formatDirectoryTree(structure: ProjectStructure): string {
|
||||||
|
const lines: string[] = ["## Structure", ""]
|
||||||
|
|
||||||
|
const sortedDirs = [...structure.directories].sort()
|
||||||
|
for (const dir of sortedDirs) {
|
||||||
|
const depth = dir.split("/").length - 1
|
||||||
|
const indent = " ".repeat(depth)
|
||||||
|
const name = dir.split("/").pop() ?? dir
|
||||||
|
lines.push(`${indent}${name}/`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format file overview with AST summaries.
|
||||||
|
*/
|
||||||
|
function formatFileOverview(
|
||||||
|
asts: Map<string, FileAST>,
|
||||||
|
metas?: Map<string, FileMeta>,
|
||||||
|
includeSignatures = true,
|
||||||
|
): string {
|
||||||
|
const lines: string[] = ["## Files", ""]
|
||||||
|
|
||||||
|
const sortedPaths = [...asts.keys()].sort()
|
||||||
|
for (const path of sortedPaths) {
|
||||||
|
const ast = asts.get(path)
|
||||||
|
if (!ast) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const meta = metas?.get(path)
|
||||||
|
lines.push(formatFileSummary(path, ast, meta, includeSignatures))
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format decorators as a prefix string.
|
||||||
|
* Example: "@Get(':id') @Auth() "
|
||||||
|
*/
|
||||||
|
function formatDecoratorsPrefix(decorators: string[] | undefined): string {
|
||||||
|
if (!decorators || decorators.length === 0) {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return `${decorators.join(" ")} `
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format a function signature.
|
||||||
|
*/
|
||||||
|
function formatFunctionSignature(fn: FileAST["functions"][0]): string {
|
||||||
|
const decoratorsPrefix = formatDecoratorsPrefix(fn.decorators)
|
||||||
|
const asyncPrefix = fn.isAsync ? "async " : ""
|
||||||
|
const params = fn.params
|
||||||
|
.map((p) => {
|
||||||
|
const optional = p.optional ? "?" : ""
|
||||||
|
const type = p.type ? `: ${p.type}` : ""
|
||||||
|
return `${p.name}${optional}${type}`
|
||||||
|
})
|
||||||
|
.join(", ")
|
||||||
|
const returnType = fn.returnType ? `: ${fn.returnType}` : ""
|
||||||
|
return `${decoratorsPrefix}${asyncPrefix}${fn.name}(${params})${returnType}`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format an interface signature with fields.
|
||||||
|
* Example: "interface User extends Base { id: string, name: string, email?: string }"
|
||||||
|
*/
|
||||||
|
function formatInterfaceSignature(iface: FileAST["interfaces"][0]): string {
|
||||||
|
const extList = iface.extends ?? []
|
||||||
|
const ext = extList.length > 0 ? ` extends ${extList.join(", ")}` : ""
|
||||||
|
|
||||||
|
if (iface.properties.length === 0) {
|
||||||
|
return `interface ${iface.name}${ext}`
|
||||||
|
}
|
||||||
|
|
||||||
|
const fields = iface.properties
|
||||||
|
.map((p) => {
|
||||||
|
const readonly = p.isReadonly ? "readonly " : ""
|
||||||
|
const optional = p.name.endsWith("?") ? "" : ""
|
||||||
|
const type = p.type ? `: ${p.type}` : ""
|
||||||
|
return `${readonly}${p.name}${optional}${type}`
|
||||||
|
})
|
||||||
|
.join(", ")
|
||||||
|
|
||||||
|
return `interface ${iface.name}${ext} { ${fields} }`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format a type alias signature with definition.
|
||||||
|
* Example: "type UserId = string" or "type Handler = (event: Event) => void"
|
||||||
|
*/
|
||||||
|
function formatTypeAliasSignature(type: FileAST["typeAliases"][0]): string {
|
||||||
|
if (!type.definition) {
|
||||||
|
return `type ${type.name}`
|
||||||
|
}
|
||||||
|
|
||||||
|
const definition = truncateDefinition(type.definition, 80)
|
||||||
|
return `type ${type.name} = ${definition}`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format an enum signature with members and values.
|
||||||
|
* Example: "enum Status { Active=1, Inactive=0, Pending=2 }"
|
||||||
|
* Example: "const enum Role { Admin="admin", User="user" }"
|
||||||
|
*/
|
||||||
|
function formatEnumSignature(enumInfo: FileAST["enums"][0]): string {
|
||||||
|
const constPrefix = enumInfo.isConst ? "const " : ""
|
||||||
|
|
||||||
|
if (enumInfo.members.length === 0) {
|
||||||
|
return `${constPrefix}enum ${enumInfo.name}`
|
||||||
|
}
|
||||||
|
|
||||||
|
const membersStr = enumInfo.members
|
||||||
|
.map((m) => {
|
||||||
|
if (m.value === undefined) {
|
||||||
|
return m.name
|
||||||
|
}
|
||||||
|
const valueStr = typeof m.value === "string" ? `"${m.value}"` : String(m.value)
|
||||||
|
return `${m.name}=${valueStr}`
|
||||||
|
})
|
||||||
|
.join(", ")
|
||||||
|
|
||||||
|
const result = `${constPrefix}enum ${enumInfo.name} { ${membersStr} }`
|
||||||
|
|
||||||
|
if (result.length > 100) {
|
||||||
|
return truncateDefinition(result, 100)
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Truncate long type definitions for display.
|
||||||
|
*/
|
||||||
|
function truncateDefinition(definition: string, maxLength: number): string {
|
||||||
|
const normalized = definition.replace(/\s+/g, " ").trim()
|
||||||
|
if (normalized.length <= maxLength) {
|
||||||
|
return normalized
|
||||||
|
}
|
||||||
|
return `${normalized.slice(0, maxLength - 3)}...`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format a single file's AST summary.
|
||||||
|
* When includeSignatures is true, shows full function signatures.
|
||||||
|
* When false, shows compact format with just names.
|
||||||
|
*/
|
||||||
|
function formatFileSummary(
|
||||||
|
path: string,
|
||||||
|
ast: FileAST,
|
||||||
|
meta?: FileMeta,
|
||||||
|
includeSignatures = true,
|
||||||
|
): string {
|
||||||
|
const flags = formatFileFlags(meta)
|
||||||
|
|
||||||
|
if (!includeSignatures) {
|
||||||
|
return formatFileSummaryCompact(path, ast, flags)
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines: string[] = []
|
||||||
|
lines.push(`### ${path}${flags}`)
|
||||||
|
|
||||||
|
if (ast.functions.length > 0) {
|
||||||
|
for (const fn of ast.functions) {
|
||||||
|
lines.push(`- ${formatFunctionSignature(fn)}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.classes.length > 0) {
|
||||||
|
for (const cls of ast.classes) {
|
||||||
|
const decoratorsPrefix = formatDecoratorsPrefix(cls.decorators)
|
||||||
|
const ext = cls.extends ? ` extends ${cls.extends}` : ""
|
||||||
|
const impl = cls.implements.length > 0 ? ` implements ${cls.implements.join(", ")}` : ""
|
||||||
|
lines.push(`- ${decoratorsPrefix}class ${cls.name}${ext}${impl}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.interfaces.length > 0) {
|
||||||
|
for (const iface of ast.interfaces) {
|
||||||
|
lines.push(`- ${formatInterfaceSignature(iface)}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.typeAliases.length > 0) {
|
||||||
|
for (const type of ast.typeAliases) {
|
||||||
|
lines.push(`- ${formatTypeAliasSignature(type)}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.enums && ast.enums.length > 0) {
|
||||||
|
for (const enumInfo of ast.enums) {
|
||||||
|
lines.push(`- ${formatEnumSignature(enumInfo)}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (lines.length === 1) {
|
||||||
|
return `- ${path}${flags}`
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format file summary in compact mode (just names, no signatures).
|
||||||
|
*/
|
||||||
|
function formatFileSummaryCompact(path: string, ast: FileAST, flags: string): string {
|
||||||
|
const parts: string[] = []
|
||||||
|
|
||||||
|
if (ast.functions.length > 0) {
|
||||||
|
const names = ast.functions.map((f) => f.name).join(", ")
|
||||||
|
parts.push(`fn: ${names}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.classes.length > 0) {
|
||||||
|
const names = ast.classes.map((c) => c.name).join(", ")
|
||||||
|
parts.push(`class: ${names}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.interfaces.length > 0) {
|
||||||
|
const names = ast.interfaces.map((i) => i.name).join(", ")
|
||||||
|
parts.push(`interface: ${names}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.typeAliases.length > 0) {
|
||||||
|
const names = ast.typeAliases.map((t) => t.name).join(", ")
|
||||||
|
parts.push(`type: ${names}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (ast.enums && ast.enums.length > 0) {
|
||||||
|
const names = ast.enums.map((e) => e.name).join(", ")
|
||||||
|
parts.push(`enum: ${names}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const summary = parts.length > 0 ? ` [${parts.join(" | ")}]` : ""
|
||||||
|
return `- ${path}${summary}${flags}`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format file metadata flags.
|
||||||
|
*/
|
||||||
|
function formatFileFlags(meta?: FileMeta): string {
|
||||||
|
if (!meta) {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
const flags: string[] = []
|
||||||
|
|
||||||
|
if (meta.isHub) {
|
||||||
|
flags.push("hub")
|
||||||
|
}
|
||||||
|
|
||||||
|
if (meta.isEntryPoint) {
|
||||||
|
flags.push("entry")
|
||||||
|
}
|
||||||
|
|
||||||
|
if (meta.complexity.score > 70) {
|
||||||
|
flags.push("complex")
|
||||||
|
}
|
||||||
|
|
||||||
|
return flags.length > 0 ? ` (${flags.join(", ")})` : ""
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Shorten a file path for display in dependency graph.
|
||||||
|
* Removes common prefixes like "src/" and file extensions.
|
||||||
|
*/
|
||||||
|
function shortenPath(path: string): string {
|
||||||
|
let short = path
|
||||||
|
if (short.startsWith("src/")) {
|
||||||
|
short = short.slice(4)
|
||||||
|
}
|
||||||
|
// Remove common extensions
|
||||||
|
short = short.replace(/\.(ts|tsx|js|jsx)$/, "")
|
||||||
|
// Remove /index suffix
|
||||||
|
short = short.replace(/\/index$/, "")
|
||||||
|
return short
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format a single dependency graph entry.
|
||||||
|
* Format: "path: → dep1, dep2 ← dependent1, dependent2"
|
||||||
|
*/
|
||||||
|
function formatDepsEntry(path: string, dependencies: string[], dependents: string[]): string {
|
||||||
|
const parts: string[] = []
|
||||||
|
const shortPath = shortenPath(path)
|
||||||
|
|
||||||
|
if (dependencies.length > 0) {
|
||||||
|
const deps = dependencies.map(shortenPath).join(", ")
|
||||||
|
parts.push(`→ ${deps}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dependents.length > 0) {
|
||||||
|
const deps = dependents.map(shortenPath).join(", ")
|
||||||
|
parts.push(`← ${deps}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parts.length === 0) {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
return `${shortPath}: ${parts.join(" ")}`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format dependency graph for all files.
|
||||||
|
* Shows hub files first, then files with dependencies/dependents.
|
||||||
|
*
|
||||||
|
* Format:
|
||||||
|
* ## Dependency Graph
|
||||||
|
* services/user: → types/user, utils/validation ← controllers/user
|
||||||
|
* services/auth: → services/user, utils/jwt ← controllers/auth
|
||||||
|
*/
|
||||||
|
export function formatDependencyGraph(metas: Map<string, FileMeta>): string | null {
|
||||||
|
if (metas.size === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const entries: { path: string; deps: string[]; dependents: string[]; isHub: boolean }[] = []
|
||||||
|
|
||||||
|
for (const [path, meta] of metas) {
|
||||||
|
// Only include files that have connections
|
||||||
|
if (meta.dependencies.length > 0 || meta.dependents.length > 0) {
|
||||||
|
entries.push({
|
||||||
|
path,
|
||||||
|
deps: meta.dependencies,
|
||||||
|
dependents: meta.dependents,
|
||||||
|
isHub: meta.isHub,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (entries.length === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort: hubs first, then by total connections (desc), then by path
|
||||||
|
entries.sort((a, b) => {
|
||||||
|
if (a.isHub !== b.isHub) {
|
||||||
|
return a.isHub ? -1 : 1
|
||||||
|
}
|
||||||
|
const aTotal = a.deps.length + a.dependents.length
|
||||||
|
const bTotal = b.deps.length + b.dependents.length
|
||||||
|
if (aTotal !== bTotal) {
|
||||||
|
return bTotal - aTotal
|
||||||
|
}
|
||||||
|
return a.path.localeCompare(b.path)
|
||||||
|
})
|
||||||
|
|
||||||
|
const lines: string[] = ["## Dependency Graph", ""]
|
||||||
|
|
||||||
|
for (const entry of entries) {
|
||||||
|
const line = formatDepsEntry(entry.path, entry.deps, entry.dependents)
|
||||||
|
if (line) {
|
||||||
|
lines.push(line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return null if only header (no actual entries)
|
||||||
|
if (lines.length <= 2) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format circular dependencies for display in context.
|
||||||
|
* Shows warning section with cycle chains.
|
||||||
|
*
|
||||||
|
* Format:
|
||||||
|
* ## ⚠️ Circular Dependencies
|
||||||
|
* - services/user → services/auth → services/user
|
||||||
|
* - utils/a → utils/b → utils/c → utils/a
|
||||||
|
*/
|
||||||
|
export function formatCircularDeps(cycles: string[][]): string | null {
|
||||||
|
if (!cycles || cycles.length === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines: string[] = ["## ⚠️ Circular Dependencies", ""]
|
||||||
|
|
||||||
|
for (const cycle of cycles) {
|
||||||
|
if (cycle.length === 0) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const formattedCycle = cycle.map(shortenPath).join(" → ")
|
||||||
|
lines.push(`- ${formattedCycle}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return null if only header (no actual cycles)
|
||||||
|
if (lines.length <= 2) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format high impact files table for display in context.
|
||||||
|
* Shows files with highest impact scores (most dependents).
|
||||||
|
*
|
||||||
|
* Format:
|
||||||
|
* ## High Impact Files
|
||||||
|
* | File | Impact | Dependents |
|
||||||
|
* |------|--------|------------|
|
||||||
|
* | src/utils/validation.ts | 67% | 12 files |
|
||||||
|
*
|
||||||
|
* @param metas - Map of file paths to their metadata
|
||||||
|
* @param limit - Maximum number of files to show (default: 10)
|
||||||
|
* @param minImpact - Minimum impact score to include (default: 5)
|
||||||
|
*/
|
||||||
|
export function formatHighImpactFiles(
|
||||||
|
metas: Map<string, FileMeta>,
|
||||||
|
limit = 10,
|
||||||
|
minImpact = 5,
|
||||||
|
): string | null {
|
||||||
|
if (metas.size === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collect files with impact score >= minImpact
|
||||||
|
const impactFiles: { path: string; impact: number; dependents: number }[] = []
|
||||||
|
|
||||||
|
for (const [path, meta] of metas) {
|
||||||
|
if (meta.impactScore >= minImpact) {
|
||||||
|
impactFiles.push({
|
||||||
|
path,
|
||||||
|
impact: meta.impactScore,
|
||||||
|
dependents: meta.dependents.length,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (impactFiles.length === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort by impact score descending, then by path
|
||||||
|
impactFiles.sort((a, b) => {
|
||||||
|
if (a.impact !== b.impact) {
|
||||||
|
return b.impact - a.impact
|
||||||
|
}
|
||||||
|
return a.path.localeCompare(b.path)
|
||||||
|
})
|
||||||
|
|
||||||
|
// Take top N files
|
||||||
|
const topFiles = impactFiles.slice(0, limit)
|
||||||
|
|
||||||
|
const lines: string[] = [
|
||||||
|
"## High Impact Files",
|
||||||
|
"",
|
||||||
|
"| File | Impact | Dependents |",
|
||||||
|
"|------|--------|------------|",
|
||||||
|
]
|
||||||
|
|
||||||
|
for (const file of topFiles) {
|
||||||
|
const shortPath = shortenPath(file.path)
|
||||||
|
const impact = `${String(file.impact)}%`
|
||||||
|
const dependents = file.dependents === 1 ? "1 file" : `${String(file.dependents)} files`
|
||||||
|
lines.push(`| ${shortPath} | ${impact} | ${dependents} |`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format line range for display.
|
||||||
|
*/
|
||||||
|
function formatLineRange(start: number, end: number): string {
|
||||||
|
return `[${String(start)}-${String(end)}]`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format imports section.
|
||||||
|
*/
|
||||||
|
function formatImportsSection(ast: FileAST): string[] {
|
||||||
|
if (ast.imports.length === 0) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const lines = ["### Imports"]
|
||||||
|
for (const imp of ast.imports) {
|
||||||
|
lines.push(`- ${imp.name} from "${imp.from}" (${imp.type})`)
|
||||||
|
}
|
||||||
|
lines.push("")
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format exports section.
|
||||||
|
*/
|
||||||
|
function formatExportsSection(ast: FileAST): string[] {
|
||||||
|
if (ast.exports.length === 0) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const lines = ["### Exports"]
|
||||||
|
for (const exp of ast.exports) {
|
||||||
|
const defaultMark = exp.isDefault ? " (default)" : ""
|
||||||
|
lines.push(`- ${exp.kind} ${exp.name}${defaultMark}`)
|
||||||
|
}
|
||||||
|
lines.push("")
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format functions section.
|
||||||
|
*/
|
||||||
|
function formatFunctionsSection(ast: FileAST): string[] {
|
||||||
|
if (ast.functions.length === 0) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const lines = ["### Functions"]
|
||||||
|
for (const fn of ast.functions) {
|
||||||
|
const params = fn.params.map((p) => p.name).join(", ")
|
||||||
|
const asyncMark = fn.isAsync ? "async " : ""
|
||||||
|
const range = formatLineRange(fn.lineStart, fn.lineEnd)
|
||||||
|
lines.push(`- ${asyncMark}${fn.name}(${params}) ${range}`)
|
||||||
|
}
|
||||||
|
lines.push("")
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format classes section.
|
||||||
|
*/
|
||||||
|
function formatClassesSection(ast: FileAST): string[] {
|
||||||
|
if (ast.classes.length === 0) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const lines = ["### Classes"]
|
||||||
|
for (const cls of ast.classes) {
|
||||||
|
const ext = cls.extends ? ` extends ${cls.extends}` : ""
|
||||||
|
const impl = cls.implements.length > 0 ? ` implements ${cls.implements.join(", ")}` : ""
|
||||||
|
const range = formatLineRange(cls.lineStart, cls.lineEnd)
|
||||||
|
lines.push(`- ${cls.name}${ext}${impl} ${range}`)
|
||||||
|
|
||||||
|
for (const method of cls.methods) {
|
||||||
|
const vis = method.visibility === "public" ? "" : `${method.visibility} `
|
||||||
|
const methodRange = formatLineRange(method.lineStart, method.lineEnd)
|
||||||
|
lines.push(` - ${vis}${method.name}() ${methodRange}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
lines.push("")
|
||||||
|
return lines
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format metadata section.
|
||||||
|
*/
|
||||||
|
function formatMetadataSection(meta: FileMeta): string[] {
|
||||||
|
const loc = String(meta.complexity.loc)
|
||||||
|
const score = String(meta.complexity.score)
|
||||||
|
const deps = String(meta.dependencies.length)
|
||||||
|
const dependents = String(meta.dependents.length)
|
||||||
|
return [
|
||||||
|
"### Metadata",
|
||||||
|
`- LOC: ${loc}`,
|
||||||
|
`- Complexity: ${score}/100`,
|
||||||
|
`- Dependencies: ${deps}`,
|
||||||
|
`- Dependents: ${dependents}`,
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build context for a specific file request.
|
||||||
|
*/
|
||||||
|
export function buildFileContext(path: string, ast: FileAST, meta?: FileMeta): string {
|
||||||
|
const lines: string[] = [`## ${path}`, ""]
|
||||||
|
|
||||||
|
lines.push(...formatImportsSection(ast))
|
||||||
|
lines.push(...formatExportsSection(ast))
|
||||||
|
lines.push(...formatFunctionsSection(ast))
|
||||||
|
lines.push(...formatClassesSection(ast))
|
||||||
|
|
||||||
|
if (meta) {
|
||||||
|
lines.push(...formatMetadataSection(meta))
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Truncate context to fit within token budget.
|
||||||
|
*/
|
||||||
|
export function truncateContext(context: string, maxTokens: number): string {
|
||||||
|
const charsPerToken = 4
|
||||||
|
const maxChars = maxTokens * charsPerToken
|
||||||
|
|
||||||
|
if (context.length <= maxChars) {
|
||||||
|
return context
|
||||||
|
}
|
||||||
|
|
||||||
|
const truncated = context.slice(0, maxChars - 100)
|
||||||
|
const lastNewline = truncated.lastIndexOf("\n")
|
||||||
|
const remaining = String(context.length - lastNewline)
|
||||||
|
|
||||||
|
return `${truncated.slice(0, lastNewline)}\n\n... (truncated, ${remaining} chars remaining)`
|
||||||
|
}
|
||||||
511
packages/ipuaro/src/infrastructure/llm/toolDefs.ts
Normal file
511
packages/ipuaro/src/infrastructure/llm/toolDefs.ts
Normal file
@@ -0,0 +1,511 @@
|
|||||||
|
import type { ToolDef } from "../../shared/types/tool-definitions.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool definitions for ipuaro LLM.
|
||||||
|
* 18 tools across 6 categories: read, edit, search, analysis, git, run.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/*
 * =============================================================================
 * Read Tools (4)
 * =============================================================================
 */

/** Read a specific line range from a file, or the whole file when no range is given. */
export const GET_LINES_TOOL: ToolDef = {
    name: "get_lines",
    description:
        "Get specific lines from a file. Returns the content with line numbers. " +
        "If no range is specified, returns the entire file.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "start",
            type: "number",
            description: "Start line number (1-based, inclusive)",
            required: false,
        },
        {
            name: "end",
            type: "number",
            description: "End line number (1-based, inclusive)",
            required: false,
        },
    ],
}

/** Fetch a single function's source by name, located via the AST. */
export const GET_FUNCTION_TOOL: ToolDef = {
    name: "get_function",
    description:
        "Get a function's source code by name. Uses AST to find exact line range. " +
        "Returns the function code with line numbers.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "name",
            type: "string",
            description: "Function name to retrieve",
            required: true,
        },
    ],
}

/** Fetch a single class's source by name, located via the AST. */
export const GET_CLASS_TOOL: ToolDef = {
    name: "get_class",
    description:
        "Get a class's source code by name. Uses AST to find exact line range. " +
        "Returns the class code with line numbers.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "name",
            type: "string",
            description: "Class name to retrieve",
            required: true,
        },
    ],
}

/** Render the project (or one subdirectory) as a directory tree. */
export const GET_STRUCTURE_TOOL: ToolDef = {
    name: "get_structure",
    description:
        "Get project directory structure as a tree. " +
        "If path is specified, shows structure of that subdirectory only.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Subdirectory path relative to project root (optional, defaults to root)",
            required: false,
        },
        {
            name: "depth",
            type: "number",
            description: "Maximum depth to traverse (default: unlimited)",
            required: false,
        },
    ],
}
|
||||||
|
|
||||||
|
/*
 * =============================================================================
 * Edit Tools (3) - All require confirmation
 * =============================================================================
 */

/** Replace a line range with new content; shows a diff and confirms before applying. */
export const EDIT_LINES_TOOL: ToolDef = {
    name: "edit_lines",
    description:
        "Replace lines in a file with new content. Requires reading the file first. " +
        "Will show diff and ask for confirmation before applying.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "start",
            type: "number",
            description: "Start line number (1-based, inclusive) to replace",
            required: true,
        },
        {
            name: "end",
            type: "number",
            description: "End line number (1-based, inclusive) to replace",
            required: true,
        },
        {
            name: "content",
            type: "string",
            description: "New content to insert (can be multiple lines)",
            required: true,
        },
    ],
}

/** Create a new file; fails when the file already exists. */
export const CREATE_FILE_TOOL: ToolDef = {
    name: "create_file",
    description:
        "Create a new file with specified content. " +
        "Will fail if file already exists. Will ask for confirmation.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
        {
            name: "content",
            type: "string",
            description: "File content",
            required: true,
        },
    ],
}

/** Delete a file; the previous content is preserved on the undo stack. */
export const DELETE_FILE_TOOL: ToolDef = {
    name: "delete_file",
    description:
        "Delete a file from the project. " +
        "Will ask for confirmation. Previous content is saved to undo stack.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
    ],
}
|
||||||
|
|
||||||
|
/*
 * =============================================================================
 * Search Tools (2)
 * =============================================================================
 */

/** Find every usage of a symbol, optionally scoped to a file or directory. */
export const FIND_REFERENCES_TOOL: ToolDef = {
    name: "find_references",
    description:
        "Find all usages of a symbol across the codebase. " +
        "Returns list of file paths, line numbers, and context.",
    parameters: [
        {
            name: "symbol",
            type: "string",
            description: "Symbol name to search for (function, class, variable, etc.)",
            required: true,
        },
        {
            name: "path",
            type: "string",
            description: "Limit search to specific file or directory",
            required: false,
        },
    ],
}

/** Locate the definition site of a symbol. */
export const FIND_DEFINITION_TOOL: ToolDef = {
    name: "find_definition",
    description:
        "Find where a symbol is defined. " + "Returns file path, line number, and symbol type.",
    parameters: [
        {
            name: "symbol",
            type: "string",
            description: "Symbol name to find definition for",
            required: true,
        },
    ],
}
|
||||||
|
|
||||||
|
/*
 * =============================================================================
 * Analysis Tools (4)
 * =============================================================================
 */

/** List the internal files this file imports. */
export const GET_DEPENDENCIES_TOOL: ToolDef = {
    name: "get_dependencies",
    description:
        "Get files that this file imports (internal dependencies). " +
        "Returns list of imported file paths.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
    ],
}

/** List the files that import this file (reverse dependencies). */
export const GET_DEPENDENTS_TOOL: ToolDef = {
    name: "get_dependents",
    description:
        "Get files that import this file (reverse dependencies). " +
        "Returns list of file paths that depend on this file.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
    ],
}

/** Complexity metrics for one file, or a ranked list for the whole project. */
export const GET_COMPLEXITY_TOOL: ToolDef = {
    name: "get_complexity",
    description:
        "Get complexity metrics for a file or the entire project. " +
        "Returns LOC, nesting depth, cyclomatic complexity, and overall score.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "File path (optional, defaults to all files sorted by complexity)",
            required: false,
        },
        {
            name: "limit",
            type: "number",
            description: "Max files to return when showing all (default: 10)",
            required: false,
        },
    ],
}

/** Scan for TODO/FIXME/HACK/XXX comments, optionally filtered by type. */
export const GET_TODOS_TOOL: ToolDef = {
    name: "get_todos",
    description:
        "Find TODO, FIXME, HACK, and XXX comments in the codebase. " +
        "Returns list with file paths, line numbers, and comment text.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Limit search to specific file or directory",
            required: false,
        },
        {
            name: "type",
            type: "string",
            description: "Filter by comment type",
            required: false,
            enum: ["TODO", "FIXME", "HACK", "XXX"],
        },
    ],
}
|
||||||
|
|
||||||
|
/*
 * =============================================================================
 * Git Tools (3)
 * =============================================================================
 */

/** Report the current branch and staged/modified/untracked files. */
export const GIT_STATUS_TOOL: ToolDef = {
    name: "git_status",
    description:
        "Get current git repository status. " +
        "Returns branch name, staged files, modified files, and untracked files.",
    parameters: [],
}

/** Show uncommitted changes, optionally restricted to staged ones or a path. */
export const GIT_DIFF_TOOL: ToolDef = {
    name: "git_diff",
    description:
        "Get uncommitted changes (diff). " + "Shows what has changed but not yet committed.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Limit diff to specific file or directory",
            required: false,
        },
        {
            name: "staged",
            type: "boolean",
            description: "Show only staged changes (default: false, shows all)",
            required: false,
        },
    ],
}

/** Create a commit (asks for confirmation); may stage specific files first. */
export const GIT_COMMIT_TOOL: ToolDef = {
    name: "git_commit",
    description:
        "Create a git commit with the specified message. " +
        "Will ask for confirmation. Optionally stage specific files first.",
    parameters: [
        {
            name: "message",
            type: "string",
            description: "Commit message",
            required: true,
        },
        {
            name: "files",
            type: "array",
            description: "Files to stage before commit (optional, defaults to all staged)",
            required: false,
        },
    ],
}
|
||||||
|
|
||||||
|
/*
 * =============================================================================
 * Run Tools (2)
 * =============================================================================
 */

/** Execute a shell command, subject to blacklist/whitelist checks. */
export const RUN_COMMAND_TOOL: ToolDef = {
    name: "run_command",
    description:
        "Execute a shell command in the project directory. " +
        "Commands are checked against blacklist/whitelist for security. " +
        "Unknown commands require user confirmation.",
    parameters: [
        {
            name: "command",
            type: "string",
            description: "Shell command to execute",
            required: true,
        },
        {
            name: "timeout",
            type: "number",
            description: "Timeout in milliseconds (default: 30000)",
            required: false,
        },
    ],
}

/** Run the project's test suite with an auto-detected runner. */
export const RUN_TESTS_TOOL: ToolDef = {
    name: "run_tests",
    description:
        "Run the project's test suite. Auto-detects test runner (vitest, jest, npm test). " +
        "Returns test results summary.",
    parameters: [
        {
            name: "path",
            type: "string",
            description: "Run tests for specific file or directory",
            required: false,
        },
        {
            name: "filter",
            type: "string",
            description: "Filter tests by name pattern",
            required: false,
        },
        {
            name: "watch",
            type: "boolean",
            description: "Run in watch mode (default: false)",
            required: false,
        },
    ],
}
|
||||||
|
|
||||||
|
/*
 * =============================================================================
 * Tool Collection
 * =============================================================================
 */

/**
 * All read tools (no confirmation required).
 */
export const READ_TOOLS: ToolDef[] = [
    GET_LINES_TOOL,
    GET_FUNCTION_TOOL,
    GET_CLASS_TOOL,
    GET_STRUCTURE_TOOL,
]

/**
 * All edit tools (require confirmation).
 */
export const EDIT_TOOLS: ToolDef[] = [EDIT_LINES_TOOL, CREATE_FILE_TOOL, DELETE_FILE_TOOL]

/**
 * All search tools (no confirmation required).
 */
export const SEARCH_TOOLS: ToolDef[] = [FIND_REFERENCES_TOOL, FIND_DEFINITION_TOOL]

/**
 * All analysis tools (no confirmation required).
 */
export const ANALYSIS_TOOLS: ToolDef[] = [
    GET_DEPENDENCIES_TOOL,
    GET_DEPENDENTS_TOOL,
    GET_COMPLEXITY_TOOL,
    GET_TODOS_TOOL,
]

/**
 * All git tools (git_commit requires confirmation).
 */
export const GIT_TOOLS: ToolDef[] = [GIT_STATUS_TOOL, GIT_DIFF_TOOL, GIT_COMMIT_TOOL]

/**
 * All run tools (run_command may require confirmation).
 */
export const RUN_TOOLS: ToolDef[] = [RUN_COMMAND_TOOL, RUN_TESTS_TOOL]

/**
 * All 18 tool definitions
 * (4 read + 3 edit + 2 search + 4 analysis + 3 git + 2 run).
 */
export const ALL_TOOLS: ToolDef[] = [
    ...READ_TOOLS,
    ...EDIT_TOOLS,
    ...SEARCH_TOOLS,
    ...ANALYSIS_TOOLS,
    ...GIT_TOOLS,
    ...RUN_TOOLS,
]
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tools that require user confirmation before execution.
|
||||||
|
*/
|
||||||
|
export const CONFIRMATION_TOOLS = new Set([
|
||||||
|
"edit_lines",
|
||||||
|
"create_file",
|
||||||
|
"delete_file",
|
||||||
|
"git_commit",
|
||||||
|
])
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a tool requires confirmation.
|
||||||
|
*/
|
||||||
|
export function requiresConfirmation(toolName: string): boolean {
|
||||||
|
return CONFIRMATION_TOOLS.has(toolName)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool definition by name.
|
||||||
|
*/
|
||||||
|
export function getToolDef(name: string): ToolDef | undefined {
|
||||||
|
return ALL_TOOLS.find((t) => t.name === name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool definitions by category.
|
||||||
|
*/
|
||||||
|
export function getToolsByCategory(category: string): ToolDef[] {
|
||||||
|
switch (category) {
|
||||||
|
case "read":
|
||||||
|
return READ_TOOLS
|
||||||
|
case "edit":
|
||||||
|
return EDIT_TOOLS
|
||||||
|
case "search":
|
||||||
|
return SEARCH_TOOLS
|
||||||
|
case "analysis":
|
||||||
|
return ANALYSIS_TOOLS
|
||||||
|
case "git":
|
||||||
|
return GIT_TOOLS
|
||||||
|
case "run":
|
||||||
|
return RUN_TOOLS
|
||||||
|
default:
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
}
|
||||||
293
packages/ipuaro/src/infrastructure/security/PathValidator.ts
Normal file
293
packages/ipuaro/src/infrastructure/security/PathValidator.ts
Normal file
@@ -0,0 +1,293 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
|
||||||
|
/**
 * Path validation result classification.
 * "valid" — path resolves inside the project root (and passed any
 * existence/type checks); "invalid" — empty, traversal-like, or failed an
 * existence/type check; "outside_project" — resolves outside the root.
 */
export type PathValidationStatus = "valid" | "invalid" | "outside_project"

/**
 * Result of path validation.
 */
export interface PathValidationResult {
    /** Validation status */
    status: PathValidationStatus
    /** Human-readable reason for the status */
    reason: string
    /** Normalized absolute path (only if valid) */
    absolutePath?: string
    /** Normalized path relative to the project root (only if valid) */
    relativePath?: string
}

/**
 * Options for path validation.
 */
export interface PathValidatorOptions {
    /** Allow paths that don't exist yet (for create operations) */
    allowNonExistent?: boolean
    /** Check if path is a directory */
    requireDirectory?: boolean
    /** Check if path is a file */
    requireFile?: boolean
    /** Follow symlinks when checking existence (stat vs lstat) */
    followSymlinks?: boolean
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Path validator for ensuring file operations stay within project boundaries.
|
||||||
|
* Prevents path traversal attacks and unauthorized file access.
|
||||||
|
*/
|
||||||
|
export class PathValidator {
|
||||||
|
private readonly projectRoot: string
|
||||||
|
|
||||||
|
constructor(projectRoot: string) {
|
||||||
|
this.projectRoot = path.resolve(projectRoot)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate a path and return detailed result.
|
||||||
|
* @param inputPath - Path to validate (relative or absolute)
|
||||||
|
* @param options - Validation options
|
||||||
|
*/
|
||||||
|
async validate(
|
||||||
|
inputPath: string,
|
||||||
|
options: PathValidatorOptions = {},
|
||||||
|
): Promise<PathValidationResult> {
|
||||||
|
if (!inputPath || inputPath.trim() === "") {
|
||||||
|
return {
|
||||||
|
status: "invalid",
|
||||||
|
reason: "Path is empty",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedInput = inputPath.trim()
|
||||||
|
|
||||||
|
if (this.containsTraversalPatterns(normalizedInput)) {
|
||||||
|
return {
|
||||||
|
status: "invalid",
|
||||||
|
reason: "Path contains traversal patterns",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const absolutePath = path.resolve(this.projectRoot, normalizedInput)
|
||||||
|
|
||||||
|
if (!this.isWithinProject(absolutePath)) {
|
||||||
|
return {
|
||||||
|
status: "outside_project",
|
||||||
|
reason: "Path is outside project root",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const relativePath = path.relative(this.projectRoot, absolutePath)
|
||||||
|
|
||||||
|
if (!options.allowNonExistent) {
|
||||||
|
const existsResult = await this.checkExists(absolutePath, options)
|
||||||
|
if (existsResult) {
|
||||||
|
return existsResult
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: "valid",
|
||||||
|
reason: "Path is valid",
|
||||||
|
absolutePath,
|
||||||
|
relativePath,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Synchronous validation for simple checks.
|
||||||
|
* Does not check file existence or type.
|
||||||
|
* @param inputPath - Path to validate (relative or absolute)
|
||||||
|
*/
|
||||||
|
validateSync(inputPath: string): PathValidationResult {
|
||||||
|
if (!inputPath || inputPath.trim() === "") {
|
||||||
|
return {
|
||||||
|
status: "invalid",
|
||||||
|
reason: "Path is empty",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedInput = inputPath.trim()
|
||||||
|
|
||||||
|
if (this.containsTraversalPatterns(normalizedInput)) {
|
||||||
|
return {
|
||||||
|
status: "invalid",
|
||||||
|
reason: "Path contains traversal patterns",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const absolutePath = path.resolve(this.projectRoot, normalizedInput)
|
||||||
|
|
||||||
|
if (!this.isWithinProject(absolutePath)) {
|
||||||
|
return {
|
||||||
|
status: "outside_project",
|
||||||
|
reason: "Path is outside project root",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const relativePath = path.relative(this.projectRoot, absolutePath)
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: "valid",
|
||||||
|
reason: "Path is valid",
|
||||||
|
absolutePath,
|
||||||
|
relativePath,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Quick check if path is within project.
|
||||||
|
* @param inputPath - Path to check (relative or absolute)
|
||||||
|
*/
|
||||||
|
isWithin(inputPath: string): boolean {
|
||||||
|
if (!inputPath || inputPath.trim() === "") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedInput = inputPath.trim()
|
||||||
|
|
||||||
|
if (this.containsTraversalPatterns(normalizedInput)) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
const absolutePath = path.resolve(this.projectRoot, normalizedInput)
|
||||||
|
return this.isWithinProject(absolutePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve a path relative to project root.
|
||||||
|
* Returns null if path would be outside project.
|
||||||
|
* @param inputPath - Path to resolve
|
||||||
|
*/
|
||||||
|
resolve(inputPath: string): string | null {
|
||||||
|
const result = this.validateSync(inputPath)
|
||||||
|
return result.status === "valid" ? (result.absolutePath ?? null) : null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve a path or throw an error if invalid.
|
||||||
|
* @param inputPath - Path to resolve
|
||||||
|
* @returns Tuple of [absolutePath, relativePath]
|
||||||
|
* @throws Error if path is invalid
|
||||||
|
*/
|
||||||
|
resolveOrThrow(inputPath: string): [absolutePath: string, relativePath: string] {
|
||||||
|
const result = this.validateSync(inputPath)
|
||||||
|
if (result.status !== "valid" || result.absolutePath === undefined) {
|
||||||
|
throw new Error(result.reason)
|
||||||
|
}
|
||||||
|
return [result.absolutePath, result.relativePath ?? ""]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get relative path from project root.
|
||||||
|
* Returns null if path would be outside project.
|
||||||
|
* @param inputPath - Path to make relative
|
||||||
|
*/
|
||||||
|
relativize(inputPath: string): string | null {
|
||||||
|
const result = this.validateSync(inputPath)
|
||||||
|
return result.status === "valid" ? (result.relativePath ?? null) : null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the project root path.
|
||||||
|
*/
|
||||||
|
getProjectRoot(): string {
|
||||||
|
return this.projectRoot
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if path contains directory traversal patterns.
|
||||||
|
*/
|
||||||
|
private containsTraversalPatterns(inputPath: string): boolean {
|
||||||
|
const normalized = inputPath.replace(/\\/g, "/")
|
||||||
|
|
||||||
|
if (normalized.includes("..")) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (normalized.startsWith("~")) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if absolute path is within project root.
|
||||||
|
*/
|
||||||
|
private isWithinProject(absolutePath: string): boolean {
|
||||||
|
const normalizedProject = this.projectRoot.replace(/\\/g, "/")
|
||||||
|
const normalizedPath = absolutePath.replace(/\\/g, "/")
|
||||||
|
|
||||||
|
if (normalizedPath === normalizedProject) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
const projectWithSep = normalizedProject.endsWith("/")
|
||||||
|
? normalizedProject
|
||||||
|
: `${normalizedProject}/`
|
||||||
|
|
||||||
|
return normalizedPath.startsWith(projectWithSep)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check file existence and type.
|
||||||
|
*/
|
||||||
|
private async checkExists(
|
||||||
|
absolutePath: string,
|
||||||
|
options: PathValidatorOptions,
|
||||||
|
): Promise<PathValidationResult | null> {
|
||||||
|
try {
|
||||||
|
const statFn = options.followSymlinks ? fs.stat : fs.lstat
|
||||||
|
const stats = await statFn(absolutePath)
|
||||||
|
|
||||||
|
if (options.requireDirectory && !stats.isDirectory()) {
|
||||||
|
return {
|
||||||
|
status: "invalid",
|
||||||
|
reason: "Path is not a directory",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (options.requireFile && !stats.isFile()) {
|
||||||
|
return {
|
||||||
|
status: "invalid",
|
||||||
|
reason: "Path is not a file",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
} catch (error) {
|
||||||
|
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||||
|
return {
|
||||||
|
status: "invalid",
|
||||||
|
reason: "Path does not exist",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: "invalid",
|
||||||
|
reason: `Cannot access path: ${(error as Error).message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a path validator for a project.
|
||||||
|
* @param projectRoot - Root directory of the project
|
||||||
|
*/
|
||||||
|
export function createPathValidator(projectRoot: string): PathValidator {
|
||||||
|
return new PathValidator(projectRoot)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Standalone function for quick path validation.
|
||||||
|
* @param inputPath - Path to validate
|
||||||
|
* @param projectRoot - Project root directory
|
||||||
|
*/
|
||||||
|
export function validatePath(inputPath: string, projectRoot: string): boolean {
|
||||||
|
const validator = new PathValidator(projectRoot)
|
||||||
|
return validator.isWithin(inputPath)
|
||||||
|
}
|
||||||
9
packages/ipuaro/src/infrastructure/security/index.ts
Normal file
9
packages/ipuaro/src/infrastructure/security/index.ts
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
// Security module exports
|
||||||
|
export {
|
||||||
|
PathValidator,
|
||||||
|
createPathValidator,
|
||||||
|
validatePath,
|
||||||
|
type PathValidationResult,
|
||||||
|
type PathValidationStatus,
|
||||||
|
type PathValidatorOptions,
|
||||||
|
} from "./PathValidator.js"
|
||||||
119
packages/ipuaro/src/infrastructure/storage/RedisClient.ts
Normal file
119
packages/ipuaro/src/infrastructure/storage/RedisClient.ts
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
import { Redis } from "ioredis"
|
||||||
|
import type { RedisConfig } from "../../shared/constants/config.js"
|
||||||
|
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
|
||||||
|
/**
 * Redis client wrapper with connection management.
 * Handles connection lifecycle and AOF configuration.
 */
export class RedisClient {
    // Underlying ioredis connection; null whenever disconnected.
    private client: Redis | null = null
    private readonly config: RedisConfig
    private connected = false

    constructor(config: RedisConfig) {
        this.config = config
    }

    /**
     * Connect to Redis server.
     * Configures AOF persistence on successful connection.
     * No-op when already connected; on failure the wrapper is reset to a
     * clean disconnected state and an IpuaroError is thrown.
     */
    async connect(): Promise<void> {
        if (this.connected && this.client) {
            return
        }

        try {
            this.client = new Redis({
                host: this.config.host,
                port: this.config.port,
                db: this.config.db,
                password: this.config.password,
                keyPrefix: this.config.keyPrefix,
                // Defer the TCP connection until the explicit connect() below.
                lazyConnect: true,
                // Back off 200ms per attempt (capped at 1s); returning null
                // after 3 attempts stops ioredis from retrying further.
                retryStrategy: (times: number): number | null => {
                    if (times > 3) {
                        return null
                    }
                    return Math.min(times * 200, 1000)
                },
                maxRetriesPerRequest: 3,
                enableReadyCheck: true,
            })

            await this.client.connect()
            await this.configureAOF()
            this.connected = true
        } catch (error) {
            // Reset state before rethrowing so a later connect() starts fresh.
            this.connected = false
            this.client = null
            const message = error instanceof Error ? error.message : "Unknown error"
            throw IpuaroError.redis(`Failed to connect to Redis: ${message}`)
        }
    }

    /**
     * Disconnect from Redis server.
     * Graceful quit; safe to call when already disconnected.
     */
    async disconnect(): Promise<void> {
        if (this.client) {
            await this.client.quit()
            this.client = null
            this.connected = false
        }
    }

    /**
     * Check if connected to Redis (including ioredis "ready" status).
     */
    isConnected(): boolean {
        return this.connected && this.client !== null && this.client.status === "ready"
    }

    /**
     * Get the underlying Redis client.
     * @throws IpuaroError if not connected
     */
    getClient(): Redis {
        if (!this.client || !this.connected) {
            throw IpuaroError.redis("Redis client is not connected")
        }
        return this.client
    }

    /**
     * Execute a health check ping.
     * @returns true only when the server answers "PONG"; false on any error
     *          or when no client exists.
     */
    async ping(): Promise<boolean> {
        if (!this.client) {
            return false
        }
        try {
            const result = await this.client.ping()
            return result === "PONG"
        } catch {
            return false
        }
    }

    /**
     * Configure AOF (Append Only File) persistence.
     * AOF provides better durability by logging every write operation.
     * Failures are deliberately swallowed (see comment below).
     */
    private async configureAOF(): Promise<void> {
        if (!this.client) {
            return
        }

        try {
            await this.client.config("SET", "appendonly", "yes")
            await this.client.config("SET", "appendfsync", "everysec")
        } catch {
            /*
             * AOF config may fail if Redis doesn't allow CONFIG SET.
             * This is non-fatal - persistence will still work with default settings.
             */
        }
    }
}
|
||||||
@@ -0,0 +1,225 @@
|
|||||||
|
import type { ISessionStorage, SessionListItem } from "../../domain/services/ISessionStorage.js"
|
||||||
|
import { type ContextState, Session, type SessionStats } from "../../domain/entities/Session.js"
|
||||||
|
import type { ChatMessage } from "../../domain/value-objects/ChatMessage.js"
|
||||||
|
import type { UndoEntry } from "../../domain/value-objects/UndoEntry.js"
|
||||||
|
import { MAX_UNDO_STACK_SIZE } from "../../domain/constants/index.js"
|
||||||
|
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
import { RedisClient } from "./RedisClient.js"
|
||||||
|
import { SessionFields, SessionKeys } from "./schema.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Redis implementation of ISessionStorage.
|
||||||
|
* Stores session data in Redis hashes and lists.
|
||||||
|
*/
|
||||||
|
export class RedisSessionStorage implements ISessionStorage {
|
||||||
|
private readonly client: RedisClient
|
||||||
|
|
||||||
|
constructor(client: RedisClient) {
|
||||||
|
this.client = client
|
||||||
|
}
|
||||||
|
|
||||||
|
async saveSession(session: Session): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const dataKey = SessionKeys.data(session.id)
|
||||||
|
|
||||||
|
const pipeline = redis.pipeline()
|
||||||
|
|
||||||
|
pipeline.hset(dataKey, SessionFields.projectName, session.projectName)
|
||||||
|
pipeline.hset(dataKey, SessionFields.createdAt, String(session.createdAt))
|
||||||
|
pipeline.hset(dataKey, SessionFields.lastActivityAt, String(session.lastActivityAt))
|
||||||
|
pipeline.hset(dataKey, SessionFields.history, JSON.stringify(session.history))
|
||||||
|
pipeline.hset(dataKey, SessionFields.context, JSON.stringify(session.context))
|
||||||
|
pipeline.hset(dataKey, SessionFields.stats, JSON.stringify(session.stats))
|
||||||
|
pipeline.hset(dataKey, SessionFields.inputHistory, JSON.stringify(session.inputHistory))
|
||||||
|
|
||||||
|
await this.addToSessionsList(session.id)
|
||||||
|
|
||||||
|
await pipeline.exec()
|
||||||
|
}
|
||||||
|
|
||||||
|
async loadSession(sessionId: string): Promise<Session | null> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const dataKey = SessionKeys.data(sessionId)
|
||||||
|
|
||||||
|
const data = await redis.hgetall(dataKey)
|
||||||
|
if (!data || Object.keys(data).length === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const session = new Session(
|
||||||
|
sessionId,
|
||||||
|
data[SessionFields.projectName],
|
||||||
|
Number(data[SessionFields.createdAt]),
|
||||||
|
)
|
||||||
|
|
||||||
|
session.lastActivityAt = Number(data[SessionFields.lastActivityAt])
|
||||||
|
session.history = this.parseJSON(data[SessionFields.history], "history") as ChatMessage[]
|
||||||
|
session.context = this.parseJSON(data[SessionFields.context], "context") as ContextState
|
||||||
|
session.stats = this.parseJSON(data[SessionFields.stats], "stats") as SessionStats
|
||||||
|
session.inputHistory = this.parseJSON(
|
||||||
|
data[SessionFields.inputHistory],
|
||||||
|
"inputHistory",
|
||||||
|
) as string[]
|
||||||
|
|
||||||
|
const undoStack = await this.getUndoStack(sessionId)
|
||||||
|
for (const entry of undoStack) {
|
||||||
|
session.undoStack.push(entry)
|
||||||
|
}
|
||||||
|
|
||||||
|
return session
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteSession(sessionId: string): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
|
||||||
|
await Promise.all([
|
||||||
|
redis.del(SessionKeys.data(sessionId)),
|
||||||
|
redis.del(SessionKeys.undo(sessionId)),
|
||||||
|
redis.lrem(SessionKeys.list, 0, sessionId),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
|
||||||
|
async listSessions(projectName?: string): Promise<SessionListItem[]> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const sessionIds = await redis.lrange(SessionKeys.list, 0, -1)
|
||||||
|
|
||||||
|
const sessions: SessionListItem[] = []
|
||||||
|
|
||||||
|
for (const id of sessionIds) {
|
||||||
|
const data = await redis.hgetall(SessionKeys.data(id))
|
||||||
|
if (!data || Object.keys(data).length === 0) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const sessionProjectName = data[SessionFields.projectName]
|
||||||
|
if (projectName && sessionProjectName !== projectName) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const history = this.parseJSON(data[SessionFields.history], "history") as ChatMessage[]
|
||||||
|
|
||||||
|
sessions.push({
|
||||||
|
id,
|
||||||
|
projectName: sessionProjectName,
|
||||||
|
createdAt: Number(data[SessionFields.createdAt]),
|
||||||
|
lastActivityAt: Number(data[SessionFields.lastActivityAt]),
|
||||||
|
messageCount: history.length,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
sessions.sort((a, b) => b.lastActivityAt - a.lastActivityAt)
|
||||||
|
|
||||||
|
return sessions
|
||||||
|
}
|
||||||
|
|
||||||
|
async getLatestSession(projectName: string): Promise<Session | null> {
|
||||||
|
const sessions = await this.listSessions(projectName)
|
||||||
|
if (sessions.length === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.loadSession(sessions[0].id)
|
||||||
|
}
|
||||||
|
|
||||||
|
async sessionExists(sessionId: string): Promise<boolean> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const exists = await redis.exists(SessionKeys.data(sessionId))
|
||||||
|
return exists === 1
|
||||||
|
}
|
||||||
|
|
||||||
|
async pushUndoEntry(sessionId: string, entry: UndoEntry): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const undoKey = SessionKeys.undo(sessionId)
|
||||||
|
|
||||||
|
await redis.rpush(undoKey, JSON.stringify(entry))
|
||||||
|
|
||||||
|
const length = await redis.llen(undoKey)
|
||||||
|
if (length > MAX_UNDO_STACK_SIZE) {
|
||||||
|
await redis.lpop(undoKey)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async popUndoEntry(sessionId: string): Promise<UndoEntry | null> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const undoKey = SessionKeys.undo(sessionId)
|
||||||
|
|
||||||
|
const data = await redis.rpop(undoKey)
|
||||||
|
if (!data) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.parseJSON(data, "UndoEntry") as UndoEntry
|
||||||
|
}
|
||||||
|
|
||||||
|
async getUndoStack(sessionId: string): Promise<UndoEntry[]> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const undoKey = SessionKeys.undo(sessionId)
|
||||||
|
|
||||||
|
const entries = await redis.lrange(undoKey, 0, -1)
|
||||||
|
return entries.map((entry) => this.parseJSON(entry, "UndoEntry") as UndoEntry)
|
||||||
|
}
|
||||||
|
|
||||||
|
async touchSession(sessionId: string): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await redis.hset(
|
||||||
|
SessionKeys.data(sessionId),
|
||||||
|
SessionFields.lastActivityAt,
|
||||||
|
String(Date.now()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
async clearAllSessions(): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const sessionIds = await redis.lrange(SessionKeys.list, 0, -1)
|
||||||
|
|
||||||
|
const pipeline = redis.pipeline()
|
||||||
|
for (const id of sessionIds) {
|
||||||
|
pipeline.del(SessionKeys.data(id))
|
||||||
|
pipeline.del(SessionKeys.undo(id))
|
||||||
|
}
|
||||||
|
pipeline.del(SessionKeys.list)
|
||||||
|
|
||||||
|
await pipeline.exec()
|
||||||
|
}
|
||||||
|
|
||||||
|
private async addToSessionsList(sessionId: string): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
|
||||||
|
const exists = await redis.lpos(SessionKeys.list, sessionId)
|
||||||
|
if (exists === null) {
|
||||||
|
await redis.lpush(SessionKeys.list, sessionId)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private getRedis(): ReturnType<RedisClient["getClient"]> {
|
||||||
|
return this.client.getClient()
|
||||||
|
}
|
||||||
|
|
||||||
|
private parseJSON(data: string | undefined, type: string): unknown {
|
||||||
|
if (!data) {
|
||||||
|
if (type === "history" || type === "inputHistory") {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
if (type === "context") {
|
||||||
|
return { filesInContext: [], tokenUsage: 0, needsCompression: false }
|
||||||
|
}
|
||||||
|
if (type === "stats") {
|
||||||
|
return {
|
||||||
|
totalTokens: 0,
|
||||||
|
totalTimeMs: 0,
|
||||||
|
toolCalls: 0,
|
||||||
|
editsApplied: 0,
|
||||||
|
editsRejected: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
return JSON.parse(data) as unknown
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : "Unknown error"
|
||||||
|
throw IpuaroError.parse(`Failed to parse ${type}: ${message}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
236
packages/ipuaro/src/infrastructure/storage/RedisStorage.ts
Normal file
236
packages/ipuaro/src/infrastructure/storage/RedisStorage.ts
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
import type { DepsGraph, IStorage, SymbolIndex } from "../../domain/services/IStorage.js"
|
||||||
|
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||||
|
import type { FileData } from "../../domain/value-objects/FileData.js"
|
||||||
|
import type { FileMeta } from "../../domain/value-objects/FileMeta.js"
|
||||||
|
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
import { RedisClient } from "./RedisClient.js"
|
||||||
|
import { IndexFields, ProjectKeys } from "./schema.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Redis implementation of IStorage.
|
||||||
|
* Stores project data (files, AST, meta, indexes) in Redis hashes.
|
||||||
|
*/
|
||||||
|
export class RedisStorage implements IStorage {
|
||||||
|
private readonly client: RedisClient
|
||||||
|
private readonly projectName: string
|
||||||
|
|
||||||
|
constructor(client: RedisClient, projectName: string) {
|
||||||
|
this.client = client
|
||||||
|
this.projectName = projectName
|
||||||
|
}
|
||||||
|
|
||||||
|
async getFile(path: string): Promise<FileData | null> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hget(ProjectKeys.files(this.projectName), path)
|
||||||
|
if (!data) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return this.parseJSON(data, "FileData") as FileData
|
||||||
|
}
|
||||||
|
|
||||||
|
async setFile(path: string, data: FileData): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await redis.hset(ProjectKeys.files(this.projectName), path, JSON.stringify(data))
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteFile(path: string): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await redis.hdel(ProjectKeys.files(this.projectName), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
async getAllFiles(): Promise<Map<string, FileData>> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hgetall(ProjectKeys.files(this.projectName))
|
||||||
|
const result = new Map<string, FileData>()
|
||||||
|
|
||||||
|
for (const [path, value] of Object.entries(data)) {
|
||||||
|
const parsed = this.parseJSON(value, "FileData") as FileData | null
|
||||||
|
if (parsed) {
|
||||||
|
result.set(path, parsed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
async getFileCount(): Promise<number> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
return redis.hlen(ProjectKeys.files(this.projectName))
|
||||||
|
}
|
||||||
|
|
||||||
|
async getAST(path: string): Promise<FileAST | null> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hget(ProjectKeys.ast(this.projectName), path)
|
||||||
|
if (!data) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return this.parseJSON(data, "FileAST") as FileAST
|
||||||
|
}
|
||||||
|
|
||||||
|
async setAST(path: string, ast: FileAST): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await redis.hset(ProjectKeys.ast(this.projectName), path, JSON.stringify(ast))
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteAST(path: string): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await redis.hdel(ProjectKeys.ast(this.projectName), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
async getAllASTs(): Promise<Map<string, FileAST>> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hgetall(ProjectKeys.ast(this.projectName))
|
||||||
|
const result = new Map<string, FileAST>()
|
||||||
|
|
||||||
|
for (const [path, value] of Object.entries(data)) {
|
||||||
|
const parsed = this.parseJSON(value, "FileAST") as FileAST | null
|
||||||
|
if (parsed) {
|
||||||
|
result.set(path, parsed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
async getMeta(path: string): Promise<FileMeta | null> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hget(ProjectKeys.meta(this.projectName), path)
|
||||||
|
if (!data) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return this.parseJSON(data, "FileMeta") as FileMeta
|
||||||
|
}
|
||||||
|
|
||||||
|
async setMeta(path: string, meta: FileMeta): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await redis.hset(ProjectKeys.meta(this.projectName), path, JSON.stringify(meta))
|
||||||
|
}
|
||||||
|
|
||||||
|
async deleteMeta(path: string): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await redis.hdel(ProjectKeys.meta(this.projectName), path)
|
||||||
|
}
|
||||||
|
|
||||||
|
async getAllMetas(): Promise<Map<string, FileMeta>> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hgetall(ProjectKeys.meta(this.projectName))
|
||||||
|
const result = new Map<string, FileMeta>()
|
||||||
|
|
||||||
|
for (const [path, value] of Object.entries(data)) {
|
||||||
|
const parsed = this.parseJSON(value, "FileMeta") as FileMeta | null
|
||||||
|
if (parsed) {
|
||||||
|
result.set(path, parsed)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
async getSymbolIndex(): Promise<SymbolIndex> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hget(ProjectKeys.indexes(this.projectName), IndexFields.symbols)
|
||||||
|
if (!data) {
|
||||||
|
return new Map()
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = this.parseJSON(data, "SymbolIndex") as [string, unknown[]][] | null
|
||||||
|
if (!parsed) {
|
||||||
|
return new Map()
|
||||||
|
}
|
||||||
|
|
||||||
|
return new Map(parsed) as SymbolIndex
|
||||||
|
}
|
||||||
|
|
||||||
|
async setSymbolIndex(index: SymbolIndex): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const serialized = JSON.stringify([...index.entries()])
|
||||||
|
await redis.hset(ProjectKeys.indexes(this.projectName), IndexFields.symbols, serialized)
|
||||||
|
}
|
||||||
|
|
||||||
|
async getDepsGraph(): Promise<DepsGraph> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hget(ProjectKeys.indexes(this.projectName), IndexFields.depsGraph)
|
||||||
|
if (!data) {
|
||||||
|
return {
|
||||||
|
imports: new Map(),
|
||||||
|
importedBy: new Map(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = this.parseJSON(data, "DepsGraph") as {
|
||||||
|
imports: [string, string[]][]
|
||||||
|
importedBy: [string, string[]][]
|
||||||
|
} | null
|
||||||
|
|
||||||
|
if (!parsed) {
|
||||||
|
return {
|
||||||
|
imports: new Map(),
|
||||||
|
importedBy: new Map(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
imports: new Map(parsed.imports),
|
||||||
|
importedBy: new Map(parsed.importedBy),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async setDepsGraph(graph: DepsGraph): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const serialized = JSON.stringify({
|
||||||
|
imports: [...graph.imports.entries()],
|
||||||
|
importedBy: [...graph.importedBy.entries()],
|
||||||
|
})
|
||||||
|
await redis.hset(ProjectKeys.indexes(this.projectName), IndexFields.depsGraph, serialized)
|
||||||
|
}
|
||||||
|
|
||||||
|
async getProjectConfig(key: string): Promise<unknown> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
const data = await redis.hget(ProjectKeys.config(this.projectName), key)
|
||||||
|
if (!data) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return this.parseJSON(data, "ProjectConfig")
|
||||||
|
}
|
||||||
|
|
||||||
|
async setProjectConfig(key: string, value: unknown): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await redis.hset(ProjectKeys.config(this.projectName), key, JSON.stringify(value))
|
||||||
|
}
|
||||||
|
|
||||||
|
async connect(): Promise<void> {
|
||||||
|
await this.client.connect()
|
||||||
|
}
|
||||||
|
|
||||||
|
async disconnect(): Promise<void> {
|
||||||
|
await this.client.disconnect()
|
||||||
|
}
|
||||||
|
|
||||||
|
isConnected(): boolean {
|
||||||
|
return this.client.isConnected()
|
||||||
|
}
|
||||||
|
|
||||||
|
async clear(): Promise<void> {
|
||||||
|
const redis = this.getRedis()
|
||||||
|
await Promise.all([
|
||||||
|
redis.del(ProjectKeys.files(this.projectName)),
|
||||||
|
redis.del(ProjectKeys.ast(this.projectName)),
|
||||||
|
redis.del(ProjectKeys.meta(this.projectName)),
|
||||||
|
redis.del(ProjectKeys.indexes(this.projectName)),
|
||||||
|
redis.del(ProjectKeys.config(this.projectName)),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
|
||||||
|
private getRedis(): ReturnType<RedisClient["getClient"]> {
|
||||||
|
return this.client.getClient()
|
||||||
|
}
|
||||||
|
|
||||||
|
private parseJSON(data: string, type: string): unknown {
|
||||||
|
try {
|
||||||
|
return JSON.parse(data) as unknown
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : "Unknown error"
|
||||||
|
throw IpuaroError.parse(`Failed to parse ${type}: ${message}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
11
packages/ipuaro/src/infrastructure/storage/index.ts
Normal file
11
packages/ipuaro/src/infrastructure/storage/index.ts
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
// Storage module exports
// Public surface of the storage layer: the Redis connection wrapper, the
// two storage implementations, and the key-schema helpers. Kept as
// re-exports so consumers can import everything from this directory.
export { RedisClient } from "./RedisClient.js"
export { RedisStorage } from "./RedisStorage.js"
export { RedisSessionStorage } from "./RedisSessionStorage.js"
export {
    ProjectKeys,
    SessionKeys,
    IndexFields,
    SessionFields,
    generateProjectName,
} from "./schema.js"
|
||||||
95
packages/ipuaro/src/infrastructure/storage/schema.ts
Normal file
95
packages/ipuaro/src/infrastructure/storage/schema.ts
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
/**
|
||||||
|
* Redis key schema for ipuaro data storage.
|
||||||
|
*
|
||||||
|
* Key structure:
|
||||||
|
* - project:{name}:files # Hash<path, FileData>
|
||||||
|
* - project:{name}:ast # Hash<path, FileAST>
|
||||||
|
* - project:{name}:meta # Hash<path, FileMeta>
|
||||||
|
* - project:{name}:indexes # Hash<name, JSON> (symbols, deps_graph)
|
||||||
|
* - project:{name}:config # Hash<key, JSON>
|
||||||
|
*
|
||||||
|
* - session:{id}:data # Hash<field, JSON> (history, context, stats)
|
||||||
|
* - session:{id}:undo # List<UndoEntry> (max 10)
|
||||||
|
* - sessions:list # List<session_id>
|
||||||
|
*
|
||||||
|
* Project name format: {parent-folder}-{project-folder}
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Project-related Redis keys.
|
||||||
|
*/
|
||||||
|
export const ProjectKeys = {
|
||||||
|
files: (projectName: string): string => `project:${projectName}:files`,
|
||||||
|
ast: (projectName: string): string => `project:${projectName}:ast`,
|
||||||
|
meta: (projectName: string): string => `project:${projectName}:meta`,
|
||||||
|
indexes: (projectName: string): string => `project:${projectName}:indexes`,
|
||||||
|
config: (projectName: string): string => `project:${projectName}:config`,
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session-related Redis keys.
|
||||||
|
*/
|
||||||
|
export const SessionKeys = {
|
||||||
|
data: (sessionId: string): string => `session:${sessionId}:data`,
|
||||||
|
undo: (sessionId: string): string => `session:${sessionId}:undo`,
|
||||||
|
list: "sessions:list",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index field names within project:indexes hash.
|
||||||
|
*/
|
||||||
|
export const IndexFields = {
    // Symbol index, serialized as JSON [name, entries][] pairs.
    symbols: "symbols",
    // Dependency graph, serialized as JSON { imports, importedBy } entry lists.
    depsGraph: "deps_graph",
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session data field names within session:data hash.
|
||||||
|
*/
|
||||||
|
export const SessionFields = {
    // JSON-serialized ChatMessage[].
    history: "history",
    // JSON-serialized ContextState.
    context: "context",
    // JSON-serialized SessionStats.
    stats: "stats",
    // JSON-serialized string[].
    inputHistory: "input_history",
    // Numeric timestamp stored as a decimal string (written via String(...)).
    createdAt: "created_at",
    // Numeric timestamp stored as a decimal string (written via String(...)).
    lastActivityAt: "last_activity_at",
    // Plain string, stored as-is.
    projectName: "project_name",
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate project name from path.
|
||||||
|
* Format: {parent-folder}-{project-folder}
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* generateProjectName("/home/user/projects/myapp") -> "projects-myapp"
|
||||||
|
* generateProjectName("/app") -> "app"
|
||||||
|
*/
|
||||||
|
export function generateProjectName(projectPath: string): string {
|
||||||
|
const normalized = projectPath.replace(/\\/g, "/").replace(/\/+$/, "")
|
||||||
|
const parts = normalized.split("/").filter(Boolean)
|
||||||
|
|
||||||
|
if (parts.length === 0) {
|
||||||
|
return "root"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parts.length === 1) {
|
||||||
|
return sanitizeName(parts[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
const projectFolder = sanitizeName(parts[parts.length - 1])
|
||||||
|
const parentFolder = sanitizeName(parts[parts.length - 2])
|
||||||
|
|
||||||
|
return `${parentFolder}-${projectFolder}`
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sanitize a name for use in Redis keys.
|
||||||
|
* Replaces non-alphanumeric characters with hyphens.
|
||||||
|
*/
|
||||||
|
function sanitizeName(name: string): string {
|
||||||
|
return name
|
||||||
|
.toLowerCase()
|
||||||
|
.replace(/[^a-z0-9-]/g, "-")
|
||||||
|
.replace(/-+/g, "-")
|
||||||
|
.replace(/^-|-$/g, "")
|
||||||
|
}
|
||||||
@@ -0,0 +1,232 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import type { ComplexityMetrics, FileMeta } from "../../../domain/value-objects/FileMeta.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Complexity entry for a single file.
|
||||||
|
*/
|
||||||
|
export interface ComplexityEntry {
    /** Relative path to the file (the key it is indexed under). */
    path: string
    /** Complexity metrics copied from the file's FileMeta (LOC, nesting, cyclomatic, score). */
    metrics: ComplexityMetrics
    /** File type classification, mirrored from FileMeta.fileType. */
    fileType: "source" | "test" | "config" | "types" | "unknown"
    /** Whether the file is flagged as a hub in its FileMeta. */
    isHub: boolean
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result data from get_complexity tool.
|
||||||
|
*/
|
||||||
|
export interface GetComplexityResult {
    /** The path that was analyzed (file or directory); null when the whole project was analyzed. */
    analyzedPath: string | null
    /** Total files analyzed, before the `limit` truncation is applied. */
    totalFiles: number
    /** Average complexity score across all analyzed files (rounded to 2 decimals). */
    averageScore: number
    /** Files sorted by complexity score (descending), truncated to `limit` entries. */
    files: ComplexityEntry[]
    /** Counts of files per complexity band (see COMPLEXITY_THRESHOLDS). */
    summary: {
        highComplexity: number
        mediumComplexity: number
        lowComplexity: number
    }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Complexity thresholds for classification.
|
||||||
|
*/
|
||||||
|
const COMPLEXITY_THRESHOLDS = {
|
||||||
|
high: 60,
|
||||||
|
medium: 30,
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for getting complexity metrics for files.
|
||||||
|
* Can analyze a single file or all files in the project.
|
||||||
|
*/
|
||||||
|
export class GetComplexityTool implements ITool {
|
||||||
|
readonly name = "get_complexity"
|
||||||
|
readonly description =
|
||||||
|
"Get complexity metrics for files. " +
|
||||||
|
"Returns LOC, nesting depth, cyclomatic complexity, and overall score. " +
|
||||||
|
"Without path, returns all files sorted by complexity."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File or directory path to analyze (optional, defaults to entire project)",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "limit",
|
||||||
|
type: "number",
|
||||||
|
description: "Maximum number of files to return (default: 20)",
|
||||||
|
required: false,
|
||||||
|
default: 20,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "analysis" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (params.path !== undefined && typeof params.path !== "string") {
|
||||||
|
return "Parameter 'path' must be a string"
|
||||||
|
}
|
||||||
|
if (params.limit !== undefined) {
|
||||||
|
if (typeof params.limit !== "number" || !Number.isInteger(params.limit)) {
|
||||||
|
return "Parameter 'limit' must be an integer"
|
||||||
|
}
|
||||||
|
if (params.limit < 1) {
|
||||||
|
return "Parameter 'limit' must be at least 1"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const inputPath = params.path as string | undefined
|
||||||
|
const limit = (params.limit as number | undefined) ?? 20
|
||||||
|
|
||||||
|
try {
|
||||||
|
const allMetas = await ctx.storage.getAllMetas()
|
||||||
|
|
||||||
|
if (allMetas.size === 0) {
|
||||||
|
return createSuccessResult(
|
||||||
|
callId,
|
||||||
|
{
|
||||||
|
analyzedPath: inputPath ?? null,
|
||||||
|
totalFiles: 0,
|
||||||
|
averageScore: 0,
|
||||||
|
files: [],
|
||||||
|
summary: { highComplexity: 0, mediumComplexity: 0, lowComplexity: 0 },
|
||||||
|
} satisfies GetComplexityResult,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
let filteredMetas = allMetas
|
||||||
|
let analyzedPath: string | null = null
|
||||||
|
|
||||||
|
if (inputPath) {
|
||||||
|
const relativePath = this.normalizePathToRelative(inputPath, ctx.projectRoot)
|
||||||
|
analyzedPath = relativePath
|
||||||
|
filteredMetas = this.filterByPath(allMetas, relativePath)
|
||||||
|
|
||||||
|
if (filteredMetas.size === 0) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`No files found at path: ${relativePath}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const entries: ComplexityEntry[] = []
|
||||||
|
for (const [filePath, meta] of filteredMetas) {
|
||||||
|
entries.push({
|
||||||
|
path: filePath,
|
||||||
|
metrics: meta.complexity,
|
||||||
|
fileType: meta.fileType,
|
||||||
|
isHub: meta.isHub,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
entries.sort((a, b) => b.metrics.score - a.metrics.score)
|
||||||
|
|
||||||
|
const summary = this.calculateSummary(entries)
|
||||||
|
const averageScore = this.calculateAverageScore(entries)
|
||||||
|
|
||||||
|
const limitedEntries = entries.slice(0, limit)
|
||||||
|
|
||||||
|
const result: GetComplexityResult = {
|
||||||
|
analyzedPath,
|
||||||
|
totalFiles: entries.length,
|
||||||
|
averageScore,
|
||||||
|
files: limitedEntries,
|
||||||
|
summary,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize input path to relative path from project root.
|
||||||
|
*/
|
||||||
|
private normalizePathToRelative(inputPath: string, projectRoot: string): string {
|
||||||
|
if (path.isAbsolute(inputPath)) {
|
||||||
|
return path.relative(projectRoot, inputPath)
|
||||||
|
}
|
||||||
|
return inputPath
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filter metas by path prefix (file or directory).
|
||||||
|
*/
|
||||||
|
private filterByPath(
|
||||||
|
allMetas: Map<string, FileMeta>,
|
||||||
|
targetPath: string,
|
||||||
|
): Map<string, FileMeta> {
|
||||||
|
const filtered = new Map<string, FileMeta>()
|
||||||
|
|
||||||
|
for (const [filePath, meta] of allMetas) {
|
||||||
|
if (filePath === targetPath || filePath.startsWith(`${targetPath}/`)) {
|
||||||
|
filtered.set(filePath, meta)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return filtered
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate summary statistics for complexity entries.
|
||||||
|
*/
|
||||||
|
private calculateSummary(entries: ComplexityEntry[]): {
|
||||||
|
highComplexity: number
|
||||||
|
mediumComplexity: number
|
||||||
|
lowComplexity: number
|
||||||
|
} {
|
||||||
|
let high = 0
|
||||||
|
let medium = 0
|
||||||
|
let low = 0
|
||||||
|
|
||||||
|
for (const entry of entries) {
|
||||||
|
const score = entry.metrics.score
|
||||||
|
if (score >= COMPLEXITY_THRESHOLDS.high) {
|
||||||
|
high++
|
||||||
|
} else if (score >= COMPLEXITY_THRESHOLDS.medium) {
|
||||||
|
medium++
|
||||||
|
} else {
|
||||||
|
low++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { highComplexity: high, mediumComplexity: medium, lowComplexity: low }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate average complexity score.
|
||||||
|
*/
|
||||||
|
private calculateAverageScore(entries: ComplexityEntry[]): number {
|
||||||
|
if (entries.length === 0) {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
const total = entries.reduce((sum, entry) => sum + entry.metrics.score, 0)
|
||||||
|
return Math.round((total / entries.length) * 100) / 100
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,121 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Single dependency entry with metadata.
|
||||||
|
*/
|
||||||
|
export interface DependencyEntry {
    /** Relative path to the dependency, as listed in the source file's FileMeta. */
    path: string
    /** Whether the dependency is present in the index (its FileMeta was found). */
    exists: boolean
    /** Whether it's an entry point; false when the dependency is not indexed. */
    isEntryPoint: boolean
    /** Whether it's a hub file; false when the dependency is not indexed. */
    isHub: boolean
    /** File type classification; "unknown" when the dependency is not indexed. */
    fileType: "source" | "test" | "config" | "types" | "unknown"
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result data from get_dependencies tool.
|
||||||
|
*/
|
||||||
|
export interface GetDependenciesResult {
    /** The file being analyzed (relative to the project root). */
    file: string
    /** Total number of dependencies found for the file. */
    totalDependencies: number
    /** List of dependencies with per-dependency index metadata. */
    dependencies: DependencyEntry[]
    /** File type of the source file itself. */
    fileType: "source" | "test" | "config" | "types" | "unknown"
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for getting files that a specific file imports.
|
||||||
|
* Returns the list of internal dependencies from FileMeta.
|
||||||
|
*/
|
||||||
|
export class GetDependenciesTool implements ITool {
|
||||||
|
readonly name = "get_dependencies"
|
||||||
|
readonly description =
|
||||||
|
"Get files that a specific file imports. " +
|
||||||
|
"Returns internal dependencies resolved to file paths."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path to analyze (relative to project root or absolute)",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "analysis" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||||
|
return "Parameter 'path' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const inputPath = (params.path as string).trim()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const relativePath = this.normalizePathToRelative(inputPath, ctx.projectRoot)
|
||||||
|
|
||||||
|
const meta = await ctx.storage.getMeta(relativePath)
|
||||||
|
if (!meta) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`File not found or not indexed: ${relativePath}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const dependencies: DependencyEntry[] = []
|
||||||
|
for (const depPath of meta.dependencies) {
|
||||||
|
const depMeta = await ctx.storage.getMeta(depPath)
|
||||||
|
dependencies.push({
|
||||||
|
path: depPath,
|
||||||
|
exists: depMeta !== null,
|
||||||
|
isEntryPoint: depMeta?.isEntryPoint ?? false,
|
||||||
|
isHub: depMeta?.isHub ?? false,
|
||||||
|
fileType: depMeta?.fileType ?? "unknown",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
dependencies.sort((a, b) => a.path.localeCompare(b.path))
|
||||||
|
|
||||||
|
const result: GetDependenciesResult = {
|
||||||
|
file: relativePath,
|
||||||
|
totalDependencies: dependencies.length,
|
||||||
|
dependencies,
|
||||||
|
fileType: meta.fileType,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize input path to relative path from project root.
|
||||||
|
*/
|
||||||
|
private normalizePathToRelative(inputPath: string, projectRoot: string): string {
|
||||||
|
if (path.isAbsolute(inputPath)) {
|
||||||
|
return path.relative(projectRoot, inputPath)
|
||||||
|
}
|
||||||
|
return inputPath
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,124 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
 * Single dependent entry with metadata.
 * Metadata fields fall back to defaults (false / "unknown" / 0) when the
 * dependent file itself has no indexed metadata.
 */
export interface DependentEntry {
    /** Relative path to the dependent file */
    path: string
    /** Whether the file is an entry point */
    isEntryPoint: boolean
    /** Whether the file is a hub */
    isHub: boolean
    /** File type classification */
    fileType: "source" | "test" | "config" | "types" | "unknown"
    /** Complexity score of the dependent */
    complexityScore: number
}
|
||||||
|
|
||||||
|
/**
 * Result data from get_dependents tool.
 */
export interface GetDependentsResult {
    /** The file being analyzed (relative to project root) */
    file: string
    /** Total number of dependents */
    totalDependents: number
    /** Whether this file is a hub (>5 dependents) */
    isHub: boolean
    /** List of files that import this file, sorted by path */
    dependents: DependentEntry[]
    /** File type of the source file */
    fileType: "source" | "test" | "config" | "types" | "unknown"
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for getting files that import a specific file.
|
||||||
|
* Returns the list of files that depend on the target file.
|
||||||
|
*/
|
||||||
|
export class GetDependentsTool implements ITool {
|
||||||
|
readonly name = "get_dependents"
|
||||||
|
readonly description =
|
||||||
|
"Get files that import a specific file. " +
|
||||||
|
"Returns list of files that depend on the target."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path to analyze (relative to project root or absolute)",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "analysis" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||||
|
return "Parameter 'path' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const inputPath = (params.path as string).trim()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const relativePath = this.normalizePathToRelative(inputPath, ctx.projectRoot)
|
||||||
|
|
||||||
|
const meta = await ctx.storage.getMeta(relativePath)
|
||||||
|
if (!meta) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`File not found or not indexed: ${relativePath}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const dependents: DependentEntry[] = []
|
||||||
|
for (const depPath of meta.dependents) {
|
||||||
|
const depMeta = await ctx.storage.getMeta(depPath)
|
||||||
|
dependents.push({
|
||||||
|
path: depPath,
|
||||||
|
isEntryPoint: depMeta?.isEntryPoint ?? false,
|
||||||
|
isHub: depMeta?.isHub ?? false,
|
||||||
|
fileType: depMeta?.fileType ?? "unknown",
|
||||||
|
complexityScore: depMeta?.complexity.score ?? 0,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
dependents.sort((a, b) => a.path.localeCompare(b.path))
|
||||||
|
|
||||||
|
const result: GetDependentsResult = {
|
||||||
|
file: relativePath,
|
||||||
|
totalDependents: dependents.length,
|
||||||
|
isHub: meta.isHub,
|
||||||
|
dependents,
|
||||||
|
fileType: meta.fileType,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize input path to relative path from project root.
|
||||||
|
*/
|
||||||
|
private normalizePathToRelative(inputPath: string, projectRoot: string): string {
|
||||||
|
if (path.isAbsolute(inputPath)) {
|
||||||
|
return path.relative(projectRoot, inputPath)
|
||||||
|
}
|
||||||
|
return inputPath
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,276 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import type { FileData } from "../../../domain/value-objects/FileData.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
 * Types of TODO markers to search for.
 * Matching against source lines is case-insensitive; the type is always
 * reported in upper case.
 */
export type TodoType = "TODO" | "FIXME" | "HACK" | "XXX" | "BUG" | "NOTE"
|
||||||
|
|
||||||
|
/**
 * A single TODO entry found in the codebase.
 */
export interface TodoEntry {
    /** Relative path to the file */
    path: string
    /** Line number where the TODO is found (1-based) */
    line: number
    /** Type of TODO marker (TODO, FIXME, etc.) */
    type: TodoType
    /** The TODO text content ("(no description)" when the marker has no text) */
    text: string
    /** Full line content for context (trimmed) */
    context: string
}
|
||||||
|
|
||||||
|
/**
 * Result data from get_todos tool.
 */
export interface GetTodosResult {
    /** The path that was searched (file or directory), or null for the whole project */
    searchedPath: string | null
    /** Total number of TODOs found */
    totalTodos: number
    /** Number of files with TODOs */
    filesWithTodos: number
    /** TODOs grouped by type (every marker type present, zero when absent) */
    byType: Record<TodoType, number>
    /** List of TODO entries, sorted by path then line */
    todos: TodoEntry[]
}
|
||||||
|
|
||||||
|
/**
 * Supported TODO marker patterns.
 */
const TODO_MARKERS: TodoType[] = ["TODO", "FIXME", "HACK", "XXX", "BUG", "NOTE"]

/**
 * Regex pattern for matching TODO markers in comments.
 * Structure: a comment leader (`//`, `/*`, `*`, or `#`), the marker word
 * (group 1), an optional parenthesized scope like `TODO(name)`, an optional
 * colon, then the remaining text (group 2). Case-insensitive, no `g` flag —
 * evaluated once per line.
 */
const TODO_PATTERN = new RegExp(
    `(?://|/\\*|\\*|#)\\s*(${TODO_MARKERS.join("|")})(?:\\([^)]*\\))?:?\\s*(.*)`,
    "i",
)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for finding TODO/FIXME/HACK comments in the codebase.
|
||||||
|
* Searches through indexed files for common task markers.
|
||||||
|
*/
|
||||||
|
export class GetTodosTool implements ITool {
|
||||||
|
readonly name = "get_todos"
|
||||||
|
readonly description =
|
||||||
|
"Find TODO, FIXME, HACK, XXX, BUG, and NOTE comments in the codebase. " +
|
||||||
|
"Returns list of locations with context."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File or directory to search (optional, defaults to entire project)",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "type",
|
||||||
|
type: "string",
|
||||||
|
description:
|
||||||
|
"Filter by TODO type: TODO, FIXME, HACK, XXX, BUG, NOTE (optional, defaults to all)",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "analysis" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (params.path !== undefined && typeof params.path !== "string") {
|
||||||
|
return "Parameter 'path' must be a string"
|
||||||
|
}
|
||||||
|
if (params.type !== undefined) {
|
||||||
|
if (typeof params.type !== "string") {
|
||||||
|
return "Parameter 'type' must be a string"
|
||||||
|
}
|
||||||
|
const upperType = params.type.toUpperCase()
|
||||||
|
if (!TODO_MARKERS.includes(upperType as TodoType)) {
|
||||||
|
return `Parameter 'type' must be one of: ${TODO_MARKERS.join(", ")}`
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const inputPath = params.path as string | undefined
|
||||||
|
const filterType = params.type ? ((params.type as string).toUpperCase() as TodoType) : null
|
||||||
|
|
||||||
|
try {
|
||||||
|
const allFiles = await ctx.storage.getAllFiles()
|
||||||
|
|
||||||
|
if (allFiles.size === 0) {
|
||||||
|
return createSuccessResult(
|
||||||
|
callId,
|
||||||
|
this.createEmptyResult(inputPath ?? null),
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
let filesToSearch = allFiles
|
||||||
|
let searchedPath: string | null = null
|
||||||
|
|
||||||
|
if (inputPath) {
|
||||||
|
const relativePath = this.normalizePathToRelative(inputPath, ctx.projectRoot)
|
||||||
|
searchedPath = relativePath
|
||||||
|
filesToSearch = this.filterByPath(allFiles, relativePath)
|
||||||
|
|
||||||
|
if (filesToSearch.size === 0) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`No files found at path: ${relativePath}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const todos: TodoEntry[] = []
|
||||||
|
const filesWithTodos = new Set<string>()
|
||||||
|
|
||||||
|
for (const [filePath, fileData] of filesToSearch) {
|
||||||
|
const fileTodos = this.findTodosInFile(filePath, fileData.lines, filterType)
|
||||||
|
if (fileTodos.length > 0) {
|
||||||
|
filesWithTodos.add(filePath)
|
||||||
|
todos.push(...fileTodos)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
todos.sort((a, b) => {
|
||||||
|
const pathCompare = a.path.localeCompare(b.path)
|
||||||
|
if (pathCompare !== 0) {
|
||||||
|
return pathCompare
|
||||||
|
}
|
||||||
|
return a.line - b.line
|
||||||
|
})
|
||||||
|
|
||||||
|
const byType = this.countByType(todos)
|
||||||
|
|
||||||
|
const result: GetTodosResult = {
|
||||||
|
searchedPath,
|
||||||
|
totalTodos: todos.length,
|
||||||
|
filesWithTodos: filesWithTodos.size,
|
||||||
|
byType,
|
||||||
|
todos,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize input path to relative path from project root.
|
||||||
|
*/
|
||||||
|
private normalizePathToRelative(inputPath: string, projectRoot: string): string {
|
||||||
|
if (path.isAbsolute(inputPath)) {
|
||||||
|
return path.relative(projectRoot, inputPath)
|
||||||
|
}
|
||||||
|
return inputPath
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filter files by path prefix.
|
||||||
|
*/
|
||||||
|
private filterByPath(
|
||||||
|
allFiles: Map<string, FileData>,
|
||||||
|
targetPath: string,
|
||||||
|
): Map<string, FileData> {
|
||||||
|
const filtered = new Map<string, FileData>()
|
||||||
|
|
||||||
|
for (const [filePath, fileData] of allFiles) {
|
||||||
|
if (filePath === targetPath || filePath.startsWith(`${targetPath}/`)) {
|
||||||
|
filtered.set(filePath, fileData)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return filtered
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find all TODOs in a file.
|
||||||
|
*/
|
||||||
|
private findTodosInFile(
|
||||||
|
filePath: string,
|
||||||
|
lines: string[],
|
||||||
|
filterType: TodoType | null,
|
||||||
|
): TodoEntry[] {
|
||||||
|
const todos: TodoEntry[] = []
|
||||||
|
|
||||||
|
for (let i = 0; i < lines.length; i++) {
|
||||||
|
const line = lines[i]
|
||||||
|
const match = TODO_PATTERN.exec(line)
|
||||||
|
|
||||||
|
if (match) {
|
||||||
|
const type = match[1].toUpperCase() as TodoType
|
||||||
|
const text = match[2].trim()
|
||||||
|
|
||||||
|
if (filterType && type !== filterType) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
todos.push({
|
||||||
|
path: filePath,
|
||||||
|
line: i + 1,
|
||||||
|
type,
|
||||||
|
text: text || "(no description)",
|
||||||
|
context: line.trim(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return todos
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Count TODOs by type.
|
||||||
|
*/
|
||||||
|
private countByType(todos: TodoEntry[]): Record<TodoType, number> {
|
||||||
|
const counts: Record<TodoType, number> = {
|
||||||
|
TODO: 0,
|
||||||
|
FIXME: 0,
|
||||||
|
HACK: 0,
|
||||||
|
XXX: 0,
|
||||||
|
BUG: 0,
|
||||||
|
NOTE: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const todo of todos) {
|
||||||
|
counts[todo.type]++
|
||||||
|
}
|
||||||
|
|
||||||
|
return counts
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create empty result structure.
|
||||||
|
*/
|
||||||
|
private createEmptyResult(searchedPath: string | null): GetTodosResult {
|
||||||
|
return {
|
||||||
|
searchedPath,
|
||||||
|
totalTodos: 0,
|
||||||
|
filesWithTodos: 0,
|
||||||
|
byType: {
|
||||||
|
TODO: 0,
|
||||||
|
FIXME: 0,
|
||||||
|
HACK: 0,
|
||||||
|
XXX: 0,
|
||||||
|
BUG: 0,
|
||||||
|
NOTE: 0,
|
||||||
|
},
|
||||||
|
todos: [],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
20
packages/ipuaro/src/infrastructure/tools/analysis/index.ts
Normal file
20
packages/ipuaro/src/infrastructure/tools/analysis/index.ts
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
// Barrel file for the analysis tools module: re-exports each tool class
// together with its result and entry types.
export {
    GetDependenciesTool,
    type GetDependenciesResult,
    type DependencyEntry,
} from "./GetDependenciesTool.js"

export {
    GetDependentsTool,
    type GetDependentsResult,
    type DependentEntry,
} from "./GetDependentsTool.js"

export {
    GetComplexityTool,
    type GetComplexityResult,
    type ComplexityEntry,
} from "./GetComplexityTool.js"

export { GetTodosTool, type GetTodosResult, type TodoEntry, type TodoType } from "./GetTodosTool.js"
|
||||||
142
packages/ipuaro/src/infrastructure/tools/edit/CreateFileTool.ts
Normal file
142
packages/ipuaro/src/infrastructure/tools/edit/CreateFileTool.ts
Normal file
@@ -0,0 +1,142 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import { createFileData } from "../../../domain/value-objects/FileData.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
import { hashLines } from "../../../shared/utils/hash.js"
|
||||||
|
import { PathValidator } from "../../security/PathValidator.js"
|
||||||
|
|
||||||
|
/**
 * Result data from create_file tool.
 */
export interface CreateFileResult {
    /** Relative path of the created file */
    path: string
    /** Number of lines written */
    lines: number
    /** File size in bytes (from fs.stat after the write) */
    size: number
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for creating new files.
|
||||||
|
* Creates a new file with the specified content.
|
||||||
|
* Requires user confirmation before creating.
|
||||||
|
*/
|
||||||
|
export class CreateFileTool implements ITool {
|
||||||
|
readonly name = "create_file"
|
||||||
|
readonly description =
|
||||||
|
"Create a new file with the specified content. " +
|
||||||
|
"The file path must be within the project root. " +
|
||||||
|
"Requires confirmation before creating."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path relative to project root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "content",
|
||||||
|
type: "string",
|
||||||
|
description: "File content",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = true
|
||||||
|
readonly category = "edit" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||||
|
return "Parameter 'path' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof params.content !== "string") {
|
||||||
|
return "Parameter 'content' is required and must be a string"
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const inputPath = params.path as string
|
||||||
|
const content = params.content as string
|
||||||
|
|
||||||
|
const pathValidator = new PathValidator(ctx.projectRoot)
|
||||||
|
|
||||||
|
let absolutePath: string
|
||||||
|
let relativePath: string
|
||||||
|
try {
|
||||||
|
;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const exists = await this.fileExists(absolutePath)
|
||||||
|
if (exists) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`File already exists: ${relativePath}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines = content.split("\n")
|
||||||
|
|
||||||
|
const confirmed = await ctx.requestConfirmation(
|
||||||
|
`Create new file: ${relativePath} (${String(lines.length)} lines)`,
|
||||||
|
{
|
||||||
|
filePath: relativePath,
|
||||||
|
oldLines: [],
|
||||||
|
newLines: lines,
|
||||||
|
startLine: 1,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!confirmed) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
"File creation cancelled by user",
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const dirPath = path.dirname(absolutePath)
|
||||||
|
await fs.mkdir(dirPath, { recursive: true })
|
||||||
|
await fs.writeFile(absolutePath, content, "utf-8")
|
||||||
|
|
||||||
|
const stats = await fs.stat(absolutePath)
|
||||||
|
const fileData = createFileData(lines, hashLines(lines), stats.size, stats.mtimeMs)
|
||||||
|
await ctx.storage.setFile(relativePath, fileData)
|
||||||
|
|
||||||
|
const result: CreateFileResult = {
|
||||||
|
path: relativePath,
|
||||||
|
lines: lines.length,
|
||||||
|
size: stats.size,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if file exists.
|
||||||
|
*/
|
||||||
|
private async fileExists(filePath: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await fs.access(filePath)
|
||||||
|
return true
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
137
packages/ipuaro/src/infrastructure/tools/edit/DeleteFileTool.ts
Normal file
137
packages/ipuaro/src/infrastructure/tools/edit/DeleteFileTool.ts
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
import { PathValidator } from "../../security/PathValidator.js"
|
||||||
|
|
||||||
|
/**
 * Result data from delete_file tool.
 */
export interface DeleteFileResult {
    /** Relative path of the deleted file */
    path: string
    /** Always true on success (errors are reported via an error result) */
    deleted: boolean
}
|
||||||
|
|
||||||
|
/**
 * Tool for deleting files.
 * Deletes a file from the filesystem and storage.
 * Requires user confirmation before deleting.
 */
export class DeleteFileTool implements ITool {
    readonly name = "delete_file"
    readonly description =
        "Delete a file from the project. " +
        "The file path must be within the project root. " +
        "Requires confirmation before deleting."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "path",
            type: "string",
            description: "File path relative to project root",
            required: true,
        },
    ]
    readonly requiresConfirmation = true
    readonly category = "edit" as const

    /**
     * Validate raw tool parameters.
     * @returns Error message, or null when params are valid.
     */
    validateParams(params: Record<string, unknown>): string | null {
        if (typeof params.path !== "string" || params.path.trim() === "") {
            return "Parameter 'path' is required and must be a non-empty string"
        }

        return null
    }

    /**
     * Delete the file after path validation and user confirmation.
     * Order matters: the user sees the current content as a diff, then the
     * file is unlinked, then all derived storage entries are purged.
     */
    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const inputPath = params.path as string
        // Rejects paths escaping the project root.
        const pathValidator = new PathValidator(ctx.projectRoot)

        let absolutePath: string
        let relativePath: string
        try {
            ;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }

        try {
            const exists = await this.fileExists(absolutePath)
            if (!exists) {
                return createErrorResult(
                    callId,
                    `File not found: ${relativePath}`,
                    Date.now() - startTime,
                )
            }

            // Fetch current content so the confirmation can show what is lost.
            const fileContent = await this.getFileContent(absolutePath, relativePath, ctx)

            // Deletion is rendered as a diff from the full content to nothing.
            const confirmed = await ctx.requestConfirmation(`Delete file: ${relativePath}`, {
                filePath: relativePath,
                oldLines: fileContent,
                newLines: [],
                startLine: 1,
            })

            if (!confirmed) {
                return createErrorResult(
                    callId,
                    "File deletion cancelled by user",
                    Date.now() - startTime,
                )
            }

            await fs.unlink(absolutePath)

            // Purge every derived artifact so storage stays consistent.
            await ctx.storage.deleteFile(relativePath)
            await ctx.storage.deleteAST(relativePath)
            await ctx.storage.deleteMeta(relativePath)

            const result: DeleteFileResult = {
                path: relativePath,
                deleted: true,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Check if a regular file exists at the given absolute path.
     * Directories and other entries return false.
     */
    private async fileExists(filePath: string): Promise<boolean> {
        try {
            const stats = await fs.stat(filePath)
            return stats.isFile()
        } catch {
            return false
        }
    }

    /**
     * Get file content for diff display.
     * Prefers the indexed copy in storage; falls back to reading from disk.
     */
    private async getFileContent(
        absolutePath: string,
        relativePath: string,
        ctx: ToolContext,
    ): Promise<string[]> {
        const fileData = await ctx.storage.getFile(relativePath)
        if (fileData) {
            return fileData.lines
        }

        const content = await fs.readFile(absolutePath, "utf-8")
        return content.split("\n")
    }
}
|
||||||
227
packages/ipuaro/src/infrastructure/tools/edit/EditLinesTool.ts
Normal file
227
packages/ipuaro/src/infrastructure/tools/edit/EditLinesTool.ts
Normal file
@@ -0,0 +1,227 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import { createFileData } from "../../../domain/value-objects/FileData.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
import { hashLines } from "../../../shared/utils/hash.js"
|
||||||
|
import { PathValidator } from "../../security/PathValidator.js"
|
||||||
|
|
||||||
|
/**
 * Result data from edit_lines tool.
 */
export interface EditLinesResult {
    /** Relative path of the edited file */
    path: string
    /** Start line of the replaced range (1-based, inclusive — matches tool params) */
    startLine: number
    /** End line of the replaced range (1-based, inclusive; presumably clamped to file length — confirm in execute) */
    endLine: number
    /** Number of original lines replaced — NOTE(review): confirm semantics in execute */
    linesReplaced: number
    /** Number of new lines inserted — NOTE(review): confirm semantics in execute */
    linesInserted: number
    /** Total number of lines in the file — NOTE(review): presumably after the edit; confirm */
    totalLines: number
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for editing specific lines in a file.
|
||||||
|
* Replaces lines from start to end with new content.
|
||||||
|
* Requires user confirmation before applying changes.
|
||||||
|
*/
|
||||||
|
export class EditLinesTool implements ITool {
|
||||||
|
readonly name = "edit_lines"
|
||||||
|
readonly description =
|
||||||
|
"Replace lines in a file. Replaces lines from start to end (inclusive) with new content. " +
|
||||||
|
"Requires confirmation before applying changes."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path relative to project root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "start",
|
||||||
|
type: "number",
|
||||||
|
description: "Start line number (1-based, inclusive)",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "end",
|
||||||
|
type: "number",
|
||||||
|
description: "End line number (1-based, inclusive)",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "content",
|
||||||
|
type: "string",
|
||||||
|
description: "New content to insert (can be multi-line)",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = true
|
||||||
|
readonly category = "edit" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
    // Returns a human-readable error message, or null when params are valid.
    if (typeof params.path !== "string" || params.path.trim() === "") {
        return "Parameter 'path' is required and must be a non-empty string"
    }

    // 'start' and 'end' are 1-based inclusive line numbers.
    if (typeof params.start !== "number" || !Number.isInteger(params.start)) {
        return "Parameter 'start' is required and must be an integer"
    }
    if (params.start < 1) {
        return "Parameter 'start' must be >= 1"
    }

    if (typeof params.end !== "number" || !Number.isInteger(params.end)) {
        return "Parameter 'end' is required and must be an integer"
    }
    if (params.end < 1) {
        return "Parameter 'end' must be >= 1"
    }

    // The range must be well-formed (non-empty, start before or at end).
    if (params.start > params.end) {
        return "Parameter 'start' must be <= 'end'"
    }

    // Empty string is valid content (deletes the range's text).
    if (typeof params.content !== "string") {
        return "Parameter 'content' is required and must be a string"
    }

    return null
}
|
||||||
|
|
||||||
|
/**
 * Replace lines [start, end] of a file with new content.
 *
 * Steps: validate the path, load the current lines (storage first, disk
 * as fallback), bounds-check, detect external modification via the stored
 * hash, ask the user to confirm a before/after preview, then write the
 * result and re-index it. Returns an EditLinesResult on success, or an
 * error result on any failure.
 */
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
    const startTime = Date.now()
    const callId = `${this.name}-${String(startTime)}`

    // Params were checked by validateParams, so these casts are safe.
    const inputPath = params.path as string
    const startLine = params.start as number
    const endLine = params.end as number
    const newContent = params.content as string

    const pathValidator = new PathValidator(ctx.projectRoot)

    let absolutePath: string
    let relativePath: string
    try {
        ;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
    } catch (error) {
        const message = error instanceof Error ? error.message : String(error)
        return createErrorResult(callId, message, Date.now() - startTime)
    }

    try {
        const currentLines = await this.getCurrentLines(absolutePath, relativePath, ctx)
        const totalLines = currentLines.length

        if (startLine > totalLines) {
            return createErrorResult(
                callId,
                `Start line ${String(startLine)} exceeds file length (${String(totalLines)} lines)`,
                Date.now() - startTime,
            )
        }

        // An 'end' past EOF is clamped to the last line rather than rejected.
        const adjustedEnd = Math.min(endLine, totalLines)
        // NOTE(review): getCurrentLines prefers the stored copy, so when the
        // file is indexed, checkHashConflict compares the stored lines with
        // their own hash and presumably can never detect an external edit —
        // verify whether the conflict check should hash the on-disk content.
        const conflictCheck = await this.checkHashConflict(relativePath, currentLines, ctx)
        if (conflictCheck) {
            return createErrorResult(callId, conflictCheck, Date.now() - startTime)
        }

        const oldLines = currentLines.slice(startLine - 1, adjustedEnd)
        const newLines = newContent.split("\n")

        // Show the user the affected range plus old/new lines and wait for approval.
        const confirmed = await ctx.requestConfirmation(
            `Replace lines ${String(startLine)}-${String(adjustedEnd)} in ${relativePath}`,
            {
                filePath: relativePath,
                oldLines,
                newLines,
                startLine,
            },
        )

        if (!confirmed) {
            return createErrorResult(callId, "Edit cancelled by user", Date.now() - startTime)
        }

        // Splice: lines before start, the replacement lines, lines after end.
        const updatedLines = [
            ...currentLines.slice(0, startLine - 1),
            ...newLines,
            ...currentLines.slice(adjustedEnd),
        ]

        await this.applyChanges(absolutePath, relativePath, updatedLines, ctx)

        const result: EditLinesResult = {
            path: relativePath,
            startLine,
            endLine: adjustedEnd,
            linesReplaced: adjustedEnd - startLine + 1,
            linesInserted: newLines.length,
            totalLines: updatedLines.length,
        }

        return createSuccessResult(callId, result, Date.now() - startTime)
    } catch (error) {
        const message = error instanceof Error ? error.message : String(error)
        return createErrorResult(callId, message, Date.now() - startTime)
    }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current file lines from storage or filesystem.
|
||||||
|
*/
|
||||||
|
private async getCurrentLines(
|
||||||
|
absolutePath: string,
|
||||||
|
relativePath: string,
|
||||||
|
ctx: ToolContext,
|
||||||
|
): Promise<string[]> {
|
||||||
|
const fileData = await ctx.storage.getFile(relativePath)
|
||||||
|
if (fileData) {
|
||||||
|
return fileData.lines
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = await fs.readFile(absolutePath, "utf-8")
|
||||||
|
return content.split("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Check if file has changed since it was indexed.
 * Returns error message if conflict detected, null otherwise.
 *
 * Compares the hash of `currentLines` with the hash recorded in storage;
 * files that were never indexed are treated as conflict-free.
 *
 * NOTE(review): callers obtain `currentLines` from getCurrentLines, which
 * returns the stored copy when one exists — in that case this compares the
 * stored lines against their own hash and presumably always matches;
 * verify whether the on-disk content should be hashed here instead.
 */
private async checkHashConflict(
    relativePath: string,
    currentLines: string[],
    ctx: ToolContext,
): Promise<string | null> {
    const storedFile = await ctx.storage.getFile(relativePath)
    if (!storedFile) {
        // Never indexed: nothing to conflict with.
        return null
    }

    const currentHash = hashLines(currentLines)
    if (storedFile.hash !== currentHash) {
        return "File has been modified externally. Please refresh the file before editing."
    }

    return null
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Apply changes to filesystem and storage.
|
||||||
|
*/
|
||||||
|
private async applyChanges(
|
||||||
|
absolutePath: string,
|
||||||
|
relativePath: string,
|
||||||
|
lines: string[],
|
||||||
|
ctx: ToolContext,
|
||||||
|
): Promise<void> {
|
||||||
|
const content = lines.join("\n")
|
||||||
|
await fs.writeFile(absolutePath, content, "utf-8")
|
||||||
|
|
||||||
|
const stats = await fs.stat(absolutePath)
|
||||||
|
const fileData = createFileData(lines, hashLines(lines), stats.size, stats.mtimeMs)
|
||||||
|
await ctx.storage.setFile(relativePath, fileData)
|
||||||
|
}
|
||||||
|
}
|
||||||
4
packages/ipuaro/src/infrastructure/tools/edit/index.ts
Normal file
4
packages/ipuaro/src/infrastructure/tools/edit/index.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
// Edit tools exports
|
||||||
|
export { EditLinesTool, type EditLinesResult } from "./EditLinesTool.js"
|
||||||
|
export { CreateFileTool, type CreateFileResult } from "./CreateFileTool.js"
|
||||||
|
export { DeleteFileTool, type DeleteFileResult } from "./DeleteFileTool.js"
|
||||||
155
packages/ipuaro/src/infrastructure/tools/git/GitCommitTool.ts
Normal file
155
packages/ipuaro/src/infrastructure/tools/git/GitCommitTool.ts
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
import { type CommitResult, type SimpleGit, simpleGit } from "simple-git"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
 * Author information attached to a commit.
 */
export interface CommitAuthor {
    /** Author name as reported by git. */
    name: string
    /** Author email as reported by git. */
    email: string
}
|
||||||
|
|
||||||
|
/**
 * Result data from git_commit tool.
 */
export interface GitCommitResult {
    /** Commit hash as reported by git. */
    hash: string
    /** Branch the commit was created on. */
    branch: string
    /** Commit message */
    message: string
    /** Number of files changed */
    filesChanged: number
    /** Number of inserted lines. */
    insertions: number
    /** Number of deleted lines. */
    deletions: number
    /** Author information, or null when git did not report one. */
    author: CommitAuthor | null
}
|
||||||
|
|
||||||
|
/**
 * Tool for creating git commits.
 * Requires confirmation before execution.
 *
 * Flow: optionally stage the given files, verify something is staged,
 * ask the user to confirm, then commit and report the summary.
 */
export class GitCommitTool implements ITool {
    readonly name = "git_commit"
    readonly description =
        "Create a git commit with the specified message. " +
        "Will ask for confirmation. Optionally stage specific files first."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "message",
            type: "string",
            description: "Commit message",
            required: true,
        },
        {
            name: "files",
            type: "array",
            description: "Files to stage before commit (optional, defaults to all staged)",
            required: false,
        },
    ]
    readonly requiresConfirmation = true
    readonly category = "git" as const

    // Injectable factory so tests can substitute a fake SimpleGit instance.
    private readonly gitFactory: (basePath: string) => SimpleGit

    constructor(gitFactory?: (basePath: string) => SimpleGit) {
        this.gitFactory = gitFactory ?? ((basePath: string) => simpleGit(basePath))
    }

    /**
     * Validate parameters: 'message' must be a non-empty string and,
     * when present, 'files' must be an array of strings.
     * Returns an error message, or null when valid.
     */
    validateParams(params: Record<string, unknown>): string | null {
        if (params.message === undefined) {
            return "Parameter 'message' is required"
        }
        if (typeof params.message !== "string") {
            return "Parameter 'message' must be a string"
        }
        if (params.message.trim() === "") {
            return "Parameter 'message' cannot be empty"
        }
        if (params.files !== undefined) {
            if (!Array.isArray(params.files)) {
                return "Parameter 'files' must be an array"
            }
            for (const file of params.files) {
                if (typeof file !== "string") {
                    return "Parameter 'files' must be an array of strings"
                }
            }
        }
        return null
    }

    /**
     * Stage (optional), confirm, and create the commit.
     * Returns a GitCommitResult on success, or an error result when the
     * directory is not a repo, nothing is staged, or the user declines.
     */
    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const message = params.message as string
        const files = params.files as string[] | undefined

        try {
            const git = this.gitFactory(ctx.projectRoot)

            const isRepo = await git.checkIsRepo()
            if (!isRepo) {
                return createErrorResult(
                    callId,
                    "Not a git repository. Initialize with 'git init' first.",
                    Date.now() - startTime,
                )
            }

            if (files && files.length > 0) {
                await git.add(files)
            }

            const status = await git.status()
            // NOTE(review): when 'files' is non-empty but staging produced no
            // staged entries, this guard passes and the commit is still
            // attempted — presumably git then reports the failure itself;
            // confirm that is the intended behavior.
            if (status.staged.length === 0 && (!files || files.length === 0)) {
                return createErrorResult(
                    callId,
                    "Nothing to commit. Stage files first with 'git add' or provide 'files' parameter.",
                    Date.now() - startTime,
                )
            }

            const commitSummary = `Committing ${String(status.staged.length)} file(s): ${message}`
            const confirmed = await ctx.requestConfirmation(commitSummary)

            if (!confirmed) {
                return createErrorResult(callId, "Commit cancelled by user", Date.now() - startTime)
            }

            const commitResult = await git.commit(message)
            const result = this.formatCommitResult(commitResult, message)

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            // Named 'message_' to avoid shadowing the commit message above.
            const message_ = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message_, Date.now() - startTime)
        }
    }

    /**
     * Format simple-git CommitResult into our result structure.
     */
    private formatCommitResult(commit: CommitResult, message: string): GitCommitResult {
        return {
            hash: commit.commit,
            branch: commit.branch,
            message,
            filesChanged: commit.summary.changes,
            insertions: commit.summary.insertions,
            deletions: commit.summary.deletions,
            author: commit.author ?? null,
        }
    }
}
|
||||||
155
packages/ipuaro/src/infrastructure/tools/git/GitDiffTool.ts
Normal file
155
packages/ipuaro/src/infrastructure/tools/git/GitDiffTool.ts
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
import { simpleGit, type SimpleGit } from "simple-git"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
 * A single file diff entry.
 */
export interface DiffEntry {
    /** File path as reported by git. */
    file: string
    /** Number of insertions (0 when git reports none, e.g. binary files). */
    insertions: number
    /** Number of deletions (0 when git reports none, e.g. binary files). */
    deletions: number
    /** Whether the file is binary. */
    binary: boolean
}
|
||||||
|
|
||||||
|
/**
 * Result data from git_diff tool.
 */
export interface GitDiffResult {
    /** Whether the diff covers staged changes only (true) or all changes (false). */
    staged: boolean
    /** Path filter applied (null if all files). */
    pathFilter: string | null
    /** Whether there are any changes. */
    hasChanges: boolean
    /** Aggregate summary across all changed files. */
    summary: {
        /** Number of files changed. */
        filesChanged: number
        /** Total inserted lines. */
        insertions: number
        /** Total deleted lines. */
        deletions: number
    }
    /** Per-file list of changed files. */
    files: DiffEntry[]
    /** Full diff text as produced by git. */
    diff: string
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for getting uncommitted git changes (diff).
|
||||||
|
* Shows what has changed but not yet committed.
|
||||||
|
*/
|
||||||
|
export class GitDiffTool implements ITool {
|
||||||
|
readonly name = "git_diff"
|
||||||
|
readonly description =
|
||||||
|
"Get uncommitted changes (diff). " + "Shows what has changed but not yet committed."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "Limit diff to specific file or directory",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "staged",
|
||||||
|
type: "boolean",
|
||||||
|
description: "Show only staged changes (default: false, shows all)",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "git" as const
|
||||||
|
|
||||||
|
private readonly gitFactory: (basePath: string) => SimpleGit
|
||||||
|
|
||||||
|
constructor(gitFactory?: (basePath: string) => SimpleGit) {
|
||||||
|
this.gitFactory = gitFactory ?? ((basePath: string) => simpleGit(basePath))
|
||||||
|
}
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (params.path !== undefined && typeof params.path !== "string") {
|
||||||
|
return "Parameter 'path' must be a string"
|
||||||
|
}
|
||||||
|
if (params.staged !== undefined && typeof params.staged !== "boolean") {
|
||||||
|
return "Parameter 'staged' must be a boolean"
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const pathFilter = (params.path as string) ?? null
|
||||||
|
const staged = (params.staged as boolean) ?? false
|
||||||
|
|
||||||
|
try {
|
||||||
|
const git = this.gitFactory(ctx.projectRoot)
|
||||||
|
|
||||||
|
const isRepo = await git.checkIsRepo()
|
||||||
|
if (!isRepo) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
"Not a git repository. Initialize with 'git init' first.",
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const diffArgs = this.buildDiffArgs(staged, pathFilter)
|
||||||
|
const diffSummary = await git.diffSummary(diffArgs)
|
||||||
|
const diffText = await git.diff(diffArgs)
|
||||||
|
|
||||||
|
const files: DiffEntry[] = diffSummary.files.map((f) => ({
|
||||||
|
file: f.file,
|
||||||
|
insertions: "insertions" in f ? f.insertions : 0,
|
||||||
|
deletions: "deletions" in f ? f.deletions : 0,
|
||||||
|
binary: f.binary,
|
||||||
|
}))
|
||||||
|
|
||||||
|
const result: GitDiffResult = {
|
||||||
|
staged,
|
||||||
|
pathFilter,
|
||||||
|
hasChanges: diffSummary.files.length > 0,
|
||||||
|
summary: {
|
||||||
|
filesChanged: diffSummary.files.length,
|
||||||
|
insertions: diffSummary.insertions,
|
||||||
|
deletions: diffSummary.deletions,
|
||||||
|
},
|
||||||
|
files,
|
||||||
|
diff: diffText,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build diff arguments array.
|
||||||
|
*/
|
||||||
|
private buildDiffArgs(staged: boolean, pathFilter: string | null): string[] {
|
||||||
|
const args: string[] = []
|
||||||
|
|
||||||
|
if (staged) {
|
||||||
|
args.push("--cached")
|
||||||
|
}
|
||||||
|
|
||||||
|
if (pathFilter) {
|
||||||
|
args.push("--", pathFilter)
|
||||||
|
}
|
||||||
|
|
||||||
|
return args
|
||||||
|
}
|
||||||
|
}
|
||||||
129
packages/ipuaro/src/infrastructure/tools/git/GitStatusTool.ts
Normal file
129
packages/ipuaro/src/infrastructure/tools/git/GitStatusTool.ts
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
import { simpleGit, type SimpleGit, type StatusResult } from "simple-git"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
 * Per-file status entry in git status.
 */
export interface FileStatusEntry {
    /** Relative file path */
    path: string
    /** Working-directory status flag (modified, deleted, etc.). */
    workingDir: string
    /** Index/staging status flag. */
    index: string
}
|
||||||
|
|
||||||
|
/**
 * Result data from git_status tool.
 */
export interface GitStatusResult {
    /** Current branch name, or "HEAD (detached)" when no branch is checked out. */
    branch: string
    /** Tracking branch (e.g., origin/main), or null when none is set. */
    tracking: string | null
    /** Number of commits ahead of tracking */
    ahead: number
    /** Number of commits behind tracking */
    behind: number
    /** Files staged for commit (index flag set). */
    staged: FileStatusEntry[]
    /** Modified files not staged (working-dir flag set). */
    modified: FileStatusEntry[]
    /** Untracked file paths. */
    untracked: string[]
    /** Paths of files with merge conflicts. */
    conflicted: string[]
    /** Whether working directory is clean */
    isClean: boolean
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for getting git repository status.
|
||||||
|
* Returns branch info, staged/modified/untracked files.
|
||||||
|
*/
|
||||||
|
export class GitStatusTool implements ITool {
|
||||||
|
readonly name = "git_status"
|
||||||
|
readonly description =
|
||||||
|
"Get current git repository status. " +
|
||||||
|
"Returns branch name, staged files, modified files, and untracked files."
|
||||||
|
readonly parameters: ToolParameterSchema[] = []
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "git" as const
|
||||||
|
|
||||||
|
private readonly gitFactory: (basePath: string) => SimpleGit
|
||||||
|
|
||||||
|
constructor(gitFactory?: (basePath: string) => SimpleGit) {
|
||||||
|
this.gitFactory = gitFactory ?? ((basePath: string) => simpleGit(basePath))
|
||||||
|
}
|
||||||
|
|
||||||
|
validateParams(_params: Record<string, unknown>): string | null {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(_params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
try {
|
||||||
|
const git = this.gitFactory(ctx.projectRoot)
|
||||||
|
|
||||||
|
const isRepo = await git.checkIsRepo()
|
||||||
|
if (!isRepo) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
"Not a git repository. Initialize with 'git init' first.",
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const status = await git.status()
|
||||||
|
const result = this.formatStatus(status)
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format simple-git StatusResult into our result structure.
|
||||||
|
*/
|
||||||
|
private formatStatus(status: StatusResult): GitStatusResult {
|
||||||
|
const staged: FileStatusEntry[] = []
|
||||||
|
const modified: FileStatusEntry[] = []
|
||||||
|
|
||||||
|
for (const file of status.files) {
|
||||||
|
const entry: FileStatusEntry = {
|
||||||
|
path: file.path,
|
||||||
|
workingDir: file.working_dir,
|
||||||
|
index: file.index,
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.index !== " " && file.index !== "?") {
|
||||||
|
staged.push(entry)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.working_dir !== " " && file.working_dir !== "?") {
|
||||||
|
modified.push(entry)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
branch: status.current ?? "HEAD (detached)",
|
||||||
|
tracking: status.tracking ?? null,
|
||||||
|
ahead: status.ahead,
|
||||||
|
behind: status.behind,
|
||||||
|
staged,
|
||||||
|
modified,
|
||||||
|
untracked: status.not_added,
|
||||||
|
conflicted: status.conflicted,
|
||||||
|
isClean: status.isClean(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
6
packages/ipuaro/src/infrastructure/tools/git/index.ts
Normal file
6
packages/ipuaro/src/infrastructure/tools/git/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// Git tools exports
|
||||||
|
export { GitStatusTool, type GitStatusResult, type FileStatusEntry } from "./GitStatusTool.js"
|
||||||
|
|
||||||
|
export { GitDiffTool, type GitDiffResult, type DiffEntry } from "./GitDiffTool.js"
|
||||||
|
|
||||||
|
export { GitCommitTool, type GitCommitResult, type CommitAuthor } from "./GitCommitTool.js"
|
||||||
75
packages/ipuaro/src/infrastructure/tools/index.ts
Normal file
75
packages/ipuaro/src/infrastructure/tools/index.ts
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
// Tools module exports
|
||||||
|
export { ToolRegistry } from "./registry.js"
|
||||||
|
|
||||||
|
// Read tools
|
||||||
|
export { GetLinesTool, type GetLinesResult } from "./read/GetLinesTool.js"
|
||||||
|
export { GetFunctionTool, type GetFunctionResult } from "./read/GetFunctionTool.js"
|
||||||
|
export { GetClassTool, type GetClassResult } from "./read/GetClassTool.js"
|
||||||
|
export {
|
||||||
|
GetStructureTool,
|
||||||
|
type GetStructureResult,
|
||||||
|
type TreeNode,
|
||||||
|
} from "./read/GetStructureTool.js"
|
||||||
|
|
||||||
|
// Edit tools
|
||||||
|
export { EditLinesTool, type EditLinesResult } from "./edit/EditLinesTool.js"
|
||||||
|
export { CreateFileTool, type CreateFileResult } from "./edit/CreateFileTool.js"
|
||||||
|
export { DeleteFileTool, type DeleteFileResult } from "./edit/DeleteFileTool.js"
|
||||||
|
|
||||||
|
// Search tools
|
||||||
|
export {
|
||||||
|
FindReferencesTool,
|
||||||
|
type FindReferencesResult,
|
||||||
|
type SymbolReference,
|
||||||
|
} from "./search/FindReferencesTool.js"
|
||||||
|
export {
|
||||||
|
FindDefinitionTool,
|
||||||
|
type FindDefinitionResult,
|
||||||
|
type DefinitionLocation,
|
||||||
|
} from "./search/FindDefinitionTool.js"
|
||||||
|
|
||||||
|
// Analysis tools
|
||||||
|
export {
|
||||||
|
GetDependenciesTool,
|
||||||
|
type GetDependenciesResult,
|
||||||
|
type DependencyEntry,
|
||||||
|
} from "./analysis/GetDependenciesTool.js"
|
||||||
|
|
||||||
|
export {
|
||||||
|
GetDependentsTool,
|
||||||
|
type GetDependentsResult,
|
||||||
|
type DependentEntry,
|
||||||
|
} from "./analysis/GetDependentsTool.js"
|
||||||
|
|
||||||
|
export {
|
||||||
|
GetComplexityTool,
|
||||||
|
type GetComplexityResult,
|
||||||
|
type ComplexityEntry,
|
||||||
|
} from "./analysis/GetComplexityTool.js"
|
||||||
|
|
||||||
|
export {
|
||||||
|
GetTodosTool,
|
||||||
|
type GetTodosResult,
|
||||||
|
type TodoEntry,
|
||||||
|
type TodoType,
|
||||||
|
} from "./analysis/GetTodosTool.js"
|
||||||
|
|
||||||
|
// Git tools
|
||||||
|
export { GitStatusTool, type GitStatusResult, type FileStatusEntry } from "./git/GitStatusTool.js"
|
||||||
|
|
||||||
|
export { GitDiffTool, type GitDiffResult, type DiffEntry } from "./git/GitDiffTool.js"
|
||||||
|
|
||||||
|
export { GitCommitTool, type GitCommitResult, type CommitAuthor } from "./git/GitCommitTool.js"
|
||||||
|
|
||||||
|
// Run tools
|
||||||
|
export {
|
||||||
|
CommandSecurity,
|
||||||
|
DEFAULT_BLACKLIST,
|
||||||
|
DEFAULT_WHITELIST,
|
||||||
|
type CommandClassification,
|
||||||
|
type SecurityCheckResult,
|
||||||
|
} from "./run/CommandSecurity.js"
|
||||||
|
|
||||||
|
export { RunCommandTool, type RunCommandResult } from "./run/RunCommandTool.js"
|
||||||
|
|
||||||
|
export { RunTestsTool, type RunTestsResult, type TestRunner } from "./run/RunTestsTool.js"
|
||||||
166
packages/ipuaro/src/infrastructure/tools/read/GetClassTool.ts
Normal file
166
packages/ipuaro/src/infrastructure/tools/read/GetClassTool.ts
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import type { ClassInfo } from "../../../domain/value-objects/FileAST.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
import { PathValidator } from "../../security/PathValidator.js"
|
||||||
|
|
||||||
|
/**
 * Result data from get_class tool.
 */
export interface GetClassResult {
    /** File path relative to project root. */
    path: string
    /** Class name. */
    name: string
    /** First line of the class (1-based). */
    startLine: number
    /** Last line of the class (1-based, inclusive). */
    endLine: number
    /** Whether the class is exported. */
    isExported: boolean
    /** Whether the class is declared abstract. */
    isAbstract: boolean
    /** Name of the extended base class, if any. */
    extends?: string
    /** Names of implemented interfaces. */
    implements: string[]
    /** Method names declared on the class. */
    methods: string[]
    /** Property names declared on the class. */
    properties: string[]
    /** Class source with line numbers prepended. */
    content: string
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for retrieving a class's source code by name.
|
||||||
|
* Uses AST to find exact line range.
|
||||||
|
*/
|
||||||
|
export class GetClassTool implements ITool {
|
||||||
|
readonly name = "get_class"
|
||||||
|
readonly description =
|
||||||
|
"Get a class's source code by name. Uses AST to find exact line range. " +
|
||||||
|
"Returns the class code with line numbers."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path relative to project root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "name",
|
||||||
|
type: "string",
|
||||||
|
description: "Class name to retrieve",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "read" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||||
|
return "Parameter 'path' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof params.name !== "string" || params.name.trim() === "") {
|
||||||
|
return "Parameter 'name' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const inputPath = params.path as string
|
||||||
|
const className = params.name as string
|
||||||
|
const pathValidator = new PathValidator(ctx.projectRoot)
|
||||||
|
|
||||||
|
let absolutePath: string
|
||||||
|
let relativePath: string
|
||||||
|
try {
|
||||||
|
;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const ast = await ctx.storage.getAST(relativePath)
|
||||||
|
if (!ast) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`AST not found for "${relativePath}". File may not be indexed.`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const classInfo = this.findClass(ast.classes, className)
|
||||||
|
if (!classInfo) {
|
||||||
|
const available = ast.classes.map((c) => c.name).join(", ") || "none"
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`Class "${className}" not found in "${relativePath}". Available: ${available}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines = await this.getFileLines(absolutePath, relativePath, ctx)
|
||||||
|
const classLines = lines.slice(classInfo.lineStart - 1, classInfo.lineEnd)
|
||||||
|
const content = this.formatLinesWithNumbers(classLines, classInfo.lineStart)
|
||||||
|
|
||||||
|
const result: GetClassResult = {
|
||||||
|
path: relativePath,
|
||||||
|
name: classInfo.name,
|
||||||
|
startLine: classInfo.lineStart,
|
||||||
|
endLine: classInfo.lineEnd,
|
||||||
|
isExported: classInfo.isExported,
|
||||||
|
isAbstract: classInfo.isAbstract,
|
||||||
|
extends: classInfo.extends,
|
||||||
|
implements: classInfo.implements,
|
||||||
|
methods: classInfo.methods.map((m) => m.name),
|
||||||
|
properties: classInfo.properties.map((p) => p.name),
|
||||||
|
content,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find class by name in AST.
|
||||||
|
*/
|
||||||
|
private findClass(classes: ClassInfo[], name: string): ClassInfo | undefined {
|
||||||
|
return classes.find((c) => c.name === name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get file lines from storage or filesystem.
|
||||||
|
*/
|
||||||
|
private async getFileLines(
|
||||||
|
absolutePath: string,
|
||||||
|
relativePath: string,
|
||||||
|
ctx: ToolContext,
|
||||||
|
): Promise<string[]> {
|
||||||
|
const fileData = await ctx.storage.getFile(relativePath)
|
||||||
|
if (fileData) {
|
||||||
|
return fileData.lines
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = await fs.readFile(absolutePath, "utf-8")
|
||||||
|
return content.split("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format lines with line numbers.
|
||||||
|
*/
|
||||||
|
private formatLinesWithNumbers(lines: string[], startLine: number): string {
|
||||||
|
const maxLineNum = startLine + lines.length - 1
|
||||||
|
const padWidth = String(maxLineNum).length
|
||||||
|
|
||||||
|
return lines
|
||||||
|
.map((line, index) => {
|
||||||
|
const lineNum = String(startLine + index).padStart(padWidth, " ")
|
||||||
|
return `${lineNum}│${line}`
|
||||||
|
})
|
||||||
|
.join("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
162
packages/ipuaro/src/infrastructure/tools/read/GetFunctionTool.ts
Normal file
162
packages/ipuaro/src/infrastructure/tools/read/GetFunctionTool.ts
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import type { FunctionInfo } from "../../../domain/value-objects/FileAST.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
import { PathValidator } from "../../security/PathValidator.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result data from get_function tool.
|
||||||
|
*/
|
||||||
|
export interface GetFunctionResult {
|
||||||
|
path: string
|
||||||
|
name: string
|
||||||
|
startLine: number
|
||||||
|
endLine: number
|
||||||
|
isAsync: boolean
|
||||||
|
isExported: boolean
|
||||||
|
params: string[]
|
||||||
|
returnType?: string
|
||||||
|
content: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for retrieving a function's source code by name.
|
||||||
|
* Uses AST to find exact line range.
|
||||||
|
*/
|
||||||
|
export class GetFunctionTool implements ITool {
|
||||||
|
readonly name = "get_function"
|
||||||
|
readonly description =
|
||||||
|
"Get a function's source code by name. Uses AST to find exact line range. " +
|
||||||
|
"Returns the function code with line numbers."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path relative to project root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "name",
|
||||||
|
type: "string",
|
||||||
|
description: "Function name to retrieve",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "read" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||||
|
return "Parameter 'path' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof params.name !== "string" || params.name.trim() === "") {
|
||||||
|
return "Parameter 'name' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const inputPath = params.path as string
|
||||||
|
const functionName = params.name as string
|
||||||
|
const pathValidator = new PathValidator(ctx.projectRoot)
|
||||||
|
|
||||||
|
let absolutePath: string
|
||||||
|
let relativePath: string
|
||||||
|
try {
|
||||||
|
;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const ast = await ctx.storage.getAST(relativePath)
|
||||||
|
if (!ast) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`AST not found for "${relativePath}". File may not be indexed.`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const functionInfo = this.findFunction(ast.functions, functionName)
|
||||||
|
if (!functionInfo) {
|
||||||
|
const available = ast.functions.map((f) => f.name).join(", ") || "none"
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`Function "${functionName}" not found in "${relativePath}". Available: ${available}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines = await this.getFileLines(absolutePath, relativePath, ctx)
|
||||||
|
const functionLines = lines.slice(functionInfo.lineStart - 1, functionInfo.lineEnd)
|
||||||
|
const content = this.formatLinesWithNumbers(functionLines, functionInfo.lineStart)
|
||||||
|
|
||||||
|
const result: GetFunctionResult = {
|
||||||
|
path: relativePath,
|
||||||
|
name: functionInfo.name,
|
||||||
|
startLine: functionInfo.lineStart,
|
||||||
|
endLine: functionInfo.lineEnd,
|
||||||
|
isAsync: functionInfo.isAsync,
|
||||||
|
isExported: functionInfo.isExported,
|
||||||
|
params: functionInfo.params.map((p) => p.name),
|
||||||
|
returnType: functionInfo.returnType,
|
||||||
|
content,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find function by name in AST.
|
||||||
|
*/
|
||||||
|
private findFunction(functions: FunctionInfo[], name: string): FunctionInfo | undefined {
|
||||||
|
return functions.find((f) => f.name === name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get file lines from storage or filesystem.
|
||||||
|
*/
|
||||||
|
private async getFileLines(
|
||||||
|
absolutePath: string,
|
||||||
|
relativePath: string,
|
||||||
|
ctx: ToolContext,
|
||||||
|
): Promise<string[]> {
|
||||||
|
const fileData = await ctx.storage.getFile(relativePath)
|
||||||
|
if (fileData) {
|
||||||
|
return fileData.lines
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = await fs.readFile(absolutePath, "utf-8")
|
||||||
|
return content.split("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format lines with line numbers.
|
||||||
|
*/
|
||||||
|
private formatLinesWithNumbers(lines: string[], startLine: number): string {
|
||||||
|
const maxLineNum = startLine + lines.length - 1
|
||||||
|
const padWidth = String(maxLineNum).length
|
||||||
|
|
||||||
|
return lines
|
||||||
|
.map((line, index) => {
|
||||||
|
const lineNum = String(startLine + index).padStart(padWidth, " ")
|
||||||
|
return `${lineNum}│${line}`
|
||||||
|
})
|
||||||
|
.join("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
159
packages/ipuaro/src/infrastructure/tools/read/GetLinesTool.ts
Normal file
159
packages/ipuaro/src/infrastructure/tools/read/GetLinesTool.ts
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
import { PathValidator } from "../../security/PathValidator.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result data from get_lines tool.
|
||||||
|
*/
|
||||||
|
export interface GetLinesResult {
|
||||||
|
path: string
|
||||||
|
startLine: number
|
||||||
|
endLine: number
|
||||||
|
totalLines: number
|
||||||
|
content: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for reading specific lines from a file.
|
||||||
|
* Returns content with line numbers.
|
||||||
|
*/
|
||||||
|
export class GetLinesTool implements ITool {
|
||||||
|
readonly name = "get_lines"
|
||||||
|
readonly description =
|
||||||
|
"Get specific lines from a file. Returns the content with line numbers. " +
|
||||||
|
"If no range is specified, returns the entire file."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "File path relative to project root",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "start",
|
||||||
|
type: "number",
|
||||||
|
description: "Start line number (1-based, inclusive)",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "end",
|
||||||
|
type: "number",
|
||||||
|
description: "End line number (1-based, inclusive)",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "read" as const
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (typeof params.path !== "string" || params.path.trim() === "") {
|
||||||
|
return "Parameter 'path' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.start !== undefined) {
|
||||||
|
if (typeof params.start !== "number" || !Number.isInteger(params.start)) {
|
||||||
|
return "Parameter 'start' must be an integer"
|
||||||
|
}
|
||||||
|
if (params.start < 1) {
|
||||||
|
return "Parameter 'start' must be >= 1"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.end !== undefined) {
|
||||||
|
if (typeof params.end !== "number" || !Number.isInteger(params.end)) {
|
||||||
|
return "Parameter 'end' must be an integer"
|
||||||
|
}
|
||||||
|
if (params.end < 1) {
|
||||||
|
return "Parameter 'end' must be >= 1"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.start !== undefined && params.end !== undefined && params.start > params.end) {
|
||||||
|
return "Parameter 'start' must be <= 'end'"
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const inputPath = params.path as string
|
||||||
|
const pathValidator = new PathValidator(ctx.projectRoot)
|
||||||
|
|
||||||
|
let absolutePath: string
|
||||||
|
let relativePath: string
|
||||||
|
try {
|
||||||
|
;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const lines = await this.getFileLines(absolutePath, relativePath, ctx)
|
||||||
|
const totalLines = lines.length
|
||||||
|
|
||||||
|
let startLine = (params.start as number | undefined) ?? 1
|
||||||
|
let endLine = (params.end as number | undefined) ?? totalLines
|
||||||
|
|
||||||
|
startLine = Math.max(1, Math.min(startLine, totalLines))
|
||||||
|
endLine = Math.max(startLine, Math.min(endLine, totalLines))
|
||||||
|
|
||||||
|
const selectedLines = lines.slice(startLine - 1, endLine)
|
||||||
|
const content = this.formatLinesWithNumbers(selectedLines, startLine)
|
||||||
|
|
||||||
|
const result: GetLinesResult = {
|
||||||
|
path: relativePath,
|
||||||
|
startLine,
|
||||||
|
endLine,
|
||||||
|
totalLines,
|
||||||
|
content,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get file lines from storage or filesystem.
|
||||||
|
*/
|
||||||
|
private async getFileLines(
|
||||||
|
absolutePath: string,
|
||||||
|
relativePath: string,
|
||||||
|
ctx: ToolContext,
|
||||||
|
): Promise<string[]> {
|
||||||
|
const fileData = await ctx.storage.getFile(relativePath)
|
||||||
|
if (fileData) {
|
||||||
|
return fileData.lines
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = await fs.readFile(absolutePath, "utf-8")
|
||||||
|
return content.split("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format lines with line numbers.
|
||||||
|
* Example: " 1│const x = 1"
|
||||||
|
*/
|
||||||
|
private formatLinesWithNumbers(lines: string[], startLine: number): string {
|
||||||
|
const maxLineNum = startLine + lines.length - 1
|
||||||
|
const padWidth = String(maxLineNum).length
|
||||||
|
|
||||||
|
return lines
|
||||||
|
.map((line, index) => {
|
||||||
|
const lineNum = String(startLine + index).padStart(padWidth, " ")
|
||||||
|
return `${lineNum}│${line}`
|
||||||
|
})
|
||||||
|
.join("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,207 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
import { DEFAULT_IGNORE_PATTERNS } from "../../../domain/constants/index.js"
|
||||||
|
import { PathValidator } from "../../security/PathValidator.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tree node representing a file or directory.
|
||||||
|
*/
|
||||||
|
export interface TreeNode {
|
||||||
|
name: string
|
||||||
|
type: "file" | "directory"
|
||||||
|
children?: TreeNode[]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result data from get_structure tool.
|
||||||
|
*/
|
||||||
|
export interface GetStructureResult {
|
||||||
|
path: string
|
||||||
|
tree: TreeNode
|
||||||
|
content: string
|
||||||
|
stats: {
|
||||||
|
directories: number
|
||||||
|
files: number
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for getting project directory structure as a tree.
|
||||||
|
*/
|
||||||
|
export class GetStructureTool implements ITool {
|
||||||
|
readonly name = "get_structure"
|
||||||
|
readonly description =
|
||||||
|
"Get project directory structure as a tree. " +
|
||||||
|
"If path is specified, shows structure of that subdirectory only."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "Subdirectory path relative to project root (optional, defaults to root)",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "depth",
|
||||||
|
type: "number",
|
||||||
|
description: "Maximum depth to traverse (default: unlimited)",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "read" as const
|
||||||
|
|
||||||
|
private readonly defaultIgnorePatterns = new Set([
|
||||||
|
...DEFAULT_IGNORE_PATTERNS,
|
||||||
|
".git",
|
||||||
|
".idea",
|
||||||
|
".vscode",
|
||||||
|
"__pycache__",
|
||||||
|
".pytest_cache",
|
||||||
|
".nyc_output",
|
||||||
|
"coverage",
|
||||||
|
])
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (params.path !== undefined) {
|
||||||
|
if (typeof params.path !== "string") {
|
||||||
|
return "Parameter 'path' must be a string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.depth !== undefined) {
|
||||||
|
if (typeof params.depth !== "number" || !Number.isInteger(params.depth)) {
|
||||||
|
return "Parameter 'depth' must be an integer"
|
||||||
|
}
|
||||||
|
if (params.depth < 1) {
|
||||||
|
return "Parameter 'depth' must be >= 1"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const inputPath = (params.path as string | undefined) ?? "."
|
||||||
|
const maxDepth = params.depth as number | undefined
|
||||||
|
const pathValidator = new PathValidator(ctx.projectRoot)
|
||||||
|
|
||||||
|
let absolutePath: string
|
||||||
|
let relativePath: string
|
||||||
|
try {
|
||||||
|
;[absolutePath, relativePath] = pathValidator.resolveOrThrow(inputPath)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const stat = await fs.stat(absolutePath)
|
||||||
|
if (!stat.isDirectory()) {
|
||||||
|
return createErrorResult(
|
||||||
|
callId,
|
||||||
|
`Path "${relativePath}" is not a directory`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const stats = { directories: 0, files: 0 }
|
||||||
|
const tree = await this.buildTree(absolutePath, maxDepth, 0, stats)
|
||||||
|
const content = this.formatTree(tree)
|
||||||
|
|
||||||
|
const result: GetStructureResult = {
|
||||||
|
path: relativePath || ".",
|
||||||
|
tree,
|
||||||
|
content,
|
||||||
|
stats,
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build tree structure recursively.
|
||||||
|
*/
|
||||||
|
private async buildTree(
|
||||||
|
dirPath: string,
|
||||||
|
maxDepth: number | undefined,
|
||||||
|
currentDepth: number,
|
||||||
|
stats: { directories: number; files: number },
|
||||||
|
): Promise<TreeNode> {
|
||||||
|
const name = path.basename(dirPath) || dirPath
|
||||||
|
const node: TreeNode = { name, type: "directory", children: [] }
|
||||||
|
stats.directories++
|
||||||
|
|
||||||
|
if (maxDepth !== undefined && currentDepth >= maxDepth) {
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
const entries = await fs.readdir(dirPath, { withFileTypes: true })
|
||||||
|
const sortedEntries = entries
|
||||||
|
.filter((e) => !this.shouldIgnore(e.name))
|
||||||
|
.sort((a, b) => {
|
||||||
|
if (a.isDirectory() && !b.isDirectory()) {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
if (!a.isDirectory() && b.isDirectory()) {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
return a.name.localeCompare(b.name)
|
||||||
|
})
|
||||||
|
|
||||||
|
for (const entry of sortedEntries) {
|
||||||
|
const entryPath = path.join(dirPath, entry.name)
|
||||||
|
|
||||||
|
if (entry.isDirectory()) {
|
||||||
|
const childNode = await this.buildTree(entryPath, maxDepth, currentDepth + 1, stats)
|
||||||
|
node.children?.push(childNode)
|
||||||
|
} else if (entry.isFile()) {
|
||||||
|
node.children?.push({ name: entry.name, type: "file" })
|
||||||
|
stats.files++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return node
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if entry should be ignored.
|
||||||
|
*/
|
||||||
|
private shouldIgnore(name: string): boolean {
|
||||||
|
return this.defaultIgnorePatterns.has(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format tree as ASCII art.
|
||||||
|
*/
|
||||||
|
private formatTree(node: TreeNode, prefix = "", isLast = true): string {
|
||||||
|
const lines: string[] = []
|
||||||
|
const connector = isLast ? "└── " : "├── "
|
||||||
|
const icon = node.type === "directory" ? "📁 " : "📄 "
|
||||||
|
|
||||||
|
lines.push(`${prefix}${connector}${icon}${node.name}`)
|
||||||
|
|
||||||
|
if (node.children) {
|
||||||
|
const childPrefix = prefix + (isLast ? " " : "│ ")
|
||||||
|
const childCount = node.children.length
|
||||||
|
node.children.forEach((child, index) => {
|
||||||
|
const childIsLast = index === childCount - 1
|
||||||
|
lines.push(this.formatTree(child, childPrefix, childIsLast))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join("\n")
|
||||||
|
}
|
||||||
|
}
|
||||||
185
packages/ipuaro/src/infrastructure/tools/registry.ts
Normal file
185
packages/ipuaro/src/infrastructure/tools/registry.ts
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
import type { IToolRegistry } from "../../application/interfaces/IToolRegistry.js"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../domain/services/ITool.js"
|
||||||
|
import { createErrorResult, type ToolResult } from "../../domain/value-objects/ToolResult.js"
|
||||||
|
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool registry implementation.
|
||||||
|
* Manages registration and execution of tools.
|
||||||
|
*/
|
||||||
|
export class ToolRegistry implements IToolRegistry {
|
||||||
|
private readonly tools = new Map<string, ITool>()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a tool.
|
||||||
|
* @throws IpuaroError if tool with same name already registered
|
||||||
|
*/
|
||||||
|
register(tool: ITool): void {
|
||||||
|
if (this.tools.has(tool.name)) {
|
||||||
|
throw IpuaroError.validation(`Tool "${tool.name}" is already registered`)
|
||||||
|
}
|
||||||
|
this.tools.set(tool.name, tool)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Unregister a tool by name.
|
||||||
|
* @returns true if tool was removed, false if not found
|
||||||
|
*/
|
||||||
|
unregister(name: string): boolean {
|
||||||
|
return this.tools.delete(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool by name.
|
||||||
|
*/
|
||||||
|
get(name: string): ITool | undefined {
|
||||||
|
return this.tools.get(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all registered tools.
|
||||||
|
*/
|
||||||
|
getAll(): ITool[] {
|
||||||
|
return Array.from(this.tools.values())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tools by category.
|
||||||
|
*/
|
||||||
|
getByCategory(category: ITool["category"]): ITool[] {
|
||||||
|
return this.getAll().filter((tool) => tool.category === category)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if tool exists.
|
||||||
|
*/
|
||||||
|
has(name: string): boolean {
|
||||||
|
return this.tools.has(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get number of registered tools.
|
||||||
|
*/
|
||||||
|
get size(): number {
|
||||||
|
return this.tools.size
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute tool by name.
|
||||||
|
* @throws IpuaroError if tool not found
|
||||||
|
*/
|
||||||
|
async execute(
|
||||||
|
name: string,
|
||||||
|
params: Record<string, unknown>,
|
||||||
|
ctx: ToolContext,
|
||||||
|
): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const tool = this.tools.get(name)
|
||||||
|
if (!tool) {
|
||||||
|
return createErrorResult(callId, `Tool "${name}" not found`, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
const validationError = tool.validateParams(params)
|
||||||
|
if (validationError) {
|
||||||
|
return createErrorResult(callId, validationError, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (tool.requiresConfirmation) {
|
||||||
|
const confirmed = await ctx.requestConfirmation(
|
||||||
|
`Execute "${name}" with params: ${JSON.stringify(params)}`,
|
||||||
|
)
|
||||||
|
if (!confirmed) {
|
||||||
|
return createErrorResult(callId, "User cancelled operation", Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await tool.execute(params, ctx)
|
||||||
|
return {
|
||||||
|
...result,
|
||||||
|
callId,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool definitions for LLM.
|
||||||
|
* Converts ITool[] to LLM-compatible format.
|
||||||
|
*/
|
||||||
|
getToolDefinitions(): {
|
||||||
|
name: string
|
||||||
|
description: string
|
||||||
|
parameters: {
|
||||||
|
type: "object"
|
||||||
|
properties: Record<string, { type: string; description: string }>
|
||||||
|
required: string[]
|
||||||
|
}
|
||||||
|
}[] {
|
||||||
|
return this.getAll().map((tool) => ({
|
||||||
|
name: tool.name,
|
||||||
|
description: tool.description,
|
||||||
|
parameters: this.convertParametersToSchema(tool.parameters),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert tool parameters to JSON Schema format.
|
||||||
|
*/
|
||||||
|
private convertParametersToSchema(params: ToolParameterSchema[]): {
|
||||||
|
type: "object"
|
||||||
|
properties: Record<string, { type: string; description: string }>
|
||||||
|
required: string[]
|
||||||
|
} {
|
||||||
|
const properties: Record<string, { type: string; description: string }> = {}
|
||||||
|
const required: string[] = []
|
||||||
|
|
||||||
|
for (const param of params) {
|
||||||
|
properties[param.name] = {
|
||||||
|
type: param.type,
|
||||||
|
description: param.description,
|
||||||
|
}
|
||||||
|
if (param.required) {
|
||||||
|
required.push(param.name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
type: "object",
|
||||||
|
properties,
|
||||||
|
required,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clear all registered tools.
|
||||||
|
*/
|
||||||
|
clear(): void {
|
||||||
|
this.tools.clear()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool names.
|
||||||
|
*/
|
||||||
|
getNames(): string[] {
|
||||||
|
return Array.from(this.tools.keys())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tools that require confirmation.
|
||||||
|
*/
|
||||||
|
getConfirmationTools(): ITool[] {
|
||||||
|
return this.getAll().filter((tool) => tool.requiresConfirmation)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tools that don't require confirmation.
|
||||||
|
*/
|
||||||
|
getSafeTools(): ITool[] {
|
||||||
|
return this.getAll().filter((tool) => !tool.requiresConfirmation)
|
||||||
|
}
|
||||||
|
}
|
||||||
257
packages/ipuaro/src/infrastructure/tools/run/CommandSecurity.ts
Normal file
257
packages/ipuaro/src/infrastructure/tools/run/CommandSecurity.ts
Normal file
@@ -0,0 +1,257 @@
|
|||||||
|
/**
|
||||||
|
* Command security classification.
|
||||||
|
*/
|
||||||
|
export type CommandClassification = "allowed" | "blocked" | "requires_confirmation"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of command security check.
|
||||||
|
*/
|
||||||
|
export interface SecurityCheckResult {
|
||||||
|
/** Classification of the command */
|
||||||
|
classification: CommandClassification
|
||||||
|
/** Reason for the classification */
|
||||||
|
reason: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Dangerous commands that are always blocked.
|
||||||
|
* These commands can cause data loss or security issues.
|
||||||
|
*/
|
||||||
|
export const DEFAULT_BLACKLIST: string[] = [
|
||||||
|
// Destructive file operations
|
||||||
|
"rm -rf",
|
||||||
|
"rm -r",
|
||||||
|
"rm -fr",
|
||||||
|
"rmdir",
|
||||||
|
// Dangerous git operations
|
||||||
|
"git push --force",
|
||||||
|
"git push -f",
|
||||||
|
"git reset --hard",
|
||||||
|
"git clean -fd",
|
||||||
|
"git clean -f",
|
||||||
|
// Publishing/deployment
|
||||||
|
"npm publish",
|
||||||
|
"yarn publish",
|
||||||
|
"pnpm publish",
|
||||||
|
// System commands
|
||||||
|
"sudo",
|
||||||
|
"su ",
|
||||||
|
"chmod",
|
||||||
|
"chown",
|
||||||
|
// Network/download commands that could be dangerous
|
||||||
|
"| sh",
|
||||||
|
"| bash",
|
||||||
|
// Environment manipulation
|
||||||
|
"export ",
|
||||||
|
"unset ",
|
||||||
|
// Process control
|
||||||
|
"kill -9",
|
||||||
|
"killall",
|
||||||
|
"pkill",
|
||||||
|
// Disk operations (require exact command start)
|
||||||
|
"mkfs",
|
||||||
|
"fdisk",
|
||||||
|
// Other dangerous
|
||||||
|
":(){ :|:& };:",
|
||||||
|
"eval ",
|
||||||
|
]
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Safe commands that don't require confirmation.
|
||||||
|
* Matched by first word (command name).
|
||||||
|
*/
|
||||||
|
export const DEFAULT_WHITELIST: string[] = [
|
||||||
|
// Package managers
|
||||||
|
"npm",
|
||||||
|
"pnpm",
|
||||||
|
"yarn",
|
||||||
|
"npx",
|
||||||
|
"bun",
|
||||||
|
// Node.js
|
||||||
|
"node",
|
||||||
|
"tsx",
|
||||||
|
"ts-node",
|
||||||
|
// Git (read operations)
|
||||||
|
"git",
|
||||||
|
// Build tools
|
||||||
|
"tsc",
|
||||||
|
"tsup",
|
||||||
|
"esbuild",
|
||||||
|
"vite",
|
||||||
|
"webpack",
|
||||||
|
"rollup",
|
||||||
|
// Testing
|
||||||
|
"vitest",
|
||||||
|
"jest",
|
||||||
|
"mocha",
|
||||||
|
"playwright",
|
||||||
|
"cypress",
|
||||||
|
// Linting/formatting
|
||||||
|
"eslint",
|
||||||
|
"prettier",
|
||||||
|
"biome",
|
||||||
|
// Utilities
|
||||||
|
"echo",
|
||||||
|
"cat",
|
||||||
|
"ls",
|
||||||
|
"pwd",
|
||||||
|
"which",
|
||||||
|
"head",
|
||||||
|
"tail",
|
||||||
|
"grep",
|
||||||
|
"find",
|
||||||
|
"wc",
|
||||||
|
"sort",
|
||||||
|
"diff",
|
||||||
|
]
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Git subcommands that are safe and don't need confirmation.
|
||||||
|
*/
|
||||||
|
const SAFE_GIT_SUBCOMMANDS: string[] = [
|
||||||
|
"status",
|
||||||
|
"log",
|
||||||
|
"diff",
|
||||||
|
"show",
|
||||||
|
"branch",
|
||||||
|
"remote",
|
||||||
|
"fetch",
|
||||||
|
"pull",
|
||||||
|
"stash",
|
||||||
|
"tag",
|
||||||
|
"blame",
|
||||||
|
"ls-files",
|
||||||
|
"ls-tree",
|
||||||
|
"rev-parse",
|
||||||
|
"describe",
|
||||||
|
]
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Command security checker.
|
||||||
|
* Determines if a command is safe to execute, blocked, or requires confirmation.
|
||||||
|
*/
|
||||||
|
export class CommandSecurity {
|
||||||
|
private readonly blacklist: string[]
|
||||||
|
private readonly whitelist: string[]
|
||||||
|
|
||||||
|
constructor(blacklist: string[] = DEFAULT_BLACKLIST, whitelist: string[] = DEFAULT_WHITELIST) {
|
||||||
|
this.blacklist = blacklist.map((cmd) => cmd.toLowerCase())
|
||||||
|
this.whitelist = whitelist.map((cmd) => cmd.toLowerCase())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a command is safe to execute.
|
||||||
|
*/
|
||||||
|
check(command: string): SecurityCheckResult {
|
||||||
|
const normalized = command.trim().toLowerCase()
|
||||||
|
|
||||||
|
const blacklistMatch = this.isBlacklisted(normalized)
|
||||||
|
if (blacklistMatch) {
|
||||||
|
return {
|
||||||
|
classification: "blocked",
|
||||||
|
reason: `Command contains blocked pattern: '${blacklistMatch}'`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.isWhitelisted(normalized)) {
|
||||||
|
return {
|
||||||
|
classification: "allowed",
|
||||||
|
reason: "Command is in the whitelist",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
classification: "requires_confirmation",
|
||||||
|
reason: "Command is not in the whitelist and requires user confirmation",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if command matches any blacklist pattern.
|
||||||
|
* Returns the matched pattern or null.
|
||||||
|
*/
|
||||||
|
private isBlacklisted(command: string): string | null {
|
||||||
|
for (const pattern of this.blacklist) {
|
||||||
|
if (command.includes(pattern)) {
|
||||||
|
return pattern
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if command's first word is in the whitelist.
|
||||||
|
*/
|
||||||
|
private isWhitelisted(command: string): boolean {
|
||||||
|
const firstWord = this.getFirstWord(command)
|
||||||
|
|
||||||
|
if (!this.whitelist.includes(firstWord)) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (firstWord === "git") {
|
||||||
|
return this.isGitCommandSafe(command)
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if git command is safe (read-only operations).
|
||||||
|
*/
|
||||||
|
private isGitCommandSafe(command: string): boolean {
|
||||||
|
const parts = command.split(/\s+/)
|
||||||
|
if (parts.length < 2) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
const subcommand = parts[1]
|
||||||
|
return SAFE_GIT_SUBCOMMANDS.includes(subcommand)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get first word from command.
|
||||||
|
*/
|
||||||
|
private getFirstWord(command: string): string {
|
||||||
|
const match = /^(\S+)/.exec(command)
|
||||||
|
return match ? match[1] : ""
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add patterns to the blacklist.
|
||||||
|
*/
|
||||||
|
addToBlacklist(patterns: string[]): void {
|
||||||
|
for (const pattern of patterns) {
|
||||||
|
const normalized = pattern.toLowerCase()
|
||||||
|
if (!this.blacklist.includes(normalized)) {
|
||||||
|
this.blacklist.push(normalized)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add commands to the whitelist.
|
||||||
|
*/
|
||||||
|
addToWhitelist(commands: string[]): void {
|
||||||
|
for (const cmd of commands) {
|
||||||
|
const normalized = cmd.toLowerCase()
|
||||||
|
if (!this.whitelist.includes(normalized)) {
|
||||||
|
this.whitelist.push(normalized)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Get current blacklist.
 * Returns a defensive copy so callers cannot mutate internal state.
 */
getBlacklist(): string[] {
    return this.blacklist.slice()
}
|
||||||
|
|
||||||
|
/**
 * Get current whitelist.
 * Returns a defensive copy so callers cannot mutate internal state.
 */
getWhitelist(): string[] {
    return this.whitelist.slice()
}
|
||||||
|
}
|
||||||
230
packages/ipuaro/src/infrastructure/tools/run/RunCommandTool.ts
Normal file
230
packages/ipuaro/src/infrastructure/tools/run/RunCommandTool.ts
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
import { exec } from "node:child_process"
|
||||||
|
import { promisify } from "node:util"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
import type { CommandsConfig } from "../../../shared/constants/config.js"
|
||||||
|
import { CommandSecurity } from "./CommandSecurity.js"
|
||||||
|
|
||||||
|
// Promise-returning exec: resolves with { stdout, stderr }, rejects on
// non-zero exit with an error carrying code/stdout/stderr (consumed in
// handleExecError below).
const execAsync = promisify(exec)

/**
 * Result data from run_command tool.
 */
export interface RunCommandResult {
    /** The command that was executed */
    command: string
    /** Exit code (0 = success) */
    exitCode: number
    /** Standard output */
    stdout: string
    /** Standard error output */
    stderr: string
    /** Whether command was successful (exit code 0) */
    success: boolean
    /** Execution time in milliseconds */
    durationMs: number
    /** Whether user confirmation was required */
    requiredConfirmation: boolean
}

/**
 * Default command timeout in milliseconds.
 * Used when neither the `timeout` param nor the config supplies one.
 */
const DEFAULT_TIMEOUT = 30000

/**
 * Maximum output size in characters.
 * NOTE(review): also passed to exec as `maxBuffer`, which counts bytes,
 * not characters — confirm the unit mismatch is acceptable.
 */
const MAX_OUTPUT_SIZE = 100000
|
||||||
|
|
||||||
|
/**
 * Tool for executing shell commands.
 * Commands are checked against blacklist/whitelist for security.
 *
 * Flow: validate params -> security check (blocked / requires
 * confirmation / allowed) -> exec in the project root -> map success or
 * failure into a RunCommandResult.
 */
export class RunCommandTool implements ITool {
    readonly name = "run_command"
    readonly description =
        "Execute a shell command in the project directory. " +
        "Commands are checked against blacklist/whitelist for security. " +
        "Unknown commands require user confirmation."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "command",
            type: "string",
            description: "Shell command to execute",
            required: true,
        },
        {
            name: "timeout",
            type: "number",
            description: "Timeout in milliseconds (default: from config or 30000, max: 600000)",
            required: false,
        },
    ]
    // Confirmation is handled per-command by the security check, not by
    // the generic tool-level flag.
    readonly requiresConfirmation = false
    readonly category = "run" as const

    private readonly security: CommandSecurity
    // Injectable for tests; defaults to the promisified exec above.
    private readonly execFn: typeof execAsync
    // Timeout from config, or null if the config supplies none.
    private readonly configTimeout: number | null

    constructor(security?: CommandSecurity, execFn?: typeof execAsync, config?: CommandsConfig) {
        this.security = security ?? new CommandSecurity()
        this.execFn = execFn ?? execAsync
        this.configTimeout = config?.timeout ?? null
    }

    /**
     * Validate raw parameters before execution.
     * Returns an error message, or null when the params are valid.
     */
    validateParams(params: Record<string, unknown>): string | null {
        if (params.command === undefined) {
            return "Parameter 'command' is required"
        }
        if (typeof params.command !== "string") {
            return "Parameter 'command' must be a string"
        }
        if (params.command.trim() === "") {
            return "Parameter 'command' cannot be empty"
        }
        if (params.timeout !== undefined) {
            if (typeof params.timeout !== "number") {
                return "Parameter 'timeout' must be a number"
            }
            if (params.timeout <= 0) {
                return "Parameter 'timeout' must be positive"
            }
            if (params.timeout > 600000) {
                return "Parameter 'timeout' cannot exceed 600000ms (10 minutes)"
            }
        }
        return null
    }

    /**
     * Execute the command after security classification.
     * Blocked commands and user-declined confirmations return error
     * results; everything else (including non-zero exits) returns a
     * success-wrapped RunCommandResult.
     */
    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        // Casts are safe assuming validateParams ran first.
        const command = params.command as string
        // Precedence: explicit param, then config, then built-in default.
        const timeout = (params.timeout as number) ?? this.configTimeout ?? DEFAULT_TIMEOUT

        const securityCheck = this.security.check(command)

        if (securityCheck.classification === "blocked") {
            return createErrorResult(
                callId,
                `Command blocked for security: ${securityCheck.reason}`,
                Date.now() - startTime,
            )
        }

        let requiredConfirmation = false

        if (securityCheck.classification === "requires_confirmation") {
            requiredConfirmation = true
            const confirmed = await ctx.requestConfirmation(
                `Execute command: ${command}\n\nReason: ${securityCheck.reason}`,
            )

            if (!confirmed) {
                return createErrorResult(
                    callId,
                    "Command execution cancelled by user",
                    Date.now() - startTime,
                )
            }
        }

        try {
            const execStartTime = Date.now()

            const { stdout, stderr } = await this.execFn(command, {
                cwd: ctx.projectRoot,
                timeout,
                // NOTE(review): maxBuffer is measured in bytes while
                // MAX_OUTPUT_SIZE is used elsewhere as a character
                // count — confirm this is intentional.
                maxBuffer: MAX_OUTPUT_SIZE,
                // Disable ANSI color so captured output stays plain text.
                env: { ...process.env, FORCE_COLOR: "0" },
            })

            // Duration of the exec itself, excluding confirmation time.
            const durationMs = Date.now() - execStartTime

            const result: RunCommandResult = {
                command,
                exitCode: 0,
                stdout: this.truncateOutput(stdout),
                stderr: this.truncateOutput(stderr),
                success: true,
                durationMs,
                requiredConfirmation,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            return this.handleExecError(callId, command, error, requiredConfirmation, startTime)
        }
    }

    /**
     * Handle exec errors and return appropriate result.
     * A non-zero exit (error with a `code`) is reported as a successful
     * tool call carrying a failed RunCommandResult; timeouts and other
     * failures become error results.
     */
    private handleExecError(
        callId: string,
        command: string,
        error: unknown,
        requiredConfirmation: boolean,
        startTime: number,
    ): ToolResult {
        if (this.isExecError(error)) {
            const result: RunCommandResult = {
                command,
                exitCode: error.code ?? 1,
                stdout: this.truncateOutput(error.stdout ?? ""),
                stderr: this.truncateOutput(error.stderr ?? error.message),
                success: false,
                // Measured from overall start; includes confirmation time.
                durationMs: Date.now() - startTime,
                requiredConfirmation,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        }

        if (error instanceof Error) {
            if (error.message.includes("ETIMEDOUT") || error.message.includes("timed out")) {
                return createErrorResult(
                    callId,
                    `Command timed out: ${command}`,
                    Date.now() - startTime,
                )
            }
            return createErrorResult(callId, error.message, Date.now() - startTime)
        }

        return createErrorResult(callId, String(error), Date.now() - startTime)
    }

    /**
     * Type guard for exec error.
     * Any Error with a `code` property is treated as an exec failure.
     */
    private isExecError(
        error: unknown,
    ): error is Error & { code?: number; stdout?: string; stderr?: string } {
        return error instanceof Error && "code" in error
    }

    /**
     * Truncate output if too large.
     */
    private truncateOutput(output: string): string {
        if (output.length <= MAX_OUTPUT_SIZE) {
            return output
        }
        return `${output.slice(0, MAX_OUTPUT_SIZE)}\n... (output truncated)`
    }

    /**
     * Get the security checker instance.
     * Exposed so callers can extend the black/whitelists at runtime.
     */
    getSecurity(): CommandSecurity {
        return this.security
    }
}
|
||||||
365
packages/ipuaro/src/infrastructure/tools/run/RunTestsTool.ts
Normal file
365
packages/ipuaro/src/infrastructure/tools/run/RunTestsTool.ts
Normal file
@@ -0,0 +1,365 @@
|
|||||||
|
import { exec } from "node:child_process"
|
||||||
|
import { promisify } from "node:util"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import * as fs from "node:fs/promises"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
// Promise-returning exec; rejects on non-zero exit with an error
// carrying code/stdout/stderr (consumed in handleExecError below).
const execAsync = promisify(exec)

/**
 * Supported test runners.
 * "npm" is the fallback: delegate to the package.json "test" script.
 */
export type TestRunner = "vitest" | "jest" | "mocha" | "npm"

/**
 * Result data from run_tests tool.
 */
export interface RunTestsResult {
    /** Test runner that was used */
    runner: TestRunner
    /** Command that was executed */
    command: string
    /** Whether all tests passed */
    passed: boolean
    /** Exit code */
    exitCode: number
    /** Standard output */
    stdout: string
    /** Standard error output */
    stderr: string
    /** Execution time in milliseconds */
    durationMs: number
}

/**
 * Default test timeout in milliseconds (5 minutes).
 */
const DEFAULT_TIMEOUT = 300000

/**
 * Maximum output size in characters.
 * NOTE(review): also passed to exec as `maxBuffer` (bytes) — confirm
 * the unit mismatch is acceptable.
 */
const MAX_OUTPUT_SIZE = 200000
|
||||||
|
|
||||||
|
/**
 * Tool for running project tests.
 * Auto-detects test runner (vitest, jest, mocha, npm test).
 *
 * Detection order: known config files first (vitest/jest), then
 * package.json dependencies, then a "test" script as last resort.
 */
export class RunTestsTool implements ITool {
    readonly name = "run_tests"
    readonly description =
        "Run the project's test suite. Auto-detects test runner (vitest, jest, npm test). " +
        "Returns test results summary."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "path",
            type: "string",
            description: "Run tests for specific file or directory",
            required: false,
        },
        {
            name: "filter",
            type: "string",
            description: "Filter tests by name pattern",
            required: false,
        },
        {
            name: "watch",
            type: "boolean",
            description: "Run in watch mode (default: false)",
            required: false,
        },
    ]
    readonly requiresConfirmation = false
    readonly category = "run" as const

    // Injectable for tests; default to real exec/fs implementations.
    private readonly execFn: typeof execAsync
    private readonly fsAccess: typeof fs.access
    private readonly fsReadFile: typeof fs.readFile

    constructor(
        execFn?: typeof execAsync,
        fsAccess?: typeof fs.access,
        fsReadFile?: typeof fs.readFile,
    ) {
        this.execFn = execFn ?? execAsync
        this.fsAccess = fsAccess ?? fs.access
        this.fsReadFile = fsReadFile ?? fs.readFile
    }

    /**
     * Validate raw parameters.
     * Returns an error message, or null when the params are valid.
     */
    validateParams(params: Record<string, unknown>): string | null {
        if (params.path !== undefined && typeof params.path !== "string") {
            return "Parameter 'path' must be a string"
        }
        if (params.filter !== undefined && typeof params.filter !== "string") {
            return "Parameter 'filter' must be a string"
        }
        if (params.watch !== undefined && typeof params.watch !== "boolean") {
            return "Parameter 'watch' must be a boolean"
        }
        return null
    }

    /**
     * Detect the runner, build the command, run it, and wrap the
     * outcome in a RunTestsResult. Failing tests (non-zero exit) are a
     * successful tool call with passed=false, not a tool error.
     */
    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const testPath = params.path as string | undefined
        const filter = params.filter as string | undefined
        const watch = (params.watch as boolean) ?? false

        try {
            const runner = await this.detectTestRunner(ctx.projectRoot)

            if (!runner) {
                return createErrorResult(
                    callId,
                    "No test runner detected. Ensure vitest, jest, or mocha is installed, or 'test' script exists in package.json.",
                    Date.now() - startTime,
                )
            }

            const command = this.buildCommand(runner, testPath, filter, watch)
            const execStartTime = Date.now()

            try {
                // NOTE(review): watch mode combined with CI="true" and a
                // hard timeout means the watcher runs until the timeout
                // expires — confirm watch is actually usable here.
                const { stdout, stderr } = await this.execFn(command, {
                    cwd: ctx.projectRoot,
                    timeout: DEFAULT_TIMEOUT,
                    maxBuffer: MAX_OUTPUT_SIZE,
                    // Plain-text output; CI=true makes runners non-interactive.
                    env: { ...process.env, FORCE_COLOR: "0", CI: "true" },
                })

                const durationMs = Date.now() - execStartTime

                const result: RunTestsResult = {
                    runner,
                    command,
                    passed: true,
                    exitCode: 0,
                    stdout: this.truncateOutput(stdout),
                    stderr: this.truncateOutput(stderr),
                    durationMs,
                }

                return createSuccessResult(callId, result, Date.now() - startTime)
            } catch (error) {
                return this.handleExecError(
                    { callId, runner, command, startTime },
                    error,
                    execStartTime,
                )
            }
        } catch (error) {
            // Detection or other unexpected failure outside the exec itself.
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Detect which test runner is available in the project.
     * Config files win over package.json inspection.
     */
    async detectTestRunner(projectRoot: string): Promise<TestRunner | null> {
        const configRunner = await this.detectByConfigFile(projectRoot)
        if (configRunner) {
            return configRunner
        }

        return this.detectByPackageJson(projectRoot)
    }

    // Probe well-known config filenames in the project root.
    // Note: only vitest and jest have config-file detection; mocha is
    // detected via package.json dependencies below.
    private async detectByConfigFile(projectRoot: string): Promise<TestRunner | null> {
        const configFiles: { files: string[]; runner: TestRunner }[] = [
            {
                files: ["vitest.config.ts", "vitest.config.js", "vitest.config.mts"],
                runner: "vitest",
            },
            {
                files: ["jest.config.js", "jest.config.ts", "jest.config.json"],
                runner: "jest",
            },
        ]

        for (const { files, runner } of configFiles) {
            for (const file of files) {
                if (await this.hasFile(projectRoot, file)) {
                    return runner
                }
            }
        }
        return null
    }

    // Fall back to package.json: dependencies first, then a bare
    // "test" script delegated to npm.
    private async detectByPackageJson(projectRoot: string): Promise<TestRunner | null> {
        const packageJsonPath = path.join(projectRoot, "package.json")
        try {
            const content = await this.fsReadFile(packageJsonPath, "utf-8")
            const pkg = JSON.parse(content) as {
                scripts?: Record<string, string>
                devDependencies?: Record<string, string>
                dependencies?: Record<string, string>
            }

            const deps = { ...pkg.devDependencies, ...pkg.dependencies }
            if (deps.vitest) {
                return "vitest"
            }
            if (deps.jest) {
                return "jest"
            }
            if (deps.mocha) {
                return "mocha"
            }
            if (pkg.scripts?.test) {
                return "npm"
            }
        } catch {
            // package.json doesn't exist or is invalid
        }
        return null
    }

    /**
     * Build the test command based on runner and options.
     */
    buildCommand(runner: TestRunner, testPath?: string, filter?: string, watch?: boolean): string {
        // Dispatch table avoids a switch; each builder returns argv parts.
        const builders: Record<TestRunner, () => string[]> = {
            vitest: () => this.buildVitestCommand(testPath, filter, watch),
            jest: () => this.buildJestCommand(testPath, filter, watch),
            mocha: () => this.buildMochaCommand(testPath, filter, watch),
            npm: () => this.buildNpmCommand(testPath, filter),
        }
        return builders[runner]().join(" ")
    }

    // Vitest watches by default, so "run" is appended for one-shot mode.
    private buildVitestCommand(testPath?: string, filter?: string, watch?: boolean): string[] {
        const parts = ["npx vitest"]
        if (!watch) {
            parts.push("run")
        }
        if (testPath) {
            parts.push(testPath)
        }
        if (filter) {
            parts.push("-t", `"${filter}"`)
        }
        return parts
    }

    private buildJestCommand(testPath?: string, filter?: string, watch?: boolean): string[] {
        const parts = ["npx jest"]
        if (testPath) {
            parts.push(testPath)
        }
        if (filter) {
            parts.push("-t", `"${filter}"`)
        }
        if (watch) {
            parts.push("--watch")
        }
        return parts
    }

    private buildMochaCommand(testPath?: string, filter?: string, watch?: boolean): string[] {
        const parts = ["npx mocha"]
        if (testPath) {
            parts.push(testPath)
        }
        if (filter) {
            parts.push("--grep", `"${filter}"`)
        }
        if (watch) {
            parts.push("--watch")
        }
        return parts
    }

    // npm needs "--" to forward extra args to the underlying script.
    // Watch mode is not supported for the npm fallback.
    private buildNpmCommand(testPath?: string, filter?: string): string[] {
        const parts = ["npm test"]
        if (testPath || filter) {
            parts.push("--")
            if (testPath) {
                parts.push(testPath)
            }
            if (filter) {
                parts.push(`"${filter}"`)
            }
        }
        return parts
    }

    /**
     * Check if a file exists.
     */
    private async hasFile(projectRoot: string, filename: string): Promise<boolean> {
        try {
            await this.fsAccess(path.join(projectRoot, filename))
            return true
        } catch {
            return false
        }
    }

    /**
     * Handle exec errors and return appropriate result.
     * A non-zero exit (error with `code`) means the tests ran and
     * failed — reported as a successful tool call with passed=false.
     */
    private handleExecError(
        ctx: { callId: string; runner: TestRunner; command: string; startTime: number },
        error: unknown,
        execStartTime: number,
    ): ToolResult {
        const { callId, runner, command, startTime } = ctx
        const durationMs = Date.now() - execStartTime

        if (this.isExecError(error)) {
            const result: RunTestsResult = {
                runner,
                command,
                passed: false,
                exitCode: error.code ?? 1,
                stdout: this.truncateOutput(error.stdout ?? ""),
                stderr: this.truncateOutput(error.stderr ?? error.message),
                durationMs,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        }

        if (error instanceof Error) {
            if (error.message.includes("ETIMEDOUT") || error.message.includes("timed out")) {
                return createErrorResult(
                    callId,
                    `Tests timed out after ${String(DEFAULT_TIMEOUT / 1000)} seconds`,
                    Date.now() - startTime,
                )
            }
            return createErrorResult(callId, error.message, Date.now() - startTime)
        }

        return createErrorResult(callId, String(error), Date.now() - startTime)
    }

    /**
     * Type guard for exec error.
     */
    private isExecError(
        error: unknown,
    ): error is Error & { code?: number; stdout?: string; stderr?: string } {
        return error instanceof Error && "code" in error
    }

    /**
     * Truncate output if too large.
     */
    private truncateOutput(output: string): string {
        if (output.length <= MAX_OUTPUT_SIZE) {
            return output
        }
        return `${output.slice(0, MAX_OUTPUT_SIZE)}\n... (output truncated)`
    }
}
|
||||||
12
packages/ipuaro/src/infrastructure/tools/run/index.ts
Normal file
12
packages/ipuaro/src/infrastructure/tools/run/index.ts
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
// Run tools exports
//
// Barrel file for the "run" tool category: re-exports the command
// security checker (with its default lists and result types) and the
// command/test execution tools so consumers import from one path.
export {
    CommandSecurity,
    DEFAULT_BLACKLIST,
    DEFAULT_WHITELIST,
    type CommandClassification,
    type SecurityCheckResult,
} from "./CommandSecurity.js"

export { RunCommandTool, type RunCommandResult } from "./RunCommandTool.js"

export { RunTestsTool, type RunTestsResult, type TestRunner } from "./RunTestsTool.js"
|
||||||
@@ -0,0 +1,221 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import type { SymbolLocation } from "../../../domain/services/IStorage.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
 * A single definition location with context.
 */
export interface DefinitionLocation {
    /** File path of the definition (relative to the project root) */
    path: string
    /** 1-based line number of the definition */
    line: number
    /** Symbol kind, as recorded in the symbol index */
    type: SymbolLocation["type"]
    /** A few surrounding source lines, ">"-marked at the definition */
    context: string
}

/**
 * Result data from find_definition tool.
 */
export interface FindDefinitionResult {
    /** The symbol that was looked up */
    symbol: string
    /** Whether at least one definition was found */
    found: boolean
    /** All known definitions, sorted by path then line */
    definitions: DefinitionLocation[]
    /** Similar symbol names offered when nothing was found */
    suggestions?: string[]
}
|
||||||
|
|
||||||
|
/**
 * Tool for finding where a symbol is defined.
 * Uses the SymbolIndex to locate definitions.
 *
 * On a miss, offers up to 5 similar symbol names (substring match or
 * Levenshtein distance <= 2) as suggestions.
 */
export class FindDefinitionTool implements ITool {
    readonly name = "find_definition"
    readonly description =
        "Find where a symbol is defined. " + "Returns file path, line number, and symbol type."
    readonly parameters: ToolParameterSchema[] = [
        {
            name: "symbol",
            type: "string",
            description: "Symbol name to find definition for",
            required: true,
        },
    ]
    readonly requiresConfirmation = false
    readonly category = "search" as const

    // Number of lines shown on each side of the definition line.
    private readonly contextLines = 2

    /**
     * Validate raw parameters.
     * Returns an error message, or null when the params are valid.
     */
    validateParams(params: Record<string, unknown>): string | null {
        if (typeof params.symbol !== "string" || params.symbol.trim() === "") {
            return "Parameter 'symbol' is required and must be a non-empty string"
        }

        return null
    }

    /**
     * Look the symbol up in the index and return its definitions with
     * source context. A miss is still a successful call (found=false).
     */
    async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
        const startTime = Date.now()
        const callId = `${this.name}-${String(startTime)}`

        const symbol = (params.symbol as string).trim()

        try {
            const symbolIndex = await ctx.storage.getSymbolIndex()
            const locations = symbolIndex.get(symbol)

            if (!locations || locations.length === 0) {
                // Nothing found: offer near-miss names instead.
                const suggestions = this.findSimilarSymbols(symbol, symbolIndex)
                return createSuccessResult(
                    callId,
                    {
                        symbol,
                        found: false,
                        definitions: [],
                        suggestions: suggestions.length > 0 ? suggestions : undefined,
                    } satisfies FindDefinitionResult,
                    Date.now() - startTime,
                )
            }

            const definitions: DefinitionLocation[] = []
            for (const loc of locations) {
                const context = await this.getContext(loc, ctx)
                definitions.push({
                    path: loc.path,
                    line: loc.line,
                    type: loc.type,
                    context,
                })
            }

            // Deterministic ordering: by path, then by line within a file.
            definitions.sort((a, b) => {
                const pathCompare = a.path.localeCompare(b.path)
                if (pathCompare !== 0) {
                    return pathCompare
                }
                return a.line - b.line
            })

            const result: FindDefinitionResult = {
                symbol,
                found: true,
                definitions,
            }

            return createSuccessResult(callId, result, Date.now() - startTime)
        } catch (error) {
            const message = error instanceof Error ? error.message : String(error)
            return createErrorResult(callId, message, Date.now() - startTime)
        }
    }

    /**
     * Get context lines around the definition.
     * Returns "" when the file cannot be read; context is best-effort.
     */
    private async getContext(loc: SymbolLocation, ctx: ToolContext): Promise<string> {
        try {
            const lines = await this.getFileLines(loc.path, ctx)
            if (lines.length === 0) {
                return ""
            }

            // Convert 1-based line number to a 0-based index and clamp
            // the window to the file bounds.
            const lineIndex = loc.line - 1
            const startIndex = Math.max(0, lineIndex - this.contextLines)
            const endIndex = Math.min(lines.length - 1, lineIndex + this.contextLines)

            const contextLines: string[] = []
            for (let i = startIndex; i <= endIndex; i++) {
                const lineNum = i + 1
                // The definition line is marked with ">".
                const prefix = i === lineIndex ? ">" : " "
                contextLines.push(`${prefix}${String(lineNum).padStart(4)}│${lines[i]}`)
            }

            return contextLines.join("\n")
        } catch {
            return ""
        }
    }

    /**
     * Get file lines from storage or filesystem.
     * Prefers the indexed copy in storage; falls back to disk, and
     * returns [] when neither is readable.
     */
    private async getFileLines(relativePath: string, ctx: ToolContext): Promise<string[]> {
        const fileData = await ctx.storage.getFile(relativePath)
        if (fileData) {
            return fileData.lines
        }

        const absolutePath = path.resolve(ctx.projectRoot, relativePath)
        try {
            const content = await fs.readFile(absolutePath, "utf-8")
            return content.split("\n")
        } catch {
            return []
        }
    }

    /**
     * Find similar symbol names for suggestions.
     * Case-insensitive; accepts substring containment either way or a
     * Levenshtein distance of at most 2. Stops after 5 matches, so the
     * result depends on index iteration order before being sorted.
     */
    private findSimilarSymbols(symbol: string, symbolIndex: Map<string, unknown>): string[] {
        const suggestions: string[] = []
        const lowerSymbol = symbol.toLowerCase()
        const maxSuggestions = 5

        for (const name of symbolIndex.keys()) {
            if (suggestions.length >= maxSuggestions) {
                break
            }

            const lowerName = name.toLowerCase()
            if (lowerName.includes(lowerSymbol) || lowerSymbol.includes(lowerName)) {
                suggestions.push(name)
            } else if (this.levenshteinDistance(lowerSymbol, lowerName) <= 2) {
                suggestions.push(name)
            }
        }

        return suggestions.sort()
    }

    /**
     * Calculate Levenshtein distance between two strings.
     * Classic full-matrix dynamic programming: O(len(a) * len(b)) time
     * and space.
     */
    private levenshteinDistance(a: string, b: string): number {
        if (a.length === 0) {
            return b.length
        }
        if (b.length === 0) {
            return a.length
        }

        const matrix: number[][] = []

        // First column and row: distance from the empty prefix.
        for (let i = 0; i <= b.length; i++) {
            matrix[i] = [i]
        }
        for (let j = 0; j <= a.length; j++) {
            matrix[0][j] = j
        }

        for (let i = 1; i <= b.length; i++) {
            for (let j = 1; j <= a.length; j++) {
                if (b.charAt(i - 1) === a.charAt(j - 1)) {
                    // Characters match: no additional cost.
                    matrix[i][j] = matrix[i - 1][j - 1]
                } else {
                    // Min of substitution, insertion, deletion.
                    matrix[i][j] = Math.min(
                        matrix[i - 1][j - 1] + 1,
                        matrix[i][j - 1] + 1,
                        matrix[i - 1][j] + 1,
                    )
                }
            }
        }

        return matrix[b.length][a.length]
    }
}
|
||||||
@@ -0,0 +1,260 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import type { ITool, ToolContext, ToolParameterSchema } from "../../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
createErrorResult,
|
||||||
|
createSuccessResult,
|
||||||
|
type ToolResult,
|
||||||
|
} from "../../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
 * A single reference to a symbol.
 */
export interface SymbolReference {
    /** File path containing the reference */
    path: string
    /** 1-based line number of the reference */
    line: number
    /** Column of the reference within the line */
    column: number
    /** Surrounding source lines for display */
    context: string
    /** True when this occurrence is the symbol's definition itself */
    isDefinition: boolean
}

/**
 * Result data from find_references tool.
 */
export interface FindReferencesResult {
    /** The symbol that was searched for */
    symbol: string
    /** Total number of references found */
    totalReferences: number
    /** Number of distinct files containing references */
    files: number
    /** All references found */
    references: SymbolReference[]
    /** Known definition sites from the symbol index */
    definitionLocations: {
        path: string
        line: number
        type: string
    }[]
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool for finding all usages of a symbol across the codebase.
|
||||||
|
* Searches through indexed files for symbol references.
|
||||||
|
*/
|
||||||
|
export class FindReferencesTool implements ITool {
|
||||||
|
readonly name = "find_references"
|
||||||
|
readonly description =
|
||||||
|
"Find all usages of a symbol across the codebase. " +
|
||||||
|
"Returns list of file paths, line numbers, and context."
|
||||||
|
readonly parameters: ToolParameterSchema[] = [
|
||||||
|
{
|
||||||
|
name: "symbol",
|
||||||
|
type: "string",
|
||||||
|
description: "Symbol name to search for (function, class, variable, etc.)",
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "path",
|
||||||
|
type: "string",
|
||||||
|
description: "Limit search to specific file or directory",
|
||||||
|
required: false,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
readonly requiresConfirmation = false
|
||||||
|
readonly category = "search" as const
|
||||||
|
|
||||||
|
private readonly contextLines = 1
|
||||||
|
|
||||||
|
validateParams(params: Record<string, unknown>): string | null {
|
||||||
|
if (typeof params.symbol !== "string" || params.symbol.trim() === "") {
|
||||||
|
return "Parameter 'symbol' is required and must be a non-empty string"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.path !== undefined && typeof params.path !== "string") {
|
||||||
|
return "Parameter 'path' must be a string"
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
async execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const callId = `${this.name}-${String(startTime)}`
|
||||||
|
|
||||||
|
const symbol = (params.symbol as string).trim()
|
||||||
|
const filterPath = params.path as string | undefined
|
||||||
|
|
||||||
|
try {
|
||||||
|
const symbolIndex = await ctx.storage.getSymbolIndex()
|
||||||
|
const definitionLocations = symbolIndex.get(symbol) ?? []
|
||||||
|
|
||||||
|
const allFiles = await ctx.storage.getAllFiles()
|
||||||
|
const filesToSearch = this.filterFiles(allFiles, filterPath, ctx.projectRoot)
|
||||||
|
|
||||||
|
if (filesToSearch.size === 0) {
|
||||||
|
return createSuccessResult(
|
||||||
|
callId,
|
||||||
|
{
|
||||||
|
symbol,
|
||||||
|
totalReferences: 0,
|
||||||
|
files: 0,
|
||||||
|
references: [],
|
||||||
|
definitionLocations: definitionLocations.map((loc) => ({
|
||||||
|
path: loc.path,
|
||||||
|
line: loc.line,
|
||||||
|
type: loc.type,
|
||||||
|
})),
|
||||||
|
} satisfies FindReferencesResult,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const references: SymbolReference[] = []
|
||||||
|
const filesWithReferences = new Set<string>()
|
||||||
|
|
||||||
|
for (const [filePath, fileData] of filesToSearch) {
|
||||||
|
const fileRefs = this.findReferencesInFile(
|
||||||
|
filePath,
|
||||||
|
fileData.lines,
|
||||||
|
symbol,
|
||||||
|
definitionLocations,
|
||||||
|
)
|
||||||
|
|
||||||
|
if (fileRefs.length > 0) {
|
||||||
|
filesWithReferences.add(filePath)
|
||||||
|
references.push(...fileRefs)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
references.sort((a, b) => {
|
||||||
|
const pathCompare = a.path.localeCompare(b.path)
|
||||||
|
if (pathCompare !== 0) {
|
||||||
|
return pathCompare
|
||||||
|
}
|
||||||
|
return a.line - b.line
|
||||||
|
})
|
||||||
|
|
||||||
|
const result: FindReferencesResult = {
|
||||||
|
symbol,
|
||||||
|
totalReferences: references.length,
|
||||||
|
files: filesWithReferences.size,
|
||||||
|
references,
|
||||||
|
definitionLocations: definitionLocations.map((loc) => ({
|
||||||
|
path: loc.path,
|
||||||
|
line: loc.line,
|
||||||
|
type: loc.type,
|
||||||
|
})),
|
||||||
|
}
|
||||||
|
|
||||||
|
return createSuccessResult(callId, result, Date.now() - startTime)
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(callId, message, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filter files by path prefix if specified.
|
||||||
|
*/
|
||||||
|
private filterFiles(
|
||||||
|
allFiles: Map<string, { lines: string[] }>,
|
||||||
|
filterPath: string | undefined,
|
||||||
|
projectRoot: string,
|
||||||
|
): Map<string, { lines: string[] }> {
|
||||||
|
if (!filterPath) {
|
||||||
|
return allFiles
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedFilter = filterPath.startsWith("/")
|
||||||
|
? path.relative(projectRoot, filterPath)
|
||||||
|
: filterPath
|
||||||
|
|
||||||
|
const filtered = new Map<string, { lines: string[] }>()
|
||||||
|
for (const [filePath, fileData] of allFiles) {
|
||||||
|
if (filePath === normalizedFilter || filePath.startsWith(`${normalizedFilter}/`)) {
|
||||||
|
filtered.set(filePath, fileData)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return filtered
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find all references to the symbol in a file.
|
||||||
|
*/
|
||||||
|
private findReferencesInFile(
|
||||||
|
filePath: string,
|
||||||
|
lines: string[],
|
||||||
|
symbol: string,
|
||||||
|
definitionLocations: { path: string; line: number }[],
|
||||||
|
): SymbolReference[] {
|
||||||
|
const references: SymbolReference[] = []
|
||||||
|
const symbolRegex = this.createSymbolRegex(symbol)
|
||||||
|
|
||||||
|
for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) {
|
||||||
|
const line = lines[lineIndex]
|
||||||
|
const lineNumber = lineIndex + 1
|
||||||
|
let match: RegExpExecArray | null
|
||||||
|
|
||||||
|
symbolRegex.lastIndex = 0
|
||||||
|
while ((match = symbolRegex.exec(line)) !== null) {
|
||||||
|
const column = match.index + 1
|
||||||
|
const context = this.buildContext(lines, lineIndex)
|
||||||
|
const isDefinition = this.isDefinitionLine(
|
||||||
|
filePath,
|
||||||
|
lineNumber,
|
||||||
|
definitionLocations,
|
||||||
|
)
|
||||||
|
|
||||||
|
references.push({
|
||||||
|
path: filePath,
|
||||||
|
line: lineNumber,
|
||||||
|
column,
|
||||||
|
context,
|
||||||
|
isDefinition,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return references
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a regex for matching the symbol with appropriate boundaries.
|
||||||
|
* Handles symbols that start or end with non-word characters (like $value).
|
||||||
|
*/
|
||||||
|
private createSymbolRegex(symbol: string): RegExp {
|
||||||
|
const escaped = symbol.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
|
||||||
|
|
||||||
|
const startsWithWordChar = /^\w/.test(symbol)
|
||||||
|
const endsWithWordChar = /\w$/.test(symbol)
|
||||||
|
|
||||||
|
const prefix = startsWithWordChar ? "\\b" : "(?<![\\w$])"
|
||||||
|
const suffix = endsWithWordChar ? "\\b" : "(?![\\w$])"
|
||||||
|
|
||||||
|
return new RegExp(`${prefix}${escaped}${suffix}`, "g")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build context string with surrounding lines.
|
||||||
|
*/
|
||||||
|
private buildContext(lines: string[], currentIndex: number): string {
|
||||||
|
const startIndex = Math.max(0, currentIndex - this.contextLines)
|
||||||
|
const endIndex = Math.min(lines.length - 1, currentIndex + this.contextLines)
|
||||||
|
|
||||||
|
const contextLines: string[] = []
|
||||||
|
for (let i = startIndex; i <= endIndex; i++) {
|
||||||
|
const lineNum = i + 1
|
||||||
|
const prefix = i === currentIndex ? ">" : " "
|
||||||
|
contextLines.push(`${prefix}${String(lineNum).padStart(4)}│${lines[i]}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return contextLines.join("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if this line is a definition location.
|
||||||
|
*/
|
||||||
|
private isDefinitionLine(
|
||||||
|
filePath: string,
|
||||||
|
lineNumber: number,
|
||||||
|
definitionLocations: { path: string; line: number }[],
|
||||||
|
): boolean {
|
||||||
|
return definitionLocations.some((loc) => loc.path === filePath && loc.line === lineNumber)
|
||||||
|
}
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user