mirror of
https://github.com/samiyev/puaros.git
synced 2025-12-28 07:16:53 +05:00
Compare commits
43 Commits
v0.9.1
...
ipuaro-v0.
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
baccfd53c0 | ||
|
|
8f995fc596 | ||
|
|
f947c6d157 | ||
|
|
33d52bc7ca | ||
|
|
2c6eb6ce9b | ||
|
|
7d18e87423 | ||
|
|
fd1e6ad86e | ||
|
|
259ecc181a | ||
|
|
0f2ed5b301 | ||
|
|
56643d903f | ||
|
|
f5f904a847 | ||
|
|
2ae1ac13f5 | ||
|
|
caf7aac116 | ||
|
|
4ad5a209c4 | ||
|
|
25146003cc | ||
|
|
68f927d906 | ||
|
|
b3e04a411c | ||
|
|
294d085ad4 | ||
|
|
958e4daed5 | ||
|
|
6234fbce92 | ||
|
|
af9c2377a0 | ||
|
|
d0c1ddc22e | ||
|
|
225480c806 | ||
|
|
fd8e97af0e | ||
|
|
d36f9a6e21 | ||
|
|
4267938dcd | ||
|
|
127c7e2185 | ||
|
|
130a8c4f24 | ||
|
|
7f6180df37 | ||
|
|
daace23814 | ||
|
|
625e109c0a | ||
|
|
ec7adb1330 | ||
|
|
085e236c4a | ||
|
|
ee6388f587 | ||
|
|
a75dbcf147 | ||
|
|
42da5127cc | ||
|
|
0da6d9f3c2 | ||
|
|
6b35679f09 | ||
|
|
07e6535633 | ||
|
|
e8626dd03c | ||
|
|
ce78183c6e | ||
|
|
1d6aebcd87 | ||
|
|
ceb87f1b1f |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -86,3 +86,4 @@ Thumbs.db
|
|||||||
|
|
||||||
# Yarn Integrity file
|
# Yarn Integrity file
|
||||||
.yarn-integrity
|
.yarn-integrity
|
||||||
|
packages/guardian/docs/STRATEGIC_ANALYSIS_2025-11.md
|
||||||
|
|||||||
15
.gitmessage
15
.gitmessage
@@ -1,9 +1,17 @@
|
|||||||
# <type>: <subject>
|
# <type>(<package>): <subject>
|
||||||
#
|
#
|
||||||
# <body>
|
# <body>
|
||||||
#
|
#
|
||||||
# <footer>
|
# <footer>
|
||||||
|
|
||||||
|
# Format:
|
||||||
|
# - Package changes: <type>(<package>): <subject>
|
||||||
|
# Examples: feat(guardian): add detector
|
||||||
|
# fix(ipuaro): resolve memory leak
|
||||||
|
# - Root changes: <type>: <subject>
|
||||||
|
# Examples: chore: update eslint config
|
||||||
|
# docs: update root README
|
||||||
|
|
||||||
# Type should be one of the following:
|
# Type should be one of the following:
|
||||||
# * feat: A new feature
|
# * feat: A new feature
|
||||||
# * fix: A bug fix
|
# * fix: A bug fix
|
||||||
@@ -16,6 +24,11 @@
|
|||||||
# * ci: Changes to CI configuration files and scripts
|
# * ci: Changes to CI configuration files and scripts
|
||||||
# * chore: Other changes that don't modify src or test files
|
# * chore: Other changes that don't modify src or test files
|
||||||
# * revert: Reverts a previous commit
|
# * revert: Reverts a previous commit
|
||||||
|
|
||||||
|
# Package scopes:
|
||||||
|
# * guardian - @puaros/guardian package
|
||||||
|
# * ipuaro - @puaros/ipuaro package
|
||||||
|
# * (none) - root-level changes
|
||||||
#
|
#
|
||||||
# Subject line rules:
|
# Subject line rules:
|
||||||
# - Use imperative mood ("add feature" not "added feature")
|
# - Use imperative mood ("add feature" not "added feature")
|
||||||
|
|||||||
63
CHANGELOG.md
63
CHANGELOG.md
@@ -1,63 +0,0 @@
|
|||||||
# Changelog
|
|
||||||
|
|
||||||
All notable changes to this project will be documented in this file.
|
|
||||||
|
|
||||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
|
|
||||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
|
||||||
|
|
||||||
## [Unreleased]
|
|
||||||
|
|
||||||
## [0.4.0] - 2025-11-24
|
|
||||||
|
|
||||||
### Added
|
|
||||||
- Dependency direction enforcement - validate that dependencies flow in the correct direction according to Clean Architecture principles
|
|
||||||
- Architecture layer violation detection for domain, application, and infrastructure layers
|
|
||||||
|
|
||||||
## [0.3.0] - 2025-11-24
|
|
||||||
|
|
||||||
### Added
|
|
||||||
- Entity exposure detection - identify when domain entities are exposed outside their module boundaries
|
|
||||||
- Enhanced architecture violation reporting
|
|
||||||
|
|
||||||
## [0.2.0] - 2025-11-24
|
|
||||||
|
|
||||||
### Added
|
|
||||||
- Framework leak detection - detect when domain layer imports framework code
|
|
||||||
- Framework leak reporting in CLI
|
|
||||||
- Framework leak examples and documentation
|
|
||||||
|
|
||||||
## [0.1.0] - 2025-11-24
|
|
||||||
|
|
||||||
### Added
|
|
||||||
- Initial monorepo setup with pnpm workspaces
|
|
||||||
- `@puaros/guardian` package - code quality guardian for vibe coders and enterprise teams
|
|
||||||
- TypeScript with strict type checking and Vitest configuration
|
|
||||||
- ESLint strict TypeScript rules with 4-space indentation
|
|
||||||
- Prettier code formatting (4 spaces, double quotes, no semicolons)
|
|
||||||
- LINTING.md documentation for code style guidelines
|
|
||||||
- CLAUDE.md for AI assistant guidance
|
|
||||||
- EditorConfig for consistent IDE settings
|
|
||||||
- Node.js version specification (.nvmrc: 22.18.0)
|
|
||||||
- Vitest testing framework with 80% coverage thresholds
|
|
||||||
- Guardian dependencies: commander, simple-git, tree-sitter, uuid
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
- TypeScript: nodenext modules, ES2023 target, strict null checks
|
|
||||||
- ESLint: Strict type checking, complexity limits, code quality rules
|
|
||||||
- Prettier: 100 char line length, double quotes, no semicolons, trailing commas
|
|
||||||
- Test coverage: 80% threshold for lines, functions, branches, statements
|
|
||||||
|
|
||||||
### Guardian Package
|
|
||||||
- Hardcode detection (magic numbers, strings)
|
|
||||||
- Circular dependency detection
|
|
||||||
- Naming convention enforcement
|
|
||||||
- Architecture violation detection
|
|
||||||
- CLI tool with `guardian` command
|
|
||||||
- 159 tests, all passing
|
|
||||||
- Clean Architecture implementation
|
|
||||||
|
|
||||||
## [0.0.1] - 2025-11-24
|
|
||||||
|
|
||||||
### Added
|
|
||||||
- Initial project structure
|
|
||||||
- Monorepo workspace configuration
|
|
||||||
485
CLAUDE.md
485
CLAUDE.md
@@ -4,7 +4,53 @@ This file provides guidance to Claude Code (claude.ai/code) when working with co
|
|||||||
|
|
||||||
## Project Overview
|
## Project Overview
|
||||||
|
|
||||||
Puaros is a TypeScript monorepo using pnpm workspaces. Currently contains the `@puaros/guardian` package - a code quality guardian for detecting hardcoded values, circular dependencies, framework leaks, naming violations, and architecture violations. The project uses Node.js 22.18.0 (see `.nvmrc`).
|
Puaros is a TypeScript monorepo using pnpm workspaces. Contains two packages:
|
||||||
|
|
||||||
|
- **`@samiyev/guardian`** - Code quality guardian for detecting hardcoded values, circular dependencies, framework leaks, naming violations, and architecture violations.
|
||||||
|
|
||||||
|
- **`@samiyev/ipuaro`** - Local AI agent for codebase operations with "infinite" context feeling. Uses lazy loading, Redis persistence, tree-sitter AST parsing, and Ollama LLM integration.
|
||||||
|
|
||||||
|
The project uses Node.js 22.18.0 (see `.nvmrc`).
|
||||||
|
|
||||||
|
## Path Reference
|
||||||
|
|
||||||
|
**Root:** `/Users/fozilbeksamiyev/projects/ailabs/puaros`
|
||||||
|
|
||||||
|
### Key Paths
|
||||||
|
|
||||||
|
| Description | Path |
|
||||||
|
|-------------|------|
|
||||||
|
| **Root** | `.` |
|
||||||
|
| **Guardian package** | `packages/guardian` |
|
||||||
|
| **Guardian src** | `packages/guardian/src` |
|
||||||
|
| **Guardian tests** | `packages/guardian/tests` |
|
||||||
|
| **Guardian CLI** | `packages/guardian/src/cli` |
|
||||||
|
| **Guardian domain** | `packages/guardian/src/domain` |
|
||||||
|
| **Guardian infrastructure** | `packages/guardian/src/infrastructure` |
|
||||||
|
| **ipuaro package** | `packages/ipuaro` |
|
||||||
|
| **ipuaro docs** | `packages/ipuaro/docs` |
|
||||||
|
|
||||||
|
### File Locations
|
||||||
|
|
||||||
|
| File | Location |
|
||||||
|
|------|----------|
|
||||||
|
| Root package.json | `./package.json` |
|
||||||
|
| Guardian package.json | `packages/guardian/package.json` |
|
||||||
|
| Guardian tsconfig | `packages/guardian/tsconfig.json` |
|
||||||
|
| Guardian TODO | `packages/guardian/TODO.md` |
|
||||||
|
| Guardian CHANGELOG | `packages/guardian/CHANGELOG.md` |
|
||||||
|
| ipuaro ROADMAP | `packages/ipuaro/ROADMAP.md` |
|
||||||
|
| ESLint config | `./eslint.config.mjs` |
|
||||||
|
| Prettier config | `./.prettierrc` |
|
||||||
|
| Base tsconfig | `./tsconfig.base.json` |
|
||||||
|
|
||||||
|
### Path Rules
|
||||||
|
|
||||||
|
1. **Always use relative paths from project root** (not absolute)
|
||||||
|
2. **Package paths start with** `packages/<name>/`
|
||||||
|
3. **Source code is in** `packages/<name>/src/`
|
||||||
|
4. **Tests are in** `packages/<name>/tests/`
|
||||||
|
5. **Docs are in** `packages/<name>/docs/` or `./docs/`
|
||||||
|
|
||||||
## Essential Commands
|
## Essential Commands
|
||||||
|
|
||||||
@@ -100,28 +146,51 @@ From `eslint.config.mjs` and detailed in `LINTING.md`:
|
|||||||
|
|
||||||
Follow Conventional Commits format. See `.gitmessage` for full rules.
|
Follow Conventional Commits format. See `.gitmessage` for full rules.
|
||||||
|
|
||||||
Format: `<type>: <subject>` (imperative mood, no caps, max 50 chars)
|
**Monorepo format:** `<type>(<package>): <subject>`
|
||||||
|
|
||||||
**IMPORTANT: Do NOT add "Generated with Claude Code" footer or "Co-Authored-By: Claude" to commit messages.**
|
Examples:
|
||||||
Commits should only follow the Conventional Commits format without any additional attribution.
|
- `feat(guardian): add circular dependency detector`
|
||||||
|
- `fix(ipuaro): resolve memory leak in indexer`
|
||||||
|
- `docs(guardian): update CLI usage examples`
|
||||||
|
- `refactor(ipuaro): extract tool registry`
|
||||||
|
|
||||||
|
**Root-level changes:** `<type>: <subject>` (no scope)
|
||||||
|
- `chore: update eslint config`
|
||||||
|
- `docs: update root README`
|
||||||
|
|
||||||
|
**Types:** feat, fix, docs, style, refactor, test, chore
|
||||||
|
|
||||||
|
**Rules:**
|
||||||
|
- Imperative mood, no caps, max 50 chars
|
||||||
|
- Do NOT add "Generated with Claude Code" footer
|
||||||
|
- Do NOT add "Co-Authored-By: Claude"
|
||||||
|
|
||||||
## Monorepo Structure
|
## Monorepo Structure
|
||||||
|
|
||||||
```
|
```
|
||||||
puaros/
|
puaros/
|
||||||
├── packages/
|
├── packages/
|
||||||
│ └── guardian/ # @puaros/guardian - Code quality analyzer
|
│ ├── guardian/ # @samiyev/guardian - Code quality analyzer
|
||||||
│ ├── src/ # Source files (Clean Architecture layers)
|
│ │ ├── src/ # Source files (Clean Architecture)
|
||||||
│ │ ├── domain/ # Domain layer (entities, value objects)
|
│ │ │ ├── domain/ # Entities, value objects
|
||||||
│ │ ├── application/ # Application layer (use cases, DTOs)
|
│ │ │ ├── application/ # Use cases, DTOs
|
||||||
│ │ ├── infrastructure/ # Infrastructure layer (parsers, analyzers)
|
│ │ │ ├── infrastructure/ # Parsers, analyzers
|
||||||
│ │ ├── cli/ # CLI implementation
|
│ │ │ ├── cli/ # CLI implementation
|
||||||
│ │ └── shared/ # Shared utilities
|
│ │ │ └── shared/ # Shared utilities
|
||||||
│ ├── dist/ # Build output
|
│ │ ├── bin/ # CLI entry point
|
||||||
|
│ │ ├── tests/ # Test files
|
||||||
|
│ │ └── examples/ # Usage examples
|
||||||
|
│ └── ipuaro/ # @samiyev/ipuaro - Local AI agent
|
||||||
|
│ ├── src/ # Source files (Clean Architecture)
|
||||||
|
│ │ ├── domain/ # Entities, value objects, services
|
||||||
|
│ │ ├── application/ # Use cases, DTOs, mappers
|
||||||
|
│ │ ├── infrastructure/ # Storage, LLM, indexer, tools
|
||||||
|
│ │ ├── tui/ # Terminal UI (Ink/React)
|
||||||
|
│ │ ├── cli/ # CLI commands
|
||||||
|
│ │ └── shared/ # Types, constants, utils
|
||||||
│ ├── bin/ # CLI entry point
|
│ ├── bin/ # CLI entry point
|
||||||
│ ├── tests/ # Test files
|
│ ├── tests/ # Unit and E2E tests
|
||||||
│ ├── examples/ # Usage examples
|
│ └── examples/ # Demo projects
|
||||||
│ └── package.json # Uses Vitest for testing
|
|
||||||
├── pnpm-workspace.yaml # Workspace configuration
|
├── pnpm-workspace.yaml # Workspace configuration
|
||||||
└── tsconfig.base.json # Shared TypeScript config
|
└── tsconfig.base.json # Shared TypeScript config
|
||||||
```
|
```
|
||||||
@@ -142,6 +211,34 @@ Key features:
|
|||||||
- Architecture violation detection
|
- Architecture violation detection
|
||||||
- CLI tool with `guardian` command
|
- CLI tool with `guardian` command
|
||||||
|
|
||||||
|
### ipuaro Package Architecture
|
||||||
|
|
||||||
|
The ipuaro package follows Clean Architecture principles:
|
||||||
|
- **Domain Layer**: Entities (Session, Project), value objects (FileData, FileAST, ChatMessage), service interfaces
|
||||||
|
- **Application Layer**: Use cases (StartSession, HandleMessage, IndexProject, ExecuteTool), DTOs, mappers
|
||||||
|
- **Infrastructure Layer**: Redis storage, Ollama client, indexer, 18 tool implementations, security
|
||||||
|
- **TUI Layer**: Ink/React components (StatusBar, Chat, Input, DiffView, ConfirmDialog)
|
||||||
|
- **CLI Layer**: Commander.js entry point and commands
|
||||||
|
|
||||||
|
Key features:
|
||||||
|
- 18 LLM tools (read, edit, search, analysis, git, run)
|
||||||
|
- Redis persistence with AOF
|
||||||
|
- tree-sitter AST parsing (ts, tsx, js, jsx)
|
||||||
|
- Ollama LLM integration (qwen2.5-coder:7b-instruct)
|
||||||
|
- File watching via chokidar
|
||||||
|
- Session and undo management
|
||||||
|
- Security (blacklist/whitelist for commands)
|
||||||
|
|
||||||
|
**Tools summary:**
|
||||||
|
| Category | Tools |
|
||||||
|
|----------|-------|
|
||||||
|
| Read | get_lines, get_function, get_class, get_structure |
|
||||||
|
| Edit | edit_lines, create_file, delete_file |
|
||||||
|
| Search | find_references, find_definition |
|
||||||
|
| Analysis | get_dependencies, get_dependents, get_complexity, get_todos |
|
||||||
|
| Git | git_status, git_diff, git_commit |
|
||||||
|
| Run | run_command, run_tests |
|
||||||
|
|
||||||
### TypeScript Configuration
|
### TypeScript Configuration
|
||||||
|
|
||||||
Base configuration (`tsconfig.base.json`) uses:
|
Base configuration (`tsconfig.base.json`) uses:
|
||||||
@@ -163,253 +260,283 @@ Guardian package (`packages/guardian/tsconfig.json`):
|
|||||||
## Adding New Packages
|
## Adding New Packages
|
||||||
|
|
||||||
1. Create `packages/new-package/` directory
|
1. Create `packages/new-package/` directory
|
||||||
2. Add `package.json` with name `@puaros/new-package`
|
2. Add `package.json` with name `@samiyev/new-package`
|
||||||
3. Create `tsconfig.json` extending `../../tsconfig.base.json`
|
3. Create `tsconfig.json` extending `../../tsconfig.base.json`
|
||||||
4. Package auto-discovered via `pnpm-workspace.yaml` glob pattern
|
4. Package auto-discovered via `pnpm-workspace.yaml` glob pattern
|
||||||
|
|
||||||
## Dependencies
|
## Dependencies
|
||||||
|
|
||||||
Guardian package uses:
|
**Guardian package:**
|
||||||
- `commander` - CLI framework for command-line interface
|
- `commander` - CLI framework
|
||||||
- `simple-git` - Git operations
|
- `simple-git` - Git operations
|
||||||
- `tree-sitter` - Abstract syntax tree parsing
|
- `tree-sitter` - AST parsing
|
||||||
- `tree-sitter-javascript` - JavaScript parser
|
- `tree-sitter-javascript/typescript` - JS/TS parsers
|
||||||
- `tree-sitter-typescript` - TypeScript parser
|
|
||||||
- `uuid` - UUID generation
|
- `uuid` - UUID generation
|
||||||
|
|
||||||
Development tools:
|
**ipuaro package:**
|
||||||
- Vitest for testing with coverage thresholds
|
- `ink`, `ink-text-input`, `react` - Terminal UI
|
||||||
|
- `ioredis` - Redis client
|
||||||
|
- `tree-sitter` - AST parsing
|
||||||
|
- `tree-sitter-javascript/typescript` - JS/TS parsers
|
||||||
|
- `ollama` - LLM client
|
||||||
|
- `simple-git` - Git operations
|
||||||
|
- `chokidar` - File watching
|
||||||
|
- `commander` - CLI framework
|
||||||
|
- `zod` - Validation
|
||||||
|
- `ignore` - Gitignore parsing
|
||||||
|
|
||||||
|
**Development tools (shared):**
|
||||||
|
- Vitest for testing (80% coverage threshold)
|
||||||
- ESLint with TypeScript strict rules
|
- ESLint with TypeScript strict rules
|
||||||
- Prettier for formatting
|
- Prettier (4-space indentation)
|
||||||
- `@vitest/ui` - Vitest UI for interactive testing
|
- `@vitest/ui` - Interactive testing UI
|
||||||
- `@vitest/coverage-v8` - Coverage reporting
|
- `@vitest/coverage-v8` - Coverage reporting
|
||||||
|
|
||||||
## Development Workflow
|
## Monorepo Versioning Strategy
|
||||||
|
|
||||||
### Complete Feature Development & Release Workflow
|
### Git Tag Format
|
||||||
|
|
||||||
This workflow ensures high quality and consistency from feature implementation to package publication.
|
**Prefixed tags for each package:**
|
||||||
|
```
|
||||||
#### Phase 1: Feature Planning & Implementation
|
guardian-v0.5.0
|
||||||
|
ipuaro-v0.1.0
|
||||||
```bash
|
|
||||||
# 1. Create feature branch (if needed)
|
|
||||||
git checkout -b feature/your-feature-name
|
|
||||||
|
|
||||||
# 2. Implement feature following Clean Architecture
|
|
||||||
# - Add to appropriate layer (domain/application/infrastructure/cli)
|
|
||||||
# - Follow naming conventions
|
|
||||||
# - Keep functions small and focused
|
|
||||||
|
|
||||||
# 3. Update constants if adding CLI options
|
|
||||||
# Edit: packages/guardian/src/cli/constants.ts
|
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Phase 2: Quality Checks (Run After Implementation)
|
**Why prefixed tags:**
|
||||||
|
- Independent versioning per package
|
||||||
|
- Clear release history for each package
|
||||||
|
- Works with npm publish and CI/CD
|
||||||
|
- Easy to filter: `git tag -l "guardian-*"`
|
||||||
|
|
||||||
|
**Legacy tags:** Tags before monorepo (v0.1.0, v0.2.0, etc.) are kept as-is for historical reference.
|
||||||
|
|
||||||
|
### Semantic Versioning
|
||||||
|
|
||||||
|
All packages follow SemVer: `MAJOR.MINOR.PATCH`
|
||||||
|
|
||||||
|
- **MAJOR** (1.0.0) - Breaking API changes
|
||||||
|
- **MINOR** (0.1.0) - New features, backwards compatible
|
||||||
|
- **PATCH** (0.0.1) - Bug fixes, backwards compatible
|
||||||
|
|
||||||
|
**Pre-1.0 policy:** Minor bumps (0.x.0) may include breaking changes.
|
||||||
|
|
||||||
|
## Release Pipeline
|
||||||
|
|
||||||
|
**Quick reference:** Say "run pipeline for [package]" to execute full release flow.
|
||||||
|
|
||||||
|
The pipeline has 6 phases. Each phase must pass before proceeding.
|
||||||
|
|
||||||
|
### Phase 1: Quality Gates
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Navigate to package
|
cd packages/<package>
|
||||||
cd packages/guardian
|
|
||||||
|
|
||||||
# 1. Format code (REQUIRED - 4 spaces indentation)
|
# All must pass:
|
||||||
pnpm format
|
pnpm format # 4-space indentation
|
||||||
|
pnpm build # TypeScript compiles
|
||||||
# 2. Build to check compilation
|
cd ../.. && pnpm eslint "packages/**/*.ts" --fix # 0 errors, 0 warnings
|
||||||
pnpm build
|
cd packages/<package>
|
||||||
|
pnpm test:run # All tests pass
|
||||||
# 3. Run linter (must pass with 0 errors, 0 warnings)
|
pnpm test:coverage # Coverage ≥80%
|
||||||
cd ../.. && pnpm eslint "packages/**/*.ts" --fix
|
|
||||||
|
|
||||||
# 4. Run tests (all must pass)
|
|
||||||
pnpm test:run
|
|
||||||
|
|
||||||
# 5. Check coverage (must be ≥80%)
|
|
||||||
pnpm test:coverage
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**Quality Gates:**
|
### Phase 2: Documentation
|
||||||
- ✅ Format: No changes after `pnpm format`
|
|
||||||
- ✅ Build: TypeScript compiles without errors
|
|
||||||
- ✅ Lint: 0 errors, 0 warnings
|
|
||||||
- ✅ Tests: All tests pass (292/292)
|
|
||||||
- ✅ Coverage: ≥80% on all metrics
|
|
||||||
|
|
||||||
#### Phase 3: Documentation Updates
|
Update these files in `packages/<package>/`:
|
||||||
|
|
||||||
|
| File | Action |
|
||||||
|
|------|--------|
|
||||||
|
| `README.md` | Add feature docs, update CLI usage, update API |
|
||||||
|
| `TODO.md` | Mark completed tasks, add new tech debt if any |
|
||||||
|
| `CHANGELOG.md` | Add version entry with all changes |
|
||||||
|
| `ROADMAP.md` | Update if milestone completed |
|
||||||
|
|
||||||
|
**Tech debt rule:** If implementation leaves known issues, shortcuts, or future improvements needed — add them to TODO.md before committing.
|
||||||
|
|
||||||
|
### Phase 3: Manual Testing
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 1. Update README.md
|
cd packages/<package>
|
||||||
# - Add new feature to Features section
|
|
||||||
# - Update CLI Usage examples if CLI changed
|
|
||||||
# - Update API documentation if public API changed
|
|
||||||
# - Update TypeScript interfaces
|
|
||||||
|
|
||||||
# 2. Update TODO.md
|
# Test CLI/API manually
|
||||||
# - Mark completed tasks as done
|
node dist/cli/index.js <command> ./examples
|
||||||
# - Add new technical debt if discovered
|
|
||||||
# - Document coverage issues for new files
|
|
||||||
# - Update "Recent Updates" section with changes
|
|
||||||
|
|
||||||
# 3. Update CHANGELOG.md (for releases)
|
# Verify output, edge cases, error handling
|
||||||
# - Add entry with version number
|
|
||||||
# - List all changes (features, fixes, improvements)
|
|
||||||
# - Follow Keep a Changelog format
|
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Phase 4: Verification & Testing
|
### Phase 4: Commit
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 1. Test CLI manually with examples
|
|
||||||
cd packages/guardian
|
|
||||||
node dist/cli/index.js check ./examples --limit 5
|
|
||||||
|
|
||||||
# 2. Test new feature with different options
|
|
||||||
node dist/cli/index.js check ./examples --only-critical
|
|
||||||
node dist/cli/index.js check ./examples --min-severity high
|
|
||||||
|
|
||||||
# 3. Verify output formatting and messages
|
|
||||||
# - Check that all violations display correctly
|
|
||||||
# - Verify severity labels and suggestions
|
|
||||||
# - Test edge cases and error handling
|
|
||||||
|
|
||||||
# 4. Run full quality check suite
|
|
||||||
pnpm format && pnpm eslint "packages/**/*.ts" && pnpm build && pnpm test:run
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Phase 5: Commit & Version
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# 1. Stage changes
|
|
||||||
git add .
|
git add .
|
||||||
|
git commit -m "<type>(<package>): <description>"
|
||||||
|
|
||||||
# 2. Commit with Conventional Commits format
|
# Examples:
|
||||||
git commit -m "feat: add --limit option for output control"
|
# feat(guardian): add --limit option
|
||||||
# or
|
# fix(ipuaro): resolve memory leak in indexer
|
||||||
git commit -m "fix: resolve unused variable in detector"
|
# docs(guardian): update API examples
|
||||||
# or
|
|
||||||
git commit -m "docs: update README with new features"
|
|
||||||
|
|
||||||
# Types: feat, fix, docs, style, refactor, test, chore
|
|
||||||
|
|
||||||
# 3. Update package version (if releasing)
|
|
||||||
cd packages/guardian
|
|
||||||
npm version patch # Bug fixes (0.5.2 → 0.5.3)
|
|
||||||
npm version minor # New features (0.5.2 → 0.6.0)
|
|
||||||
npm version major # Breaking changes (0.5.2 → 1.0.0)
|
|
||||||
|
|
||||||
# 4. Push changes
|
|
||||||
git push origin main # or your branch
|
|
||||||
git push --tags # Push version tags
|
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Phase 6: Publication (Maintainers Only)
|
**Commit types:** feat, fix, docs, style, refactor, test, chore
|
||||||
|
|
||||||
|
### Phase 5: Version & Tag
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 1. Final verification before publish
|
cd packages/<package>
|
||||||
cd packages/guardian
|
|
||||||
|
# Bump version
|
||||||
|
npm version patch # 0.5.2 → 0.5.3 (bug fix)
|
||||||
|
npm version minor # 0.5.2 → 0.6.0 (new feature)
|
||||||
|
npm version major # 0.5.2 → 1.0.0 (breaking change)
|
||||||
|
|
||||||
|
# Create prefixed git tag
|
||||||
|
git tag <package>-v<version>
|
||||||
|
# Example: git tag guardian-v0.6.0
|
||||||
|
|
||||||
|
# Push
|
||||||
|
git push origin main
|
||||||
|
git push origin <package>-v<version>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Phase 6: Publish (Maintainers Only)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd packages/<package>
|
||||||
|
|
||||||
|
# Final verification
|
||||||
pnpm build && pnpm test:run && pnpm test:coverage
|
pnpm build && pnpm test:run && pnpm test:coverage
|
||||||
|
|
||||||
# 2. Verify package contents
|
# Check package contents
|
||||||
npm pack --dry-run
|
npm pack --dry-run
|
||||||
|
|
||||||
# 3. Publish to npm
|
# Publish
|
||||||
npm publish --access public
|
npm publish --access public
|
||||||
|
|
||||||
# 4. Verify publication
|
# Verify
|
||||||
npm info @samiyev/guardian
|
npm info @samiyev/<package>
|
||||||
|
|
||||||
# 5. Test installation
|
|
||||||
npm install -g @samiyev/guardian@latest
|
|
||||||
guardian --version
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Quick Checklist for New Features
|
## Pipeline Checklist
|
||||||
|
|
||||||
**Before Committing:**
|
Copy and use for each release:
|
||||||
- [ ] Feature implemented in correct layer
|
|
||||||
- [ ] Code formatted with `pnpm format`
|
|
||||||
- [ ] Lint passes: `pnpm eslint "packages/**/*.ts"`
|
|
||||||
- [ ] Build succeeds: `pnpm build`
|
|
||||||
- [ ] All tests pass: `pnpm test:run`
|
|
||||||
- [ ] Coverage ≥80%: `pnpm test:coverage`
|
|
||||||
- [ ] CLI tested manually if CLI changed
|
|
||||||
- [ ] README.md updated with examples
|
|
||||||
- [ ] TODO.md updated with progress
|
|
||||||
- [ ] No `console.log` in production code
|
|
||||||
- [ ] TypeScript interfaces documented
|
|
||||||
|
|
||||||
**Before Publishing:**
|
```markdown
|
||||||
- [ ] CHANGELOG.md updated
|
## Release: <package> v<version>
|
||||||
|
|
||||||
|
### Quality Gates
|
||||||
|
- [ ] `pnpm format` - no changes
|
||||||
|
- [ ] `pnpm build` - compiles
|
||||||
|
- [ ] `pnpm eslint` - 0 errors, 0 warnings
|
||||||
|
- [ ] `pnpm test:run` - all pass
|
||||||
|
- [ ] `pnpm test:coverage` - ≥80%
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
- [ ] README.md updated
|
||||||
|
- [ ] TODO.md - completed tasks marked, new debt added
|
||||||
|
- [ ] CHANGELOG.md - version entry added
|
||||||
|
- [ ] ROADMAP.md updated (if needed)
|
||||||
|
|
||||||
|
### Testing
|
||||||
|
- [ ] CLI/API tested manually
|
||||||
|
- [ ] Edge cases verified
|
||||||
|
|
||||||
|
### Release
|
||||||
|
- [ ] Commit with conventional format
|
||||||
- [ ] Version bumped in package.json
|
- [ ] Version bumped in package.json
|
||||||
- [ ] All quality gates pass
|
- [ ] Git tag created: <package>-v<version>
|
||||||
- [ ] Examples work correctly
|
- [ ] Pushed to origin
|
||||||
- [ ] Git tags pushed
|
- [ ] Published to npm (if public release)
|
||||||
|
```
|
||||||
|
|
||||||
### Common Workflows
|
## Working with Roadmap
|
||||||
|
|
||||||
|
When the user points to `ROADMAP.md` or asks about the roadmap/next steps:
|
||||||
|
|
||||||
|
1. **Read both files together:**
|
||||||
|
- `packages/<package>/ROADMAP.md` - to understand the planned features and milestones
|
||||||
|
- `packages/<package>/CHANGELOG.md` - to see what's already implemented
|
||||||
|
|
||||||
|
2. **Determine current position:**
|
||||||
|
- Check the latest version in CHANGELOG.md
|
||||||
|
- Cross-reference with ROADMAP.md milestones
|
||||||
|
- Identify which roadmap items are already completed (present in CHANGELOG)
|
||||||
|
|
||||||
|
3. **Suggest next steps:**
|
||||||
|
- Find the first uncompleted item in the current milestone
|
||||||
|
- Or identify the next milestone if current one is complete
|
||||||
|
- Present clear "start here" recommendation
|
||||||
|
|
||||||
|
**Example workflow:**
|
||||||
|
```
|
||||||
|
User: "Let's work on the roadmap" or points to ROADMAP.md
|
||||||
|
|
||||||
|
Claude should:
|
||||||
|
1. Read ROADMAP.md → See milestones v0.1.0, v0.2.0, v0.3.0...
|
||||||
|
2. Read CHANGELOG.md → See latest release is v0.1.1
|
||||||
|
3. Compare → v0.1.0 milestone complete, v0.2.0 in progress
|
||||||
|
4. Report → "v0.1.0 is complete. For v0.2.0, next item is: <feature>"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Common Workflows
|
||||||
|
|
||||||
|
### Adding a new CLI option
|
||||||
|
|
||||||
**Adding a new CLI option:**
|
|
||||||
```bash
|
```bash
|
||||||
# 1. Add to cli/constants.ts (CLI_OPTIONS, CLI_DESCRIPTIONS)
|
# 1. Add to cli/constants.ts (CLI_OPTIONS, CLI_DESCRIPTIONS)
|
||||||
# 2. Add option in cli/index.ts (.option() call)
|
# 2. Add option in cli/index.ts (.option() call)
|
||||||
# 3. Parse and use option in action handler
|
# 3. Parse and use option in action handler
|
||||||
# 4. Test with: node dist/cli/index.js check ./examples --your-option
|
# 4. Test: node dist/cli/index.js <command> --your-option
|
||||||
# 5. Update README.md CLI Usage section
|
# 5. Run pipeline
|
||||||
# 6. Run quality checks
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**Adding a new detector:**
|
### Adding a new detector (guardian)
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 1. Create value object in domain/value-objects/
|
# 1. Create value object in domain/value-objects/
|
||||||
# 2. Create detector in infrastructure/analyzers/
|
# 2. Create detector in infrastructure/analyzers/
|
||||||
# 3. Add detector interface to domain/services/
|
# 3. Add interface to domain/services/
|
||||||
# 4. Integrate in application/use-cases/AnalyzeProject.ts
|
# 4. Integrate in application/use-cases/AnalyzeProject.ts
|
||||||
# 5. Add CLI output in cli/index.ts
|
# 5. Add CLI output in cli/index.ts
|
||||||
# 6. Write tests (aim for >90% coverage)
|
# 6. Write tests (aim for >90% coverage)
|
||||||
# 7. Update README.md Features section
|
# 7. Run pipeline
|
||||||
# 8. Run full quality suite
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**Fixing technical debt:**
|
### Adding a new tool (ipuaro)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# 1. Define tool schema in infrastructure/tools/schemas/
|
||||||
|
# 2. Implement tool in infrastructure/tools/
|
||||||
|
# 3. Register in infrastructure/tools/index.ts
|
||||||
|
# 4. Add tests
|
||||||
|
# 5. Run pipeline
|
||||||
|
```
|
||||||
|
|
||||||
|
### Fixing technical debt
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 1. Find issue in TODO.md
|
# 1. Find issue in TODO.md
|
||||||
# 2. Implement fix
|
# 2. Implement fix
|
||||||
# 3. Run quality checks
|
# 3. Update TODO.md (mark as completed)
|
||||||
# 4. Update TODO.md (mark as completed)
|
# 4. Run pipeline with type: "refactor:" or "fix:"
|
||||||
# 5. Commit with type: "refactor:" or "fix:"
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Debugging Tips
|
## Debugging Tips
|
||||||
|
|
||||||
**Build errors:**
|
**Build errors:**
|
||||||
```bash
|
```bash
|
||||||
# Check TypeScript errors in detail
|
|
||||||
pnpm tsc --noEmit
|
pnpm tsc --noEmit
|
||||||
|
pnpm tsc --noEmit packages/<package>/src/path/to/file.ts
|
||||||
# Check specific file
|
|
||||||
pnpm tsc --noEmit packages/guardian/src/path/to/file.ts
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**Test failures:**
|
**Test failures:**
|
||||||
```bash
|
```bash
|
||||||
# Run single test file
|
|
||||||
pnpm vitest tests/path/to/test.test.ts
|
pnpm vitest tests/path/to/test.test.ts
|
||||||
|
|
||||||
# Run tests with UI
|
|
||||||
pnpm test:ui
|
pnpm test:ui
|
||||||
|
|
||||||
# Run tests in watch mode for debugging
|
|
||||||
pnpm test
|
|
||||||
```
|
```
|
||||||
|
|
||||||
**Coverage issues:**
|
**Coverage issues:**
|
||||||
```bash
|
```bash
|
||||||
# Generate detailed coverage report
|
|
||||||
pnpm test:coverage
|
pnpm test:coverage
|
||||||
|
|
||||||
# View HTML report
|
|
||||||
open coverage/index.html
|
open coverage/index.html
|
||||||
|
|
||||||
# Check specific file coverage
|
|
||||||
pnpm vitest --coverage --reporter=verbose
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Important Notes
|
## Important Notes
|
||||||
|
|||||||
104
README.md
104
README.md
@@ -6,6 +6,8 @@ A TypeScript monorepo for code quality and analysis tools.
|
|||||||
|
|
||||||
- **[@puaros/guardian](./packages/guardian)** - Research-backed code quality guardian for vibe coders and enterprise teams. Detects hardcoded values, secrets, circular dependencies, architecture violations, and anemic domain models. Every rule is based on academic research, industry standards (OWASP, SonarQube), and authoritative books (Martin Fowler, Uncle Bob, Eric Evans). Perfect for AI-assisted development and enforcing Clean Architecture at scale.
|
- **[@puaros/guardian](./packages/guardian)** - Research-backed code quality guardian for vibe coders and enterprise teams. Detects hardcoded values, secrets, circular dependencies, architecture violations, and anemic domain models. Every rule is based on academic research, industry standards (OWASP, SonarQube), and authoritative books (Martin Fowler, Uncle Bob, Eric Evans). Perfect for AI-assisted development and enforcing Clean Architecture at scale.
|
||||||
|
|
||||||
|
- **[@puaros/ipuaro](./packages/ipuaro)** - Local AI agent for codebase operations with "infinite" context feeling. Uses lazy loading and smart context management to work with codebases of any size. Features 18 LLM tools for reading, editing, searching, and analyzing code. Built with Ink/React TUI, Redis persistence, tree-sitter AST parsing, and Ollama integration.
|
||||||
|
|
||||||
## Prerequisites
|
## Prerequisites
|
||||||
|
|
||||||
- Node.js 22.18.0 (use `nvm use` to automatically switch to the correct version)
|
- Node.js 22.18.0 (use `nvm use` to automatically switch to the correct version)
|
||||||
@@ -75,18 +77,27 @@ pnpm eslint "packages/**/*.ts"
|
|||||||
```
|
```
|
||||||
puaros/
|
puaros/
|
||||||
├── packages/
|
├── packages/
|
||||||
│ └── guardian/ # @puaros/guardian - Code quality analyzer
|
│ ├── guardian/ # @puaros/guardian - Code quality analyzer
|
||||||
|
│ │ ├── src/ # Source files (Clean Architecture)
|
||||||
|
│ │ │ ├── domain/ # Domain layer
|
||||||
|
│ │ │ ├── application/ # Application layer
|
||||||
|
│ │ │ ├── infrastructure/# Infrastructure layer
|
||||||
|
│ │ │ ├── cli/ # CLI implementation
|
||||||
|
│ │ │ └── shared/ # Shared utilities
|
||||||
|
│ │ ├── bin/ # CLI entry point
|
||||||
|
│ │ ├── tests/ # Unit and integration tests
|
||||||
|
│ │ └── examples/ # Usage examples
|
||||||
|
│ └── ipuaro/ # @puaros/ipuaro - Local AI agent
|
||||||
│ ├── src/ # Source files (Clean Architecture)
|
│ ├── src/ # Source files (Clean Architecture)
|
||||||
│ │ ├── domain/ # Domain layer
|
│ │ ├── domain/ # Entities, value objects, services
|
||||||
│ │ ├── application/ # Application layer
|
│ │ ├── application/ # Use cases, DTOs, mappers
|
||||||
│ │ ├── infrastructure/# Infrastructure layer
|
│ │ ├── infrastructure/# Storage, LLM, indexer, tools
|
||||||
│ │ ├── cli/ # CLI implementation
|
│ │ ├── tui/ # Terminal UI (Ink/React)
|
||||||
│ │ └── shared/ # Shared utilities
|
│ │ ├── cli/ # CLI commands
|
||||||
│ ├── dist/ # Build output (generated)
|
│ │ └── shared/ # Types, constants, utils
|
||||||
│ ├── bin/ # CLI entry point
|
│ ├── bin/ # CLI entry point
|
||||||
│ ├── tests/ # Unit and integration tests
|
│ ├── tests/ # Unit and E2E tests
|
||||||
│ ├── examples/ # Usage examples
|
│ └── examples/ # Demo projects
|
||||||
│ └── package.json
|
|
||||||
├── pnpm-workspace.yaml # Workspace configuration
|
├── pnpm-workspace.yaml # Workspace configuration
|
||||||
├── tsconfig.base.json # Shared TypeScript config
|
├── tsconfig.base.json # Shared TypeScript config
|
||||||
├── eslint.config.mjs # ESLint configuration
|
├── eslint.config.mjs # ESLint configuration
|
||||||
@@ -204,6 +215,79 @@ guardian check ./src --format json > report.json
|
|||||||
guardian check ./src --fail-on hardcode --fail-on circular
|
guardian check ./src --fail-on hardcode --fail-on circular
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## ipuaro Package
|
||||||
|
|
||||||
|
The `@puaros/ipuaro` package is a local AI agent for codebase operations:
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- **Infinite Context Feeling**: Lazy loading and smart context management for any codebase size
|
||||||
|
- **18 LLM Tools**: Read, edit, search, analyze code through natural language
|
||||||
|
- **Terminal UI**: Full-featured TUI built with Ink/React
|
||||||
|
- **Redis Persistence**: Sessions, undo stack, and project index stored in Redis
|
||||||
|
- **AST Parsing**: tree-sitter for TypeScript/JavaScript analysis
|
||||||
|
- **File Watching**: Real-time index updates via chokidar
|
||||||
|
- **Security**: Blacklist/whitelist for command execution
|
||||||
|
|
||||||
|
### Tech Stack
|
||||||
|
|
||||||
|
| Component | Technology |
|
||||||
|
|-----------|------------|
|
||||||
|
| Runtime | Node.js + TypeScript |
|
||||||
|
| TUI | Ink (React for terminal) |
|
||||||
|
| Storage | Redis with AOF persistence |
|
||||||
|
| AST | tree-sitter (ts, tsx, js, jsx) |
|
||||||
|
| LLM | Ollama (qwen2.5-coder:7b-instruct) |
|
||||||
|
| Git | simple-git |
|
||||||
|
| File watching | chokidar |
|
||||||
|
|
||||||
|
### Tools (18 total)
|
||||||
|
|
||||||
|
| Category | Tools |
|
||||||
|
|----------|-------|
|
||||||
|
| **Read** | get_lines, get_function, get_class, get_structure |
|
||||||
|
| **Edit** | edit_lines, create_file, delete_file |
|
||||||
|
| **Search** | find_references, find_definition |
|
||||||
|
| **Analysis** | get_dependencies, get_dependents, get_complexity, get_todos |
|
||||||
|
| **Git** | git_status, git_diff, git_commit |
|
||||||
|
| **Run** | run_command, run_tests |
|
||||||
|
|
||||||
|
### Architecture
|
||||||
|
|
||||||
|
Built with Clean Architecture principles:
|
||||||
|
- **Domain Layer**: Entities, value objects, service interfaces
|
||||||
|
- **Application Layer**: Use cases, DTOs, mappers
|
||||||
|
- **Infrastructure Layer**: Redis storage, Ollama client, indexer, tools
|
||||||
|
- **TUI Layer**: Ink/React components and hooks
|
||||||
|
- **CLI Layer**: Commander.js entry point
|
||||||
|
|
||||||
|
### Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start TUI in current directory
|
||||||
|
ipuaro
|
||||||
|
|
||||||
|
# Start in specific directory
|
||||||
|
ipuaro /path/to/project
|
||||||
|
|
||||||
|
# Index only (no TUI)
|
||||||
|
ipuaro index
|
||||||
|
|
||||||
|
# With auto-apply mode
|
||||||
|
ipuaro --auto-apply
|
||||||
|
```
|
||||||
|
|
||||||
|
### Commands
|
||||||
|
|
||||||
|
| Command | Description |
|
||||||
|
|---------|-------------|
|
||||||
|
| `/help` | Show all commands |
|
||||||
|
| `/clear` | Clear chat history |
|
||||||
|
| `/undo` | Revert last file change |
|
||||||
|
| `/sessions` | Manage sessions |
|
||||||
|
| `/status` | System status |
|
||||||
|
| `/reindex` | Force reindexation |
|
||||||
|
|
||||||
## Dependencies
|
## Dependencies
|
||||||
|
|
||||||
Guardian package uses:
|
Guardian package uses:
|
||||||
|
|||||||
@@ -74,6 +74,7 @@ export default tseslint.config(
|
|||||||
'@typescript-eslint/require-await': 'warn',
|
'@typescript-eslint/require-await': 'warn',
|
||||||
'@typescript-eslint/no-unnecessary-condition': 'off', // Sometimes useful for defensive coding
|
'@typescript-eslint/no-unnecessary-condition': 'off', // Sometimes useful for defensive coding
|
||||||
'@typescript-eslint/no-non-null-assertion': 'warn',
|
'@typescript-eslint/no-non-null-assertion': 'warn',
|
||||||
|
'@typescript-eslint/no-unnecessary-type-parameters': 'warn', // Allow generic JSON parsers
|
||||||
|
|
||||||
// ========================================
|
// ========================================
|
||||||
// Code Quality & Best Practices
|
// Code Quality & Best Practices
|
||||||
|
|||||||
@@ -5,6 +5,52 @@ All notable changes to @samiyev/guardian will be documented in this file.
|
|||||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [0.9.4] - 2025-11-30
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **VERSION export** - Package version is now exported from index.ts, automatically read from package.json
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- 🔄 **Refactored SecretDetector** - Reduced cyclomatic complexity from 24 to <15:
|
||||||
|
- Extracted helper methods: `extractByRuleId`, `extractAwsType`, `extractGithubType`, `extractSshType`, `extractSlackType`, `extractByMessage`
|
||||||
|
- Used lookup arrays for SSH and message type mappings
|
||||||
|
- 🔄 **Refactored AstNamingTraverser** - Reduced cyclomatic complexity from 17 to <15:
|
||||||
|
- Replaced if-else chain with Map-based node handlers
|
||||||
|
- Added `buildNodeHandlers()` method for cleaner architecture
|
||||||
|
|
||||||
|
### Quality
|
||||||
|
|
||||||
|
- ✅ **Zero lint warnings** - All ESLint warnings resolved
|
||||||
|
- ✅ **All 616 tests pass**
|
||||||
|
|
||||||
|
## [0.9.2] - 2025-11-27
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- 🔄 **Refactored naming convention detector** - Migrated from regex-based to AST-based analysis:
|
||||||
|
- Replaced regex pattern matching with tree-sitter Abstract Syntax Tree traversal
|
||||||
|
- Improved accuracy with AST node context awareness (classes, interfaces, functions, variables)
|
||||||
|
- Reduced false positives with better naming pattern detection
|
||||||
|
- Added centralized AST node type constants (`ast-node-types.ts`) for maintainability
|
||||||
|
- New modular architecture with specialized analyzers:
|
||||||
|
- `AstClassNameAnalyzer` - Class naming validation
|
||||||
|
- `AstInterfaceNameAnalyzer` - Interface naming validation
|
||||||
|
- `AstFunctionNameAnalyzer` - Function naming validation
|
||||||
|
- `AstVariableNameAnalyzer` - Variable naming validation
|
||||||
|
- `AstNamingTraverser` - AST traversal for naming analysis
|
||||||
|
- Enhanced context-aware suggestions for hardcoded values:
|
||||||
|
- Added context keywords (EMAIL_CONTEXT_KEYWORDS, API_KEY_CONTEXT_KEYWORDS, URL_CONTEXT_KEYWORDS, etc.)
|
||||||
|
- Improved constant name generation based on context (ADMIN_EMAIL, API_SECRET_KEY, DATABASE_URL, etc.)
|
||||||
|
- Better file path suggestions (CONFIG_ENVIRONMENT, CONFIG_CONTACTS, CONFIG_PATHS, etc.)
|
||||||
|
|
||||||
|
### Quality
|
||||||
|
|
||||||
|
- ✅ **All tests pass** - Updated tests for AST-based naming detection
|
||||||
|
- ✅ **Code organization** - Centralized AST constants reduce code duplication
|
||||||
|
- ✅ **Maintainability** - Modular analyzers improve code separation and testability
|
||||||
|
|
||||||
## [0.9.1] - 2025-11-26
|
## [0.9.1] - 2025-11-26
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
|
|||||||
@@ -325,17 +325,6 @@ await reportMetrics({
|
|||||||
| **AI Enablement** | Safely adopt AI coding tools at scale |
|
| **AI Enablement** | Safely adopt AI coding tools at scale |
|
||||||
| **Technical Debt Visibility** | Metrics and trends for data-driven decisions |
|
| **Technical Debt Visibility** | Metrics and trends for data-driven decisions |
|
||||||
|
|
||||||
### Enterprise Success Stories
|
|
||||||
|
|
||||||
**Fortune 500 Financial Services** 🏦
|
|
||||||
> "We have 200+ developers and were struggling with architectural consistency. Guardian reduced our code review cycle time by 35% and caught 12 hardcoded API keys before they hit production. ROI in first month." - VP Engineering
|
|
||||||
|
|
||||||
**Scale-up SaaS (Series B)** 📈
|
|
||||||
> "Guardian allowed us to confidently adopt GitHub Copilot across our team. AI writes code 3x faster, Guardian ensures quality. We ship more features without increasing tech debt." - CTO
|
|
||||||
|
|
||||||
**Consulting Firm** 💼
|
|
||||||
> "We use Guardian on every client project. It enforces our standards automatically, and clients love the quality metrics reports. Saved us from a major security incident when it caught hardcoded AWS credentials." - Lead Architect
|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -970,36 +959,6 @@ Guardian follows Clean Architecture principles:
|
|||||||
- Node.js >= 18.0.0
|
- Node.js >= 18.0.0
|
||||||
- TypeScript >= 5.0.0 (for TypeScript projects)
|
- TypeScript >= 5.0.0 (for TypeScript projects)
|
||||||
|
|
||||||
## Real-World Vibe Coding Stats
|
|
||||||
|
|
||||||
Based on testing Guardian with AI-generated codebases:
|
|
||||||
|
|
||||||
| Metric | Typical AI Code | After Guardian |
|
|
||||||
|--------|----------------|----------------|
|
|
||||||
| Hardcoded values | 15-30 per 1000 LOC | 0-2 per 1000 LOC |
|
|
||||||
| Circular deps | 2-5 per project | 0 per project |
|
|
||||||
| Architecture violations | 10-20% of files | <1% of files |
|
|
||||||
| Time to fix issues | Manual review: 2-4 hours | Guardian + AI: 5-10 minutes |
|
|
||||||
|
|
||||||
**Common Issues Guardian Finds in AI Code:**
|
|
||||||
- 🔐 Hardcoded secrets and API keys (CRITICAL)
|
|
||||||
- ⏱️ Magic timeouts and retry counts
|
|
||||||
- 🌐 Hardcoded URLs and endpoints
|
|
||||||
- 🔄 Accidental circular imports
|
|
||||||
- 📁 Files in wrong architectural layers
|
|
||||||
- 🏷️ Inconsistent naming patterns
|
|
||||||
|
|
||||||
## Success Stories
|
|
||||||
|
|
||||||
**Prototype to Production** ⚡
|
|
||||||
> "Built a SaaS MVP with Claude in 3 days. Guardian caught 47 hardcoded values before first deploy. Saved us from production disasters." - Indie Hacker
|
|
||||||
|
|
||||||
**Learning Clean Architecture** 📚
|
|
||||||
> "Guardian taught me Clean Architecture better than any tutorial. Every violation is a mini lesson with suggestions." - Junior Dev
|
|
||||||
|
|
||||||
**AI-First Startup** 🚀
|
|
||||||
> "We ship 5+ features daily using Claude + Guardian. No human code reviews needed for AI-generated code anymore." - Tech Lead
|
|
||||||
|
|
||||||
## FAQ for Vibe Coders
|
## FAQ for Vibe Coders
|
||||||
|
|
||||||
**Q: Will Guardian slow down my AI workflow?**
|
**Q: Will Guardian slow down my AI workflow?**
|
||||||
|
|||||||
979
packages/guardian/docs/RESEARCH_PROJECT_STRUCTURE_DETECTION.md
Normal file
979
packages/guardian/docs/RESEARCH_PROJECT_STRUCTURE_DETECTION.md
Normal file
@@ -0,0 +1,979 @@
|
|||||||
|
# Research: Project Structure Detection for Architecture Analysis
|
||||||
|
|
||||||
|
This document provides comprehensive research on approaches to detecting and validating project architecture structure. It covers existing tools, academic research, algorithms, and industry best practices that inform Guardian's architecture detection strategy.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
1. [Executive Summary](#1-executive-summary)
|
||||||
|
2. [Existing Tools Analysis](#2-existing-tools-analysis)
|
||||||
|
3. [Academic Approaches to Architecture Recovery](#3-academic-approaches-to-architecture-recovery)
|
||||||
|
4. [Graph Analysis Algorithms](#4-graph-analysis-algorithms)
|
||||||
|
5. [Configuration Patterns and Best Practices](#5-configuration-patterns-and-best-practices)
|
||||||
|
6. [Industry Consensus](#6-industry-consensus)
|
||||||
|
7. [Recommendations for Guardian](#7-recommendations-for-guardian)
|
||||||
|
8. [Additional Resources](#8-additional-resources)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Executive Summary
|
||||||
|
|
||||||
|
### Key Finding
|
||||||
|
|
||||||
|
**Industry consensus:** Automatic architecture detection is unreliable. All major tools (ArchUnit, eslint-plugin-boundaries, Nx, dependency-cruiser, SonarQube) require **explicit configuration** from users rather than attempting automatic detection.
|
||||||
|
|
||||||
|
### Why Automatic Detection Fails
|
||||||
|
|
||||||
|
1. **Too Many Variations**: Project structures vary wildly across teams, frameworks, and domains
|
||||||
|
2. **False Positives**: Algorithms may "detect" non-existent architectural patterns
|
||||||
|
3. **Performance**: Graph analysis is slow for large codebases (>2000 files)
|
||||||
|
4. **Ambiguity**: Same folder names can mean different things in different contexts
|
||||||
|
5. **Legacy Code**: Poorly structured code produces meaningless analysis results
|
||||||
|
|
||||||
|
### Recommended Approach
|
||||||
|
|
||||||
|
| Priority | Approach | Description |
|
||||||
|
|----------|----------|-------------|
|
||||||
|
| P0 | Pattern-based detection | Glob/regex patterns for layer identification |
|
||||||
|
| P0 | Configuration file | `.guardianrc.json` for explicit rules |
|
||||||
|
| P1 | Presets | Pre-configured patterns for common architectures |
|
||||||
|
| P1 | Generic mode | Fallback with minimal checks |
|
||||||
|
| P2 | Interactive setup | CLI wizard for configuration generation |
|
||||||
|
| P2 | Graph visualization | Visual dependency analysis (informational only) |
|
||||||
|
| ❌ | Auto-detection | NOT recommended as primary strategy |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Existing Tools Analysis
|
||||||
|
|
||||||
|
### 2.1 ArchUnit (Java)
|
||||||
|
|
||||||
|
**Approach:** Fully declarative - user defines all layers explicitly.
|
||||||
|
|
||||||
|
**Official Website:** https://www.archunit.org/
|
||||||
|
|
||||||
|
**User Guide:** https://www.archunit.org/userguide/html/000_Index.html
|
||||||
|
|
||||||
|
**GitHub Repository:** https://github.com/TNG/ArchUnit
|
||||||
|
|
||||||
|
**Key Characteristics:**
|
||||||
|
- Does NOT detect architecture automatically
|
||||||
|
- User explicitly defines layers via package patterns
|
||||||
|
- Fluent API for rule definition
|
||||||
|
- Supports Layered, Onion, and Hexagonal architectures out-of-box
|
||||||
|
- Integrates with JUnit/TestNG test frameworks
|
||||||
|
|
||||||
|
**Example Configuration:**
|
||||||
|
```java
|
||||||
|
layeredArchitecture()
|
||||||
|
.layer("Controller").definedBy("..controller..")
|
||||||
|
.layer("Service").definedBy("..service..")
|
||||||
|
.layer("Persistence").definedBy("..persistence..")
|
||||||
|
.whereLayer("Controller").mayNotBeAccessedByAnyLayer()
|
||||||
|
.whereLayer("Service").mayOnlyBeAccessedByLayers("Controller")
|
||||||
|
.whereLayer("Persistence").mayOnlyBeAccessedByLayers("Service")
|
||||||
|
```
|
||||||
|
|
||||||
|
**References:**
|
||||||
|
- Baeldung Tutorial: https://www.baeldung.com/java-archunit-intro
|
||||||
|
- InfoQ Article: https://www.infoq.com/news/2022/10/archunit/
|
||||||
|
- Examples Repository: https://github.com/TNG/ArchUnit-Examples
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 2.2 eslint-plugin-boundaries (TypeScript/JavaScript)
|
||||||
|
|
||||||
|
**Approach:** Pattern-based element definition with dependency rules.
|
||||||
|
|
||||||
|
**NPM Package:** https://www.npmjs.com/package/eslint-plugin-boundaries
|
||||||
|
|
||||||
|
**GitHub Repository:** https://github.com/javierbrea/eslint-plugin-boundaries
|
||||||
|
|
||||||
|
**Key Characteristics:**
|
||||||
|
- Does NOT detect architecture automatically
|
||||||
|
- Uses micromatch/glob patterns for element identification
|
||||||
|
- Supports capture groups for dynamic element naming
|
||||||
|
- TypeScript import type awareness (`value` vs `type` imports)
|
||||||
|
- Works with monorepos
|
||||||
|
|
||||||
|
**Example Configuration:**
|
||||||
|
```javascript
|
||||||
|
settings: {
|
||||||
|
"boundaries/elements": [
|
||||||
|
{
|
||||||
|
type: "domain",
|
||||||
|
pattern: "src/domain/*",
|
||||||
|
mode: "folder",
|
||||||
|
capture: ["elementName"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: "application",
|
||||||
|
pattern: "src/application/*",
|
||||||
|
mode: "folder"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: "infrastructure",
|
||||||
|
pattern: "src/infrastructure/*",
|
||||||
|
mode: "folder"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
rules: {
|
||||||
|
"boundaries/element-types": [2, {
|
||||||
|
default: "disallow",
|
||||||
|
rules: [
|
||||||
|
{ from: "infrastructure", allow: ["application", "domain"] },
|
||||||
|
{ from: "application", allow: ["domain"] },
|
||||||
|
{ from: "domain", disallow: ["*"] }
|
||||||
|
]
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**References:**
|
||||||
|
- TypeScript Example: https://github.com/javierbrea/epb-ts-example
|
||||||
|
- Element Types Documentation: https://github.com/javierbrea/eslint-plugin-boundaries/blob/master/docs/rules/element-types.md
|
||||||
|
- Medium Tutorial: https://medium.com/@taynan_duarte/ensuring-dependency-rules-in-a-nodejs-application-with-typescript-using-eslint-plugin-boundaries-68b70ce32437
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 2.3 SonarQube Architecture as Code
|
||||||
|
|
||||||
|
**Approach:** YAML/JSON configuration with automatic code structure analysis.
|
||||||
|
|
||||||
|
**Official Documentation:** https://docs.sonarsource.com/sonarqube-server/design-and-architecture/overview/
|
||||||
|
|
||||||
|
**Configuration Guide:** https://docs.sonarsource.com/sonarqube-server/design-and-architecture/configuring-the-architecture-analysis/
|
||||||
|
|
||||||
|
**Key Characteristics:**
|
||||||
|
- Introduced in SonarQube 2025 Release 2
|
||||||
|
- Automatic code structure analysis (basic)
|
||||||
|
- YAML/JSON configuration for custom rules
|
||||||
|
- Supports "Perspectives" (multiple views of architecture)
|
||||||
|
- Hierarchical "Groups" for organization
|
||||||
|
- Glob and regex pattern support
|
||||||
|
- Works without configuration for basic checks (cycle detection)
|
||||||
|
|
||||||
|
**Supported Languages:**
|
||||||
|
- Java (SonarQube Server)
|
||||||
|
- Java, JavaScript, TypeScript (SonarQube Cloud)
|
||||||
|
- Python, C# (coming soon)
|
||||||
|
- C++ (under consideration)
|
||||||
|
|
||||||
|
**Example Configuration:**
|
||||||
|
```yaml
|
||||||
|
# architecture.yaml
|
||||||
|
perspectives:
|
||||||
|
- name: "Clean Architecture"
|
||||||
|
groups:
|
||||||
|
- name: "Domain"
|
||||||
|
patterns:
|
||||||
|
- "src/domain/**"
|
||||||
|
- "src/core/**"
|
||||||
|
- name: "Application"
|
||||||
|
patterns:
|
||||||
|
- "src/application/**"
|
||||||
|
- "src/use-cases/**"
|
||||||
|
- name: "Infrastructure"
|
||||||
|
patterns:
|
||||||
|
- "src/infrastructure/**"
|
||||||
|
- "src/adapters/**"
|
||||||
|
constraints:
|
||||||
|
- from: "Domain"
|
||||||
|
deny: ["Application", "Infrastructure"]
|
||||||
|
- from: "Application"
|
||||||
|
deny: ["Infrastructure"]
|
||||||
|
```
|
||||||
|
|
||||||
|
**References:**
|
||||||
|
- Blog Announcement: https://www.sonarsource.com/blog/introducing-architecture-as-code-in-sonarqube/
|
||||||
|
- Security Boulevard Coverage: https://securityboulevard.com/2025/04/introducing-architecture-as-code-in-sonarqube-7/
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 2.4 Nx Enforce Module Boundaries
|
||||||
|
|
||||||
|
**Approach:** Tag-based system with ESLint integration.
|
||||||
|
|
||||||
|
**Official Documentation:** https://nx.dev/docs/features/enforce-module-boundaries
|
||||||
|
|
||||||
|
**ESLint Rule Guide:** https://nx.dev/docs/technologies/eslint/eslint-plugin/guides/enforce-module-boundaries
|
||||||
|
|
||||||
|
**Key Characteristics:**
|
||||||
|
- Tag-based constraint system (scope, type)
|
||||||
|
- Projects tagged in project.json or package.json
|
||||||
|
- Supports regex patterns in tags
|
||||||
|
- Two-dimensional constraints (scope + type)
|
||||||
|
- External dependency blocking
|
||||||
|
- Integration with Nx project graph
|
||||||
|
|
||||||
|
**Example Configuration:**
|
||||||
|
```json
|
||||||
|
// project.json
|
||||||
|
{
|
||||||
|
"name": "user-domain",
|
||||||
|
"tags": ["scope:user", "type:domain"]
|
||||||
|
}
|
||||||
|
|
||||||
|
// ESLint config
|
||||||
|
{
|
||||||
|
"@nx/enforce-module-boundaries": ["error", {
|
||||||
|
"depConstraints": [
|
||||||
|
{
|
||||||
|
"sourceTag": "type:domain",
|
||||||
|
"onlyDependOnLibsWithTags": ["type:domain"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"sourceTag": "type:application",
|
||||||
|
"onlyDependOnLibsWithTags": ["type:domain", "type:application"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"sourceTag": "scope:user",
|
||||||
|
"onlyDependOnLibsWithTags": ["scope:user", "scope:shared"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**References:**
|
||||||
|
- Project Dependency Rules: https://nx.dev/docs/concepts/decisions/project-dependency-rules
|
||||||
|
- Blog Post on Module Boundaries: https://nx.dev/blog/mastering-the-project-boundaries-in-nx
|
||||||
|
- Medium Tutorial: https://medium.com/rupesh-tiwari/enforcing-dependency-constraints-within-service-in-nx-monorepo-workspace-56e87e792c98
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 2.5 dependency-cruiser
|
||||||
|
|
||||||
|
**Approach:** Rule-based validation with visualization capabilities.
|
||||||
|
|
||||||
|
**NPM Package:** https://www.npmjs.com/package/dependency-cruiser
|
||||||
|
|
||||||
|
**GitHub Repository:** https://github.com/sverweij/dependency-cruiser
|
||||||
|
|
||||||
|
**Key Characteristics:**
|
||||||
|
- Regex patterns for from/to rules
|
||||||
|
- Multiple output formats (SVG, DOT, Mermaid, JSON, HTML)
|
||||||
|
- CI/CD integration support
|
||||||
|
- TypeScript pre-compilation dependency support
|
||||||
|
- Does NOT detect architecture automatically
|
||||||
|
|
||||||
|
**Example Configuration:**
|
||||||
|
```javascript
|
||||||
|
// .dependency-cruiser.js
|
||||||
|
module.exports = {
|
||||||
|
forbidden: [
|
||||||
|
{
|
||||||
|
name: "no-domain-to-infrastructure",
|
||||||
|
severity: "error",
|
||||||
|
from: { path: "^src/domain" },
|
||||||
|
to: { path: "^src/infrastructure" }
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "no-circular",
|
||||||
|
severity: "error",
|
||||||
|
from: {},
|
||||||
|
to: { circular: true }
|
||||||
|
}
|
||||||
|
],
|
||||||
|
options: {
|
||||||
|
doNotFollow: { path: "node_modules" },
|
||||||
|
tsPreCompilationDeps: true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**References:**
|
||||||
|
- Options Reference: https://github.com/sverweij/dependency-cruiser/blob/main/doc/options-reference.md
|
||||||
|
- Rules Reference: https://github.com/sverweij/dependency-cruiser/blob/main/doc/rules-reference.md
|
||||||
|
- Clean Architecture Tutorial: https://betterprogramming.pub/validate-dependencies-according-to-clean-architecture-743077ea084c
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 2.6 ts-arch / ArchUnitTS (TypeScript)
|
||||||
|
|
||||||
|
**Approach:** ArchUnit-like fluent API for TypeScript.
|
||||||
|
|
||||||
|
**ts-arch GitHub:** https://github.com/ts-arch/ts-arch
|
||||||
|
|
||||||
|
**ts-arch Documentation:** https://ts-arch.github.io/ts-arch/
|
||||||
|
|
||||||
|
**ArchUnitTS GitHub:** https://github.com/LukasNiessen/ArchUnitTS
|
||||||
|
|
||||||
|
**Key Characteristics:**
|
||||||
|
- Fluent API similar to ArchUnit
|
||||||
|
- PlantUML diagram validation support
|
||||||
|
- Jest/Vitest integration
|
||||||
|
- Nx monorepo support
|
||||||
|
- Does NOT detect architecture automatically
|
||||||
|
|
||||||
|
**Example Usage:**
|
||||||
|
```typescript
|
||||||
|
import { filesOfProject } from "tsarch"
|
||||||
|
|
||||||
|
// Folder-based dependency check
|
||||||
|
const rule = filesOfProject()
|
||||||
|
.inFolder("domain")
|
||||||
|
.shouldNot()
|
||||||
|
.dependOnFiles()
|
||||||
|
.inFolder("infrastructure")
|
||||||
|
|
||||||
|
await expect(rule).toPassAsync()
|
||||||
|
|
||||||
|
// PlantUML diagram validation
|
||||||
|
const diagramRule = await slicesOfProject()
|
||||||
|
.definedBy("src/(**/)")
|
||||||
|
.should()
|
||||||
|
.adhereToDiagramInFile("architecture.puml")
|
||||||
|
```
|
||||||
|
|
||||||
|
**References:**
|
||||||
|
- NPM Package: https://www.npmjs.com/package/tsarch
|
||||||
|
- ArchUnitTS Documentation: https://lukasniessen.github.io/ArchUnitTS/
|
||||||
|
- DeepWiki Analysis: https://deepwiki.com/ts-arch/ts-arch
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 2.7 Madge
|
||||||
|
|
||||||
|
**Approach:** Visualization and circular dependency detection.
|
||||||
|
|
||||||
|
**NPM Package:** https://www.npmjs.com/package/madge
|
||||||
|
|
||||||
|
**GitHub Repository:** https://github.com/pahen/madge
|
||||||
|
|
||||||
|
**Key Characteristics:**
|
||||||
|
- Dependency graph visualization
|
||||||
|
- Circular dependency detection
|
||||||
|
- Multiple layout algorithms (dot, neato, fdp, circo)
|
||||||
|
- Simple CLI interface
|
||||||
|
- Does NOT define or enforce layers
|
||||||
|
|
||||||
|
**Usage:**
|
||||||
|
```bash
|
||||||
|
# Find circular dependencies
|
||||||
|
npx madge --circular src/
|
||||||
|
|
||||||
|
# Generate dependency graph
|
||||||
|
npx madge src/ --image deps.svg
|
||||||
|
|
||||||
|
# TypeScript support
|
||||||
|
npx madge src/main.ts --ts-config tsconfig.json --image ./deps.png
|
||||||
|
```
|
||||||
|
|
||||||
|
**References:**
|
||||||
|
- NestJS Integration: https://manishbit97.medium.com/identifying-circular-dependencies-in-nestjs-using-madge-de137cd7f74f
|
||||||
|
- Angular Integration: https://www.angulartraining.com/daily-newsletter/visualizing-internal-dependencies-with-madge/
|
||||||
|
- React/TypeScript Tutorial: https://dev.to/greenroach/detecting-circular-dependencies-in-a-reacttypescript-app-using-madge-229
|
||||||
|
|
||||||
|
**Alternative: Skott**
|
||||||
|
- Claims to be 7x faster than Madge
|
||||||
|
- Reference: https://dev.to/antoinecoulon/introducing-skott-the-new-madge-1bfl
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Academic Approaches to Architecture Recovery
|
||||||
|
|
||||||
|
### 3.1 Software Architecture Recovery Overview
|
||||||
|
|
||||||
|
**Wikipedia Definition:** https://en.wikipedia.org/wiki/Software_architecture_recovery
|
||||||
|
|
||||||
|
Software architecture recovery is a set of methods for extracting architectural information from lower-level representations of a software system, such as source code. The abstraction process frequently involves clustering source code entities (files, classes, functions) into subsystems according to application-dependent or independent criteria.
|
||||||
|
|
||||||
|
**Motivation:**
|
||||||
|
- Legacy systems often lack architectural documentation
|
||||||
|
- Existing documentation is frequently out of sync with implementation
|
||||||
|
- Understanding architecture is essential for maintenance and evolution
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3.2 Machine Learning Approaches
|
||||||
|
|
||||||
|
**Research Paper:** "Automatic software architecture recovery: A machine learning approach"
|
||||||
|
|
||||||
|
**Source:** ResearchGate - https://www.researchgate.net/publication/261309157_Automatic_software_architecture_recovery_A_machine_learning_approach
|
||||||
|
|
||||||
|
**Key Points:**
|
||||||
|
- Current architecture recovery techniques require heavy human intervention or fail to recover quality components
|
||||||
|
- Machine learning techniques use multiple feature types:
|
||||||
|
- Structural features (dependencies, coupling)
|
||||||
|
- Runtime behavioral features
|
||||||
|
- Domain/textual features
|
||||||
|
- Contextual features (code authorship, line co-change)
|
||||||
|
- Automatically recovering functional architecture facilitates developer understanding
|
||||||
|
|
||||||
|
**Limitation:** Requires training data and may not generalize across project types.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3.3 Genetic Algorithms for Architecture Recovery
|
||||||
|
|
||||||
|
**Research Paper:** "Parallelization of genetic algorithms for software architecture recovery"
|
||||||
|
|
||||||
|
**Source:** Springer - https://link.springer.com/content/pdf/10.1007/s10515-024-00479-0.pdf
|
||||||
|
|
||||||
|
**Key Points:**
|
||||||
|
- Software Architecture Recovery (SAR) techniques analyze dependencies between modules
|
||||||
|
- Automatically cluster modules to achieve high modularity
|
||||||
|
- Many approaches employ Genetic Algorithms (GAs)
|
||||||
|
- Major drawback: lack of scalability
|
||||||
|
- Solution: parallel execution of GA subroutines
|
||||||
|
|
||||||
|
**Finding:** Finding optimal software clustering is an NP-complete problem.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3.4 Clustering Algorithms Comparison
|
||||||
|
|
||||||
|
**Research Paper:** "A comparative analysis of software architecture recovery techniques"
|
||||||
|
|
||||||
|
**Source:** IEEE Xplore - https://ieeexplore.ieee.org/document/6693106/
|
||||||
|
|
||||||
|
**Algorithms Compared:**
|
||||||
|
| Algorithm | Description | Strengths | Weaknesses |
|
||||||
|
|-----------|-------------|-----------|------------|
|
||||||
|
| ACDC | Comprehension-Driven Clustering | Finds natural subsystems | Requires parameter tuning |
|
||||||
|
| LIMBO | Information-Theoretic Clustering | Scalable | May miss domain patterns |
|
||||||
|
| WCA | Weighted Combined Algorithm | Balances multiple factors | Complex configuration |
|
||||||
|
| K-means | Baseline clustering | Simple, fast | Poor for code structure |
|
||||||
|
|
||||||
|
**Key Finding:** Even the best techniques have surprisingly low accuracy when compared against verified ground truths.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3.5 ACDC Algorithm (Algorithm for Comprehension-Driven Clustering)
|
||||||
|
|
||||||
|
**Original Paper:** "ACDC: An Algorithm for Comprehension-Driven Clustering"
|
||||||
|
|
||||||
|
**Source:** ResearchGate - https://www.researchgate.net/publication/221200422_ACDC_An_Algorithm_for_Comprehension-Driven_Clustering
|
||||||
|
|
||||||
|
**York University Wiki:** https://wiki.eecs.yorku.ca/project/cluster/protected:acdc
|
||||||
|
|
||||||
|
**Algorithm Steps:**
|
||||||
|
1. Build dependency graph
|
||||||
|
2. Find "dominator" nodes (subsystem patterns)
|
||||||
|
3. Group nodes with common dominators
|
||||||
|
4. Apply orphan adoption for ungrouped nodes
|
||||||
|
5. Iteratively improve clusters
|
||||||
|
|
||||||
|
**Advantages:**
|
||||||
|
- Considers human comprehension patterns
|
||||||
|
- Finds natural subsystems
|
||||||
|
- Works without prior knowledge
|
||||||
|
|
||||||
|
**Disadvantages:**
|
||||||
|
- Requires parameter tuning
|
||||||
|
- Does not guarantee optimality
|
||||||
|
- May not work well on poorly structured code
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 3.6 LLM-Based Architecture Recovery (Recent Research)
|
||||||
|
|
||||||
|
**Research Paper:** "Automated Software Architecture Design Recovery from Source Code Using LLMs"
|
||||||
|
|
||||||
|
**Source:** Springer - https://link.springer.com/chapter/10.1007/978-3-032-02138-0_5
|
||||||
|
|
||||||
|
**Key Findings:**
|
||||||
|
- LLMs show promise for automating software architecture recovery
|
||||||
|
- Effective at identifying:
|
||||||
|
- ✅ Architectural styles
|
||||||
|
- ✅ Structural elements
|
||||||
|
- ✅ Basic design patterns
|
||||||
|
- Struggle with:
|
||||||
|
- ❌ Complex abstractions
|
||||||
|
- ❌ Class relationships
|
||||||
|
- ❌ Fine-grained design patterns
|
||||||
|
|
||||||
|
**Conclusion:** "LLMs can support SAR activities, particularly in identifying structural and stylistic elements, but they struggle with complex abstractions"
|
||||||
|
|
||||||
|
**Additional Reference:** arXiv paper on design principles - https://arxiv.org/html/2508.11717
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Graph Analysis Algorithms
|
||||||
|
|
||||||
|
### 4.1 Louvain Algorithm for Community Detection
|
||||||
|
|
||||||
|
**Wikipedia:** https://en.wikipedia.org/wiki/Louvain_method
|
||||||
|
|
||||||
|
**Original Paper:** "Fast unfolding of communities in large networks" (2008)
|
||||||
|
- Authors: Vincent D Blondel, Jean-Loup Guillaume, Renaud Lambiotte, Etienne Lefebvre
|
||||||
|
- Journal: Journal of Statistical Mechanics: Theory and Experiment
|
||||||
|
- Reference: https://perso.uclouvain.be/vincent.blondel/research/louvain.html
|
||||||
|
|
||||||
|
**Algorithm Description:**
|
||||||
|
1. Initialize each node as its own community
|
||||||
|
2. For each node, try moving to neighboring communities
|
||||||
|
3. Select move with maximum modularity gain
|
||||||
|
4. Merge communities into "super-nodes"
|
||||||
|
5. Repeat from step 2
|
||||||
|
|
||||||
|
**Modularity Formula:**
|
||||||
|
```
|
||||||
|
Q = (1/2m) * Σ[Aij - (ki*kj)/(2m)] * δ(ci, cj)
|
||||||
|
|
||||||
|
Where:
|
||||||
|
- Aij = edge weight between i and j
|
||||||
|
- ki, kj = node degrees
|
||||||
|
- m = sum of all weights
|
||||||
|
- δ = 1 if ci = cj (same cluster)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Characteristics:**
|
||||||
|
| Parameter | Value |
|
||||||
|
|-----------|-------|
|
||||||
|
| Time Complexity | O(n log n) |
|
||||||
|
| Modularity Range | -1 to 1 |
|
||||||
|
| Good Result | Q > 0.3 |
|
||||||
|
| Resolution Limit | Yes (may hide small communities) |
|
||||||
|
|
||||||
|
**Implementations:**
|
||||||
|
- NetworkX: https://networkx.org/documentation/stable/reference/algorithms/generated/networkx.algorithms.community.louvain.louvain_communities.html
|
||||||
|
- Neo4j: https://neo4j.com/docs/graph-data-science/current/algorithms/louvain/
|
||||||
|
- Graphology: https://graphology.github.io/standard-library/communities-louvain.html
|
||||||
|
- igraph: https://igraph.org/r/doc/cluster_louvain.html
|
||||||
|
|
||||||
|
**Application to Code Analysis:**
|
||||||
|
```
|
||||||
|
Dependency Graph:
|
||||||
|
User.ts → Email.ts, UserId.ts
|
||||||
|
Order.ts → OrderId.ts, Money.ts
|
||||||
|
UserController.ts → User.ts, CreateUser.ts
|
||||||
|
|
||||||
|
Louvain detects communities:
|
||||||
|
Community 1: [User.ts, Email.ts, UserId.ts] // User aggregate
|
||||||
|
Community 2: [Order.ts, OrderId.ts, Money.ts] // Order aggregate
|
||||||
|
Community 3: [UserController.ts, CreateUser.ts] // User feature
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 4.2 Modularity as Quality Metric
|
||||||
|
|
||||||
|
**Wikipedia:** https://en.wikipedia.org/wiki/Modularity_(networks)
|
||||||
|
|
||||||
|
**Definition:** Modularity measures the strength of division of a network into modules (groups, clusters, communities). Networks with high modularity have dense connections within modules but sparse connections between modules.
|
||||||
|
|
||||||
|
**Interpretation:**
|
||||||
|
| Modularity Value | Interpretation |
|
||||||
|
|------------------|----------------|
|
||||||
|
| Q < 0 | Non-modular (worse than random) |
|
||||||
|
| 0 < Q < 0.3 | Weak community structure |
|
||||||
|
| 0.3 < Q < 0.5 | Moderate community structure |
|
||||||
|
| Q > 0.5 | Strong community structure |
|
||||||
|
| Q → 1 | Perfect modularity |
|
||||||
|
|
||||||
|
**Research Reference:** "Fast Algorithm for Modularity-Based Graph Clustering" - https://cdn.aaai.org/ojs/8455/8455-13-11983-1-2-20201228.pdf
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 4.3 Graph-Based Software Modularization
|
||||||
|
|
||||||
|
**Research Paper:** "A graph-based clustering algorithm for software systems modularization"
|
||||||
|
|
||||||
|
**Source:** ScienceDirect - https://www.sciencedirect.com/science/article/abs/pii/S0950584920302147
|
||||||
|
|
||||||
|
**Key Points:**
|
||||||
|
- Clustering algorithms partition source code into manageable modules
|
||||||
|
- Resulting decomposition is called software system structure
|
||||||
|
- Due to NP-hardness, evolutionary approaches are commonly used
|
||||||
|
- Objectives:
|
||||||
|
- Minimize inter-cluster connections
|
||||||
|
- Maximize intra-cluster connections
|
||||||
|
- Maximize overall clustering quality
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 4.4 Topological Sorting for Layer Detection
|
||||||
|
|
||||||
|
**Algorithm Description:**
|
||||||
|
|
||||||
|
Layers can be inferred from dependency graph topology:
|
||||||
|
- **Layer 0 (Domain)**: Nodes with no outgoing dependencies to other layers
|
||||||
|
- **Layer 1 (Application)**: Nodes depending only on Layer 0
|
||||||
|
- **Layer 2+ (Infrastructure)**: Nodes depending on lower layers
|
||||||
|
|
||||||
|
**Pseudocode:**
|
||||||
|
```
|
||||||
|
function detectLayers(graph):
|
||||||
|
layers = Map()
|
||||||
|
visited = Set()
|
||||||
|
|
||||||
|
function dfs(node):
|
||||||
|
if layers.has(node): return layers.get(node)
|
||||||
|
        if visited.has(node): return 0 // Cycle detected — cyclic nodes default to layer 0 (see limitation below)
|
||||||
|
|
||||||
|
visited.add(node)
|
||||||
|
deps = graph.getDependencies(node)
|
||||||
|
|
||||||
|
if deps.isEmpty():
|
||||||
|
layers.set(node, 0) // Leaf node = Domain
|
||||||
|
return 0
|
||||||
|
|
||||||
|
maxDepth = max(deps.map(dfs))
|
||||||
|
layers.set(node, maxDepth + 1)
|
||||||
|
return maxDepth + 1
|
||||||
|
|
||||||
|
graph.nodes.forEach(dfs)
|
||||||
|
return layers
|
||||||
|
```
|
||||||
|
|
||||||
|
**Limitation:** Assumes acyclic graph; circular dependencies break this approach.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 4.5 Graph Metrics for Code Quality Assessment
|
||||||
|
|
||||||
|
**Useful Metrics:**
|
||||||
|
| Metric | Description | Good Value |
|
||||||
|
|--------|-------------|------------|
|
||||||
|
| Modularity | Clustering quality | > 0.3 |
|
||||||
|
| Density | Edge/node ratio | Low for good separation |
|
||||||
|
| Clustering Coefficient | Local clustering | Domain-dependent |
|
||||||
|
| Cyclic Rate | % of circular deps | < 0.1 (10%) |
|
||||||
|
| Average Path Length | Mean dependency distance | Lower = more coupled |
|
||||||
|
|
||||||
|
**Code Quality Interpretation:**
|
||||||
|
```
|
||||||
|
if cyclicRate > 0.5:
|
||||||
|
return "SPAGHETTI" // Cannot determine architecture
|
||||||
|
if modularity < 0.2:
|
||||||
|
return "MONOLITH" // No clear separation
|
||||||
|
if modularity > 0.5:
|
||||||
|
return "WELL_STRUCTURED" // Can determine layers
|
||||||
|
return "MODERATE"
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Configuration Patterns and Best Practices
|
||||||
|
|
||||||
|
### 5.1 Pattern Hierarchy
|
||||||
|
|
||||||
|
**Level 1: Minimal Configuration**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"architecture": "clean-architecture"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Level 2: Custom Paths**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"architecture": "clean-architecture",
|
||||||
|
"layers": {
|
||||||
|
"domain": ["src/core", "src/domain"],
|
||||||
|
"application": ["src/app", "src/use-cases"],
|
||||||
|
"infrastructure": ["src/infra", "src/adapters"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Level 3: Full Control**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"layers": [
|
||||||
|
{
|
||||||
|
"name": "domain",
|
||||||
|
"patterns": ["src/domain/**", "**/*.entity.ts"],
|
||||||
|
"allowDependOn": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "application",
|
||||||
|
"patterns": ["src/application/**", "**/*.use-case.ts"],
|
||||||
|
"allowDependOn": ["domain"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "infrastructure",
|
||||||
|
"patterns": ["src/infrastructure/**", "**/*.controller.ts"],
|
||||||
|
"allowDependOn": ["domain", "application"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 5.2 Architecture Drift Detection in CI/CD
|
||||||
|
|
||||||
|
**Best Practices from Industry:**
|
||||||
|
|
||||||
|
**Source:** Firefly Academy - https://www.firefly.ai/academy/implementing-continuous-drift-detection-in-ci-cd-pipelines-with-github-actions-workflow
|
||||||
|
|
||||||
|
**Source:** Brainboard Blog - https://blog.brainboard.co/drift-detection-best-practices/
|
||||||
|
|
||||||
|
**Key Recommendations:**
|
||||||
|
|
||||||
|
1. **Integrate into Pipeline**: Validate architecture on every code update
|
||||||
|
2. **Continuous Monitoring**: Run automated scans daily minimum, hourly for active projects
|
||||||
|
3. **Enforce IaC-Only Changes**: All changes through automated workflows
|
||||||
|
4. **Automated Reconciliation**: Regular drift detection and correction
|
||||||
|
5. **Proper Alerting**: Slack for minor drift, PagerDuty for critical
|
||||||
|
6. **Least Privilege**: Limit who can bypass architecture checks
|
||||||
|
7. **Emergency Process**: Document process for urgent manual changes
|
||||||
|
8. **Environment Refresh**: Reset after each pipeline run
|
||||||
|
|
||||||
|
**Example GitHub Actions Integration:**
|
||||||
|
```yaml
|
||||||
|
name: Architecture Check
|
||||||
|
|
||||||
|
on: [push, pull_request]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
architecture:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Check Architecture
|
||||||
|
run: npx guardian check --strict
|
||||||
|
|
||||||
|
- name: Generate Report
|
||||||
|
if: failure()
|
||||||
|
run: npx guardian report --format html
|
||||||
|
|
||||||
|
- name: Upload Report
|
||||||
|
if: failure()
|
||||||
|
        uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: architecture-report
|
||||||
|
path: architecture-report.html
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### 5.3 Presets for Common Architectures
|
||||||
|
|
||||||
|
**Clean Architecture Preset:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"preset": "clean-architecture",
|
||||||
|
"layers": {
|
||||||
|
"domain": {
|
||||||
|
"patterns": ["**/domain/**", "**/entities/**", "**/core/**"],
|
||||||
|
"allowDependOn": []
|
||||||
|
},
|
||||||
|
"application": {
|
||||||
|
"patterns": ["**/application/**", "**/use-cases/**", "**/services/**"],
|
||||||
|
"allowDependOn": ["domain"]
|
||||||
|
},
|
||||||
|
"infrastructure": {
|
||||||
|
"patterns": ["**/infrastructure/**", "**/adapters/**", "**/api/**"],
|
||||||
|
"allowDependOn": ["domain", "application"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Hexagonal Architecture Preset:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"preset": "hexagonal",
|
||||||
|
"layers": {
|
||||||
|
"core": {
|
||||||
|
"patterns": ["**/core/**", "**/domain/**"],
|
||||||
|
"allowDependOn": []
|
||||||
|
},
|
||||||
|
"ports": {
|
||||||
|
"patterns": ["**/ports/**"],
|
||||||
|
"allowDependOn": ["core"]
|
||||||
|
},
|
||||||
|
"adapters": {
|
||||||
|
"patterns": ["**/adapters/**", "**/infrastructure/**"],
|
||||||
|
"allowDependOn": ["core", "ports"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**NestJS Preset:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"preset": "nestjs",
|
||||||
|
"layers": {
|
||||||
|
"domain": {
|
||||||
|
"patterns": ["**/*.entity.ts", "**/entities/**"],
|
||||||
|
"allowDependOn": []
|
||||||
|
},
|
||||||
|
"application": {
|
||||||
|
"patterns": ["**/*.service.ts", "**/*.use-case.ts"],
|
||||||
|
"allowDependOn": ["domain"]
|
||||||
|
},
|
||||||
|
"infrastructure": {
|
||||||
|
"patterns": ["**/*.controller.ts", "**/*.module.ts", "**/*.resolver.ts"],
|
||||||
|
"allowDependOn": ["domain", "application"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Industry Consensus
|
||||||
|
|
||||||
|
### 6.1 Why Major Tools Don't Auto-Detect
|
||||||
|
|
||||||
|
| Tool | Auto-Detection | Reasoning |
|
||||||
|
|------|----------------|-----------|
|
||||||
|
| ArchUnit | ❌ No | "User knows their architecture best" |
|
||||||
|
| eslint-plugin-boundaries | ❌ No | "Too many structure variations" |
|
||||||
|
| Nx | ❌ No | "Tag-based approach is more flexible" |
|
||||||
|
| dependency-cruiser | ❌ No | "Regex patterns cover all cases" |
|
||||||
|
| SonarQube | ⚠️ Partial | "Basic analysis + config for accuracy" |
|
||||||
|
|
||||||
|
### 6.2 Common Themes Across Tools
|
||||||
|
|
||||||
|
1. **Explicit Configuration**: All tools require user-defined rules
|
||||||
|
2. **Pattern Matching**: Glob/regex patterns are universal
|
||||||
|
3. **Layered Rules**: Allow/deny dependencies between layers
|
||||||
|
4. **CI/CD Integration**: All support pipeline integration
|
||||||
|
5. **Visualization**: Optional but valuable for understanding
|
||||||
|
|
||||||
|
### 6.3 Graph Analysis Position
|
||||||
|
|
||||||
|
Graph analysis is used for:
|
||||||
|
- ✅ Circular dependency detection
|
||||||
|
- ✅ Visualization
|
||||||
|
- ✅ Metrics calculation
|
||||||
|
- ✅ Suggestion generation
|
||||||
|
|
||||||
|
Graph analysis is NOT used for:
|
||||||
|
- ❌ Primary layer detection
|
||||||
|
- ❌ Automatic architecture classification
|
||||||
|
- ❌ Rule enforcement
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Recommendations for Guardian
|
||||||
|
|
||||||
|
### 7.1 Recommended Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
|
│ Configuration Layer │
|
||||||
|
├─────────────────────────────────────────────────────────────┤
|
||||||
|
│ .guardianrc.json │ package.json │ CLI args │ Interactive │
|
||||||
|
└─────────────────────────────────────────────────────────────┘
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
|
│ Strategy Resolver │
|
||||||
|
├─────────────────────────────────────────────────────────────┤
|
||||||
|
│ 1. Explicit Config (if .guardianrc.json exists) │
|
||||||
|
│ 2. Preset Detection (if preset specified) │
|
||||||
|
│ 3. Smart Defaults (standard patterns) │
|
||||||
|
│ 4. Generic Mode (fallback - minimal checks) │
|
||||||
|
└─────────────────────────────────────────────────────────────┘
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
|
│ Analysis Engine │
|
||||||
|
├─────────────────────────────────────────────────────────────┤
|
||||||
|
│ Pattern Matcher │ Layer Detector │ Dependency Analyzer │
|
||||||
|
└─────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### 7.2 Implementation Priorities
|
||||||
|
|
||||||
|
**Phase 1: Configuration File Support**
|
||||||
|
- Add `.guardianrc.json` parser
|
||||||
|
- Support custom layer patterns
|
||||||
|
- Support custom DDD folder names
|
||||||
|
- Validate configuration on load
|
||||||
|
|
||||||
|
**Phase 2: Presets System**
|
||||||
|
- Clean Architecture preset
|
||||||
|
- Hexagonal Architecture preset
|
||||||
|
- NestJS preset
|
||||||
|
- Feature-based preset
|
||||||
|
|
||||||
|
**Phase 3: Smart Defaults**
|
||||||
|
- Try standard folder names first
|
||||||
|
- Fall back to file naming patterns
|
||||||
|
- Support common conventions
|
||||||
|
|
||||||
|
**Phase 4: Interactive Setup**
|
||||||
|
- `guardian init` command
|
||||||
|
- Project structure scanning
|
||||||
|
- Configuration file generation
|
||||||
|
- Preset recommendations
|
||||||
|
|
||||||
|
**Phase 5: Generic Mode**
|
||||||
|
- Minimal checks without layer knowledge
|
||||||
|
- Hardcode detection
|
||||||
|
- Secret detection
|
||||||
|
- Circular dependency detection
|
||||||
|
- Basic naming conventions
|
||||||
|
|
||||||
|
### 7.3 Graph Analysis - Optional Feature Only
|
||||||
|
|
||||||
|
Graph analysis should be:
|
||||||
|
- **Optional**: Not required for basic functionality
|
||||||
|
- **Informational**: For visualization and metrics
|
||||||
|
- **Suggestive**: Can propose configuration, not enforce it
|
||||||
|
|
||||||
|
**CLI Commands:**
|
||||||
|
```bash
|
||||||
|
guardian analyze --graph --output deps.svg # Visualization
|
||||||
|
guardian metrics # Quality metrics
|
||||||
|
guardian suggest # Configuration suggestions
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. Additional Resources
|
||||||
|
|
||||||
|
### Official Documentation
|
||||||
|
|
||||||
|
- ArchUnit: https://www.archunit.org/userguide/html/000_Index.html
|
||||||
|
- eslint-plugin-boundaries: https://github.com/javierbrea/eslint-plugin-boundaries
|
||||||
|
- SonarQube Architecture: https://docs.sonarsource.com/sonarqube-server/design-and-architecture/overview/
|
||||||
|
- Nx Module Boundaries: https://nx.dev/docs/features/enforce-module-boundaries
|
||||||
|
- dependency-cruiser: https://github.com/sverweij/dependency-cruiser
|
||||||
|
|
||||||
|
### Academic Papers
|
||||||
|
|
||||||
|
- Software Architecture Recovery (Wikipedia): https://en.wikipedia.org/wiki/Software_architecture_recovery
|
||||||
|
- ACDC Algorithm: https://www.researchgate.net/publication/221200422_ACDC_An_Algorithm_for_Comprehension-Driven_Clustering
|
||||||
|
- Louvain Method: https://en.wikipedia.org/wiki/Louvain_method
|
||||||
|
- Graph Modularity: https://en.wikipedia.org/wiki/Modularity_(networks)
|
||||||
|
- LLM-based SAR: https://link.springer.com/chapter/10.1007/978-3-032-02138-0_5
|
||||||
|
|
||||||
|
### Tutorials and Guides
|
||||||
|
|
||||||
|
- Clean Architecture Validation: https://betterprogramming.pub/validate-dependencies-according-to-clean-architecture-743077ea084c
|
||||||
|
- Drift Detection Best Practices: https://blog.brainboard.co/drift-detection-best-practices/
|
||||||
|
- Louvain Algorithm Tutorial: https://medium.com/data-science-in-your-pocket/community-detection-in-a-graph-using-louvain-algorithm-with-example-7a77e5e4b079
|
||||||
|
|
||||||
|
### Related Books
|
||||||
|
|
||||||
|
- **Clean Architecture** by Robert C. Martin (2017) - ISBN: 978-0134494166
|
||||||
|
- **Domain-Driven Design** by Eric Evans (2003) - ISBN: 978-0321125217
|
||||||
|
- **Implementing Domain-Driven Design** by Vaughn Vernon (2013) - ISBN: 978-0321834577
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Conclusion
|
||||||
|
|
||||||
|
The research conclusively shows that **automatic architecture detection is unreliable** and **not used by major industry tools**. The recommended approach for Guardian is:
|
||||||
|
|
||||||
|
1. **Configuration-first**: Support explicit layer definitions via `.guardianrc.json`
|
||||||
|
2. **Pattern-based**: Use glob/regex patterns for flexible matching
|
||||||
|
3. **Presets**: Provide pre-configured patterns for common architectures
|
||||||
|
4. **Smart defaults**: Try standard conventions when no config exists
|
||||||
|
5. **Generic fallback**: Provide useful checks even without architecture knowledge
|
||||||
|
6. **Graph analysis as optional**: Use for visualization and suggestions only
|
||||||
|
|
||||||
|
This approach aligns with industry best practices from ArchUnit, eslint-plugin-boundaries, SonarQube, Nx, and dependency-cruiser.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Document Version**: 1.0
|
||||||
|
**Last Updated**: 2025-11-27
|
||||||
|
**Author**: Guardian Research Team
|
||||||
|
**Questions or contributions?**
|
||||||
|
- 📧 Email: fozilbek.samiyev@gmail.com
|
||||||
|
- 🐙 GitHub: https://github.com/samiyev/puaros/issues
|
||||||
|
**Based on research as of**: November 2025
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@samiyev/guardian",
|
"name": "@samiyev/guardian",
|
||||||
"version": "0.9.0",
|
"version": "0.9.4",
|
||||||
"description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
|
"description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"puaros",
|
"puaros",
|
||||||
@@ -40,7 +40,7 @@
|
|||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/samiyev/puaros.git",
|
"url": "git+https://github.com/samiyev/puaros.git",
|
||||||
"directory": "packages/guardian"
|
"directory": "packages/guardian"
|
||||||
},
|
},
|
||||||
"bugs": {
|
"bugs": {
|
||||||
|
|||||||
@@ -215,6 +215,7 @@ export class AnalyzeProject extends UseCase<
|
|||||||
private readonly detectionPipeline: ExecuteDetection
|
private readonly detectionPipeline: ExecuteDetection
|
||||||
private readonly resultAggregator: AggregateResults
|
private readonly resultAggregator: AggregateResults
|
||||||
|
|
||||||
|
// eslint-disable-next-line max-params
|
||||||
constructor(
|
constructor(
|
||||||
fileScanner: IFileScanner,
|
fileScanner: IFileScanner,
|
||||||
codeParser: ICodeParser,
|
codeParser: ICodeParser,
|
||||||
|
|||||||
@@ -56,6 +56,7 @@ export interface DetectionResult {
|
|||||||
* Pipeline step responsible for running all detectors
|
* Pipeline step responsible for running all detectors
|
||||||
*/
|
*/
|
||||||
export class ExecuteDetection {
|
export class ExecuteDetection {
|
||||||
|
// eslint-disable-next-line max-params
|
||||||
constructor(
|
constructor(
|
||||||
private readonly hardcodeDetector: IHardcodeDetector,
|
private readonly hardcodeDetector: IHardcodeDetector,
|
||||||
private readonly namingConventionDetector: INamingConventionDetector,
|
private readonly namingConventionDetector: INamingConventionDetector,
|
||||||
@@ -240,6 +241,7 @@ export class ExecuteDetection {
|
|||||||
|
|
||||||
for (const file of sourceFiles) {
|
for (const file of sourceFiles) {
|
||||||
const namingViolations = this.namingConventionDetector.detectViolations(
|
const namingViolations = this.namingConventionDetector.detectViolations(
|
||||||
|
file.content,
|
||||||
file.path.filename,
|
file.path.filename,
|
||||||
file.layer,
|
file.layer,
|
||||||
file.path.relative,
|
file.path.relative,
|
||||||
|
|||||||
@@ -80,3 +80,12 @@ export const ANEMIC_MODEL_MESSAGES = {
|
|||||||
ENCAPSULATE_BUSINESS_RULES: "3. Encapsulate business rules inside entity methods",
|
ENCAPSULATE_BUSINESS_RULES: "3. Encapsulate business rules inside entity methods",
|
||||||
USE_DOMAIN_EVENTS: "4. Use domain events to communicate state changes",
|
USE_DOMAIN_EVENTS: "4. Use domain events to communicate state changes",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Example values used in violation messages
|
||||||
|
*/
|
||||||
|
export const VIOLATION_EXAMPLE_VALUES = {
|
||||||
|
UNKNOWN: "unknown",
|
||||||
|
USER_REPOSITORY: "UserRepository",
|
||||||
|
FIND_ONE: "findOne",
|
||||||
|
}
|
||||||
|
|||||||
@@ -24,6 +24,106 @@ export const SUGGESTION_KEYWORDS = {
|
|||||||
CONSOLE_ERROR: "console.error",
|
CONSOLE_ERROR: "console.error",
|
||||||
} as const
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for email detection
|
||||||
|
*/
|
||||||
|
export const EMAIL_CONTEXT_KEYWORDS = {
|
||||||
|
ADMIN: "admin",
|
||||||
|
SUPPORT: "support",
|
||||||
|
NOREPLY: "noreply",
|
||||||
|
NO_REPLY: "no-reply",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for API key detection
|
||||||
|
*/
|
||||||
|
export const API_KEY_CONTEXT_KEYWORDS = {
|
||||||
|
SECRET: "secret",
|
||||||
|
PUBLIC: "public",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for URL detection
|
||||||
|
*/
|
||||||
|
export const URL_CONTEXT_KEYWORDS = {
|
||||||
|
API: "api",
|
||||||
|
DATABASE: "database",
|
||||||
|
DB: "db",
|
||||||
|
MONGO: "mongo",
|
||||||
|
POSTGRES: "postgres",
|
||||||
|
PG: "pg",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for IP address detection
|
||||||
|
*/
|
||||||
|
export const IP_CONTEXT_KEYWORDS = {
|
||||||
|
SERVER: "server",
|
||||||
|
REDIS: "redis",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for file path detection
|
||||||
|
*/
|
||||||
|
export const FILE_PATH_CONTEXT_KEYWORDS = {
|
||||||
|
LOG: "log",
|
||||||
|
DATA: "data",
|
||||||
|
TEMP: "temp",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for date detection
|
||||||
|
*/
|
||||||
|
export const DATE_CONTEXT_KEYWORDS = {
|
||||||
|
DEADLINE: "deadline",
|
||||||
|
START: "start",
|
||||||
|
END: "end",
|
||||||
|
EXPIR: "expir",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for UUID detection
|
||||||
|
*/
|
||||||
|
export const UUID_CONTEXT_KEYWORDS = {
|
||||||
|
ID: "id",
|
||||||
|
IDENTIFIER: "identifier",
|
||||||
|
REQUEST: "request",
|
||||||
|
SESSION: "session",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for version detection
|
||||||
|
*/
|
||||||
|
export const VERSION_CONTEXT_KEYWORDS = {
|
||||||
|
APP: "app",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for color detection
|
||||||
|
*/
|
||||||
|
export const COLOR_CONTEXT_KEYWORDS = {
|
||||||
|
PRIMARY: "primary",
|
||||||
|
SECONDARY: "secondary",
|
||||||
|
BACKGROUND: "background",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for base64 detection
|
||||||
|
*/
|
||||||
|
export const BASE64_CONTEXT_KEYWORDS = {
|
||||||
|
TOKEN: "token",
|
||||||
|
KEY: "key",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Context keywords for config detection
|
||||||
|
*/
|
||||||
|
export const CONFIG_CONTEXT_KEYWORDS = {
|
||||||
|
ENDPOINT: "endpoint",
|
||||||
|
ROUTE: "route",
|
||||||
|
CONNECTION: "connection",
|
||||||
|
} as const
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constant name templates
|
* Constant name templates
|
||||||
*/
|
*/
|
||||||
@@ -41,6 +141,50 @@ export const CONSTANT_NAMES = {
|
|||||||
MAGIC_STRING: "MAGIC_STRING",
|
MAGIC_STRING: "MAGIC_STRING",
|
||||||
MAGIC_NUMBER: "MAGIC_NUMBER",
|
MAGIC_NUMBER: "MAGIC_NUMBER",
|
||||||
UNKNOWN_CONSTANT: "UNKNOWN_CONSTANT",
|
UNKNOWN_CONSTANT: "UNKNOWN_CONSTANT",
|
||||||
|
ADMIN_EMAIL: "ADMIN_EMAIL",
|
||||||
|
SUPPORT_EMAIL: "SUPPORT_EMAIL",
|
||||||
|
NOREPLY_EMAIL: "NOREPLY_EMAIL",
|
||||||
|
DEFAULT_EMAIL: "DEFAULT_EMAIL",
|
||||||
|
API_SECRET_KEY: "API_SECRET_KEY",
|
||||||
|
API_PUBLIC_KEY: "API_PUBLIC_KEY",
|
||||||
|
API_KEY: "API_KEY",
|
||||||
|
DATABASE_URL: "DATABASE_URL",
|
||||||
|
MONGODB_CONNECTION_STRING: "MONGODB_CONNECTION_STRING",
|
||||||
|
POSTGRES_URL: "POSTGRES_URL",
|
||||||
|
BASE_URL: "BASE_URL",
|
||||||
|
SERVER_IP: "SERVER_IP",
|
||||||
|
DATABASE_HOST: "DATABASE_HOST",
|
||||||
|
REDIS_HOST: "REDIS_HOST",
|
||||||
|
HOST_IP: "HOST_IP",
|
||||||
|
LOG_FILE_PATH: "LOG_FILE_PATH",
|
||||||
|
CONFIG_FILE_PATH: "CONFIG_FILE_PATH",
|
||||||
|
DATA_DIR_PATH: "DATA_DIR_PATH",
|
||||||
|
TEMP_DIR_PATH: "TEMP_DIR_PATH",
|
||||||
|
FILE_PATH: "FILE_PATH",
|
||||||
|
DEADLINE: "DEADLINE",
|
||||||
|
START_DATE: "START_DATE",
|
||||||
|
END_DATE: "END_DATE",
|
||||||
|
EXPIRATION_DATE: "EXPIRATION_DATE",
|
||||||
|
DEFAULT_DATE: "DEFAULT_DATE",
|
||||||
|
DEFAULT_ID: "DEFAULT_ID",
|
||||||
|
REQUEST_ID: "REQUEST_ID",
|
||||||
|
SESSION_ID: "SESSION_ID",
|
||||||
|
UUID_CONSTANT: "UUID_CONSTANT",
|
||||||
|
API_VERSION: "API_VERSION",
|
||||||
|
APP_VERSION: "APP_VERSION",
|
||||||
|
VERSION: "VERSION",
|
||||||
|
PRIMARY_COLOR: "PRIMARY_COLOR",
|
||||||
|
SECONDARY_COLOR: "SECONDARY_COLOR",
|
||||||
|
BACKGROUND_COLOR: "BACKGROUND_COLOR",
|
||||||
|
COLOR_CONSTANT: "COLOR_CONSTANT",
|
||||||
|
MAC_ADDRESS: "MAC_ADDRESS",
|
||||||
|
ENCODED_TOKEN: "ENCODED_TOKEN",
|
||||||
|
ENCODED_KEY: "ENCODED_KEY",
|
||||||
|
BASE64_VALUE: "BASE64_VALUE",
|
||||||
|
API_ENDPOINT: "API_ENDPOINT",
|
||||||
|
ROUTE_PATH: "ROUTE_PATH",
|
||||||
|
CONNECTION_STRING: "CONNECTION_STRING",
|
||||||
|
CONFIG_VALUE: "CONFIG_VALUE",
|
||||||
} as const
|
} as const
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -50,4 +194,8 @@ export const LOCATIONS = {
|
|||||||
SHARED_CONSTANTS: "shared/constants",
|
SHARED_CONSTANTS: "shared/constants",
|
||||||
DOMAIN_CONSTANTS: "domain/constants",
|
DOMAIN_CONSTANTS: "domain/constants",
|
||||||
INFRASTRUCTURE_CONFIG: "infrastructure/config",
|
INFRASTRUCTURE_CONFIG: "infrastructure/config",
|
||||||
|
CONFIG_ENVIRONMENT: "src/config/environment.ts",
|
||||||
|
CONFIG_CONTACTS: "src/config/contacts.ts",
|
||||||
|
CONFIG_PATHS: "src/config/paths.ts",
|
||||||
|
CONFIG_DATES: "src/config/dates.ts",
|
||||||
} as const
|
} as const
|
||||||
|
|||||||
@@ -7,12 +7,14 @@ export interface INamingConventionDetector {
|
|||||||
/**
|
/**
|
||||||
* Detects naming convention violations for a given file
|
* Detects naming convention violations for a given file
|
||||||
*
|
*
|
||||||
|
* @param content - Source code content to analyze
|
||||||
* @param fileName - Name of the file to check (e.g., "UserService.ts")
|
* @param fileName - Name of the file to check (e.g., "UserService.ts")
|
||||||
* @param layer - Architectural layer of the file (domain, application, infrastructure, shared)
|
* @param layer - Architectural layer of the file (domain, application, infrastructure, shared)
|
||||||
* @param filePath - Relative file path for context
|
* @param filePath - Relative file path for context
|
||||||
* @returns Array of naming convention violations
|
* @returns Array of naming convention violations
|
||||||
*/
|
*/
|
||||||
detectViolations(
|
detectViolations(
|
||||||
|
content: string,
|
||||||
fileName: string,
|
fileName: string,
|
||||||
layer: string | undefined,
|
layer: string | undefined,
|
||||||
filePath: string,
|
filePath: string,
|
||||||
|
|||||||
@@ -1,6 +1,21 @@
|
|||||||
import { ValueObject } from "./ValueObject"
|
import { ValueObject } from "./ValueObject"
|
||||||
import { DETECTION_PATTERNS, HARDCODE_TYPES } from "../../shared/constants/rules"
|
import { DETECTION_PATTERNS, HARDCODE_TYPES } from "../../shared/constants/rules"
|
||||||
import { CONSTANT_NAMES, LOCATIONS, SUGGESTION_KEYWORDS } from "../constants/Suggestions"
|
import {
|
||||||
|
API_KEY_CONTEXT_KEYWORDS,
|
||||||
|
BASE64_CONTEXT_KEYWORDS,
|
||||||
|
COLOR_CONTEXT_KEYWORDS,
|
||||||
|
CONFIG_CONTEXT_KEYWORDS,
|
||||||
|
CONSTANT_NAMES,
|
||||||
|
DATE_CONTEXT_KEYWORDS,
|
||||||
|
EMAIL_CONTEXT_KEYWORDS,
|
||||||
|
FILE_PATH_CONTEXT_KEYWORDS,
|
||||||
|
IP_CONTEXT_KEYWORDS,
|
||||||
|
LOCATIONS,
|
||||||
|
SUGGESTION_KEYWORDS,
|
||||||
|
URL_CONTEXT_KEYWORDS,
|
||||||
|
UUID_CONTEXT_KEYWORDS,
|
||||||
|
VERSION_CONTEXT_KEYWORDS,
|
||||||
|
} from "../constants/Suggestions"
|
||||||
|
|
||||||
export type HardcodeType = (typeof HARDCODE_TYPES)[keyof typeof HARDCODE_TYPES]
|
export type HardcodeType = (typeof HARDCODE_TYPES)[keyof typeof HARDCODE_TYPES]
|
||||||
|
|
||||||
@@ -156,156 +171,172 @@ export class HardcodedValue extends ValueObject<HardcodedValueProps> {
|
|||||||
return `${CONSTANT_NAMES.MAGIC_NUMBER}_${String(value)}`
|
return `${CONSTANT_NAMES.MAGIC_NUMBER}_${String(value)}`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// eslint-disable-next-line complexity, max-lines-per-function
|
||||||
private suggestStringConstantName(): string {
|
private suggestStringConstantName(): string {
|
||||||
const value = String(this.props.value)
|
const value = String(this.props.value)
|
||||||
const context = this.props.context.toLowerCase()
|
const context = this.props.context.toLowerCase()
|
||||||
const valueType = this.props.valueType
|
const valueType = this.props.valueType
|
||||||
|
|
||||||
if (valueType === "email") {
|
if (valueType === "email") {
|
||||||
if (context.includes("admin")) {
|
if (context.includes(EMAIL_CONTEXT_KEYWORDS.ADMIN)) {
|
||||||
return "ADMIN_EMAIL"
|
return CONSTANT_NAMES.ADMIN_EMAIL
|
||||||
}
|
}
|
||||||
if (context.includes("support")) {
|
if (context.includes(EMAIL_CONTEXT_KEYWORDS.SUPPORT)) {
|
||||||
return "SUPPORT_EMAIL"
|
return CONSTANT_NAMES.SUPPORT_EMAIL
|
||||||
}
|
}
|
||||||
if (context.includes("noreply") || context.includes("no-reply")) {
|
if (
|
||||||
return "NOREPLY_EMAIL"
|
context.includes(EMAIL_CONTEXT_KEYWORDS.NOREPLY) ||
|
||||||
|
context.includes(EMAIL_CONTEXT_KEYWORDS.NO_REPLY)
|
||||||
|
) {
|
||||||
|
return CONSTANT_NAMES.NOREPLY_EMAIL
|
||||||
}
|
}
|
||||||
return "DEFAULT_EMAIL"
|
return CONSTANT_NAMES.DEFAULT_EMAIL
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "api_key") {
|
if (valueType === "api_key") {
|
||||||
if (context.includes("secret")) {
|
if (context.includes(API_KEY_CONTEXT_KEYWORDS.SECRET)) {
|
||||||
return "API_SECRET_KEY"
|
return CONSTANT_NAMES.API_SECRET_KEY
|
||||||
}
|
}
|
||||||
if (context.includes("public")) {
|
if (context.includes(API_KEY_CONTEXT_KEYWORDS.PUBLIC)) {
|
||||||
return "API_PUBLIC_KEY"
|
return CONSTANT_NAMES.API_PUBLIC_KEY
|
||||||
}
|
}
|
||||||
return "API_KEY"
|
return CONSTANT_NAMES.API_KEY
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "url") {
|
if (valueType === "url") {
|
||||||
if (context.includes("api")) {
|
if (context.includes(URL_CONTEXT_KEYWORDS.API)) {
|
||||||
return "API_BASE_URL"
|
return CONSTANT_NAMES.API_BASE_URL
|
||||||
}
|
}
|
||||||
if (context.includes("database") || context.includes("db")) {
|
if (
|
||||||
return "DATABASE_URL"
|
context.includes(URL_CONTEXT_KEYWORDS.DATABASE) ||
|
||||||
|
context.includes(URL_CONTEXT_KEYWORDS.DB)
|
||||||
|
) {
|
||||||
|
return CONSTANT_NAMES.DATABASE_URL
|
||||||
}
|
}
|
||||||
if (context.includes("mongo")) {
|
if (context.includes(URL_CONTEXT_KEYWORDS.MONGO)) {
|
||||||
return "MONGODB_CONNECTION_STRING"
|
return CONSTANT_NAMES.MONGODB_CONNECTION_STRING
|
||||||
}
|
}
|
||||||
if (context.includes("postgres") || context.includes("pg")) {
|
if (
|
||||||
return "POSTGRES_URL"
|
context.includes(URL_CONTEXT_KEYWORDS.POSTGRES) ||
|
||||||
|
context.includes(URL_CONTEXT_KEYWORDS.PG)
|
||||||
|
) {
|
||||||
|
return CONSTANT_NAMES.POSTGRES_URL
|
||||||
}
|
}
|
||||||
return "BASE_URL"
|
return CONSTANT_NAMES.BASE_URL
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "ip_address") {
|
if (valueType === "ip_address") {
|
||||||
if (context.includes("server")) {
|
if (context.includes(IP_CONTEXT_KEYWORDS.SERVER)) {
|
||||||
return "SERVER_IP"
|
return CONSTANT_NAMES.SERVER_IP
|
||||||
}
|
}
|
||||||
if (context.includes("database") || context.includes("db")) {
|
if (
|
||||||
return "DATABASE_HOST"
|
context.includes(URL_CONTEXT_KEYWORDS.DATABASE) ||
|
||||||
|
context.includes(URL_CONTEXT_KEYWORDS.DB)
|
||||||
|
) {
|
||||||
|
return CONSTANT_NAMES.DATABASE_HOST
|
||||||
}
|
}
|
||||||
if (context.includes("redis")) {
|
if (context.includes(IP_CONTEXT_KEYWORDS.REDIS)) {
|
||||||
return "REDIS_HOST"
|
return CONSTANT_NAMES.REDIS_HOST
|
||||||
}
|
}
|
||||||
return "HOST_IP"
|
return CONSTANT_NAMES.HOST_IP
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "file_path") {
|
if (valueType === "file_path") {
|
||||||
if (context.includes("log")) {
|
if (context.includes(FILE_PATH_CONTEXT_KEYWORDS.LOG)) {
|
||||||
return "LOG_FILE_PATH"
|
return CONSTANT_NAMES.LOG_FILE_PATH
|
||||||
}
|
}
|
||||||
if (context.includes("config")) {
|
if (context.includes(SUGGESTION_KEYWORDS.CONFIG)) {
|
||||||
return "CONFIG_FILE_PATH"
|
return CONSTANT_NAMES.CONFIG_FILE_PATH
|
||||||
}
|
}
|
||||||
if (context.includes("data")) {
|
if (context.includes(FILE_PATH_CONTEXT_KEYWORDS.DATA)) {
|
||||||
return "DATA_DIR_PATH"
|
return CONSTANT_NAMES.DATA_DIR_PATH
|
||||||
}
|
}
|
||||||
if (context.includes("temp")) {
|
if (context.includes(FILE_PATH_CONTEXT_KEYWORDS.TEMP)) {
|
||||||
return "TEMP_DIR_PATH"
|
return CONSTANT_NAMES.TEMP_DIR_PATH
|
||||||
}
|
}
|
||||||
return "FILE_PATH"
|
return CONSTANT_NAMES.FILE_PATH
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "date") {
|
if (valueType === "date") {
|
||||||
if (context.includes("deadline")) {
|
if (context.includes(DATE_CONTEXT_KEYWORDS.DEADLINE)) {
|
||||||
return "DEADLINE"
|
return CONSTANT_NAMES.DEADLINE
|
||||||
}
|
}
|
||||||
if (context.includes("start")) {
|
if (context.includes(DATE_CONTEXT_KEYWORDS.START)) {
|
||||||
return "START_DATE"
|
return CONSTANT_NAMES.START_DATE
|
||||||
}
|
}
|
||||||
if (context.includes("end")) {
|
if (context.includes(DATE_CONTEXT_KEYWORDS.END)) {
|
||||||
return "END_DATE"
|
return CONSTANT_NAMES.END_DATE
|
||||||
}
|
}
|
||||||
if (context.includes("expir")) {
|
if (context.includes(DATE_CONTEXT_KEYWORDS.EXPIR)) {
|
||||||
return "EXPIRATION_DATE"
|
return CONSTANT_NAMES.EXPIRATION_DATE
|
||||||
}
|
}
|
||||||
return "DEFAULT_DATE"
|
return CONSTANT_NAMES.DEFAULT_DATE
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "uuid") {
|
if (valueType === "uuid") {
|
||||||
if (context.includes("id") || context.includes("identifier")) {
|
if (
|
||||||
return "DEFAULT_ID"
|
context.includes(UUID_CONTEXT_KEYWORDS.ID) ||
|
||||||
|
context.includes(UUID_CONTEXT_KEYWORDS.IDENTIFIER)
|
||||||
|
) {
|
||||||
|
return CONSTANT_NAMES.DEFAULT_ID
|
||||||
}
|
}
|
||||||
if (context.includes("request")) {
|
if (context.includes(UUID_CONTEXT_KEYWORDS.REQUEST)) {
|
||||||
return "REQUEST_ID"
|
return CONSTANT_NAMES.REQUEST_ID
|
||||||
}
|
}
|
||||||
if (context.includes("session")) {
|
if (context.includes(UUID_CONTEXT_KEYWORDS.SESSION)) {
|
||||||
return "SESSION_ID"
|
return CONSTANT_NAMES.SESSION_ID
|
||||||
}
|
}
|
||||||
return "UUID_CONSTANT"
|
return CONSTANT_NAMES.UUID_CONSTANT
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "version") {
|
if (valueType === "version") {
|
||||||
if (context.includes("api")) {
|
if (context.includes(URL_CONTEXT_KEYWORDS.API)) {
|
||||||
return "API_VERSION"
|
return CONSTANT_NAMES.API_VERSION
|
||||||
}
|
}
|
||||||
if (context.includes("app")) {
|
if (context.includes(VERSION_CONTEXT_KEYWORDS.APP)) {
|
||||||
return "APP_VERSION"
|
return CONSTANT_NAMES.APP_VERSION
|
||||||
}
|
}
|
||||||
return "VERSION"
|
return CONSTANT_NAMES.VERSION
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "color") {
|
if (valueType === "color") {
|
||||||
if (context.includes("primary")) {
|
if (context.includes(COLOR_CONTEXT_KEYWORDS.PRIMARY)) {
|
||||||
return "PRIMARY_COLOR"
|
return CONSTANT_NAMES.PRIMARY_COLOR
|
||||||
}
|
}
|
||||||
if (context.includes("secondary")) {
|
if (context.includes(COLOR_CONTEXT_KEYWORDS.SECONDARY)) {
|
||||||
return "SECONDARY_COLOR"
|
return CONSTANT_NAMES.SECONDARY_COLOR
|
||||||
}
|
}
|
||||||
if (context.includes("background")) {
|
if (context.includes(COLOR_CONTEXT_KEYWORDS.BACKGROUND)) {
|
||||||
return "BACKGROUND_COLOR"
|
return CONSTANT_NAMES.BACKGROUND_COLOR
|
||||||
}
|
}
|
||||||
return "COLOR_CONSTANT"
|
return CONSTANT_NAMES.COLOR_CONSTANT
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "mac_address") {
|
if (valueType === "mac_address") {
|
||||||
return "MAC_ADDRESS"
|
return CONSTANT_NAMES.MAC_ADDRESS
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "base64") {
|
if (valueType === "base64") {
|
||||||
if (context.includes("token")) {
|
if (context.includes(BASE64_CONTEXT_KEYWORDS.TOKEN)) {
|
||||||
return "ENCODED_TOKEN"
|
return CONSTANT_NAMES.ENCODED_TOKEN
|
||||||
}
|
}
|
||||||
if (context.includes("key")) {
|
if (context.includes(BASE64_CONTEXT_KEYWORDS.KEY)) {
|
||||||
return "ENCODED_KEY"
|
return CONSTANT_NAMES.ENCODED_KEY
|
||||||
}
|
}
|
||||||
return "BASE64_VALUE"
|
return CONSTANT_NAMES.BASE64_VALUE
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "config") {
|
if (valueType === "config") {
|
||||||
if (context.includes("endpoint")) {
|
if (context.includes(CONFIG_CONTEXT_KEYWORDS.ENDPOINT)) {
|
||||||
return "API_ENDPOINT"
|
return CONSTANT_NAMES.API_ENDPOINT
|
||||||
}
|
}
|
||||||
if (context.includes("route")) {
|
if (context.includes(CONFIG_CONTEXT_KEYWORDS.ROUTE)) {
|
||||||
return "ROUTE_PATH"
|
return CONSTANT_NAMES.ROUTE_PATH
|
||||||
}
|
}
|
||||||
if (context.includes("connection")) {
|
if (context.includes(CONFIG_CONTEXT_KEYWORDS.CONNECTION)) {
|
||||||
return "CONNECTION_STRING"
|
return CONSTANT_NAMES.CONNECTION_STRING
|
||||||
}
|
}
|
||||||
return "CONFIG_VALUE"
|
return CONSTANT_NAMES.CONFIG_VALUE
|
||||||
}
|
}
|
||||||
|
|
||||||
if (value.includes(SUGGESTION_KEYWORDS.HTTP)) {
|
if (value.includes(SUGGESTION_KEYWORDS.HTTP)) {
|
||||||
@@ -339,19 +370,19 @@ export class HardcodedValue extends ValueObject<HardcodedValueProps> {
|
|||||||
const valueType = this.props.valueType
|
const valueType = this.props.valueType
|
||||||
|
|
||||||
if (valueType === "api_key" || valueType === "url" || valueType === "ip_address") {
|
if (valueType === "api_key" || valueType === "url" || valueType === "ip_address") {
|
||||||
return "src/config/environment.ts"
|
return LOCATIONS.CONFIG_ENVIRONMENT
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "email") {
|
if (valueType === "email") {
|
||||||
return "src/config/contacts.ts"
|
return LOCATIONS.CONFIG_CONTACTS
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "file_path") {
|
if (valueType === "file_path") {
|
||||||
return "src/config/paths.ts"
|
return LOCATIONS.CONFIG_PATHS
|
||||||
}
|
}
|
||||||
|
|
||||||
if (valueType === "date") {
|
if (valueType === "date") {
|
||||||
return "src/config/dates.ts"
|
return LOCATIONS.CONFIG_DATES
|
||||||
}
|
}
|
||||||
|
|
||||||
if (
|
if (
|
||||||
|
|||||||
@@ -1,6 +1,10 @@
|
|||||||
import { ValueObject } from "./ValueObject"
|
import { ValueObject } from "./ValueObject"
|
||||||
import { REPOSITORY_VIOLATION_TYPES } from "../../shared/constants/rules"
|
import { REPOSITORY_VIOLATION_TYPES } from "../../shared/constants/rules"
|
||||||
import { REPOSITORY_FALLBACK_SUGGESTIONS, REPOSITORY_PATTERN_MESSAGES } from "../constants/Messages"
|
import {
|
||||||
|
REPOSITORY_FALLBACK_SUGGESTIONS,
|
||||||
|
REPOSITORY_PATTERN_MESSAGES,
|
||||||
|
VIOLATION_EXAMPLE_VALUES,
|
||||||
|
} from "../constants/Messages"
|
||||||
|
|
||||||
interface RepositoryViolationProps {
|
interface RepositoryViolationProps {
|
||||||
readonly violationType:
|
readonly violationType:
|
||||||
@@ -105,16 +109,16 @@ export class RepositoryViolation extends ValueObject<RepositoryViolationProps> {
|
|||||||
public getMessage(): string {
|
public getMessage(): string {
|
||||||
switch (this.props.violationType) {
|
switch (this.props.violationType) {
|
||||||
case REPOSITORY_VIOLATION_TYPES.ORM_TYPE_IN_INTERFACE:
|
case REPOSITORY_VIOLATION_TYPES.ORM_TYPE_IN_INTERFACE:
|
||||||
return `Repository interface uses ORM-specific type '${this.props.ormType || "unknown"}'. Domain should not depend on infrastructure concerns.`
|
return `Repository interface uses ORM-specific type '${this.props.ormType || VIOLATION_EXAMPLE_VALUES.UNKNOWN}'. Domain should not depend on infrastructure concerns.`
|
||||||
|
|
||||||
case REPOSITORY_VIOLATION_TYPES.CONCRETE_REPOSITORY_IN_USE_CASE:
|
case REPOSITORY_VIOLATION_TYPES.CONCRETE_REPOSITORY_IN_USE_CASE:
|
||||||
return `Use case depends on concrete repository '${this.props.repositoryName || "unknown"}' instead of interface. Use dependency inversion.`
|
return `Use case depends on concrete repository '${this.props.repositoryName || VIOLATION_EXAMPLE_VALUES.UNKNOWN}' instead of interface. Use dependency inversion.`
|
||||||
|
|
||||||
case REPOSITORY_VIOLATION_TYPES.NEW_REPOSITORY_IN_USE_CASE:
|
case REPOSITORY_VIOLATION_TYPES.NEW_REPOSITORY_IN_USE_CASE:
|
||||||
return `Use case creates repository with 'new ${this.props.repositoryName || "Repository"}()'. Use dependency injection instead.`
|
return `Use case creates repository with 'new ${this.props.repositoryName || "Repository"}()'. Use dependency injection instead.`
|
||||||
|
|
||||||
case REPOSITORY_VIOLATION_TYPES.NON_DOMAIN_METHOD_NAME:
|
case REPOSITORY_VIOLATION_TYPES.NON_DOMAIN_METHOD_NAME:
|
||||||
return `Repository method '${this.props.methodName || "unknown"}' uses technical name. Use domain language instead.`
|
return `Repository method '${this.props.methodName || VIOLATION_EXAMPLE_VALUES.UNKNOWN}' uses technical name. Use domain language instead.`
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return `Repository pattern violation: ${this.props.details}`
|
return `Repository pattern violation: ${this.props.details}`
|
||||||
@@ -159,8 +163,8 @@ export class RepositoryViolation extends ValueObject<RepositoryViolationProps> {
|
|||||||
REPOSITORY_PATTERN_MESSAGES.STEP_USE_DI,
|
REPOSITORY_PATTERN_MESSAGES.STEP_USE_DI,
|
||||||
"",
|
"",
|
||||||
REPOSITORY_PATTERN_MESSAGES.EXAMPLE_PREFIX,
|
REPOSITORY_PATTERN_MESSAGES.EXAMPLE_PREFIX,
|
||||||
`❌ Bad: constructor(private repo: ${this.props.repositoryName || "UserRepository"})`,
|
`❌ Bad: constructor(private repo: ${this.props.repositoryName || VIOLATION_EXAMPLE_VALUES.USER_REPOSITORY})`,
|
||||||
`✅ Good: constructor(private repo: I${this.props.repositoryName?.replace(/^.*?([A-Z]\w+)$/, "$1") || "UserRepository"})`,
|
`✅ Good: constructor(private repo: I${this.props.repositoryName?.replace(/^.*?([A-Z]\w+)$/, "$1") || VIOLATION_EXAMPLE_VALUES.USER_REPOSITORY})`,
|
||||||
].join("\n")
|
].join("\n")
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -200,7 +204,7 @@ export class RepositoryViolation extends ValueObject<RepositoryViolationProps> {
|
|||||||
REPOSITORY_PATTERN_MESSAGES.STEP_AVOID_TECHNICAL,
|
REPOSITORY_PATTERN_MESSAGES.STEP_AVOID_TECHNICAL,
|
||||||
"",
|
"",
|
||||||
REPOSITORY_PATTERN_MESSAGES.EXAMPLE_PREFIX,
|
REPOSITORY_PATTERN_MESSAGES.EXAMPLE_PREFIX,
|
||||||
`❌ Bad: ${this.props.methodName || "findOne"}()`,
|
`❌ Bad: ${this.props.methodName || VIOLATION_EXAMPLE_VALUES.FIND_ONE}()`,
|
||||||
`✅ Good: ${finalSuggestion}`,
|
`✅ Good: ${finalSuggestion}`,
|
||||||
].join("\n")
|
].join("\n")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,7 @@
|
|||||||
|
import pkg from "../package.json"
|
||||||
|
|
||||||
|
export const VERSION = pkg.version
|
||||||
|
|
||||||
export * from "./domain"
|
export * from "./domain"
|
||||||
export * from "./application"
|
export * from "./application"
|
||||||
export * from "./infrastructure"
|
export * from "./infrastructure"
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import Parser from "tree-sitter"
|
import Parser from "tree-sitter"
|
||||||
import { IHardcodeDetector } from "../../domain/services/IHardcodeDetector"
|
import { IHardcodeDetector } from "../../domain/services/IHardcodeDetector"
|
||||||
import { HardcodedValue } from "../../domain/value-objects/HardcodedValue"
|
import { HardcodedValue } from "../../domain/value-objects/HardcodedValue"
|
||||||
|
import { FILE_EXTENSIONS } from "../../shared/constants"
|
||||||
import { CodeParser } from "../parsers/CodeParser"
|
import { CodeParser } from "../parsers/CodeParser"
|
||||||
import { AstBooleanAnalyzer } from "../strategies/AstBooleanAnalyzer"
|
import { AstBooleanAnalyzer } from "../strategies/AstBooleanAnalyzer"
|
||||||
import { AstConfigObjectAnalyzer } from "../strategies/AstConfigObjectAnalyzer"
|
import { AstConfigObjectAnalyzer } from "../strategies/AstConfigObjectAnalyzer"
|
||||||
@@ -112,9 +113,9 @@ export class HardcodeDetector implements IHardcodeDetector {
|
|||||||
* Parses code based on file extension
|
* Parses code based on file extension
|
||||||
*/
|
*/
|
||||||
private parseCode(code: string, filePath: string): Parser.Tree {
|
private parseCode(code: string, filePath: string): Parser.Tree {
|
||||||
if (filePath.endsWith(".tsx")) {
|
if (filePath.endsWith(FILE_EXTENSIONS.TYPESCRIPT_JSX)) {
|
||||||
return this.parser.parseTsx(code)
|
return this.parser.parseTsx(code)
|
||||||
} else if (filePath.endsWith(".ts")) {
|
} else if (filePath.endsWith(FILE_EXTENSIONS.TYPESCRIPT)) {
|
||||||
return this.parser.parseTypeScript(code)
|
return this.parser.parseTypeScript(code)
|
||||||
}
|
}
|
||||||
return this.parser.parseJavaScript(code)
|
return this.parser.parseJavaScript(code)
|
||||||
|
|||||||
@@ -1,37 +1,72 @@
|
|||||||
|
import Parser from "tree-sitter"
|
||||||
import { INamingConventionDetector } from "../../domain/services/INamingConventionDetector"
|
import { INamingConventionDetector } from "../../domain/services/INamingConventionDetector"
|
||||||
import { NamingViolation } from "../../domain/value-objects/NamingViolation"
|
import { NamingViolation } from "../../domain/value-objects/NamingViolation"
|
||||||
import {
|
import { FILE_EXTENSIONS } from "../../shared/constants"
|
||||||
LAYERS,
|
import { EXCLUDED_FILES } from "../constants/detectorPatterns"
|
||||||
NAMING_PATTERNS,
|
import { CodeParser } from "../parsers/CodeParser"
|
||||||
NAMING_VIOLATION_TYPES,
|
import { AstClassNameAnalyzer } from "../strategies/naming/AstClassNameAnalyzer"
|
||||||
USE_CASE_VERBS,
|
import { AstFunctionNameAnalyzer } from "../strategies/naming/AstFunctionNameAnalyzer"
|
||||||
} from "../../shared/constants/rules"
|
import { AstInterfaceNameAnalyzer } from "../strategies/naming/AstInterfaceNameAnalyzer"
|
||||||
import {
|
import { AstNamingTraverser } from "../strategies/naming/AstNamingTraverser"
|
||||||
EXCLUDED_FILES,
|
import { AstVariableNameAnalyzer } from "../strategies/naming/AstVariableNameAnalyzer"
|
||||||
FILE_SUFFIXES,
|
|
||||||
NAMING_ERROR_MESSAGES,
|
|
||||||
PATH_PATTERNS,
|
|
||||||
PATTERN_WORDS,
|
|
||||||
} from "../constants/detectorPatterns"
|
|
||||||
import { NAMING_SUGGESTION_DEFAULT } from "../constants/naming-patterns"
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Detects naming convention violations based on Clean Architecture layers
|
* Detects naming convention violations using AST-based analysis
|
||||||
*
|
*
|
||||||
* This detector ensures that files follow naming conventions appropriate to their layer:
|
* This detector uses Abstract Syntax Tree (AST) analysis via tree-sitter to identify
|
||||||
* - Domain: Entities (nouns), Services (*Service), Value Objects, Repository interfaces (I*Repository)
|
* naming convention violations in classes, interfaces, functions, and variables
|
||||||
* - Application: Use cases (verbs), DTOs (*Dto/*Request/*Response), Mappers (*Mapper)
|
* according to Clean Architecture layer rules.
|
||||||
* - Infrastructure: Controllers (*Controller), Repository implementations (*Repository), Services (*Service/*Adapter)
|
*
|
||||||
|
* The detector uses a modular architecture with specialized components:
|
||||||
|
* - AstClassNameAnalyzer: Analyzes class names
|
||||||
|
* - AstInterfaceNameAnalyzer: Analyzes interface names
|
||||||
|
* - AstFunctionNameAnalyzer: Analyzes function and method names
|
||||||
|
* - AstVariableNameAnalyzer: Analyzes variable and constant names
|
||||||
|
* - AstNamingTraverser: Traverses the AST and coordinates analyzers
|
||||||
*
|
*
|
||||||
* @example
|
* @example
|
||||||
* ```typescript
|
* ```typescript
|
||||||
* const detector = new NamingConventionDetector()
|
* const detector = new NamingConventionDetector()
|
||||||
* const violations = detector.detectViolations('UserDto.ts', 'domain', 'src/domain/UserDto.ts')
|
* const code = `
|
||||||
* // Returns violation: DTOs should not be in domain layer
|
* class userService { // Wrong: should be UserService
|
||||||
|
* GetUser() {} // Wrong: should be getUser
|
||||||
|
* }
|
||||||
|
* `
|
||||||
|
* const violations = detector.detectViolations(code, 'UserService.ts', 'domain', 'src/domain/UserService.ts')
|
||||||
|
* // Returns array of NamingViolation objects
|
||||||
* ```
|
* ```
|
||||||
*/
|
*/
|
||||||
export class NamingConventionDetector implements INamingConventionDetector {
|
export class NamingConventionDetector implements INamingConventionDetector {
|
||||||
|
private readonly parser: CodeParser
|
||||||
|
private readonly traverser: AstNamingTraverser
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.parser = new CodeParser()
|
||||||
|
|
||||||
|
const classAnalyzer = new AstClassNameAnalyzer()
|
||||||
|
const interfaceAnalyzer = new AstInterfaceNameAnalyzer()
|
||||||
|
const functionAnalyzer = new AstFunctionNameAnalyzer()
|
||||||
|
const variableAnalyzer = new AstVariableNameAnalyzer()
|
||||||
|
|
||||||
|
this.traverser = new AstNamingTraverser(
|
||||||
|
classAnalyzer,
|
||||||
|
interfaceAnalyzer,
|
||||||
|
functionAnalyzer,
|
||||||
|
variableAnalyzer,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Detects naming convention violations in the given code
|
||||||
|
*
|
||||||
|
* @param content - Source code to analyze
|
||||||
|
* @param fileName - Name of the file being analyzed
|
||||||
|
* @param layer - Architectural layer (domain, application, infrastructure, shared)
|
||||||
|
* @param filePath - File path for context (used in violation reports)
|
||||||
|
* @returns Array of detected naming violations
|
||||||
|
*/
|
||||||
public detectViolations(
|
public detectViolations(
|
||||||
|
content: string,
|
||||||
fileName: string,
|
fileName: string,
|
||||||
layer: string | undefined,
|
layer: string | undefined,
|
||||||
filePath: string,
|
filePath: string,
|
||||||
@@ -44,235 +79,23 @@ export class NamingConventionDetector implements INamingConventionDetector {
|
|||||||
return []
|
return []
|
||||||
}
|
}
|
||||||
|
|
||||||
switch (layer) {
|
if (!content || content.trim().length === 0) {
|
||||||
case LAYERS.DOMAIN:
|
return []
|
||||||
return this.checkDomainLayer(fileName, filePath)
|
|
||||||
case LAYERS.APPLICATION:
|
|
||||||
return this.checkApplicationLayer(fileName, filePath)
|
|
||||||
case LAYERS.INFRASTRUCTURE:
|
|
||||||
return this.checkInfrastructureLayer(fileName, filePath)
|
|
||||||
case LAYERS.SHARED:
|
|
||||||
return []
|
|
||||||
default:
|
|
||||||
return []
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const tree = this.parseCode(content, filePath)
|
||||||
|
return this.traverser.traverse(tree, content, layer, filePath)
|
||||||
}
|
}
|
||||||
|
|
||||||
private checkDomainLayer(fileName: string, filePath: string): NamingViolation[] {
|
/**
|
||||||
const violations: NamingViolation[] = []
|
* Parses code based on file extension
|
||||||
|
*/
|
||||||
const forbiddenPatterns = NAMING_PATTERNS.DOMAIN.ENTITY.forbidden ?? []
|
private parseCode(code: string, filePath: string): Parser.Tree {
|
||||||
|
if (filePath.endsWith(FILE_EXTENSIONS.TYPESCRIPT_JSX)) {
|
||||||
for (const forbidden of forbiddenPatterns) {
|
return this.parser.parseTsx(code)
|
||||||
if (fileName.includes(forbidden)) {
|
} else if (filePath.endsWith(FILE_EXTENSIONS.TYPESCRIPT)) {
|
||||||
violations.push(
|
return this.parser.parseTypeScript(code)
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.FORBIDDEN_PATTERN,
|
|
||||||
LAYERS.DOMAIN,
|
|
||||||
filePath,
|
|
||||||
NAMING_ERROR_MESSAGES.DOMAIN_FORBIDDEN,
|
|
||||||
fileName,
|
|
||||||
NAMING_SUGGESTION_DEFAULT,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
return this.parser.parseJavaScript(code)
|
||||||
if (fileName.endsWith(FILE_SUFFIXES.SERVICE)) {
|
|
||||||
if (!NAMING_PATTERNS.DOMAIN.SERVICE.pattern.test(fileName)) {
|
|
||||||
violations.push(
|
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.WRONG_CASE,
|
|
||||||
LAYERS.DOMAIN,
|
|
||||||
filePath,
|
|
||||||
NAMING_PATTERNS.DOMAIN.SERVICE.description,
|
|
||||||
fileName,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
fileName.startsWith(PATTERN_WORDS.I_PREFIX) &&
|
|
||||||
fileName.includes(PATTERN_WORDS.REPOSITORY)
|
|
||||||
) {
|
|
||||||
if (!NAMING_PATTERNS.DOMAIN.REPOSITORY_INTERFACE.pattern.test(fileName)) {
|
|
||||||
violations.push(
|
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.WRONG_PREFIX,
|
|
||||||
LAYERS.DOMAIN,
|
|
||||||
filePath,
|
|
||||||
NAMING_PATTERNS.DOMAIN.REPOSITORY_INTERFACE.description,
|
|
||||||
fileName,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!NAMING_PATTERNS.DOMAIN.ENTITY.pattern.test(fileName)) {
|
|
||||||
violations.push(
|
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.WRONG_CASE,
|
|
||||||
LAYERS.DOMAIN,
|
|
||||||
filePath,
|
|
||||||
NAMING_PATTERNS.DOMAIN.ENTITY.description,
|
|
||||||
fileName,
|
|
||||||
NAMING_ERROR_MESSAGES.USE_PASCAL_CASE,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
private checkApplicationLayer(fileName: string, filePath: string): NamingViolation[] {
|
|
||||||
const violations: NamingViolation[] = []
|
|
||||||
|
|
||||||
if (
|
|
||||||
fileName.endsWith(FILE_SUFFIXES.DTO) ||
|
|
||||||
fileName.endsWith(FILE_SUFFIXES.REQUEST) ||
|
|
||||||
fileName.endsWith(FILE_SUFFIXES.RESPONSE)
|
|
||||||
) {
|
|
||||||
if (!NAMING_PATTERNS.APPLICATION.DTO.pattern.test(fileName)) {
|
|
||||||
violations.push(
|
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.WRONG_SUFFIX,
|
|
||||||
LAYERS.APPLICATION,
|
|
||||||
filePath,
|
|
||||||
NAMING_PATTERNS.APPLICATION.DTO.description,
|
|
||||||
fileName,
|
|
||||||
NAMING_ERROR_MESSAGES.USE_DTO_SUFFIX,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
if (fileName.endsWith(FILE_SUFFIXES.MAPPER)) {
|
|
||||||
if (!NAMING_PATTERNS.APPLICATION.MAPPER.pattern.test(fileName)) {
|
|
||||||
violations.push(
|
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.WRONG_SUFFIX,
|
|
||||||
LAYERS.APPLICATION,
|
|
||||||
filePath,
|
|
||||||
NAMING_PATTERNS.APPLICATION.MAPPER.description,
|
|
||||||
fileName,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
const startsWithVerb = this.startsWithCommonVerb(fileName)
|
|
||||||
if (startsWithVerb) {
|
|
||||||
if (!NAMING_PATTERNS.APPLICATION.USE_CASE.pattern.test(fileName)) {
|
|
||||||
violations.push(
|
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.WRONG_VERB_NOUN,
|
|
||||||
LAYERS.APPLICATION,
|
|
||||||
filePath,
|
|
||||||
NAMING_PATTERNS.APPLICATION.USE_CASE.description,
|
|
||||||
fileName,
|
|
||||||
NAMING_ERROR_MESSAGES.USE_VERB_NOUN,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
filePath.includes(PATH_PATTERNS.USE_CASES) ||
|
|
||||||
filePath.includes(PATH_PATTERNS.USE_CASES_ALT)
|
|
||||||
) {
|
|
||||||
const hasVerb = this.startsWithCommonVerb(fileName)
|
|
||||||
if (!hasVerb) {
|
|
||||||
violations.push(
|
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.WRONG_VERB_NOUN,
|
|
||||||
LAYERS.APPLICATION,
|
|
||||||
filePath,
|
|
||||||
NAMING_ERROR_MESSAGES.USE_CASE_START_VERB,
|
|
||||||
fileName,
|
|
||||||
`Start with a verb like: ${USE_CASE_VERBS.slice(0, 5).join(", ")}`,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
private checkInfrastructureLayer(fileName: string, filePath: string): NamingViolation[] {
|
|
||||||
const violations: NamingViolation[] = []
|
|
||||||
|
|
||||||
if (fileName.endsWith(FILE_SUFFIXES.CONTROLLER)) {
|
|
||||||
if (!NAMING_PATTERNS.INFRASTRUCTURE.CONTROLLER.pattern.test(fileName)) {
|
|
||||||
violations.push(
|
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.WRONG_SUFFIX,
|
|
||||||
LAYERS.INFRASTRUCTURE,
|
|
||||||
filePath,
|
|
||||||
NAMING_PATTERNS.INFRASTRUCTURE.CONTROLLER.description,
|
|
||||||
fileName,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
fileName.endsWith(FILE_SUFFIXES.REPOSITORY) &&
|
|
||||||
!fileName.startsWith(PATTERN_WORDS.I_PREFIX)
|
|
||||||
) {
|
|
||||||
if (!NAMING_PATTERNS.INFRASTRUCTURE.REPOSITORY_IMPL.pattern.test(fileName)) {
|
|
||||||
violations.push(
|
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.WRONG_SUFFIX,
|
|
||||||
LAYERS.INFRASTRUCTURE,
|
|
||||||
filePath,
|
|
||||||
NAMING_PATTERNS.INFRASTRUCTURE.REPOSITORY_IMPL.description,
|
|
||||||
fileName,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
if (fileName.endsWith(FILE_SUFFIXES.SERVICE) || fileName.endsWith(FILE_SUFFIXES.ADAPTER)) {
|
|
||||||
if (!NAMING_PATTERNS.INFRASTRUCTURE.SERVICE.pattern.test(fileName)) {
|
|
||||||
violations.push(
|
|
||||||
NamingViolation.create(
|
|
||||||
fileName,
|
|
||||||
NAMING_VIOLATION_TYPES.WRONG_SUFFIX,
|
|
||||||
LAYERS.INFRASTRUCTURE,
|
|
||||||
filePath,
|
|
||||||
NAMING_PATTERNS.INFRASTRUCTURE.SERVICE.description,
|
|
||||||
fileName,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
return violations
|
|
||||||
}
|
|
||||||
|
|
||||||
private startsWithCommonVerb(fileName: string): boolean {
|
|
||||||
const baseFileName = fileName.replace(/\.tsx?$/, "")
|
|
||||||
|
|
||||||
return USE_CASE_VERBS.some((verb) => baseFileName.startsWith(verb))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -90,80 +90,98 @@ export class SecretDetector implements ISecretDetector {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private extractSecretType(message: string, ruleId: string): string {
|
private extractSecretType(message: string, ruleId: string): string {
|
||||||
|
const lowerMessage = message.toLowerCase()
|
||||||
|
|
||||||
|
const ruleBasedType = this.extractByRuleId(ruleId, lowerMessage)
|
||||||
|
if (ruleBasedType) {
|
||||||
|
return ruleBasedType
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.extractByMessage(lowerMessage)
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractByRuleId(ruleId: string, lowerMessage: string): string | null {
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.AWS)) {
|
if (ruleId.includes(SECRET_KEYWORDS.AWS)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ACCESS_KEY)) {
|
return this.extractAwsType(lowerMessage)
|
||||||
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
|
|
||||||
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
|
|
||||||
}
|
|
||||||
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.GITHUB)) {
|
if (ruleId.includes(SECRET_KEYWORDS.GITHUB)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
|
return this.extractGithubType(lowerMessage)
|
||||||
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.OAUTH)) {
|
|
||||||
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
|
|
||||||
}
|
|
||||||
return SECRET_TYPE_NAMES.GITHUB_TOKEN
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.NPM)) {
|
if (ruleId.includes(SECRET_KEYWORDS.NPM)) {
|
||||||
return SECRET_TYPE_NAMES.NPM_TOKEN
|
return SECRET_TYPE_NAMES.NPM_TOKEN
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.GCP) || ruleId.includes(SECRET_KEYWORDS.GOOGLE)) {
|
if (ruleId.includes(SECRET_KEYWORDS.GCP) || ruleId.includes(SECRET_KEYWORDS.GOOGLE)) {
|
||||||
return SECRET_TYPE_NAMES.GCP_SERVICE_ACCOUNT_KEY
|
return SECRET_TYPE_NAMES.GCP_SERVICE_ACCOUNT_KEY
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.PRIVATEKEY) || ruleId.includes(SECRET_KEYWORDS.SSH)) {
|
if (ruleId.includes(SECRET_KEYWORDS.PRIVATEKEY) || ruleId.includes(SECRET_KEYWORDS.SSH)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.RSA)) {
|
return this.extractSshType(lowerMessage)
|
||||||
return SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.DSA)) {
|
|
||||||
return SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ECDSA)) {
|
|
||||||
return SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.ED25519)) {
|
|
||||||
return SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY
|
|
||||||
}
|
|
||||||
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.SLACK)) {
|
if (ruleId.includes(SECRET_KEYWORDS.SLACK)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.BOT)) {
|
return this.extractSlackType(lowerMessage)
|
||||||
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
|
|
||||||
}
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.USER)) {
|
|
||||||
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
|
|
||||||
}
|
|
||||||
return SECRET_TYPE_NAMES.SLACK_TOKEN
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (ruleId.includes(SECRET_KEYWORDS.BASICAUTH)) {
|
if (ruleId.includes(SECRET_KEYWORDS.BASICAUTH)) {
|
||||||
return SECRET_TYPE_NAMES.BASIC_AUTH_CREDENTIALS
|
return SECRET_TYPE_NAMES.BASIC_AUTH_CREDENTIALS
|
||||||
}
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.API_KEY)) {
|
private extractAwsType(lowerMessage: string): string {
|
||||||
return SECRET_TYPE_NAMES.API_KEY
|
if (lowerMessage.includes(SECRET_KEYWORDS.ACCESS_KEY)) {
|
||||||
|
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
|
||||||
}
|
}
|
||||||
|
if (lowerMessage.includes(SECRET_KEYWORDS.SECRET)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.TOKEN)) {
|
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
|
||||||
return SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN
|
|
||||||
}
|
}
|
||||||
|
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
|
||||||
|
}
|
||||||
|
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.PASSWORD)) {
|
private extractGithubType(lowerMessage: string): string {
|
||||||
return SECRET_TYPE_NAMES.PASSWORD
|
if (lowerMessage.includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
|
||||||
|
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
|
||||||
}
|
}
|
||||||
|
if (lowerMessage.includes(SECRET_KEYWORDS.OAUTH)) {
|
||||||
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
|
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
|
||||||
return SECRET_TYPE_NAMES.SECRET
|
|
||||||
}
|
}
|
||||||
|
return SECRET_TYPE_NAMES.GITHUB_TOKEN
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractSshType(lowerMessage: string): string {
|
||||||
|
const sshTypeMap: [string, string][] = [
|
||||||
|
[SECRET_KEYWORDS.RSA, SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY],
|
||||||
|
[SECRET_KEYWORDS.DSA, SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY],
|
||||||
|
[SECRET_KEYWORDS.ECDSA, SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY],
|
||||||
|
[SECRET_KEYWORDS.ED25519, SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY],
|
||||||
|
]
|
||||||
|
for (const [keyword, typeName] of sshTypeMap) {
|
||||||
|
if (lowerMessage.includes(keyword)) {
|
||||||
|
return typeName
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractSlackType(lowerMessage: string): string {
|
||||||
|
if (lowerMessage.includes(SECRET_KEYWORDS.BOT)) {
|
||||||
|
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
|
||||||
|
}
|
||||||
|
if (lowerMessage.includes(SECRET_KEYWORDS.USER)) {
|
||||||
|
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
|
||||||
|
}
|
||||||
|
return SECRET_TYPE_NAMES.SLACK_TOKEN
|
||||||
|
}
|
||||||
|
|
||||||
|
private extractByMessage(lowerMessage: string): string {
|
||||||
|
const messageTypeMap: [string, string][] = [
|
||||||
|
[SECRET_KEYWORDS.API_KEY, SECRET_TYPE_NAMES.API_KEY],
|
||||||
|
[SECRET_KEYWORDS.TOKEN, SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN],
|
||||||
|
[SECRET_KEYWORDS.PASSWORD, SECRET_TYPE_NAMES.PASSWORD],
|
||||||
|
[SECRET_KEYWORDS.SECRET, SECRET_TYPE_NAMES.SECRET],
|
||||||
|
]
|
||||||
|
for (const [keyword, typeName] of messageTypeMap) {
|
||||||
|
if (lowerMessage.includes(keyword)) {
|
||||||
|
return typeName
|
||||||
|
}
|
||||||
|
}
|
||||||
return SECRET_TYPE_NAMES.SENSITIVE_DATA
|
return SECRET_TYPE_NAMES.SENSITIVE_DATA
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -63,6 +63,28 @@ export const NAMING_ERROR_MESSAGES = {
|
|||||||
USE_DTO_SUFFIX: "Use *Dto, *Request, or *Response suffix (e.g., UserResponseDto.ts)",
|
USE_DTO_SUFFIX: "Use *Dto, *Request, or *Response suffix (e.g., UserResponseDto.ts)",
|
||||||
USE_VERB_NOUN: "Use verb + noun in PascalCase (e.g., CreateUser.ts, UpdateProfile.ts)",
|
USE_VERB_NOUN: "Use verb + noun in PascalCase (e.g., CreateUser.ts, UpdateProfile.ts)",
|
||||||
USE_CASE_START_VERB: "Use cases should start with a verb",
|
USE_CASE_START_VERB: "Use cases should start with a verb",
|
||||||
|
DOMAIN_SERVICE_PASCAL_CASE: "Domain services must be PascalCase ending with 'Service'",
|
||||||
|
DOMAIN_ENTITY_PASCAL_CASE: "Domain entities must be PascalCase nouns",
|
||||||
|
DTO_PASCAL_CASE: "DTOs must be PascalCase ending with 'Dto', 'Request', or 'Response'",
|
||||||
|
MAPPER_PASCAL_CASE: "Mappers must be PascalCase ending with 'Mapper'",
|
||||||
|
USE_CASE_VERB_NOUN: "Use cases must be PascalCase Verb+Noun (e.g., CreateUser)",
|
||||||
|
CONTROLLER_PASCAL_CASE: "Controllers must be PascalCase ending with 'Controller'",
|
||||||
|
REPOSITORY_IMPL_PASCAL_CASE:
|
||||||
|
"Repository implementations must be PascalCase ending with 'Repository'",
|
||||||
|
SERVICE_ADAPTER_PASCAL_CASE:
|
||||||
|
"Services/Adapters must be PascalCase ending with 'Service' or 'Adapter'",
|
||||||
|
FUNCTION_CAMEL_CASE: "Functions and methods must be camelCase",
|
||||||
|
USE_CAMEL_CASE_FUNCTION: "Use camelCase for function names (e.g., getUserById, createOrder)",
|
||||||
|
INTERFACE_PASCAL_CASE: "Interfaces must be PascalCase",
|
||||||
|
USE_PASCAL_CASE_INTERFACE: "Use PascalCase for interface names",
|
||||||
|
REPOSITORY_INTERFACE_I_PREFIX:
|
||||||
|
"Domain repository interfaces must start with 'I' (e.g., IUserRepository)",
|
||||||
|
REPOSITORY_INTERFACE_PATTERN: "Repository interfaces must be I + PascalCase + Repository",
|
||||||
|
CONSTANT_UPPER_SNAKE_CASE: "Exported constants must be UPPER_SNAKE_CASE",
|
||||||
|
USE_UPPER_SNAKE_CASE_CONSTANT:
|
||||||
|
"Use UPPER_SNAKE_CASE for constant names (e.g., MAX_RETRIES, API_URL)",
|
||||||
|
VARIABLE_CAMEL_CASE: "Variables must be camelCase",
|
||||||
|
USE_CAMEL_CASE_VARIABLE: "Use camelCase for variable names (e.g., userId, orderList)",
|
||||||
} as const
|
} as const
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import Parser from "tree-sitter"
|
import Parser from "tree-sitter"
|
||||||
import { HardcodedValue, HardcodeType } from "../../domain/value-objects/HardcodedValue"
|
import { HardcodedValue, HardcodeType } from "../../domain/value-objects/HardcodedValue"
|
||||||
import { DETECTION_VALUES } from "../../shared/constants/rules"
|
import { DETECTION_VALUES, HARDCODE_TYPES } from "../../shared/constants/rules"
|
||||||
import { AstContextChecker } from "./AstContextChecker"
|
import { AstContextChecker } from "./AstContextChecker"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -83,7 +83,7 @@ export class AstBooleanAnalyzer {
|
|||||||
|
|
||||||
return HardcodedValue.create(
|
return HardcodedValue.create(
|
||||||
value,
|
value,
|
||||||
"MAGIC_BOOLEAN" as HardcodeType,
|
HARDCODE_TYPES.MAGIC_BOOLEAN as HardcodeType,
|
||||||
lineNumber,
|
lineNumber,
|
||||||
column,
|
column,
|
||||||
context,
|
context,
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import Parser from "tree-sitter"
|
import Parser from "tree-sitter"
|
||||||
import { HardcodedValue, HardcodeType } from "../../domain/value-objects/HardcodedValue"
|
import { HardcodedValue, HardcodeType } from "../../domain/value-objects/HardcodedValue"
|
||||||
import { HARDCODE_TYPES } from "../../shared/constants/rules"
|
import { HARDCODE_TYPES } from "../../shared/constants/rules"
|
||||||
|
import { AST_STRING_TYPES } from "../../shared/constants/ast-node-types"
|
||||||
import { ALLOWED_NUMBERS } from "../constants/defaults"
|
import { ALLOWED_NUMBERS } from "../constants/defaults"
|
||||||
import { AstContextChecker } from "./AstContextChecker"
|
import { AstContextChecker } from "./AstContextChecker"
|
||||||
|
|
||||||
@@ -71,7 +72,9 @@ export class AstConfigObjectAnalyzer {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (node.type === "string") {
|
if (node.type === "string") {
|
||||||
const stringFragment = node.children.find((c) => c.type === "string_fragment")
|
const stringFragment = node.children.find(
|
||||||
|
(c) => c.type === AST_STRING_TYPES.STRING_FRAGMENT,
|
||||||
|
)
|
||||||
return stringFragment !== undefined && stringFragment.text.length > 3
|
return stringFragment !== undefined && stringFragment.text.length > 3
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,10 @@
|
|||||||
import Parser from "tree-sitter"
|
import Parser from "tree-sitter"
|
||||||
|
import {
|
||||||
|
AST_FIELD_NAMES,
|
||||||
|
AST_IDENTIFIER_TYPES,
|
||||||
|
AST_MODIFIER_TYPES,
|
||||||
|
AST_VARIABLE_TYPES,
|
||||||
|
} from "../../shared/constants/ast-node-types"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* AST context checker for analyzing node contexts
|
* AST context checker for analyzing node contexts
|
||||||
@@ -29,22 +35,26 @@ export class AstContextChecker {
|
|||||||
* Helper to check if export statement contains "as const"
|
* Helper to check if export statement contains "as const"
|
||||||
*/
|
*/
|
||||||
private checkExportedConstant(exportNode: Parser.SyntaxNode): boolean {
|
private checkExportedConstant(exportNode: Parser.SyntaxNode): boolean {
|
||||||
const declaration = exportNode.childForFieldName("declaration")
|
const declaration = exportNode.childForFieldName(AST_FIELD_NAMES.DECLARATION)
|
||||||
if (!declaration) {
|
if (!declaration) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
const declarator = this.findDescendant(declaration, "variable_declarator")
|
if (declaration.type !== "lexical_declaration") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
const declarator = this.findDescendant(declaration, AST_VARIABLE_TYPES.VARIABLE_DECLARATOR)
|
||||||
if (!declarator) {
|
if (!declarator) {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
const value = declarator.childForFieldName("value")
|
const value = declarator.childForFieldName(AST_FIELD_NAMES.VALUE)
|
||||||
if (value?.type !== "as_expression") {
|
if (value?.type !== "as_expression") {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
const asType = value.children.find((c) => c.type === "const")
|
const asType = value.children.find((c) => c.type === AST_MODIFIER_TYPES.CONST)
|
||||||
return asType !== undefined
|
return asType !== undefined
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -83,12 +93,17 @@ export class AstContextChecker {
|
|||||||
|
|
||||||
if (current.type === "call_expression") {
|
if (current.type === "call_expression") {
|
||||||
const functionNode =
|
const functionNode =
|
||||||
current.childForFieldName("function") ||
|
current.childForFieldName(AST_FIELD_NAMES.FUNCTION) ||
|
||||||
current.children.find((c) => c.type === "identifier" || c.type === "import")
|
current.children.find(
|
||||||
|
(c) =>
|
||||||
|
c.type === AST_IDENTIFIER_TYPES.IDENTIFIER ||
|
||||||
|
c.type === AST_IDENTIFIER_TYPES.IMPORT,
|
||||||
|
)
|
||||||
|
|
||||||
if (
|
if (
|
||||||
functionNode &&
|
functionNode &&
|
||||||
(functionNode.text === "import" || functionNode.type === "import")
|
(functionNode.text === "import" ||
|
||||||
|
functionNode.type === AST_IDENTIFIER_TYPES.IMPORT)
|
||||||
) {
|
) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
@@ -229,7 +244,13 @@ export class AstContextChecker {
|
|||||||
public getNodeContext(node: Parser.SyntaxNode): string {
|
public getNodeContext(node: Parser.SyntaxNode): string {
|
||||||
let current: Parser.SyntaxNode | null = node
|
let current: Parser.SyntaxNode | null = node
|
||||||
|
|
||||||
while (current && current.type !== "lexical_declaration" && current.type !== "pair") {
|
while (
|
||||||
|
current &&
|
||||||
|
current.type !== "lexical_declaration" &&
|
||||||
|
current.type !== "pair" &&
|
||||||
|
current.type !== "call_expression" &&
|
||||||
|
current.type !== "return_statement"
|
||||||
|
) {
|
||||||
current = current.parent
|
current = current.parent
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import Parser from "tree-sitter"
|
import Parser from "tree-sitter"
|
||||||
import { HardcodedValue, HardcodeType } from "../../domain/value-objects/HardcodedValue"
|
import { HardcodedValue, HardcodeType } from "../../domain/value-objects/HardcodedValue"
|
||||||
import { HARDCODE_TYPES } from "../../shared/constants/rules"
|
import { HARDCODE_TYPES } from "../../shared/constants/rules"
|
||||||
|
import { TIMER_FUNCTIONS } from "../../shared/constants/ast-node-types"
|
||||||
import { ALLOWED_NUMBERS, DETECTION_KEYWORDS } from "../constants/defaults"
|
import { ALLOWED_NUMBERS, DETECTION_KEYWORDS } from "../constants/defaults"
|
||||||
import { AstContextChecker } from "./AstContextChecker"
|
import { AstContextChecker } from "./AstContextChecker"
|
||||||
|
|
||||||
@@ -43,7 +44,12 @@ export class AstNumberAnalyzer {
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
if (this.contextChecker.isInCallExpression(parent, ["setTimeout", "setInterval"])) {
|
if (
|
||||||
|
this.contextChecker.isInCallExpression(parent, [
|
||||||
|
TIMER_FUNCTIONS.SET_TIMEOUT,
|
||||||
|
TIMER_FUNCTIONS.SET_INTERVAL,
|
||||||
|
])
|
||||||
|
) {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import Parser from "tree-sitter"
|
import Parser from "tree-sitter"
|
||||||
import { HardcodedValue, HardcodeType } from "../../domain/value-objects/HardcodedValue"
|
import { HardcodedValue, HardcodeType } from "../../domain/value-objects/HardcodedValue"
|
||||||
import { CONFIG_KEYWORDS, DETECTION_VALUES, HARDCODE_TYPES } from "../../shared/constants/rules"
|
import { CONFIG_KEYWORDS, DETECTION_VALUES, HARDCODE_TYPES } from "../../shared/constants/rules"
|
||||||
|
import { AST_STRING_TYPES } from "../../shared/constants/ast-node-types"
|
||||||
import { AstContextChecker } from "./AstContextChecker"
|
import { AstContextChecker } from "./AstContextChecker"
|
||||||
import { ValuePatternMatcher } from "./ValuePatternMatcher"
|
import { ValuePatternMatcher } from "./ValuePatternMatcher"
|
||||||
|
|
||||||
@@ -29,7 +30,9 @@ export class AstStringAnalyzer {
|
|||||||
* Analyzes a string node and returns a violation if it's a magic string
|
* Analyzes a string node and returns a violation if it's a magic string
|
||||||
*/
|
*/
|
||||||
public analyze(node: Parser.SyntaxNode, lines: string[]): HardcodedValue | null {
|
public analyze(node: Parser.SyntaxNode, lines: string[]): HardcodedValue | null {
|
||||||
const stringFragment = node.children.find((child) => child.type === "string_fragment")
|
const stringFragment = node.children.find(
|
||||||
|
(child) => child.type === AST_STRING_TYPES.STRING_FRAGMENT,
|
||||||
|
)
|
||||||
if (!stringFragment) {
|
if (!stringFragment) {
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
@@ -108,6 +111,7 @@ export class AstStringAnalyzer {
|
|||||||
"key",
|
"key",
|
||||||
...CONFIG_KEYWORDS.MESSAGES,
|
...CONFIG_KEYWORDS.MESSAGES,
|
||||||
"label",
|
"label",
|
||||||
|
...CONFIG_KEYWORDS.TECHNICAL,
|
||||||
]
|
]
|
||||||
|
|
||||||
return configKeywords.some((keyword) => context.includes(keyword))
|
return configKeywords.some((keyword) => context.includes(keyword))
|
||||||
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
import { VALUE_PATTERN_TYPES } from "../../shared/constants/ast-node-types"
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Pattern matcher for detecting specific value types
|
* Pattern matcher for detecting specific value types
|
||||||
*
|
*
|
||||||
@@ -131,40 +133,40 @@ export class ValuePatternMatcher {
|
|||||||
| "base64"
|
| "base64"
|
||||||
| null {
|
| null {
|
||||||
if (this.isEmail(value)) {
|
if (this.isEmail(value)) {
|
||||||
return "email"
|
return VALUE_PATTERN_TYPES.EMAIL
|
||||||
}
|
}
|
||||||
if (this.isJwt(value)) {
|
if (this.isJwt(value)) {
|
||||||
return "api_key"
|
return VALUE_PATTERN_TYPES.API_KEY
|
||||||
}
|
}
|
||||||
if (this.isApiKey(value)) {
|
if (this.isApiKey(value)) {
|
||||||
return "api_key"
|
return VALUE_PATTERN_TYPES.API_KEY
|
||||||
}
|
}
|
||||||
if (this.isUrl(value)) {
|
if (this.isUrl(value)) {
|
||||||
return "url"
|
return VALUE_PATTERN_TYPES.URL
|
||||||
}
|
}
|
||||||
if (this.isIpAddress(value)) {
|
if (this.isIpAddress(value)) {
|
||||||
return "ip_address"
|
return VALUE_PATTERN_TYPES.IP_ADDRESS
|
||||||
}
|
}
|
||||||
if (this.isFilePath(value)) {
|
if (this.isFilePath(value)) {
|
||||||
return "file_path"
|
return VALUE_PATTERN_TYPES.FILE_PATH
|
||||||
}
|
}
|
||||||
if (this.isDate(value)) {
|
if (this.isDate(value)) {
|
||||||
return "date"
|
return VALUE_PATTERN_TYPES.DATE
|
||||||
}
|
}
|
||||||
if (this.isUuid(value)) {
|
if (this.isUuid(value)) {
|
||||||
return "uuid"
|
return VALUE_PATTERN_TYPES.UUID
|
||||||
}
|
}
|
||||||
if (this.isSemver(value)) {
|
if (this.isSemver(value)) {
|
||||||
return "version"
|
return VALUE_PATTERN_TYPES.VERSION
|
||||||
}
|
}
|
||||||
if (this.isHexColor(value)) {
|
if (this.isHexColor(value)) {
|
||||||
return "color"
|
return "color"
|
||||||
}
|
}
|
||||||
if (this.isMacAddress(value)) {
|
if (this.isMacAddress(value)) {
|
||||||
return "mac_address"
|
return VALUE_PATTERN_TYPES.MAC_ADDRESS
|
||||||
}
|
}
|
||||||
if (this.isBase64(value)) {
|
if (this.isBase64(value)) {
|
||||||
return "base64"
|
return VALUE_PATTERN_TYPES.BASE64
|
||||||
}
|
}
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,230 @@
|
|||||||
|
import Parser from "tree-sitter"
|
||||||
|
import { NamingViolation } from "../../../domain/value-objects/NamingViolation"
|
||||||
|
import { AST_CLASS_TYPES, AST_FIELD_NAMES } from "../../../shared/constants"
|
||||||
|
import { LAYERS, NAMING_VIOLATION_TYPES, USE_CASE_VERBS } from "../../../shared/constants/rules"
|
||||||
|
import {
|
||||||
|
FILE_SUFFIXES,
|
||||||
|
NAMING_ERROR_MESSAGES,
|
||||||
|
PATTERN_WORDS,
|
||||||
|
} from "../../constants/detectorPatterns"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AST-based analyzer for detecting class naming violations
|
||||||
|
*
|
||||||
|
* Analyzes class declaration nodes to ensure proper naming conventions:
|
||||||
|
* - Domain layer: PascalCase entities and services (*Service)
|
||||||
|
* - Application layer: PascalCase use cases (Verb+Noun), DTOs (*Dto/*Request/*Response)
|
||||||
|
* - Infrastructure layer: PascalCase controllers, repositories, services
|
||||||
|
*/
|
||||||
|
export class AstClassNameAnalyzer {
|
||||||
|
/**
|
||||||
|
* Analyzes a class declaration node
|
||||||
|
*/
|
||||||
|
public analyze(
|
||||||
|
node: Parser.SyntaxNode,
|
||||||
|
layer: string,
|
||||||
|
filePath: string,
|
||||||
|
_lines: string[],
|
||||||
|
): NamingViolation | null {
|
||||||
|
if (node.type !== AST_CLASS_TYPES.CLASS_DECLARATION) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const nameNode = node.childForFieldName(AST_FIELD_NAMES.NAME)
|
||||||
|
if (!nameNode) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const className = nameNode.text
|
||||||
|
const lineNumber = nameNode.startPosition.row + 1
|
||||||
|
|
||||||
|
switch (layer) {
|
||||||
|
case LAYERS.DOMAIN:
|
||||||
|
return this.checkDomainClass(className, filePath, lineNumber)
|
||||||
|
case LAYERS.APPLICATION:
|
||||||
|
return this.checkApplicationClass(className, filePath, lineNumber)
|
||||||
|
case LAYERS.INFRASTRUCTURE:
|
||||||
|
return this.checkInfrastructureClass(className, filePath, lineNumber)
|
||||||
|
default:
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks domain layer class naming
|
||||||
|
*/
|
||||||
|
private checkDomainClass(
|
||||||
|
className: string,
|
||||||
|
filePath: string,
|
||||||
|
lineNumber: number,
|
||||||
|
): NamingViolation | null {
|
||||||
|
if (className.endsWith(FILE_SUFFIXES.SERVICE.replace(".ts", ""))) {
|
||||||
|
if (!/^[A-Z][a-zA-Z0-9]*Service$/.test(className)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
className,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_CASE,
|
||||||
|
LAYERS.DOMAIN,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.DOMAIN_SERVICE_PASCAL_CASE,
|
||||||
|
className,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!/^[A-Z][a-zA-Z0-9]*$/.test(className)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
className,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_CASE,
|
||||||
|
LAYERS.DOMAIN,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.DOMAIN_ENTITY_PASCAL_CASE,
|
||||||
|
className,
|
||||||
|
NAMING_ERROR_MESSAGES.USE_PASCAL_CASE,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks application layer class naming
|
||||||
|
*/
|
||||||
|
private checkApplicationClass(
|
||||||
|
className: string,
|
||||||
|
filePath: string,
|
||||||
|
lineNumber: number,
|
||||||
|
): NamingViolation | null {
|
||||||
|
if (
|
||||||
|
className.endsWith("Dto") ||
|
||||||
|
className.endsWith("Request") ||
|
||||||
|
className.endsWith("Response")
|
||||||
|
) {
|
||||||
|
if (!/^[A-Z][a-zA-Z0-9]*(Dto|Request|Response)$/.test(className)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
className,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_SUFFIX,
|
||||||
|
LAYERS.APPLICATION,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.DTO_PASCAL_CASE,
|
||||||
|
className,
|
||||||
|
NAMING_ERROR_MESSAGES.USE_DTO_SUFFIX,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (className.endsWith("Mapper")) {
|
||||||
|
if (!/^[A-Z][a-zA-Z0-9]*Mapper$/.test(className)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
className,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_SUFFIX,
|
||||||
|
LAYERS.APPLICATION,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.MAPPER_PASCAL_CASE,
|
||||||
|
className,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const startsWithVerb = this.startsWithCommonVerb(className)
|
||||||
|
const startsWithLowercaseVerb = this.startsWithLowercaseVerb(className)
|
||||||
|
if (startsWithVerb) {
|
||||||
|
if (!/^[A-Z][a-z]+[A-Z][a-zA-Z0-9]*$/.test(className)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
className,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_VERB_NOUN,
|
||||||
|
LAYERS.APPLICATION,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.USE_CASE_VERB_NOUN,
|
||||||
|
className,
|
||||||
|
NAMING_ERROR_MESSAGES.USE_VERB_NOUN,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} else if (startsWithLowercaseVerb) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
className,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_VERB_NOUN,
|
||||||
|
LAYERS.APPLICATION,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.USE_CASE_VERB_NOUN,
|
||||||
|
className,
|
||||||
|
NAMING_ERROR_MESSAGES.USE_VERB_NOUN,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks infrastructure layer class naming
|
||||||
|
*/
|
||||||
|
private checkInfrastructureClass(
|
||||||
|
className: string,
|
||||||
|
filePath: string,
|
||||||
|
lineNumber: number,
|
||||||
|
): NamingViolation | null {
|
||||||
|
if (className.endsWith("Controller")) {
|
||||||
|
if (!/^[A-Z][a-zA-Z0-9]*Controller$/.test(className)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
className,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_SUFFIX,
|
||||||
|
LAYERS.INFRASTRUCTURE,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.CONTROLLER_PASCAL_CASE,
|
||||||
|
className,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
className.endsWith(PATTERN_WORDS.REPOSITORY) &&
|
||||||
|
!className.startsWith(PATTERN_WORDS.I_PREFIX)
|
||||||
|
) {
|
||||||
|
if (!/^[A-Z][a-zA-Z0-9]*Repository$/.test(className)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
className,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_SUFFIX,
|
||||||
|
LAYERS.INFRASTRUCTURE,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.REPOSITORY_IMPL_PASCAL_CASE,
|
||||||
|
className,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (className.endsWith("Service") || className.endsWith("Adapter")) {
|
||||||
|
if (!/^[A-Z][a-zA-Z0-9]*(Service|Adapter)$/.test(className)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
className,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_SUFFIX,
|
||||||
|
LAYERS.INFRASTRUCTURE,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.SERVICE_ADAPTER_PASCAL_CASE,
|
||||||
|
className,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if class name starts with a common use case verb
|
||||||
|
*/
|
||||||
|
private startsWithCommonVerb(className: string): boolean {
|
||||||
|
return USE_CASE_VERBS.some((verb) => className.startsWith(verb))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if class name starts with a lowercase verb (camelCase use case)
|
||||||
|
*/
|
||||||
|
private startsWithLowercaseVerb(className: string): boolean {
|
||||||
|
const lowercaseVerbs = USE_CASE_VERBS.map((verb) => verb.toLowerCase())
|
||||||
|
return lowercaseVerbs.some((verb) => className.startsWith(verb))
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,65 @@
|
|||||||
|
import Parser from "tree-sitter"
|
||||||
|
import { NamingViolation } from "../../../domain/value-objects/NamingViolation"
|
||||||
|
import { AST_FIELD_NAMES, AST_FUNCTION_TYPES, CLASS_KEYWORDS } from "../../../shared/constants"
|
||||||
|
import { NAMING_VIOLATION_TYPES } from "../../../shared/constants/rules"
|
||||||
|
import { NAMING_ERROR_MESSAGES } from "../../constants/detectorPatterns"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AST-based analyzer for detecting function and method naming violations
|
||||||
|
*
|
||||||
|
* Analyzes function declaration, method definition, and arrow function nodes
|
||||||
|
* to ensure proper naming conventions:
|
||||||
|
* - Functions and methods should be camelCase
|
||||||
|
* - Private methods with underscore prefix are allowed
|
||||||
|
*/
|
||||||
|
export class AstFunctionNameAnalyzer {
|
||||||
|
/**
|
||||||
|
* Analyzes a function or method declaration node
|
||||||
|
*/
|
||||||
|
public analyze(
|
||||||
|
node: Parser.SyntaxNode,
|
||||||
|
layer: string,
|
||||||
|
filePath: string,
|
||||||
|
_lines: string[],
|
||||||
|
): NamingViolation | null {
|
||||||
|
const functionNodeTypes = [
|
||||||
|
AST_FUNCTION_TYPES.FUNCTION_DECLARATION,
|
||||||
|
AST_FUNCTION_TYPES.METHOD_DEFINITION,
|
||||||
|
AST_FUNCTION_TYPES.FUNCTION_SIGNATURE,
|
||||||
|
] as const
|
||||||
|
|
||||||
|
if (!(functionNodeTypes as readonly string[]).includes(node.type)) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const nameNode = node.childForFieldName(AST_FIELD_NAMES.NAME)
|
||||||
|
if (!nameNode) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const functionName = nameNode.text
|
||||||
|
const lineNumber = nameNode.startPosition.row + 1
|
||||||
|
|
||||||
|
if (functionName.startsWith("_")) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (functionName === CLASS_KEYWORDS.CONSTRUCTOR) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!/^[a-z][a-zA-Z0-9]*$/.test(functionName)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
functionName,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_CASE,
|
||||||
|
layer,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.FUNCTION_CAMEL_CASE,
|
||||||
|
functionName,
|
||||||
|
NAMING_ERROR_MESSAGES.USE_CAMEL_CASE_FUNCTION,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,90 @@
|
|||||||
|
import Parser from "tree-sitter"
|
||||||
|
import { NamingViolation } from "../../../domain/value-objects/NamingViolation"
|
||||||
|
import { AST_CLASS_TYPES, AST_FIELD_NAMES } from "../../../shared/constants"
|
||||||
|
import { LAYERS, NAMING_VIOLATION_TYPES } from "../../../shared/constants/rules"
|
||||||
|
import { NAMING_ERROR_MESSAGES, PATTERN_WORDS } from "../../constants/detectorPatterns"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AST-based analyzer for detecting interface naming violations
|
||||||
|
*
|
||||||
|
* Analyzes interface declaration nodes to ensure proper naming conventions:
|
||||||
|
* - Domain layer: Repository interfaces must start with 'I' (e.g., IUserRepository)
|
||||||
|
* - All layers: Interfaces should be PascalCase
|
||||||
|
*/
|
||||||
|
export class AstInterfaceNameAnalyzer {
|
||||||
|
/**
|
||||||
|
* Analyzes an interface declaration node
|
||||||
|
*/
|
||||||
|
public analyze(
|
||||||
|
node: Parser.SyntaxNode,
|
||||||
|
layer: string,
|
||||||
|
filePath: string,
|
||||||
|
_lines: string[],
|
||||||
|
): NamingViolation | null {
|
||||||
|
if (node.type !== AST_CLASS_TYPES.INTERFACE_DECLARATION) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const nameNode = node.childForFieldName(AST_FIELD_NAMES.NAME)
|
||||||
|
if (!nameNode) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const interfaceName = nameNode.text
|
||||||
|
const lineNumber = nameNode.startPosition.row + 1
|
||||||
|
|
||||||
|
if (!/^[A-Z][a-zA-Z0-9]*$/.test(interfaceName)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
interfaceName,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_CASE,
|
||||||
|
layer,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.INTERFACE_PASCAL_CASE,
|
||||||
|
interfaceName,
|
||||||
|
NAMING_ERROR_MESSAGES.USE_PASCAL_CASE_INTERFACE,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (layer === LAYERS.DOMAIN) {
|
||||||
|
return this.checkDomainInterface(interfaceName, filePath, lineNumber)
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks domain layer interface naming
|
||||||
|
*/
|
||||||
|
private checkDomainInterface(
|
||||||
|
interfaceName: string,
|
||||||
|
filePath: string,
|
||||||
|
lineNumber: number,
|
||||||
|
): NamingViolation | null {
|
||||||
|
if (interfaceName.endsWith(PATTERN_WORDS.REPOSITORY)) {
|
||||||
|
if (!interfaceName.startsWith(PATTERN_WORDS.I_PREFIX)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
interfaceName,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_PREFIX,
|
||||||
|
LAYERS.DOMAIN,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.REPOSITORY_INTERFACE_I_PREFIX,
|
||||||
|
interfaceName,
|
||||||
|
`Rename to I${interfaceName}`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!/^I[A-Z][a-zA-Z0-9]*Repository$/.test(interfaceName)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
interfaceName,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_CASE,
|
||||||
|
LAYERS.DOMAIN,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.REPOSITORY_INTERFACE_PATTERN,
|
||||||
|
interfaceName,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,106 @@
|
|||||||
|
import Parser from "tree-sitter"
|
||||||
|
import { NamingViolation } from "../../../domain/value-objects/NamingViolation"
|
||||||
|
import { AST_CLASS_TYPES, AST_FUNCTION_TYPES, AST_VARIABLE_TYPES } from "../../../shared/constants"
|
||||||
|
import { AstClassNameAnalyzer } from "./AstClassNameAnalyzer"
|
||||||
|
import { AstFunctionNameAnalyzer } from "./AstFunctionNameAnalyzer"
|
||||||
|
import { AstInterfaceNameAnalyzer } from "./AstInterfaceNameAnalyzer"
|
||||||
|
import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
|
||||||
|
|
||||||
|
type NodeAnalyzer = (
|
||||||
|
node: Parser.SyntaxNode,
|
||||||
|
layer: string,
|
||||||
|
filePath: string,
|
||||||
|
lines: string[],
|
||||||
|
) => NamingViolation | null
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AST tree traverser for detecting naming convention violations
|
||||||
|
*
|
||||||
|
* Walks through the Abstract Syntax Tree and uses analyzers
|
||||||
|
* to detect naming violations in classes, interfaces, functions, and variables.
|
||||||
|
*/
|
||||||
|
export class AstNamingTraverser {
|
||||||
|
private readonly nodeHandlers: Map<string, NodeAnalyzer>
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
private readonly classAnalyzer: AstClassNameAnalyzer,
|
||||||
|
private readonly interfaceAnalyzer: AstInterfaceNameAnalyzer,
|
||||||
|
private readonly functionAnalyzer: AstFunctionNameAnalyzer,
|
||||||
|
private readonly variableAnalyzer: AstVariableNameAnalyzer,
|
||||||
|
) {
|
||||||
|
this.nodeHandlers = this.buildNodeHandlers()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Traverses the AST tree and collects naming violations
|
||||||
|
*/
|
||||||
|
public traverse(
|
||||||
|
tree: Parser.Tree,
|
||||||
|
sourceCode: string,
|
||||||
|
layer: string,
|
||||||
|
filePath: string,
|
||||||
|
): NamingViolation[] {
|
||||||
|
const results: NamingViolation[] = []
|
||||||
|
const lines = sourceCode.split("\n")
|
||||||
|
const cursor = tree.walk()
|
||||||
|
|
||||||
|
this.visit(cursor, lines, layer, filePath, results)
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
private buildNodeHandlers(): Map<string, NodeAnalyzer> {
|
||||||
|
const handlers = new Map<string, NodeAnalyzer>()
|
||||||
|
|
||||||
|
handlers.set(AST_CLASS_TYPES.CLASS_DECLARATION, (node, layer, filePath, lines) =>
|
||||||
|
this.classAnalyzer.analyze(node, layer, filePath, lines),
|
||||||
|
)
|
||||||
|
handlers.set(AST_CLASS_TYPES.INTERFACE_DECLARATION, (node, layer, filePath, lines) =>
|
||||||
|
this.interfaceAnalyzer.analyze(node, layer, filePath, lines),
|
||||||
|
)
|
||||||
|
|
||||||
|
const functionHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
|
||||||
|
this.functionAnalyzer.analyze(node, layer, filePath, lines)
|
||||||
|
handlers.set(AST_FUNCTION_TYPES.FUNCTION_DECLARATION, functionHandler)
|
||||||
|
handlers.set(AST_FUNCTION_TYPES.METHOD_DEFINITION, functionHandler)
|
||||||
|
handlers.set(AST_FUNCTION_TYPES.FUNCTION_SIGNATURE, functionHandler)
|
||||||
|
|
||||||
|
const variableHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
|
||||||
|
this.variableAnalyzer.analyze(node, layer, filePath, lines)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.VARIABLE_DECLARATOR, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.REQUIRED_PARAMETER, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.OPTIONAL_PARAMETER, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION, variableHandler)
|
||||||
|
handlers.set(AST_VARIABLE_TYPES.PROPERTY_SIGNATURE, variableHandler)
|
||||||
|
|
||||||
|
return handlers
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Recursively visits AST nodes
|
||||||
|
*/
|
||||||
|
private visit(
|
||||||
|
cursor: Parser.TreeCursor,
|
||||||
|
lines: string[],
|
||||||
|
layer: string,
|
||||||
|
filePath: string,
|
||||||
|
results: NamingViolation[],
|
||||||
|
): void {
|
||||||
|
const node = cursor.currentNode
|
||||||
|
const handler = this.nodeHandlers.get(node.type)
|
||||||
|
|
||||||
|
if (handler) {
|
||||||
|
const violation = handler(node, layer, filePath, lines)
|
||||||
|
if (violation) {
|
||||||
|
results.push(violation)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (cursor.gotoFirstChild()) {
|
||||||
|
do {
|
||||||
|
this.visit(cursor, lines, layer, filePath, results)
|
||||||
|
} while (cursor.gotoNextSibling())
|
||||||
|
cursor.gotoParent()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,159 @@
|
|||||||
|
import Parser from "tree-sitter"
|
||||||
|
import { NamingViolation } from "../../../domain/value-objects/NamingViolation"
|
||||||
|
import {
|
||||||
|
AST_FIELD_NAMES,
|
||||||
|
AST_FIELD_TYPES,
|
||||||
|
AST_MODIFIER_TYPES,
|
||||||
|
AST_PATTERN_TYPES,
|
||||||
|
AST_STATEMENT_TYPES,
|
||||||
|
AST_VARIABLE_TYPES,
|
||||||
|
} from "../../../shared/constants"
|
||||||
|
import { NAMING_VIOLATION_TYPES } from "../../../shared/constants/rules"
|
||||||
|
import { NAMING_ERROR_MESSAGES } from "../../constants/detectorPatterns"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* AST-based analyzer for detecting variable naming violations
|
||||||
|
*
|
||||||
|
* Analyzes variable declarations to ensure proper naming conventions:
|
||||||
|
* - Regular variables: camelCase
|
||||||
|
* - Constants (exported UPPER_CASE): UPPER_SNAKE_CASE
|
||||||
|
* - Class properties: camelCase
|
||||||
|
* - Private properties with underscore prefix are allowed
|
||||||
|
*/
|
||||||
|
export class AstVariableNameAnalyzer {
|
||||||
|
/**
|
||||||
|
* Analyzes a variable declaration node
|
||||||
|
*/
|
||||||
|
public analyze(
|
||||||
|
node: Parser.SyntaxNode,
|
||||||
|
layer: string,
|
||||||
|
filePath: string,
|
||||||
|
_lines: string[],
|
||||||
|
): NamingViolation | null {
|
||||||
|
const variableNodeTypes = [
|
||||||
|
AST_VARIABLE_TYPES.VARIABLE_DECLARATOR,
|
||||||
|
AST_VARIABLE_TYPES.REQUIRED_PARAMETER,
|
||||||
|
AST_VARIABLE_TYPES.OPTIONAL_PARAMETER,
|
||||||
|
AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION,
|
||||||
|
AST_VARIABLE_TYPES.PROPERTY_SIGNATURE,
|
||||||
|
] as const
|
||||||
|
|
||||||
|
if (!(variableNodeTypes as readonly string[]).includes(node.type)) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const nameNode = node.childForFieldName(AST_FIELD_NAMES.NAME)
|
||||||
|
if (!nameNode) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.isDestructuringPattern(nameNode)) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const variableName = nameNode.text
|
||||||
|
const lineNumber = nameNode.startPosition.row + 1
|
||||||
|
|
||||||
|
if (variableName.startsWith("_")) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const isConstant = this.isConstantVariable(node)
|
||||||
|
|
||||||
|
if (isConstant) {
|
||||||
|
if (!/^[A-Z][A-Z0-9_]*$/.test(variableName)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
variableName,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_CASE,
|
||||||
|
layer,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.CONSTANT_UPPER_SNAKE_CASE,
|
||||||
|
variableName,
|
||||||
|
NAMING_ERROR_MESSAGES.USE_UPPER_SNAKE_CASE_CONSTANT,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!/^[a-z][a-zA-Z0-9]*$/.test(variableName)) {
|
||||||
|
return NamingViolation.create(
|
||||||
|
variableName,
|
||||||
|
NAMING_VIOLATION_TYPES.WRONG_CASE,
|
||||||
|
layer,
|
||||||
|
`${filePath}:${String(lineNumber)}`,
|
||||||
|
NAMING_ERROR_MESSAGES.VARIABLE_CAMEL_CASE,
|
||||||
|
variableName,
|
||||||
|
NAMING_ERROR_MESSAGES.USE_CAMEL_CASE_VARIABLE,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if node is a destructuring pattern (object or array)
|
||||||
|
*/
|
||||||
|
private isDestructuringPattern(node: Parser.SyntaxNode): boolean {
|
||||||
|
return (
|
||||||
|
node.type === AST_PATTERN_TYPES.OBJECT_PATTERN ||
|
||||||
|
node.type === AST_PATTERN_TYPES.ARRAY_PATTERN
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if a variable is a constant (exported UPPER_CASE)
|
||||||
|
*/
|
||||||
|
private isConstantVariable(node: Parser.SyntaxNode): boolean {
|
||||||
|
const variableName = node.childForFieldName(AST_FIELD_NAMES.NAME)?.text
|
||||||
|
if (!variableName || !/^[A-Z]/.test(variableName)) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
node.type === AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION ||
|
||||||
|
node.type === AST_FIELD_TYPES.FIELD_DEFINITION
|
||||||
|
) {
|
||||||
|
return this.hasConstModifiers(node)
|
||||||
|
}
|
||||||
|
|
||||||
|
let current: Parser.SyntaxNode | null = node.parent
|
||||||
|
|
||||||
|
while (current) {
|
||||||
|
if (current.type === AST_STATEMENT_TYPES.LEXICAL_DECLARATION) {
|
||||||
|
const firstChild = current.child(0)
|
||||||
|
if (firstChild?.type === AST_MODIFIER_TYPES.CONST) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
current.type === AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION ||
|
||||||
|
current.type === AST_FIELD_TYPES.FIELD_DEFINITION
|
||||||
|
) {
|
||||||
|
return this.hasConstModifiers(current)
|
||||||
|
}
|
||||||
|
|
||||||
|
current = current.parent
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks if field has readonly or static modifiers (indicating a constant)
|
||||||
|
*/
|
||||||
|
private hasConstModifiers(fieldNode: Parser.SyntaxNode): boolean {
|
||||||
|
for (let i = 0; i < fieldNode.childCount; i++) {
|
||||||
|
const child = fieldNode.child(i)
|
||||||
|
const childText = child?.text
|
||||||
|
if (
|
||||||
|
child?.type === AST_MODIFIER_TYPES.READONLY ||
|
||||||
|
child?.type === AST_MODIFIER_TYPES.STATIC ||
|
||||||
|
childText === AST_MODIFIER_TYPES.READONLY ||
|
||||||
|
childText === AST_MODIFIER_TYPES.STATIC
|
||||||
|
) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
139
packages/guardian/src/shared/constants/ast-node-types.ts
Normal file
139
packages/guardian/src/shared/constants/ast-node-types.ts
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
/**
|
||||||
|
* Abstract Syntax Tree (AST) node type constants
|
||||||
|
*
|
||||||
|
* These constants represent tree-sitter AST node types used for code analysis.
|
||||||
|
* Using constants instead of magic strings improves maintainability and prevents typos.
|
||||||
|
*
|
||||||
|
* @see https://tree-sitter.github.io/tree-sitter/
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class and interface declaration node types
|
||||||
|
*/
|
||||||
|
export const AST_CLASS_TYPES = {
|
||||||
|
CLASS_DECLARATION: "class_declaration",
|
||||||
|
INTERFACE_DECLARATION: "interface_declaration",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Function and method node types
|
||||||
|
*/
|
||||||
|
export const AST_FUNCTION_TYPES = {
|
||||||
|
FUNCTION_DECLARATION: "function_declaration",
|
||||||
|
METHOD_DEFINITION: "method_definition",
|
||||||
|
FUNCTION_SIGNATURE: "function_signature",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Variable and parameter node types
|
||||||
|
*/
|
||||||
|
export const AST_VARIABLE_TYPES = {
|
||||||
|
VARIABLE_DECLARATOR: "variable_declarator",
|
||||||
|
REQUIRED_PARAMETER: "required_parameter",
|
||||||
|
OPTIONAL_PARAMETER: "optional_parameter",
|
||||||
|
PUBLIC_FIELD_DEFINITION: "public_field_definition",
|
||||||
|
PROPERTY_SIGNATURE: "property_signature",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Type system node types
|
||||||
|
*/
|
||||||
|
export const AST_TYPE_TYPES = {
|
||||||
|
TYPE_ALIAS_DECLARATION: "type_alias_declaration",
|
||||||
|
UNION_TYPE: "union_type",
|
||||||
|
LITERAL_TYPE: "literal_type",
|
||||||
|
TYPE_ANNOTATION: "type_annotation",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Statement node types
|
||||||
|
*/
|
||||||
|
export const AST_STATEMENT_TYPES = {
|
||||||
|
EXPORT_STATEMENT: "export_statement",
|
||||||
|
IMPORT_STATEMENT: "import_statement",
|
||||||
|
LEXICAL_DECLARATION: "lexical_declaration",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Expression node types
|
||||||
|
*/
|
||||||
|
export const AST_EXPRESSION_TYPES = {
|
||||||
|
CALL_EXPRESSION: "call_expression",
|
||||||
|
AS_EXPRESSION: "as_expression",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Field and property node types
|
||||||
|
*/
|
||||||
|
export const AST_FIELD_TYPES = {
|
||||||
|
FIELD_DEFINITION: "field_definition",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pattern node types
|
||||||
|
*/
|
||||||
|
export const AST_PATTERN_TYPES = {
|
||||||
|
OBJECT_PATTERN: "object_pattern",
|
||||||
|
ARRAY_PATTERN: "array_pattern",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Modifier node types
|
||||||
|
*/
|
||||||
|
export const AST_MODIFIER_TYPES = {
|
||||||
|
READONLY: "readonly",
|
||||||
|
STATIC: "static",
|
||||||
|
CONST: "const",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Special identifier node types
|
||||||
|
*/
|
||||||
|
export const AST_IDENTIFIER_TYPES = {
|
||||||
|
IDENTIFIER: "identifier",
|
||||||
|
TYPE_IDENTIFIER: "type_identifier",
|
||||||
|
PROPERTY_IDENTIFIER: "property_identifier",
|
||||||
|
IMPORT: "import",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Node field names used with childForFieldName()
|
||||||
|
*/
|
||||||
|
export const AST_FIELD_NAMES = {
|
||||||
|
NAME: "name",
|
||||||
|
DECLARATION: "declaration",
|
||||||
|
VALUE: "value",
|
||||||
|
FUNCTION: "function",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* String fragment node type
|
||||||
|
*/
|
||||||
|
export const AST_STRING_TYPES = {
|
||||||
|
STRING_FRAGMENT: "string_fragment",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Common JavaScript timer functions
|
||||||
|
*/
|
||||||
|
export const TIMER_FUNCTIONS = {
|
||||||
|
SET_TIMEOUT: "setTimeout",
|
||||||
|
SET_INTERVAL: "setInterval",
|
||||||
|
} as const
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Value pattern types for pattern matching
|
||||||
|
*/
|
||||||
|
export const VALUE_PATTERN_TYPES = {
|
||||||
|
EMAIL: "email",
|
||||||
|
API_KEY: "api_key",
|
||||||
|
URL: "url",
|
||||||
|
IP_ADDRESS: "ip_address",
|
||||||
|
FILE_PATH: "file_path",
|
||||||
|
DATE: "date",
|
||||||
|
UUID: "uuid",
|
||||||
|
VERSION: "version",
|
||||||
|
JWT: "jwt",
|
||||||
|
MAC_ADDRESS: "mac_address",
|
||||||
|
BASE64: "base64",
|
||||||
|
} as const
|
||||||
@@ -119,3 +119,4 @@ export const VIOLATION_SEVERITY_MAP = {
|
|||||||
} as const
|
} as const
|
||||||
|
|
||||||
export * from "./rules"
|
export * from "./rules"
|
||||||
|
export * from "./ast-node-types"
|
||||||
|
|||||||
@@ -459,7 +459,27 @@ export const CONFIG_KEYWORDS = {
|
|||||||
NETWORK: ["endpoint", "host", "domain", "path", "route"],
|
NETWORK: ["endpoint", "host", "domain", "path", "route"],
|
||||||
DATABASE: ["connection", "database"],
|
DATABASE: ["connection", "database"],
|
||||||
SECURITY: ["config", "secret", "token", "password", "credential"],
|
SECURITY: ["config", "secret", "token", "password", "credential"],
|
||||||
MESSAGES: ["message", "error", "warning", "text"],
|
MESSAGES: [
|
||||||
|
"message",
|
||||||
|
"error",
|
||||||
|
"warning",
|
||||||
|
"text",
|
||||||
|
"description",
|
||||||
|
"suggestion",
|
||||||
|
"violation",
|
||||||
|
"expected",
|
||||||
|
"actual",
|
||||||
|
],
|
||||||
|
TECHNICAL: [
|
||||||
|
"type",
|
||||||
|
"node",
|
||||||
|
"declaration",
|
||||||
|
"definition",
|
||||||
|
"signature",
|
||||||
|
"pattern",
|
||||||
|
"suffix",
|
||||||
|
"prefix",
|
||||||
|
],
|
||||||
} as const
|
} as const
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,19 +1,26 @@
|
|||||||
{
|
{
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"outDir": "./dist",
|
"outDir": "./dist",
|
||||||
"rootDir": "./src",
|
"rootDir": "./src",
|
||||||
"target": "ES2023",
|
"target": "ES2023",
|
||||||
"module": "CommonJS",
|
"module": "CommonJS",
|
||||||
"moduleResolution": "node",
|
"moduleResolution": "node",
|
||||||
"declaration": true,
|
"declaration": true,
|
||||||
"declarationMap": true,
|
"declarationMap": true,
|
||||||
"esModuleInterop": true,
|
"esModuleInterop": true,
|
||||||
"allowSyntheticDefaultImports": true,
|
"allowSyntheticDefaultImports": true,
|
||||||
"strict": true,
|
"strict": true,
|
||||||
"skipLibCheck": true,
|
"skipLibCheck": true,
|
||||||
"sourceMap": true,
|
"sourceMap": true,
|
||||||
"resolveJsonModule": true
|
"resolveJsonModule": true
|
||||||
},
|
},
|
||||||
"include": ["src/**/*"],
|
"include": [
|
||||||
"exclude": ["node_modules", "dist", "**/*.spec.ts", "**/*.test.ts"]
|
"src/**/*"
|
||||||
|
],
|
||||||
|
"exclude": [
|
||||||
|
"node_modules",
|
||||||
|
"dist",
|
||||||
|
"**/*.spec.ts",
|
||||||
|
"**/*.test.ts"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
13
packages/ipuaro/.gitignore
vendored
Normal file
13
packages/ipuaro/.gitignore
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Build output
|
||||||
|
dist/
|
||||||
|
*.tsbuildinfo
|
||||||
|
|
||||||
|
# Dependencies
|
||||||
|
node_modules/
|
||||||
|
|
||||||
|
# Test coverage
|
||||||
|
coverage/
|
||||||
|
|
||||||
|
# Logs
|
||||||
|
*.log
|
||||||
|
npm-debug.log*
|
||||||
38
packages/ipuaro/.npmignore
Normal file
38
packages/ipuaro/.npmignore
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# Source files (only publish dist/)
|
||||||
|
src/
|
||||||
|
*.ts
|
||||||
|
!*.d.ts
|
||||||
|
|
||||||
|
# Build artifacts
|
||||||
|
tsconfig.json
|
||||||
|
tsconfig.*.json
|
||||||
|
tsconfig.tsbuildinfo
|
||||||
|
*.tsbuildinfo
|
||||||
|
|
||||||
|
# Tests
|
||||||
|
**/*.spec.ts
|
||||||
|
**/*.test.ts
|
||||||
|
__tests__/
|
||||||
|
coverage/
|
||||||
|
|
||||||
|
# Development
|
||||||
|
node_modules/
|
||||||
|
.env
|
||||||
|
.env.*
|
||||||
|
|
||||||
|
# IDE
|
||||||
|
.vscode/
|
||||||
|
.idea/
|
||||||
|
*.swp
|
||||||
|
*.swo
|
||||||
|
|
||||||
|
# Git
|
||||||
|
.git/
|
||||||
|
.gitignore
|
||||||
|
|
||||||
|
# Other
|
||||||
|
*.log
|
||||||
|
npm-debug.log*
|
||||||
|
yarn-debug.log*
|
||||||
|
yarn-error.log*
|
||||||
|
.DS_Store
|
||||||
566
packages/ipuaro/ARCHITECTURE.md
Normal file
566
packages/ipuaro/ARCHITECTURE.md
Normal file
@@ -0,0 +1,566 @@
|
|||||||
|
# ipuaro Architecture
|
||||||
|
|
||||||
|
This document describes the architecture, design decisions, and implementation details of ipuaro.
|
||||||
|
|
||||||
|
## Table of Contents
|
||||||
|
|
||||||
|
- [Overview](#overview)
|
||||||
|
- [Clean Architecture](#clean-architecture)
|
||||||
|
- [Layer Details](#layer-details)
|
||||||
|
- [Data Flow](#data-flow)
|
||||||
|
- [Key Design Decisions](#key-design-decisions)
|
||||||
|
- [Tech Stack](#tech-stack)
|
||||||
|
- [Performance Considerations](#performance-considerations)
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
ipuaro is a local AI agent for codebase operations built on Clean Architecture principles. It enables "infinite" context feeling through lazy loading and AST-based code understanding.
|
||||||
|
|
||||||
|
### Core Concepts
|
||||||
|
|
||||||
|
1. **Lazy Loading**: Load code on-demand via tools, not all at once
|
||||||
|
2. **AST-Based Understanding**: Parse and index code structure for fast lookups
|
||||||
|
3. **100% Local**: Ollama LLM + Redis storage, no cloud dependencies
|
||||||
|
4. **Session Persistence**: Resume conversations across restarts
|
||||||
|
5. **Tool-Based Interface**: LLM accesses code through 18 specialized tools
|
||||||
|
|
||||||
|
## Clean Architecture
|
||||||
|
|
||||||
|
The project follows Clean Architecture with strict dependency rules:
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─────────────────────────────────────────────────┐
|
||||||
|
│ TUI Layer │ ← Ink/React components
|
||||||
|
│ (Framework) │
|
||||||
|
├─────────────────────────────────────────────────┤
|
||||||
|
│ CLI Layer │ ← Commander.js entry
|
||||||
|
│ (Interface) │
|
||||||
|
├─────────────────────────────────────────────────┤
|
||||||
|
│ Infrastructure Layer │ ← External adapters
|
||||||
|
│ (Storage, LLM, Indexer, Tools, Security) │
|
||||||
|
├─────────────────────────────────────────────────┤
|
||||||
|
│ Application Layer │ ← Use cases & DTOs
|
||||||
|
│ (StartSession, HandleMessage, etc.) │
|
||||||
|
├─────────────────────────────────────────────────┤
|
||||||
|
│ Domain Layer │ ← Business logic
|
||||||
|
│ (Entities, Value Objects, Service Interfaces) │
|
||||||
|
└─────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
**Dependency Rule**: Outer layers depend on inner layers, never the reverse.
|
||||||
|
|
||||||
|
## Layer Details
|
||||||
|
|
||||||
|
### Domain Layer (Core Business Logic)
|
||||||
|
|
||||||
|
**Location**: `src/domain/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Define business entities and value objects
|
||||||
|
- Declare service interfaces (ports)
|
||||||
|
- No external dependencies (pure TypeScript)
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
domain/
|
||||||
|
├── entities/
|
||||||
|
│ ├── Session.ts # Session entity with history and stats
|
||||||
|
│ └── Project.ts # Project entity with metadata
|
||||||
|
├── value-objects/
|
||||||
|
│ ├── FileData.ts # File content with hash and size
|
||||||
|
│ ├── FileAST.ts # Parsed AST structure
|
||||||
|
│ ├── FileMeta.ts # Complexity, dependencies, hub detection
|
||||||
|
│ ├── ChatMessage.ts # Message with role, content, tool calls
|
||||||
|
│ ├── ToolCall.ts # Tool invocation with parameters
|
||||||
|
│ ├── ToolResult.ts # Tool execution result
|
||||||
|
│ └── UndoEntry.ts # File change for undo stack
|
||||||
|
├── services/
|
||||||
|
│ ├── IStorage.ts # Storage interface (port)
|
||||||
|
│ ├── ILLMClient.ts # LLM interface (port)
|
||||||
|
│ ├── ITool.ts # Tool interface (port)
|
||||||
|
│ └── IIndexer.ts # Indexer interface (port)
|
||||||
|
└── constants/
|
||||||
|
└── index.ts # Domain constants
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key Design**:
|
||||||
|
- Value objects are immutable
|
||||||
|
- Entities have identity and lifecycle
|
||||||
|
- Interfaces define contracts, not implementations
|
||||||
|
|
||||||
|
### Application Layer (Use Cases)
|
||||||
|
|
||||||
|
**Location**: `src/application/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Orchestrate domain logic
|
||||||
|
- Implement use cases (application-specific business rules)
|
||||||
|
- Define DTOs for data transfer
|
||||||
|
- Coordinate between domain and infrastructure
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
application/
|
||||||
|
├── use-cases/
|
||||||
|
│ ├── StartSession.ts # Initialize or load session
|
||||||
|
│ ├── HandleMessage.ts # Main message orchestrator
|
||||||
|
│ ├── IndexProject.ts # Project indexing workflow
|
||||||
|
│ ├── ExecuteTool.ts # Tool execution with validation
|
||||||
|
│ └── UndoChange.ts # Revert file changes
|
||||||
|
├── dtos/
|
||||||
|
│ ├── SessionDto.ts # Session data transfer object
|
||||||
|
│ ├── MessageDto.ts # Message DTO
|
||||||
|
│ └── ToolCallDto.ts # Tool call DTO
|
||||||
|
├── mappers/
|
||||||
|
│ └── SessionMapper.ts # Domain ↔ DTO conversion
|
||||||
|
└── interfaces/
|
||||||
|
└── IToolRegistry.ts # Tool registry interface
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key Use Cases**:
|
||||||
|
|
||||||
|
1. **StartSession**: Creates new session or loads latest
|
||||||
|
2. **HandleMessage**: Main flow (LLM → Tools → Response)
|
||||||
|
3. **IndexProject**: Scan → Parse → Analyze → Store
|
||||||
|
4. **UndoChange**: Restore file from undo stack
|
||||||
|
|
||||||
|
### Infrastructure Layer (External Implementations)
|
||||||
|
|
||||||
|
**Location**: `src/infrastructure/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Implement domain interfaces
|
||||||
|
- Handle external systems (Redis, Ollama, filesystem)
|
||||||
|
- Provide concrete tool implementations
|
||||||
|
- Security and validation
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
infrastructure/
|
||||||
|
├── storage/
|
||||||
|
│ ├── RedisClient.ts # Redis connection wrapper
|
||||||
|
│ ├── RedisStorage.ts # IStorage implementation
|
||||||
|
│ └── schema.ts # Redis key schema
|
||||||
|
├── llm/
|
||||||
|
│ ├── OllamaClient.ts # ILLMClient implementation
|
||||||
|
│ ├── prompts.ts # System prompts
|
||||||
|
│ └── ResponseParser.ts # Parse XML tool calls
|
||||||
|
├── indexer/
|
||||||
|
│ ├── FileScanner.ts # Recursive file scanning
|
||||||
|
│ ├── ASTParser.ts # tree-sitter parsing
|
||||||
|
│ ├── MetaAnalyzer.ts # Complexity and dependencies
|
||||||
|
│ ├── IndexBuilder.ts # Symbol index + deps graph
|
||||||
|
│ └── Watchdog.ts # File watching (chokidar)
|
||||||
|
├── tools/ # 18 tool implementations
|
||||||
|
│ ├── registry.ts
|
||||||
|
│ ├── read/ # GetLines, GetFunction, GetClass, GetStructure
|
||||||
|
│ ├── edit/ # EditLines, CreateFile, DeleteFile
|
||||||
|
│ ├── search/ # FindReferences, FindDefinition
|
||||||
|
│ ├── analysis/ # GetDependencies, GetDependents, GetComplexity, GetTodos
|
||||||
|
│ ├── git/ # GitStatus, GitDiff, GitCommit
|
||||||
|
│ └── run/ # RunCommand, RunTests
|
||||||
|
└── security/
|
||||||
|
├── Blacklist.ts # Dangerous commands
|
||||||
|
├── Whitelist.ts # Safe commands
|
||||||
|
└── PathValidator.ts # Path traversal prevention
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key Implementations**:
|
||||||
|
|
||||||
|
1. **RedisStorage**: Uses Redis hashes for files/AST/meta, lists for undo
|
||||||
|
2. **OllamaClient**: HTTP API client with tool calling support
|
||||||
|
3. **ASTParser**: tree-sitter for TS/JS/TSX/JSX parsing
|
||||||
|
4. **ToolRegistry**: Manages tool lifecycle and execution
|
||||||
|
|
||||||
|
### TUI Layer (Terminal UI)
|
||||||
|
|
||||||
|
**Location**: `src/tui/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Render terminal UI with Ink (React for terminal)
|
||||||
|
- Handle user input and hotkeys
|
||||||
|
- Display chat history and status
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
tui/
|
||||||
|
├── App.tsx # Main app shell
|
||||||
|
├── components/
|
||||||
|
│ ├── StatusBar.tsx # Top status bar
|
||||||
|
│ ├── Chat.tsx # Message history display
|
||||||
|
│ ├── Input.tsx # User input with history
|
||||||
|
│ ├── DiffView.tsx # Inline diff display
|
||||||
|
│ ├── ConfirmDialog.tsx # Edit confirmation
|
||||||
|
│ ├── ErrorDialog.tsx # Error handling
|
||||||
|
│ └── Progress.tsx # Progress bar (indexing)
|
||||||
|
└── hooks/
|
||||||
|
├── useSession.ts # Session state management
|
||||||
|
├── useHotkeys.ts # Keyboard shortcuts
|
||||||
|
└── useCommands.ts # Slash command handling
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key Features**:
|
||||||
|
|
||||||
|
- Real-time status updates (context usage, session time)
|
||||||
|
- Input history with ↑/↓ navigation
|
||||||
|
- Hotkeys: Ctrl+C (interrupt), Ctrl+D (exit), Ctrl+Z (undo)
|
||||||
|
- Diff preview for edits with confirmation
|
||||||
|
- Error recovery with retry/skip/abort options
|
||||||
|
|
||||||
|
### CLI Layer (Entry Point)
|
||||||
|
|
||||||
|
**Location**: `src/cli/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Command-line interface with Commander.js
|
||||||
|
- Dependency injection and initialization
|
||||||
|
- Onboarding checks (Redis, Ollama, model)
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
cli/
|
||||||
|
├── index.ts # Commander.js setup
|
||||||
|
└── commands/
|
||||||
|
├── start.ts # Start TUI (default command)
|
||||||
|
├── init.ts # Create .ipuaro.json config
|
||||||
|
└── index-cmd.ts # Index-only command
|
||||||
|
```
|
||||||
|
|
||||||
|
**Commands**:
|
||||||
|
|
||||||
|
1. `ipuaro [path]` - Start TUI in directory
|
||||||
|
2. `ipuaro init` - Create config file
|
||||||
|
3. `ipuaro index` - Index without TUI
|
||||||
|
|
||||||
|
### Shared Module
|
||||||
|
|
||||||
|
**Location**: `src/shared/`
|
||||||
|
|
||||||
|
**Responsibilities**:
|
||||||
|
- Cross-cutting concerns
|
||||||
|
- Configuration management
|
||||||
|
- Error handling
|
||||||
|
- Utility functions
|
||||||
|
|
||||||
|
**Components**:
|
||||||
|
|
||||||
|
```
|
||||||
|
shared/
|
||||||
|
├── types/
|
||||||
|
│ └── index.ts # Shared TypeScript types
|
||||||
|
├── constants/
|
||||||
|
│ ├── config.ts # Config schema and loader
|
||||||
|
│ └── messages.ts # User-facing messages
|
||||||
|
├── utils/
|
||||||
|
│ ├── hash.ts # MD5 hashing
|
||||||
|
│ └── tokens.ts # Token estimation
|
||||||
|
└── errors/
|
||||||
|
├── IpuaroError.ts # Custom error class
|
||||||
|
└── ErrorHandler.ts # Error handling service
|
||||||
|
```
|
||||||
|
|
||||||
|
## Data Flow
|
||||||
|
|
||||||
|
### 1. Startup Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
CLI Entry (bin/ipuaro.js)
|
||||||
|
↓
|
||||||
|
Commander.js parses arguments
|
||||||
|
↓
|
||||||
|
Onboarding checks (Redis, Ollama, Model)
|
||||||
|
↓
|
||||||
|
Initialize dependencies:
|
||||||
|
- RedisClient connects
|
||||||
|
- RedisStorage initialized
|
||||||
|
- OllamaClient created
|
||||||
|
- ToolRegistry with 18 tools
|
||||||
|
↓
|
||||||
|
StartSession use case:
|
||||||
|
- Load latest session or create new
|
||||||
|
- Initialize ContextManager
|
||||||
|
↓
|
||||||
|
Launch TUI (App.tsx)
|
||||||
|
- Render StatusBar, Chat, Input
|
||||||
|
- Set up hotkeys
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Message Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
User types message in Input.tsx
|
||||||
|
↓
|
||||||
|
useSession.handleMessage()
|
||||||
|
↓
|
||||||
|
HandleMessage use case:
|
||||||
|
1. Add user message to history
|
||||||
|
2. Build context (system prompt + structure + AST)
|
||||||
|
3. Send to OllamaClient.chat()
|
||||||
|
4. Parse tool calls from response
|
||||||
|
5. For each tool call:
|
||||||
|
- If requiresConfirmation: show ConfirmDialog
|
||||||
|
- Execute tool via ToolRegistry
|
||||||
|
- Collect results
|
||||||
|
6. If tool results: goto step 3 (continue loop)
|
||||||
|
7. Add assistant response to history
|
||||||
|
8. Update session in Redis
|
||||||
|
↓
|
||||||
|
Display response in Chat.tsx
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Edit Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
LLM calls edit_lines tool
|
||||||
|
↓
|
||||||
|
ToolRegistry.execute()
|
||||||
|
↓
|
||||||
|
EditLinesTool.execute():
|
||||||
|
1. Validate path (PathValidator)
|
||||||
|
2. Check hash conflict
|
||||||
|
3. Build diff
|
||||||
|
↓
|
||||||
|
ConfirmDialog shows diff
|
||||||
|
↓
|
||||||
|
User chooses:
|
||||||
|
- Apply: Continue
|
||||||
|
- Cancel: Return error to LLM
|
||||||
|
- Edit: Manual edit (future)
|
||||||
|
↓
|
||||||
|
If Apply:
|
||||||
|
1. Create UndoEntry
|
||||||
|
2. Push to undo stack (Redis list)
|
||||||
|
3. Write to filesystem
|
||||||
|
4. Update RedisStorage (lines, hash, AST, meta)
|
||||||
|
↓
|
||||||
|
Return success to LLM
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Indexing Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
FileScanner.scan()
|
||||||
|
- Recursively walk directory
|
||||||
|
- Filter via .gitignore + ignore patterns
|
||||||
|
- Detect binary files (skip)
|
||||||
|
↓
|
||||||
|
For each file:
|
||||||
|
ASTParser.parse()
|
||||||
|
- tree-sitter parse
|
||||||
|
- Extract imports, exports, functions, classes
|
||||||
|
↓
|
||||||
|
MetaAnalyzer.analyze()
|
||||||
|
- Calculate complexity (LOC, nesting, cyclomatic)
|
||||||
|
- Resolve dependencies (imports → file paths)
|
||||||
|
- Detect hubs (>5 dependents)
|
||||||
|
↓
|
||||||
|
RedisStorage.setFile(), .setAST(), .setMeta()
|
||||||
|
↓
|
||||||
|
IndexBuilder.buildSymbolIndex()
|
||||||
|
- Map symbol names → locations
|
||||||
|
↓
|
||||||
|
IndexBuilder.buildDepsGraph()
|
||||||
|
- Build bidirectional import graph
|
||||||
|
↓
|
||||||
|
Store indexes in Redis
|
||||||
|
↓
|
||||||
|
Watchdog.start()
|
||||||
|
- Watch for file changes
|
||||||
|
- On change: Re-parse and update indexes
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Design Decisions
|
||||||
|
|
||||||
|
### 1. Why Redis?
|
||||||
|
|
||||||
|
**Pros**:
|
||||||
|
- Fast in-memory access for frequent reads
|
||||||
|
- AOF persistence (append-only file) for durability
|
||||||
|
- Native support for hashes, lists, sets
|
||||||
|
- Simple key-value model fits our needs
|
||||||
|
- Excellent for session data
|
||||||
|
|
||||||
|
**Alternatives considered**:
|
||||||
|
- SQLite: Slower, overkill for our use case
|
||||||
|
- JSON files: No concurrent access, slow for large data
|
||||||
|
- PostgreSQL: Too heavy, we don't need relational features
|
||||||
|
|
||||||
|
### 2. Why tree-sitter?
|
||||||
|
|
||||||
|
**Pros**:
|
||||||
|
- Incremental parsing (fast re-parsing)
|
||||||
|
- Error-tolerant (works with syntax errors)
|
||||||
|
- Multi-language support
|
||||||
|
- Used by GitHub, Neovim, Atom
|
||||||
|
|
||||||
|
**Alternatives considered**:
|
||||||
|
- TypeScript Compiler API: TS-only, not error-tolerant
|
||||||
|
- Babel: JS-focused, heavy dependencies
|
||||||
|
- Regex: Fragile, inaccurate
|
||||||
|
|
||||||
|
### 3. Why Ollama?
|
||||||
|
|
||||||
|
**Pros**:
|
||||||
|
- 100% local, no API keys
|
||||||
|
- Easy installation (brew install ollama)
|
||||||
|
- Good model selection (qwen2.5-coder, deepseek-coder)
|
||||||
|
- Tool calling support
|
||||||
|
|
||||||
|
**Alternatives considered**:
|
||||||
|
- OpenAI: Costs money, sends code to cloud
|
||||||
|
- Anthropic Claude: Same concerns as OpenAI
|
||||||
|
- llama.cpp: Lower level, requires more setup
|
||||||
|
|
||||||
|
Planned: Support for OpenAI/Anthropic in v1.2.0 as optional providers.
|
||||||
|
|
||||||
|
### 4. Why XML for Tool Calls?
|
||||||
|
|
||||||
|
**Pros**:
|
||||||
|
- LLMs trained on XML (very common format)
|
||||||
|
- Self-describing (parameter names in tags)
|
||||||
|
- Easy to parse with regex
|
||||||
|
- More reliable than JSON for smaller models
|
||||||
|
|
||||||
|
**Alternatives considered**:
|
||||||
|
- JSON: Smaller models struggle with exact JSON syntax
|
||||||
|
- Function calling API: Not all models support it
|
||||||
|
|
||||||
|
### 5. Why Clean Architecture?
|
||||||
|
|
||||||
|
**Pros**:
|
||||||
|
- Testability (domain has no external dependencies)
|
||||||
|
- Flexibility (easy to swap Redis for SQLite)
|
||||||
|
- Maintainability (clear separation of concerns)
|
||||||
|
- Scalability (layers can evolve independently)
|
||||||
|
|
||||||
|
**Cost**: More files and indirection, but worth it for long-term maintenance.
|
||||||
|
|
||||||
|
### 6. Why Lazy Loading Instead of RAG?
|
||||||
|
|
||||||
|
**RAG (Retrieval Augmented Generation)**:
|
||||||
|
- Pre-computes embeddings
|
||||||
|
- Searches embeddings for relevant chunks
|
||||||
|
- Adds chunks to context
|
||||||
|
|
||||||
|
**Lazy Loading (our approach)**:
|
||||||
|
- Agent requests specific code via tools
|
||||||
|
- More precise control over what's loaded
|
||||||
|
- Simpler implementation (no embeddings)
|
||||||
|
- Works with any LLM (no embedding model needed)
|
||||||
|
|
||||||
|
**Trade-off**: RAG might be better for semantic search ("find error handling code"), but tool-based approach gives agent explicit control.
|
||||||
|
|
||||||
|
## Tech Stack
|
||||||
|
|
||||||
|
### Core Dependencies
|
||||||
|
|
||||||
|
| Package | Purpose | Why? |
|
||||||
|
|---------|---------|------|
|
||||||
|
| `ioredis` | Redis client | Most popular, excellent TypeScript support |
|
||||||
|
| `ollama` | LLM client | Official SDK, simple API |
|
||||||
|
| `tree-sitter` | AST parsing | Fast, error-tolerant, multi-language |
|
||||||
|
| `tree-sitter-typescript` | TS/TSX parser | Official TypeScript grammar |
|
||||||
|
| `tree-sitter-javascript` | JS/JSX parser | Official JavaScript grammar |
|
||||||
|
| `ink` | Terminal UI | React for terminal, declarative |
|
||||||
|
| `ink-text-input` | Input component | Maintained ink component |
|
||||||
|
| `react` | UI framework | Required by Ink |
|
||||||
|
| `simple-git` | Git operations | Simple API, well-tested |
|
||||||
|
| `chokidar` | File watching | Cross-platform, reliable |
|
||||||
|
| `commander` | CLI framework | Industry standard |
|
||||||
|
| `zod` | Validation | Type-safe validation |
|
||||||
|
| `globby` | File globbing | ESM-native, .gitignore support |
|
||||||
|
|
||||||
|
### Development Dependencies
|
||||||
|
|
||||||
|
| Package | Purpose |
|
||||||
|
|---------|---------|
|
||||||
|
| `vitest` | Testing framework |
|
||||||
|
| `@vitest/coverage-v8` | Coverage reporting |
|
||||||
|
| `@vitest/ui` | Interactive test UI |
|
||||||
|
| `tsup` | TypeScript bundler |
|
||||||
|
| `typescript` | Type checking |
|
||||||
|
|
||||||
|
## Performance Considerations
|
||||||
|
|
||||||
|
### 1. Indexing Performance
|
||||||
|
|
||||||
|
**Problem**: Large projects (10k+ files) take time to index.
|
||||||
|
|
||||||
|
**Optimizations**:
|
||||||
|
- Incremental parsing with tree-sitter (only changed files)
|
||||||
|
- Parallel parsing (planned for v1.1.0)
|
||||||
|
- Ignore patterns (.gitignore, node_modules, dist)
|
||||||
|
- Skip binary files early
|
||||||
|
|
||||||
|
**Current**: ~1000 files/second on M1 Mac
|
||||||
|
|
||||||
|
### 2. Memory Usage
|
||||||
|
|
||||||
|
**Problem**: Entire AST in memory could be 100s of MB.
|
||||||
|
|
||||||
|
**Optimizations**:
|
||||||
|
- Store ASTs in Redis (out of Node.js heap)
|
||||||
|
- Load ASTs on-demand from Redis
|
||||||
|
- Lazy-load file content (not stored in session)
|
||||||
|
|
||||||
|
**Current**: ~200MB for 5000 files indexed
|
||||||
|
|
||||||
|
### 3. Context Window Management
|
||||||
|
|
||||||
|
**Problem**: 128k token context window fills up.
|
||||||
|
|
||||||
|
**Optimizations**:
|
||||||
|
- Auto-compression at 80% usage
|
||||||
|
- LLM summarizes old messages
|
||||||
|
- Remove tool results older than 5 messages
|
||||||
|
- Only load structure + metadata initially (~10k tokens)
|
||||||
|
|
||||||
|
### 4. Redis Performance
|
||||||
|
|
||||||
|
**Problem**: Redis is single-threaded.
|
||||||
|
|
||||||
|
**Optimizations**:
|
||||||
|
- Pipeline commands where possible
|
||||||
|
- Use hashes for related data (fewer keys)
|
||||||
|
- AOF every second (not every command)
|
||||||
|
- Keep undo stack limited (10 entries)
|
||||||
|
|
||||||
|
**Current**: <1ms latency for most operations
|
||||||
|
|
||||||
|
### 5. Tool Execution
|
||||||
|
|
||||||
|
**Problem**: Tool execution could block LLM.
|
||||||
|
|
||||||
|
**Current**: Synchronous execution (simpler)
|
||||||
|
|
||||||
|
**Future**: Async tool execution with progress callbacks (v1.1.0)
|
||||||
|
|
||||||
|
## Future Improvements
|
||||||
|
|
||||||
|
### v1.1.0 - Performance
|
||||||
|
- Parallel AST parsing
|
||||||
|
- Incremental indexing (only changed files)
|
||||||
|
- Response caching
|
||||||
|
- Stream LLM responses
|
||||||
|
|
||||||
|
### v1.2.0 - Features
|
||||||
|
- Multiple file edits in one operation
|
||||||
|
- Batch operations
|
||||||
|
- Custom prompt templates
|
||||||
|
- OpenAI/Anthropic provider support
|
||||||
|
|
||||||
|
### v1.3.0 - Extensibility
|
||||||
|
- Plugin system for custom tools
|
||||||
|
- LSP integration
|
||||||
|
- Multi-language support (Python, Go, Rust)
|
||||||
|
- Custom indexing rules
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Last Updated**: 2025-12-01
|
||||||
|
**Version**: 0.16.0
|
||||||
855
packages/ipuaro/CHANGELOG.md
Normal file
855
packages/ipuaro/CHANGELOG.md
Normal file
@@ -0,0 +1,855 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [0.17.0] - 2025-12-01 - Documentation Complete
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Complete README.md Documentation**
|
||||||
|
- Updated status to Release Candidate (v0.16.0 → v1.0.0)
|
||||||
|
- Comprehensive tools reference with 18 tools and usage examples
|
||||||
|
- Slash commands documentation (8 commands)
|
||||||
|
- Hotkeys reference (5 shortcuts)
|
||||||
|
- Programmatic API examples with real code
|
||||||
|
- Enhanced "How It Works" section with 5 detailed subsections
|
||||||
|
- Troubleshooting guide with 6 common issues and solutions
|
||||||
|
- FAQ section with 8 frequently asked questions
|
||||||
|
- Updated development status showing all completed milestones
|
||||||
|
|
||||||
|
- **ARCHITECTURE.md (New File)**
|
||||||
|
- Complete architecture overview with Clean Architecture principles
|
||||||
|
- Detailed layer breakdown (Domain, Application, Infrastructure, TUI, CLI)
|
||||||
|
- Data flow diagrams for startup, messages, edits, and indexing
|
||||||
|
- Key design decisions with rationale (Redis, tree-sitter, Ollama, XML, etc.)
|
||||||
|
- Complete tech stack documentation
|
||||||
|
- Performance considerations and optimizations
|
||||||
|
- Future roadmap (v1.1.0 - v1.3.0)
|
||||||
|
|
||||||
|
- **TOOLS.md (New File)**
|
||||||
|
- Complete reference for all 18 tools organized by category
|
||||||
|
- TypeScript signatures for each tool
|
||||||
|
- Parameter descriptions and return types
|
||||||
|
- Multiple usage examples per tool
|
||||||
|
- Example outputs and use cases
|
||||||
|
- Error cases and handling
|
||||||
|
- Tool confirmation flow explanation
|
||||||
|
- Best practices and common workflow patterns
|
||||||
|
- Refactoring, bug fix, and feature development flows
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- **README.md Improvements**
|
||||||
|
- Features table now shows all tools implemented ✅
|
||||||
|
- Terminal UI section enhanced with better examples
|
||||||
|
- Security section expanded with three-layer security model
|
||||||
|
- Development status updated to show 1420 tests with 98% coverage
|
||||||
|
|
||||||
|
### Documentation Statistics
|
||||||
|
|
||||||
|
- Total documentation: ~2500 lines across 3 files
|
||||||
|
- Tools documented: 18/18 (100%)
|
||||||
|
- Slash commands: 8/8 (100%)
|
||||||
|
- Code examples: 50+ throughout documentation
|
||||||
|
- Troubleshooting entries: 6 issues covered
|
||||||
|
- FAQ answers: 8 questions answered
|
||||||
|
|
||||||
|
### Technical Details
|
||||||
|
|
||||||
|
- No code changes (documentation-only release)
|
||||||
|
- All 1420 tests passing
|
||||||
|
- Coverage maintained at 97.59%
|
||||||
|
- Zero ESLint errors/warnings
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.16.0] - 2025-12-01 - Error Handling
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Error Handling Matrix (0.16.2)**
|
||||||
|
- `ERROR_MATRIX`: Defines behavior for each error type
|
||||||
|
- Per-type options: retry, skip, abort, confirm, regenerate
|
||||||
|
- Per-type defaults and recoverability settings
|
||||||
|
- Comprehensive error type support: redis, parse, llm, file, command, conflict, validation, timeout, unknown
|
||||||
|
|
||||||
|
- **IpuaroError Enhancements (0.16.1)**
|
||||||
|
- `ErrorOption` type: New type for available recovery options
|
||||||
|
- `ErrorMeta` interface: Error metadata with type, recoverable flag, options, and default
|
||||||
|
- `options` property: Available recovery options from matrix
|
||||||
|
- `defaultOption` property: Default option for the error type
|
||||||
|
- `context` property: Optional context data for debugging
|
||||||
|
- `getMeta()`: Returns full error metadata
|
||||||
|
- `hasOption()`: Checks if an option is available
|
||||||
|
- `toDisplayString()`: Formatted error message with suggestion
|
||||||
|
- New factory methods: `llmTimeout()`, `fileNotFound()`, `commandBlacklisted()`, `unknown()`
|
||||||
|
|
||||||
|
- **ErrorHandler Service**
|
||||||
|
- `handle()`: Async error handling with user callback
|
||||||
|
- `handleSync()`: Sync error handling with defaults
|
||||||
|
- `wrap()`: Wraps async functions with error handling
|
||||||
|
- `withRetry()`: Wraps functions with automatic retry logic
|
||||||
|
- `resetRetries()`: Resets retry counters
|
||||||
|
- `getRetryCount()`: Gets current retry count
|
||||||
|
- `isMaxRetriesExceeded()`: Checks if max retries reached
|
||||||
|
- Configurable options: maxRetries, autoSkipParseErrors, autoRetryLLMErrors
|
||||||
|
|
||||||
|
- **Utility Functions**
|
||||||
|
- `getErrorOptions()`: Get available options for error type
|
||||||
|
- `getDefaultErrorOption()`: Get default option for error type
|
||||||
|
- `isRecoverableError()`: Check if error type is recoverable
|
||||||
|
- `toIpuaroError()`: Convert any error to IpuaroError
|
||||||
|
- `createErrorHandler()`: Factory function for ErrorHandler
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- **IpuaroError Constructor**
|
||||||
|
- New signature: `(type, message, options?)` with options object
|
||||||
|
- Options include: recoverable, suggestion, context
|
||||||
|
- Matrix-based defaults for all properties
|
||||||
|
|
||||||
|
- **ErrorChoice → ErrorOption**
|
||||||
|
- `ErrorChoice` type deprecated in shared/types
|
||||||
|
- Use `ErrorOption` from shared/errors instead
|
||||||
|
- Updated HandleMessage and useSession to use ErrorOption
|
||||||
|
|
||||||
|
### Technical Details
|
||||||
|
|
||||||
|
- Total tests: 1420 (59 new tests)
|
||||||
|
- Coverage: 97.59% maintained
|
||||||
|
- New test files: ErrorHandler.test.ts
|
||||||
|
- Updated test file: IpuaroError.test.ts
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.15.0] - 2025-12-01 - CLI Entry Point
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Onboarding Module (0.15.3)**
|
||||||
|
- `checkRedis()`: Validates Redis connection with helpful error messages
|
||||||
|
- `checkOllama()`: Validates Ollama availability with install instructions
|
||||||
|
- `checkModel()`: Checks if LLM model is available, offers to pull if missing
|
||||||
|
- `checkProjectSize()`: Warns if project has >10K files
|
||||||
|
- `runOnboarding()`: Runs all pre-flight checks before starting
|
||||||
|
|
||||||
|
- **Start Command (0.15.1)**
|
||||||
|
- Full TUI startup with dependency injection
|
||||||
|
- Integrates onboarding checks before launch
|
||||||
|
- Interactive model pull prompt if model missing
|
||||||
|
- Redis, storage, LLM, and tools initialization
|
||||||
|
- Clean shutdown with disconnect on exit
|
||||||
|
|
||||||
|
- **Init Command (0.15.1)**
|
||||||
|
- Creates `.ipuaro.json` configuration file
|
||||||
|
- Default template with Redis, LLM, and edit settings
|
||||||
|
- `--force` option to overwrite existing config
|
||||||
|
- Helpful output showing available options
|
||||||
|
|
||||||
|
- **Index Command (0.15.1)**
|
||||||
|
- Standalone project indexing without TUI
|
||||||
|
- File scanning with progress output
|
||||||
|
- AST parsing with error handling
|
||||||
|
- Metadata analysis and storage
|
||||||
|
- Symbol index and dependency graph building
|
||||||
|
- Duration and statistics reporting
|
||||||
|
|
||||||
|
- **CLI Options (0.15.2)**
|
||||||
|
- `--auto-apply`: Enable auto-apply mode for edits
|
||||||
|
- `--model <name>`: Override LLM model
|
||||||
|
- `--help`: Show help
|
||||||
|
- `--version`: Show version
|
||||||
|
|
||||||
|
- **Tools Setup Helper**
|
||||||
|
- `registerAllTools()`: Registers all 18 tools with the registry
|
||||||
|
- Clean separation from CLI logic
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- **CLI Architecture**
|
||||||
|
- Refactored from placeholder to full implementation
|
||||||
|
- Commands in separate modules under `src/cli/commands/`
|
||||||
|
- Dynamic version from package.json
|
||||||
|
- `start` command is now default (runs with `ipuaro` or `ipuaro start`)
|
||||||
|
|
||||||
|
### Technical Details
|
||||||
|
|
||||||
|
- Total tests: 1372 (29 new CLI tests)
|
||||||
|
- Coverage: ~98% maintained (CLI excluded from coverage thresholds)
|
||||||
|
- New test files: onboarding.test.ts, init.test.ts, tools-setup.test.ts
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.14.0] - 2025-12-01 - Commands
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **useCommands Hook**
|
||||||
|
- New hook for handling slash commands in TUI
|
||||||
|
- `parseCommand()`: Parses command input into name and arguments
|
||||||
|
- `isCommand()`: Checks if input is a slash command
|
||||||
|
- `executeCommand()`: Executes command and returns result
|
||||||
|
- `getCommands()`: Returns all available command definitions
|
||||||
|
|
||||||
|
- **8 Slash Commands**
|
||||||
|
- `/help` - Shows all commands and hotkeys
|
||||||
|
- `/clear` - Clears chat history (keeps session)
|
||||||
|
- `/undo` - Reverts last file change from undo stack
|
||||||
|
- `/sessions [list|load|delete] [id]` - Manage sessions
|
||||||
|
- `/status` - Shows system status (LLM, context, stats)
|
||||||
|
- `/reindex` - Forces full project reindexation
|
||||||
|
- `/eval` - LLM self-check for hallucinations
|
||||||
|
- `/auto-apply [on|off]` - Toggle auto-apply mode
|
||||||
|
|
||||||
|
- **Command Result Display**
|
||||||
|
- Visual feedback box for command results
|
||||||
|
- Green border for success, red for errors
|
||||||
|
- Auto-clear after 5 seconds
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- **App.tsx Integration**
|
||||||
|
- Added `useCommands` hook integration
|
||||||
|
- Command handling in `handleSubmit`
|
||||||
|
- New state for `autoApply` and `commandResult`
|
||||||
|
- Reindex placeholder action
|
||||||
|
|
||||||
|
### Technical Details
|
||||||
|
|
||||||
|
- Total tests: 1343 (38 new useCommands tests)
|
||||||
|
- Test coverage: ~98% maintained
|
||||||
|
- Modular command factory functions for maintainability
|
||||||
|
- Commands extracted to separate functions to stay under line limits
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.13.0] - 2025-12-01 - Security
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **PathValidator Utility (0.13.3)**
|
||||||
|
- Centralized path validation for all file operations
|
||||||
|
- Prevents path traversal attacks (`..`, `~`)
|
||||||
|
- Validates paths are within project root
|
||||||
|
- Sync (`validateSync`) and async (`validate`) validation methods
|
||||||
|
- Quick check method (`isWithin`) for simple validations
|
||||||
|
- Resolution methods (`resolve`, `relativize`, `resolveOrThrow`)
|
||||||
|
- Detailed validation results with status and reason
|
||||||
|
- Options for file existence, directory/file type checks
|
||||||
|
|
||||||
|
- **Security Module**
|
||||||
|
- New `infrastructure/security` module
|
||||||
|
- Exports: `PathValidator`, `createPathValidator`, `validatePath`
|
||||||
|
- Type exports: `PathValidationResult`, `PathValidationStatus`, `PathValidatorOptions`
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- **Refactored All File Tools to Use PathValidator**
|
||||||
|
- GetLinesTool: Uses PathValidator for path validation
|
||||||
|
- GetFunctionTool: Uses PathValidator for path validation
|
||||||
|
- GetClassTool: Uses PathValidator for path validation
|
||||||
|
- GetStructureTool: Uses PathValidator for path validation
|
||||||
|
- EditLinesTool: Uses PathValidator for path validation
|
||||||
|
- CreateFileTool: Uses PathValidator for path validation
|
||||||
|
- DeleteFileTool: Uses PathValidator for path validation
|
||||||
|
|
||||||
|
- **Improved Error Messages**
|
||||||
|
- More specific error messages from PathValidator
|
||||||
|
- "Path contains traversal patterns" for `..` attempts
|
||||||
|
- "Path is outside project root" for absolute paths outside project
|
||||||
|
- "Path is empty" for empty/whitespace paths
|
||||||
|
|
||||||
|
### Technical Details
|
||||||
|
|
||||||
|
- Total tests: 1305 (51 new PathValidator tests)
|
||||||
|
- Test coverage: ~98% maintained
|
||||||
|
- No breaking changes to existing tool APIs
|
||||||
|
- Security validation is now consistent across all 7 file tools
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.12.0] - 2025-12-01 - TUI Advanced
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **DiffView Component (0.12.1)**
|
||||||
|
- Inline diff display with green (added) and red (removed) highlighting
|
||||||
|
- Header with file path and line range: `┌─── path (lines X-Y) ───┐`
|
||||||
|
- Line numbers with proper padding
|
||||||
|
- Stats footer showing additions and deletions count
|
||||||
|
|
||||||
|
- **ConfirmDialog Component (0.12.2)**
|
||||||
|
- Confirmation dialog with [Y] Apply / [N] Cancel / [E] Edit options
|
||||||
|
- Optional diff preview integration
|
||||||
|
- Keyboard input handling (Y/N/E keys, Escape)
|
||||||
|
- Visual selection feedback
|
||||||
|
|
||||||
|
- **ErrorDialog Component (0.12.3)**
|
||||||
|
- Error dialog with [R] Retry / [S] Skip / [A] Abort options
|
||||||
|
- Recoverable vs non-recoverable error handling
|
||||||
|
- Disabled buttons for non-recoverable errors
|
||||||
|
- Keyboard input with Escape support
|
||||||
|
|
||||||
|
- **Progress Component (0.12.4)**
|
||||||
|
- Progress bar display: `[=====> ] 45% (120/267 files)`
|
||||||
|
- Color-coded progress (cyan < 50%, yellow < 100%, green = 100%)
|
||||||
|
- Configurable width
|
||||||
|
- Label support for context
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 1254 (unchanged - TUI components excluded from coverage)
|
||||||
|
- TUI layer now has 8 components + 2 hooks
|
||||||
|
- All v0.12.0 roadmap items complete
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.11.0] - 2025-12-01 - TUI Basic
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **TUI Types (0.11.0)**
|
||||||
|
- `TuiStatus`: Status type for TUI display (ready, thinking, tool_call, awaiting_confirmation, error)
|
||||||
|
- `BranchInfo`: Git branch information (name, isDetached)
|
||||||
|
- `AppProps`: Main app component props
|
||||||
|
- `StatusBarData`: Status bar display data
|
||||||
|
|
||||||
|
- **App Shell (0.11.1)**
|
||||||
|
- Main TUI App component with React/Ink
|
||||||
|
- Session initialization and state management
|
||||||
|
- Loading and error screens
|
||||||
|
- Hotkey integration (Ctrl+C, Ctrl+D, Ctrl+Z)
|
||||||
|
- Session time tracking
|
||||||
|
|
||||||
|
- **StatusBar Component (0.11.2)**
|
||||||
|
- Displays: `[ipuaro] [ctx: 12%] [project] [branch] [time] status`
|
||||||
|
- Context usage with color warning at >80%
|
||||||
|
- Git branch with detached HEAD support
|
||||||
|
- Status indicator with colors (ready=green, thinking=yellow, error=red)
|
||||||
|
|
||||||
|
- **Chat Component (0.11.3)**
|
||||||
|
- Message history display with role-based styling
|
||||||
|
- User messages (green), Assistant messages (cyan), System messages (gray)
|
||||||
|
- Tool call display with parameters
|
||||||
|
- Response stats: time, tokens, tool calls
|
||||||
|
- Thinking indicator during LLM processing
|
||||||
|
|
||||||
|
- **Input Component (0.11.4)**
|
||||||
|
- Prompt with `> ` prefix
|
||||||
|
- History navigation with ↑/↓ arrow keys
|
||||||
|
- Saved input restoration when navigating past history
|
||||||
|
- Disabled state during processing
|
||||||
|
- Custom placeholder support
|
||||||
|
|
||||||
|
- **useSession Hook (0.11.5)**
|
||||||
|
- Session state management with React hooks
|
||||||
|
- Message handling integration
|
||||||
|
- Status tracking (ready, thinking, tool_call, error)
|
||||||
|
- Undo support
|
||||||
|
- Clear history functionality
|
||||||
|
- Abort/interrupt support
|
||||||
|
|
||||||
|
- **useHotkeys Hook (0.11.6)**
|
||||||
|
- Ctrl+C: Interrupt (1st), Exit (2nd within 1s)
|
||||||
|
- Ctrl+D: Exit with session save
|
||||||
|
- Ctrl+Z: Undo last change
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 1254 (was 1174)
|
||||||
|
- Coverage: 97.75% lines, 92.22% branches
|
||||||
|
- TUI layer now has 4 components + 2 hooks
|
||||||
|
- TUI excluded from coverage thresholds (requires React testing setup)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.10.0] - 2025-12-01 - Session Management
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **ISessionStorage (0.10.1)**
|
||||||
|
- Session storage service interface
|
||||||
|
- Methods: saveSession, loadSession, deleteSession, listSessions
|
||||||
|
- Undo stack management: pushUndoEntry, popUndoEntry, getUndoStack
|
||||||
|
- Session lifecycle: getLatestSession, sessionExists, touchSession
|
||||||
|
|
||||||
|
- **RedisSessionStorage (0.10.2)**
|
||||||
|
- Redis implementation of ISessionStorage
|
||||||
|
- Session data in Redis hashes (project, history, context, stats)
|
||||||
|
- Undo stack in Redis lists (max 10 entries)
|
||||||
|
- Sessions list for project-wide queries
|
||||||
|
- 22 unit tests
|
||||||
|
|
||||||
|
- **ContextManager (0.10.3)**
|
||||||
|
- Manages context window token budget
|
||||||
|
- File context tracking with addToContext/removeFromContext
|
||||||
|
- Usage monitoring: getUsage, getAvailableTokens, getRemainingTokens
|
||||||
|
- Auto-compression at 80% threshold via LLM summarization
|
||||||
|
- Context state export for session persistence
|
||||||
|
- 23 unit tests
|
||||||
|
|
||||||
|
- **StartSession (0.10.4)**
|
||||||
|
- Use case for session initialization
|
||||||
|
- Creates new session or loads latest for project
|
||||||
|
- Optional sessionId for specific session loading
|
||||||
|
- forceNew option to always create fresh session
|
||||||
|
- 10 unit tests
|
||||||
|
|
||||||
|
- **HandleMessage (0.10.5)**
|
||||||
|
- Main orchestrator use case for message handling
|
||||||
|
- LLM interaction with tool calling support
|
||||||
|
- Edit confirmation flow with diff preview
|
||||||
|
- Error handling with retry/skip/abort choices
|
||||||
|
- Status tracking: ready, thinking, tool_call, awaiting_confirmation, error
|
||||||
|
- Event callbacks: onMessage, onToolCall, onToolResult, onConfirmation, onError
|
||||||
|
- 21 unit tests
|
||||||
|
|
||||||
|
- **UndoChange (0.10.6)**
|
||||||
|
- Use case for reverting file changes
|
||||||
|
- Validates file hasn't changed since edit
|
||||||
|
- Restores original content from undo entry
|
||||||
|
- Updates storage after successful undo
|
||||||
|
- 12 unit tests
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 1174 (was 1086)
|
||||||
|
- Coverage: 97.73% lines, 92.21% branches
|
||||||
|
- Application layer now has 4 use cases implemented
|
||||||
|
- All planned session management features complete
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.9.0] - 2025-12-01 - Git & Run Tools
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **GitStatusTool (0.9.1)**
|
||||||
|
- `git_status()`: Get current git repository status
|
||||||
|
- Returns branch name, tracking branch, ahead/behind counts
|
||||||
|
- Lists staged, modified, untracked, and conflicted files
|
||||||
|
- Detects detached HEAD state
|
||||||
|
- 29 unit tests
|
||||||
|
|
||||||
|
- **GitDiffTool (0.9.2)**
|
||||||
|
- `git_diff(path?, staged?)`: Get uncommitted changes
|
||||||
|
- Returns file-by-file diff summary with insertions/deletions
|
||||||
|
- Full diff text output
|
||||||
|
- Optional path filter for specific files/directories
|
||||||
|
- Staged-only mode (`--cached`)
|
||||||
|
- Handles binary files
|
||||||
|
- 25 unit tests
|
||||||
|
|
||||||
|
- **GitCommitTool (0.9.3)**
|
||||||
|
- `git_commit(message, files?)`: Create a git commit
|
||||||
|
- Requires user confirmation before commit
|
||||||
|
- Optional file staging before commit
|
||||||
|
- Returns commit hash, summary, author info
|
||||||
|
- Validates staged files exist
|
||||||
|
- 26 unit tests
|
||||||
|
|
||||||
|
- **CommandSecurity**
|
||||||
|
- Security module for shell command validation
|
||||||
|
- Blacklist: dangerous commands always blocked (rm -rf, sudo, git push --force, etc.)
|
||||||
|
- Whitelist: safe commands allowed without confirmation (npm, node, git status, etc.)
|
||||||
|
- Classification: `allowed`, `blocked`, `requires_confirmation`
|
||||||
|
- Git subcommand awareness (safe read operations vs write operations)
|
||||||
|
- Extensible via `addToBlacklist()` and `addToWhitelist()`
|
||||||
|
- 65 unit tests
|
||||||
|
|
||||||
|
- **RunCommandTool (0.9.4)**
|
||||||
|
- `run_command(command, timeout?)`: Execute shell commands
|
||||||
|
- Security-first design with blacklist/whitelist checks
|
||||||
|
- Blocked commands rejected immediately
|
||||||
|
- Unknown commands require user confirmation
|
||||||
|
- Configurable timeout (default 30s, max 10min)
|
||||||
|
- Output truncation for large outputs
|
||||||
|
- Returns stdout, stderr, exit code, duration
|
||||||
|
- 40 unit tests
|
||||||
|
|
||||||
|
- **RunTestsTool (0.9.5)**
|
||||||
|
- `run_tests(path?, filter?, watch?)`: Run project tests
|
||||||
|
- Auto-detects test runner: vitest, jest, mocha, npm test
|
||||||
|
- Detects by config files and package.json dependencies
|
||||||
|
- Path filtering for specific test files/directories
|
||||||
|
- Name pattern filtering (`-t` / `--grep`)
|
||||||
|
- Watch mode support
|
||||||
|
- Returns pass/fail status, exit code, output
|
||||||
|
- 48 unit tests
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 1086 (was 853)
|
||||||
|
- Coverage: 98.08% lines, 92.21% branches
|
||||||
|
- Git tools category now fully implemented (3/3 tools)
|
||||||
|
- Run tools category now fully implemented (2/2 tools)
|
||||||
|
- All 18 planned tools now implemented
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.8.0] - 2025-12-01 - Analysis Tools
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **GetDependenciesTool (0.8.1)**
|
||||||
|
- `get_dependencies(path)`: Get files that a specific file imports
|
||||||
|
- Returns internal dependencies resolved to file paths
|
||||||
|
- Includes metadata: exists, isHub, isEntryPoint, fileType
|
||||||
|
- Sorted by path for consistent output
|
||||||
|
- 23 unit tests
|
||||||
|
|
||||||
|
- **GetDependentsTool (0.8.2)**
|
||||||
|
- `get_dependents(path)`: Get files that import a specific file
|
||||||
|
- Shows hub status for the analyzed file
|
||||||
|
- Includes metadata: isHub, isEntryPoint, fileType, complexityScore
|
||||||
|
- Sorted by path for consistent output
|
||||||
|
- 24 unit tests
|
||||||
|
|
||||||
|
- **GetComplexityTool (0.8.3)**
|
||||||
|
- `get_complexity(path?, limit?)`: Get complexity metrics for files
|
||||||
|
- Returns LOC, nesting depth, cyclomatic complexity, and overall score
|
||||||
|
- Summary statistics: high/medium/low complexity counts
|
||||||
|
- Average score calculation
|
||||||
|
- Sorted by complexity score descending
|
||||||
|
- Default limit of 20 files
|
||||||
|
- 31 unit tests
|
||||||
|
|
||||||
|
- **GetTodosTool (0.8.4)**
|
||||||
|
- `get_todos(path?, type?)`: Find TODO/FIXME/HACK/XXX/BUG/NOTE comments
|
||||||
|
- Supports multiple comment styles: `//`, `/* */`, `#`
|
||||||
|
- Filter by type (case-insensitive)
|
||||||
|
- Counts by type
|
||||||
|
- Includes line context
|
||||||
|
- 42 unit tests
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 853 (was 733)
|
||||||
|
- Coverage: 97.91% lines, 92.32% branches
|
||||||
|
- Analysis tools category now fully implemented (4/4 tools)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.7.0] - 2025-12-01 - Search Tools
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **FindReferencesTool (0.7.1)**
|
||||||
|
- `find_references(symbol, path?)`: Find all usages of a symbol across the codebase
|
||||||
|
- Word boundary matching with support for special characters (e.g., `$value`)
|
||||||
|
- Context lines around each reference (1 line before/after)
|
||||||
|
- Marks definition vs usage references
|
||||||
|
- Optional path filter for scoped searches
|
||||||
|
- Returns: path, line, column, context, isDefinition
|
||||||
|
- 37 unit tests
|
||||||
|
|
||||||
|
- **FindDefinitionTool (0.7.2)**
|
||||||
|
- `find_definition(symbol)`: Find where a symbol is defined
|
||||||
|
- Uses SymbolIndex for fast lookups
|
||||||
|
- Returns multiple definitions (for overloads/re-exports)
|
||||||
|
- Suggests similar symbols when not found (Levenshtein distance)
|
||||||
|
- Context lines around definition (2 lines before/after)
|
||||||
|
- Returns: path, line, type, context
|
||||||
|
- 32 unit tests
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 733 (was 664)
|
||||||
|
- Coverage: 97.71% lines, 91.84% branches
|
||||||
|
- Search tools category now fully implemented (2/2 tools)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.6.0] - 2025-12-01 - Edit Tools
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **EditLinesTool (0.6.1)**
|
||||||
|
- `edit_lines(path, start, end, content)`: Replace lines in a file
|
||||||
|
- Hash conflict detection (prevents editing externally modified files)
|
||||||
|
- Confirmation required with diff preview
|
||||||
|
- Automatic storage update after edit
|
||||||
|
- 35 unit tests
|
||||||
|
|
||||||
|
- **CreateFileTool (0.6.2)**
|
||||||
|
- `create_file(path, content)`: Create new file with content
|
||||||
|
- Automatic directory creation if needed
|
||||||
|
- Path validation (must be within project root)
|
||||||
|
- Prevents overwriting existing files
|
||||||
|
- Confirmation required before creation
|
||||||
|
- 26 unit tests
|
||||||
|
|
||||||
|
- **DeleteFileTool (0.6.3)**
|
||||||
|
- `delete_file(path)`: Delete file from filesystem and storage
|
||||||
|
- Removes file data, AST, and meta from Redis
|
||||||
|
- Confirmation required with file content preview
|
||||||
|
- 20 unit tests
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 664 (was 540)
|
||||||
|
- Coverage: 97.71% lines, 91.89% branches
|
||||||
|
- Coverage thresholds: 95% lines/functions/statements, 90% branches
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.5.0] - 2025-12-01 - Read Tools
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **ToolRegistry (0.5.1)**
|
||||||
|
- `IToolRegistry` implementation for managing tool lifecycle
|
||||||
|
- Methods: `register()`, `unregister()`, `get()`, `getAll()`, `getByCategory()`, `has()`
|
||||||
|
- `execute()`: Tool execution with validation and confirmation flow
|
||||||
|
- `getToolDefinitions()`: Convert tools to LLM-compatible JSON Schema format
|
||||||
|
- Helper methods: `getConfirmationTools()`, `getSafeTools()`, `getNames()`, `clear()`
|
||||||
|
- 34 unit tests
|
||||||
|
|
||||||
|
- **GetLinesTool (0.5.2)**
|
||||||
|
- `get_lines(path, start?, end?)`: Read file lines with line numbers
|
||||||
|
- Reads from Redis storage or filesystem fallback
|
||||||
|
- Line number formatting with proper padding
|
||||||
|
- Path validation (must be within project root)
|
||||||
|
- 25 unit tests
|
||||||
|
|
||||||
|
- **GetFunctionTool (0.5.3)**
|
||||||
|
- `get_function(path, name)`: Get function source by name
|
||||||
|
- Uses AST to find exact line range
|
||||||
|
- Returns metadata: isAsync, isExported, params, returnType
|
||||||
|
- Lists available functions if target not found
|
||||||
|
- 20 unit tests
|
||||||
|
|
||||||
|
- **GetClassTool (0.5.4)**
|
||||||
|
- `get_class(path, name)`: Get class source by name
|
||||||
|
- Uses AST to find exact line range
|
||||||
|
- Returns metadata: isAbstract, extends, implements, methods, properties
|
||||||
|
- Lists available classes if target not found
|
||||||
|
- 19 unit tests
|
||||||
|
|
||||||
|
- **GetStructureTool (0.5.5)**
|
||||||
|
- `get_structure(path?, depth?)`: Get directory tree
|
||||||
|
- ASCII tree output with 📁/📄 icons
|
||||||
|
- Filters: node_modules, .git, dist, coverage, etc.
|
||||||
|
- Directories sorted before files
|
||||||
|
- Stats: directory and file counts
|
||||||
|
- 23 unit tests
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 540 (was 419)
|
||||||
|
- Coverage: 96%+
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.4.0] - 2025-11-30 - LLM Integration
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **OllamaClient (0.4.1)**
|
||||||
|
- Full `ILLMClient` implementation for Ollama SDK
|
||||||
|
- Chat completion with tool/function calling support
|
||||||
|
- Token counting via estimation (Ollama has no tokenizer API)
|
||||||
|
- Model management: `pullModel()`, `hasModel()`, `listModels()`
|
||||||
|
- Connection status check: `isAvailable()`
|
||||||
|
- Request abortion support: `abort()`
|
||||||
|
- Error handling with `IpuaroError` for connection and model errors
|
||||||
|
- 21 unit tests
|
||||||
|
|
||||||
|
- **System Prompt & Context Builder (0.4.2)**
|
||||||
|
- `SYSTEM_PROMPT`: Comprehensive agent instructions with tool descriptions
|
||||||
|
- `buildInitialContext()`: Generates compact project overview from structure and ASTs
|
||||||
|
- `buildFileContext()`: Detailed file context with imports, exports, functions, classes
|
||||||
|
- `truncateContext()`: Token-aware context truncation
|
||||||
|
- Hub/entry point/complexity flags in file summaries
|
||||||
|
- 17 unit tests
|
||||||
|
|
||||||
|
- **Tool Definitions (0.4.3)**
|
||||||
|
- 18 tool definitions across 6 categories:
|
||||||
|
- Read: `get_lines`, `get_function`, `get_class`, `get_structure`
|
||||||
|
- Edit: `edit_lines`, `create_file`, `delete_file`
|
||||||
|
- Search: `find_references`, `find_definition`
|
||||||
|
- Analysis: `get_dependencies`, `get_dependents`, `get_complexity`, `get_todos`
|
||||||
|
- Git: `git_status`, `git_diff`, `git_commit`
|
||||||
|
- Run: `run_command`, `run_tests`
|
||||||
|
- Category groupings: `READ_TOOLS`, `EDIT_TOOLS`, etc.
|
||||||
|
- `CONFIRMATION_TOOLS` set for tools requiring user approval
|
||||||
|
- Helper functions: `requiresConfirmation()`, `getToolDef()`, `getToolsByCategory()`
|
||||||
|
- 39 unit tests
|
||||||
|
|
||||||
|
- **Response Parser (0.4.4)**
|
||||||
|
- XML tool call parsing: `<tool_call name="...">...</tool_call>`
|
||||||
|
- Parameter extraction from XML elements
|
||||||
|
- Type coercion: boolean, number, null, JSON arrays/objects
|
||||||
|
- `extractThinking()`: Extracts `<thinking>...</thinking>` blocks
|
||||||
|
- `hasToolCalls()`: Quick check for tool call presence
|
||||||
|
- `validateToolCallParams()`: Parameter validation against required list
|
||||||
|
- `formatToolCallsAsXml()`: Tool calls to XML for prompt injection
|
||||||
|
- 21 unit tests
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Total tests: 419 (was 321)
|
||||||
|
- Coverage: 96.38%
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.3.1] - 2025-11-30
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **VERSION export** - Package version is now exported from index.ts, automatically read from package.json via `createRequire`
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- 🔄 **Refactored ASTParser** - Reduced complexity and nesting depth:
|
||||||
|
- Extracted `extractClassHeritage()`, `parseHeritageClause()`, `findTypeIdentifier()`, `collectImplements()` helper methods
|
||||||
|
- Max nesting depth reduced from 5 to 4
|
||||||
|
- 🔄 **Refactored RedisStorage** - Removed unnecessary type parameter from `parseJSON()` method
|
||||||
|
|
||||||
|
### Quality
|
||||||
|
|
||||||
|
- ✅ **Zero lint warnings** - All ESLint warnings resolved
|
||||||
|
- ✅ **All 321 tests pass**
|
||||||
|
|
||||||
|
## [0.3.0] - 2025-11-30 - Indexer
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **FileScanner (0.3.1)**
|
||||||
|
- Recursive directory scanning with async generator
|
||||||
|
- `.gitignore` support via `globby` (replaced `ignore` package for ESM compatibility)
|
||||||
|
- Filters: binary files, node_modules, dist, default ignore patterns
|
||||||
|
- Progress callback for UI integration
|
||||||
|
- `isTextFile()` and `readFileContent()` static utilities
|
||||||
|
- 22 unit tests
|
||||||
|
|
||||||
|
- **ASTParser (0.3.2)**
|
||||||
|
- Tree-sitter based parsing for TS, TSX, JS, JSX
|
||||||
|
- Extracts: imports, exports, functions, classes, interfaces, type aliases
|
||||||
|
- Import classification: internal, external, builtin (using `node:module` builtinModules)
|
||||||
|
- Graceful error handling with partial AST on syntax errors
|
||||||
|
- 30 unit tests
|
||||||
|
|
||||||
|
- **MetaAnalyzer (0.3.3)**
|
||||||
|
- Complexity metrics: LOC (excluding comments), nesting depth, cyclomatic complexity, overall score
|
||||||
|
- Dependency resolution: internal imports resolved to absolute file paths
|
||||||
|
- Dependents calculation: reverse dependency lookup across all project files
|
||||||
|
- File type classification: source, test, config, types, unknown
|
||||||
|
- Entry point detection: index files, main/app/cli/server patterns, files with no dependents
|
||||||
|
- Hub detection: files with >5 dependents
|
||||||
|
- Batch analysis via `analyzeAll()` method
|
||||||
|
- 54 unit tests
|
||||||
|
|
||||||
|
- **IndexBuilder (0.3.4)**
|
||||||
|
- SymbolIndex: maps symbol names to locations for quick lookup (functions, classes, interfaces, types, variables)
|
||||||
|
- Qualified names for class methods: `ClassName.methodName`
|
||||||
|
- DepsGraph: bidirectional import mapping (`imports` and `importedBy`)
|
||||||
|
- Import resolution: handles `.js` → `.ts`, index.ts, directory imports
|
||||||
|
- `findSymbol()`: exact symbol lookup
|
||||||
|
- `searchSymbols()`: regex-based symbol search
|
||||||
|
- `findCircularDependencies()`: detect import cycles
|
||||||
|
- `getStats()`: comprehensive index statistics (symbols by type, hubs, orphans)
|
||||||
|
- 35 unit tests
|
||||||
|
|
||||||
|
- **Watchdog (0.3.5)**
|
||||||
|
- File watching with chokidar (native events + polling fallback)
|
||||||
|
- Debounced change handling (configurable, default 500ms)
|
||||||
|
- Event types: add, change, unlink
|
||||||
|
- Extension filtering (default: SUPPORTED_EXTENSIONS)
|
||||||
|
- Ignore patterns (default: DEFAULT_IGNORE_PATTERNS)
|
||||||
|
- Multiple callback support
|
||||||
|
- `flushAll()` for immediate processing
|
||||||
|
- Silent error handling for stability
|
||||||
|
- 21 unit tests
|
||||||
|
|
||||||
|
- **Infrastructure Constants**
|
||||||
|
- `tree-sitter-types.ts`: NodeType and FieldName constants for tree-sitter
|
||||||
|
- Eliminates magic strings in ASTParser
|
||||||
|
|
||||||
|
- **Dependencies**
|
||||||
|
- Added `globby` for ESM-native file globbing
|
||||||
|
- Removed `ignore` package (CJS incompatibility with nodenext)
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Refactored ASTParser to use constants instead of magic strings
|
||||||
|
- Total tests: 321
|
||||||
|
- Coverage: 96.43%
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.2.0] - 2025-01-30
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Redis Storage (0.2.x milestone)**
|
||||||
|
- RedisClient: connection wrapper with AOF persistence configuration
|
||||||
|
- RedisStorage: full IStorage implementation with Redis hashes
|
||||||
|
- Redis key schema: project files, AST, meta, indexes, config
|
||||||
|
- Session keys schema: data, undo stack, sessions list
|
||||||
|
- `generateProjectName()` utility for consistent project naming
|
||||||
|
|
||||||
|
- **Infrastructure Layer**
|
||||||
|
- `src/infrastructure/storage/` module
|
||||||
|
- Exports via `src/infrastructure/index.ts`
|
||||||
|
|
||||||
|
- **Testing**
|
||||||
|
- 68 new unit tests for Redis module
|
||||||
|
- 159 total tests
|
||||||
|
- 99% code coverage maintained
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Updated ESLint config: `@typescript-eslint/no-unnecessary-type-parameters` set to warn
|
||||||
|
|
||||||
|
### Notes
|
||||||
|
|
||||||
|
Redis Storage milestone complete. Next: 0.3.0 - Indexer (FileScanner, AST Parser, Watchdog)
|
||||||
|
|
||||||
|
## [0.1.0] - 2025-01-29
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Project Setup**
|
||||||
|
- package.json with all dependencies (ink, ioredis, tree-sitter, ollama, etc.)
|
||||||
|
- tsconfig.json for ESM + React JSX
|
||||||
|
- tsup.config.ts for bundling
|
||||||
|
- vitest.config.ts with 80% coverage threshold
|
||||||
|
- CLI entry point (bin/ipuaro.js)
|
||||||
|
|
||||||
|
- **Domain Layer**
|
||||||
|
- Entities: Session, Project
|
||||||
|
- Value Objects: FileData, FileAST, FileMeta, ChatMessage, ToolCall, ToolResult, UndoEntry
|
||||||
|
- Service Interfaces: IStorage, ILLMClient, ITool, IIndexer
|
||||||
|
- Constants: supported extensions, ignore patterns, context limits
|
||||||
|
|
||||||
|
- **Application Layer**
|
||||||
|
- IToolRegistry interface
|
||||||
|
- Placeholder structure for use cases and DTOs
|
||||||
|
|
||||||
|
- **Shared Module**
|
||||||
|
- Config schema with Zod validation
|
||||||
|
- Config loader (default.json + .ipuaro.json)
|
||||||
|
- IpuaroError class with typed errors
|
||||||
|
- Utility functions: md5 hash, token estimation
|
||||||
|
- Result type for error handling
|
||||||
|
|
||||||
|
- **CLI**
|
||||||
|
- Basic commands: start, init, index (placeholders)
|
||||||
|
- Commander.js integration
|
||||||
|
|
||||||
|
- **Testing**
|
||||||
|
- 91 unit tests
|
||||||
|
- 100% code coverage
|
||||||
|
|
||||||
|
### Notes
|
||||||
|
|
||||||
|
This is the foundation release. The following features are planned for upcoming versions:
|
||||||
|
- 0.2.0: Redis Storage
|
||||||
|
- 0.3.0: Indexer
|
||||||
|
- 0.4.0: LLM Integration
|
||||||
|
- 0.5.0+: Tools implementation
|
||||||
|
- 0.10.0+: TUI and session management
|
||||||
21
packages/ipuaro/LICENSE
Normal file
21
packages/ipuaro/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2025 Fozilbek Samiyev
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
662
packages/ipuaro/README.md
Normal file
662
packages/ipuaro/README.md
Normal file
@@ -0,0 +1,662 @@
|
|||||||
|
# @samiyev/ipuaro 🎩
|
||||||
|
|
||||||
|
**Local AI Agent for Codebase Operations**
|
||||||
|
|
||||||
|
"Infinite" context feeling through lazy loading - work with your entire codebase using local LLM.
|
||||||
|
|
||||||
|
[](https://www.npmjs.com/package/@samiyev/ipuaro)
|
||||||
|
[](https://opensource.org/licenses/MIT)
|
||||||
|
|
||||||
|
> **Status:** 🎉 Release Candidate (v0.16.0 → v1.0.0)
|
||||||
|
>
|
||||||
|
> All core features complete. Production-ready release coming soon.
|
||||||
|
|
||||||
|
## Vision
|
||||||
|
|
||||||
|
Work with codebases of any size using local AI:
|
||||||
|
- 📂 **Lazy Loading**: Load code on-demand, not all at once
|
||||||
|
- 🧠 **Smart Context**: AST-based understanding of your code structure
|
||||||
|
- 🔒 **100% Local**: Your code never leaves your machine
|
||||||
|
- ⚡ **Fast**: Redis persistence + tree-sitter parsing
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
### 18 LLM Tools (All Implemented ✅)
|
||||||
|
|
||||||
|
| Category | Tools | Description |
|
||||||
|
|----------|-------|-------------|
|
||||||
|
| **Read** | `get_lines`, `get_function`, `get_class`, `get_structure` | Read code without loading everything into context |
|
||||||
|
| **Edit** | `edit_lines`, `create_file`, `delete_file` | Make changes with confirmation and undo support |
|
||||||
|
| **Search** | `find_references`, `find_definition` | Find symbol definitions and usages across codebase |
|
||||||
|
| **Analysis** | `get_dependencies`, `get_dependents`, `get_complexity`, `get_todos` | Analyze code structure, complexity, and TODOs |
|
||||||
|
| **Git** | `git_status`, `git_diff`, `git_commit` | Git operations with safety checks |
|
||||||
|
| **Run** | `run_command`, `run_tests` | Execute commands and tests with security validation |
|
||||||
|
|
||||||
|
See [Tools Documentation](#tools-reference) below for detailed usage examples.
|
||||||
|
|
||||||
|
### Terminal UI
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─ ipuaro ──────────────────────────────────────────────────┐
|
||||||
|
│ [ctx: 12%] [project: myapp] [main] [47m] ✓ Ready │
|
||||||
|
├───────────────────────────────────────────────────────────┤
|
||||||
|
│ You: How does the authentication flow work? │
|
||||||
|
│ │
|
||||||
|
│ Assistant: Let me analyze the auth module... │
|
||||||
|
│ [get_structure src/auth/] │
|
||||||
|
│ [get_function src/auth/service.ts login] │
|
||||||
|
│ │
|
||||||
|
│ The authentication flow works as follows: │
|
||||||
|
│ 1. User calls POST /auth/login │
|
||||||
|
│ 2. AuthService.login() validates credentials... │
|
||||||
|
│ │
|
||||||
|
│ ⏱ 3.2s │ 1,247 tokens │ 2 tool calls │
|
||||||
|
├───────────────────────────────────────────────────────────┤
|
||||||
|
│ > _ │
|
||||||
|
└───────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### Slash Commands
|
||||||
|
|
||||||
|
Control your session with built-in commands:
|
||||||
|
|
||||||
|
| Command | Description |
|
||||||
|
|---------|-------------|
|
||||||
|
| `/help` | Show all commands and hotkeys |
|
||||||
|
| `/clear` | Clear chat history (keeps session) |
|
||||||
|
| `/undo` | Revert last file change from undo stack |
|
||||||
|
| `/sessions [list\|load\|delete] [id]` | Manage sessions |
|
||||||
|
| `/status` | Show system status (LLM, context, stats) |
|
||||||
|
| `/reindex` | Force full project reindexation |
|
||||||
|
| `/eval` | LLM self-check for hallucinations |
|
||||||
|
| `/auto-apply [on\|off]` | Toggle auto-apply mode for edits |
|
||||||
|
|
||||||
|
### Hotkeys
|
||||||
|
|
||||||
|
| Hotkey | Action |
|
||||||
|
|--------|--------|
|
||||||
|
| `Ctrl+C` | Interrupt generation (1st press) / Exit (2nd press within 1s) |
|
||||||
|
| `Ctrl+D` | Exit and save session |
|
||||||
|
| `Ctrl+Z` | Undo last file change |
|
||||||
|
| `↑` / `↓` | Navigate input history |
|
||||||
|
| `Tab` | Path autocomplete (coming soon) |
|
||||||
|
|
||||||
|
### Key Capabilities
|
||||||
|
|
||||||
|
🔍 **Smart Code Understanding**
|
||||||
|
- tree-sitter AST parsing (TypeScript, JavaScript)
|
||||||
|
- Symbol index for fast lookups
|
||||||
|
- Dependency graph analysis
|
||||||
|
|
||||||
|
💾 **Persistent Sessions**
|
||||||
|
- Redis storage with AOF persistence
|
||||||
|
- Session history across restarts
|
||||||
|
- Undo stack for file changes
|
||||||
|
|
||||||
|
🛡️ **Security**
|
||||||
|
- Command blacklist (dangerous operations blocked)
|
||||||
|
- Command whitelist (safe commands auto-approved)
|
||||||
|
- Path validation (no access outside project)
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm install @samiyev/ipuaro
|
||||||
|
# or
|
||||||
|
pnpm add @samiyev/ipuaro
|
||||||
|
```
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
- **Node.js** >= 20.0.0
|
||||||
|
- **Redis** (for persistence)
|
||||||
|
- **Ollama** (for local LLM inference)
|
||||||
|
|
||||||
|
### Setup Ollama
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install Ollama (macOS)
|
||||||
|
brew install ollama
|
||||||
|
|
||||||
|
# Start Ollama
|
||||||
|
ollama serve
|
||||||
|
|
||||||
|
# Pull recommended model
|
||||||
|
ollama pull qwen2.5-coder:7b-instruct
|
||||||
|
```
|
||||||
|
|
||||||
|
### Setup Redis
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install Redis (macOS)
|
||||||
|
brew install redis
|
||||||
|
|
||||||
|
# Start Redis with persistence
|
||||||
|
redis-server --appendonly yes
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start ipuaro in current directory
|
||||||
|
ipuaro
|
||||||
|
|
||||||
|
# Start in specific directory
|
||||||
|
ipuaro /path/to/project
|
||||||
|
|
||||||
|
# With custom model
|
||||||
|
ipuaro --model qwen2.5-coder:32b-instruct
|
||||||
|
|
||||||
|
# With auto-apply mode (skip edit confirmations)
|
||||||
|
ipuaro --auto-apply
|
||||||
|
```
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
| Command | Description |
|
||||||
|
|---------|-------------|
|
||||||
|
| `ipuaro [path]` | Start TUI in directory |
|
||||||
|
| `ipuaro init` | Create `.ipuaro.json` config |
|
||||||
|
| `ipuaro index` | Index project without TUI |
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
Create `.ipuaro.json` in your project root:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"redis": {
|
||||||
|
"host": "localhost",
|
||||||
|
"port": 6379
|
||||||
|
},
|
||||||
|
"llm": {
|
||||||
|
"model": "qwen2.5-coder:7b-instruct",
|
||||||
|
"temperature": 0.1
|
||||||
|
},
|
||||||
|
"project": {
|
||||||
|
"ignorePatterns": ["node_modules", "dist", ".git"]
|
||||||
|
},
|
||||||
|
"edit": {
|
||||||
|
"autoApply": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
Clean Architecture with clear separation:
|
||||||
|
|
||||||
|
```
|
||||||
|
@samiyev/ipuaro/
|
||||||
|
├── domain/ # Business logic (no dependencies)
|
||||||
|
│ ├── entities/ # Session, Project
|
||||||
|
│ ├── value-objects/ # FileData, FileAST, ChatMessage, etc.
|
||||||
|
│ └── services/ # IStorage, ILLMClient, ITool, IIndexer
|
||||||
|
├── application/ # Use cases & orchestration
|
||||||
|
│ ├── use-cases/ # StartSession, HandleMessage, etc.
|
||||||
|
│ └── interfaces/ # IToolRegistry
|
||||||
|
├── infrastructure/ # External implementations
|
||||||
|
│ ├── storage/ # Redis client & storage
|
||||||
|
│ ├── llm/ # Ollama client & prompts
|
||||||
|
│ ├── indexer/ # File scanner, AST parser
|
||||||
|
│ └── tools/ # 18 tool implementations
|
||||||
|
├── tui/ # Terminal UI (Ink/React)
|
||||||
|
│ └── components/ # StatusBar, Chat, Input, etc.
|
||||||
|
├── cli/ # CLI entry point
|
||||||
|
└── shared/ # Config, errors, utils
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development Status
|
||||||
|
|
||||||
|
### ✅ Completed (v0.1.0 - v0.16.0)
|
||||||
|
|
||||||
|
- [x] **v0.1.0 - v0.4.0**: Foundation (domain, storage, indexer, LLM integration)
|
||||||
|
- [x] **v0.5.0 - v0.9.0**: All 18 tools implemented
|
||||||
|
- [x] **v0.10.0**: Session management with undo support
|
||||||
|
- [x] **v0.11.0 - v0.12.0**: Full TUI with all components
|
||||||
|
- [x] **v0.13.0**: Security (PathValidator, command validation)
|
||||||
|
- [x] **v0.14.0**: 8 slash commands
|
||||||
|
- [x] **v0.15.0**: CLI entry point with onboarding
|
||||||
|
- [x] **v0.16.0**: Comprehensive error handling system
|
||||||
|
- [x] **1420 tests, 98% coverage**
|
||||||
|
|
||||||
|
### 🔜 v1.0.0 - Production Ready
|
||||||
|
|
||||||
|
- [ ] Performance optimizations
|
||||||
|
- [ ] Complete documentation
|
||||||
|
- [ ] Working examples
|
||||||
|
|
||||||
|
See [ROADMAP.md](./ROADMAP.md) for detailed development plan and [CHANGELOG.md](./CHANGELOG.md) for release history.
|
||||||
|
|
||||||
|
## Tools Reference
|
||||||
|
|
||||||
|
The AI agent has access to 18 tools for working with your codebase. Here are the most commonly used ones:
|
||||||
|
|
||||||
|
### Read Tools
|
||||||
|
|
||||||
|
**`get_lines(path, start?, end?)`**
|
||||||
|
Read specific lines from a file.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Show me the authentication logic
|
||||||
|
Assistant: [get_lines src/auth/service.ts 45 67]
|
||||||
|
# Returns lines 45-67 with line numbers
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_function(path, name)`**
|
||||||
|
Get a specific function's source code and metadata.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: How does the login function work?
|
||||||
|
Assistant: [get_function src/auth/service.ts login]
|
||||||
|
# Returns function code, params, return type, and metadata
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_class(path, name)`**
|
||||||
|
Get a specific class's source code and metadata.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Show me the UserService class
|
||||||
|
Assistant: [get_class src/services/user.ts UserService]
|
||||||
|
# Returns class code, methods, properties, and inheritance info
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_structure(path?, depth?)`**
|
||||||
|
Get directory tree structure.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What's in the src/auth directory?
|
||||||
|
Assistant: [get_structure src/auth]
|
||||||
|
# Returns ASCII tree with files and folders
|
||||||
|
```
|
||||||
|
|
||||||
|
### Edit Tools
|
||||||
|
|
||||||
|
**`edit_lines(path, start, end, content)`**
|
||||||
|
Replace lines in a file (requires confirmation).
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Update the timeout to 5000ms
|
||||||
|
Assistant: [edit_lines src/config.ts 23 23 " timeout: 5000,"]
|
||||||
|
# Shows diff, asks for confirmation
|
||||||
|
```
|
||||||
|
|
||||||
|
**`create_file(path, content)`**
|
||||||
|
Create a new file (requires confirmation).
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Create a new utility for date formatting
|
||||||
|
Assistant: [create_file src/utils/date.ts "export function formatDate..."]
|
||||||
|
# Creates file after confirmation
|
||||||
|
```
|
||||||
|
|
||||||
|
**`delete_file(path)`**
|
||||||
|
Delete a file (requires confirmation).
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Remove the old test file
|
||||||
|
Assistant: [delete_file tests/old-test.test.ts]
|
||||||
|
# Deletes after confirmation
|
||||||
|
```
|
||||||
|
|
||||||
|
### Search Tools
|
||||||
|
|
||||||
|
**`find_references(symbol, path?)`**
|
||||||
|
Find all usages of a symbol across the codebase.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Where is getUserById used?
|
||||||
|
Assistant: [find_references getUserById]
|
||||||
|
# Returns all files/lines where it's called
|
||||||
|
```
|
||||||
|
|
||||||
|
**`find_definition(symbol)`**
|
||||||
|
Find where a symbol is defined.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Where is ApiClient defined?
|
||||||
|
Assistant: [find_definition ApiClient]
|
||||||
|
# Returns file, line, and context
|
||||||
|
```
|
||||||
|
|
||||||
|
### Analysis Tools
|
||||||
|
|
||||||
|
**`get_dependencies(path)`**
|
||||||
|
Get files that a specific file imports.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What does auth.ts depend on?
|
||||||
|
Assistant: [get_dependencies src/auth/service.ts]
|
||||||
|
# Returns list of imported files
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_dependents(path)`**
|
||||||
|
Get files that import a specific file.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What files use the database module?
|
||||||
|
Assistant: [get_dependents src/db/index.ts]
|
||||||
|
# Returns list of files importing this
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_complexity(path?, limit?)`**
|
||||||
|
Get complexity metrics for files.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Which files are most complex?
|
||||||
|
Assistant: [get_complexity null 10]
|
||||||
|
# Returns top 10 most complex files with metrics
|
||||||
|
```
|
||||||
|
|
||||||
|
**`get_todos(path?, type?)`**
|
||||||
|
Find TODO/FIXME/HACK comments.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What TODOs are there?
|
||||||
|
Assistant: [get_todos]
|
||||||
|
# Returns all TODO comments with locations
|
||||||
|
```
|
||||||
|
|
||||||
|
### Git Tools
|
||||||
|
|
||||||
|
**`git_status()`**
|
||||||
|
Get current git repository status.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: What files have changed?
|
||||||
|
Assistant: [git_status]
|
||||||
|
# Returns branch, staged, modified, untracked files
|
||||||
|
```
|
||||||
|
|
||||||
|
**`git_diff(path?, staged?)`**
|
||||||
|
Get uncommitted changes.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Show me what changed in auth.ts
|
||||||
|
Assistant: [git_diff src/auth/service.ts]
|
||||||
|
# Returns diff output
|
||||||
|
```
|
||||||
|
|
||||||
|
**`git_commit(message, files?)`**
|
||||||
|
Create a git commit (requires confirmation).
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Commit these auth changes
|
||||||
|
Assistant: [git_commit "feat: add password reset flow" ["src/auth/service.ts"]]
|
||||||
|
# Creates commit after confirmation
|
||||||
|
```
|
||||||
|
|
||||||
|
### Run Tools
|
||||||
|
|
||||||
|
**`run_command(command, timeout?)`**
|
||||||
|
Execute shell commands (with security validation).
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Run the build
|
||||||
|
Assistant: [run_command "npm run build"]
|
||||||
|
# Checks security, then executes
|
||||||
|
```
|
||||||
|
|
||||||
|
**`run_tests(path?, filter?, watch?)`**
|
||||||
|
Run project tests.
|
||||||
|
|
||||||
|
```
|
||||||
|
You: Test the auth module
|
||||||
|
Assistant: [run_tests "tests/auth" null false]
|
||||||
|
# Auto-detects test runner and executes
|
||||||
|
```
|
||||||
|
|
||||||
|
For complete tool documentation with all parameters and options, see [TOOLS.md](./TOOLS.md).
|
||||||
|
|
||||||
|
## Programmatic API
|
||||||
|
|
||||||
|
You can use ipuaro as a library in your own Node.js applications:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import {
|
||||||
|
createRedisClient,
|
||||||
|
RedisStorage,
|
||||||
|
OllamaClient,
|
||||||
|
ToolRegistry,
|
||||||
|
StartSession,
|
||||||
|
HandleMessage
|
||||||
|
} from "@samiyev/ipuaro"
|
||||||
|
|
||||||
|
// Initialize dependencies
|
||||||
|
const redis = await createRedisClient({ host: "localhost", port: 6379 })
|
||||||
|
const storage = new RedisStorage(redis, "my-project")
|
||||||
|
const llm = new OllamaClient({
|
||||||
|
model: "qwen2.5-coder:7b-instruct",
|
||||||
|
contextWindow: 128000,
|
||||||
|
temperature: 0.1
|
||||||
|
})
|
||||||
|
const tools = new ToolRegistry()
|
||||||
|
|
||||||
|
// Register tools
|
||||||
|
tools.register(new GetLinesTool(storage, "/path/to/project"))
|
||||||
|
// ... register other tools
|
||||||
|
|
||||||
|
// Start a session
|
||||||
|
const startSession = new StartSession(storage)
|
||||||
|
const session = await startSession.execute("my-project")
|
||||||
|
|
||||||
|
// Handle a message
|
||||||
|
const handleMessage = new HandleMessage(storage, llm, tools)
|
||||||
|
await handleMessage.execute(session, "Show me the auth flow")
|
||||||
|
|
||||||
|
// Session is automatically updated in Redis
|
||||||
|
```
|
||||||
|
|
||||||
|
For full API documentation, see the TypeScript definitions in `src/` or explore the [source code](./src/).
|
||||||
|
|
||||||
|
## How It Works
|
||||||
|
|
||||||
|
### 1. Project Indexing
|
||||||
|
|
||||||
|
When you start ipuaro, it scans your project and builds an index:
|
||||||
|
|
||||||
|
```
|
||||||
|
1. File Scanner → Recursively scans files (.ts, .js, .tsx, .jsx)
|
||||||
|
2. AST Parser → Parses with tree-sitter (extracts functions, classes, imports)
|
||||||
|
3. Meta Analyzer → Calculates complexity, dependencies, hub detection
|
||||||
|
4. Index Builder → Creates symbol index and dependency graph
|
||||||
|
5. Redis Storage → Persists everything for instant startup next time
|
||||||
|
6. Watchdog → Watches files for changes and updates index in background
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Lazy Loading Context
|
||||||
|
|
||||||
|
Instead of loading entire codebase into context:
|
||||||
|
|
||||||
|
```
|
||||||
|
Traditional approach:
|
||||||
|
├── Load all files → 500k tokens → ❌ Exceeds context window
|
||||||
|
|
||||||
|
ipuaro approach:
|
||||||
|
├── Load project structure → ~2k tokens
|
||||||
|
├── Load AST metadata → ~10k tokens
|
||||||
|
├── On demand: get_function("auth.ts", "login") → ~200 tokens
|
||||||
|
├── Total: ~12k tokens → ✅ Fits in 128k context window
|
||||||
|
```
|
||||||
|
|
||||||
|
Context automatically compresses when usage exceeds 80% by summarizing old messages.
|
||||||
|
|
||||||
|
### 3. Tool-Based Code Access
|
||||||
|
|
||||||
|
The LLM doesn't see your code initially. It only sees structure and metadata. When it needs code, it uses tools:
|
||||||
|
|
||||||
|
```
|
||||||
|
You: "How does user creation work?"
|
||||||
|
|
||||||
|
Agent reasoning:
|
||||||
|
1. [get_structure src/] → sees user/ folder exists
|
||||||
|
2. [get_function src/user/service.ts createUser] → loads specific function
|
||||||
|
3. [find_references createUser] → finds all usages
|
||||||
|
4. Synthesizes answer with only relevant code loaded
|
||||||
|
|
||||||
|
Total tokens used: ~2k (vs loading entire src/ which could be 50k+)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Session Persistence
|
||||||
|
|
||||||
|
Everything is saved to Redis:
|
||||||
|
- Chat history and context state
|
||||||
|
- Undo stack (last 10 file changes)
|
||||||
|
- Session metadata and statistics
|
||||||
|
|
||||||
|
Resume your session anytime with `/sessions load <id>`.
|
||||||
|
|
||||||
|
### 5. Security Model
|
||||||
|
|
||||||
|
Three-layer security:
|
||||||
|
1. **Blacklist**: Dangerous commands always blocked (rm -rf, sudo, etc.)
|
||||||
|
2. **Whitelist**: Safe commands auto-approved (npm, git status, etc.)
|
||||||
|
3. **Confirmation**: Unknown commands require user approval
|
||||||
|
|
||||||
|
File operations are restricted to project directory only (path traversal prevention).
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Redis Connection Errors
|
||||||
|
|
||||||
|
**Error**: `Redis connection failed`
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
```bash
|
||||||
|
# Check if Redis is running
|
||||||
|
redis-cli ping # Should return "PONG"
|
||||||
|
|
||||||
|
# Start Redis with AOF persistence
|
||||||
|
redis-server --appendonly yes
|
||||||
|
|
||||||
|
# Check Redis logs
|
||||||
|
tail -f /usr/local/var/log/redis.log # macOS
|
||||||
|
```
|
||||||
|
|
||||||
|
### Ollama Model Not Found
|
||||||
|
|
||||||
|
**Error**: `Model qwen2.5-coder:7b-instruct not found`
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
```bash
|
||||||
|
# Pull the model
|
||||||
|
ollama pull qwen2.5-coder:7b-instruct
|
||||||
|
|
||||||
|
# List installed models
|
||||||
|
ollama list
|
||||||
|
|
||||||
|
# Check Ollama is running
|
||||||
|
ollama serve
|
||||||
|
```
|
||||||
|
|
||||||
|
### Large Project Performance
|
||||||
|
|
||||||
|
**Issue**: Indexing takes too long or uses too much memory
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
```bash
|
||||||
|
# Index only a subdirectory
|
||||||
|
ipuaro ./src
|
||||||
|
|
||||||
|
# Add more ignore patterns to .ipuaro.json
|
||||||
|
{
|
||||||
|
"project": {
|
||||||
|
"ignorePatterns": ["node_modules", "dist", ".git", "coverage", "build"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Increase Node.js memory limit
|
||||||
|
NODE_OPTIONS="--max-old-space-size=4096" ipuaro
|
||||||
|
```
|
||||||
|
|
||||||
|
### Context Window Exceeded
|
||||||
|
|
||||||
|
**Issue**: `Context window exceeded` errors
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
- Context auto-compresses at 80%, but you can manually `/clear` history
|
||||||
|
- Use more targeted questions instead of asking about entire codebase
|
||||||
|
- The agent will automatically use tools to load only what's needed
|
||||||
|
|
||||||
|
### File Changes Not Detected
|
||||||
|
|
||||||
|
**Issue**: Made changes but agent doesn't see them
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
```bash
|
||||||
|
# Force reindex
|
||||||
|
/reindex
|
||||||
|
|
||||||
|
# Or restart with fresh index
|
||||||
|
rm -rf ~/.ipuaro/cache
|
||||||
|
ipuaro
|
||||||
|
```
|
||||||
|
|
||||||
|
### Undo Not Working
|
||||||
|
|
||||||
|
**Issue**: `/undo` says no changes to undo
|
||||||
|
|
||||||
|
**Explanation**: Undo stack only tracks the last 10 file edits made through ipuaro. Manual file edits outside ipuaro cannot be undone.
|
||||||
|
|
||||||
|
## FAQ
|
||||||
|
|
||||||
|
**Q: Does ipuaro send my code to any external servers?**
|
||||||
|
|
||||||
|
A: No. Everything runs locally. Ollama runs on your machine, Redis stores data locally, and no network requests are made except to your local Ollama instance.
|
||||||
|
|
||||||
|
**Q: What languages are supported?**
|
||||||
|
|
||||||
|
A: Currently TypeScript, JavaScript (including TSX/JSX). More languages planned for future versions.
|
||||||
|
|
||||||
|
**Q: Can I use OpenAI/Anthropic/other LLM providers?**
|
||||||
|
|
||||||
|
A: Currently only Ollama is supported. OpenAI/Anthropic support is planned for v1.2.0.
|
||||||
|
|
||||||
|
**Q: How much disk space does Redis use?**
|
||||||
|
|
||||||
|
A: Depends on project size. A typical mid-size project (1000 files) uses ~50-100MB. Redis uses AOF persistence, so data survives restarts.
|
||||||
|
|
||||||
|
**Q: Can I use ipuaro in a CI/CD pipeline?**
|
||||||
|
|
||||||
|
A: Yes, but it's designed for interactive use. For automated code analysis, consider the programmatic API.
|
||||||
|
|
||||||
|
**Q: What's the difference between ipuaro and GitHub Copilot?**
|
||||||
|
|
||||||
|
A: Copilot is an autocomplete tool. ipuaro is a conversational agent that can read, analyze, modify files, run commands, and has full codebase understanding through AST parsing.
|
||||||
|
|
||||||
|
**Q: Why Redis instead of SQLite or JSON files?**
|
||||||
|
|
||||||
|
A: Redis provides fast in-memory access, AOF persistence, and handles concurrent access well. The session model fits Redis's data structures perfectly.
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Contributions welcome! This project is in early development.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Clone
|
||||||
|
git clone https://github.com/samiyev/puaros.git
|
||||||
|
cd puaros/packages/ipuaro
|
||||||
|
|
||||||
|
# Install
|
||||||
|
pnpm install
|
||||||
|
|
||||||
|
# Build
|
||||||
|
pnpm build
|
||||||
|
|
||||||
|
# Test
|
||||||
|
pnpm test:run
|
||||||
|
|
||||||
|
# Coverage
|
||||||
|
pnpm test:coverage
|
||||||
|
```
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT © Fozilbek Samiyev
|
||||||
|
|
||||||
|
## Links
|
||||||
|
|
||||||
|
- [GitHub Repository](https://github.com/samiyev/puaros/tree/main/packages/ipuaro)
|
||||||
|
- [Issues](https://github.com/samiyev/puaros/issues)
|
||||||
|
- [Changelog](./CHANGELOG.md)
|
||||||
|
- [Roadmap](./ROADMAP.md)
|
||||||
1401
packages/ipuaro/ROADMAP.md
Normal file
1401
packages/ipuaro/ROADMAP.md
Normal file
File diff suppressed because it is too large
Load Diff
109
packages/ipuaro/TODO.md
Normal file
109
packages/ipuaro/TODO.md
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
# ipuaro TODO
|
||||||
|
|
||||||
|
## Completed
|
||||||
|
|
||||||
|
### Version 0.1.0 - Foundation
|
||||||
|
- [x] Project setup (package.json, tsconfig, vitest)
|
||||||
|
- [x] Domain entities (Session, Project)
|
||||||
|
- [x] Domain value objects (FileData, FileAST, FileMeta, ChatMessage, etc.)
|
||||||
|
- [x] Domain service interfaces (IStorage, ILLMClient, ITool, IIndexer)
|
||||||
|
- [x] Shared config loader with Zod validation
|
||||||
|
- [x] IpuaroError class
|
||||||
|
|
||||||
|
### Version 0.2.0 - Redis Storage
|
||||||
|
- [x] RedisClient with AOF config
|
||||||
|
- [x] Redis schema implementation
|
||||||
|
- [x] RedisStorage class
|
||||||
|
|
||||||
|
### Version 0.3.0 - Indexer
|
||||||
|
- [x] FileScanner with gitignore support
|
||||||
|
- [x] ASTParser with tree-sitter
|
||||||
|
- [x] MetaAnalyzer for complexity
|
||||||
|
- [x] IndexBuilder for symbols
|
||||||
|
- [x] Watchdog for file changes
|
||||||
|
|
||||||
|
### Version 0.4.0 - LLM Integration
|
||||||
|
- [x] OllamaClient implementation
|
||||||
|
- [x] System prompt design
|
||||||
|
- [x] Tool definitions (18 tools)
|
||||||
|
- [x] Response parser (XML format)
|
||||||
|
|
||||||
|
### Version 0.5.0 - Read Tools
|
||||||
|
- [x] ToolRegistry implementation
|
||||||
|
- [x] get_lines tool
|
||||||
|
- [x] get_function tool
|
||||||
|
- [x] get_class tool
|
||||||
|
- [x] get_structure tool
|
||||||
|
|
||||||
|
### Version 0.6.0 - Edit Tools
|
||||||
|
- [x] edit_lines tool
|
||||||
|
- [x] create_file tool
|
||||||
|
- [x] delete_file tool
|
||||||
|
|
||||||
|
### Version 0.7.0 - Search Tools
|
||||||
|
- [x] find_references tool
|
||||||
|
- [x] find_definition tool
|
||||||
|
|
||||||
|
### Version 0.8.0 - Analysis Tools
|
||||||
|
- [x] get_dependencies tool
|
||||||
|
- [x] get_dependents tool
|
||||||
|
- [x] get_complexity tool
|
||||||
|
- [x] get_todos tool
|
||||||
|
|
||||||
|
### Version 0.9.0 - Git & Run Tools
|
||||||
|
- [x] git_status tool
|
||||||
|
- [x] git_diff tool
|
||||||
|
- [x] git_commit tool
|
||||||
|
- [x] CommandSecurity (blacklist/whitelist)
|
||||||
|
- [x] run_command tool
|
||||||
|
- [x] run_tests tool
|
||||||
|
|
||||||
|
### Version 0.10.0 - Session Management
|
||||||
|
- [x] ISessionStorage interface
|
||||||
|
- [x] RedisSessionStorage implementation
|
||||||
|
- [x] ContextManager use case
|
||||||
|
- [x] StartSession use case
|
||||||
|
- [x] HandleMessage use case
|
||||||
|
- [x] UndoChange use case
|
||||||
|
|
||||||
|
## In Progress
|
||||||
|
|
||||||
|
### Version 0.11.0 - TUI Basic
|
||||||
|
- [ ] App shell (Ink/React)
|
||||||
|
- [ ] StatusBar component
|
||||||
|
- [ ] Chat component
|
||||||
|
- [ ] Input component
|
||||||
|
|
||||||
|
## Planned
|
||||||
|
|
||||||
|
### Version 0.12.0 - TUI Advanced
|
||||||
|
- [ ] DiffView component
|
||||||
|
- [ ] ConfirmDialog component
|
||||||
|
- [ ] ErrorDialog component
|
||||||
|
- [ ] Progress component
|
||||||
|
|
||||||
|
### Version 0.13.0+ - Commands & Polish
|
||||||
|
- [ ] Slash commands (/help, /clear, /undo, /sessions, /status)
|
||||||
|
- [ ] Hotkeys (Ctrl+C, Ctrl+D, Ctrl+Z)
|
||||||
|
- [ ] Auto-compression at 80% context
|
||||||
|
|
||||||
|
### Version 0.14.0 - CLI Entry Point
|
||||||
|
- [ ] Full CLI commands (start, init, index)
|
||||||
|
- [ ] Onboarding flow (Redis check, Ollama check, model pull)
|
||||||
|
|
||||||
|
## Technical Debt
|
||||||
|
|
||||||
|
_None at this time._
|
||||||
|
|
||||||
|
## Ideas for Future
|
||||||
|
|
||||||
|
- Plugin system for custom tools
|
||||||
|
- Multiple LLM providers (OpenAI, Anthropic)
|
||||||
|
- IDE integration (LSP)
|
||||||
|
- Web UI option
|
||||||
|
- Parallel AST parsing
|
||||||
|
- Response caching
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Last Updated:** 2025-12-01
|
||||||
1605
packages/ipuaro/TOOLS.md
Normal file
1605
packages/ipuaro/TOOLS.md
Normal file
File diff suppressed because it is too large
Load Diff
3
packages/ipuaro/bin/ipuaro.js
Normal file
3
packages/ipuaro/bin/ipuaro.js
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
|
||||||
|
import "../dist/cli/index.js"
|
||||||
1143
packages/ipuaro/docs/CONCEPT.md
Normal file
1143
packages/ipuaro/docs/CONCEPT.md
Normal file
File diff suppressed because it is too large
Load Diff
80
packages/ipuaro/package.json
Normal file
80
packages/ipuaro/package.json
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
{
|
||||||
|
"name": "@samiyev/ipuaro",
|
||||||
|
"version": "0.17.0",
|
||||||
|
"description": "Local AI agent for codebase operations with infinite context feeling",
|
||||||
|
"author": "Fozilbek Samiyev <fozilbek.samiyev@gmail.com>",
|
||||||
|
"license": "MIT",
|
||||||
|
"type": "module",
|
||||||
|
"main": "./dist/index.js",
|
||||||
|
"types": "./dist/index.d.ts",
|
||||||
|
"bin": {
|
||||||
|
"ipuaro": "./bin/ipuaro.js"
|
||||||
|
},
|
||||||
|
"exports": {
|
||||||
|
".": {
|
||||||
|
"types": "./dist/index.d.ts",
|
||||||
|
"import": "./dist/index.js"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist",
|
||||||
|
"bin"
|
||||||
|
],
|
||||||
|
"scripts": {
|
||||||
|
"build": "tsup",
|
||||||
|
"watch": "tsup --watch",
|
||||||
|
"clean": "rm -rf dist",
|
||||||
|
"test": "vitest",
|
||||||
|
"test:run": "vitest run",
|
||||||
|
"test:coverage": "vitest run --coverage",
|
||||||
|
"test:ui": "vitest --ui",
|
||||||
|
"test:watch": "vitest --watch",
|
||||||
|
"lint": "eslint src --fix",
|
||||||
|
"format": "prettier --write src"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"chokidar": "^3.6.0",
|
||||||
|
"commander": "^11.1.0",
|
||||||
|
"globby": "^16.0.0",
|
||||||
|
"ink": "^4.4.1",
|
||||||
|
"ink-text-input": "^5.0.1",
|
||||||
|
"ioredis": "^5.4.1",
|
||||||
|
"ollama": "^0.5.11",
|
||||||
|
"react": "^18.2.0",
|
||||||
|
"simple-git": "^3.27.0",
|
||||||
|
"tree-sitter": "^0.21.1",
|
||||||
|
"tree-sitter-javascript": "^0.21.0",
|
||||||
|
"tree-sitter-typescript": "^0.21.2",
|
||||||
|
"zod": "^3.23.8"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/node": "^22.10.1",
|
||||||
|
"@types/react": "^18.2.0",
|
||||||
|
"@vitest/coverage-v8": "^1.6.0",
|
||||||
|
"@vitest/ui": "^1.6.0",
|
||||||
|
"tsup": "^8.3.5",
|
||||||
|
"typescript": "^5.7.2",
|
||||||
|
"vitest": "^1.6.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=20.0.0"
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"ai",
|
||||||
|
"agent",
|
||||||
|
"codebase",
|
||||||
|
"llm",
|
||||||
|
"ollama",
|
||||||
|
"cli",
|
||||||
|
"terminal"
|
||||||
|
],
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://github.com/samiyev/puaros.git",
|
||||||
|
"directory": "packages/ipuaro"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://github.com/samiyev/puaros/issues"
|
||||||
|
},
|
||||||
|
"homepage": "https://github.com/samiyev/puaros/tree/main/packages/ipuaro#readme"
|
||||||
|
}
|
||||||
4
packages/ipuaro/src/application/dtos/index.ts
Normal file
4
packages/ipuaro/src/application/dtos/index.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
/*
|
||||||
|
* Application DTOs
|
||||||
|
* Will be implemented in version 0.10.0+
|
||||||
|
*/
|
||||||
10
packages/ipuaro/src/application/index.ts
Normal file
10
packages/ipuaro/src/application/index.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
// Application Layer exports
|
||||||
|
|
||||||
|
// Use Cases
|
||||||
|
export * from "./use-cases/index.js"
|
||||||
|
|
||||||
|
// DTOs
|
||||||
|
export * from "./dtos/index.js"
|
||||||
|
|
||||||
|
// Interfaces
|
||||||
|
export * from "./interfaces/index.js"
|
||||||
51
packages/ipuaro/src/application/interfaces/IToolRegistry.ts
Normal file
51
packages/ipuaro/src/application/interfaces/IToolRegistry.ts
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
import type { ITool, ToolContext } from "../../domain/services/ITool.js"
|
||||||
|
import type { ToolResult } from "../../domain/value-objects/ToolResult.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Tool registry interface.
|
||||||
|
* Manages registration and execution of tools.
|
||||||
|
*/
|
||||||
|
export interface IToolRegistry {
|
||||||
|
/**
|
||||||
|
* Register a tool.
|
||||||
|
*/
|
||||||
|
register(tool: ITool): void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool by name.
|
||||||
|
*/
|
||||||
|
get(name: string): ITool | undefined
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all registered tools.
|
||||||
|
*/
|
||||||
|
getAll(): ITool[]
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tools by category.
|
||||||
|
*/
|
||||||
|
getByCategory(category: ITool["category"]): ITool[]
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if tool exists.
|
||||||
|
*/
|
||||||
|
has(name: string): boolean
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute tool by name.
|
||||||
|
*/
|
||||||
|
execute(name: string, params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get tool definitions for LLM.
|
||||||
|
*/
|
||||||
|
getToolDefinitions(): {
|
||||||
|
name: string
|
||||||
|
description: string
|
||||||
|
parameters: {
|
||||||
|
type: "object"
|
||||||
|
properties: Record<string, { type: string; description: string }>
|
||||||
|
required: string[]
|
||||||
|
}
|
||||||
|
}[]
|
||||||
|
}
|
||||||
2
packages/ipuaro/src/application/interfaces/index.ts
Normal file
2
packages/ipuaro/src/application/interfaces/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
// Application Interfaces
|
||||||
|
export * from "./IToolRegistry.js"
|
||||||
229
packages/ipuaro/src/application/use-cases/ContextManager.ts
Normal file
229
packages/ipuaro/src/application/use-cases/ContextManager.ts
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
import type { ContextState, Session } from "../../domain/entities/Session.js"
|
||||||
|
import type { ILLMClient } from "../../domain/services/ILLMClient.js"
|
||||||
|
import { type ChatMessage, createSystemMessage } from "../../domain/value-objects/ChatMessage.js"
|
||||||
|
import { CONTEXT_COMPRESSION_THRESHOLD, CONTEXT_WINDOW_SIZE } from "../../domain/constants/index.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* File in context with token count.
|
||||||
|
*/
|
||||||
|
export interface FileContext {
|
||||||
|
path: string
|
||||||
|
tokens: number
|
||||||
|
addedAt: number
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compression result.
|
||||||
|
*/
|
||||||
|
export interface CompressionResult {
|
||||||
|
compressed: boolean
|
||||||
|
removedMessages: number
|
||||||
|
tokensSaved: number
|
||||||
|
summary?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const COMPRESSION_PROMPT = `Summarize the following conversation history in a concise way,
|
||||||
|
preserving key information about:
|
||||||
|
- What files were discussed or modified
|
||||||
|
- What changes were made
|
||||||
|
- Important decisions or context
|
||||||
|
Keep the summary under 500 tokens.`
|
||||||
|
|
||||||
|
const MESSAGES_TO_KEEP = 5
|
||||||
|
const MIN_MESSAGES_FOR_COMPRESSION = 10
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Manages context window token budget and compression.
|
||||||
|
*/
|
||||||
|
export class ContextManager {
|
||||||
|
private readonly filesInContext = new Map<string, FileContext>()
|
||||||
|
private currentTokens = 0
|
||||||
|
private readonly contextWindowSize: number
|
||||||
|
|
||||||
|
constructor(contextWindowSize: number = CONTEXT_WINDOW_SIZE) {
|
||||||
|
this.contextWindowSize = contextWindowSize
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a file to the context.
|
||||||
|
*/
|
||||||
|
addToContext(file: string, tokens: number): void {
|
||||||
|
const existing = this.filesInContext.get(file)
|
||||||
|
if (existing) {
|
||||||
|
this.currentTokens -= existing.tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
this.filesInContext.set(file, {
|
||||||
|
path: file,
|
||||||
|
tokens,
|
||||||
|
addedAt: Date.now(),
|
||||||
|
})
|
||||||
|
this.currentTokens += tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a file from the context.
|
||||||
|
*/
|
||||||
|
removeFromContext(file: string): void {
|
||||||
|
const existing = this.filesInContext.get(file)
|
||||||
|
if (existing) {
|
||||||
|
this.currentTokens -= existing.tokens
|
||||||
|
this.filesInContext.delete(file)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current token usage ratio (0-1).
|
||||||
|
*/
|
||||||
|
getUsage(): number {
|
||||||
|
return this.currentTokens / this.contextWindowSize
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current token count.
|
||||||
|
*/
|
||||||
|
getTokenCount(): number {
|
||||||
|
return this.currentTokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get available tokens.
|
||||||
|
*/
|
||||||
|
getAvailableTokens(): number {
|
||||||
|
return this.contextWindowSize - this.currentTokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if compression is needed.
|
||||||
|
*/
|
||||||
|
needsCompression(): boolean {
|
||||||
|
return this.getUsage() > CONTEXT_COMPRESSION_THRESHOLD
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update token count (e.g., after receiving a message).
|
||||||
|
*/
|
||||||
|
addTokens(tokens: number): void {
|
||||||
|
this.currentTokens += tokens
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get files in context.
|
||||||
|
*/
|
||||||
|
getFilesInContext(): string[] {
|
||||||
|
return Array.from(this.filesInContext.keys())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sync context state from session.
|
||||||
|
*/
|
||||||
|
syncFromSession(session: Session): void {
|
||||||
|
this.filesInContext.clear()
|
||||||
|
this.currentTokens = 0
|
||||||
|
|
||||||
|
for (const file of session.context.filesInContext) {
|
||||||
|
this.filesInContext.set(file, {
|
||||||
|
path: file,
|
||||||
|
tokens: 0,
|
||||||
|
addedAt: Date.now(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
this.currentTokens = Math.floor(session.context.tokenUsage * this.contextWindowSize)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update session context state.
|
||||||
|
*/
|
||||||
|
updateSession(session: Session): void {
|
||||||
|
session.context.filesInContext = this.getFilesInContext()
|
||||||
|
session.context.tokenUsage = this.getUsage()
|
||||||
|
session.context.needsCompression = this.needsCompression()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compress context using LLM to summarize old messages.
|
||||||
|
*/
|
||||||
|
async compress(session: Session, llm: ILLMClient): Promise<CompressionResult> {
|
||||||
|
const history = session.history
|
||||||
|
if (history.length < MIN_MESSAGES_FOR_COMPRESSION) {
|
||||||
|
return {
|
||||||
|
compressed: false,
|
||||||
|
removedMessages: 0,
|
||||||
|
tokensSaved: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const messagesToCompress = history.slice(0, -MESSAGES_TO_KEEP)
|
||||||
|
const messagesToKeep = history.slice(-MESSAGES_TO_KEEP)
|
||||||
|
|
||||||
|
const tokensBeforeCompression = await this.countHistoryTokens(messagesToCompress, llm)
|
||||||
|
|
||||||
|
const summary = await this.summarizeMessages(messagesToCompress, llm)
|
||||||
|
const summaryTokens = await llm.countTokens(summary)
|
||||||
|
|
||||||
|
const summaryMessage = createSystemMessage(`[Previous conversation summary]\n${summary}`)
|
||||||
|
|
||||||
|
session.history = [summaryMessage, ...messagesToKeep]
|
||||||
|
|
||||||
|
const tokensSaved = tokensBeforeCompression - summaryTokens
|
||||||
|
this.currentTokens -= tokensSaved
|
||||||
|
|
||||||
|
this.updateSession(session)
|
||||||
|
|
||||||
|
return {
|
||||||
|
compressed: true,
|
||||||
|
removedMessages: messagesToCompress.length,
|
||||||
|
tokensSaved,
|
||||||
|
summary,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new context state.
|
||||||
|
*/
|
||||||
|
static createInitialState(): ContextState {
|
||||||
|
return {
|
||||||
|
filesInContext: [],
|
||||||
|
tokenUsage: 0,
|
||||||
|
needsCompression: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async summarizeMessages(messages: ChatMessage[], llm: ILLMClient): Promise<string> {
|
||||||
|
const conversation = this.formatMessagesForSummary(messages)
|
||||||
|
|
||||||
|
const response = await llm.chat([
|
||||||
|
createSystemMessage(COMPRESSION_PROMPT),
|
||||||
|
createSystemMessage(conversation),
|
||||||
|
])
|
||||||
|
|
||||||
|
return response.content
|
||||||
|
}
|
||||||
|
|
||||||
|
private formatMessagesForSummary(messages: ChatMessage[]): string {
|
||||||
|
return messages
|
||||||
|
.filter((m) => m.role !== "tool")
|
||||||
|
.map((m) => {
|
||||||
|
const role = m.role === "user" ? "User" : "Assistant"
|
||||||
|
const content = this.truncateContent(m.content, 500)
|
||||||
|
return `${role}: ${content}`
|
||||||
|
})
|
||||||
|
.join("\n\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
private truncateContent(content: string, maxLength: number): string {
|
||||||
|
if (content.length <= maxLength) {
|
||||||
|
return content
|
||||||
|
}
|
||||||
|
return `${content.slice(0, maxLength)}...`
|
||||||
|
}
|
||||||
|
|
||||||
|
private async countHistoryTokens(messages: ChatMessage[], llm: ILLMClient): Promise<number> {
|
||||||
|
let total = 0
|
||||||
|
for (const message of messages) {
|
||||||
|
total += await llm.countTokens(message.content)
|
||||||
|
}
|
||||||
|
return total
|
||||||
|
}
|
||||||
|
}
|
||||||
382
packages/ipuaro/src/application/use-cases/HandleMessage.ts
Normal file
382
packages/ipuaro/src/application/use-cases/HandleMessage.ts
Normal file
@@ -0,0 +1,382 @@
|
|||||||
|
import { randomUUID } from "node:crypto"
|
||||||
|
import type { Session } from "../../domain/entities/Session.js"
|
||||||
|
import type { ILLMClient } from "../../domain/services/ILLMClient.js"
|
||||||
|
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"
|
||||||
|
import type { IStorage } from "../../domain/services/IStorage.js"
|
||||||
|
import type { DiffInfo, ToolContext } from "../../domain/services/ITool.js"
|
||||||
|
import {
|
||||||
|
type ChatMessage,
|
||||||
|
createAssistantMessage,
|
||||||
|
createSystemMessage,
|
||||||
|
createToolMessage,
|
||||||
|
createUserMessage,
|
||||||
|
} from "../../domain/value-objects/ChatMessage.js"
|
||||||
|
import type { ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||||
|
import { createErrorResult, type ToolResult } from "../../domain/value-objects/ToolResult.js"
|
||||||
|
import { createUndoEntry, type UndoEntry } from "../../domain/value-objects/UndoEntry.js"
|
||||||
|
import { type ErrorOption, IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
import {
|
||||||
|
buildInitialContext,
|
||||||
|
type ProjectStructure,
|
||||||
|
SYSTEM_PROMPT,
|
||||||
|
} from "../../infrastructure/llm/prompts.js"
|
||||||
|
import { parseToolCalls } from "../../infrastructure/llm/ResponseParser.js"
|
||||||
|
import type { IToolRegistry } from "../interfaces/IToolRegistry.js"
|
||||||
|
import { ContextManager } from "./ContextManager.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Status during message handling.
|
||||||
|
*/
|
||||||
|
export type HandleMessageStatus =
|
||||||
|
| "ready"
|
||||||
|
| "thinking"
|
||||||
|
| "tool_call"
|
||||||
|
| "awaiting_confirmation"
|
||||||
|
| "error"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Edit request for confirmation.
|
||||||
|
*/
|
||||||
|
export interface EditRequest {
|
||||||
|
toolCall: ToolCall
|
||||||
|
filePath: string
|
||||||
|
description: string
|
||||||
|
diff?: DiffInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* User's choice for edit confirmation.
|
||||||
|
*/
|
||||||
|
export type EditChoice = "apply" | "skip" | "edit" | "abort"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Event callbacks for HandleMessage.
|
||||||
|
*/
|
||||||
|
export interface HandleMessageEvents {
|
||||||
|
onMessage?: (message: ChatMessage) => void
|
||||||
|
onToolCall?: (call: ToolCall) => void
|
||||||
|
onToolResult?: (result: ToolResult) => void
|
||||||
|
onConfirmation?: (message: string, diff?: DiffInfo) => Promise<boolean>
|
||||||
|
onError?: (error: IpuaroError) => Promise<ErrorOption>
|
||||||
|
onStatusChange?: (status: HandleMessageStatus) => void
|
||||||
|
onUndoEntry?: (entry: UndoEntry) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for HandleMessage.
|
||||||
|
*/
|
||||||
|
export interface HandleMessageOptions {
|
||||||
|
autoApply?: boolean
|
||||||
|
maxToolCalls?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
const DEFAULT_MAX_TOOL_CALLS = 20
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use case for handling a user message.
|
||||||
|
* Main orchestrator for the LLM interaction loop.
|
||||||
|
*/
|
||||||
|
export class HandleMessage {
|
||||||
|
private readonly storage: IStorage
|
||||||
|
private readonly sessionStorage: ISessionStorage
|
||||||
|
private readonly llm: ILLMClient
|
||||||
|
private readonly tools: IToolRegistry
|
||||||
|
private readonly contextManager: ContextManager
|
||||||
|
private readonly projectRoot: string
|
||||||
|
private projectStructure?: ProjectStructure
|
||||||
|
|
||||||
|
private events: HandleMessageEvents = {}
|
||||||
|
private options: HandleMessageOptions = {}
|
||||||
|
private aborted = false
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
storage: IStorage,
|
||||||
|
sessionStorage: ISessionStorage,
|
||||||
|
llm: ILLMClient,
|
||||||
|
tools: IToolRegistry,
|
||||||
|
projectRoot: string,
|
||||||
|
) {
|
||||||
|
this.storage = storage
|
||||||
|
this.sessionStorage = sessionStorage
|
||||||
|
this.llm = llm
|
||||||
|
this.tools = tools
|
||||||
|
this.projectRoot = projectRoot
|
||||||
|
this.contextManager = new ContextManager(llm.getContextWindowSize())
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set event callbacks.
|
||||||
|
*/
|
||||||
|
setEvents(events: HandleMessageEvents): void {
|
||||||
|
this.events = events
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set options.
|
||||||
|
*/
|
||||||
|
setOptions(options: HandleMessageOptions): void {
|
||||||
|
this.options = options
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set project structure for context building.
|
||||||
|
*/
|
||||||
|
setProjectStructure(structure: ProjectStructure): void {
|
||||||
|
this.projectStructure = structure
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Abort current processing.
|
||||||
|
*/
|
||||||
|
abort(): void {
|
||||||
|
this.aborted = true
|
||||||
|
this.llm.abort()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the message handling flow.
|
||||||
|
*/
|
||||||
|
async execute(session: Session, message: string): Promise<void> {
|
||||||
|
this.aborted = false
|
||||||
|
this.contextManager.syncFromSession(session)
|
||||||
|
|
||||||
|
if (message.trim()) {
|
||||||
|
const userMessage = createUserMessage(message)
|
||||||
|
session.addMessage(userMessage)
|
||||||
|
session.addInputToHistory(message)
|
||||||
|
this.emitMessage(userMessage)
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.sessionStorage.saveSession(session)
|
||||||
|
|
||||||
|
this.emitStatus("thinking")
|
||||||
|
|
||||||
|
let toolCallCount = 0
|
||||||
|
const maxToolCalls = this.options.maxToolCalls ?? DEFAULT_MAX_TOOL_CALLS
|
||||||
|
|
||||||
|
while (!this.aborted) {
|
||||||
|
const messages = await this.buildMessages(session)
|
||||||
|
|
||||||
|
const startTime = Date.now()
|
||||||
|
let response
|
||||||
|
|
||||||
|
try {
|
||||||
|
response = await this.llm.chat(messages)
|
||||||
|
} catch (error) {
|
||||||
|
await this.handleLLMError(error, session)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.aborted) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = parseToolCalls(response.content)
|
||||||
|
const timeMs = Date.now() - startTime
|
||||||
|
|
||||||
|
if (parsed.toolCalls.length === 0) {
|
||||||
|
const assistantMessage = createAssistantMessage(parsed.content, undefined, {
|
||||||
|
tokens: response.tokens,
|
||||||
|
timeMs,
|
||||||
|
toolCalls: 0,
|
||||||
|
})
|
||||||
|
session.addMessage(assistantMessage)
|
||||||
|
this.emitMessage(assistantMessage)
|
||||||
|
this.contextManager.addTokens(response.tokens)
|
||||||
|
this.contextManager.updateSession(session)
|
||||||
|
await this.sessionStorage.saveSession(session)
|
||||||
|
this.emitStatus("ready")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const assistantMessage = createAssistantMessage(parsed.content, parsed.toolCalls, {
|
||||||
|
tokens: response.tokens,
|
||||||
|
timeMs,
|
||||||
|
toolCalls: parsed.toolCalls.length,
|
||||||
|
})
|
||||||
|
session.addMessage(assistantMessage)
|
||||||
|
this.emitMessage(assistantMessage)
|
||||||
|
|
||||||
|
toolCallCount += parsed.toolCalls.length
|
||||||
|
if (toolCallCount > maxToolCalls) {
|
||||||
|
const errorMsg = `Maximum tool calls (${String(maxToolCalls)}) exceeded`
|
||||||
|
const errorMessage = createSystemMessage(errorMsg)
|
||||||
|
session.addMessage(errorMessage)
|
||||||
|
this.emitMessage(errorMessage)
|
||||||
|
this.emitStatus("ready")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.emitStatus("tool_call")
|
||||||
|
|
||||||
|
const results: ToolResult[] = []
|
||||||
|
|
||||||
|
for (const toolCall of parsed.toolCalls) {
|
||||||
|
if (this.aborted) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.emitToolCall(toolCall)
|
||||||
|
|
||||||
|
const result = await this.executeToolCall(toolCall, session)
|
||||||
|
results.push(result)
|
||||||
|
this.emitToolResult(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
const toolMessage = createToolMessage(results)
|
||||||
|
session.addMessage(toolMessage)
|
||||||
|
|
||||||
|
this.contextManager.addTokens(response.tokens)
|
||||||
|
|
||||||
|
if (this.contextManager.needsCompression()) {
|
||||||
|
await this.contextManager.compress(session, this.llm)
|
||||||
|
}
|
||||||
|
|
||||||
|
this.contextManager.updateSession(session)
|
||||||
|
await this.sessionStorage.saveSession(session)
|
||||||
|
|
||||||
|
this.emitStatus("thinking")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async buildMessages(session: Session): Promise<ChatMessage[]> {
|
||||||
|
const messages: ChatMessage[] = []
|
||||||
|
|
||||||
|
messages.push(createSystemMessage(SYSTEM_PROMPT))
|
||||||
|
|
||||||
|
if (this.projectStructure) {
|
||||||
|
const asts = await this.storage.getAllASTs()
|
||||||
|
const metas = await this.storage.getAllMetas()
|
||||||
|
const context = buildInitialContext(this.projectStructure, asts, metas)
|
||||||
|
messages.push(createSystemMessage(context))
|
||||||
|
}
|
||||||
|
|
||||||
|
messages.push(...session.history)
|
||||||
|
|
||||||
|
return messages
|
||||||
|
}
|
||||||
|
|
||||||
|
private async executeToolCall(toolCall: ToolCall, session: Session): Promise<ToolResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const tool = this.tools.get(toolCall.name)
|
||||||
|
|
||||||
|
if (!tool) {
|
||||||
|
return createErrorResult(
|
||||||
|
toolCall.id,
|
||||||
|
`Unknown tool: ${toolCall.name}`,
|
||||||
|
Date.now() - startTime,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const context: ToolContext = {
|
||||||
|
projectRoot: this.projectRoot,
|
||||||
|
storage: this.storage,
|
||||||
|
requestConfirmation: async (msg: string, diff?: DiffInfo) => {
|
||||||
|
return this.handleConfirmation(msg, diff, toolCall, session)
|
||||||
|
},
|
||||||
|
onProgress: (_msg: string) => {
|
||||||
|
this.events.onStatusChange?.("tool_call")
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const validationError = tool.validateParams(toolCall.params)
|
||||||
|
if (validationError) {
|
||||||
|
return createErrorResult(toolCall.id, validationError, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await tool.execute(toolCall.params, context)
|
||||||
|
return result
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||||
|
return createErrorResult(toolCall.id, errorMessage, Date.now() - startTime)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async handleConfirmation(
|
||||||
|
msg: string,
|
||||||
|
diff: DiffInfo | undefined,
|
||||||
|
toolCall: ToolCall,
|
||||||
|
session: Session,
|
||||||
|
): Promise<boolean> {
|
||||||
|
if (this.options.autoApply) {
|
||||||
|
if (diff) {
|
||||||
|
this.createUndoEntryFromDiff(diff, toolCall, session)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
this.emitStatus("awaiting_confirmation")
|
||||||
|
|
||||||
|
if (this.events.onConfirmation) {
|
||||||
|
const confirmed = await this.events.onConfirmation(msg, diff)
|
||||||
|
|
||||||
|
if (confirmed && diff) {
|
||||||
|
this.createUndoEntryFromDiff(diff, toolCall, session)
|
||||||
|
}
|
||||||
|
|
||||||
|
return confirmed
|
||||||
|
}
|
||||||
|
|
||||||
|
if (diff) {
|
||||||
|
this.createUndoEntryFromDiff(diff, toolCall, session)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
private createUndoEntryFromDiff(diff: DiffInfo, toolCall: ToolCall, session: Session): void {
|
||||||
|
const entry = createUndoEntry(
|
||||||
|
randomUUID(),
|
||||||
|
diff.filePath,
|
||||||
|
diff.oldLines,
|
||||||
|
diff.newLines,
|
||||||
|
`${toolCall.name}: ${diff.filePath}`,
|
||||||
|
toolCall.id,
|
||||||
|
)
|
||||||
|
|
||||||
|
session.addUndoEntry(entry)
|
||||||
|
void this.sessionStorage.pushUndoEntry(session.id, entry)
|
||||||
|
session.stats.editsApplied++
|
||||||
|
this.events.onUndoEntry?.(entry)
|
||||||
|
}
|
||||||
|
|
||||||
|
private async handleLLMError(error: unknown, session: Session): Promise<void> {
|
||||||
|
this.emitStatus("error")
|
||||||
|
|
||||||
|
const ipuaroError =
|
||||||
|
error instanceof IpuaroError
|
||||||
|
? error
|
||||||
|
: IpuaroError.llm(error instanceof Error ? error.message : String(error))
|
||||||
|
|
||||||
|
if (this.events.onError) {
|
||||||
|
const choice = await this.events.onError(ipuaroError)
|
||||||
|
|
||||||
|
if (choice === "retry") {
|
||||||
|
this.emitStatus("thinking")
|
||||||
|
return this.execute(session, "")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const errorMessage = createSystemMessage(`Error: ${ipuaroError.message}`)
|
||||||
|
session.addMessage(errorMessage)
|
||||||
|
this.emitMessage(errorMessage)
|
||||||
|
|
||||||
|
this.emitStatus("ready")
|
||||||
|
}
|
||||||
|
|
||||||
|
private emitMessage(message: ChatMessage): void {
|
||||||
|
this.events.onMessage?.(message)
|
||||||
|
}
|
||||||
|
|
||||||
|
private emitToolCall(call: ToolCall): void {
|
||||||
|
this.events.onToolCall?.(call)
|
||||||
|
}
|
||||||
|
|
||||||
|
private emitToolResult(result: ToolResult): void {
|
||||||
|
this.events.onToolResult?.(result)
|
||||||
|
}
|
||||||
|
|
||||||
|
private emitStatus(status: HandleMessageStatus): void {
|
||||||
|
this.events.onStatusChange?.(status)
|
||||||
|
}
|
||||||
|
}
|
||||||
62
packages/ipuaro/src/application/use-cases/StartSession.ts
Normal file
62
packages/ipuaro/src/application/use-cases/StartSession.ts
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
import { randomUUID } from "node:crypto"
|
||||||
|
import { Session } from "../../domain/entities/Session.js"
|
||||||
|
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for starting a session.
|
||||||
|
*/
|
||||||
|
export interface StartSessionOptions {
|
||||||
|
/** Force creation of a new session even if one exists */
|
||||||
|
forceNew?: boolean
|
||||||
|
/** Specific session ID to load */
|
||||||
|
sessionId?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of starting a session.
|
||||||
|
*/
|
||||||
|
export interface StartSessionResult {
|
||||||
|
session: Session
|
||||||
|
isNew: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use case for starting a session.
|
||||||
|
* Creates a new session or loads the latest one for a project.
|
||||||
|
*/
|
||||||
|
export class StartSession {
|
||||||
|
constructor(private readonly sessionStorage: ISessionStorage) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the use case.
|
||||||
|
*
|
||||||
|
* @param projectName - The project name to start a session for
|
||||||
|
* @param options - Optional configuration
|
||||||
|
* @returns The session and whether it was newly created
|
||||||
|
*/
|
||||||
|
async execute(
|
||||||
|
projectName: string,
|
||||||
|
options: StartSessionOptions = {},
|
||||||
|
): Promise<StartSessionResult> {
|
||||||
|
if (options.sessionId) {
|
||||||
|
const session = await this.sessionStorage.loadSession(options.sessionId)
|
||||||
|
if (session) {
|
||||||
|
await this.sessionStorage.touchSession(session.id)
|
||||||
|
return { session, isNew: false }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.forceNew) {
|
||||||
|
const latestSession = await this.sessionStorage.getLatestSession(projectName)
|
||||||
|
if (latestSession) {
|
||||||
|
await this.sessionStorage.touchSession(latestSession.id)
|
||||||
|
return { session: latestSession, isNew: false }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const session = new Session(randomUUID(), projectName)
|
||||||
|
await this.sessionStorage.saveSession(session)
|
||||||
|
|
||||||
|
return { session, isNew: true }
|
||||||
|
}
|
||||||
|
}
|
||||||
119
packages/ipuaro/src/application/use-cases/UndoChange.ts
Normal file
119
packages/ipuaro/src/application/use-cases/UndoChange.ts
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
import { promises as fs } from "node:fs"
|
||||||
|
import type { Session } from "../../domain/entities/Session.js"
|
||||||
|
import type { ISessionStorage } from "../../domain/services/ISessionStorage.js"
|
||||||
|
import type { IStorage } from "../../domain/services/IStorage.js"
|
||||||
|
import { canUndo, type UndoEntry } from "../../domain/value-objects/UndoEntry.js"
|
||||||
|
import { md5 } from "../../shared/utils/hash.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of undo operation.
|
||||||
|
*/
|
||||||
|
export interface UndoResult {
|
||||||
|
success: boolean
|
||||||
|
entry?: UndoEntry
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Use case for undoing the last file change.
|
||||||
|
*/
|
||||||
|
export class UndoChange {
|
||||||
|
constructor(
|
||||||
|
private readonly sessionStorage: ISessionStorage,
|
||||||
|
private readonly storage: IStorage,
|
||||||
|
) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute undo operation.
|
||||||
|
*
|
||||||
|
* @param session - The current session
|
||||||
|
* @returns Result of the undo operation
|
||||||
|
*/
|
||||||
|
async execute(session: Session): Promise<UndoResult> {
|
||||||
|
const entry = await this.sessionStorage.popUndoEntry(session.id)
|
||||||
|
if (!entry) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: "No changes to undo",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const currentContent = await this.readCurrentContent(entry.filePath)
|
||||||
|
|
||||||
|
if (!canUndo(entry, currentContent)) {
|
||||||
|
await this.sessionStorage.pushUndoEntry(session.id, entry)
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
entry,
|
||||||
|
error: "File has been modified since the change was made",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.restoreContent(entry.filePath, entry.previousContent)
|
||||||
|
|
||||||
|
session.popUndoEntry()
|
||||||
|
session.stats.editsApplied--
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
entry,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
await this.sessionStorage.pushUndoEntry(session.id, entry)
|
||||||
|
|
||||||
|
const message = error instanceof Error ? error.message : "Unknown error"
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
entry,
|
||||||
|
error: `Failed to undo: ${message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if undo is available.
|
||||||
|
*/
|
||||||
|
async canUndo(session: Session): Promise<boolean> {
|
||||||
|
const stack = await this.sessionStorage.getUndoStack(session.id)
|
||||||
|
return stack.length > 0
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the next undo entry without removing it.
|
||||||
|
*/
|
||||||
|
async peekUndoEntry(session: Session): Promise<UndoEntry | null> {
|
||||||
|
const stack = await this.sessionStorage.getUndoStack(session.id)
|
||||||
|
if (stack.length === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return stack[stack.length - 1]
|
||||||
|
}
|
||||||
|
|
||||||
|
private async readCurrentContent(filePath: string): Promise<string[]> {
|
||||||
|
try {
|
||||||
|
const content = await fs.readFile(filePath, "utf-8")
|
||||||
|
return content.split("\n")
|
||||||
|
} catch (error) {
|
||||||
|
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
throw error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async restoreContent(filePath: string, content: string[]): Promise<void> {
|
||||||
|
const fileContent = content.join("\n")
|
||||||
|
await fs.writeFile(filePath, fileContent, "utf-8")
|
||||||
|
|
||||||
|
const hash = md5(fileContent)
|
||||||
|
const stats = await fs.stat(filePath)
|
||||||
|
|
||||||
|
await this.storage.setFile(filePath, {
|
||||||
|
lines: content,
|
||||||
|
hash,
|
||||||
|
size: stats.size,
|
||||||
|
lastModified: stats.mtimeMs,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
6
packages/ipuaro/src/application/use-cases/index.ts
Normal file
6
packages/ipuaro/src/application/use-cases/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// Application Use Cases
|
||||||
|
|
||||||
|
export * from "./StartSession.js"
|
||||||
|
export * from "./HandleMessage.js"
|
||||||
|
export * from "./UndoChange.js"
|
||||||
|
export * from "./ContextManager.js"
|
||||||
250
packages/ipuaro/src/cli/commands/index-cmd.ts
Normal file
250
packages/ipuaro/src/cli/commands/index-cmd.ts
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
/**
|
||||||
|
* Index command implementation.
|
||||||
|
* Indexes project without starting TUI.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as fs from "node:fs/promises"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import { RedisClient } from "../../infrastructure/storage/RedisClient.js"
|
||||||
|
import { RedisStorage } from "../../infrastructure/storage/RedisStorage.js"
|
||||||
|
import { generateProjectName } from "../../infrastructure/storage/schema.js"
|
||||||
|
import { FileScanner } from "../../infrastructure/indexer/FileScanner.js"
|
||||||
|
import { ASTParser } from "../../infrastructure/indexer/ASTParser.js"
|
||||||
|
import { MetaAnalyzer } from "../../infrastructure/indexer/MetaAnalyzer.js"
|
||||||
|
import { IndexBuilder } from "../../infrastructure/indexer/IndexBuilder.js"
|
||||||
|
import { createFileData } from "../../domain/value-objects/FileData.js"
|
||||||
|
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||||
|
import { type Config, DEFAULT_CONFIG } from "../../shared/constants/config.js"
|
||||||
|
import { md5 } from "../../shared/utils/hash.js"
|
||||||
|
import { checkRedis } from "./onboarding.js"
|
||||||
|
|
||||||
|
type Language = "ts" | "tsx" | "js" | "jsx"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of index command.
|
||||||
|
*/
|
||||||
|
export interface IndexResult {
|
||||||
|
success: boolean
|
||||||
|
filesIndexed: number
|
||||||
|
filesSkipped: number
|
||||||
|
errors: string[]
|
||||||
|
duration: number
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Progress callback for indexing.
|
||||||
|
*/
|
||||||
|
export type IndexProgressCallback = (
|
||||||
|
phase: "scanning" | "parsing" | "analyzing" | "storing",
|
||||||
|
current: number,
|
||||||
|
total: number,
|
||||||
|
currentFile?: string,
|
||||||
|
) => void
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the index command.
|
||||||
|
*/
|
||||||
|
export async function executeIndex(
|
||||||
|
projectPath: string,
|
||||||
|
config: Config = DEFAULT_CONFIG,
|
||||||
|
onProgress?: IndexProgressCallback,
|
||||||
|
): Promise<IndexResult> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
const resolvedPath = path.resolve(projectPath)
|
||||||
|
const projectName = generateProjectName(resolvedPath)
|
||||||
|
const errors: string[] = []
|
||||||
|
|
||||||
|
console.warn(`📁 Indexing project: ${resolvedPath}`)
|
||||||
|
console.warn(` Project name: ${projectName}\n`)
|
||||||
|
|
||||||
|
const redisResult = await checkRedis(config.redis)
|
||||||
|
if (!redisResult.ok) {
|
||||||
|
console.error(`❌ ${redisResult.error ?? "Redis unavailable"}`)
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
filesIndexed: 0,
|
||||||
|
filesSkipped: 0,
|
||||||
|
errors: [redisResult.error ?? "Redis unavailable"],
|
||||||
|
duration: Date.now() - startTime,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let redisClient: RedisClient | null = null
|
||||||
|
|
||||||
|
try {
|
||||||
|
redisClient = new RedisClient(config.redis)
|
||||||
|
await redisClient.connect()
|
||||||
|
|
||||||
|
const storage = new RedisStorage(redisClient, projectName)
|
||||||
|
const scanner = new FileScanner({
|
||||||
|
onProgress: (progress): void => {
|
||||||
|
onProgress?.("scanning", progress.current, progress.total, progress.currentFile)
|
||||||
|
},
|
||||||
|
})
|
||||||
|
const astParser = new ASTParser()
|
||||||
|
const metaAnalyzer = new MetaAnalyzer(resolvedPath)
|
||||||
|
const indexBuilder = new IndexBuilder(resolvedPath)
|
||||||
|
|
||||||
|
console.warn("🔍 Scanning files...")
|
||||||
|
const files = await scanner.scanAll(resolvedPath)
|
||||||
|
console.warn(` Found ${String(files.length)} files\n`)
|
||||||
|
|
||||||
|
if (files.length === 0) {
|
||||||
|
console.warn("⚠️ No files found to index.")
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
filesIndexed: 0,
|
||||||
|
filesSkipped: 0,
|
||||||
|
errors: [],
|
||||||
|
duration: Date.now() - startTime,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.warn("📝 Parsing files...")
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
const fileContents = new Map<string, string>()
|
||||||
|
let parsed = 0
|
||||||
|
let skipped = 0
|
||||||
|
|
||||||
|
for (const file of files) {
|
||||||
|
const fullPath = path.join(resolvedPath, file.path)
|
||||||
|
const language = getLanguage(file.path)
|
||||||
|
|
||||||
|
if (!language) {
|
||||||
|
skipped++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const content = await fs.readFile(fullPath, "utf-8")
|
||||||
|
const ast = astParser.parse(content, language)
|
||||||
|
|
||||||
|
if (ast.parseError) {
|
||||||
|
errors.push(
|
||||||
|
`Parse error in ${file.path}: ${ast.parseErrorMessage ?? "unknown"}`,
|
||||||
|
)
|
||||||
|
skipped++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
allASTs.set(file.path, ast)
|
||||||
|
fileContents.set(file.path, content)
|
||||||
|
parsed++
|
||||||
|
|
||||||
|
onProgress?.("parsing", parsed + skipped, files.length, file.path)
|
||||||
|
|
||||||
|
if ((parsed + skipped) % 50 === 0) {
|
||||||
|
process.stdout.write(
|
||||||
|
`\r Parsed ${String(parsed)} files (${String(skipped)} skipped)...`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
errors.push(`Error reading ${file.path}: ${message}`)
|
||||||
|
skipped++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
console.warn(`\r Parsed ${String(parsed)} files (${String(skipped)} skipped) \n`)
|
||||||
|
|
||||||
|
console.warn("📊 Analyzing metadata...")
|
||||||
|
let analyzed = 0
|
||||||
|
for (const [filePath, ast] of allASTs) {
|
||||||
|
const content = fileContents.get(filePath) ?? ""
|
||||||
|
const meta = metaAnalyzer.analyze(
|
||||||
|
path.join(resolvedPath, filePath),
|
||||||
|
ast,
|
||||||
|
content,
|
||||||
|
allASTs,
|
||||||
|
)
|
||||||
|
|
||||||
|
const fileData = createFileData({
|
||||||
|
lines: content.split("\n"),
|
||||||
|
hash: md5(content),
|
||||||
|
size: content.length,
|
||||||
|
lastModified: Date.now(),
|
||||||
|
})
|
||||||
|
|
||||||
|
await storage.setFile(filePath, fileData)
|
||||||
|
await storage.setAST(filePath, ast)
|
||||||
|
await storage.setMeta(filePath, meta)
|
||||||
|
|
||||||
|
analyzed++
|
||||||
|
onProgress?.("analyzing", analyzed, allASTs.size, filePath)
|
||||||
|
|
||||||
|
if (analyzed % 50 === 0) {
|
||||||
|
process.stdout.write(
|
||||||
|
`\r Analyzed ${String(analyzed)}/${String(allASTs.size)} files...`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
console.warn(`\r Analyzed ${String(analyzed)} files \n`)
|
||||||
|
|
||||||
|
console.warn("🏗️ Building indexes...")
|
||||||
|
onProgress?.("storing", 0, 2)
|
||||||
|
const symbolIndex = indexBuilder.buildSymbolIndex(allASTs)
|
||||||
|
const depsGraph = indexBuilder.buildDepsGraph(allASTs)
|
||||||
|
|
||||||
|
await storage.setSymbolIndex(symbolIndex)
|
||||||
|
await storage.setDepsGraph(depsGraph)
|
||||||
|
onProgress?.("storing", 2, 2)
|
||||||
|
|
||||||
|
const duration = Date.now() - startTime
|
||||||
|
const durationSec = (duration / 1000).toFixed(2)
|
||||||
|
|
||||||
|
console.warn(`✅ Indexing complete in ${durationSec}s`)
|
||||||
|
console.warn(` Files indexed: ${String(parsed)}`)
|
||||||
|
console.warn(` Files skipped: ${String(skipped)}`)
|
||||||
|
console.warn(` Symbols: ${String(symbolIndex.size)}`)
|
||||||
|
|
||||||
|
if (errors.length > 0) {
|
||||||
|
console.warn(`\n⚠️ ${String(errors.length)} errors occurred:`)
|
||||||
|
for (const error of errors.slice(0, 5)) {
|
||||||
|
console.warn(` - ${error}`)
|
||||||
|
}
|
||||||
|
if (errors.length > 5) {
|
||||||
|
console.warn(` ... and ${String(errors.length - 5)} more`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
filesIndexed: parsed,
|
||||||
|
filesSkipped: skipped,
|
||||||
|
errors,
|
||||||
|
duration,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
console.error(`❌ Indexing failed: ${message}`)
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
filesIndexed: 0,
|
||||||
|
filesSkipped: 0,
|
||||||
|
errors: [message],
|
||||||
|
duration: Date.now() - startTime,
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
if (redisClient) {
|
||||||
|
await redisClient.disconnect()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get language from file extension.
|
||||||
|
*/
|
||||||
|
function getLanguage(filePath: string): Language | null {
|
||||||
|
const ext = path.extname(filePath).toLowerCase()
|
||||||
|
switch (ext) {
|
||||||
|
case ".ts":
|
||||||
|
return "ts"
|
||||||
|
case ".tsx":
|
||||||
|
return "tsx"
|
||||||
|
case ".js":
|
||||||
|
return "js"
|
||||||
|
case ".jsx":
|
||||||
|
return "jsx"
|
||||||
|
default:
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
18
packages/ipuaro/src/cli/commands/index.ts
Normal file
18
packages/ipuaro/src/cli/commands/index.ts
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
/**
|
||||||
|
* CLI commands module.
|
||||||
|
*/
|
||||||
|
|
||||||
|
export { executeStart, type StartOptions, type StartResult } from "./start.js"
|
||||||
|
export { executeInit, type InitOptions, type InitResult } from "./init.js"
|
||||||
|
export { executeIndex, type IndexResult, type IndexProgressCallback } from "./index-cmd.js"
|
||||||
|
export {
|
||||||
|
runOnboarding,
|
||||||
|
checkRedis,
|
||||||
|
checkOllama,
|
||||||
|
checkModel,
|
||||||
|
checkProjectSize,
|
||||||
|
pullModel,
|
||||||
|
type OnboardingResult,
|
||||||
|
type OnboardingOptions,
|
||||||
|
} from "./onboarding.js"
|
||||||
|
export { registerAllTools } from "./tools-setup.js"
|
||||||
114
packages/ipuaro/src/cli/commands/init.ts
Normal file
114
packages/ipuaro/src/cli/commands/init.ts
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
/**
|
||||||
|
* Init command implementation.
|
||||||
|
* Creates .ipuaro.json configuration file.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as fs from "node:fs/promises"
|
||||||
|
import * as path from "node:path"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Default configuration template for .ipuaro.json
|
||||||
|
*/
|
||||||
|
const CONFIG_TEMPLATE = {
|
||||||
|
$schema: "https://raw.githubusercontent.com/samiyev/puaros/main/packages/ipuaro/schema.json",
|
||||||
|
redis: {
|
||||||
|
host: "localhost",
|
||||||
|
port: 6379,
|
||||||
|
db: 0,
|
||||||
|
},
|
||||||
|
llm: {
|
||||||
|
model: "qwen2.5-coder:7b-instruct",
|
||||||
|
temperature: 0.1,
|
||||||
|
host: "http://localhost:11434",
|
||||||
|
},
|
||||||
|
project: {
|
||||||
|
ignorePatterns: [],
|
||||||
|
},
|
||||||
|
edit: {
|
||||||
|
autoApply: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for init command.
|
||||||
|
*/
|
||||||
|
export interface InitOptions {
|
||||||
|
force?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of init command.
|
||||||
|
*/
|
||||||
|
export interface InitResult {
|
||||||
|
success: boolean
|
||||||
|
filePath?: string
|
||||||
|
error?: string
|
||||||
|
skipped?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the init command.
|
||||||
|
* Creates a .ipuaro.json file in the specified directory.
|
||||||
|
*/
|
||||||
|
export async function executeInit(
|
||||||
|
projectPath = ".",
|
||||||
|
options: InitOptions = {},
|
||||||
|
): Promise<InitResult> {
|
||||||
|
const resolvedPath = path.resolve(projectPath)
|
||||||
|
const configPath = path.join(resolvedPath, ".ipuaro.json")
|
||||||
|
|
||||||
|
try {
|
||||||
|
const exists = await fileExists(configPath)
|
||||||
|
|
||||||
|
if (exists && !options.force) {
|
||||||
|
console.warn(`⚠️ Configuration file already exists: ${configPath}`)
|
||||||
|
console.warn(" Use --force to overwrite.")
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
skipped: true,
|
||||||
|
filePath: configPath,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const dirExists = await fileExists(resolvedPath)
|
||||||
|
if (!dirExists) {
|
||||||
|
await fs.mkdir(resolvedPath, { recursive: true })
|
||||||
|
}
|
||||||
|
|
||||||
|
const content = JSON.stringify(CONFIG_TEMPLATE, null, 4)
|
||||||
|
await fs.writeFile(configPath, content, "utf-8")
|
||||||
|
|
||||||
|
console.warn(`✅ Created ${configPath}`)
|
||||||
|
console.warn("\nConfiguration options:")
|
||||||
|
console.warn(" redis.host - Redis server host (default: localhost)")
|
||||||
|
console.warn(" redis.port - Redis server port (default: 6379)")
|
||||||
|
console.warn(" llm.model - Ollama model name (default: qwen2.5-coder:7b-instruct)")
|
||||||
|
console.warn(" llm.temperature - LLM temperature (default: 0.1)")
|
||||||
|
console.warn(" edit.autoApply - Auto-apply edits without confirmation (default: false)")
|
||||||
|
console.warn("\nRun `ipuaro` to start the AI agent.")
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
filePath: configPath,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
console.error(`❌ Failed to create configuration: ${message}`)
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: message,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a file or directory exists.
|
||||||
|
*/
|
||||||
|
async function fileExists(filePath: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await fs.access(filePath)
|
||||||
|
return true
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
290
packages/ipuaro/src/cli/commands/onboarding.ts
Normal file
290
packages/ipuaro/src/cli/commands/onboarding.ts
Normal file
@@ -0,0 +1,290 @@
|
|||||||
|
/**
|
||||||
|
* Onboarding checks for CLI.
|
||||||
|
* Validates environment before starting ipuaro.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { RedisClient } from "../../infrastructure/storage/RedisClient.js"
|
||||||
|
import { OllamaClient } from "../../infrastructure/llm/OllamaClient.js"
|
||||||
|
import { FileScanner } from "../../infrastructure/indexer/FileScanner.js"
|
||||||
|
import type { LLMConfig, RedisConfig } from "../../shared/constants/config.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of onboarding checks.
|
||||||
|
*/
|
||||||
|
export interface OnboardingResult {
|
||||||
|
success: boolean
|
||||||
|
redisOk: boolean
|
||||||
|
ollamaOk: boolean
|
||||||
|
modelOk: boolean
|
||||||
|
projectOk: boolean
|
||||||
|
fileCount: number
|
||||||
|
errors: string[]
|
||||||
|
warnings: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for onboarding checks.
|
||||||
|
*/
|
||||||
|
export interface OnboardingOptions {
|
||||||
|
redisConfig: RedisConfig
|
||||||
|
llmConfig: LLMConfig
|
||||||
|
projectPath: string
|
||||||
|
maxFiles?: number
|
||||||
|
skipRedis?: boolean
|
||||||
|
skipOllama?: boolean
|
||||||
|
skipModel?: boolean
|
||||||
|
skipProject?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
const DEFAULT_MAX_FILES = 10_000
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check Redis availability.
|
||||||
|
*/
|
||||||
|
export async function checkRedis(config: RedisConfig): Promise<{
|
||||||
|
ok: boolean
|
||||||
|
error?: string
|
||||||
|
}> {
|
||||||
|
const client = new RedisClient(config)
|
||||||
|
|
||||||
|
try {
|
||||||
|
await client.connect()
|
||||||
|
const pingOk = await client.ping()
|
||||||
|
await client.disconnect()
|
||||||
|
|
||||||
|
if (!pingOk) {
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
error: "Redis ping failed. Server may be overloaded.",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: true }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
error: `Cannot connect to Redis: ${message}
|
||||||
|
|
||||||
|
Redis is required for ipuaro to store project indexes and session data.
|
||||||
|
|
||||||
|
Install Redis:
|
||||||
|
macOS: brew install redis && brew services start redis
|
||||||
|
Ubuntu: sudo apt install redis-server && sudo systemctl start redis
|
||||||
|
Docker: docker run -d -p 6379:6379 redis`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check Ollama availability.
|
||||||
|
*/
|
||||||
|
export async function checkOllama(config: LLMConfig): Promise<{
|
||||||
|
ok: boolean
|
||||||
|
error?: string
|
||||||
|
}> {
|
||||||
|
const client = new OllamaClient(config)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const available = await client.isAvailable()
|
||||||
|
|
||||||
|
if (!available) {
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
error: `Cannot connect to Ollama at ${config.host}
|
||||||
|
|
||||||
|
Ollama is required for ipuaro to process your requests using local LLMs.
|
||||||
|
|
||||||
|
Install Ollama:
|
||||||
|
macOS: brew install ollama && ollama serve
|
||||||
|
Linux: curl -fsSL https://ollama.com/install.sh | sh && ollama serve
|
||||||
|
Manual: https://ollama.com/download
|
||||||
|
|
||||||
|
After installing, ensure Ollama is running with: ollama serve`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: true }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
error: `Ollama check failed: ${message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check model availability.
|
||||||
|
*/
|
||||||
|
export async function checkModel(config: LLMConfig): Promise<{
|
||||||
|
ok: boolean
|
||||||
|
needsPull: boolean
|
||||||
|
error?: string
|
||||||
|
}> {
|
||||||
|
const client = new OllamaClient(config)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const hasModel = await client.hasModel(config.model)
|
||||||
|
|
||||||
|
if (!hasModel) {
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
needsPull: true,
|
||||||
|
error: `Model "${config.model}" is not installed.
|
||||||
|
|
||||||
|
Would you like to pull it? This may take a few minutes.
|
||||||
|
Run: ollama pull ${config.model}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: true, needsPull: false }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
needsPull: false,
|
||||||
|
error: `Model check failed: ${message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pull model from Ollama.
|
||||||
|
*/
|
||||||
|
export async function pullModel(
|
||||||
|
config: LLMConfig,
|
||||||
|
onProgress?: (status: string) => void,
|
||||||
|
): Promise<{ ok: boolean; error?: string }> {
|
||||||
|
const client = new OllamaClient(config)
|
||||||
|
|
||||||
|
try {
|
||||||
|
onProgress?.(`Pulling model "${config.model}"...`)
|
||||||
|
await client.pullModel(config.model)
|
||||||
|
onProgress?.(`Model "${config.model}" pulled successfully.`)
|
||||||
|
return { ok: true }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
error: `Failed to pull model: ${message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check project size.
|
||||||
|
*/
|
||||||
|
export async function checkProjectSize(
|
||||||
|
projectPath: string,
|
||||||
|
maxFiles: number = DEFAULT_MAX_FILES,
|
||||||
|
): Promise<{
|
||||||
|
ok: boolean
|
||||||
|
fileCount: number
|
||||||
|
warning?: string
|
||||||
|
}> {
|
||||||
|
const scanner = new FileScanner()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const files = await scanner.scanAll(projectPath)
|
||||||
|
const fileCount = files.length
|
||||||
|
|
||||||
|
if (fileCount > maxFiles) {
|
||||||
|
return {
|
||||||
|
ok: true,
|
||||||
|
fileCount,
|
||||||
|
warning: `Project has ${fileCount.toLocaleString()} files (>${maxFiles.toLocaleString()}).
|
||||||
|
This may take a while to index and use more memory.
|
||||||
|
|
||||||
|
Consider:
|
||||||
|
1. Running ipuaro in a subdirectory: ipuaro ./src
|
||||||
|
2. Adding patterns to .gitignore to exclude unnecessary files
|
||||||
|
3. Using a smaller project for better performance`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fileCount === 0) {
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
fileCount: 0,
|
||||||
|
warning: `No supported files found in "${projectPath}".
|
||||||
|
|
||||||
|
ipuaro supports: .ts, .tsx, .js, .jsx, .json, .yaml, .yml
|
||||||
|
|
||||||
|
Ensure you're running ipuaro in a project directory with source files.`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: true, fileCount }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
return {
|
||||||
|
ok: false,
|
||||||
|
fileCount: 0,
|
||||||
|
warning: `Failed to scan project: ${message}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run all onboarding checks.
|
||||||
|
*/
|
||||||
|
export async function runOnboarding(options: OnboardingOptions): Promise<OnboardingResult> {
|
||||||
|
const errors: string[] = []
|
||||||
|
const warnings: string[] = []
|
||||||
|
const maxFiles = options.maxFiles ?? DEFAULT_MAX_FILES
|
||||||
|
|
||||||
|
let redisOk = true
|
||||||
|
let ollamaOk = true
|
||||||
|
let modelOk = true
|
||||||
|
let projectOk = true
|
||||||
|
let fileCount = 0
|
||||||
|
|
||||||
|
if (!options.skipRedis) {
|
||||||
|
const redisResult = await checkRedis(options.redisConfig)
|
||||||
|
redisOk = redisResult.ok
|
||||||
|
if (!redisOk && redisResult.error) {
|
||||||
|
errors.push(redisResult.error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.skipOllama) {
|
||||||
|
const ollamaResult = await checkOllama(options.llmConfig)
|
||||||
|
ollamaOk = ollamaResult.ok
|
||||||
|
if (!ollamaOk && ollamaResult.error) {
|
||||||
|
errors.push(ollamaResult.error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.skipModel && ollamaOk) {
|
||||||
|
const modelResult = await checkModel(options.llmConfig)
|
||||||
|
modelOk = modelResult.ok
|
||||||
|
if (!modelOk && modelResult.error) {
|
||||||
|
errors.push(modelResult.error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.skipProject) {
|
||||||
|
const projectResult = await checkProjectSize(options.projectPath, maxFiles)
|
||||||
|
projectOk = projectResult.ok
|
||||||
|
fileCount = projectResult.fileCount
|
||||||
|
if (projectResult.warning) {
|
||||||
|
if (projectResult.ok) {
|
||||||
|
warnings.push(projectResult.warning)
|
||||||
|
} else {
|
||||||
|
errors.push(projectResult.warning)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: redisOk && ollamaOk && modelOk && projectOk && errors.length === 0,
|
||||||
|
redisOk,
|
||||||
|
ollamaOk,
|
||||||
|
modelOk,
|
||||||
|
projectOk,
|
||||||
|
fileCount,
|
||||||
|
errors,
|
||||||
|
warnings,
|
||||||
|
}
|
||||||
|
}
|
||||||
162
packages/ipuaro/src/cli/commands/start.ts
Normal file
162
packages/ipuaro/src/cli/commands/start.ts
Normal file
@@ -0,0 +1,162 @@
|
|||||||
|
/**
|
||||||
|
* Start command implementation.
|
||||||
|
* Launches the ipuaro TUI.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import * as path from "node:path"
|
||||||
|
import * as readline from "node:readline"
|
||||||
|
import { render } from "ink"
|
||||||
|
import React from "react"
|
||||||
|
import { App, type AppDependencies } from "../../tui/App.js"
|
||||||
|
import { RedisClient } from "../../infrastructure/storage/RedisClient.js"
|
||||||
|
import { RedisStorage } from "../../infrastructure/storage/RedisStorage.js"
|
||||||
|
import { RedisSessionStorage } from "../../infrastructure/storage/RedisSessionStorage.js"
|
||||||
|
import { OllamaClient } from "../../infrastructure/llm/OllamaClient.js"
|
||||||
|
import { ToolRegistry } from "../../infrastructure/tools/registry.js"
|
||||||
|
import { generateProjectName } from "../../infrastructure/storage/schema.js"
|
||||||
|
import { type Config, DEFAULT_CONFIG } from "../../shared/constants/config.js"
|
||||||
|
import { checkModel, pullModel, runOnboarding } from "./onboarding.js"
|
||||||
|
import { registerAllTools } from "./tools-setup.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for start command.
|
||||||
|
*/
|
||||||
|
export interface StartOptions {
|
||||||
|
autoApply?: boolean
|
||||||
|
model?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Result of start command.
|
||||||
|
*/
|
||||||
|
export interface StartResult {
|
||||||
|
success: boolean
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute the start command.
|
||||||
|
*/
|
||||||
|
export async function executeStart(
|
||||||
|
projectPath: string,
|
||||||
|
options: StartOptions,
|
||||||
|
config: Config = DEFAULT_CONFIG,
|
||||||
|
): Promise<StartResult> {
|
||||||
|
const resolvedPath = path.resolve(projectPath)
|
||||||
|
const projectName = generateProjectName(resolvedPath)
|
||||||
|
|
||||||
|
const llmConfig = {
|
||||||
|
...config.llm,
|
||||||
|
model: options.model ?? config.llm.model,
|
||||||
|
}
|
||||||
|
|
||||||
|
console.warn("🔍 Running pre-flight checks...\n")
|
||||||
|
|
||||||
|
const onboardingResult = await runOnboarding({
|
||||||
|
redisConfig: config.redis,
|
||||||
|
llmConfig,
|
||||||
|
projectPath: resolvedPath,
|
||||||
|
})
|
||||||
|
|
||||||
|
for (const warning of onboardingResult.warnings) {
|
||||||
|
console.warn(`⚠️ ${warning}\n`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!onboardingResult.success) {
|
||||||
|
for (const error of onboardingResult.errors) {
|
||||||
|
console.error(`❌ ${error}\n`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!onboardingResult.modelOk && onboardingResult.ollamaOk) {
|
||||||
|
const shouldPull = await promptYesNo(
|
||||||
|
`Would you like to pull "${llmConfig.model}"? (y/n): `,
|
||||||
|
)
|
||||||
|
|
||||||
|
if (shouldPull) {
|
||||||
|
const pullResult = await pullModel(llmConfig, console.warn)
|
||||||
|
if (!pullResult.ok) {
|
||||||
|
console.error(`❌ ${pullResult.error ?? "Unknown error"}`)
|
||||||
|
return { success: false, error: pullResult.error }
|
||||||
|
}
|
||||||
|
|
||||||
|
const recheckModel = await checkModel(llmConfig)
|
||||||
|
if (!recheckModel.ok) {
|
||||||
|
console.error("❌ Model still not available after pull.")
|
||||||
|
return { success: false, error: "Model pull failed" }
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return { success: false, error: "Model not available" }
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: onboardingResult.errors.join("\n"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.warn(`✅ All checks passed. Found ${String(onboardingResult.fileCount)} files.\n`)
|
||||||
|
console.warn("🚀 Starting ipuaro...\n")
|
||||||
|
|
||||||
|
const redisClient = new RedisClient(config.redis)
|
||||||
|
|
||||||
|
try {
|
||||||
|
await redisClient.connect()
|
||||||
|
|
||||||
|
const storage = new RedisStorage(redisClient, projectName)
|
||||||
|
const sessionStorage = new RedisSessionStorage(redisClient)
|
||||||
|
const llm = new OllamaClient(llmConfig)
|
||||||
|
const tools = new ToolRegistry()
|
||||||
|
|
||||||
|
registerAllTools(tools)
|
||||||
|
|
||||||
|
const deps: AppDependencies = {
|
||||||
|
storage,
|
||||||
|
sessionStorage,
|
||||||
|
llm,
|
||||||
|
tools,
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleExit = (): void => {
|
||||||
|
void redisClient.disconnect()
|
||||||
|
}
|
||||||
|
|
||||||
|
const { waitUntilExit } = render(
|
||||||
|
React.createElement(App, {
|
||||||
|
projectPath: resolvedPath,
|
||||||
|
autoApply: options.autoApply ?? config.edit.autoApply,
|
||||||
|
deps,
|
||||||
|
onExit: handleExit,
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
|
||||||
|
await waitUntilExit()
|
||||||
|
await redisClient.disconnect()
|
||||||
|
|
||||||
|
return { success: true }
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
console.error(`❌ Failed to start ipuaro: ${message}`)
|
||||||
|
await redisClient.disconnect()
|
||||||
|
return { success: false, error: message }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simple yes/no prompt for CLI.
|
||||||
|
*/
|
||||||
|
async function promptYesNo(question: string): Promise<boolean> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
process.stdout.write(question)
|
||||||
|
|
||||||
|
const rl = readline.createInterface({
|
||||||
|
input: process.stdin,
|
||||||
|
output: process.stdout,
|
||||||
|
})
|
||||||
|
|
||||||
|
rl.once("line", (answer: string) => {
|
||||||
|
rl.close()
|
||||||
|
resolve(answer.toLowerCase() === "y" || answer.toLowerCase() === "yes")
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
59
packages/ipuaro/src/cli/commands/tools-setup.ts
Normal file
59
packages/ipuaro/src/cli/commands/tools-setup.ts
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
/**
|
||||||
|
* Tool registration helper for CLI.
|
||||||
|
* Registers all 18 tools with the tool registry.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type { IToolRegistry } from "../../application/interfaces/IToolRegistry.js"
|
||||||
|
|
||||||
|
import { GetLinesTool } from "../../infrastructure/tools/read/GetLinesTool.js"
|
||||||
|
import { GetFunctionTool } from "../../infrastructure/tools/read/GetFunctionTool.js"
|
||||||
|
import { GetClassTool } from "../../infrastructure/tools/read/GetClassTool.js"
|
||||||
|
import { GetStructureTool } from "../../infrastructure/tools/read/GetStructureTool.js"
|
||||||
|
|
||||||
|
import { EditLinesTool } from "../../infrastructure/tools/edit/EditLinesTool.js"
|
||||||
|
import { CreateFileTool } from "../../infrastructure/tools/edit/CreateFileTool.js"
|
||||||
|
import { DeleteFileTool } from "../../infrastructure/tools/edit/DeleteFileTool.js"
|
||||||
|
|
||||||
|
import { FindReferencesTool } from "../../infrastructure/tools/search/FindReferencesTool.js"
|
||||||
|
import { FindDefinitionTool } from "../../infrastructure/tools/search/FindDefinitionTool.js"
|
||||||
|
|
||||||
|
import { GetDependenciesTool } from "../../infrastructure/tools/analysis/GetDependenciesTool.js"
|
||||||
|
import { GetDependentsTool } from "../../infrastructure/tools/analysis/GetDependentsTool.js"
|
||||||
|
import { GetComplexityTool } from "../../infrastructure/tools/analysis/GetComplexityTool.js"
|
||||||
|
import { GetTodosTool } from "../../infrastructure/tools/analysis/GetTodosTool.js"
|
||||||
|
|
||||||
|
import { GitStatusTool } from "../../infrastructure/tools/git/GitStatusTool.js"
|
||||||
|
import { GitDiffTool } from "../../infrastructure/tools/git/GitDiffTool.js"
|
||||||
|
import { GitCommitTool } from "../../infrastructure/tools/git/GitCommitTool.js"
|
||||||
|
|
||||||
|
import { RunCommandTool } from "../../infrastructure/tools/run/RunCommandTool.js"
|
||||||
|
import { RunTestsTool } from "../../infrastructure/tools/run/RunTestsTool.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register all 18 tools with the tool registry.
|
||||||
|
*/
|
||||||
|
export function registerAllTools(registry: IToolRegistry): void {
|
||||||
|
registry.register(new GetLinesTool())
|
||||||
|
registry.register(new GetFunctionTool())
|
||||||
|
registry.register(new GetClassTool())
|
||||||
|
registry.register(new GetStructureTool())
|
||||||
|
|
||||||
|
registry.register(new EditLinesTool())
|
||||||
|
registry.register(new CreateFileTool())
|
||||||
|
registry.register(new DeleteFileTool())
|
||||||
|
|
||||||
|
registry.register(new FindReferencesTool())
|
||||||
|
registry.register(new FindDefinitionTool())
|
||||||
|
|
||||||
|
registry.register(new GetDependenciesTool())
|
||||||
|
registry.register(new GetDependentsTool())
|
||||||
|
registry.register(new GetComplexityTool())
|
||||||
|
registry.register(new GetTodosTool())
|
||||||
|
|
||||||
|
registry.register(new GitStatusTool())
|
||||||
|
registry.register(new GitDiffTool())
|
||||||
|
registry.register(new GitCommitTool())
|
||||||
|
|
||||||
|
registry.register(new RunCommandTool())
|
||||||
|
registry.register(new RunTestsTool())
|
||||||
|
}
|
||||||
63
packages/ipuaro/src/cli/index.ts
Normal file
63
packages/ipuaro/src/cli/index.ts
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
#!/usr/bin/env node

/**
 * ipuaro CLI entry point.
 * Local AI agent for codebase operations with infinite context feeling.
 *
 * Commands:
 *   start (default) - launch the TUI for a project directory
 *   init            - create a .ipuaro.json config file
 *   index           - index a project without starting the TUI
 */

import { createRequire } from "node:module"
import { Command } from "commander"
import { executeStart } from "./commands/start.js"
import { executeInit } from "./commands/init.js"
import { executeIndex } from "./commands/index-cmd.js"
import { loadConfig } from "../shared/config/loader.js"

// Read package.json through createRequire so the version lives in one place.
const require = createRequire(import.meta.url)
const pkg = require("../../package.json") as { version: string }

// Terminate with a non-zero status whenever a command reports failure.
function exitOnFailure(result: { success: boolean }): void {
    if (!result.success) {
        process.exit(1)
    }
}

const program = new Command()

program
    .name("ipuaro")
    .description("Local AI agent for codebase operations with infinite context feeling")
    .version(pkg.version)

program
    .command("start", { isDefault: true })
    .description("Start ipuaro TUI in the current directory")
    .argument("[path]", "Project path", ".")
    .option("--auto-apply", "Enable auto-apply mode for edits")
    .option("--model <name>", "Override LLM model")
    .action(async (projectPath: string, options: { autoApply?: boolean; model?: string }) => {
        const config = loadConfig(projectPath)
        exitOnFailure(await executeStart(projectPath, options, config))
    })

program
    .command("init")
    .description("Create .ipuaro.json config file")
    .argument("[path]", "Project path", ".")
    .option("--force", "Overwrite existing config file")
    .action(async (projectPath: string, options: { force?: boolean }) => {
        exitOnFailure(await executeInit(projectPath, options))
    })

program
    .command("index")
    .description("Index project without starting TUI")
    .argument("[path]", "Project path", ".")
    .action(async (projectPath: string) => {
        const config = loadConfig(projectPath)
        exitOnFailure(await executeIndex(projectPath, config))
    })

program.parse()
|
||||||
48
packages/ipuaro/src/domain/constants/index.ts
Normal file
48
packages/ipuaro/src/domain/constants/index.ts
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
// Domain Constants
//
// Shared tunables and file-classification lists used across the domain layer.

/** Maximum number of entries kept on a session's undo stack; older entries are evicted (see Session.addUndoEntry). */
export const MAX_UNDO_STACK_SIZE = 10

/** File extensions the indexer treats as indexable source/config content. */
export const SUPPORTED_EXTENSIONS = [
    ".ts",
    ".tsx",
    ".js",
    ".jsx",
    ".json",
    ".yaml",
    ".yml",
] as const

/** Extensions classified as binary assets (presumably skipped when reading content — confirm in indexer). */
export const BINARY_EXTENSIONS = [
    ".png",
    ".jpg",
    ".jpeg",
    ".gif",
    ".ico",
    ".svg",
    ".woff",
    ".woff2",
    ".ttf",
    ".eot",
    ".mp3",
    ".mp4",
    ".webm",
    ".pdf",
    ".zip",
    ".tar",
    ".gz",
] as const

/** Directory names excluded from project scanning by default. */
export const DEFAULT_IGNORE_PATTERNS = [
    "node_modules",
    "dist",
    "build",
    ".git",
    ".next",
    ".nuxt",
    "coverage",
    ".cache",
] as const

/** Context window size (presumably in tokens — confirm against the LLM client). */
export const CONTEXT_WINDOW_SIZE = 128_000

/** Fraction of context usage (0-1) at which compression should kick in. */
export const CONTEXT_COMPRESSION_THRESHOLD = 0.8
|
||||||
61
packages/ipuaro/src/domain/entities/Project.ts
Normal file
61
packages/ipuaro/src/domain/entities/Project.ts
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
import { basename, dirname } from "node:path"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Project entity representing an indexed codebase.
|
||||||
|
*/
|
||||||
|
export class Project {
|
||||||
|
readonly name: string
|
||||||
|
readonly rootPath: string
|
||||||
|
readonly createdAt: number
|
||||||
|
lastIndexedAt: number | null
|
||||||
|
fileCount: number
|
||||||
|
indexingInProgress: boolean
|
||||||
|
|
||||||
|
constructor(rootPath: string, createdAt?: number) {
|
||||||
|
this.rootPath = rootPath
|
||||||
|
this.name = Project.generateProjectName(rootPath)
|
||||||
|
this.createdAt = createdAt ?? Date.now()
|
||||||
|
this.lastIndexedAt = null
|
||||||
|
this.fileCount = 0
|
||||||
|
this.indexingInProgress = false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate project name from path.
|
||||||
|
* Format: {parent-folder}-{project-folder}
|
||||||
|
*/
|
||||||
|
static generateProjectName(rootPath: string): string {
|
||||||
|
const projectFolder = basename(rootPath)
|
||||||
|
const parentFolder = basename(dirname(rootPath))
|
||||||
|
|
||||||
|
if (parentFolder && parentFolder !== ".") {
|
||||||
|
return `${parentFolder}-${projectFolder}`
|
||||||
|
}
|
||||||
|
return projectFolder
|
||||||
|
}
|
||||||
|
|
||||||
|
markIndexingStarted(): void {
|
||||||
|
this.indexingInProgress = true
|
||||||
|
}
|
||||||
|
|
||||||
|
markIndexingCompleted(fileCount: number): void {
|
||||||
|
this.indexingInProgress = false
|
||||||
|
this.lastIndexedAt = Date.now()
|
||||||
|
this.fileCount = fileCount
|
||||||
|
}
|
||||||
|
|
||||||
|
markIndexingFailed(): void {
|
||||||
|
this.indexingInProgress = false
|
||||||
|
}
|
||||||
|
|
||||||
|
isIndexed(): boolean {
|
||||||
|
return this.lastIndexedAt !== null
|
||||||
|
}
|
||||||
|
|
||||||
|
getTimeSinceIndexed(): number | null {
|
||||||
|
if (this.lastIndexedAt === null) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
return Date.now() - this.lastIndexedAt
|
||||||
|
}
|
||||||
|
}
|
||||||
120
packages/ipuaro/src/domain/entities/Session.ts
Normal file
120
packages/ipuaro/src/domain/entities/Session.ts
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
import type { ChatMessage } from "../value-objects/ChatMessage.js"
|
||||||
|
import type { UndoEntry } from "../value-objects/UndoEntry.js"
|
||||||
|
import { MAX_UNDO_STACK_SIZE } from "../constants/index.js"
|
||||||
|
|
||||||
|
/**
 * Session statistics.
 * Running totals accumulated as messages are added to a session.
 */
export interface SessionStats {
    /** Total tokens used */
    totalTokens: number
    /** Total time in milliseconds */
    totalTimeMs: number
    /** Number of tool calls made */
    toolCalls: number
    /** Number of edits applied */
    editsApplied: number
    /** Number of edits rejected */
    editsRejected: number
}

/**
 * Context state for the session.
 */
export interface ContextState {
    /** Files currently in context */
    filesInContext: string[]
    /** Estimated token usage as a fraction (0-1) of the context window */
    tokenUsage: number
    /** Whether compression is needed */
    needsCompression: boolean
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Session entity representing a chat session.
|
||||||
|
*/
|
||||||
|
export class Session {
|
||||||
|
readonly id: string
|
||||||
|
readonly projectName: string
|
||||||
|
readonly createdAt: number
|
||||||
|
lastActivityAt: number
|
||||||
|
history: ChatMessage[]
|
||||||
|
context: ContextState
|
||||||
|
undoStack: UndoEntry[]
|
||||||
|
stats: SessionStats
|
||||||
|
inputHistory: string[]
|
||||||
|
|
||||||
|
constructor(id: string, projectName: string, createdAt?: number) {
|
||||||
|
this.id = id
|
||||||
|
this.projectName = projectName
|
||||||
|
this.createdAt = createdAt ?? Date.now()
|
||||||
|
this.lastActivityAt = this.createdAt
|
||||||
|
this.history = []
|
||||||
|
this.context = {
|
||||||
|
filesInContext: [],
|
||||||
|
tokenUsage: 0,
|
||||||
|
needsCompression: false,
|
||||||
|
}
|
||||||
|
this.undoStack = []
|
||||||
|
this.stats = {
|
||||||
|
totalTokens: 0,
|
||||||
|
totalTimeMs: 0,
|
||||||
|
toolCalls: 0,
|
||||||
|
editsApplied: 0,
|
||||||
|
editsRejected: 0,
|
||||||
|
}
|
||||||
|
this.inputHistory = []
|
||||||
|
}
|
||||||
|
|
||||||
|
addMessage(message: ChatMessage): void {
|
||||||
|
this.history.push(message)
|
||||||
|
this.lastActivityAt = Date.now()
|
||||||
|
|
||||||
|
if (message.stats) {
|
||||||
|
this.stats.totalTokens += message.stats.tokens
|
||||||
|
this.stats.totalTimeMs += message.stats.timeMs
|
||||||
|
this.stats.toolCalls += message.stats.toolCalls
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
addUndoEntry(entry: UndoEntry): void {
|
||||||
|
this.undoStack.push(entry)
|
||||||
|
if (this.undoStack.length > MAX_UNDO_STACK_SIZE) {
|
||||||
|
this.undoStack.shift()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
popUndoEntry(): UndoEntry | undefined {
|
||||||
|
return this.undoStack.pop()
|
||||||
|
}
|
||||||
|
|
||||||
|
addInputToHistory(input: string): void {
|
||||||
|
if (input.trim() && this.inputHistory[this.inputHistory.length - 1] !== input) {
|
||||||
|
this.inputHistory.push(input)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
clearHistory(): void {
|
||||||
|
this.history = []
|
||||||
|
this.context = {
|
||||||
|
filesInContext: [],
|
||||||
|
tokenUsage: 0,
|
||||||
|
needsCompression: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
getSessionDurationMs(): number {
|
||||||
|
return Date.now() - this.createdAt
|
||||||
|
}
|
||||||
|
|
||||||
|
getSessionDurationFormatted(): string {
|
||||||
|
const totalMinutes = Math.floor(this.getSessionDurationMs() / 60_000)
|
||||||
|
const hours = Math.floor(totalMinutes / 60)
|
||||||
|
const minutes = totalMinutes % 60
|
||||||
|
|
||||||
|
if (hours > 0) {
|
||||||
|
return `${String(hours)}h ${String(minutes)}m`
|
||||||
|
}
|
||||||
|
return `${String(minutes)}m`
|
||||||
|
}
|
||||||
|
}
|
||||||
3
packages/ipuaro/src/domain/entities/index.ts
Normal file
3
packages/ipuaro/src/domain/entities/index.ts
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
// Domain Entities
// Barrel file re-exporting the domain layer's entities.
export * from "./Session.js"
export * from "./Project.js"
|
||||||
13
packages/ipuaro/src/domain/index.ts
Normal file
13
packages/ipuaro/src/domain/index.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
// Domain Layer exports
// Barrel file aggregating the public surface of the domain layer.

// Entities
export * from "./entities/index.js"

// Value Objects
export * from "./value-objects/index.js"

// Service Interfaces
export * from "./services/index.js"

// Constants
export * from "./constants/index.js"
|
||||||
83
packages/ipuaro/src/domain/services/IIndexer.ts
Normal file
83
packages/ipuaro/src/domain/services/IIndexer.ts
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
import type { FileAST } from "../value-objects/FileAST.js"
|
||||||
|
import type { FileData } from "../value-objects/FileData.js"
|
||||||
|
import type { FileMeta } from "../value-objects/FileMeta.js"
|
||||||
|
import type { DepsGraph, SymbolIndex } from "./IStorage.js"
|
||||||
|
|
||||||
|
/**
 * Progress callback payload for indexing operations.
 */
export interface IndexProgress {
    /** Number of items processed so far. */
    current: number
    /** Total number of items to process. */
    total: number
    /** Path of the file currently being processed. */
    currentFile: string
    /** Current pipeline phase. */
    phase: "scanning" | "parsing" | "analyzing" | "indexing"
}

/**
 * Result of scanning a single file.
 */
export interface ScanResult {
    /** Path of the scanned entry. */
    path: string
    /** Filesystem entry kind. */
    type: "file" | "directory" | "symlink"
    /** Size in bytes. */
    size: number
    /** Last modification timestamp (ms). */
    lastModified: number
}

/**
 * Indexing result statistics.
 */
export interface IndexingStats {
    /** Number of files discovered during scanning. */
    filesScanned: number
    /** Number of files successfully parsed. */
    filesParsed: number
    /** Number of files that failed to parse. */
    parseErrors: number
    /** Total indexing time in milliseconds. */
    timeMs: number
}

/**
 * Indexer service interface (port).
 * Handles project scanning, parsing, and indexing.
 */
export interface IIndexer {
    /**
     * Scan directory and yield file results.
     */
    scan(root: string): AsyncGenerator<ScanResult>

    /**
     * Parse file content into AST.
     */
    parseFile(content: string, language: "ts" | "tsx" | "js" | "jsx"): FileAST

    /**
     * Analyze file and compute metadata.
     * Receives all parsed ASTs so cross-file data (dependents, hubs) can be derived.
     */
    analyzeFile(path: string, ast: FileAST, allASTs: Map<string, FileAST>): FileMeta

    /**
     * Build symbol index from all ASTs.
     */
    buildSymbolIndex(asts: Map<string, FileAST>): SymbolIndex

    /**
     * Build dependency graph from all ASTs.
     */
    buildDepsGraph(asts: Map<string, FileAST>): DepsGraph

    /**
     * Full indexing pipeline.
     * Optionally reports per-file progress through the callback.
     */
    indexProject(
        root: string,
        onProgress?: (progress: IndexProgress) => void,
    ): Promise<IndexingStats>

    /**
     * Update single file (incremental indexing).
     */
    updateFile(path: string, data: FileData): Promise<void>

    /**
     * Remove file from index.
     */
    removeFile(path: string): Promise<void>
}
|
||||||
81
packages/ipuaro/src/domain/services/ILLMClient.ts
Normal file
81
packages/ipuaro/src/domain/services/ILLMClient.ts
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
import type { ChatMessage } from "../value-objects/ChatMessage.js"
|
||||||
|
import type { ToolCall } from "../value-objects/ToolCall.js"
|
||||||
|
|
||||||
|
/**
 * Tool parameter definition for LLM.
 */
export interface ToolParameter {
    /** Parameter name exposed to the model. */
    name: string
    /** JSON-schema style value type. */
    type: "string" | "number" | "boolean" | "array" | "object"
    /** Human-readable description of the parameter. */
    description: string
    /** Whether the parameter must be supplied. */
    required: boolean
    /** Allowed values when the parameter is an enumeration. */
    enum?: string[]
}

/**
 * Tool definition for LLM function calling.
 */
export interface ToolDef {
    /** Tool name. */
    name: string
    /** Human-readable description shown to the model. */
    description: string
    /** Parameter schema for the tool. */
    parameters: ToolParameter[]
}

/**
 * Response from LLM.
 */
export interface LLMResponse {
    /** Text content of the response */
    content: string
    /** Tool calls parsed from response */
    toolCalls: ToolCall[]
    /** Token count for this response */
    tokens: number
    /** Generation time in milliseconds */
    timeMs: number
    /** Whether response was truncated */
    truncated: boolean
    /** Stop reason */
    stopReason: "end" | "length" | "tool_use"
}

/**
 * LLM client service interface (port).
 * Abstracts the LLM provider.
 */
export interface ILLMClient {
    /**
     * Send messages to LLM and get response.
     * Optional tool definitions enable function calling.
     */
    chat(messages: ChatMessage[], tools?: ToolDef[]): Promise<LLMResponse>

    /**
     * Count tokens in text.
     */
    countTokens(text: string): Promise<number>

    /**
     * Check if LLM service is available.
     */
    isAvailable(): Promise<boolean>

    /**
     * Get current model name.
     */
    getModelName(): string

    /**
     * Get context window size.
     */
    getContextWindowSize(): number

    /**
     * Pull/download model if not available locally.
     */
    pullModel(model: string): Promise<void>

    /**
     * Abort current generation.
     */
    abort(): void
}
|
||||||
88
packages/ipuaro/src/domain/services/ISessionStorage.ts
Normal file
88
packages/ipuaro/src/domain/services/ISessionStorage.ts
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
import type { ContextState, Session, SessionStats } from "../entities/Session.js"
|
||||||
|
import type { ChatMessage } from "../value-objects/ChatMessage.js"
|
||||||
|
import type { UndoEntry } from "../value-objects/UndoEntry.js"
|
||||||
|
|
||||||
|
/**
 * Session data stored in persistence layer.
 * Serializable snapshot of a Session entity (undo stack is handled separately).
 */
export interface SessionData {
    id: string
    projectName: string
    createdAt: number
    lastActivityAt: number
    history: ChatMessage[]
    context: ContextState
    stats: SessionStats
    inputHistory: string[]
}

/**
 * Session list item (minimal info for listing).
 */
export interface SessionListItem {
    id: string
    projectName: string
    createdAt: number
    lastActivityAt: number
    messageCount: number
}

/**
 * Storage service interface for session persistence.
 */
export interface ISessionStorage {
    /**
     * Save a session to storage.
     */
    saveSession(session: Session): Promise<void>

    /**
     * Load a session by ID.
     * Resolves to null when the session does not exist.
     */
    loadSession(sessionId: string): Promise<Session | null>

    /**
     * Delete a session.
     */
    deleteSession(sessionId: string): Promise<void>

    /**
     * Get list of all sessions for a project.
     * When projectName is omitted, presumably lists sessions across all projects — confirm in implementation.
     */
    listSessions(projectName?: string): Promise<SessionListItem[]>

    /**
     * Get the latest session for a project.
     * Resolves to null when the project has no sessions.
     */
    getLatestSession(projectName: string): Promise<Session | null>

    /**
     * Check if a session exists.
     */
    sessionExists(sessionId: string): Promise<boolean>

    /**
     * Add undo entry to session's undo stack.
     */
    pushUndoEntry(sessionId: string, entry: UndoEntry): Promise<void>

    /**
     * Pop undo entry from session's undo stack.
     * Resolves to null when the stack is empty.
     */
    popUndoEntry(sessionId: string): Promise<UndoEntry | null>

    /**
     * Get undo stack for a session.
     */
    getUndoStack(sessionId: string): Promise<UndoEntry[]>

    /**
     * Update session's last activity timestamp.
     */
    touchSession(sessionId: string): Promise<void>

    /**
     * Clear all sessions.
     */
    clearAllSessions(): Promise<void>
}
|
||||||
65
packages/ipuaro/src/domain/services/IStorage.ts
Normal file
65
packages/ipuaro/src/domain/services/IStorage.ts
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
import type { FileData } from "../value-objects/FileData.js"
|
||||||
|
import type { FileAST } from "../value-objects/FileAST.js"
|
||||||
|
import type { FileMeta } from "../value-objects/FileMeta.js"
|
||||||
|
|
||||||
|
/**
 * Symbol index mapping symbol names to their locations.
 */
export interface SymbolLocation {
    /** File path containing the symbol. */
    path: string
    /** Line number of the declaration. */
    line: number
    /** Symbol kind. */
    type: "function" | "class" | "interface" | "type" | "variable"
}

/** Map from symbol name to every location where it is declared. */
export type SymbolIndex = Map<string, SymbolLocation[]>

/**
 * Dependencies graph for the project.
 */
export interface DepsGraph {
    /** Map from file path to its imports */
    imports: Map<string, string[]>
    /** Map from file path to files that import it */
    importedBy: Map<string, string[]>
}

/**
 * Storage service interface (port).
 * Abstracts the persistence layer for project data.
 * Getter methods resolve to null when the requested entry is absent.
 */
export interface IStorage {
    // File data operations
    getFile(path: string): Promise<FileData | null>
    setFile(path: string, data: FileData): Promise<void>
    deleteFile(path: string): Promise<void>
    getAllFiles(): Promise<Map<string, FileData>>
    getFileCount(): Promise<number>

    // AST operations
    getAST(path: string): Promise<FileAST | null>
    setAST(path: string, ast: FileAST): Promise<void>
    deleteAST(path: string): Promise<void>
    getAllASTs(): Promise<Map<string, FileAST>>

    // Meta operations
    getMeta(path: string): Promise<FileMeta | null>
    setMeta(path: string, meta: FileMeta): Promise<void>
    deleteMeta(path: string): Promise<void>
    getAllMetas(): Promise<Map<string, FileMeta>>

    // Index operations
    getSymbolIndex(): Promise<SymbolIndex>
    setSymbolIndex(index: SymbolIndex): Promise<void>
    getDepsGraph(): Promise<DepsGraph>
    setDepsGraph(graph: DepsGraph): Promise<void>

    // Config operations
    getProjectConfig(key: string): Promise<unknown>
    setProjectConfig(key: string, value: unknown): Promise<void>

    // Lifecycle
    connect(): Promise<void>
    disconnect(): Promise<void>
    isConnected(): boolean
    clear(): Promise<void>
}
|
||||||
68
packages/ipuaro/src/domain/services/ITool.ts
Normal file
68
packages/ipuaro/src/domain/services/ITool.ts
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
import type { ToolResult } from "../value-objects/ToolResult.js"
|
||||||
|
import type { IStorage } from "./IStorage.js"
|
||||||
|
|
||||||
|
/**
 * Tool parameter schema.
 */
export interface ToolParameterSchema {
    /** Parameter name. */
    name: string
    /** JSON-schema style value type. */
    type: "string" | "number" | "boolean" | "array" | "object"
    /** Human-readable description. */
    description: string
    /** Whether the parameter must be supplied. */
    required: boolean
    /** Default value used when the parameter is omitted. */
    default?: unknown
}

/**
 * Context provided to tools during execution.
 */
export interface ToolContext {
    /** Project root path */
    projectRoot: string
    /** Storage service */
    storage: IStorage
    /** Request user confirmation callback; resolves true when the user approves */
    requestConfirmation: (message: string, diff?: DiffInfo) => Promise<boolean>
    /** Report progress callback */
    onProgress?: (message: string) => void
}

/**
 * Diff information for confirmation dialogs.
 */
export interface DiffInfo {
    filePath: string
    oldLines: string[]
    newLines: string[]
    startLine: number
}

/**
 * Tool interface (port).
 * All tools must implement this interface.
 */
export interface ITool {
    /** Tool name (used in tool calls) */
    readonly name: string

    /** Human-readable description */
    readonly description: string

    /** Tool parameters schema */
    readonly parameters: ToolParameterSchema[]

    /** Whether tool requires user confirmation before execution */
    readonly requiresConfirmation: boolean

    /** Tool category */
    readonly category: "read" | "edit" | "search" | "analysis" | "git" | "run"

    /**
     * Execute the tool with given parameters.
     */
    execute(params: Record<string, unknown>, ctx: ToolContext): Promise<ToolResult>

    /**
     * Validate parameters before execution.
     * Returns an error message, or null when the parameters are valid.
     */
    validateParams(params: Record<string, unknown>): string | null
}
|
||||||
6
packages/ipuaro/src/domain/services/index.ts
Normal file
6
packages/ipuaro/src/domain/services/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// Domain Service Interfaces (Ports)
// Barrel file re-exporting the domain layer's service contracts.
export * from "./IStorage.js"
export * from "./ISessionStorage.js"
export * from "./ILLMClient.js"
export * from "./ITool.js"
export * from "./IIndexer.js"
|
||||||
79
packages/ipuaro/src/domain/value-objects/ChatMessage.ts
Normal file
79
packages/ipuaro/src/domain/value-objects/ChatMessage.ts
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
import type { ToolCall } from "./ToolCall.js"
|
||||||
|
import type { ToolResult } from "./ToolResult.js"
|
||||||
|
|
||||||
|
/**
 * Represents a message in the chat history.
 */

export type MessageRole = "user" | "assistant" | "tool" | "system"

export interface MessageStats {
    /** Token count for this message */
    tokens: number
    /** Response generation time in ms (for assistant messages) */
    timeMs: number
    /** Number of tool calls in this message */
    toolCalls: number
}

export interface ChatMessage {
    /** Message role */
    role: MessageRole
    /** Message content */
    content: string
    /** Timestamp when message was created (epoch ms) */
    timestamp: number
    /** Tool calls made by assistant (if any) */
    toolCalls?: ToolCall[]
    /** Tool results (for tool role messages) */
    toolResults?: ToolResult[]
    /** Message statistics */
    stats?: MessageStats
}
|
||||||
|
|
||||||
|
export function createUserMessage(content: string): ChatMessage {
|
||||||
|
return {
|
||||||
|
role: "user",
|
||||||
|
content,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createAssistantMessage(
|
||||||
|
content: string,
|
||||||
|
toolCalls?: ToolCall[],
|
||||||
|
stats?: MessageStats,
|
||||||
|
): ChatMessage {
|
||||||
|
return {
|
||||||
|
role: "assistant",
|
||||||
|
content,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
toolCalls,
|
||||||
|
stats,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createToolMessage(results: ToolResult[]): ChatMessage {
|
||||||
|
return {
|
||||||
|
role: "tool",
|
||||||
|
content: results.map((r) => formatToolResult(r)).join("\n\n"),
|
||||||
|
timestamp: Date.now(),
|
||||||
|
toolResults: results,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createSystemMessage(content: string): ChatMessage {
|
||||||
|
return {
|
||||||
|
role: "system",
|
||||||
|
content,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatToolResult(result: ToolResult): string {
|
||||||
|
if (result.success) {
|
||||||
|
return `[${result.callId}] Success: ${JSON.stringify(result.data)}`
|
||||||
|
}
|
||||||
|
const errorMsg = result.error ?? "Unknown error"
|
||||||
|
return `[${result.callId}] Error: ${errorMsg}`
|
||||||
|
}
|
||||||
163
packages/ipuaro/src/domain/value-objects/FileAST.ts
Normal file
163
packages/ipuaro/src/domain/value-objects/FileAST.ts
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
/**
 * Represents parsed AST information for a file.
 * All line numbers below refer to positions within the source file.
 */

export interface ImportInfo {
    /** Import name or alias */
    name: string
    /** Source module path */
    from: string
    /** Line number of import statement */
    line: number
    /** Import type classification */
    type: "internal" | "external" | "builtin"
    /** Whether it's a default import */
    isDefault: boolean
}

export interface ExportInfo {
    /** Exported name */
    name: string
    /** Line number of export */
    line: number
    /** Whether it's a default export */
    isDefault: boolean
    /** Export kind: function, class, variable, type, or interface */
    kind: "function" | "class" | "variable" | "type" | "interface"
}

export interface ParameterInfo {
    /** Parameter name */
    name: string
    /** Parameter type (if available) */
    type?: string
    /** Whether it's optional */
    optional: boolean
    /** Whether it has a default value */
    hasDefault: boolean
}

export interface FunctionInfo {
    /** Function name */
    name: string
    /** Start line number */
    lineStart: number
    /** End line number */
    lineEnd: number
    /** Function parameters */
    params: ParameterInfo[]
    /** Whether function is async */
    isAsync: boolean
    /** Whether function is exported */
    isExported: boolean
    /** Return type (if available) */
    returnType?: string
}

export interface MethodInfo {
    /** Method name */
    name: string
    /** Start line number */
    lineStart: number
    /** End line number */
    lineEnd: number
    /** Method parameters */
    params: ParameterInfo[]
    /** Whether method is async */
    isAsync: boolean
    /** Method visibility */
    visibility: "public" | "private" | "protected"
    /** Whether it's static */
    isStatic: boolean
}

export interface PropertyInfo {
    /** Property name */
    name: string
    /** Line number */
    line: number
    /** Property type (if available) */
    type?: string
    /** Property visibility */
    visibility: "public" | "private" | "protected"
    /** Whether it's static */
    isStatic: boolean
    /** Whether it's readonly */
    isReadonly: boolean
}

export interface ClassInfo {
    /** Class name */
    name: string
    /** Start line number */
    lineStart: number
    /** End line number */
    lineEnd: number
    /** Class methods */
    methods: MethodInfo[]
    /** Class properties */
    properties: PropertyInfo[]
    /** Extended class name */
    extends?: string
    /** Implemented interfaces */
    implements: string[]
    /** Whether class is exported */
    isExported: boolean
    /** Whether class is abstract */
    isAbstract: boolean
}

export interface InterfaceInfo {
    /** Interface name */
    name: string
    /** Start line number */
    lineStart: number
    /** End line number */
    lineEnd: number
    /** Interface properties */
    properties: PropertyInfo[]
    /** Extended interfaces */
    extends: string[]
    /** Whether interface is exported */
    isExported: boolean
}

export interface TypeAliasInfo {
    /** Type alias name */
    name: string
    /** Line number */
    line: number
    /** Whether it's exported */
    isExported: boolean
}

export interface FileAST {
    /** Import statements */
    imports: ImportInfo[]
    /** Export statements */
    exports: ExportInfo[]
    /** Function declarations */
    functions: FunctionInfo[]
    /** Class declarations */
    classes: ClassInfo[]
    /** Interface declarations */
    interfaces: InterfaceInfo[]
    /** Type alias declarations */
    typeAliases: TypeAliasInfo[]
    /** Whether parsing encountered errors */
    parseError: boolean
    /** Parse error message if any */
    parseErrorMessage?: string
}
|
||||||
|
|
||||||
|
export function createEmptyFileAST(): FileAST {
|
||||||
|
return {
|
||||||
|
imports: [],
|
||||||
|
exports: [],
|
||||||
|
functions: [],
|
||||||
|
classes: [],
|
||||||
|
interfaces: [],
|
||||||
|
typeAliases: [],
|
||||||
|
parseError: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
26
packages/ipuaro/src/domain/value-objects/FileData.ts
Normal file
26
packages/ipuaro/src/domain/value-objects/FileData.ts
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
/**
 * Represents file content with metadata for change detection.
 * Equality between snapshots is hash-based (see isFileDataEqual).
 */
export interface FileData {
    /** File content split into lines */
    lines: string[]
    /** MD5 hash for change detection */
    hash: string
    /** File size in bytes */
    size: number
    /** Last modification timestamp (ms) */
    lastModified: number
}
|
||||||
|
|
||||||
|
export function createFileData(
|
||||||
|
lines: string[],
|
||||||
|
hash: string,
|
||||||
|
size: number,
|
||||||
|
lastModified: number,
|
||||||
|
): FileData {
|
||||||
|
return { lines, hash, size, lastModified }
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isFileDataEqual(a: FileData, b: FileData): boolean {
|
||||||
|
return a.hash === b.hash
|
||||||
|
}
|
||||||
50
packages/ipuaro/src/domain/value-objects/FileMeta.ts
Normal file
50
packages/ipuaro/src/domain/value-objects/FileMeta.ts
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
/**
|
||||||
|
* Represents computed metadata about a file.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Complexity measurements computed for a single file.
 */
export interface ComplexityMetrics {
    /** Lines of code (excluding empty and comments) */
    loc: number
    /** Maximum nesting depth */
    nesting: number
    /** Cyclomatic complexity score */
    cyclomaticComplexity: number
    /** Overall complexity score (0-100) */
    score: number
}
|
||||||
|
|
||||||
|
/**
 * Computed metadata about a file: complexity, its position in the
 * dependency graph, and a coarse classification.
 */
export interface FileMeta {
    /** Complexity metrics for the file */
    complexity: ComplexityMetrics
    /** Files that this file imports (internal paths) */
    dependencies: string[]
    /** Files that import this file */
    dependents: string[]
    /** Whether file is a dependency hub (>5 dependents) */
    isHub: boolean
    /** Whether file is an entry point (index.ts or 0 dependents) */
    isEntryPoint: boolean
    /** File type classification */
    fileType: "source" | "test" | "config" | "types" | "unknown"
}
|
||||||
|
|
||||||
|
export function createFileMeta(partial: Partial<FileMeta> = {}): FileMeta {
|
||||||
|
return {
|
||||||
|
complexity: {
|
||||||
|
loc: 0,
|
||||||
|
nesting: 0,
|
||||||
|
cyclomaticComplexity: 1,
|
||||||
|
score: 0,
|
||||||
|
},
|
||||||
|
dependencies: [],
|
||||||
|
dependents: [],
|
||||||
|
isHub: false,
|
||||||
|
isEntryPoint: false,
|
||||||
|
fileType: "unknown",
|
||||||
|
...partial,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isHubFile(dependentCount: number): boolean {
|
||||||
|
return dependentCount > 5
|
||||||
|
}
|
||||||
27
packages/ipuaro/src/domain/value-objects/ToolCall.ts
Normal file
27
packages/ipuaro/src/domain/value-objects/ToolCall.ts
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
/**
|
||||||
|
* Represents a tool call from the LLM.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Represents a tool call requested by the LLM.
 */
export interface ToolCall {
    /** Unique identifier for this call */
    id: string
    /** Tool name */
    name: string
    /** Tool parameters */
    params: Record<string, unknown>
    /** Timestamp (ms since epoch) when call was made */
    timestamp: number
}
|
||||||
|
|
||||||
|
export function createToolCall(
|
||||||
|
id: string,
|
||||||
|
name: string,
|
||||||
|
params: Record<string, unknown>,
|
||||||
|
): ToolCall {
|
||||||
|
return {
|
||||||
|
id,
|
||||||
|
name,
|
||||||
|
params,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
}
|
||||||
|
}
|
||||||
42
packages/ipuaro/src/domain/value-objects/ToolResult.ts
Normal file
42
packages/ipuaro/src/domain/value-objects/ToolResult.ts
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
/**
|
||||||
|
* Represents the result of a tool execution.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Represents the result of a tool execution.
 * Successful results carry `data`; failed results carry `error`
 * (see `createSuccessResult` / `createErrorResult`).
 */
export interface ToolResult {
    /** Tool call ID this result belongs to */
    callId: string
    /** Whether execution was successful */
    success: boolean
    /** Result data (varies by tool) */
    data?: unknown
    /** Error message if failed */
    error?: string
    /** Execution time in milliseconds */
    executionTimeMs: number
}
|
||||||
|
|
||||||
|
export function createSuccessResult(
|
||||||
|
callId: string,
|
||||||
|
data: unknown,
|
||||||
|
executionTimeMs: number,
|
||||||
|
): ToolResult {
|
||||||
|
return {
|
||||||
|
callId,
|
||||||
|
success: true,
|
||||||
|
data,
|
||||||
|
executionTimeMs,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createErrorResult(
|
||||||
|
callId: string,
|
||||||
|
error: string,
|
||||||
|
executionTimeMs: number,
|
||||||
|
): ToolResult {
|
||||||
|
return {
|
||||||
|
callId,
|
||||||
|
success: false,
|
||||||
|
error,
|
||||||
|
executionTimeMs,
|
||||||
|
}
|
||||||
|
}
|
||||||
50
packages/ipuaro/src/domain/value-objects/UndoEntry.ts
Normal file
50
packages/ipuaro/src/domain/value-objects/UndoEntry.ts
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
/**
|
||||||
|
* Represents an undo entry for file changes.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Represents an undo entry recording a single file change
 * (before/after snapshots plus provenance).
 */
export interface UndoEntry {
    /** Unique identifier */
    id: string
    /** Timestamp (ms since epoch) when change was made */
    timestamp: number
    /** File path that was modified */
    filePath: string
    /** Content before the change, as lines */
    previousContent: string[]
    /** Content after the change, as lines */
    newContent: string[]
    /** Human-readable description of the change */
    description: string
    /** Tool call ID that made this change, when driven by a tool */
    toolCallId?: string
}
|
||||||
|
|
||||||
|
export function createUndoEntry(
|
||||||
|
id: string,
|
||||||
|
filePath: string,
|
||||||
|
previousContent: string[],
|
||||||
|
newContent: string[],
|
||||||
|
description: string,
|
||||||
|
toolCallId?: string,
|
||||||
|
): UndoEntry {
|
||||||
|
return {
|
||||||
|
id,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
filePath,
|
||||||
|
previousContent,
|
||||||
|
newContent,
|
||||||
|
description,
|
||||||
|
toolCallId,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function canUndo(entry: UndoEntry, currentContent: string[]): boolean {
|
||||||
|
return arraysEqual(entry.newContent, currentContent)
|
||||||
|
}
|
||||||
|
|
||||||
|
function arraysEqual(a: string[], b: string[]): boolean {
|
||||||
|
if (a.length !== b.length) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return a.every((line, i) => line === b[i])
|
||||||
|
}
|
||||||
8
packages/ipuaro/src/domain/value-objects/index.ts
Normal file
8
packages/ipuaro/src/domain/value-objects/index.ts
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
// Domain Value Objects
// Barrel file: re-exports every value object so consumers can import
// from "domain/value-objects" without knowing individual file names.
export * from "./FileData.js"
export * from "./FileAST.js"
export * from "./FileMeta.js"
export * from "./ChatMessage.js"
export * from "./ToolCall.js"
export * from "./ToolResult.js"
export * from "./UndoEntry.js"
|
||||||
28
packages/ipuaro/src/index.ts
Normal file
28
packages/ipuaro/src/index.ts
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
/**
 * @puaros/ipuaro - Local AI agent for codebase operations
 *
 * Main entry point for the library.
 */

import { createRequire } from "node:module"

// Read the package version from package.json at runtime via a CommonJS
// require bridge (import.meta.url anchors resolution to this module).
const require = createRequire(import.meta.url)
const pkg = require("../package.json") as { version: string }

// Domain exports
export * from "./domain/index.js"

// Application exports
export * from "./application/index.js"

// Shared exports
export * from "./shared/index.js"

// Infrastructure exports
export * from "./infrastructure/index.js"

// TUI exports
export * from "./tui/index.js"

// Version - kept in sync with package.json automatically
export const VERSION = pkg.version
|
||||||
6
packages/ipuaro/src/infrastructure/index.ts
Normal file
6
packages/ipuaro/src/infrastructure/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// Infrastructure layer exports
// Barrel file for the infrastructure layer: storage, indexing, LLM
// integration, tool implementations and security.
export * from "./storage/index.js"
export * from "./indexer/index.js"
export * from "./llm/index.js"
export * from "./tools/index.js"
export * from "./security/index.js"
|
||||||
551
packages/ipuaro/src/infrastructure/indexer/ASTParser.ts
Normal file
551
packages/ipuaro/src/infrastructure/indexer/ASTParser.ts
Normal file
@@ -0,0 +1,551 @@
|
|||||||
|
import { builtinModules } from "node:module"
|
||||||
|
import Parser from "tree-sitter"
|
||||||
|
import TypeScript from "tree-sitter-typescript"
|
||||||
|
import JavaScript from "tree-sitter-javascript"
|
||||||
|
import {
|
||||||
|
createEmptyFileAST,
|
||||||
|
type ExportInfo,
|
||||||
|
type FileAST,
|
||||||
|
type ImportInfo,
|
||||||
|
type MethodInfo,
|
||||||
|
type ParameterInfo,
|
||||||
|
type PropertyInfo,
|
||||||
|
} from "../../domain/value-objects/FileAST.js"
|
||||||
|
import { FieldName, NodeType } from "./tree-sitter-types.js"
|
||||||
|
|
||||||
|
/** Source languages this parser can handle. */
type Language = "ts" | "tsx" | "js" | "jsx"
type SyntaxNode = Parser.SyntaxNode

/**
 * Parses source code into AST using tree-sitter.
 *
 * One tree-sitter parser is created per grammar; "js" and "jsx" share
 * the same JavaScript parser instance. All extractors convert tree-sitter
 * 0-based row positions to 1-based line numbers.
 */
export class ASTParser {
    private readonly parsers = new Map<Language, Parser>()

    constructor() {
        this.initializeParsers()
    }

    /** Create and register a parser per supported language. */
    private initializeParsers(): void {
        const tsParser = new Parser()
        tsParser.setLanguage(TypeScript.typescript)
        this.parsers.set("ts", tsParser)

        const tsxParser = new Parser()
        tsxParser.setLanguage(TypeScript.tsx)
        this.parsers.set("tsx", tsxParser)

        const jsParser = new Parser()
        jsParser.setLanguage(JavaScript)
        this.parsers.set("js", jsParser)
        // JSX uses the same grammar as plain JavaScript here.
        this.parsers.set("jsx", jsParser)
    }

    /**
     * Parse source code and extract AST information.
     * Never throws: failures are reported via `parseError` /
     * `parseErrorMessage` on the returned FileAST.
     */
    parse(content: string, language: Language): FileAST {
        const parser = this.parsers.get(language)
        if (!parser) {
            return {
                ...createEmptyFileAST(),
                parseError: true,
                parseErrorMessage: `Unsupported language: ${language}`,
            }
        }

        try {
            const tree = parser.parse(content)
            const root = tree.rootNode

            if (root.hasError) {
                // Best-effort: still extract what we can from the partial tree,
                // but flag the result as errored.
                const ast = this.extractAST(root, language)
                ast.parseError = true
                ast.parseErrorMessage = "Syntax error in source code"
                return ast
            }

            return this.extractAST(root, language)
        } catch (error) {
            return {
                ...createEmptyFileAST(),
                parseError: true,
                parseErrorMessage: error instanceof Error ? error.message : "Unknown parse error",
            }
        }
    }

    /** Walk the top-level children of the parse tree and collect declarations. */
    private extractAST(root: SyntaxNode, language: Language): FileAST {
        const ast = createEmptyFileAST()
        const isTypeScript = language === "ts" || language === "tsx"

        for (const child of root.children) {
            this.visitNode(child, ast, isTypeScript)
        }

        return ast
    }

    /** Dispatch one top-level node to its extractor. Interfaces and type
     *  aliases are only collected for TypeScript sources. */
    private visitNode(node: SyntaxNode, ast: FileAST, isTypeScript: boolean): void {
        switch (node.type) {
            case NodeType.IMPORT_STATEMENT:
                this.extractImport(node, ast)
                break
            case NodeType.EXPORT_STATEMENT:
                this.extractExport(node, ast)
                break
            case NodeType.FUNCTION_DECLARATION:
                this.extractFunction(node, ast, false)
                break
            case NodeType.LEXICAL_DECLARATION:
                this.extractLexicalDeclaration(node, ast)
                break
            case NodeType.CLASS_DECLARATION:
                this.extractClass(node, ast, false)
                break
            case NodeType.INTERFACE_DECLARATION:
                if (isTypeScript) {
                    this.extractInterface(node, ast, false)
                }
                break
            case NodeType.TYPE_ALIAS_DECLARATION:
                if (isTypeScript) {
                    this.extractTypeAlias(node, ast, false)
                }
                break
        }
    }

    /** Record one ImportInfo per imported binding (default, namespace,
     *  and each named specifier). A bare `import "x"` is recorded as "*". */
    private extractImport(node: SyntaxNode, ast: FileAST): void {
        const sourceNode = node.childForFieldName(FieldName.SOURCE)
        if (!sourceNode) {
            return
        }

        const from = this.getStringValue(sourceNode)
        const line = node.startPosition.row + 1
        const importType = this.classifyImport(from)

        const importClause = node.children.find((c) => c.type === NodeType.IMPORT_CLAUSE)
        if (!importClause) {
            // Side-effect-only import: `import "./polyfill"`.
            ast.imports.push({
                name: "*",
                from,
                line,
                type: importType,
                isDefault: false,
            })
            return
        }

        for (const child of importClause.children) {
            if (child.type === NodeType.IDENTIFIER) {
                // Default import: `import foo from "x"`.
                ast.imports.push({
                    name: child.text,
                    from,
                    line,
                    type: importType,
                    isDefault: true,
                })
            } else if (child.type === NodeType.NAMESPACE_IMPORT) {
                // Namespace import: `import * as ns from "x"`.
                const alias = child.children.find((c) => c.type === NodeType.IDENTIFIER)
                ast.imports.push({
                    name: alias?.text ?? "*",
                    from,
                    line,
                    type: importType,
                    isDefault: false,
                })
            } else if (child.type === NodeType.NAMED_IMPORTS) {
                // Named imports: `import { a, b as c } from "x"` — the alias,
                // when present, is the binding visible in this file.
                for (const specifier of child.children) {
                    if (specifier.type === NodeType.IMPORT_SPECIFIER) {
                        const nameNode = specifier.childForFieldName(FieldName.NAME)
                        const aliasNode = specifier.childForFieldName(FieldName.ALIAS)
                        ast.imports.push({
                            name: aliasNode?.text ?? nameNode?.text ?? "",
                            from,
                            line,
                            type: importType,
                            isDefault: false,
                        })
                    }
                }
            }
        }
    }

    /** Handle an export statement: exported declarations are both extracted
     *  (as function/class/etc.) and recorded as ExportInfo; bare
     *  `export { a, b }` clauses are recorded as "variable" exports. */
    private extractExport(node: SyntaxNode, ast: FileAST): void {
        const isDefault = node.children.some((c) => c.type === NodeType.DEFAULT)
        const declaration = node.childForFieldName(FieldName.DECLARATION)

        if (declaration) {
            switch (declaration.type) {
                case NodeType.FUNCTION_DECLARATION:
                    this.extractFunction(declaration, ast, true)
                    this.addExportInfo(ast, declaration, "function", isDefault)
                    break
                case NodeType.CLASS_DECLARATION:
                    this.extractClass(declaration, ast, true)
                    this.addExportInfo(ast, declaration, "class", isDefault)
                    break
                case NodeType.INTERFACE_DECLARATION:
                    this.extractInterface(declaration, ast, true)
                    this.addExportInfo(ast, declaration, "interface", isDefault)
                    break
                case NodeType.TYPE_ALIAS_DECLARATION:
                    this.extractTypeAlias(declaration, ast, true)
                    this.addExportInfo(ast, declaration, "type", isDefault)
                    break
                case NodeType.LEXICAL_DECLARATION:
                    // `export const x = ...` — export entries are added inside.
                    this.extractLexicalDeclaration(declaration, ast, true)
                    break
            }
        }

        const exportClause = node.children.find((c) => c.type === NodeType.EXPORT_CLAUSE)
        if (exportClause) {
            for (const specifier of exportClause.children) {
                if (specifier.type === NodeType.EXPORT_SPECIFIER) {
                    const nameNode = specifier.childForFieldName(FieldName.NAME)
                    if (nameNode) {
                        ast.exports.push({
                            name: nameNode.text,
                            line: node.startPosition.row + 1,
                            isDefault: false,
                            kind: "variable",
                        })
                    }
                }
            }
        }
    }

    /** Record a named function declaration. Anonymous functions are skipped. */
    private extractFunction(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (!nameNode) {
            return
        }

        const params = this.extractParameters(node)
        const isAsync = node.children.some((c) => c.type === NodeType.ASYNC)
        const returnTypeNode = node.childForFieldName(FieldName.RETURN_TYPE)

        ast.functions.push({
            name: nameNode.text,
            lineStart: node.startPosition.row + 1,
            lineEnd: node.endPosition.row + 1,
            params,
            isAsync,
            isExported,
            // Strip the leading ": " from the type annotation text.
            returnType: returnTypeNode?.text?.replace(/^:\s*/, ""),
        })
    }

    /** Handle `const`/`let` declarations: arrow/function-expression values
     *  are recorded as functions; other exported declarators as variables. */
    private extractLexicalDeclaration(node: SyntaxNode, ast: FileAST, isExported = false): void {
        for (const child of node.children) {
            if (child.type === NodeType.VARIABLE_DECLARATOR) {
                const nameNode = child.childForFieldName(FieldName.NAME)
                const valueNode = child.childForFieldName(FieldName.VALUE)

                if (
                    valueNode?.type === NodeType.ARROW_FUNCTION ||
                    valueNode?.type === NodeType.FUNCTION
                ) {
                    const params = this.extractParameters(valueNode)
                    const isAsync = valueNode.children.some((c) => c.type === NodeType.ASYNC)

                    ast.functions.push({
                        name: nameNode?.text ?? "",
                        lineStart: node.startPosition.row + 1,
                        lineEnd: node.endPosition.row + 1,
                        params,
                        isAsync,
                        isExported,
                    })

                    if (isExported) {
                        ast.exports.push({
                            name: nameNode?.text ?? "",
                            line: node.startPosition.row + 1,
                            isDefault: false,
                            kind: "function",
                        })
                    }
                } else if (isExported && nameNode) {
                    ast.exports.push({
                        name: nameNode.text,
                        line: node.startPosition.row + 1,
                        isDefault: false,
                        kind: "variable",
                    })
                }
            }
        }
    }

    /** Record a class declaration with its methods, fields and heritage. */
    private extractClass(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (!nameNode) {
            return
        }

        const body = node.childForFieldName(FieldName.BODY)
        const methods: MethodInfo[] = []
        const properties: PropertyInfo[] = []

        if (body) {
            for (const member of body.children) {
                if (member.type === NodeType.METHOD_DEFINITION) {
                    methods.push(this.extractMethod(member))
                } else if (
                    member.type === NodeType.PUBLIC_FIELD_DEFINITION ||
                    member.type === NodeType.FIELD_DEFINITION
                ) {
                    properties.push(this.extractProperty(member))
                }
            }
        }

        const { extendsName, implementsList } = this.extractClassHeritage(node)
        const isAbstract = node.children.some((c) => c.type === NodeType.ABSTRACT)

        ast.classes.push({
            name: nameNode.text,
            lineStart: node.startPosition.row + 1,
            lineEnd: node.endPosition.row + 1,
            methods,
            properties,
            extends: extendsName,
            implements: implementsList,
            isExported,
            isAbstract,
        })
    }

    /** Collect `extends`/`implements` from either a combined class_heritage
     *  node (TypeScript grammar) or a bare extends clause (JavaScript). */
    private extractClassHeritage(node: SyntaxNode): {
        extendsName: string | undefined
        implementsList: string[]
    } {
        let extendsName: string | undefined
        const implementsList: string[] = []

        for (const child of node.children) {
            if (child.type === NodeType.CLASS_HERITAGE) {
                this.parseHeritageClause(child, (ext) => (extendsName = ext), implementsList)
            } else if (child.type === NodeType.EXTENDS_CLAUSE) {
                extendsName = this.findTypeIdentifier(child)
            }
        }

        return { extendsName, implementsList }
    }

    /** Split a class_heritage node into its extends and implements parts. */
    private parseHeritageClause(
        heritage: SyntaxNode,
        setExtends: (name: string) => void,
        implementsList: string[],
    ): void {
        for (const clause of heritage.children) {
            if (clause.type === NodeType.EXTENDS_CLAUSE) {
                const typeId = this.findTypeIdentifier(clause)
                if (typeId) {
                    setExtends(typeId)
                }
            } else if (clause.type === NodeType.IMPLEMENTS_CLAUSE) {
                this.collectImplements(clause, implementsList)
            }
        }
    }

    /** First type_identifier or identifier child, if any. */
    private findTypeIdentifier(node: SyntaxNode): string | undefined {
        const typeNode = node.children.find(
            (c) => c.type === NodeType.TYPE_IDENTIFIER || c.type === NodeType.IDENTIFIER,
        )
        return typeNode?.text
    }

    /** Append every implemented interface name from the clause to `list`. */
    private collectImplements(clause: SyntaxNode, list: string[]): void {
        for (const impl of clause.children) {
            if (impl.type === NodeType.TYPE_IDENTIFIER || impl.type === NodeType.IDENTIFIER) {
                list.push(impl.text)
            }
        }
    }

    /** Build a MethodInfo from a method_definition node.
     *  Visibility defaults to "public" unless an accessibility modifier is present. */
    private extractMethod(node: SyntaxNode): MethodInfo {
        const nameNode = node.childForFieldName(FieldName.NAME)
        const params = this.extractParameters(node)
        const isAsync = node.children.some((c) => c.type === NodeType.ASYNC)
        const isStatic = node.children.some((c) => c.type === NodeType.STATIC)

        let visibility: "public" | "private" | "protected" = "public"
        for (const child of node.children) {
            if (child.type === NodeType.ACCESSIBILITY_MODIFIER) {
                visibility = child.text as "public" | "private" | "protected"
                break
            }
        }

        return {
            name: nameNode?.text ?? "",
            lineStart: node.startPosition.row + 1,
            lineEnd: node.endPosition.row + 1,
            params,
            isAsync,
            visibility,
            isStatic,
        }
    }

    /** Build a PropertyInfo from a class field definition node. */
    private extractProperty(node: SyntaxNode): PropertyInfo {
        const nameNode = node.childForFieldName(FieldName.NAME)
        const typeNode = node.childForFieldName(FieldName.TYPE)
        const isStatic = node.children.some((c) => c.type === NodeType.STATIC)
        // NOTE: readonly is matched by token text, not node type — presumably
        // the grammar exposes it as a plain keyword token here.
        const isReadonly = node.children.some((c) => c.text === NodeType.READONLY)

        let visibility: "public" | "private" | "protected" = "public"
        for (const child of node.children) {
            if (child.type === NodeType.ACCESSIBILITY_MODIFIER) {
                visibility = child.text as "public" | "private" | "protected"
                break
            }
        }

        return {
            name: nameNode?.text ?? "",
            line: node.startPosition.row + 1,
            type: typeNode?.text,
            visibility,
            isStatic,
            isReadonly,
        }
    }

    /** Record an interface declaration with its property signatures and
     *  extended interfaces. Method signatures are not collected. */
    private extractInterface(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (!nameNode) {
            return
        }

        const body = node.childForFieldName(FieldName.BODY)
        const properties: PropertyInfo[] = []

        if (body) {
            for (const member of body.children) {
                if (member.type === NodeType.PROPERTY_SIGNATURE) {
                    const propName = member.childForFieldName(FieldName.NAME)
                    const propType = member.childForFieldName(FieldName.TYPE)
                    properties.push({
                        name: propName?.text ?? "",
                        line: member.startPosition.row + 1,
                        type: propType?.text,
                        visibility: "public",
                        isStatic: false,
                        isReadonly: member.children.some((c) => c.text === NodeType.READONLY),
                    })
                }
            }
        }

        const extendsList: string[] = []
        const extendsClause = node.children.find((c) => c.type === NodeType.EXTENDS_TYPE_CLAUSE)
        if (extendsClause) {
            for (const child of extendsClause.children) {
                if (child.type === NodeType.TYPE_IDENTIFIER) {
                    extendsList.push(child.text)
                }
            }
        }

        ast.interfaces.push({
            name: nameNode.text,
            lineStart: node.startPosition.row + 1,
            lineEnd: node.endPosition.row + 1,
            properties,
            extends: extendsList,
            isExported,
        })
    }

    /** Record a `type X = ...` alias declaration. */
    private extractTypeAlias(node: SyntaxNode, ast: FileAST, isExported: boolean): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (!nameNode) {
            return
        }

        ast.typeAliases.push({
            name: nameNode.text,
            line: node.startPosition.row + 1,
            isExported,
        })
    }

    /** Extract the parameter list of a function/method/arrow node.
     *  Handles TS required/optional parameters and bare JS identifiers. */
    private extractParameters(node: SyntaxNode): ParameterInfo[] {
        const params: ParameterInfo[] = []
        const paramsNode = node.childForFieldName(FieldName.PARAMETERS)

        if (paramsNode) {
            for (const param of paramsNode.children) {
                if (
                    param.type === NodeType.REQUIRED_PARAMETER ||
                    param.type === NodeType.OPTIONAL_PARAMETER ||
                    param.type === NodeType.IDENTIFIER
                ) {
                    const nameNode =
                        param.type === NodeType.IDENTIFIER
                            ? param
                            : param.childForFieldName(FieldName.PATTERN)
                    const typeNode = param.childForFieldName(FieldName.TYPE)
                    const defaultValue = param.childForFieldName(FieldName.VALUE)

                    params.push({
                        name: nameNode?.text ?? "",
                        type: typeNode?.text,
                        optional: param.type === NodeType.OPTIONAL_PARAMETER,
                        hasDefault: defaultValue !== null,
                    })
                }
            }
        }

        return params
    }

    /** Push an ExportInfo for a named exported declaration node. */
    private addExportInfo(
        ast: FileAST,
        node: SyntaxNode,
        kind: ExportInfo["kind"],
        isDefault: boolean,
    ): void {
        const nameNode = node.childForFieldName(FieldName.NAME)
        if (nameNode) {
            ast.exports.push({
                name: nameNode.text,
                line: node.startPosition.row + 1,
                isDefault,
                kind,
            })
        }
    }

    /** Classify an import source as internal (relative/absolute path),
     *  builtin (node: prefix or Node core module), or external (package). */
    private classifyImport(from: string): ImportInfo["type"] {
        if (from.startsWith(".") || from.startsWith("/")) {
            return "internal"
        }
        if (from.startsWith("node:") || builtinModules.includes(from)) {
            return "builtin"
        }
        return "external"
    }

    /** Strip matching single or double quotes from a string-literal node. */
    private getStringValue(node: SyntaxNode): string {
        const text = node.text
        if (
            (text.startsWith('"') && text.endsWith('"')) ||
            (text.startsWith("'") && text.endsWith("'"))
        ) {
            return text.slice(1, -1)
        }
        return text
    }
}
|
||||||
189
packages/ipuaro/src/infrastructure/indexer/FileScanner.ts
Normal file
189
packages/ipuaro/src/infrastructure/indexer/FileScanner.ts
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
import * as fs from "node:fs/promises"
|
||||||
|
import type { Stats } from "node:fs"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import { globby } from "globby"
|
||||||
|
import {
|
||||||
|
BINARY_EXTENSIONS,
|
||||||
|
DEFAULT_IGNORE_PATTERNS,
|
||||||
|
SUPPORTED_EXTENSIONS,
|
||||||
|
} from "../../domain/constants/index.js"
|
||||||
|
import type { ScanResult } from "../../domain/services/IIndexer.js"
|
||||||
|
|
||||||
|
/**
 * Progress snapshot reported while scanning files.
 */
export interface ScanProgress {
    /** 1-based index of the file currently being processed */
    current: number
    /** Total number of files matched by the scan */
    total: number
    /** Relative path of the file currently being processed */
    currentFile: string
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for FileScanner.
|
||||||
|
*/
|
||||||
|
export interface FileScannerOptions {
|
||||||
|
/** Additional ignore patterns (besides .gitignore and defaults) */
|
||||||
|
additionalIgnore?: string[]
|
||||||
|
/** Only include files with these extensions. Defaults to SUPPORTED_EXTENSIONS. */
|
||||||
|
extensions?: readonly string[]
|
||||||
|
/** Callback for progress updates */
|
||||||
|
onProgress?: (progress: ScanProgress) => void
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Scans project directories recursively using globby.
 * Respects .gitignore, skips binary files and default ignore patterns.
 * Symbolic links are not followed.
 */
export class FileScanner {
    // Normalized configuration captured at construction time.
    private readonly extensions: Set<string>
    private readonly additionalIgnore: string[]
    private readonly onProgress?: (progress: ScanProgress) => void

    constructor(options: FileScannerOptions = {}) {
        this.extensions = new Set(options.extensions ?? SUPPORTED_EXTENSIONS)
        this.additionalIgnore = options.additionalIgnore ?? []
        this.onProgress = options.onProgress
    }

    /**
     * Build glob patterns from extensions.
     * Extensions are stored with a leading dot; globby wants them without.
     */
    private buildGlobPatterns(): string[] {
        const exts = [...this.extensions].map((ext) => ext.replace(".", ""))
        if (exts.length === 1) {
            return [`**/*.${exts[0]}`]
        }
        // Brace expansion: one pattern matching any configured extension.
        return [`**/*.{${exts.join(",")}}`]
    }

    /**
     * Build ignore patterns: defaults, caller-supplied extras, and a
     * glob per known binary extension.
     */
    private buildIgnorePatterns(): string[] {
        const patterns = [
            ...DEFAULT_IGNORE_PATTERNS,
            ...this.additionalIgnore,
            ...BINARY_EXTENSIONS.map((ext) => `**/*${ext}`),
        ]
        return patterns
    }

    /**
     * Scan directory and yield file results.
     * Files whose stats cannot be read are silently skipped.
     * @param root - Root directory to scan
     */
    async *scan(root: string): AsyncGenerator<ScanResult> {
        const globPatterns = this.buildGlobPatterns()
        const ignorePatterns = this.buildIgnorePatterns()

        const files = await globby(globPatterns, {
            cwd: root,
            gitignore: true,
            ignore: ignorePatterns,
            absolute: false,
            onlyFiles: true,
            followSymbolicLinks: false,
        })

        const total = files.length
        let current = 0

        for (const relativePath of files) {
            current++
            this.reportProgress(relativePath, current, total)

            const fullPath = path.join(root, relativePath)
            const stats = await this.safeStats(fullPath)

            if (stats) {
                yield {
                    path: relativePath,
                    type: "file",
                    size: stats.size,
                    lastModified: stats.mtimeMs,
                }
            }
        }
    }

    /**
     * Scan and return all results as array.
     * Convenience wrapper around the streaming `scan` generator.
     */
    async scanAll(root: string): Promise<ScanResult[]> {
        const results: ScanResult[] = []
        for await (const result of this.scan(root)) {
            results.push(result)
        }
        return results
    }

    /**
     * Check if file has supported extension (case-insensitive).
     */
    isSupportedExtension(filePath: string): boolean {
        const ext = path.extname(filePath).toLowerCase()
        return this.extensions.has(ext)
    }

    /**
     * Safely get file stats without throwing.
     * @returns Stats on success, null if the file is unreadable or gone.
     */
    private async safeStats(filePath: string): Promise<Stats | null> {
        try {
            return await fs.stat(filePath)
        } catch {
            return null
        }
    }

    /**
     * Report progress if callback is set.
     */
    private reportProgress(currentFile: string, current: number, total: number): void {
        if (this.onProgress) {
            this.onProgress({ current, total, currentFile })
        }
    }

    /**
     * Check if file content is likely UTF-8 text.
     * Reads first 8KB and checks for null bytes; empty files count as text.
     * Returns false when the file cannot be opened.
     */
    static async isTextFile(filePath: string): Promise<boolean> {
        try {
            const handle = await fs.open(filePath, "r")
            try {
                const buffer = Buffer.alloc(8192)
                const { bytesRead } = await handle.read(buffer, 0, 8192, 0)
                if (bytesRead === 0) {
                    return true
                }
                for (let i = 0; i < bytesRead; i++) {
                    if (buffer[i] === 0) {
                        return false
                    }
                }
                return true
            } finally {
                // Always release the file handle, even on read failure.
                await handle.close()
            }
        } catch {
            return false
        }
    }

    /**
     * Read file content as string.
     * Returns null if file is binary or unreadable.
     */
    static async readFileContent(filePath: string): Promise<string | null> {
        if (!(await FileScanner.isTextFile(filePath))) {
            return null
        }
        try {
            return await fs.readFile(filePath, "utf-8")
        } catch {
            return null
        }
    }
}
|
||||||
406
packages/ipuaro/src/infrastructure/indexer/IndexBuilder.ts
Normal file
406
packages/ipuaro/src/infrastructure/indexer/IndexBuilder.ts
Normal file
@@ -0,0 +1,406 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||||
|
import type { DepsGraph, SymbolIndex, SymbolLocation } from "../../domain/services/IStorage.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Builds searchable indexes from parsed ASTs.
|
||||||
|
*/
|
||||||
|
export class IndexBuilder {
|
||||||
|
private readonly projectRoot: string
|
||||||
|
|
||||||
|
constructor(projectRoot: string) {
|
||||||
|
this.projectRoot = projectRoot
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build symbol index from all ASTs.
|
||||||
|
* Maps symbol names to their locations for quick lookup.
|
||||||
|
*/
|
||||||
|
buildSymbolIndex(asts: Map<string, FileAST>): SymbolIndex {
|
||||||
|
const index: SymbolIndex = new Map()
|
||||||
|
|
||||||
|
for (const [filePath, ast] of asts) {
|
||||||
|
this.indexFunctions(filePath, ast, index)
|
||||||
|
this.indexClasses(filePath, ast, index)
|
||||||
|
this.indexInterfaces(filePath, ast, index)
|
||||||
|
this.indexTypeAliases(filePath, ast, index)
|
||||||
|
this.indexExportedVariables(filePath, ast, index)
|
||||||
|
}
|
||||||
|
|
||||||
|
return index
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index function declarations.
|
||||||
|
*/
|
||||||
|
private indexFunctions(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||||
|
for (const func of ast.functions) {
|
||||||
|
this.addSymbol(index, func.name, {
|
||||||
|
path: filePath,
|
||||||
|
line: func.lineStart,
|
||||||
|
type: "function",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index class declarations.
|
||||||
|
*/
|
||||||
|
private indexClasses(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||||
|
for (const cls of ast.classes) {
|
||||||
|
this.addSymbol(index, cls.name, {
|
||||||
|
path: filePath,
|
||||||
|
line: cls.lineStart,
|
||||||
|
type: "class",
|
||||||
|
})
|
||||||
|
|
||||||
|
for (const method of cls.methods) {
|
||||||
|
const qualifiedName = `${cls.name}.${method.name}`
|
||||||
|
this.addSymbol(index, qualifiedName, {
|
||||||
|
path: filePath,
|
||||||
|
line: method.lineStart,
|
||||||
|
type: "function",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index interface declarations.
|
||||||
|
*/
|
||||||
|
private indexInterfaces(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||||
|
for (const iface of ast.interfaces) {
|
||||||
|
this.addSymbol(index, iface.name, {
|
||||||
|
path: filePath,
|
||||||
|
line: iface.lineStart,
|
||||||
|
type: "interface",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index type alias declarations.
|
||||||
|
*/
|
||||||
|
private indexTypeAliases(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||||
|
for (const typeAlias of ast.typeAliases) {
|
||||||
|
this.addSymbol(index, typeAlias.name, {
|
||||||
|
path: filePath,
|
||||||
|
line: typeAlias.line,
|
||||||
|
type: "type",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Index exported variables (not functions).
|
||||||
|
*/
|
||||||
|
private indexExportedVariables(filePath: string, ast: FileAST, index: SymbolIndex): void {
|
||||||
|
const functionNames = new Set(ast.functions.map((f) => f.name))
|
||||||
|
|
||||||
|
for (const exp of ast.exports) {
|
||||||
|
if (exp.kind === "variable" && !functionNames.has(exp.name)) {
|
||||||
|
this.addSymbol(index, exp.name, {
|
||||||
|
path: filePath,
|
||||||
|
line: exp.line,
|
||||||
|
type: "variable",
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a symbol to the index.
|
||||||
|
*/
|
||||||
|
private addSymbol(index: SymbolIndex, name: string, location: SymbolLocation): void {
|
||||||
|
if (!name) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const existing = index.get(name)
|
||||||
|
if (existing) {
|
||||||
|
const isDuplicate = existing.some(
|
||||||
|
(loc) => loc.path === location.path && loc.line === location.line,
|
||||||
|
)
|
||||||
|
if (!isDuplicate) {
|
||||||
|
existing.push(location)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
index.set(name, [location])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build dependency graph from all ASTs.
|
||||||
|
* Creates bidirectional mapping of imports.
|
||||||
|
*/
|
||||||
|
buildDepsGraph(asts: Map<string, FileAST>): DepsGraph {
|
||||||
|
const imports = new Map<string, string[]>()
|
||||||
|
const importedBy = new Map<string, string[]>()
|
||||||
|
|
||||||
|
for (const filePath of asts.keys()) {
|
||||||
|
imports.set(filePath, [])
|
||||||
|
importedBy.set(filePath, [])
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [filePath, ast] of asts) {
|
||||||
|
const fileImports = this.resolveFileImports(filePath, ast, asts)
|
||||||
|
imports.set(filePath, fileImports)
|
||||||
|
|
||||||
|
for (const importedFile of fileImports) {
|
||||||
|
const dependents = importedBy.get(importedFile) ?? []
|
||||||
|
if (!dependents.includes(filePath)) {
|
||||||
|
dependents.push(filePath)
|
||||||
|
importedBy.set(importedFile, dependents)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [filePath, deps] of imports) {
|
||||||
|
imports.set(filePath, deps.sort())
|
||||||
|
}
|
||||||
|
for (const [filePath, deps] of importedBy) {
|
||||||
|
importedBy.set(filePath, deps.sort())
|
||||||
|
}
|
||||||
|
|
||||||
|
return { imports, importedBy }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve internal imports for a file.
|
||||||
|
*/
|
||||||
|
private resolveFileImports(
|
||||||
|
filePath: string,
|
||||||
|
ast: FileAST,
|
||||||
|
allASTs: Map<string, FileAST>,
|
||||||
|
): string[] {
|
||||||
|
const fileDir = path.dirname(filePath)
|
||||||
|
const resolvedImports: string[] = []
|
||||||
|
|
||||||
|
for (const imp of ast.imports) {
|
||||||
|
if (imp.type !== "internal") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolved = this.resolveImportPath(fileDir, imp.from, allASTs)
|
||||||
|
if (resolved && !resolvedImports.includes(resolved)) {
|
||||||
|
resolvedImports.push(resolved)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return resolvedImports
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve import path to actual file path.
|
||||||
|
*/
|
||||||
|
private resolveImportPath(
|
||||||
|
fromDir: string,
|
||||||
|
importPath: string,
|
||||||
|
allASTs: Map<string, FileAST>,
|
||||||
|
): string | null {
|
||||||
|
const absolutePath = path.resolve(fromDir, importPath)
|
||||||
|
|
||||||
|
const candidates = this.getImportCandidates(absolutePath)
|
||||||
|
for (const candidate of candidates) {
|
||||||
|
if (allASTs.has(candidate)) {
|
||||||
|
return candidate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate possible file paths for an import.
|
||||||
|
*/
|
||||||
|
private getImportCandidates(basePath: string): string[] {
|
||||||
|
const candidates: string[] = []
|
||||||
|
|
||||||
|
if (/\.(ts|tsx|js|jsx)$/.test(basePath)) {
|
||||||
|
candidates.push(basePath)
|
||||||
|
|
||||||
|
if (basePath.endsWith(".js")) {
|
||||||
|
candidates.push(`${basePath.slice(0, -3)}.ts`)
|
||||||
|
} else if (basePath.endsWith(".jsx")) {
|
||||||
|
candidates.push(`${basePath.slice(0, -4)}.tsx`)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
candidates.push(`${basePath}.ts`)
|
||||||
|
candidates.push(`${basePath}.tsx`)
|
||||||
|
candidates.push(`${basePath}.js`)
|
||||||
|
candidates.push(`${basePath}.jsx`)
|
||||||
|
candidates.push(`${basePath}/index.ts`)
|
||||||
|
candidates.push(`${basePath}/index.tsx`)
|
||||||
|
candidates.push(`${basePath}/index.js`)
|
||||||
|
candidates.push(`${basePath}/index.jsx`)
|
||||||
|
}
|
||||||
|
|
||||||
|
return candidates
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find all locations of a symbol by name.
|
||||||
|
*/
|
||||||
|
findSymbol(index: SymbolIndex, name: string): SymbolLocation[] {
|
||||||
|
return index.get(name) ?? []
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find symbols matching a pattern.
|
||||||
|
*/
|
||||||
|
searchSymbols(index: SymbolIndex, pattern: string): Map<string, SymbolLocation[]> {
|
||||||
|
const results = new Map<string, SymbolLocation[]>()
|
||||||
|
const regex = new RegExp(pattern, "i")
|
||||||
|
|
||||||
|
for (const [name, locations] of index) {
|
||||||
|
if (regex.test(name)) {
|
||||||
|
results.set(name, locations)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all files that the given file depends on (imports).
|
||||||
|
*/
|
||||||
|
getDependencies(graph: DepsGraph, filePath: string): string[] {
|
||||||
|
return graph.imports.get(filePath) ?? []
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all files that depend on the given file (import it).
|
||||||
|
*/
|
||||||
|
getDependents(graph: DepsGraph, filePath: string): string[] {
|
||||||
|
return graph.importedBy.get(filePath) ?? []
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find circular dependencies in the graph.
|
||||||
|
*/
|
||||||
|
findCircularDependencies(graph: DepsGraph): string[][] {
|
||||||
|
const cycles: string[][] = []
|
||||||
|
const visited = new Set<string>()
|
||||||
|
const recursionStack = new Set<string>()
|
||||||
|
|
||||||
|
const dfs = (node: string, path: string[]): void => {
|
||||||
|
visited.add(node)
|
||||||
|
recursionStack.add(node)
|
||||||
|
path.push(node)
|
||||||
|
|
||||||
|
const deps = graph.imports.get(node) ?? []
|
||||||
|
for (const dep of deps) {
|
||||||
|
if (!visited.has(dep)) {
|
||||||
|
dfs(dep, [...path])
|
||||||
|
} else if (recursionStack.has(dep)) {
|
||||||
|
const cycleStart = path.indexOf(dep)
|
||||||
|
if (cycleStart !== -1) {
|
||||||
|
const cycle = [...path.slice(cycleStart), dep]
|
||||||
|
const normalized = this.normalizeCycle(cycle)
|
||||||
|
if (!this.cycleExists(cycles, normalized)) {
|
||||||
|
cycles.push(normalized)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
recursionStack.delete(node)
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const node of graph.imports.keys()) {
|
||||||
|
if (!visited.has(node)) {
|
||||||
|
dfs(node, [])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return cycles
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize a cycle to start with the smallest path.
|
||||||
|
*/
|
||||||
|
private normalizeCycle(cycle: string[]): string[] {
|
||||||
|
if (cycle.length <= 1) {
|
||||||
|
return cycle
|
||||||
|
}
|
||||||
|
|
||||||
|
const withoutLast = cycle.slice(0, -1)
|
||||||
|
const minIndex = withoutLast.reduce(
|
||||||
|
(minIdx, path, idx) => (path < withoutLast[minIdx] ? idx : minIdx),
|
||||||
|
0,
|
||||||
|
)
|
||||||
|
|
||||||
|
const rotated = [...withoutLast.slice(minIndex), ...withoutLast.slice(0, minIndex)]
|
||||||
|
rotated.push(rotated[0])
|
||||||
|
|
||||||
|
return rotated
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a cycle already exists in the list.
|
||||||
|
*/
|
||||||
|
private cycleExists(cycles: string[][], newCycle: string[]): boolean {
|
||||||
|
const newKey = newCycle.join("→")
|
||||||
|
return cycles.some((cycle) => cycle.join("→") === newKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get statistics about the indexes.
|
||||||
|
*/
|
||||||
|
getStats(
|
||||||
|
symbolIndex: SymbolIndex,
|
||||||
|
depsGraph: DepsGraph,
|
||||||
|
): {
|
||||||
|
totalSymbols: number
|
||||||
|
symbolsByType: Record<SymbolLocation["type"], number>
|
||||||
|
totalFiles: number
|
||||||
|
totalDependencies: number
|
||||||
|
averageDependencies: number
|
||||||
|
hubs: string[]
|
||||||
|
orphans: string[]
|
||||||
|
} {
|
||||||
|
const symbolsByType: Record<SymbolLocation["type"], number> = {
|
||||||
|
function: 0,
|
||||||
|
class: 0,
|
||||||
|
interface: 0,
|
||||||
|
type: 0,
|
||||||
|
variable: 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
let totalSymbols = 0
|
||||||
|
for (const locations of symbolIndex.values()) {
|
||||||
|
totalSymbols += locations.length
|
||||||
|
for (const loc of locations) {
|
||||||
|
symbolsByType[loc.type]++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const totalFiles = depsGraph.imports.size
|
||||||
|
let totalDependencies = 0
|
||||||
|
const hubs: string[] = []
|
||||||
|
const orphans: string[] = []
|
||||||
|
|
||||||
|
for (const [_filePath, deps] of depsGraph.imports) {
|
||||||
|
totalDependencies += deps.length
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [filePath, dependents] of depsGraph.importedBy) {
|
||||||
|
if (dependents.length > 5) {
|
||||||
|
hubs.push(filePath)
|
||||||
|
}
|
||||||
|
if (dependents.length === 0 && (depsGraph.imports.get(filePath)?.length ?? 0) === 0) {
|
||||||
|
orphans.push(filePath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
totalSymbols,
|
||||||
|
symbolsByType,
|
||||||
|
totalFiles,
|
||||||
|
totalDependencies,
|
||||||
|
averageDependencies: totalFiles > 0 ? totalDependencies / totalFiles : 0,
|
||||||
|
hubs: hubs.sort(),
|
||||||
|
orphans: orphans.sort(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
448
packages/ipuaro/src/infrastructure/indexer/MetaAnalyzer.ts
Normal file
448
packages/ipuaro/src/infrastructure/indexer/MetaAnalyzer.ts
Normal file
@@ -0,0 +1,448 @@
|
|||||||
|
import * as path from "node:path"
|
||||||
|
import {
|
||||||
|
type ComplexityMetrics,
|
||||||
|
createFileMeta,
|
||||||
|
type FileMeta,
|
||||||
|
isHubFile,
|
||||||
|
} from "../../domain/value-objects/FileMeta.js"
|
||||||
|
import type { ClassInfo, FileAST, FunctionInfo } from "../../domain/value-objects/FileAST.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Analyzes file metadata including complexity, dependencies, and classification.
|
||||||
|
*/
|
||||||
|
export class MetaAnalyzer {
|
||||||
|
private readonly projectRoot: string
|
||||||
|
|
||||||
|
constructor(projectRoot: string) {
|
||||||
|
this.projectRoot = projectRoot
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Analyze a file and compute its metadata.
|
||||||
|
* @param filePath - Absolute path to the file
|
||||||
|
* @param ast - Parsed AST for the file
|
||||||
|
* @param content - Raw file content (for LOC calculation)
|
||||||
|
* @param allASTs - Map of all file paths to their ASTs (for dependents)
|
||||||
|
*/
|
||||||
|
analyze(
|
||||||
|
filePath: string,
|
||||||
|
ast: FileAST,
|
||||||
|
content: string,
|
||||||
|
allASTs: Map<string, FileAST>,
|
||||||
|
): FileMeta {
|
||||||
|
const complexity = this.calculateComplexity(ast, content)
|
||||||
|
const dependencies = this.resolveDependencies(filePath, ast)
|
||||||
|
const dependents = this.findDependents(filePath, allASTs)
|
||||||
|
const fileType = this.classifyFileType(filePath)
|
||||||
|
const isEntryPoint = this.isEntryPointFile(filePath, dependents.length)
|
||||||
|
|
||||||
|
return createFileMeta({
|
||||||
|
complexity,
|
||||||
|
dependencies,
|
||||||
|
dependents,
|
||||||
|
isHub: isHubFile(dependents.length),
|
||||||
|
isEntryPoint,
|
||||||
|
fileType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate complexity metrics for a file.
|
||||||
|
*/
|
||||||
|
calculateComplexity(ast: FileAST, content: string): ComplexityMetrics {
|
||||||
|
const loc = this.countLinesOfCode(content)
|
||||||
|
const nesting = this.calculateMaxNesting(ast)
|
||||||
|
const cyclomaticComplexity = this.calculateCyclomaticComplexity(ast)
|
||||||
|
const score = this.calculateComplexityScore(loc, nesting, cyclomaticComplexity)
|
||||||
|
|
||||||
|
return {
|
||||||
|
loc,
|
||||||
|
nesting,
|
||||||
|
cyclomaticComplexity,
|
||||||
|
score,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Count lines of code (excluding empty lines and comments).
|
||||||
|
*/
|
||||||
|
countLinesOfCode(content: string): number {
|
||||||
|
const lines = content.split("\n")
|
||||||
|
let loc = 0
|
||||||
|
let inBlockComment = false
|
||||||
|
|
||||||
|
for (const line of lines) {
|
||||||
|
const trimmed = line.trim()
|
||||||
|
|
||||||
|
if (inBlockComment) {
|
||||||
|
if (trimmed.includes("*/")) {
|
||||||
|
inBlockComment = false
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (trimmed.startsWith("/*")) {
|
||||||
|
if (!trimmed.includes("*/")) {
|
||||||
|
inBlockComment = true
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const afterComment = trimmed.substring(trimmed.indexOf("*/") + 2).trim()
|
||||||
|
if (afterComment === "" || afterComment.startsWith("//")) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
loc++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (trimmed === "" || trimmed.startsWith("//")) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
loc++
|
||||||
|
}
|
||||||
|
|
||||||
|
return loc
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate maximum nesting depth from AST.
|
||||||
|
*/
|
||||||
|
calculateMaxNesting(ast: FileAST): number {
|
||||||
|
let maxNesting = 0
|
||||||
|
|
||||||
|
for (const func of ast.functions) {
|
||||||
|
const depth = this.estimateFunctionNesting(func)
|
||||||
|
maxNesting = Math.max(maxNesting, depth)
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const cls of ast.classes) {
|
||||||
|
const depth = this.estimateClassNesting(cls)
|
||||||
|
maxNesting = Math.max(maxNesting, depth)
|
||||||
|
}
|
||||||
|
|
||||||
|
return maxNesting
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Estimate nesting depth for a function based on line count.
|
||||||
|
* More accurate nesting would require full AST traversal.
|
||||||
|
*/
|
||||||
|
private estimateFunctionNesting(func: FunctionInfo): number {
|
||||||
|
const lines = func.lineEnd - func.lineStart + 1
|
||||||
|
if (lines <= 5) {
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
if (lines <= 15) {
|
||||||
|
return 2
|
||||||
|
}
|
||||||
|
if (lines <= 30) {
|
||||||
|
return 3
|
||||||
|
}
|
||||||
|
if (lines <= 50) {
|
||||||
|
return 4
|
||||||
|
}
|
||||||
|
return 5
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Estimate nesting depth for a class.
|
||||||
|
*/
|
||||||
|
private estimateClassNesting(cls: ClassInfo): number {
|
||||||
|
let maxMethodNesting = 1
|
||||||
|
|
||||||
|
for (const method of cls.methods) {
|
||||||
|
const lines = method.lineEnd - method.lineStart + 1
|
||||||
|
let depth = 1
|
||||||
|
if (lines > 5) {
|
||||||
|
depth = 2
|
||||||
|
}
|
||||||
|
if (lines > 15) {
|
||||||
|
depth = 3
|
||||||
|
}
|
||||||
|
if (lines > 30) {
|
||||||
|
depth = 4
|
||||||
|
}
|
||||||
|
maxMethodNesting = Math.max(maxMethodNesting, depth)
|
||||||
|
}
|
||||||
|
|
||||||
|
return maxMethodNesting + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate cyclomatic complexity from AST.
|
||||||
|
* Base complexity is 1, +1 for each decision point.
|
||||||
|
*/
|
||||||
|
calculateCyclomaticComplexity(ast: FileAST): number {
|
||||||
|
let complexity = 1
|
||||||
|
|
||||||
|
for (const func of ast.functions) {
|
||||||
|
complexity += this.estimateFunctionComplexity(func)
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const cls of ast.classes) {
|
||||||
|
for (const method of cls.methods) {
|
||||||
|
const lines = method.lineEnd - method.lineStart + 1
|
||||||
|
complexity += Math.max(1, Math.floor(lines / 10))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return complexity
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Estimate function complexity based on size.
|
||||||
|
*/
|
||||||
|
private estimateFunctionComplexity(func: FunctionInfo): number {
|
||||||
|
const lines = func.lineEnd - func.lineStart + 1
|
||||||
|
return Math.max(1, Math.floor(lines / 8))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate overall complexity score (0-100).
|
||||||
|
*/
|
||||||
|
calculateComplexityScore(loc: number, nesting: number, cyclomatic: number): number {
|
||||||
|
const locWeight = 0.3
|
||||||
|
const nestingWeight = 0.35
|
||||||
|
const cyclomaticWeight = 0.35
|
||||||
|
|
||||||
|
const locScore = Math.min(100, (loc / 500) * 100)
|
||||||
|
const nestingScore = Math.min(100, (nesting / 6) * 100)
|
||||||
|
const cyclomaticScore = Math.min(100, (cyclomatic / 30) * 100)
|
||||||
|
|
||||||
|
const score =
|
||||||
|
locScore * locWeight + nestingScore * nestingWeight + cyclomaticScore * cyclomaticWeight
|
||||||
|
|
||||||
|
return Math.round(Math.min(100, score))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve internal imports to absolute file paths.
|
||||||
|
*/
|
||||||
|
resolveDependencies(filePath: string, ast: FileAST): string[] {
|
||||||
|
const dependencies: string[] = []
|
||||||
|
const fileDir = path.dirname(filePath)
|
||||||
|
|
||||||
|
for (const imp of ast.imports) {
|
||||||
|
if (imp.type !== "internal") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolved = this.resolveImportPath(fileDir, imp.from)
|
||||||
|
if (resolved && !dependencies.includes(resolved)) {
|
||||||
|
dependencies.push(resolved)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return dependencies.sort()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve a relative import path to an absolute path.
|
||||||
|
*/
|
||||||
|
private resolveImportPath(fromDir: string, importPath: string): string | null {
|
||||||
|
const absolutePath = path.resolve(fromDir, importPath)
|
||||||
|
const normalized = this.normalizeImportPath(absolutePath)
|
||||||
|
|
||||||
|
if (normalized.startsWith(this.projectRoot)) {
|
||||||
|
return normalized
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize import path by removing file extension if present
|
||||||
|
* and handling index imports.
|
||||||
|
*/
|
||||||
|
private normalizeImportPath(importPath: string): string {
|
||||||
|
let normalized = importPath
|
||||||
|
|
||||||
|
if (normalized.endsWith(".js")) {
|
||||||
|
normalized = `${normalized.slice(0, -3)}.ts`
|
||||||
|
} else if (normalized.endsWith(".jsx")) {
|
||||||
|
normalized = `${normalized.slice(0, -4)}.tsx`
|
||||||
|
} else if (!/\.(ts|tsx|js|jsx)$/.exec(normalized)) {
|
||||||
|
normalized = `${normalized}.ts`
|
||||||
|
}
|
||||||
|
|
||||||
|
return normalized
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Find all files that import the given file.
|
||||||
|
*/
|
||||||
|
findDependents(filePath: string, allASTs: Map<string, FileAST>): string[] {
|
||||||
|
const dependents: string[] = []
|
||||||
|
const normalizedPath = this.normalizePathForComparison(filePath)
|
||||||
|
|
||||||
|
for (const [otherPath, ast] of allASTs) {
|
||||||
|
if (otherPath === filePath) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.fileImportsTarget(otherPath, ast, normalizedPath)) {
|
||||||
|
dependents.push(otherPath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return dependents.sort()
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a file imports the target path.
|
||||||
|
*/
|
||||||
|
private fileImportsTarget(filePath: string, ast: FileAST, normalizedTarget: string): boolean {
|
||||||
|
const fileDir = path.dirname(filePath)
|
||||||
|
|
||||||
|
for (const imp of ast.imports) {
|
||||||
|
if (imp.type !== "internal") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolvedImport = this.resolveImportPath(fileDir, imp.from)
|
||||||
|
if (!resolvedImport) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedImport = this.normalizePathForComparison(resolvedImport)
|
||||||
|
if (this.pathsMatch(normalizedTarget, normalizedImport)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize path for comparison (handle index.ts and extensions).
|
||||||
|
*/
|
||||||
|
private normalizePathForComparison(filePath: string): string {
|
||||||
|
let normalized = filePath
|
||||||
|
|
||||||
|
if (normalized.endsWith(".js")) {
|
||||||
|
normalized = normalized.slice(0, -3)
|
||||||
|
} else if (normalized.endsWith(".ts")) {
|
||||||
|
normalized = normalized.slice(0, -3)
|
||||||
|
} else if (normalized.endsWith(".jsx")) {
|
||||||
|
normalized = normalized.slice(0, -4)
|
||||||
|
} else if (normalized.endsWith(".tsx")) {
|
||||||
|
normalized = normalized.slice(0, -4)
|
||||||
|
}
|
||||||
|
|
||||||
|
return normalized
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if two normalized paths match (including index.ts resolution).
|
||||||
|
*/
|
||||||
|
private pathsMatch(path1: string, path2: string): boolean {
|
||||||
|
if (path1 === path2) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (path1.endsWith("/index") && path2 === path1.slice(0, -6)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if (path2.endsWith("/index") && path1 === path2.slice(0, -6)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Classify file type based on path and name.
|
||||||
|
*/
|
||||||
|
classifyFileType(filePath: string): FileMeta["fileType"] {
|
||||||
|
const basename = path.basename(filePath)
|
||||||
|
const lowercasePath = filePath.toLowerCase()
|
||||||
|
|
||||||
|
if (basename.includes(".test.") || basename.includes(".spec.")) {
|
||||||
|
return "test"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (lowercasePath.includes("/tests/") || lowercasePath.includes("/__tests__/")) {
|
||||||
|
return "test"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (basename.endsWith(".d.ts")) {
|
||||||
|
return "types"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (lowercasePath.includes("/types/") || basename === "types.ts") {
|
||||||
|
return "types"
|
||||||
|
}
|
||||||
|
|
||||||
|
const configPatterns = [
|
||||||
|
"config",
|
||||||
|
"tsconfig",
|
||||||
|
"eslint",
|
||||||
|
"prettier",
|
||||||
|
"vitest",
|
||||||
|
"jest",
|
||||||
|
"babel",
|
||||||
|
"webpack",
|
||||||
|
"vite",
|
||||||
|
"rollup",
|
||||||
|
]
|
||||||
|
|
||||||
|
for (const pattern of configPatterns) {
|
||||||
|
if (basename.toLowerCase().includes(pattern)) {
|
||||||
|
return "config"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
filePath.endsWith(".ts") ||
|
||||||
|
filePath.endsWith(".tsx") ||
|
||||||
|
filePath.endsWith(".js") ||
|
||||||
|
filePath.endsWith(".jsx")
|
||||||
|
) {
|
||||||
|
return "source"
|
||||||
|
}
|
||||||
|
|
||||||
|
return "unknown"
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determine if file is an entry point.
|
||||||
|
*/
|
||||||
|
isEntryPointFile(filePath: string, dependentCount: number): boolean {
|
||||||
|
const basename = path.basename(filePath)
|
||||||
|
|
||||||
|
if (basename.startsWith("index.")) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dependentCount === 0) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
const entryPatterns = ["main.", "app.", "cli.", "server.", "index."]
|
||||||
|
for (const pattern of entryPatterns) {
|
||||||
|
if (basename.toLowerCase().startsWith(pattern)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Batch analyze multiple files.
|
||||||
|
*/
|
||||||
|
analyzeAll(files: Map<string, { ast: FileAST; content: string }>): Map<string, FileMeta> {
|
||||||
|
const allASTs = new Map<string, FileAST>()
|
||||||
|
for (const [filePath, { ast }] of files) {
|
||||||
|
allASTs.set(filePath, ast)
|
||||||
|
}
|
||||||
|
|
||||||
|
const results = new Map<string, FileMeta>()
|
||||||
|
for (const [filePath, { ast, content }] of files) {
|
||||||
|
const meta = this.analyze(filePath, ast, content, allASTs)
|
||||||
|
results.set(filePath, meta)
|
||||||
|
}
|
||||||
|
|
||||||
|
return results
|
||||||
|
}
|
||||||
|
}
|
||||||
285
packages/ipuaro/src/infrastructure/indexer/Watchdog.ts
Normal file
285
packages/ipuaro/src/infrastructure/indexer/Watchdog.ts
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
import * as chokidar from "chokidar"
|
||||||
|
import * as path from "node:path"
|
||||||
|
import { DEFAULT_IGNORE_PATTERNS, SUPPORTED_EXTENSIONS } from "../../domain/constants/index.js"
|
||||||
|
|
||||||
|
/** Kind of filesystem change reported by the watcher. */
export type FileChangeType = "add" | "change" | "unlink"

/** A single debounced file-change notification. */
export interface FileChangeEvent {
    /** What happened to the file. */
    type: FileChangeType
    /** Path of the affected file as reported by the watcher. */
    path: string
    /** Epoch milliseconds when the event was recorded. */
    timestamp: number
}

/** Callback invoked for each file-change event. */
export type FileChangeCallback = (event: FileChangeEvent) => void

/** User-facing Watchdog configuration; every field is optional. */
export interface WatchdogOptions {
    /** Debounce delay in milliseconds (default: 500) */
    debounceMs?: number
    /** Patterns to ignore (default: DEFAULT_IGNORE_PATTERNS) */
    ignorePatterns?: readonly string[]
    /** File extensions to watch (default: SUPPORTED_EXTENSIONS) */
    extensions?: readonly string[]
    /** Use polling instead of native events (useful for network drives) */
    usePolling?: boolean
    /** Polling interval in milliseconds (default: 1000) */
    pollInterval?: number
}

/** Internal shape of options after defaults have been applied. */
interface ResolvedWatchdogOptions {
    debounceMs: number
    ignorePatterns: readonly string[]
    extensions: readonly string[]
    usePolling: boolean
    pollInterval: number
}

/** Defaults merged beneath user-supplied WatchdogOptions. */
const DEFAULT_OPTIONS: ResolvedWatchdogOptions = {
    debounceMs: 500,
    ignorePatterns: DEFAULT_IGNORE_PATTERNS,
    extensions: SUPPORTED_EXTENSIONS,
    usePolling: false,
    pollInterval: 1000,
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Watches for file changes in a directory using chokidar.
|
||||||
|
*/
|
||||||
|
export class Watchdog {
|
||||||
|
private watcher: chokidar.FSWatcher | null = null
|
||||||
|
private readonly callbacks: FileChangeCallback[] = []
|
||||||
|
private readonly pendingChanges = new Map<string, FileChangeEvent>()
|
||||||
|
private readonly debounceTimers = new Map<string, NodeJS.Timeout>()
|
||||||
|
private readonly options: ResolvedWatchdogOptions
|
||||||
|
private root = ""
|
||||||
|
private isRunning = false
|
||||||
|
|
||||||
|
constructor(options: WatchdogOptions = {}) {
|
||||||
|
this.options = { ...DEFAULT_OPTIONS, ...options }
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start watching a directory for file changes.
|
||||||
|
*/
|
||||||
|
start(root: string): void {
|
||||||
|
if (this.isRunning) {
|
||||||
|
void this.stop()
|
||||||
|
}
|
||||||
|
|
||||||
|
this.root = root
|
||||||
|
this.isRunning = true
|
||||||
|
|
||||||
|
const globPatterns = this.buildGlobPatterns(root)
|
||||||
|
const ignorePatterns = this.buildIgnorePatterns()
|
||||||
|
|
||||||
|
this.watcher = chokidar.watch(globPatterns, {
|
||||||
|
ignored: ignorePatterns,
|
||||||
|
persistent: true,
|
||||||
|
ignoreInitial: true,
|
||||||
|
usePolling: this.options.usePolling,
|
||||||
|
interval: this.options.pollInterval,
|
||||||
|
awaitWriteFinish: {
|
||||||
|
stabilityThreshold: 100,
|
||||||
|
pollInterval: 100,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
this.watcher.on("add", (filePath) => {
|
||||||
|
this.handleChange("add", filePath)
|
||||||
|
})
|
||||||
|
this.watcher.on("change", (filePath) => {
|
||||||
|
this.handleChange("change", filePath)
|
||||||
|
})
|
||||||
|
this.watcher.on("unlink", (filePath) => {
|
||||||
|
this.handleChange("unlink", filePath)
|
||||||
|
})
|
||||||
|
this.watcher.on("error", (error) => {
|
||||||
|
this.handleError(error)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop watching for file changes.
|
||||||
|
*/
|
||||||
|
async stop(): Promise<void> {
|
||||||
|
if (!this.isRunning) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const timer of this.debounceTimers.values()) {
|
||||||
|
clearTimeout(timer)
|
||||||
|
}
|
||||||
|
this.debounceTimers.clear()
|
||||||
|
this.pendingChanges.clear()
|
||||||
|
|
||||||
|
if (this.watcher) {
|
||||||
|
await this.watcher.close()
|
||||||
|
this.watcher = null
|
||||||
|
}
|
||||||
|
|
||||||
|
this.isRunning = false
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register a callback for file change events.
|
||||||
|
*/
|
||||||
|
onFileChange(callback: FileChangeCallback): void {
|
||||||
|
this.callbacks.push(callback)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a callback.
|
||||||
|
*/
|
||||||
|
offFileChange(callback: FileChangeCallback): void {
|
||||||
|
const index = this.callbacks.indexOf(callback)
|
||||||
|
if (index !== -1) {
|
||||||
|
this.callbacks.splice(index, 1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if the watchdog is currently running.
|
||||||
|
*/
|
||||||
|
isWatching(): boolean {
|
||||||
|
return this.isRunning
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the root directory being watched.
|
||||||
|
*/
|
||||||
|
getRoot(): string {
|
||||||
|
return this.root
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the number of pending changes waiting to be processed.
|
||||||
|
*/
|
||||||
|
getPendingCount(): number {
|
||||||
|
return this.pendingChanges.size
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle a file change event with debouncing.
|
||||||
|
*/
|
||||||
|
private handleChange(type: FileChangeType, filePath: string): void {
|
||||||
|
if (!this.isValidFile(filePath)) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedPath = path.resolve(filePath)
|
||||||
|
|
||||||
|
const event: FileChangeEvent = {
|
||||||
|
type,
|
||||||
|
path: normalizedPath,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
this.pendingChanges.set(normalizedPath, event)
|
||||||
|
|
||||||
|
const existingTimer = this.debounceTimers.get(normalizedPath)
|
||||||
|
if (existingTimer) {
|
||||||
|
clearTimeout(existingTimer)
|
||||||
|
}
|
||||||
|
|
||||||
|
const timer = setTimeout(() => {
|
||||||
|
this.flushChange(normalizedPath)
|
||||||
|
}, this.options.debounceMs)
|
||||||
|
|
||||||
|
this.debounceTimers.set(normalizedPath, timer)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Flush a pending change and notify callbacks.
|
||||||
|
*/
|
||||||
|
private flushChange(filePath: string): void {
|
||||||
|
const event = this.pendingChanges.get(filePath)
|
||||||
|
if (!event) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
this.pendingChanges.delete(filePath)
|
||||||
|
this.debounceTimers.delete(filePath)
|
||||||
|
|
||||||
|
for (const callback of this.callbacks) {
|
||||||
|
try {
|
||||||
|
callback(event)
|
||||||
|
} catch {
|
||||||
|
// Silently ignore callback errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle watcher errors.
|
||||||
|
*/
|
||||||
|
private handleError(error: Error): void {
|
||||||
|
// Log error but don't crash
|
||||||
|
console.error(`[Watchdog] Error: ${error.message}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a file should be watched based on extension.
|
||||||
|
*/
|
||||||
|
private isValidFile(filePath: string): boolean {
|
||||||
|
const ext = path.extname(filePath)
|
||||||
|
return this.options.extensions.includes(ext)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build glob patterns for watching.
|
||||||
|
*/
|
||||||
|
private buildGlobPatterns(root: string): string[] {
|
||||||
|
return this.options.extensions.map((ext) => path.join(root, "**", `*${ext}`))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build ignore patterns for chokidar.
|
||||||
|
*/
|
||||||
|
private buildIgnorePatterns(): (string | RegExp)[] {
|
||||||
|
const patterns: (string | RegExp)[] = []
|
||||||
|
|
||||||
|
for (const pattern of this.options.ignorePatterns) {
|
||||||
|
if (pattern.includes("*")) {
|
||||||
|
const regexPattern = pattern
|
||||||
|
.replace(/\./g, "\\.")
|
||||||
|
.replace(/\*\*/g, ".*")
|
||||||
|
.replace(/\*/g, "[^/]*")
|
||||||
|
patterns.push(new RegExp(regexPattern))
|
||||||
|
} else {
|
||||||
|
patterns.push(`**/${pattern}/**`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return patterns
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Force flush all pending changes immediately.
|
||||||
|
*/
|
||||||
|
flushAll(): void {
|
||||||
|
for (const timer of this.debounceTimers.values()) {
|
||||||
|
clearTimeout(timer)
|
||||||
|
}
|
||||||
|
this.debounceTimers.clear()
|
||||||
|
|
||||||
|
for (const filePath of this.pendingChanges.keys()) {
|
||||||
|
this.flushChange(filePath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get watched paths (for debugging).
|
||||||
|
*/
|
||||||
|
getWatchedPaths(): string[] {
|
||||||
|
if (!this.watcher) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
const watched = this.watcher.getWatched()
|
||||||
|
const paths: string[] = []
|
||||||
|
for (const dir of Object.keys(watched)) {
|
||||||
|
for (const file of watched[dir]) {
|
||||||
|
paths.push(path.join(dir, file))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return paths.sort()
|
||||||
|
}
|
||||||
|
}
|
||||||
6
packages/ipuaro/src/infrastructure/indexer/index.ts
Normal file
6
packages/ipuaro/src/infrastructure/indexer/index.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
// Indexer infrastructure barrel: re-exports the scan -> parse -> analyze ->
// build pipeline, the file watcher, and tree-sitter node-type constants.
export * from "./FileScanner.js"
export * from "./ASTParser.js"
export * from "./MetaAnalyzer.js"
export * from "./IndexBuilder.js"
export * from "./Watchdog.js"
export * from "./tree-sitter-types.js"
|
||||||
@@ -0,0 +1,77 @@
|
|||||||
|
/**
 * Tree-sitter node type constants for TypeScript/JavaScript parsing.
 * These are infrastructure-level constants, not exposed to domain/application layers.
 *
 * Source: tree-sitter-typescript/typescript/src/node-types.json
 */

/** Grammar node-type names, keyed by stable SCREAMING_SNAKE_CASE constants. */
export const NodeType = {
    // Statements
    IMPORT_STATEMENT: "import_statement",
    EXPORT_STATEMENT: "export_statement",
    LEXICAL_DECLARATION: "lexical_declaration",

    // Declarations
    FUNCTION_DECLARATION: "function_declaration",
    CLASS_DECLARATION: "class_declaration",
    INTERFACE_DECLARATION: "interface_declaration",
    TYPE_ALIAS_DECLARATION: "type_alias_declaration",

    // Clauses
    IMPORT_CLAUSE: "import_clause",
    EXPORT_CLAUSE: "export_clause",
    EXTENDS_CLAUSE: "extends_clause",
    IMPLEMENTS_CLAUSE: "implements_clause",
    EXTENDS_TYPE_CLAUSE: "extends_type_clause",
    CLASS_HERITAGE: "class_heritage",

    // Import specifiers
    NAMESPACE_IMPORT: "namespace_import",
    NAMED_IMPORTS: "named_imports",
    IMPORT_SPECIFIER: "import_specifier",
    EXPORT_SPECIFIER: "export_specifier",

    // Class members
    METHOD_DEFINITION: "method_definition",
    PUBLIC_FIELD_DEFINITION: "public_field_definition",
    FIELD_DEFINITION: "field_definition",
    PROPERTY_SIGNATURE: "property_signature",

    // Parameters
    REQUIRED_PARAMETER: "required_parameter",
    OPTIONAL_PARAMETER: "optional_parameter",

    // Expressions & values
    ARROW_FUNCTION: "arrow_function",
    FUNCTION: "function",
    VARIABLE_DECLARATOR: "variable_declarator",

    // Identifiers & types
    IDENTIFIER: "identifier",
    TYPE_IDENTIFIER: "type_identifier",

    // Modifiers
    ASYNC: "async",
    STATIC: "static",
    ABSTRACT: "abstract",
    DEFAULT: "default",
    ACCESSIBILITY_MODIFIER: "accessibility_modifier",
    READONLY: "readonly",
} as const

/** Union of all node-type string values. */
export type NodeTypeValue = (typeof NodeType)[keyof typeof NodeType]

/** Named-field identifiers from the grammar (used for field-based child lookups). */
export const FieldName = {
    SOURCE: "source",
    NAME: "name",
    ALIAS: "alias",
    DECLARATION: "declaration",
    PARAMETERS: "parameters",
    RETURN_TYPE: "return_type",
    BODY: "body",
    TYPE: "type",
    PATTERN: "pattern",
    VALUE: "value",
} as const

/** Union of all field-name string values. */
export type FieldNameValue = (typeof FieldName)[keyof typeof FieldName]
|
||||||
302
packages/ipuaro/src/infrastructure/llm/OllamaClient.ts
Normal file
302
packages/ipuaro/src/infrastructure/llm/OllamaClient.ts
Normal file
@@ -0,0 +1,302 @@
|
|||||||
|
import { type Message, Ollama, type Tool } from "ollama"
|
||||||
|
import type {
|
||||||
|
ILLMClient,
|
||||||
|
LLMResponse,
|
||||||
|
ToolDef,
|
||||||
|
ToolParameter,
|
||||||
|
} from "../../domain/services/ILLMClient.js"
|
||||||
|
import type { ChatMessage } from "../../domain/value-objects/ChatMessage.js"
|
||||||
|
import { createToolCall, type ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||||
|
import type { LLMConfig } from "../../shared/constants/config.js"
|
||||||
|
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||||
|
import { estimateTokens } from "../../shared/utils/tokens.js"
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ollama LLM client implementation.
|
||||||
|
* Wraps the Ollama SDK for chat completions with tool support.
|
||||||
|
*/
|
||||||
|
export class OllamaClient implements ILLMClient {
|
||||||
|
private readonly client: Ollama
|
||||||
|
private readonly host: string
|
||||||
|
private readonly model: string
|
||||||
|
private readonly contextWindow: number
|
||||||
|
private readonly temperature: number
|
||||||
|
private readonly timeout: number
|
||||||
|
private abortController: AbortController | null = null
|
||||||
|
|
||||||
|
constructor(config: LLMConfig) {
|
||||||
|
this.host = config.host
|
||||||
|
this.client = new Ollama({ host: this.host })
|
||||||
|
this.model = config.model
|
||||||
|
this.contextWindow = config.contextWindow
|
||||||
|
this.temperature = config.temperature
|
||||||
|
this.timeout = config.timeout
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send messages to LLM and get response.
|
||||||
|
*/
|
||||||
|
async chat(messages: ChatMessage[], tools?: ToolDef[]): Promise<LLMResponse> {
|
||||||
|
const startTime = Date.now()
|
||||||
|
this.abortController = new AbortController()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const ollamaMessages = this.convertMessages(messages)
|
||||||
|
const ollamaTools = tools ? this.convertTools(tools) : undefined
|
||||||
|
|
||||||
|
const response = await this.client.chat({
|
||||||
|
model: this.model,
|
||||||
|
messages: ollamaMessages,
|
||||||
|
tools: ollamaTools,
|
||||||
|
options: {
|
||||||
|
temperature: this.temperature,
|
||||||
|
},
|
||||||
|
stream: false,
|
||||||
|
})
|
||||||
|
|
||||||
|
const timeMs = Date.now() - startTime
|
||||||
|
const toolCalls = this.extractToolCalls(response.message)
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: response.message.content,
|
||||||
|
toolCalls,
|
||||||
|
tokens: response.eval_count ?? estimateTokens(response.message.content),
|
||||||
|
timeMs,
|
||||||
|
truncated: false,
|
||||||
|
stopReason: this.determineStopReason(response, toolCalls),
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof Error && error.name === "AbortError") {
|
||||||
|
throw IpuaroError.llm("Request was aborted")
|
||||||
|
}
|
||||||
|
throw this.handleError(error)
|
||||||
|
} finally {
|
||||||
|
this.abortController = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Count tokens in text.
|
||||||
|
* Uses estimation since Ollama doesn't provide a tokenizer endpoint.
|
||||||
|
*/
|
||||||
|
async countTokens(text: string): Promise<number> {
|
||||||
|
return Promise.resolve(estimateTokens(text))
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if LLM service is available.
|
||||||
|
*/
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await this.client.list()
|
||||||
|
return true
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current model name.
|
||||||
|
*/
|
||||||
|
getModelName(): string {
|
||||||
|
return this.model
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get context window size.
|
||||||
|
*/
|
||||||
|
getContextWindowSize(): number {
|
||||||
|
return this.contextWindow
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Pull/download model if not available locally.
|
||||||
|
*/
|
||||||
|
async pullModel(model: string): Promise<void> {
|
||||||
|
try {
|
||||||
|
await this.client.pull({ model, stream: false })
|
||||||
|
} catch (error) {
|
||||||
|
throw this.handleError(error, `Failed to pull model: ${model}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a specific model is available locally.
|
||||||
|
*/
|
||||||
|
async hasModel(model: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const result = await this.client.list()
|
||||||
|
return result.models.some((m) => m.name === model || m.name.startsWith(`${model}:`))
|
||||||
|
} catch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List available models.
|
||||||
|
*/
|
||||||
|
async listModels(): Promise<string[]> {
|
||||||
|
try {
|
||||||
|
const result = await this.client.list()
|
||||||
|
return result.models.map((m) => m.name)
|
||||||
|
} catch (error) {
|
||||||
|
throw this.handleError(error, "Failed to list models")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Abort current generation.
|
||||||
|
*/
|
||||||
|
abort(): void {
|
||||||
|
if (this.abortController) {
|
||||||
|
this.abortController.abort()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert ChatMessage array to Ollama Message format.
|
||||||
|
*/
|
||||||
|
private convertMessages(messages: ChatMessage[]): Message[] {
|
||||||
|
return messages.map((msg): Message => {
|
||||||
|
const role = this.convertRole(msg.role)
|
||||||
|
|
||||||
|
if (msg.role === "tool" && msg.toolResults) {
|
||||||
|
return {
|
||||||
|
role: "tool",
|
||||||
|
content: msg.content,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (msg.role === "assistant" && msg.toolCalls && msg.toolCalls.length > 0) {
|
||||||
|
return {
|
||||||
|
role: "assistant",
|
||||||
|
content: msg.content,
|
||||||
|
tool_calls: msg.toolCalls.map((tc) => ({
|
||||||
|
function: {
|
||||||
|
name: tc.name,
|
||||||
|
arguments: tc.params,
|
||||||
|
},
|
||||||
|
})),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
role,
|
||||||
|
content: msg.content,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert message role to Ollama role.
|
||||||
|
*/
|
||||||
|
private convertRole(role: ChatMessage["role"]): "user" | "assistant" | "system" | "tool" {
|
||||||
|
switch (role) {
|
||||||
|
case "user":
|
||||||
|
return "user"
|
||||||
|
case "assistant":
|
||||||
|
return "assistant"
|
||||||
|
case "system":
|
||||||
|
return "system"
|
||||||
|
case "tool":
|
||||||
|
return "tool"
|
||||||
|
default:
|
||||||
|
return "user"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert ToolDef array to Ollama Tool format.
|
||||||
|
*/
|
||||||
|
private convertTools(tools: ToolDef[]): Tool[] {
|
||||||
|
return tools.map(
|
||||||
|
(tool): Tool => ({
|
||||||
|
type: "function",
|
||||||
|
function: {
|
||||||
|
name: tool.name,
|
||||||
|
description: tool.description,
|
||||||
|
parameters: {
|
||||||
|
type: "object",
|
||||||
|
properties: this.convertParameters(tool.parameters),
|
||||||
|
required: tool.parameters.filter((p) => p.required).map((p) => p.name),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert ToolParameter array to JSON Schema properties.
|
||||||
|
*/
|
||||||
|
private convertParameters(
|
||||||
|
params: ToolParameter[],
|
||||||
|
): Record<string, { type: string; description: string; enum?: string[] }> {
|
||||||
|
const properties: Record<string, { type: string; description: string; enum?: string[] }> =
|
||||||
|
{}
|
||||||
|
|
||||||
|
for (const param of params) {
|
||||||
|
properties[param.name] = {
|
||||||
|
type: param.type,
|
||||||
|
description: param.description,
|
||||||
|
...(param.enum && { enum: param.enum }),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return properties
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract tool calls from Ollama response message.
|
||||||
|
*/
|
||||||
|
private extractToolCalls(message: Message): ToolCall[] {
|
||||||
|
if (!message.tool_calls || message.tool_calls.length === 0) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
return message.tool_calls.map((tc, index) =>
|
||||||
|
createToolCall(
|
||||||
|
`call_${String(Date.now())}_${String(index)}`,
|
||||||
|
tc.function.name,
|
||||||
|
tc.function.arguments,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Determine stop reason from response.
|
||||||
|
*/
|
||||||
|
private determineStopReason(
|
||||||
|
response: { done_reason?: string },
|
||||||
|
toolCalls: ToolCall[],
|
||||||
|
): "end" | "length" | "tool_use" {
|
||||||
|
if (toolCalls.length > 0) {
|
||||||
|
return "tool_use"
|
||||||
|
}
|
||||||
|
|
||||||
|
if (response.done_reason === "length") {
|
||||||
|
return "length"
|
||||||
|
}
|
||||||
|
|
||||||
|
return "end"
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle and wrap errors.
|
||||||
|
*/
|
||||||
|
private handleError(error: unknown, context?: string): IpuaroError {
|
||||||
|
const message = error instanceof Error ? error.message : String(error)
|
||||||
|
const fullMessage = context ? `${context}: ${message}` : message
|
||||||
|
|
||||||
|
if (message.includes("ECONNREFUSED") || message.includes("fetch failed")) {
|
||||||
|
return IpuaroError.llm(`Cannot connect to Ollama at ${this.host}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (message.includes("model") && message.includes("not found")) {
|
||||||
|
return IpuaroError.llm(
|
||||||
|
`Model "${this.model}" not found. Run: ollama pull ${this.model}`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
return IpuaroError.llm(fullMessage)
|
||||||
|
}
|
||||||
|
}
|
||||||
220
packages/ipuaro/src/infrastructure/llm/ResponseParser.ts
Normal file
220
packages/ipuaro/src/infrastructure/llm/ResponseParser.ts
Normal file
@@ -0,0 +1,220 @@
|
|||||||
|
import { createToolCall, type ToolCall } from "../../domain/value-objects/ToolCall.js"
|
||||||
|
|
||||||
|
/**
 * Parsed response from LLM: remaining text, extracted tool calls, and any
 * non-fatal parse errors encountered along the way.
 */
export interface ParsedResponse {
    /** Text content (excluding tool calls) */
    content: string
    /** Extracted tool calls */
    toolCalls: ToolCall[]
    /** Whether parsing encountered issues */
    hasParseErrors: boolean
    /** Parse error messages */
    parseErrors: string[]
}

/**
 * XML tool call tag pattern.
 * Matches: <tool_call name="tool_name">...</tool_call>
 *
 * NOTE: declared with the /g flag, so .test()/.exec() on this shared object
 * are stateful (lastIndex advances between calls); reset lastIndex (or use a
 * fresh literal) before reusing it.
 */
const TOOL_CALL_REGEX = /<tool_call\s+name\s*=\s*"([^"]+)">([\s\S]*?)<\/tool_call>/gi

/**
 * XML parameter tag patterns.
 * Matches: <param name="param_name">value</param> or <param_name>value</param_name>
 */
const PARAM_REGEX_NAMED = /<param\s+name\s*=\s*"([^"]+)">([\s\S]*?)<\/param>/gi
const PARAM_REGEX_ELEMENT = /<([a-z_][a-z0-9_]*)>([\s\S]*?)<\/\1>/gi
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse tool calls from LLM response text.
|
||||||
|
* Supports XML format: <tool_call name="get_lines"><path>src/index.ts</path></tool_call>
|
||||||
|
*/
|
||||||
|
export function parseToolCalls(response: string): ParsedResponse {
|
||||||
|
const toolCalls: ToolCall[] = []
|
||||||
|
const parseErrors: string[] = []
|
||||||
|
let content = response
|
||||||
|
|
||||||
|
const matches = [...response.matchAll(TOOL_CALL_REGEX)]
|
||||||
|
|
||||||
|
for (const match of matches) {
|
||||||
|
const [fullMatch, toolName, paramsXml] = match
|
||||||
|
|
||||||
|
try {
|
||||||
|
const params = parseParameters(paramsXml)
|
||||||
|
const toolCall = createToolCall(
|
||||||
|
`xml_${String(Date.now())}_${String(toolCalls.length)}`,
|
||||||
|
toolName,
|
||||||
|
params,
|
||||||
|
)
|
||||||
|
toolCalls.push(toolCall)
|
||||||
|
content = content.replace(fullMatch, "")
|
||||||
|
} catch (error) {
|
||||||
|
const errorMsg = error instanceof Error ? error.message : String(error)
|
||||||
|
parseErrors.push(`Failed to parse tool call "${toolName}": ${errorMsg}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
content = content.trim()
|
||||||
|
|
||||||
|
return {
|
||||||
|
content,
|
||||||
|
toolCalls,
|
||||||
|
hasParseErrors: parseErrors.length > 0,
|
||||||
|
parseErrors,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse parameters from XML content.
|
||||||
|
*/
|
||||||
|
function parseParameters(xml: string): Record<string, unknown> {
|
||||||
|
const params: Record<string, unknown> = {}
|
||||||
|
|
||||||
|
const namedMatches = [...xml.matchAll(PARAM_REGEX_NAMED)]
|
||||||
|
for (const match of namedMatches) {
|
||||||
|
const [, name, value] = match
|
||||||
|
params[name] = parseValue(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (namedMatches.length === 0) {
|
||||||
|
const elementMatches = [...xml.matchAll(PARAM_REGEX_ELEMENT)]
|
||||||
|
for (const match of elementMatches) {
|
||||||
|
const [, name, value] = match
|
||||||
|
params[name] = parseValue(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return params
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a value string to appropriate type.
|
||||||
|
*/
|
||||||
|
function parseValue(value: string): unknown {
|
||||||
|
const trimmed = value.trim()
|
||||||
|
|
||||||
|
if (trimmed === "true") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (trimmed === "false") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
if (trimmed === "null") {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const num = Number(trimmed)
|
||||||
|
if (!isNaN(num) && trimmed !== "") {
|
||||||
|
return num
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
(trimmed.startsWith("[") && trimmed.endsWith("]")) ||
|
||||||
|
(trimmed.startsWith("{") && trimmed.endsWith("}"))
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
return JSON.parse(trimmed)
|
||||||
|
} catch {
|
||||||
|
return trimmed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return trimmed
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Format tool calls to XML for prompt injection.
 * Useful when you need to show the LLM the expected format.
 *
 * Each call becomes a <tool_call name="..."> element with one child element
 * per parameter; calls are separated by a blank line.
 */
export function formatToolCallsAsXml(toolCalls: ToolCall[]): string {
    return toolCalls
        .map((tc) => {
            const params = Object.entries(tc.params)
                .map(([key, value]) => ` <${key}>${formatValueForXml(value)}</${key}>`)
                .join("\n")
            return `<tool_call name="${tc.name}">\n${params}\n</tool_call>`
        })
        .join("\n\n")
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format a value for XML output.
|
||||||
|
*/
|
||||||
|
function formatValueForXml(value: unknown): string {
|
||||||
|
if (value === null || value === undefined) {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof value === "object") {
|
||||||
|
return JSON.stringify(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof value === "string") {
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof value === "number" || typeof value === "boolean") {
|
||||||
|
return String(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
return JSON.stringify(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract thinking/reasoning from response.
|
||||||
|
* Matches content between <thinking>...</thinking> tags.
|
||||||
|
*/
|
||||||
|
export function extractThinking(response: string): { thinking: string; content: string } {
|
||||||
|
const thinkingRegex = /<thinking>([\s\S]*?)<\/thinking>/gi
|
||||||
|
const matches = [...response.matchAll(thinkingRegex)]
|
||||||
|
|
||||||
|
if (matches.length === 0) {
|
||||||
|
return { thinking: "", content: response }
|
||||||
|
}
|
||||||
|
|
||||||
|
let content = response
|
||||||
|
const thoughts: string[] = []
|
||||||
|
|
||||||
|
for (const match of matches) {
|
||||||
|
thoughts.push(match[1].trim())
|
||||||
|
content = content.replace(match[0], "")
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
thinking: thoughts.join("\n\n"),
|
||||||
|
content: content.trim(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if response contains tool calls.
|
||||||
|
*/
|
||||||
|
export function hasToolCalls(response: string): boolean {
|
||||||
|
return TOOL_CALL_REGEX.test(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate tool call parameters against expected schema.
|
||||||
|
*/
|
||||||
|
export function validateToolCallParams(
|
||||||
|
toolName: string,
|
||||||
|
params: Record<string, unknown>,
|
||||||
|
requiredParams: string[],
|
||||||
|
): { valid: boolean; errors: string[] } {
|
||||||
|
const errors: string[] = []
|
||||||
|
|
||||||
|
for (const param of requiredParams) {
|
||||||
|
if (!(param in params) || params[param] === undefined || params[param] === null) {
|
||||||
|
errors.push(`Missing required parameter: ${param}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
valid: errors.length === 0,
|
||||||
|
errors,
|
||||||
|
}
|
||||||
|
}
|
||||||
48
packages/ipuaro/src/infrastructure/llm/index.ts
Normal file
48
packages/ipuaro/src/infrastructure/llm/index.ts
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
// LLM infrastructure barrel: Ollama client, prompt builders, tool
// definitions, and response-parsing helpers.
export { OllamaClient } from "./OllamaClient.js"
export {
    SYSTEM_PROMPT,
    buildInitialContext,
    buildFileContext,
    truncateContext,
    type ProjectStructure,
} from "./prompts.js"
export {
    ALL_TOOLS,
    READ_TOOLS,
    EDIT_TOOLS,
    SEARCH_TOOLS,
    ANALYSIS_TOOLS,
    GIT_TOOLS,
    RUN_TOOLS,
    CONFIRMATION_TOOLS,
    requiresConfirmation,
    getToolDef,
    getToolsByCategory,
    GET_LINES_TOOL,
    GET_FUNCTION_TOOL,
    GET_CLASS_TOOL,
    GET_STRUCTURE_TOOL,
    EDIT_LINES_TOOL,
    CREATE_FILE_TOOL,
    DELETE_FILE_TOOL,
    FIND_REFERENCES_TOOL,
    FIND_DEFINITION_TOOL,
    GET_DEPENDENCIES_TOOL,
    GET_DEPENDENTS_TOOL,
    GET_COMPLEXITY_TOOL,
    GET_TODOS_TOOL,
    GIT_STATUS_TOOL,
    GIT_DIFF_TOOL,
    GIT_COMMIT_TOOL,
    RUN_COMMAND_TOOL,
    RUN_TESTS_TOOL,
} from "./toolDefs.js"
export {
    parseToolCalls,
    formatToolCallsAsXml,
    extractThinking,
    hasToolCalls,
    validateToolCallParams,
    type ParsedResponse,
} from "./ResponseParser.js"
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user