mirror of https://github.com/samiyev/puaros.git
synced 2025-12-28 07:16:53 +05:00

Compare commits: ipuaro-v0. ... ipuaro-v0. (5 commits)

| Author | SHA1 | Date | |
|---|---|---|---|
| | 225480c806 | | |
| | fd8e97af0e | | |
| | d36f9a6e21 | | |
| | 4267938dcd | | |
| | 127c7e2185 | | |
@@ -74,6 +74,7 @@ export default tseslint.config(
'@typescript-eslint/require-await': 'warn',
'@typescript-eslint/no-unnecessary-condition': 'off', // Sometimes useful for defensive coding
'@typescript-eslint/no-non-null-assertion': 'warn',
'@typescript-eslint/no-unnecessary-type-parameters': 'warn', // Allow generic JSON parsers

// ========================================
// Code Quality & Best Practices

@@ -325,17 +325,6 @@ await reportMetrics({
| **AI Enablement** | Safely adopt AI coding tools at scale |
| **Technical Debt Visibility** | Metrics and trends for data-driven decisions |

### Enterprise Success Stories

**Fortune 500 Financial Services** 🏦
> "We have 200+ developers and were struggling with architectural consistency. Guardian reduced our code review cycle time by 35% and caught 12 hardcoded API keys before they hit production. ROI in first month." - VP Engineering

**Scale-up SaaS (Series B)** 📈
> "Guardian allowed us to confidently adopt GitHub Copilot across our team. AI writes code 3x faster, Guardian ensures quality. We ship more features without increasing tech debt." - CTO

**Consulting Firm** 💼
> "We use Guardian on every client project. It enforces our standards automatically, and clients love the quality metrics reports. Saved us from a major security incident when it caught hardcoded AWS credentials." - Lead Architect

## Installation

```bash
@@ -970,36 +959,6 @@ Guardian follows Clean Architecture principles:
- Node.js >= 18.0.0
- TypeScript >= 5.0.0 (for TypeScript projects)

## Real-World Vibe Coding Stats

Based on testing Guardian with AI-generated codebases:

| Metric | Typical AI Code | After Guardian |
|--------|----------------|----------------|
| Hardcoded values | 15-30 per 1000 LOC | 0-2 per 1000 LOC |
| Circular deps | 2-5 per project | 0 per project |
| Architecture violations | 10-20% of files | <1% of files |
| Time to fix issues | Manual review: 2-4 hours | Guardian + AI: 5-10 minutes |

**Common Issues Guardian Finds in AI Code:**
- 🔐 Hardcoded secrets and API keys (CRITICAL)
- ⏱️ Magic timeouts and retry counts
- 🌐 Hardcoded URLs and endpoints
- 🔄 Accidental circular imports
- 📁 Files in wrong architectural layers
- 🏷️ Inconsistent naming patterns

## Success Stories

**Prototype to Production** ⚡
> "Built a SaaS MVP with Claude in 3 days. Guardian caught 47 hardcoded values before first deploy. Saved us from production disasters." - Indie Hacker

**Learning Clean Architecture** 📚
> "Guardian taught me Clean Architecture better than any tutorial. Every violation is a mini lesson with suggestions." - Junior Dev

**AI-First Startup** 🚀
> "We ship 5+ features daily using Claude + Guardian. No human code reviews needed for AI-generated code anymore." - Tech Lead

## FAQ for Vibe Coders

**Q: Will Guardian slow down my AI workflow?**

@@ -1,6 +1,6 @@
{
"name": "@samiyev/guardian",
"version": "0.9.2",
"version": "0.9.3",
"description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
"keywords": [
"puaros",

@@ -5,6 +5,34 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.2.0] - 2025-01-30

### Added

- **Redis Storage (0.2.x milestone)**
    - RedisClient: connection wrapper with AOF persistence configuration
    - RedisStorage: full IStorage implementation with Redis hashes
    - Redis key schema: project files, AST, meta, indexes, config
    - Session keys schema: data, undo stack, sessions list
    - `generateProjectName()` utility for consistent project naming

- **Infrastructure Layer**
    - `src/infrastructure/storage/` module
    - Exports via `src/infrastructure/index.ts`

- **Testing**
    - 68 new unit tests for Redis module
    - 159 total tests
    - 99% code coverage maintained

### Changed

- Updated ESLint config: `@typescript-eslint/no-unnecessary-type-parameters` set to warn

### Notes

Redis Storage milestone complete. Next: 0.3.0 - Indexer (FileScanner, AST Parser, Watchdog)

## [0.1.0] - 2025-01-29

### Added

@@ -1,17 +1,75 @@
# @samiyev/ipuaro
# @samiyev/ipuaro 🎩

Local AI agent for codebase operations with "infinite" context feeling through lazy loading.
**Local AI Agent for Codebase Operations**

## Features
"Infinite" context feeling through lazy loading - work with your entire codebase using local LLM.

- 18 LLM tools for code operations (read, edit, search, analysis, git, run)
- Redis persistence with AOF for durability
[](https://www.npmjs.com/package/@samiyev/ipuaro)
[](https://opensource.org/licenses/MIT)

> **Status:** 🚧 Early Development (v0.1.0 Foundation)
>
> Core infrastructure is ready. Active development in progress.

## Vision

Work with codebases of any size using local AI:
- 📂 **Lazy Loading**: Load code on-demand, not all at once
- 🧠 **Smart Context**: AST-based understanding of your code structure
- 🔒 **100% Local**: Your code never leaves your machine
- ⚡ **Fast**: Redis persistence + tree-sitter parsing

## Planned Features

### 18 LLM Tools

| Category | Tools | Status |
|----------|-------|--------|
| **Read** | `get_lines`, `get_function`, `get_class`, `get_structure` | 🔜 v0.5.0 |
| **Edit** | `edit_lines`, `create_file`, `delete_file` | 🔜 v0.6.0 |
| **Search** | `find_references`, `find_definition` | 🔜 v0.7.0 |
| **Analysis** | `get_dependencies`, `get_dependents`, `get_complexity`, `get_todos` | 🔜 v0.8.0 |
| **Git** | `git_status`, `git_diff`, `git_commit` | 🔜 v0.9.0 |
| **Run** | `run_command`, `run_tests` | 🔜 v0.9.0 |

### Terminal UI
|
||||
|
||||
```
|
||||
┌─ ipuaro ──────────────────────────────────────────────────┐
|
||||
│ [ctx: 12%] [project: myapp] [main] [47m] ✓ Ready │
|
||||
├───────────────────────────────────────────────────────────┤
|
||||
│ You: How does the authentication flow work? │
|
||||
│ │
|
||||
│ Assistant: Let me analyze the auth module... │
|
||||
│ [get_structure src/auth/] │
|
||||
│ [get_function src/auth/service.ts login] │
|
||||
│ │
|
||||
│ The authentication flow works as follows: │
|
||||
│ 1. User calls POST /auth/login │
|
||||
│ 2. AuthService.login() validates credentials... │
|
||||
│ │
|
||||
│ ⏱ 3.2s │ 1,247 tokens │ 2 tool calls │
|
||||
├───────────────────────────────────────────────────────────┤
|
||||
│ > _ │
|
||||
└───────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### Key Capabilities
|
||||
|
||||
🔍 **Smart Code Understanding**
|
||||
- tree-sitter AST parsing (TypeScript, JavaScript)
|
||||
- Ollama LLM integration (local, private)
|
||||
- File watching for live index updates
|
||||
- Session and undo management
|
||||
- Security (blacklist/whitelist for shell commands)
|
||||
- Terminal UI with Ink/React
|
||||
- Symbol index for fast lookups
|
||||
- Dependency graph analysis
|
||||
|
||||
💾 **Persistent Sessions**
|
||||
- Redis storage with AOF persistence
|
||||
- Session history across restarts
|
||||
- Undo stack for file changes
|
||||
|
||||
🛡️ **Security**
|
||||
- Command blacklist (dangerous operations blocked)
|
||||
- Command whitelist (safe commands auto-approved)
|
||||
- Path validation (no access outside project)
|
||||
|
||||
## Installation
|
||||
|
||||
@@ -23,24 +81,47 @@ pnpm add @samiyev/ipuaro
|
||||
|
||||
## Requirements
|
||||
|
||||
- Node.js >= 20.0.0
|
||||
- Redis server (for persistence)
|
||||
- Ollama (for LLM inference)
|
||||
- **Node.js** >= 20.0.0
|
||||
- **Redis** (for persistence)
|
||||
- **Ollama** (for local LLM inference)
|
||||
|
||||
## Quick Start
|
||||
### Setup Ollama
|
||||
|
||||
```bash
|
||||
# Start in current directory
|
||||
# Install Ollama (macOS)
|
||||
brew install ollama
|
||||
|
||||
# Start Ollama
|
||||
ollama serve
|
||||
|
||||
# Pull recommended model
|
||||
ollama pull qwen2.5-coder:7b-instruct
|
||||
```
|
||||
|
||||
### Setup Redis
|
||||
|
||||
```bash
|
||||
# Install Redis (macOS)
|
||||
brew install redis
|
||||
|
||||
# Start Redis with persistence
|
||||
redis-server --appendonly yes
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```bash
|
||||
# Start ipuaro in current directory
|
||||
ipuaro
|
||||
|
||||
# Start in specific directory
|
||||
ipuaro /path/to/project
|
||||
|
||||
# With auto-apply mode
|
||||
ipuaro --auto-apply
|
||||
|
||||
# With custom model
|
||||
ipuaro --model qwen2.5-coder:32b-instruct
|
||||
|
||||
# With auto-apply mode (skip edit confirmations)
|
||||
ipuaro --auto-apply
|
||||
```
|
||||
|
||||
## Commands
|
||||
@@ -48,7 +129,7 @@ ipuaro --model qwen2.5-coder:32b-instruct
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `ipuaro [path]` | Start TUI in directory |
|
||||
| `ipuaro init` | Create .ipuaro.json config |
|
||||
| `ipuaro init` | Create `.ipuaro.json` config |
|
||||
| `ipuaro index` | Index project without TUI |
|
||||
|
||||
## Configuration
|
||||
@@ -65,6 +146,9 @@ Create `.ipuaro.json` in your project root:
|
||||
"model": "qwen2.5-coder:7b-instruct",
|
||||
"temperature": 0.1
|
||||
},
|
||||
"project": {
|
||||
"ignorePatterns": ["node_modules", "dist", ".git"]
|
||||
},
|
||||
"edit": {
|
||||
"autoApply": false
|
||||
}
|
||||
@@ -76,55 +160,125 @@ Create `.ipuaro.json` in your project root:
|
||||
Clean Architecture with clear separation:
|
||||
|
||||
```
|
||||
src/
|
||||
├── domain/ # Business logic (entities, value objects, interfaces)
|
||||
├── application/ # Use cases, DTOs, orchestration
|
||||
├── infrastructure/ # External implementations (Redis, Ollama, tools)
|
||||
├── tui/ # Terminal UI (Ink/React components)
|
||||
├── cli/ # CLI commands
|
||||
└── shared/ # Cross-cutting concerns
|
||||
@samiyev/ipuaro/
|
||||
├── domain/ # Business logic (no dependencies)
|
||||
│ ├── entities/ # Session, Project
|
||||
│ ├── value-objects/ # FileData, FileAST, ChatMessage, etc.
|
||||
│ └── services/ # IStorage, ILLMClient, ITool, IIndexer
|
||||
├── application/ # Use cases & orchestration
|
||||
│ ├── use-cases/ # StartSession, HandleMessage, etc.
|
||||
│ └── interfaces/ # IToolRegistry
|
||||
├── infrastructure/ # External implementations
|
||||
│ ├── storage/ # Redis client & storage
|
||||
│ ├── llm/ # Ollama client & prompts
|
||||
│ ├── indexer/ # File scanner, AST parser
|
||||
│ └── tools/ # 18 tool implementations
|
||||
├── tui/ # Terminal UI (Ink/React)
|
||||
│ └── components/ # StatusBar, Chat, Input, etc.
|
||||
├── cli/ # CLI entry point
|
||||
└── shared/ # Config, errors, utils
|
||||
```
|
||||
|
||||
## Tools (18 total)

| Category | Tool | Description |
|----------|------|-------------|
| **Read** | `get_lines` | Get file lines |
| | `get_function` | Get function by name |
| | `get_class` | Get class by name |
| | `get_structure` | Get project tree |
| **Edit** | `edit_lines` | Replace lines |
| | `create_file` | Create new file |
| | `delete_file` | Delete file |
| **Search** | `find_references` | Find symbol usages |
| | `find_definition` | Find symbol definition |
| **Analysis** | `get_dependencies` | File imports |
| | `get_dependents` | Files importing this |
| | `get_complexity` | Complexity metrics |
| | `get_todos` | Find TODO/FIXME |
| **Git** | `git_status` | Repository status |
| | `git_diff` | Uncommitted changes |
| | `git_commit` | Create commit |
| **Run** | `run_command` | Execute shell command |
| | `run_tests` | Run test suite |

## Development Status
|
||||
|
||||
Currently at version **0.1.0** (Foundation). See [ROADMAP.md](./ROADMAP.md) for full development plan.
|
||||
### ✅ Completed (v0.1.0)
|
||||
|
||||
### Completed
|
||||
- [x] Project setup (tsup, vitest, ESM)
|
||||
- [x] Domain entities (Session, Project)
|
||||
- [x] Value objects (FileData, FileAST, ChatMessage, etc.)
|
||||
- [x] Service interfaces (IStorage, ILLMClient, ITool, IIndexer)
|
||||
- [x] Shared module (Config, Errors, Utils)
|
||||
- [x] CLI placeholder commands
|
||||
- [x] 91 unit tests, 100% coverage
|
||||
|
||||
- [x] 0.1.1 Project Setup
|
||||
- [x] 0.1.2 Domain Value Objects
|
||||
- [x] 0.1.3 Domain Services Interfaces
|
||||
- [x] 0.1.4 Shared Config
|
||||
### 🔜 Next Up
|
||||
|
||||
### Next
|
||||
- [ ] **v0.2.0** - Redis Storage
|
||||
- [ ] **v0.3.0** - Indexer (file scanning, AST parsing)
|
||||
- [ ] **v0.4.0** - LLM Integration (Ollama)
|
||||
- [ ] **v0.5.0-0.9.0** - Tools implementation
|
||||
- [ ] **v0.10.0** - Session management
|
||||
- [ ] **v0.11.0** - TUI
|
||||
|
||||
- [ ] 0.2.0 Redis Storage
|
||||
- [ ] 0.3.0 Indexer
|
||||
- [ ] 0.4.0 LLM Integration
|
||||
See [ROADMAP.md](./ROADMAP.md) for detailed development plan.
|
||||
|
||||
## API (Coming Soon)
|
||||
|
||||
```typescript
|
||||
import { startSession, handleMessage } from "@samiyev/ipuaro"
|
||||
|
||||
// Start a session
|
||||
const session = await startSession({
|
||||
projectPath: "./my-project",
|
||||
model: "qwen2.5-coder:7b-instruct"
|
||||
})
|
||||
|
||||
// Send a message
|
||||
const response = await handleMessage(session, "Explain the auth flow")
|
||||
|
||||
console.log(response.content)
|
||||
console.log(`Tokens: ${response.stats.tokens}`)
|
||||
console.log(`Tool calls: ${response.stats.toolCalls}`)
|
||||
```
|
||||
|
||||
## How It Works

### Lazy Loading Context

Instead of loading the entire codebase into context:

```
Traditional approach:
├── Load all files → 500k tokens → ❌ Exceeds context window

ipuaro approach:
├── Load project structure → 2k tokens
├── Load AST metadata → 10k tokens
├── On demand: get_function("auth.ts", "login") → 200 tokens
├── Total: ~12k tokens → ✅ Fits in context
```
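
To make the budget idea concrete, here is a minimal TypeScript sketch of lazy, priority-ordered context assembly. It is an illustration only: the helper names, the 4-characters-per-token estimate, and the 8k budget are assumptions, not the actual ipuaro implementation.

```typescript
// Illustrative sketch only - not the real ipuaro API.
interface ContextChunk {
    label: string
    text: string
}

// Rough token estimate (~4 characters per token) - an assumption for this sketch.
const estimateTokens = (text: string): number => Math.ceil(text.length / 4)

// Add chunks in priority order (structure first, code bodies on demand)
// and stop before the context budget is exceeded.
function buildContext(chunks: ContextChunk[], budgetTokens: number): ContextChunk[] {
    const selected: ContextChunk[] = []
    let used = 0
    for (const chunk of chunks) {
        const cost = estimateTokens(chunk.text)
        if (used + cost > budgetTokens) {
            break
        }
        selected.push(chunk)
        used += cost
    }
    return selected
}

const context = buildContext(
    [
        { label: "project structure", text: "src/\n  auth/\n    service.ts" },
        { label: "ast summary", text: "service.ts: class AuthService { login() }" },
        { label: "get_function auth/service.ts login", text: "async login() { /* ... */ }" },
    ],
    8000,
)
console.log(context.map((c) => c.label))
```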

### Tool-Based Code Access

```
User: "How does user creation work?"

ipuaro:
1. [get_structure src/] → sees user/ folder
2. [get_function src/user/service.ts createUser] → gets function code
3. [find_references createUser] → finds all usages
4. Synthesizes answer with specific code context
```
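
The same flow as a hedged TypeScript sketch: the model either answers or requests tool calls, and the agent executes them and feeds the results back. The `LLMTurn`, `askLLM`, and `callTool` shapes are placeholders, not ipuaro's real interfaces.

```typescript
// Placeholder shapes for the sketch - not ipuaro's actual interfaces.
interface ToolCall {
    name: string
    params: Record<string, unknown>
}

interface LLMTurn {
    answer?: string
    toolCalls: ToolCall[]
}

type AskLLM = (history: string[]) => Promise<LLMTurn>
type CallTool = (call: ToolCall) => Promise<string>

// Loop until the model answers without requesting more tools,
// with a hard cap on rounds as a safety net.
async function answerQuestion(question: string, askLLM: AskLLM, callTool: CallTool): Promise<string> {
    const history = [`User: ${question}`]
    for (let round = 0; round < 10; round++) {
        const turn = await askLLM(history)
        if (turn.toolCalls.length === 0) {
            return turn.answer ?? ""
        }
        for (const call of turn.toolCalls) {
            const result = await callTool(call) // e.g. get_structure, get_function, find_references
            history.push(`Tool ${call.name}: ${result}`)
        }
    }
    return "Stopped after 10 tool rounds without a final answer."
}
```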
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions welcome! This project is in early development.
|
||||
|
||||
```bash
|
||||
# Clone
|
||||
git clone https://github.com/samiyev/puaros.git
|
||||
cd puaros/packages/ipuaro
|
||||
|
||||
# Install
|
||||
pnpm install
|
||||
|
||||
# Build
|
||||
pnpm build
|
||||
|
||||
# Test
|
||||
pnpm test:run
|
||||
|
||||
# Coverage
|
||||
pnpm test:coverage
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
MIT © Fozilbek Samiyev
|
||||
|
||||
## Links
|
||||
|
||||
- [GitHub Repository](https://github.com/samiyev/puaros/tree/main/packages/ipuaro)
|
||||
- [Issues](https://github.com/samiyev/puaros/issues)
|
||||
- [Changelog](./CHANGELOG.md)
|
||||
- [Roadmap](./ROADMAP.md)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@samiyev/ipuaro",
|
||||
"version": "0.1.0",
|
||||
"version": "0.2.0",
|
||||
"description": "Local AI agent for codebase operations with infinite context feeling",
|
||||
"author": "Fozilbek Samiyev <fozilbek.samiyev@gmail.com>",
|
||||
"license": "MIT",
|
||||
@@ -77,4 +77,4 @@
|
||||
"url": "https://github.com/samiyev/puaros/issues"
|
||||
},
|
||||
"homepage": "https://github.com/samiyev/puaros/tree/main/packages/ipuaro#readme"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,5 +13,8 @@ export * from "./application/index.js"
|
||||
// Shared exports
|
||||
export * from "./shared/index.js"
|
||||
|
||||
// Infrastructure exports
|
||||
export * from "./infrastructure/index.js"
|
||||
|
||||
// Version
|
||||
export const VERSION = "0.1.0"
|
||||
export const VERSION = "0.2.0"
|
||||
|
||||
packages/ipuaro/src/infrastructure/index.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
|
||||
// Infrastructure layer exports
|
||||
export * from "./storage/index.js"
|
||||
packages/ipuaro/src/infrastructure/storage/RedisClient.ts (new file, 119 lines)
@@ -0,0 +1,119 @@
|
||||
import { Redis } from "ioredis"
|
||||
import type { RedisConfig } from "../../shared/constants/config.js"
|
||||
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||
|
||||
/**
|
||||
* Redis client wrapper with connection management.
|
||||
* Handles connection lifecycle and AOF configuration.
|
||||
*/
|
||||
export class RedisClient {
|
||||
private client: Redis | null = null
|
||||
private readonly config: RedisConfig
|
||||
private connected = false
|
||||
|
||||
constructor(config: RedisConfig) {
|
||||
this.config = config
|
||||
}
|
||||
|
||||
/**
|
||||
* Connect to Redis server.
|
||||
* Configures AOF persistence on successful connection.
|
||||
*/
|
||||
async connect(): Promise<void> {
|
||||
if (this.connected && this.client) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
this.client = new Redis({
|
||||
host: this.config.host,
|
||||
port: this.config.port,
|
||||
db: this.config.db,
|
||||
password: this.config.password,
|
||||
keyPrefix: this.config.keyPrefix,
|
||||
lazyConnect: true,
|
||||
retryStrategy: (times: number): number | null => {
|
||||
if (times > 3) {
|
||||
return null
|
||||
}
|
||||
return Math.min(times * 200, 1000)
|
||||
},
|
||||
maxRetriesPerRequest: 3,
|
||||
enableReadyCheck: true,
|
||||
})
|
||||
|
||||
await this.client.connect()
|
||||
await this.configureAOF()
|
||||
this.connected = true
|
||||
} catch (error) {
|
||||
this.connected = false
|
||||
this.client = null
|
||||
const message = error instanceof Error ? error.message : "Unknown error"
|
||||
throw IpuaroError.redis(`Failed to connect to Redis: ${message}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Disconnect from Redis server.
|
||||
*/
|
||||
async disconnect(): Promise<void> {
|
||||
if (this.client) {
|
||||
await this.client.quit()
|
||||
this.client = null
|
||||
this.connected = false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if connected to Redis.
|
||||
*/
|
||||
isConnected(): boolean {
|
||||
return this.connected && this.client !== null && this.client.status === "ready"
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the underlying Redis client.
|
||||
* @throws IpuaroError if not connected
|
||||
*/
|
||||
getClient(): Redis {
|
||||
if (!this.client || !this.connected) {
|
||||
throw IpuaroError.redis("Redis client is not connected")
|
||||
}
|
||||
return this.client
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a health check ping.
|
||||
*/
|
||||
async ping(): Promise<boolean> {
|
||||
if (!this.client) {
|
||||
return false
|
||||
}
|
||||
try {
|
||||
const result = await this.client.ping()
|
||||
return result === "PONG"
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure AOF (Append Only File) persistence.
|
||||
* AOF provides better durability by logging every write operation.
|
||||
*/
|
||||
private async configureAOF(): Promise<void> {
|
||||
if (!this.client) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
await this.client.config("SET", "appendonly", "yes")
|
||||
await this.client.config("SET", "appendfsync", "everysec")
|
||||
} catch {
|
||||
/*
|
||||
* AOF config may fail if Redis doesn't allow CONFIG SET.
|
||||
* This is non-fatal - persistence will still work with default settings.
|
||||
*/
|
||||
}
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/storage/RedisStorage.ts (new file, 236 lines)
@@ -0,0 +1,236 @@
|
||||
import type { DepsGraph, IStorage, SymbolIndex } from "../../domain/services/IStorage.js"
|
||||
import type { FileAST } from "../../domain/value-objects/FileAST.js"
|
||||
import type { FileData } from "../../domain/value-objects/FileData.js"
|
||||
import type { FileMeta } from "../../domain/value-objects/FileMeta.js"
|
||||
import { IpuaroError } from "../../shared/errors/IpuaroError.js"
|
||||
import { RedisClient } from "./RedisClient.js"
|
||||
import { IndexFields, ProjectKeys } from "./schema.js"
|
||||
|
||||
/**
|
||||
* Redis implementation of IStorage.
|
||||
* Stores project data (files, AST, meta, indexes) in Redis hashes.
|
||||
*/
|
||||
export class RedisStorage implements IStorage {
|
||||
private readonly client: RedisClient
|
||||
private readonly projectName: string
|
||||
|
||||
constructor(client: RedisClient, projectName: string) {
|
||||
this.client = client
|
||||
this.projectName = projectName
|
||||
}
|
||||
|
||||
async getFile(path: string): Promise<FileData | null> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.files(this.projectName), path)
|
||||
if (!data) {
|
||||
return null
|
||||
}
|
||||
return this.parseJSON<FileData>(data, "FileData")
|
||||
}
|
||||
|
||||
async setFile(path: string, data: FileData): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hset(ProjectKeys.files(this.projectName), path, JSON.stringify(data))
|
||||
}
|
||||
|
||||
async deleteFile(path: string): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hdel(ProjectKeys.files(this.projectName), path)
|
||||
}
|
||||
|
||||
async getAllFiles(): Promise<Map<string, FileData>> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hgetall(ProjectKeys.files(this.projectName))
|
||||
const result = new Map<string, FileData>()
|
||||
|
||||
for (const [path, value] of Object.entries(data)) {
|
||||
const parsed = this.parseJSON<FileData>(value, "FileData")
|
||||
if (parsed) {
|
||||
result.set(path, parsed)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
async getFileCount(): Promise<number> {
|
||||
const redis = this.getRedis()
|
||||
return redis.hlen(ProjectKeys.files(this.projectName))
|
||||
}
|
||||
|
||||
async getAST(path: string): Promise<FileAST | null> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.ast(this.projectName), path)
|
||||
if (!data) {
|
||||
return null
|
||||
}
|
||||
return this.parseJSON<FileAST>(data, "FileAST")
|
||||
}
|
||||
|
||||
async setAST(path: string, ast: FileAST): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hset(ProjectKeys.ast(this.projectName), path, JSON.stringify(ast))
|
||||
}
|
||||
|
||||
async deleteAST(path: string): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hdel(ProjectKeys.ast(this.projectName), path)
|
||||
}
|
||||
|
||||
async getAllASTs(): Promise<Map<string, FileAST>> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hgetall(ProjectKeys.ast(this.projectName))
|
||||
const result = new Map<string, FileAST>()
|
||||
|
||||
for (const [path, value] of Object.entries(data)) {
|
||||
const parsed = this.parseJSON<FileAST>(value, "FileAST")
|
||||
if (parsed) {
|
||||
result.set(path, parsed)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
async getMeta(path: string): Promise<FileMeta | null> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.meta(this.projectName), path)
|
||||
if (!data) {
|
||||
return null
|
||||
}
|
||||
return this.parseJSON<FileMeta>(data, "FileMeta")
|
||||
}
|
||||
|
||||
async setMeta(path: string, meta: FileMeta): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hset(ProjectKeys.meta(this.projectName), path, JSON.stringify(meta))
|
||||
}
|
||||
|
||||
async deleteMeta(path: string): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hdel(ProjectKeys.meta(this.projectName), path)
|
||||
}
|
||||
|
||||
async getAllMetas(): Promise<Map<string, FileMeta>> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hgetall(ProjectKeys.meta(this.projectName))
|
||||
const result = new Map<string, FileMeta>()
|
||||
|
||||
for (const [path, value] of Object.entries(data)) {
|
||||
const parsed = this.parseJSON<FileMeta>(value, "FileMeta")
|
||||
if (parsed) {
|
||||
result.set(path, parsed)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
async getSymbolIndex(): Promise<SymbolIndex> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.indexes(this.projectName), IndexFields.symbols)
|
||||
if (!data) {
|
||||
return new Map()
|
||||
}
|
||||
|
||||
const parsed = this.parseJSON<[string, unknown[]][]>(data, "SymbolIndex")
|
||||
if (!parsed) {
|
||||
return new Map()
|
||||
}
|
||||
|
||||
return new Map(parsed) as SymbolIndex
|
||||
}
|
||||
|
||||
async setSymbolIndex(index: SymbolIndex): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
const serialized = JSON.stringify([...index.entries()])
|
||||
await redis.hset(ProjectKeys.indexes(this.projectName), IndexFields.symbols, serialized)
|
||||
}
|
||||
|
||||
async getDepsGraph(): Promise<DepsGraph> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.indexes(this.projectName), IndexFields.depsGraph)
|
||||
if (!data) {
|
||||
return {
|
||||
imports: new Map(),
|
||||
importedBy: new Map(),
|
||||
}
|
||||
}
|
||||
|
||||
const parsed = this.parseJSON<{
|
||||
imports: [string, string[]][]
|
||||
importedBy: [string, string[]][]
|
||||
}>(data, "DepsGraph")
|
||||
|
||||
if (!parsed) {
|
||||
return {
|
||||
imports: new Map(),
|
||||
importedBy: new Map(),
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
imports: new Map(parsed.imports),
|
||||
importedBy: new Map(parsed.importedBy),
|
||||
}
|
||||
}
|
||||
|
||||
async setDepsGraph(graph: DepsGraph): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
const serialized = JSON.stringify({
|
||||
imports: [...graph.imports.entries()],
|
||||
importedBy: [...graph.importedBy.entries()],
|
||||
})
|
||||
await redis.hset(ProjectKeys.indexes(this.projectName), IndexFields.depsGraph, serialized)
|
||||
}
|
||||
|
||||
async getProjectConfig(key: string): Promise<unknown> {
|
||||
const redis = this.getRedis()
|
||||
const data = await redis.hget(ProjectKeys.config(this.projectName), key)
|
||||
if (!data) {
|
||||
return null
|
||||
}
|
||||
return this.parseJSON<unknown>(data, "ProjectConfig")
|
||||
}
|
||||
|
||||
async setProjectConfig(key: string, value: unknown): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await redis.hset(ProjectKeys.config(this.projectName), key, JSON.stringify(value))
|
||||
}
|
||||
|
||||
async connect(): Promise<void> {
|
||||
await this.client.connect()
|
||||
}
|
||||
|
||||
async disconnect(): Promise<void> {
|
||||
await this.client.disconnect()
|
||||
}
|
||||
|
||||
isConnected(): boolean {
|
||||
return this.client.isConnected()
|
||||
}
|
||||
|
||||
async clear(): Promise<void> {
|
||||
const redis = this.getRedis()
|
||||
await Promise.all([
|
||||
redis.del(ProjectKeys.files(this.projectName)),
|
||||
redis.del(ProjectKeys.ast(this.projectName)),
|
||||
redis.del(ProjectKeys.meta(this.projectName)),
|
||||
redis.del(ProjectKeys.indexes(this.projectName)),
|
||||
redis.del(ProjectKeys.config(this.projectName)),
|
||||
])
|
||||
}
|
||||
|
||||
private getRedis(): ReturnType<RedisClient["getClient"]> {
|
||||
return this.client.getClient()
|
||||
}
|
||||
|
||||
private parseJSON<T>(data: string, type: string): T | null {
|
||||
try {
|
||||
return JSON.parse(data) as T
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Unknown error"
|
||||
throw IpuaroError.parse(`Failed to parse ${type}: ${message}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
packages/ipuaro/src/infrastructure/storage/index.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
// Storage module exports
|
||||
export { RedisClient } from "./RedisClient.js"
|
||||
export { RedisStorage } from "./RedisStorage.js"
|
||||
export {
|
||||
ProjectKeys,
|
||||
SessionKeys,
|
||||
IndexFields,
|
||||
SessionFields,
|
||||
generateProjectName,
|
||||
} from "./schema.js"
|
||||
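To show how the new storage pieces fit together, here is a minimal usage sketch based on the exports above and the value shapes exercised in the unit tests below. The connection settings and file contents are placeholders, and top-level await assumes an ESM entry point.

```typescript
import { RedisClient, RedisStorage, generateProjectName } from "@samiyev/ipuaro"

// Placeholder connection settings - adjust for your environment.
const client = new RedisClient({ host: "localhost", port: 6379, db: 0, keyPrefix: "ipuaro:" })
const storage = new RedisStorage(client, generateProjectName("/home/user/projects/myapp"))

await storage.connect()

// FileData shape as used in RedisStorage.test.ts.
await storage.setFile("src/index.ts", {
    lines: ["export const answer = 42"],
    hash: "abc123",
    size: 25,
    lastModified: Date.now(),
})

const file = await storage.getFile("src/index.ts")
console.log(file?.lines)

await storage.disconnect()
```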
packages/ipuaro/src/infrastructure/storage/schema.ts (new file, 95 lines)
@@ -0,0 +1,95 @@
|
||||
/**
|
||||
* Redis key schema for ipuaro data storage.
|
||||
*
|
||||
* Key structure:
|
||||
* - project:{name}:files # Hash<path, FileData>
|
||||
* - project:{name}:ast # Hash<path, FileAST>
|
||||
* - project:{name}:meta # Hash<path, FileMeta>
|
||||
* - project:{name}:indexes # Hash<name, JSON> (symbols, deps_graph)
|
||||
* - project:{name}:config # Hash<key, JSON>
|
||||
*
|
||||
* - session:{id}:data # Hash<field, JSON> (history, context, stats)
|
||||
* - session:{id}:undo # List<UndoEntry> (max 10)
|
||||
* - sessions:list # List<session_id>
|
||||
*
|
||||
* Project name format: {parent-folder}-{project-folder}
|
||||
*/
|
||||
|
||||
/**
|
||||
* Project-related Redis keys.
|
||||
*/
|
||||
export const ProjectKeys = {
|
||||
files: (projectName: string): string => `project:${projectName}:files`,
|
||||
ast: (projectName: string): string => `project:${projectName}:ast`,
|
||||
meta: (projectName: string): string => `project:${projectName}:meta`,
|
||||
indexes: (projectName: string): string => `project:${projectName}:indexes`,
|
||||
config: (projectName: string): string => `project:${projectName}:config`,
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Session-related Redis keys.
|
||||
*/
|
||||
export const SessionKeys = {
|
||||
data: (sessionId: string): string => `session:${sessionId}:data`,
|
||||
undo: (sessionId: string): string => `session:${sessionId}:undo`,
|
||||
list: "sessions:list",
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Index field names within project:indexes hash.
|
||||
*/
|
||||
export const IndexFields = {
|
||||
symbols: "symbols",
|
||||
depsGraph: "deps_graph",
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Session data field names within session:data hash.
|
||||
*/
|
||||
export const SessionFields = {
|
||||
history: "history",
|
||||
context: "context",
|
||||
stats: "stats",
|
||||
inputHistory: "input_history",
|
||||
createdAt: "created_at",
|
||||
lastActivityAt: "last_activity_at",
|
||||
projectName: "project_name",
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Generate project name from path.
|
||||
* Format: {parent-folder}-{project-folder}
|
||||
*
|
||||
* @example
|
||||
* generateProjectName("/home/user/projects/myapp") -> "projects-myapp"
|
||||
* generateProjectName("/app") -> "app"
|
||||
*/
|
||||
export function generateProjectName(projectPath: string): string {
|
||||
const normalized = projectPath.replace(/\\/g, "/").replace(/\/+$/, "")
|
||||
const parts = normalized.split("/").filter(Boolean)
|
||||
|
||||
if (parts.length === 0) {
|
||||
return "root"
|
||||
}
|
||||
|
||||
if (parts.length === 1) {
|
||||
return sanitizeName(parts[0])
|
||||
}
|
||||
|
||||
const projectFolder = sanitizeName(parts[parts.length - 1])
|
||||
const parentFolder = sanitizeName(parts[parts.length - 2])
|
||||
|
||||
return `${parentFolder}-${projectFolder}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize a name for use in Redis keys.
|
||||
* Replaces non-alphanumeric characters with hyphens.
|
||||
*/
|
||||
function sanitizeName(name: string): string {
|
||||
return name
|
||||
.toLowerCase()
|
||||
.replace(/[^a-z0-9-]/g, "-")
|
||||
.replace(/-+/g, "-")
|
||||
.replace(/^-|-$/g, "")
|
||||
}
|
||||
@@ -36,9 +36,7 @@ describe("ChatMessage", () => {
|
||||
})
|
||||
|
||||
it("should create assistant message with tool calls", () => {
|
||||
const toolCalls = [
|
||||
{ id: "1", name: "get_lines", params: {}, timestamp: Date.now() },
|
||||
]
|
||||
const toolCalls = [{ id: "1", name: "get_lines", params: {}, timestamp: Date.now() }]
|
||||
const stats = { tokens: 100, timeMs: 500, toolCalls: 1 }
|
||||
const msg = createAssistantMessage("Response", toolCalls, stats)
|
||||
|
||||
@@ -49,9 +47,7 @@ describe("ChatMessage", () => {
|
||||
|
||||
describe("createToolMessage", () => {
|
||||
it("should create tool message with results", () => {
|
||||
const results = [
|
||||
{ callId: "1", success: true, data: "data", executionTimeMs: 10 },
|
||||
]
|
||||
const results = [{ callId: "1", success: true, data: "data", executionTimeMs: 10 }]
|
||||
const msg = createToolMessage(results)
|
||||
|
||||
expect(msg.role).toBe("tool")
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
import { describe, it, expect } from "vitest"
|
||||
import {
|
||||
createFileData,
|
||||
isFileDataEqual,
|
||||
} from "../../../../src/domain/value-objects/FileData.js"
|
||||
import { createFileData, isFileDataEqual } from "../../../../src/domain/value-objects/FileData.js"
|
||||
|
||||
describe("FileData", () => {
|
||||
describe("createFileData", () => {
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
import { describe, it, expect } from "vitest"
|
||||
import {
|
||||
createFileMeta,
|
||||
isHubFile,
|
||||
} from "../../../../src/domain/value-objects/FileMeta.js"
|
||||
import { createFileMeta, isHubFile } from "../../../../src/domain/value-objects/FileMeta.js"
|
||||
|
||||
describe("FileMeta", () => {
|
||||
describe("createFileMeta", () => {
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
|
||||
import {
|
||||
createUndoEntry,
|
||||
canUndo,
|
||||
} from "../../../../src/domain/value-objects/UndoEntry.js"
|
||||
import { createUndoEntry, canUndo } from "../../../../src/domain/value-objects/UndoEntry.js"
|
||||
|
||||
describe("UndoEntry", () => {
|
||||
beforeEach(() => {
|
||||
@@ -21,7 +18,7 @@ describe("UndoEntry", () => {
|
||||
"test.ts",
|
||||
["old line"],
|
||||
["new line"],
|
||||
"Edit line 1"
|
||||
"Edit line 1",
|
||||
)
|
||||
|
||||
expect(entry.id).toBe("undo-1")
|
||||
@@ -34,14 +31,7 @@ describe("UndoEntry", () => {
|
||||
})
|
||||
|
||||
it("should create undo entry with toolCallId", () => {
|
||||
const entry = createUndoEntry(
|
||||
"undo-2",
|
||||
"test.ts",
|
||||
[],
|
||||
[],
|
||||
"Create file",
|
||||
"tool-123"
|
||||
)
|
||||
const entry = createUndoEntry("undo-2", "test.ts", [], [], "Create file", "tool-123")
|
||||
|
||||
expect(entry.toolCallId).toBe("tool-123")
|
||||
})
|
||||
@@ -49,37 +39,19 @@ describe("UndoEntry", () => {
|
||||
|
||||
describe("canUndo", () => {
|
||||
it("should return true when current content matches newContent", () => {
|
||||
const entry = createUndoEntry(
|
||||
"undo-1",
|
||||
"test.ts",
|
||||
["old"],
|
||||
["new"],
|
||||
"Edit"
|
||||
)
|
||||
const entry = createUndoEntry("undo-1", "test.ts", ["old"], ["new"], "Edit")
|
||||
|
||||
expect(canUndo(entry, ["new"])).toBe(true)
|
||||
})
|
||||
|
||||
it("should return false when content differs", () => {
|
||||
const entry = createUndoEntry(
|
||||
"undo-1",
|
||||
"test.ts",
|
||||
["old"],
|
||||
["new"],
|
||||
"Edit"
|
||||
)
|
||||
const entry = createUndoEntry("undo-1", "test.ts", ["old"], ["new"], "Edit")
|
||||
|
||||
expect(canUndo(entry, ["modified"])).toBe(false)
|
||||
})
|
||||
|
||||
it("should return false when length differs", () => {
|
||||
const entry = createUndoEntry(
|
||||
"undo-1",
|
||||
"test.ts",
|
||||
["old"],
|
||||
["new"],
|
||||
"Edit"
|
||||
)
|
||||
const entry = createUndoEntry("undo-1", "test.ts", ["old"], ["new"], "Edit")
|
||||
|
||||
expect(canUndo(entry, ["new", "extra"])).toBe(false)
|
||||
})
|
||||
|
||||
@@ -0,0 +1,177 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
|
||||
import type { RedisConfig } from "../../../../src/shared/constants/config.js"
|
||||
import { IpuaroError } from "../../../../src/shared/errors/IpuaroError.js"
|
||||
|
||||
const mockRedisInstance = {
|
||||
connect: vi.fn(),
|
||||
quit: vi.fn(),
|
||||
ping: vi.fn(),
|
||||
config: vi.fn(),
|
||||
status: "ready" as string,
|
||||
}
|
||||
|
||||
vi.mock("ioredis", () => {
|
||||
return {
|
||||
Redis: vi.fn(() => mockRedisInstance),
|
||||
}
|
||||
})
|
||||
|
||||
const { RedisClient } = await import("../../../../src/infrastructure/storage/RedisClient.js")
|
||||
|
||||
describe("RedisClient", () => {
|
||||
const defaultConfig: RedisConfig = {
|
||||
host: "localhost",
|
||||
port: 6379,
|
||||
db: 0,
|
||||
keyPrefix: "ipuaro:",
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockRedisInstance.status = "ready"
|
||||
mockRedisInstance.connect.mockResolvedValue(undefined)
|
||||
mockRedisInstance.quit.mockResolvedValue(undefined)
|
||||
mockRedisInstance.ping.mockResolvedValue("PONG")
|
||||
mockRedisInstance.config.mockResolvedValue(undefined)
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks()
|
||||
})
|
||||
|
||||
describe("constructor", () => {
|
||||
it("should create instance with config", () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
expect(client).toBeDefined()
|
||||
expect(client.isConnected()).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("connect", () => {
|
||||
it("should connect to Redis", async () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.connect()
|
||||
|
||||
expect(mockRedisInstance.connect).toHaveBeenCalled()
|
||||
expect(client.isConnected()).toBe(true)
|
||||
})
|
||||
|
||||
it("should configure AOF on connect", async () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.connect()
|
||||
|
||||
expect(mockRedisInstance.config).toHaveBeenCalledWith("SET", "appendonly", "yes")
|
||||
expect(mockRedisInstance.config).toHaveBeenCalledWith("SET", "appendfsync", "everysec")
|
||||
})
|
||||
|
||||
it("should not reconnect if already connected", async () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.connect()
|
||||
await client.connect()
|
||||
|
||||
expect(mockRedisInstance.connect).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it("should throw IpuaroError on connection failure", async () => {
|
||||
mockRedisInstance.connect.mockRejectedValue(new Error("Connection refused"))
|
||||
|
||||
const client = new RedisClient(defaultConfig)
|
||||
|
||||
await expect(client.connect()).rejects.toThrow(IpuaroError)
|
||||
await expect(client.connect()).rejects.toMatchObject({
|
||||
type: "redis",
|
||||
})
|
||||
})
|
||||
|
||||
it("should handle AOF config failure gracefully", async () => {
|
||||
mockRedisInstance.config.mockRejectedValue(new Error("CONFIG disabled"))
|
||||
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.connect()
|
||||
|
||||
expect(client.isConnected()).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe("disconnect", () => {
|
||||
it("should disconnect from Redis", async () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.connect()
|
||||
await client.disconnect()
|
||||
|
||||
expect(mockRedisInstance.quit).toHaveBeenCalled()
|
||||
expect(client.isConnected()).toBe(false)
|
||||
})
|
||||
|
||||
it("should handle disconnect when not connected", async () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.disconnect()
|
||||
|
||||
expect(mockRedisInstance.quit).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe("isConnected", () => {
|
||||
it("should return false when not connected", () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
expect(client.isConnected()).toBe(false)
|
||||
})
|
||||
|
||||
it("should return true when connected and ready", async () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.connect()
|
||||
expect(client.isConnected()).toBe(true)
|
||||
})
|
||||
|
||||
it("should return false when client status is not ready", async () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.connect()
|
||||
mockRedisInstance.status = "connecting"
|
||||
expect(client.isConnected()).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getClient", () => {
|
||||
it("should return Redis client when connected", async () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.connect()
|
||||
|
||||
const redis = client.getClient()
|
||||
expect(redis).toBe(mockRedisInstance)
|
||||
})
|
||||
|
||||
it("should throw when not connected", () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
|
||||
expect(() => client.getClient()).toThrow(IpuaroError)
|
||||
expect(() => client.getClient()).toThrow("not connected")
|
||||
})
|
||||
})
|
||||
|
||||
describe("ping", () => {
|
||||
it("should return true on successful ping", async () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.connect()
|
||||
|
||||
const result = await client.ping()
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
|
||||
it("should return false when not connected", async () => {
|
||||
const client = new RedisClient(defaultConfig)
|
||||
|
||||
const result = await client.ping()
|
||||
expect(result).toBe(false)
|
||||
})
|
||||
|
||||
it("should return false on ping failure", async () => {
|
||||
mockRedisInstance.ping.mockRejectedValue(new Error("Timeout"))
|
||||
|
||||
const client = new RedisClient(defaultConfig)
|
||||
await client.connect()
|
||||
|
||||
const result = await client.ping()
|
||||
expect(result).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,425 @@
|
||||
import { describe, it, expect, vi, beforeEach } from "vitest"
|
||||
import { RedisStorage } from "../../../../src/infrastructure/storage/RedisStorage.js"
|
||||
import { RedisClient } from "../../../../src/infrastructure/storage/RedisClient.js"
|
||||
import type { FileData } from "../../../../src/domain/value-objects/FileData.js"
|
||||
import type { FileAST } from "../../../../src/domain/value-objects/FileAST.js"
|
||||
import type { FileMeta } from "../../../../src/domain/value-objects/FileMeta.js"
|
||||
import type { SymbolIndex, DepsGraph } from "../../../../src/domain/services/IStorage.js"
|
||||
import { IpuaroError } from "../../../../src/shared/errors/IpuaroError.js"
|
||||
|
||||
describe("RedisStorage", () => {
|
||||
const projectName = "test-project"
|
||||
let mockRedis: {
|
||||
hget: ReturnType<typeof vi.fn>
|
||||
hset: ReturnType<typeof vi.fn>
|
||||
hdel: ReturnType<typeof vi.fn>
|
||||
hgetall: ReturnType<typeof vi.fn>
|
||||
hlen: ReturnType<typeof vi.fn>
|
||||
del: ReturnType<typeof vi.fn>
|
||||
}
|
||||
let mockClient: {
|
||||
connect: ReturnType<typeof vi.fn>
|
||||
disconnect: ReturnType<typeof vi.fn>
|
||||
isConnected: ReturnType<typeof vi.fn>
|
||||
getClient: ReturnType<typeof vi.fn>
|
||||
}
|
||||
let storage: RedisStorage
|
||||
|
||||
beforeEach(() => {
|
||||
mockRedis = {
|
||||
hget: vi.fn(),
|
||||
hset: vi.fn(),
|
||||
hdel: vi.fn(),
|
||||
hgetall: vi.fn(),
|
||||
hlen: vi.fn(),
|
||||
del: vi.fn(),
|
||||
}
|
||||
|
||||
mockClient = {
|
||||
connect: vi.fn().mockResolvedValue(undefined),
|
||||
disconnect: vi.fn().mockResolvedValue(undefined),
|
||||
isConnected: vi.fn().mockReturnValue(true),
|
||||
getClient: vi.fn().mockReturnValue(mockRedis),
|
||||
}
|
||||
|
||||
storage = new RedisStorage(mockClient as unknown as RedisClient, projectName)
|
||||
})
|
||||
|
||||
describe("File operations", () => {
|
||||
const testFile: FileData = {
|
||||
lines: ["line1", "line2"],
|
||||
hash: "abc123",
|
||||
size: 100,
|
||||
lastModified: Date.now(),
|
||||
}
|
||||
|
||||
describe("getFile", () => {
|
||||
it("should return file data when exists", async () => {
|
||||
mockRedis.hget.mockResolvedValue(JSON.stringify(testFile))
|
||||
|
||||
const result = await storage.getFile("src/index.ts")
|
||||
|
||||
expect(result).toEqual(testFile)
|
||||
expect(mockRedis.hget).toHaveBeenCalledWith(
|
||||
`project:${projectName}:files`,
|
||||
"src/index.ts",
|
||||
)
|
||||
})
|
||||
|
||||
it("should return null when file not found", async () => {
|
||||
mockRedis.hget.mockResolvedValue(null)
|
||||
|
||||
const result = await storage.getFile("nonexistent.ts")
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
|
||||
it("should throw on invalid JSON", async () => {
|
||||
mockRedis.hget.mockResolvedValue("invalid json")
|
||||
|
||||
await expect(storage.getFile("test.ts")).rejects.toThrow(IpuaroError)
|
||||
})
|
||||
})
|
||||
|
||||
describe("setFile", () => {
|
||||
it("should store file data", async () => {
|
||||
await storage.setFile("src/index.ts", testFile)
|
||||
|
||||
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||
`project:${projectName}:files`,
|
||||
"src/index.ts",
|
||||
JSON.stringify(testFile),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("deleteFile", () => {
|
||||
it("should delete file data", async () => {
|
||||
await storage.deleteFile("src/index.ts")
|
||||
|
||||
expect(mockRedis.hdel).toHaveBeenCalledWith(
|
||||
`project:${projectName}:files`,
|
||||
"src/index.ts",
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getAllFiles", () => {
|
||||
it("should return all files as Map", async () => {
|
||||
mockRedis.hgetall.mockResolvedValue({
|
||||
"src/a.ts": JSON.stringify(testFile),
|
||||
"src/b.ts": JSON.stringify({ ...testFile, hash: "def456" }),
|
||||
})
|
||||
|
||||
const result = await storage.getAllFiles()
|
||||
|
||||
expect(result).toBeInstanceOf(Map)
|
||||
expect(result.size).toBe(2)
|
||||
expect(result.get("src/a.ts")).toEqual(testFile)
|
||||
})
|
||||
|
||||
it("should return empty Map when no files", async () => {
|
||||
mockRedis.hgetall.mockResolvedValue({})
|
||||
|
||||
const result = await storage.getAllFiles()
|
||||
|
||||
expect(result.size).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getFileCount", () => {
|
||||
it("should return file count", async () => {
|
||||
mockRedis.hlen.mockResolvedValue(42)
|
||||
|
||||
const result = await storage.getFileCount()
|
||||
|
||||
expect(result).toBe(42)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("AST operations", () => {
|
||||
const testAST: FileAST = {
|
||||
imports: [],
|
||||
exports: [],
|
||||
functions: [],
|
||||
classes: [],
|
||||
interfaces: [],
|
||||
typeAliases: [],
|
||||
parseError: false,
|
||||
}
|
||||
|
||||
describe("getAST", () => {
|
||||
it("should return AST when exists", async () => {
|
||||
mockRedis.hget.mockResolvedValue(JSON.stringify(testAST))
|
||||
|
||||
const result = await storage.getAST("src/index.ts")
|
||||
|
||||
expect(result).toEqual(testAST)
|
||||
})
|
||||
|
||||
it("should return null when not found", async () => {
|
||||
mockRedis.hget.mockResolvedValue(null)
|
||||
|
||||
const result = await storage.getAST("nonexistent.ts")
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
describe("setAST", () => {
|
||||
it("should store AST", async () => {
|
||||
await storage.setAST("src/index.ts", testAST)
|
||||
|
||||
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||
`project:${projectName}:ast`,
|
||||
"src/index.ts",
|
||||
JSON.stringify(testAST),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("deleteAST", () => {
|
||||
it("should delete AST", async () => {
|
||||
await storage.deleteAST("src/index.ts")
|
||||
|
||||
expect(mockRedis.hdel).toHaveBeenCalledWith(
|
||||
`project:${projectName}:ast`,
|
||||
"src/index.ts",
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getAllASTs", () => {
|
||||
it("should return all ASTs as Map", async () => {
|
||||
mockRedis.hgetall.mockResolvedValue({
|
||||
"src/a.ts": JSON.stringify(testAST),
|
||||
})
|
||||
|
||||
const result = await storage.getAllASTs()
|
||||
|
||||
expect(result).toBeInstanceOf(Map)
|
||||
expect(result.size).toBe(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("Meta operations", () => {
|
||||
const testMeta: FileMeta = {
|
||||
complexity: { loc: 10, nesting: 2, cyclomaticComplexity: 5, score: 20 },
|
||||
dependencies: ["./other.ts"],
|
||||
dependents: [],
|
||||
isHub: false,
|
||||
isEntryPoint: false,
|
||||
fileType: "source",
|
||||
}
|
||||
|
||||
describe("getMeta", () => {
|
||||
it("should return meta when exists", async () => {
|
||||
mockRedis.hget.mockResolvedValue(JSON.stringify(testMeta))
|
||||
|
||||
const result = await storage.getMeta("src/index.ts")
|
||||
|
||||
expect(result).toEqual(testMeta)
|
||||
})
|
||||
|
||||
it("should return null when not found", async () => {
|
||||
mockRedis.hget.mockResolvedValue(null)
|
||||
|
||||
const result = await storage.getMeta("nonexistent.ts")
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
describe("setMeta", () => {
|
||||
it("should store meta", async () => {
|
||||
await storage.setMeta("src/index.ts", testMeta)
|
||||
|
||||
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||
`project:${projectName}:meta`,
|
||||
"src/index.ts",
|
||||
JSON.stringify(testMeta),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("deleteMeta", () => {
|
||||
it("should delete meta", async () => {
|
||||
await storage.deleteMeta("src/index.ts")
|
||||
|
||||
expect(mockRedis.hdel).toHaveBeenCalledWith(
|
||||
`project:${projectName}:meta`,
|
||||
"src/index.ts",
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getAllMetas", () => {
|
||||
it("should return all metas as Map", async () => {
|
||||
mockRedis.hgetall.mockResolvedValue({
|
||||
"src/a.ts": JSON.stringify(testMeta),
|
||||
})
|
||||
|
||||
const result = await storage.getAllMetas()
|
||||
|
||||
expect(result).toBeInstanceOf(Map)
|
||||
expect(result.size).toBe(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("Index operations", () => {
|
||||
describe("getSymbolIndex", () => {
|
||||
it("should return symbol index", async () => {
|
||||
const index: [string, { path: string; line: number; type: string }[]][] = [
|
||||
["MyClass", [{ path: "src/index.ts", line: 10, type: "class" }]],
|
||||
]
|
||||
mockRedis.hget.mockResolvedValue(JSON.stringify(index))
|
||||
|
||||
const result = await storage.getSymbolIndex()
|
||||
|
||||
expect(result).toBeInstanceOf(Map)
|
||||
expect(result.get("MyClass")).toBeDefined()
|
||||
})
|
||||
|
||||
it("should return empty Map when not found", async () => {
|
||||
mockRedis.hget.mockResolvedValue(null)
|
||||
|
||||
const result = await storage.getSymbolIndex()
|
||||
|
||||
expect(result.size).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("setSymbolIndex", () => {
|
||||
it("should store symbol index", async () => {
|
||||
const index: SymbolIndex = new Map([
|
||||
["MyClass", [{ path: "src/index.ts", line: 10, type: "class" }]],
|
||||
])
|
||||
|
||||
await storage.setSymbolIndex(index)
|
||||
|
||||
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||
`project:${projectName}:indexes`,
|
||||
"symbols",
|
||||
expect.any(String),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getDepsGraph", () => {
|
||||
it("should return deps graph", async () => {
|
||||
const graph = {
|
||||
imports: [["a.ts", ["b.ts"]]],
|
||||
importedBy: [["b.ts", ["a.ts"]]],
|
||||
}
|
||||
mockRedis.hget.mockResolvedValue(JSON.stringify(graph))
|
||||
|
||||
const result = await storage.getDepsGraph()
|
||||
|
||||
expect(result.imports).toBeInstanceOf(Map)
|
||||
expect(result.importedBy).toBeInstanceOf(Map)
|
||||
})
|
||||
|
||||
it("should return empty graph when not found", async () => {
|
||||
mockRedis.hget.mockResolvedValue(null)
|
||||
|
||||
const result = await storage.getDepsGraph()
|
||||
|
||||
expect(result.imports.size).toBe(0)
|
||||
expect(result.importedBy.size).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe("setDepsGraph", () => {
|
||||
it("should store deps graph", async () => {
|
||||
const graph: DepsGraph = {
|
||||
imports: new Map([["a.ts", ["b.ts"]]]),
|
||||
importedBy: new Map([["b.ts", ["a.ts"]]]),
|
||||
}
|
||||
|
||||
await storage.setDepsGraph(graph)
|
||||
|
||||
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||
`project:${projectName}:indexes`,
|
||||
"deps_graph",
|
||||
expect.any(String),
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("Config operations", () => {
|
||||
describe("getProjectConfig", () => {
|
||||
it("should return config value", async () => {
|
||||
mockRedis.hget.mockResolvedValue(JSON.stringify({ key: "value" }))
|
||||
|
||||
const result = await storage.getProjectConfig("settings")
|
||||
|
||||
expect(result).toEqual({ key: "value" })
|
||||
})
|
||||
|
||||
it("should return null when not found", async () => {
|
||||
mockRedis.hget.mockResolvedValue(null)
|
||||
|
||||
const result = await storage.getProjectConfig("nonexistent")
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
describe("setProjectConfig", () => {
|
||||
it("should store config value", async () => {
|
||||
await storage.setProjectConfig("settings", { key: "value" })
|
||||
|
||||
expect(mockRedis.hset).toHaveBeenCalledWith(
|
||||
`project:${projectName}:config`,
|
||||
"settings",
|
||||
JSON.stringify({ key: "value" }),
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("Lifecycle operations", () => {
|
||||
describe("connect", () => {
|
||||
it("should delegate to client", async () => {
|
||||
await storage.connect()
|
||||
|
||||
expect(mockClient.connect).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe("disconnect", () => {
|
||||
it("should delegate to client", async () => {
|
||||
await storage.disconnect()
|
||||
|
||||
expect(mockClient.disconnect).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe("isConnected", () => {
|
||||
it("should delegate to client", () => {
|
||||
mockClient.isConnected.mockReturnValue(true)
|
||||
|
||||
expect(storage.isConnected()).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe("clear", () => {
|
||||
it("should delete all project keys", async () => {
|
||||
mockRedis.del.mockResolvedValue(1)
|
||||
|
||||
await storage.clear()
|
||||
|
||||
expect(mockRedis.del).toHaveBeenCalledTimes(5)
|
||||
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:files`)
|
||||
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:ast`)
|
||||
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:meta`)
|
||||
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:indexes`)
|
||||
expect(mockRedis.del).toHaveBeenCalledWith(`project:${projectName}:config`)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
packages/ipuaro/tests/unit/infrastructure/storage/schema.test.ts (new file, 110 lines)
@@ -0,0 +1,110 @@
|
||||
import { describe, it, expect } from "vitest"
|
||||
import {
|
||||
ProjectKeys,
|
||||
SessionKeys,
|
||||
IndexFields,
|
||||
SessionFields,
|
||||
generateProjectName,
|
||||
} from "../../../../src/infrastructure/storage/schema.js"
|
||||
|
||||
describe("schema", () => {
|
||||
describe("ProjectKeys", () => {
|
||||
it("should generate files key", () => {
|
||||
expect(ProjectKeys.files("myproject")).toBe("project:myproject:files")
|
||||
})
|
||||
|
||||
it("should generate ast key", () => {
|
||||
expect(ProjectKeys.ast("myproject")).toBe("project:myproject:ast")
|
||||
})
|
||||
|
||||
it("should generate meta key", () => {
|
||||
expect(ProjectKeys.meta("myproject")).toBe("project:myproject:meta")
|
||||
})
|
||||
|
||||
it("should generate indexes key", () => {
|
||||
expect(ProjectKeys.indexes("myproject")).toBe("project:myproject:indexes")
|
||||
})
|
||||
|
||||
it("should generate config key", () => {
|
||||
expect(ProjectKeys.config("myproject")).toBe("project:myproject:config")
|
||||
})
|
||||
})
|
||||
|
||||
describe("SessionKeys", () => {
|
||||
it("should generate data key", () => {
|
||||
expect(SessionKeys.data("session-123")).toBe("session:session-123:data")
|
||||
})
|
||||
|
||||
it("should generate undo key", () => {
|
||||
expect(SessionKeys.undo("session-123")).toBe("session:session-123:undo")
|
||||
})
|
||||
|
||||
it("should have list key", () => {
|
||||
expect(SessionKeys.list).toBe("sessions:list")
|
||||
})
|
||||
})
|
||||
|
||||
describe("IndexFields", () => {
|
||||
it("should have symbols field", () => {
|
||||
expect(IndexFields.symbols).toBe("symbols")
|
||||
})
|
||||
|
||||
it("should have depsGraph field", () => {
|
||||
expect(IndexFields.depsGraph).toBe("deps_graph")
|
||||
})
|
||||
})
|
||||
|
||||
describe("SessionFields", () => {
|
||||
it("should have all required fields", () => {
|
||||
expect(SessionFields.history).toBe("history")
|
||||
expect(SessionFields.context).toBe("context")
|
||||
expect(SessionFields.stats).toBe("stats")
|
||||
expect(SessionFields.inputHistory).toBe("input_history")
|
||||
expect(SessionFields.createdAt).toBe("created_at")
|
||||
expect(SessionFields.lastActivityAt).toBe("last_activity_at")
|
||||
expect(SessionFields.projectName).toBe("project_name")
|
||||
})
|
||||
})
|
||||
|
||||
describe("generateProjectName", () => {
|
||||
it("should generate name from path with two parts", () => {
|
||||
expect(generateProjectName("/home/user/projects/myapp")).toBe("projects-myapp")
|
||||
})
|
||||
|
||||
it("should generate name from single directory", () => {
|
||||
expect(generateProjectName("/app")).toBe("app")
|
||||
})
|
||||
|
||||
it("should handle root path", () => {
|
||||
expect(generateProjectName("/")).toBe("root")
|
||||
})
|
||||
|
||||
it("should handle empty path", () => {
|
||||
expect(generateProjectName("")).toBe("root")
|
||||
})
|
||||
|
||||
it("should handle trailing slashes", () => {
|
||||
expect(generateProjectName("/home/user/projects/myapp/")).toBe("projects-myapp")
|
||||
})
|
||||
|
||||
it("should handle Windows paths", () => {
|
||||
expect(generateProjectName("C:\\Users\\projects\\myapp")).toBe("projects-myapp")
|
||||
})
|
||||
|
||||
it("should sanitize special characters", () => {
|
||||
expect(generateProjectName("/home/my project/my@app!")).toBe("my-project-my-app")
|
||||
})
|
||||
|
||||
it("should convert to lowercase", () => {
|
||||
expect(generateProjectName("/Home/User/MYAPP")).toBe("user-myapp")
|
||||
})
|
||||
|
||||
it("should handle multiple consecutive special chars", () => {
|
||||
expect(generateProjectName("/home/my___project")).toBe("home-my-project")
|
||||
})
|
||||
|
||||
it("should handle relative paths", () => {
|
||||
expect(generateProjectName("parent/child")).toBe("parent-child")
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,5 +1,9 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"
|
||||
import { loadConfig, validateConfig, getConfigErrors } from "../../../../src/shared/config/loader.js"
|
||||
import {
|
||||
loadConfig,
|
||||
validateConfig,
|
||||
getConfigErrors,
|
||||
} from "../../../../src/shared/config/loader.js"
|
||||
import { DEFAULT_CONFIG } from "../../../../src/shared/constants/config.js"
|
||||
import * as fs from "node:fs"
|
||||
|
||||
@@ -28,7 +32,7 @@ describe("config loader", () => {
|
||||
return path === "/project/.ipuaro.json"
|
||||
})
|
||||
vi.mocked(fs.readFileSync).mockReturnValue(
|
||||
JSON.stringify({ llm: { model: "custom-model" } })
|
||||
JSON.stringify({ llm: { model: "custom-model" } }),
|
||||
)
|
||||
|
||||
const config = loadConfig("/project")
|
||||
|
||||
@@ -8,11 +8,7 @@ export default defineConfig({
|
||||
sourcemap: true,
|
||||
splitting: false,
|
||||
treeshake: true,
|
||||
external: [
|
||||
"tree-sitter",
|
||||
"tree-sitter-typescript",
|
||||
"tree-sitter-javascript",
|
||||
],
|
||||
external: ["tree-sitter", "tree-sitter-typescript", "tree-sitter-javascript"],
|
||||
esbuildOptions(options) {
|
||||
options.jsx = "automatic"
|
||||
},
|
||||
|
||||
@@ -9,11 +9,7 @@ export default defineConfig({
|
||||
provider: "v8",
|
||||
reporter: ["text", "html", "lcov"],
|
||||
include: ["src/**/*.ts", "src/**/*.tsx"],
|
||||
exclude: [
|
||||
"src/**/*.d.ts",
|
||||
"src/**/index.ts",
|
||||
"src/**/*.test.ts",
|
||||
],
|
||||
exclude: ["src/**/*.d.ts", "src/**/index.ts", "src/**/*.test.ts"],
|
||||
thresholds: {
|
||||
lines: 80,
|
||||
functions: 80,
|