mirror of https://github.com/samiyev/puaros.git (synced 2025-12-28 07:16:53 +05:00)
chore(ipuaro): release v0.20.0

Added IndexProject and ExecuteTool use cases:

- IndexProject orchestrates full indexing pipeline
- ExecuteTool manages tool execution with confirmation
- Refactored CLI index and TUI /reindex commands
- Refactored HandleMessage to use ExecuteTool
- Added 19 unit tests for IndexProject
- All 1463 tests passing, 91.58% branch coverage
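For orientation, here is a minimal sketch of the IndexProject surface as it is used in the diff below. Only the call sites and field names (phase, current, total, currentFile, filesScanned, filesParsed, parseErrors, timeMs) appear in the diff; the type and interface names in this sketch are assumptions.

// Hypothetical typing of the IndexProject use case, inferred from its call
// sites in the diff below; names not shown in the diff are assumptions.
type IndexPhase = "scanning" | "parsing" | "analyzing" | "indexing"

interface IndexProgress {
    phase: IndexPhase
    current: number
    total: number
    currentFile?: string
}

interface IndexStats {
    filesScanned: number
    filesParsed: number
    parseErrors: number
    timeMs: number
}

// Usage pattern as seen in the refactored CLI command:
//   const indexProject = new IndexProject(storage, resolvedPath)
//   const stats: IndexStats = await indexProject.execute(resolvedPath, {
//       onProgress: (progress: IndexProgress) => { /* render per-phase progress */ },
//   })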
@@ -3,23 +3,14 @@
 * Indexes project without starting TUI.
 */

import * as fs from "node:fs/promises"
import * as path from "node:path"
import { RedisClient } from "../../infrastructure/storage/RedisClient.js"
import { RedisStorage } from "../../infrastructure/storage/RedisStorage.js"
import { generateProjectName } from "../../infrastructure/storage/schema.js"
import { FileScanner } from "../../infrastructure/indexer/FileScanner.js"
import { ASTParser } from "../../infrastructure/indexer/ASTParser.js"
import { MetaAnalyzer } from "../../infrastructure/indexer/MetaAnalyzer.js"
import { IndexBuilder } from "../../infrastructure/indexer/IndexBuilder.js"
import { createFileData } from "../../domain/value-objects/FileData.js"
import type { FileAST } from "../../domain/value-objects/FileAST.js"
import { IndexProject } from "../../application/use-cases/IndexProject.js"
import { type Config, DEFAULT_CONFIG } from "../../shared/constants/config.js"
import { md5 } from "../../shared/utils/hash.js"
import { checkRedis } from "./onboarding.js"

type Language = "ts" | "tsx" | "js" | "jsx"

/**
 * Result of index command.
 */
@@ -52,7 +43,6 @@ export async function executeIndex(
    const startTime = Date.now()
    const resolvedPath = path.resolve(projectPath)
    const projectName = generateProjectName(resolvedPath)
    const errors: string[] = []

    console.warn(`📁 Indexing project: ${resolvedPath}`)
    console.warn(` Project name: ${projectName}\n`)
@@ -76,142 +66,69 @@ export async function executeIndex(
        await redisClient.connect()

        const storage = new RedisStorage(redisClient, projectName)
        const scanner = new FileScanner({
            onProgress: (progress): void => {
                onProgress?.("scanning", progress.current, progress.total, progress.currentFile)
        const indexProject = new IndexProject(storage, resolvedPath)

        let lastPhase: "scanning" | "parsing" | "analyzing" | "indexing" = "scanning"
        let lastProgress = 0

        const stats = await indexProject.execute(resolvedPath, {
            onProgress: (progress) => {
                if (progress.phase !== lastPhase) {
                    if (lastPhase === "scanning") {
                        console.warn(` Found ${String(progress.total)} files\n`)
                    } else if (lastProgress > 0) {
                        console.warn("")
                    }

                    const phaseLabels = {
                        scanning: "🔍 Scanning files...",
                        parsing: "📝 Parsing files...",
                        analyzing: "📊 Analyzing metadata...",
                        indexing: "🏗️ Building indexes...",
                    }
                    console.warn(phaseLabels[progress.phase])
                    lastPhase = progress.phase
                }

                if (progress.phase === "indexing") {
                    onProgress?.("storing", progress.current, progress.total)
                } else {
                    onProgress?.(
                        progress.phase,
                        progress.current,
                        progress.total,
                        progress.currentFile,
                    )
                }

                if (
                    progress.current % 50 === 0 &&
                    progress.phase !== "scanning" &&
                    progress.phase !== "indexing"
                ) {
                    process.stdout.write(
                        `\r ${progress.phase === "parsing" ? "Parsed" : "Analyzed"} ${String(progress.current)}/${String(progress.total)} files...`,
                    )
                }
                lastProgress = progress.current
            },
        })
        const astParser = new ASTParser()
        const metaAnalyzer = new MetaAnalyzer(resolvedPath)
        const indexBuilder = new IndexBuilder(resolvedPath)

        console.warn("🔍 Scanning files...")
        const files = await scanner.scanAll(resolvedPath)
        console.warn(` Found ${String(files.length)} files\n`)
        const symbolIndex = await storage.getSymbolIndex()
        const durationSec = (stats.timeMs / 1000).toFixed(2)

        if (files.length === 0) {
            console.warn("⚠️ No files found to index.")
            return {
                success: true,
                filesIndexed: 0,
                filesSkipped: 0,
                errors: [],
                duration: Date.now() - startTime,
            }
        }

        console.warn("📝 Parsing files...")
        const allASTs = new Map<string, FileAST>()
        const fileContents = new Map<string, string>()
        let parsed = 0
        let skipped = 0

        for (const file of files) {
            const fullPath = path.join(resolvedPath, file.path)
            const language = getLanguage(file.path)

            if (!language) {
                skipped++
                continue
            }

            try {
                const content = await fs.readFile(fullPath, "utf-8")
                const ast = astParser.parse(content, language)

                if (ast.parseError) {
                    errors.push(
                        `Parse error in ${file.path}: ${ast.parseErrorMessage ?? "unknown"}`,
                    )
                    skipped++
                    continue
                }

                allASTs.set(file.path, ast)
                fileContents.set(file.path, content)
                parsed++

                onProgress?.("parsing", parsed + skipped, files.length, file.path)

                if ((parsed + skipped) % 50 === 0) {
                    process.stdout.write(
                        `\r Parsed ${String(parsed)} files (${String(skipped)} skipped)...`,
                    )
                }
            } catch (error) {
                const message = error instanceof Error ? error.message : String(error)
                errors.push(`Error reading ${file.path}: ${message}`)
                skipped++
            }
        }
        console.warn(`\r Parsed ${String(parsed)} files (${String(skipped)} skipped) \n`)

console.warn("📊 Analyzing metadata...")
|
||||
let analyzed = 0
|
||||
for (const [filePath, ast] of allASTs) {
|
||||
const content = fileContents.get(filePath) ?? ""
|
||||
const meta = metaAnalyzer.analyze(
|
||||
path.join(resolvedPath, filePath),
|
||||
ast,
|
||||
content,
|
||||
allASTs,
|
||||
)
|
||||
|
||||
const fileData = createFileData({
|
||||
lines: content.split("\n"),
|
||||
hash: md5(content),
|
||||
size: content.length,
|
||||
lastModified: Date.now(),
|
||||
})
|
||||
|
||||
await storage.setFile(filePath, fileData)
|
||||
await storage.setAST(filePath, ast)
|
||||
await storage.setMeta(filePath, meta)
|
||||
|
||||
analyzed++
|
||||
onProgress?.("analyzing", analyzed, allASTs.size, filePath)
|
||||
|
||||
if (analyzed % 50 === 0) {
|
||||
process.stdout.write(
|
||||
`\r Analyzed ${String(analyzed)}/${String(allASTs.size)} files...`,
|
||||
)
|
||||
}
|
||||
}
|
||||
console.warn(`\r Analyzed ${String(analyzed)} files \n`)
|
||||
|
||||
console.warn("🏗️ Building indexes...")
|
||||
onProgress?.("storing", 0, 2)
|
||||
const symbolIndex = indexBuilder.buildSymbolIndex(allASTs)
|
||||
const depsGraph = indexBuilder.buildDepsGraph(allASTs)
|
||||
|
||||
await storage.setSymbolIndex(symbolIndex)
|
||||
await storage.setDepsGraph(depsGraph)
|
||||
onProgress?.("storing", 2, 2)
|
||||
|
||||
const duration = Date.now() - startTime
|
||||
const durationSec = (duration / 1000).toFixed(2)
|
||||
|
||||
console.warn(`✅ Indexing complete in ${durationSec}s`)
|
||||
console.warn(` Files indexed: ${String(parsed)}`)
|
||||
console.warn(` Files skipped: ${String(skipped)}`)
|
||||
console.warn(`\n✅ Indexing complete in ${durationSec}s`)
|
||||
console.warn(` Files scanned: ${String(stats.filesScanned)}`)
|
||||
console.warn(` Files parsed: ${String(stats.filesParsed)}`)
|
||||
console.warn(` Parse errors: ${String(stats.parseErrors)}`)
|
||||
console.warn(` Symbols: ${String(symbolIndex.size)}`)
|
||||
|
||||
        if (errors.length > 0) {
            console.warn(`\n⚠️ ${String(errors.length)} errors occurred:`)
            for (const error of errors.slice(0, 5)) {
                console.warn(` - ${error}`)
            }
            if (errors.length > 5) {
                console.warn(` ... and ${String(errors.length - 5)} more`)
            }
        }

        return {
            success: true,
            filesIndexed: parsed,
            filesSkipped: skipped,
            errors,
            duration,
            filesIndexed: stats.filesParsed,
            filesSkipped: stats.filesScanned - stats.filesParsed,
            errors: [],
            duration: stats.timeMs,
        }
    } catch (error) {
        const message = error instanceof Error ? error.message : String(error)
@@ -229,22 +146,3 @@ export async function executeIndex(
        }
    }
}

/**
 * Get language from file extension.
 */
function getLanguage(filePath: string): Language | null {
    const ext = path.extname(filePath).toLowerCase()
    switch (ext) {
        case ".ts":
            return "ts"
        case ".tsx":
            return "tsx"
        case ".js":
            return "js"
        case ".jsx":
            return "jsx"
        default:
            return null
    }
}

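For context, a hedged sketch of how a caller might consume the executeIndex result after this refactor. The result fields (success, filesIndexed, filesSkipped, errors, duration) are taken from the return statements in the diff; the import path, the sample project path, and the exact parameter list of executeIndex are assumptions.

// Hypothetical caller; only the result shape is confirmed by the diff above.
import { executeIndex } from "./index.js"

async function main(): Promise<void> {
    const result = await executeIndex("/path/to/project")
    if (result.success) {
        console.warn(`Indexed ${String(result.filesIndexed)} files (${String(result.filesSkipped)} skipped)`)
        console.warn(`Finished in ${(result.duration / 1000).toFixed(2)}s with ${String(result.errors.length)} errors`)
    }
}

void main()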