Compare commits

..

3 Commits

Author SHA1 Message Date
imfozilbek
294d085ad4 chore(ipuaro): bump version to 0.3.1 2025-11-30 01:50:33 +05:00
imfozilbek
958e4daed5 chore(guardian): bump version to 0.9.4 2025-11-30 01:50:21 +05:00
imfozilbek
6234fbce92 docs: add roadmap workflow instructions 2025-11-30 01:28:44 +05:00
15 changed files with 256 additions and 125 deletions

View File

@@ -447,6 +447,35 @@ Copy and use for each release:
- [ ] Published to npm (if public release) - [ ] Published to npm (if public release)
``` ```
## Working with Roadmap
When the user points to `ROADMAP.md` or asks about the roadmap/next steps:
1. **Read both files together:**
- `packages/<package>/ROADMAP.md` - to understand the planned features and milestones
- `packages/<package>/CHANGELOG.md` - to see what's already implemented
2. **Determine current position:**
- Check the latest version in CHANGELOG.md
- Cross-reference with ROADMAP.md milestones
- Identify which roadmap items are already completed (present in CHANGELOG)
3. **Suggest next steps:**
- Find the first uncompleted item in the current milestone
- Or identify the next milestone if current one is complete
- Present clear "start here" recommendation
**Example workflow:**
```
User: "Let's work on the roadmap" or points to ROADMAP.md
Claude should:
1. Read ROADMAP.md → See milestones v0.1.0, v0.2.0, v0.3.0...
2. Read CHANGELOG.md → See latest release is v0.1.1
3. Compare → v0.1.0 milestone complete, v0.2.0 in progress
4. Report → "v0.1.0 is complete. For v0.2.0, next item is: <feature>"
```
## Common Workflows ## Common Workflows
### Adding a new CLI option ### Adding a new CLI option

View File

@@ -5,6 +5,26 @@ All notable changes to @samiyev/guardian will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.9.4] - 2025-11-30
### Added
- **VERSION export** - Package version is now exported from index.ts, automatically read from package.json
### Changed
- 🔄 **Refactored SecretDetector** - Reduced cyclomatic complexity from 24 to <15:
- Extracted helper methods: `extractByRuleId`, `extractAwsType`, `extractGithubType`, `extractSshType`, `extractSlackType`, `extractByMessage`
- Used lookup arrays for SSH and message type mappings
- 🔄 **Refactored AstNamingTraverser** - Reduced cyclomatic complexity from 17 to <15:
- Replaced if-else chain with Map-based node handlers
- Added `buildNodeHandlers()` method for cleaner architecture
### Quality
- **Zero lint warnings** - All ESLint warnings resolved
- **All 616 tests pass**
## [0.9.2] - 2025-11-27 ## [0.9.2] - 2025-11-27
### Changed ### Changed

View File

@@ -1,6 +1,6 @@
{ {
"name": "@samiyev/guardian", "name": "@samiyev/guardian",
"version": "0.9.3", "version": "0.9.4",
"description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.", "description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, secrets, circular deps, framework leaks, entity exposure, and 9 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
"keywords": [ "keywords": [
"puaros", "puaros",

View File

@@ -215,6 +215,7 @@ export class AnalyzeProject extends UseCase<
private readonly detectionPipeline: ExecuteDetection private readonly detectionPipeline: ExecuteDetection
private readonly resultAggregator: AggregateResults private readonly resultAggregator: AggregateResults
// eslint-disable-next-line max-params
constructor( constructor(
fileScanner: IFileScanner, fileScanner: IFileScanner,
codeParser: ICodeParser, codeParser: ICodeParser,

View File

@@ -56,6 +56,7 @@ export interface DetectionResult {
* Pipeline step responsible for running all detectors * Pipeline step responsible for running all detectors
*/ */
export class ExecuteDetection { export class ExecuteDetection {
// eslint-disable-next-line max-params
constructor( constructor(
private readonly hardcodeDetector: IHardcodeDetector, private readonly hardcodeDetector: IHardcodeDetector,
private readonly namingConventionDetector: INamingConventionDetector, private readonly namingConventionDetector: INamingConventionDetector,

View File

@@ -171,6 +171,7 @@ export class HardcodedValue extends ValueObject<HardcodedValueProps> {
return `${CONSTANT_NAMES.MAGIC_NUMBER}_${String(value)}` return `${CONSTANT_NAMES.MAGIC_NUMBER}_${String(value)}`
} }
// eslint-disable-next-line complexity, max-lines-per-function
private suggestStringConstantName(): string { private suggestStringConstantName(): string {
const value = String(this.props.value) const value = String(this.props.value)
const context = this.props.context.toLowerCase() const context = this.props.context.toLowerCase()

View File

@@ -1,3 +1,7 @@
import pkg from "../package.json"
export const VERSION = pkg.version
export * from "./domain" export * from "./domain"
export * from "./application" export * from "./application"
export * from "./infrastructure" export * from "./infrastructure"

View File

@@ -90,80 +90,98 @@ export class SecretDetector implements ISecretDetector {
} }
private extractSecretType(message: string, ruleId: string): string { private extractSecretType(message: string, ruleId: string): string {
const lowerMessage = message.toLowerCase()
const ruleBasedType = this.extractByRuleId(ruleId, lowerMessage)
if (ruleBasedType) {
return ruleBasedType
}
return this.extractByMessage(lowerMessage)
}
private extractByRuleId(ruleId: string, lowerMessage: string): string | null {
if (ruleId.includes(SECRET_KEYWORDS.AWS)) { if (ruleId.includes(SECRET_KEYWORDS.AWS)) {
if (message.toLowerCase().includes(SECRET_KEYWORDS.ACCESS_KEY)) { return this.extractAwsType(lowerMessage)
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) {
return SECRET_TYPE_NAMES.AWS_SECRET_KEY
}
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
} }
if (ruleId.includes(SECRET_KEYWORDS.GITHUB)) { if (ruleId.includes(SECRET_KEYWORDS.GITHUB)) {
if (message.toLowerCase().includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) { return this.extractGithubType(lowerMessage)
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.OAUTH)) {
return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
}
return SECRET_TYPE_NAMES.GITHUB_TOKEN
} }
if (ruleId.includes(SECRET_KEYWORDS.NPM)) { if (ruleId.includes(SECRET_KEYWORDS.NPM)) {
return SECRET_TYPE_NAMES.NPM_TOKEN return SECRET_TYPE_NAMES.NPM_TOKEN
} }
if (ruleId.includes(SECRET_KEYWORDS.GCP) || ruleId.includes(SECRET_KEYWORDS.GOOGLE)) { if (ruleId.includes(SECRET_KEYWORDS.GCP) || ruleId.includes(SECRET_KEYWORDS.GOOGLE)) {
return SECRET_TYPE_NAMES.GCP_SERVICE_ACCOUNT_KEY return SECRET_TYPE_NAMES.GCP_SERVICE_ACCOUNT_KEY
} }
if (ruleId.includes(SECRET_KEYWORDS.PRIVATEKEY) || ruleId.includes(SECRET_KEYWORDS.SSH)) { if (ruleId.includes(SECRET_KEYWORDS.PRIVATEKEY) || ruleId.includes(SECRET_KEYWORDS.SSH)) {
if (message.toLowerCase().includes(SECRET_KEYWORDS.RSA)) { return this.extractSshType(lowerMessage)
return SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.DSA)) {
return SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.ECDSA)) {
return SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.ED25519)) {
return SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY
}
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
} }
if (ruleId.includes(SECRET_KEYWORDS.SLACK)) { if (ruleId.includes(SECRET_KEYWORDS.SLACK)) {
if (message.toLowerCase().includes(SECRET_KEYWORDS.BOT)) { return this.extractSlackType(lowerMessage)
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.USER)) {
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
}
return SECRET_TYPE_NAMES.SLACK_TOKEN
} }
if (ruleId.includes(SECRET_KEYWORDS.BASICAUTH)) { if (ruleId.includes(SECRET_KEYWORDS.BASICAUTH)) {
return SECRET_TYPE_NAMES.BASIC_AUTH_CREDENTIALS return SECRET_TYPE_NAMES.BASIC_AUTH_CREDENTIALS
} }
return null
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.API_KEY)) { private extractAwsType(lowerMessage: string): string {
return SECRET_TYPE_NAMES.API_KEY if (lowerMessage.includes(SECRET_KEYWORDS.ACCESS_KEY)) {
return SECRET_TYPE_NAMES.AWS_ACCESS_KEY
} }
if (lowerMessage.includes(SECRET_KEYWORDS.SECRET)) {
if (message.toLowerCase().includes(SECRET_KEYWORDS.TOKEN)) { return SECRET_TYPE_NAMES.AWS_SECRET_KEY
return SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN
} }
return SECRET_TYPE_NAMES.AWS_CREDENTIAL
}
if (message.toLowerCase().includes(SECRET_KEYWORDS.PASSWORD)) { private extractGithubType(lowerMessage: string): string {
return SECRET_TYPE_NAMES.PASSWORD if (lowerMessage.includes(SECRET_KEYWORDS.PERSONAL_ACCESS_TOKEN)) {
return SECRET_TYPE_NAMES.GITHUB_PERSONAL_ACCESS_TOKEN
} }
if (lowerMessage.includes(SECRET_KEYWORDS.OAUTH)) {
if (message.toLowerCase().includes(SECRET_KEYWORDS.SECRET)) { return SECRET_TYPE_NAMES.GITHUB_OAUTH_TOKEN
return SECRET_TYPE_NAMES.SECRET
} }
return SECRET_TYPE_NAMES.GITHUB_TOKEN
}
private extractSshType(lowerMessage: string): string {
const sshTypeMap: [string, string][] = [
[SECRET_KEYWORDS.RSA, SECRET_TYPE_NAMES.SSH_RSA_PRIVATE_KEY],
[SECRET_KEYWORDS.DSA, SECRET_TYPE_NAMES.SSH_DSA_PRIVATE_KEY],
[SECRET_KEYWORDS.ECDSA, SECRET_TYPE_NAMES.SSH_ECDSA_PRIVATE_KEY],
[SECRET_KEYWORDS.ED25519, SECRET_TYPE_NAMES.SSH_ED25519_PRIVATE_KEY],
]
for (const [keyword, typeName] of sshTypeMap) {
if (lowerMessage.includes(keyword)) {
return typeName
}
}
return SECRET_TYPE_NAMES.SSH_PRIVATE_KEY
}
private extractSlackType(lowerMessage: string): string {
if (lowerMessage.includes(SECRET_KEYWORDS.BOT)) {
return SECRET_TYPE_NAMES.SLACK_BOT_TOKEN
}
if (lowerMessage.includes(SECRET_KEYWORDS.USER)) {
return SECRET_TYPE_NAMES.SLACK_USER_TOKEN
}
return SECRET_TYPE_NAMES.SLACK_TOKEN
}
private extractByMessage(lowerMessage: string): string {
const messageTypeMap: [string, string][] = [
[SECRET_KEYWORDS.API_KEY, SECRET_TYPE_NAMES.API_KEY],
[SECRET_KEYWORDS.TOKEN, SECRET_TYPE_NAMES.AUTHENTICATION_TOKEN],
[SECRET_KEYWORDS.PASSWORD, SECRET_TYPE_NAMES.PASSWORD],
[SECRET_KEYWORDS.SECRET, SECRET_TYPE_NAMES.SECRET],
]
for (const [keyword, typeName] of messageTypeMap) {
if (lowerMessage.includes(keyword)) {
return typeName
}
}
return SECRET_TYPE_NAMES.SENSITIVE_DATA return SECRET_TYPE_NAMES.SENSITIVE_DATA
} }
} }

View File

@@ -6,6 +6,13 @@ import { AstFunctionNameAnalyzer } from "./AstFunctionNameAnalyzer"
import { AstInterfaceNameAnalyzer } from "./AstInterfaceNameAnalyzer" import { AstInterfaceNameAnalyzer } from "./AstInterfaceNameAnalyzer"
import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer" import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
type NodeAnalyzer = (
node: Parser.SyntaxNode,
layer: string,
filePath: string,
lines: string[],
) => NamingViolation | null
/** /**
* AST tree traverser for detecting naming convention violations * AST tree traverser for detecting naming convention violations
* *
@@ -13,12 +20,16 @@ import { AstVariableNameAnalyzer } from "./AstVariableNameAnalyzer"
* to detect naming violations in classes, interfaces, functions, and variables. * to detect naming violations in classes, interfaces, functions, and variables.
*/ */
export class AstNamingTraverser { export class AstNamingTraverser {
private readonly nodeHandlers: Map<string, NodeAnalyzer>
constructor( constructor(
private readonly classAnalyzer: AstClassNameAnalyzer, private readonly classAnalyzer: AstClassNameAnalyzer,
private readonly interfaceAnalyzer: AstInterfaceNameAnalyzer, private readonly interfaceAnalyzer: AstInterfaceNameAnalyzer,
private readonly functionAnalyzer: AstFunctionNameAnalyzer, private readonly functionAnalyzer: AstFunctionNameAnalyzer,
private readonly variableAnalyzer: AstVariableNameAnalyzer, private readonly variableAnalyzer: AstVariableNameAnalyzer,
) {} ) {
this.nodeHandlers = this.buildNodeHandlers()
}
/** /**
* Traverses the AST tree and collects naming violations * Traverses the AST tree and collects naming violations
@@ -38,6 +49,33 @@ export class AstNamingTraverser {
return results return results
} }
private buildNodeHandlers(): Map<string, NodeAnalyzer> {
const handlers = new Map<string, NodeAnalyzer>()
handlers.set(AST_CLASS_TYPES.CLASS_DECLARATION, (node, layer, filePath, lines) =>
this.classAnalyzer.analyze(node, layer, filePath, lines),
)
handlers.set(AST_CLASS_TYPES.INTERFACE_DECLARATION, (node, layer, filePath, lines) =>
this.interfaceAnalyzer.analyze(node, layer, filePath, lines),
)
const functionHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
this.functionAnalyzer.analyze(node, layer, filePath, lines)
handlers.set(AST_FUNCTION_TYPES.FUNCTION_DECLARATION, functionHandler)
handlers.set(AST_FUNCTION_TYPES.METHOD_DEFINITION, functionHandler)
handlers.set(AST_FUNCTION_TYPES.FUNCTION_SIGNATURE, functionHandler)
const variableHandler: NodeAnalyzer = (node, layer, filePath, lines) =>
this.variableAnalyzer.analyze(node, layer, filePath, lines)
handlers.set(AST_VARIABLE_TYPES.VARIABLE_DECLARATOR, variableHandler)
handlers.set(AST_VARIABLE_TYPES.REQUIRED_PARAMETER, variableHandler)
handlers.set(AST_VARIABLE_TYPES.OPTIONAL_PARAMETER, variableHandler)
handlers.set(AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION, variableHandler)
handlers.set(AST_VARIABLE_TYPES.PROPERTY_SIGNATURE, variableHandler)
return handlers
}
/** /**
* Recursively visits AST nodes * Recursively visits AST nodes
*/ */
@@ -49,34 +87,10 @@ export class AstNamingTraverser {
results: NamingViolation[], results: NamingViolation[],
): void { ): void {
const node = cursor.currentNode const node = cursor.currentNode
const handler = this.nodeHandlers.get(node.type)
if (node.type === AST_CLASS_TYPES.CLASS_DECLARATION) { if (handler) {
const violation = this.classAnalyzer.analyze(node, layer, filePath, lines) const violation = handler(node, layer, filePath, lines)
if (violation) {
results.push(violation)
}
} else if (node.type === AST_CLASS_TYPES.INTERFACE_DECLARATION) {
const violation = this.interfaceAnalyzer.analyze(node, layer, filePath, lines)
if (violation) {
results.push(violation)
}
} else if (
node.type === AST_FUNCTION_TYPES.FUNCTION_DECLARATION ||
node.type === AST_FUNCTION_TYPES.METHOD_DEFINITION ||
node.type === AST_FUNCTION_TYPES.FUNCTION_SIGNATURE
) {
const violation = this.functionAnalyzer.analyze(node, layer, filePath, lines)
if (violation) {
results.push(violation)
}
} else if (
node.type === AST_VARIABLE_TYPES.VARIABLE_DECLARATOR ||
node.type === AST_VARIABLE_TYPES.REQUIRED_PARAMETER ||
node.type === AST_VARIABLE_TYPES.OPTIONAL_PARAMETER ||
node.type === AST_VARIABLE_TYPES.PUBLIC_FIELD_DEFINITION ||
node.type === AST_VARIABLE_TYPES.PROPERTY_SIGNATURE
) {
const violation = this.variableAnalyzer.analyze(node, layer, filePath, lines)
if (violation) { if (violation) {
results.push(violation) results.push(violation)
} }

View File

@@ -5,6 +5,24 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.3.1] - 2025-11-30
### Added
- **VERSION export** - Package version is now exported from index.ts, automatically read from package.json via `createRequire`
### Changed
- 🔄 **Refactored ASTParser** - Reduced complexity and nesting depth:
- Extracted `extractClassHeritage()`, `parseHeritageClause()`, `findTypeIdentifier()`, `collectImplements()` helper methods
- Max nesting depth reduced from 5 to 4
- 🔄 **Refactored RedisStorage** - Removed unnecessary type parameter from `parseJSON()` method
### Quality
- **Zero lint warnings** - All ESLint warnings resolved
- **All 321 tests pass**
## [0.3.0] - 2025-11-30 - Indexer ## [0.3.0] - 2025-11-30 - Indexer
### Added ### Added

View File

@@ -1,6 +1,6 @@
{ {
"name": "@samiyev/ipuaro", "name": "@samiyev/ipuaro",
"version": "0.3.0", "version": "0.3.1",
"description": "Local AI agent for codebase operations with infinite context feeling", "description": "Local AI agent for codebase operations with infinite context feeling",
"author": "Fozilbek Samiyev <fozilbek.samiyev@gmail.com>", "author": "Fozilbek Samiyev <fozilbek.samiyev@gmail.com>",
"license": "MIT", "license": "MIT",

View File

@@ -4,6 +4,11 @@
* Main entry point for the library. * Main entry point for the library.
*/ */
import { createRequire } from "node:module"
const require = createRequire(import.meta.url)
const pkg = require("../package.json") as { version: string }
// Domain exports // Domain exports
export * from "./domain/index.js" export * from "./domain/index.js"
@@ -17,4 +22,4 @@ export * from "./shared/index.js"
export * from "./infrastructure/index.js" export * from "./infrastructure/index.js"
// Version // Version
export const VERSION = "0.2.0" export const VERSION = pkg.version

View File

@@ -306,38 +306,7 @@ export class ASTParser {
} }
} }
let extendsName: string | undefined const { extendsName, implementsList } = this.extractClassHeritage(node)
const implementsList: string[] = []
for (const child of node.children) {
if (child.type === NodeType.CLASS_HERITAGE) {
for (const clause of child.children) {
if (clause.type === NodeType.EXTENDS_CLAUSE) {
const typeNode = clause.children.find(
(c) =>
c.type === NodeType.TYPE_IDENTIFIER ||
c.type === NodeType.IDENTIFIER,
)
extendsName = typeNode?.text
} else if (clause.type === NodeType.IMPLEMENTS_CLAUSE) {
for (const impl of clause.children) {
if (
impl.type === NodeType.TYPE_IDENTIFIER ||
impl.type === NodeType.IDENTIFIER
) {
implementsList.push(impl.text)
}
}
}
}
} else if (child.type === NodeType.EXTENDS_CLAUSE) {
const typeNode = child.children.find(
(c) => c.type === NodeType.TYPE_IDENTIFIER || c.type === NodeType.IDENTIFIER,
)
extendsName = typeNode?.text
}
}
const isAbstract = node.children.some((c) => c.type === NodeType.ABSTRACT) const isAbstract = node.children.some((c) => c.type === NodeType.ABSTRACT)
ast.classes.push({ ast.classes.push({
@@ -353,6 +322,56 @@ export class ASTParser {
}) })
} }
private extractClassHeritage(node: SyntaxNode): {
extendsName: string | undefined
implementsList: string[]
} {
let extendsName: string | undefined
const implementsList: string[] = []
for (const child of node.children) {
if (child.type === NodeType.CLASS_HERITAGE) {
this.parseHeritageClause(child, (ext) => (extendsName = ext), implementsList)
} else if (child.type === NodeType.EXTENDS_CLAUSE) {
extendsName = this.findTypeIdentifier(child)
}
}
return { extendsName, implementsList }
}
private parseHeritageClause(
heritage: SyntaxNode,
setExtends: (name: string) => void,
implementsList: string[],
): void {
for (const clause of heritage.children) {
if (clause.type === NodeType.EXTENDS_CLAUSE) {
const typeId = this.findTypeIdentifier(clause)
if (typeId) {
setExtends(typeId)
}
} else if (clause.type === NodeType.IMPLEMENTS_CLAUSE) {
this.collectImplements(clause, implementsList)
}
}
}
private findTypeIdentifier(node: SyntaxNode): string | undefined {
const typeNode = node.children.find(
(c) => c.type === NodeType.TYPE_IDENTIFIER || c.type === NodeType.IDENTIFIER,
)
return typeNode?.text
}
private collectImplements(clause: SyntaxNode, list: string[]): void {
for (const impl of clause.children) {
if (impl.type === NodeType.TYPE_IDENTIFIER || impl.type === NodeType.IDENTIFIER) {
list.push(impl.text)
}
}
}
private extractMethod(node: SyntaxNode): MethodInfo { private extractMethod(node: SyntaxNode): MethodInfo {
const nameNode = node.childForFieldName(FieldName.NAME) const nameNode = node.childForFieldName(FieldName.NAME)
const params = this.extractParameters(node) const params = this.extractParameters(node)

View File

@@ -25,7 +25,7 @@ export class RedisStorage implements IStorage {
if (!data) { if (!data) {
return null return null
} }
return this.parseJSON<FileData>(data, "FileData") return this.parseJSON(data, "FileData") as FileData
} }
async setFile(path: string, data: FileData): Promise<void> { async setFile(path: string, data: FileData): Promise<void> {
@@ -44,7 +44,7 @@ export class RedisStorage implements IStorage {
const result = new Map<string, FileData>() const result = new Map<string, FileData>()
for (const [path, value] of Object.entries(data)) { for (const [path, value] of Object.entries(data)) {
const parsed = this.parseJSON<FileData>(value, "FileData") const parsed = this.parseJSON(value, "FileData") as FileData | null
if (parsed) { if (parsed) {
result.set(path, parsed) result.set(path, parsed)
} }
@@ -64,7 +64,7 @@ export class RedisStorage implements IStorage {
if (!data) { if (!data) {
return null return null
} }
return this.parseJSON<FileAST>(data, "FileAST") return this.parseJSON(data, "FileAST") as FileAST
} }
async setAST(path: string, ast: FileAST): Promise<void> { async setAST(path: string, ast: FileAST): Promise<void> {
@@ -83,7 +83,7 @@ export class RedisStorage implements IStorage {
const result = new Map<string, FileAST>() const result = new Map<string, FileAST>()
for (const [path, value] of Object.entries(data)) { for (const [path, value] of Object.entries(data)) {
const parsed = this.parseJSON<FileAST>(value, "FileAST") const parsed = this.parseJSON(value, "FileAST") as FileAST | null
if (parsed) { if (parsed) {
result.set(path, parsed) result.set(path, parsed)
} }
@@ -98,7 +98,7 @@ export class RedisStorage implements IStorage {
if (!data) { if (!data) {
return null return null
} }
return this.parseJSON<FileMeta>(data, "FileMeta") return this.parseJSON(data, "FileMeta") as FileMeta
} }
async setMeta(path: string, meta: FileMeta): Promise<void> { async setMeta(path: string, meta: FileMeta): Promise<void> {
@@ -117,7 +117,7 @@ export class RedisStorage implements IStorage {
const result = new Map<string, FileMeta>() const result = new Map<string, FileMeta>()
for (const [path, value] of Object.entries(data)) { for (const [path, value] of Object.entries(data)) {
const parsed = this.parseJSON<FileMeta>(value, "FileMeta") const parsed = this.parseJSON(value, "FileMeta") as FileMeta | null
if (parsed) { if (parsed) {
result.set(path, parsed) result.set(path, parsed)
} }
@@ -133,7 +133,7 @@ export class RedisStorage implements IStorage {
return new Map() return new Map()
} }
const parsed = this.parseJSON<[string, unknown[]][]>(data, "SymbolIndex") const parsed = this.parseJSON(data, "SymbolIndex") as [string, unknown[]][] | null
if (!parsed) { if (!parsed) {
return new Map() return new Map()
} }
@@ -157,10 +157,10 @@ export class RedisStorage implements IStorage {
} }
} }
const parsed = this.parseJSON<{ const parsed = this.parseJSON(data, "DepsGraph") as {
imports: [string, string[]][] imports: [string, string[]][]
importedBy: [string, string[]][] importedBy: [string, string[]][]
}>(data, "DepsGraph") } | null
if (!parsed) { if (!parsed) {
return { return {
@@ -190,7 +190,7 @@ export class RedisStorage implements IStorage {
if (!data) { if (!data) {
return null return null
} }
return this.parseJSON<unknown>(data, "ProjectConfig") return this.parseJSON(data, "ProjectConfig")
} }
async setProjectConfig(key: string, value: unknown): Promise<void> { async setProjectConfig(key: string, value: unknown): Promise<void> {
@@ -225,9 +225,9 @@ export class RedisStorage implements IStorage {
return this.client.getClient() return this.client.getClient()
} }
private parseJSON<T>(data: string, type: string): T | null { private parseJSON(data: string, type: string): unknown {
try { try {
return JSON.parse(data) as T return JSON.parse(data) as unknown
} catch (error) { } catch (error) {
const message = error instanceof Error ? error.message : "Unknown error" const message = error instanceof Error ? error.message : "Unknown error"
throw IpuaroError.parse(`Failed to parse ${type}: ${message}`) throw IpuaroError.parse(`Failed to parse ${type}: ${message}`)

View File

@@ -11,6 +11,7 @@
"declarationMap": true, "declarationMap": true,
"sourceMap": true, "sourceMap": true,
"strict": true, "strict": true,
"resolveJsonModule": true,
"skipLibCheck": true, "skipLibCheck": true,
"esModuleInterop": true, "esModuleInterop": true,
"resolvePackageJsonExports": true, "resolvePackageJsonExports": true,