refactor: split AnalyzeProject into pipeline components

Split 615-line God Use-Case into focused pipeline components:
- FileCollectionStep.ts (66 lines) - file scanning and basic parsing
- ParsingStep.ts (51 lines) - AST parsing and dependency graph
- DetectionPipeline.ts (371 lines) - all 7 detectors
- ResultAggregator.ts (81 lines) - response DTO builder

Reduced AnalyzeProject.ts from 615 to 245 lines (60% reduction).

All 345 tests pass, no breaking changes.
Improved separation of concerns and testability.

Closes #0.7.5 roadmap task.
Author: imfozilbek
Date: 2025-11-25 16:07:20 +05:00
Parent: 2479bde9a8
Commit: 8a2c6fdc0e

7 changed files with 635 additions and 420 deletions

CHANGELOG.md

@@ -5,7 +5,20 @@ All notable changes to @samiyev/guardian will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-## [0.7.5-beta.1] - 2025-11-25
+## [0.7.5] - 2025-11-25
+
+### Changed
+
+- ♻️ **Refactored AnalyzeProject use-case** - improved maintainability and testability:
+    - Split 615-line God Use-Case into focused pipeline components
+    - Created `FileCollectionStep.ts` for file scanning and basic parsing (66 lines)
+    - Created `ParsingStep.ts` for AST parsing and dependency graph construction (51 lines)
+    - Created `DetectionPipeline.ts` for running all 7 detectors (371 lines)
+    - Created `ResultAggregator.ts` for building response DTO (81 lines)
+    - Reduced `AnalyzeProject.ts` from 615 to 245 lines (60% reduction)
+    - All 345 tests pass, no breaking changes
+    - Improved separation of concerns and single responsibility
+    - Easier to test and modify individual pipeline steps
+
### Added

package.json

@@ -1,6 +1,6 @@
{
    "name": "@samiyev/guardian",
-    "version": "0.7.5-beta.1",
+    "version": "0.7.5",
    "description": "Research-backed code quality guardian for AI-assisted development. Detects hardcodes, circular deps, framework leaks, entity exposure, and 8 architecture violations. Enforces Clean Architecture/DDD principles. Works with GitHub Copilot, Cursor, Windsurf, Claude, ChatGPT, Cline, and any AI coding tool.",
    "keywords": [
        "puaros",

AnalyzeProject.ts

@@ -11,18 +11,17 @@
import { IRepositoryPatternDetector } from "../../domain/services/RepositoryPatternDetectorService"
import { IAggregateBoundaryDetector } from "../../domain/services/IAggregateBoundaryDetector"
import { SourceFile } from "../../domain/entities/SourceFile"
import { DependencyGraph } from "../../domain/entities/DependencyGraph"
-import { ProjectPath } from "../../domain/value-objects/ProjectPath"
+import { FileCollectionStep } from "./pipeline/FileCollectionStep"
+import { ParsingStep } from "./pipeline/ParsingStep"
+import { DetectionPipeline } from "./pipeline/DetectionPipeline"
+import { ResultAggregator } from "./pipeline/ResultAggregator"
import {
    ERROR_MESSAGES,
    HARDCODE_TYPES,
-    LAYERS,
    NAMING_VIOLATION_TYPES,
-    REGEX_PATTERNS,
    REPOSITORY_VIOLATION_TYPES,
    RULES,
-    SEVERITY_ORDER,
    type SeverityLevel,
-    VIOLATION_SEVERITY_MAP,
} from "../../shared/constants"
export interface AnalyzeProjectRequest {
@@ -173,442 +172,74 @@ export interface ProjectMetrics {
/**
 * Main use case for analyzing a project's codebase
+ * Orchestrates the analysis pipeline through focused components
 */
export class AnalyzeProject extends UseCase<
    AnalyzeProjectRequest,
    ResponseDto<AnalyzeProjectResponse>
> {
+    private readonly fileCollectionStep: FileCollectionStep
+    private readonly parsingStep: ParsingStep
+    private readonly detectionPipeline: DetectionPipeline
+    private readonly resultAggregator: ResultAggregator
    constructor(
-        private readonly fileScanner: IFileScanner,
-        private readonly codeParser: ICodeParser,
-        private readonly hardcodeDetector: IHardcodeDetector,
-        private readonly namingConventionDetector: INamingConventionDetector,
-        private readonly frameworkLeakDetector: IFrameworkLeakDetector,
-        private readonly entityExposureDetector: IEntityExposureDetector,
-        private readonly dependencyDirectionDetector: IDependencyDirectionDetector,
-        private readonly repositoryPatternDetector: IRepositoryPatternDetector,
-        private readonly aggregateBoundaryDetector: IAggregateBoundaryDetector,
+        fileScanner: IFileScanner,
+        codeParser: ICodeParser,
+        hardcodeDetector: IHardcodeDetector,
+        namingConventionDetector: INamingConventionDetector,
+        frameworkLeakDetector: IFrameworkLeakDetector,
+        entityExposureDetector: IEntityExposureDetector,
+        dependencyDirectionDetector: IDependencyDirectionDetector,
+        repositoryPatternDetector: IRepositoryPatternDetector,
+        aggregateBoundaryDetector: IAggregateBoundaryDetector,
    ) {
        super()
+        this.fileCollectionStep = new FileCollectionStep(fileScanner)
+        this.parsingStep = new ParsingStep(codeParser)
+        this.detectionPipeline = new DetectionPipeline(
+            hardcodeDetector,
+            namingConventionDetector,
+            frameworkLeakDetector,
+            entityExposureDetector,
+            dependencyDirectionDetector,
+            repositoryPatternDetector,
+            aggregateBoundaryDetector,
+        )
+        this.resultAggregator = new ResultAggregator()
    }
    public async execute(
        request: AnalyzeProjectRequest,
    ): Promise<ResponseDto<AnalyzeProjectResponse>> {
        try {
-            const filePaths = await this.fileScanner.scan({
+            const { sourceFiles } = await this.fileCollectionStep.execute({
                rootDir: request.rootDir,
                include: request.include,
                exclude: request.exclude,
            })
-            const sourceFiles: SourceFile[] = []
-            const dependencyGraph = new DependencyGraph()
-            let totalFunctions = 0
-            for (const filePath of filePaths) {
-                const content = await this.fileScanner.readFile(filePath)
-                const projectPath = ProjectPath.create(filePath, request.rootDir)
-                const imports = this.extractImports(content)
-                const exports = this.extractExports(content)
-                const sourceFile = new SourceFile(projectPath, content, imports, exports)
-                sourceFiles.push(sourceFile)
-                dependencyGraph.addFile(sourceFile)
-                if (projectPath.isTypeScript()) {
-                    const tree = this.codeParser.parseTypeScript(content)
-                    const functions = this.codeParser.extractFunctions(tree)
-                    totalFunctions += functions.length
-                }
-                for (const imp of imports) {
-                    dependencyGraph.addDependency(
-                        projectPath.relative,
-                        this.resolveImportPath(imp, filePath, request.rootDir),
-                    )
-                }
-            }
-            const violations = this.sortBySeverity(this.detectViolations(sourceFiles))
-            const hardcodeViolations = this.sortBySeverity(this.detectHardcode(sourceFiles))
-            const circularDependencyViolations = this.sortBySeverity(
-                this.detectCircularDependencies(dependencyGraph),
-            )
-            const namingViolations = this.sortBySeverity(this.detectNamingConventions(sourceFiles))
-            const frameworkLeakViolations = this.sortBySeverity(
-                this.detectFrameworkLeaks(sourceFiles),
-            )
-            const entityExposureViolations = this.sortBySeverity(
-                this.detectEntityExposures(sourceFiles),
-            )
-            const dependencyDirectionViolations = this.sortBySeverity(
-                this.detectDependencyDirections(sourceFiles),
-            )
-            const repositoryPatternViolations = this.sortBySeverity(
-                this.detectRepositoryPatternViolations(sourceFiles),
-            )
-            const aggregateBoundaryViolations = this.sortBySeverity(
-                this.detectAggregateBoundaryViolations(sourceFiles),
-            )
-            const metrics = this.calculateMetrics(sourceFiles, totalFunctions, dependencyGraph)
-            return ResponseDto.ok({
-                files: sourceFiles,
-                dependencyGraph,
-                violations,
-                hardcodeViolations,
-                circularDependencyViolations,
-                namingViolations,
-                frameworkLeakViolations,
-                entityExposureViolations,
-                dependencyDirectionViolations,
-                repositoryPatternViolations,
-                aggregateBoundaryViolations,
-                metrics,
-            })
+            const { dependencyGraph, totalFunctions } = this.parsingStep.execute({
+                sourceFiles,
+                rootDir: request.rootDir,
+            })
+            const detectionResult = this.detectionPipeline.execute({
+                sourceFiles,
+                dependencyGraph,
+            })
+            const response = this.resultAggregator.execute({
+                sourceFiles,
+                dependencyGraph,
+                totalFunctions,
+                ...detectionResult,
+            })
+            return ResponseDto.ok(response)
        } catch (error) {
            const errorMessage = `${ERROR_MESSAGES.FAILED_TO_ANALYZE}: ${error instanceof Error ? error.message : String(error)}`
            return ResponseDto.fail(errorMessage)
        }
    }
private extractImports(content: string): string[] {
const imports: string[] = []
let match
while ((match = REGEX_PATTERNS.IMPORT_STATEMENT.exec(content)) !== null) {
imports.push(match[1])
}
return imports
}
private extractExports(content: string): string[] {
const exports: string[] = []
let match
while ((match = REGEX_PATTERNS.EXPORT_STATEMENT.exec(content)) !== null) {
exports.push(match[1])
}
return exports
}
private resolveImportPath(importPath: string, _currentFile: string, _rootDir: string): string {
if (importPath.startsWith(".")) {
return importPath
}
return importPath
}
private detectViolations(sourceFiles: SourceFile[]): ArchitectureViolation[] {
const violations: ArchitectureViolation[] = []
const layerRules: Record<string, string[]> = {
[LAYERS.DOMAIN]: [LAYERS.SHARED],
[LAYERS.APPLICATION]: [LAYERS.DOMAIN, LAYERS.SHARED],
[LAYERS.INFRASTRUCTURE]: [LAYERS.DOMAIN, LAYERS.APPLICATION, LAYERS.SHARED],
[LAYERS.SHARED]: [],
}
for (const file of sourceFiles) {
if (!file.layer) {
continue
}
const allowedLayers = layerRules[file.layer]
for (const imp of file.imports) {
const importedLayer = this.detectLayerFromImport(imp)
if (
importedLayer &&
importedLayer !== file.layer &&
!allowedLayers.includes(importedLayer)
) {
violations.push({
rule: RULES.CLEAN_ARCHITECTURE,
message: `Layer "${file.layer}" cannot import from "${importedLayer}"`,
file: file.path.relative,
severity: VIOLATION_SEVERITY_MAP.ARCHITECTURE,
})
}
}
}
return violations
}
private detectLayerFromImport(importPath: string): string | undefined {
const layers = Object.values(LAYERS)
for (const layer of layers) {
if (importPath.toLowerCase().includes(layer)) {
return layer
}
}
return undefined
}
private detectHardcode(sourceFiles: SourceFile[]): HardcodeViolation[] {
const violations: HardcodeViolation[] = []
for (const file of sourceFiles) {
const hardcodedValues = this.hardcodeDetector.detectAll(
file.content,
file.path.relative,
)
for (const hardcoded of hardcodedValues) {
violations.push({
rule: RULES.HARDCODED_VALUE,
type: hardcoded.type,
value: hardcoded.value,
file: file.path.relative,
line: hardcoded.line,
column: hardcoded.column,
context: hardcoded.context,
suggestion: {
constantName: hardcoded.suggestConstantName(),
location: hardcoded.suggestLocation(file.layer),
},
severity: VIOLATION_SEVERITY_MAP.HARDCODE,
})
}
}
return violations
}
private detectCircularDependencies(
dependencyGraph: DependencyGraph,
): CircularDependencyViolation[] {
const violations: CircularDependencyViolation[] = []
const cycles = dependencyGraph.findCycles()
for (const cycle of cycles) {
const cycleChain = [...cycle, cycle[0]].join(" → ")
violations.push({
rule: RULES.CIRCULAR_DEPENDENCY,
message: `Circular dependency detected: ${cycleChain}`,
cycle,
severity: VIOLATION_SEVERITY_MAP.CIRCULAR_DEPENDENCY,
})
}
return violations
}
private detectNamingConventions(sourceFiles: SourceFile[]): NamingConventionViolation[] {
const violations: NamingConventionViolation[] = []
for (const file of sourceFiles) {
const namingViolations = this.namingConventionDetector.detectViolations(
file.path.filename,
file.layer,
file.path.relative,
)
for (const violation of namingViolations) {
violations.push({
rule: RULES.NAMING_CONVENTION,
type: violation.violationType,
fileName: violation.fileName,
layer: violation.layer,
file: violation.filePath,
expected: violation.expected,
actual: violation.actual,
message: violation.getMessage(),
suggestion: violation.suggestion,
severity: VIOLATION_SEVERITY_MAP.NAMING_CONVENTION,
})
}
}
return violations
}
private detectFrameworkLeaks(sourceFiles: SourceFile[]): FrameworkLeakViolation[] {
const violations: FrameworkLeakViolation[] = []
for (const file of sourceFiles) {
const leaks = this.frameworkLeakDetector.detectLeaks(
file.imports,
file.path.relative,
file.layer,
)
for (const leak of leaks) {
violations.push({
rule: RULES.FRAMEWORK_LEAK,
packageName: leak.packageName,
category: leak.category,
categoryDescription: leak.getCategoryDescription(),
file: file.path.relative,
layer: leak.layer,
line: leak.line,
message: leak.getMessage(),
suggestion: leak.getSuggestion(),
severity: VIOLATION_SEVERITY_MAP.FRAMEWORK_LEAK,
})
}
}
return violations
}
private detectEntityExposures(sourceFiles: SourceFile[]): EntityExposureViolation[] {
const violations: EntityExposureViolation[] = []
for (const file of sourceFiles) {
const exposures = this.entityExposureDetector.detectExposures(
file.content,
file.path.relative,
file.layer,
)
for (const exposure of exposures) {
violations.push({
rule: RULES.ENTITY_EXPOSURE,
entityName: exposure.entityName,
returnType: exposure.returnType,
file: file.path.relative,
layer: exposure.layer,
line: exposure.line,
methodName: exposure.methodName,
message: exposure.getMessage(),
suggestion: exposure.getSuggestion(),
severity: VIOLATION_SEVERITY_MAP.ENTITY_EXPOSURE,
})
}
}
return violations
}
private detectDependencyDirections(sourceFiles: SourceFile[]): DependencyDirectionViolation[] {
const violations: DependencyDirectionViolation[] = []
for (const file of sourceFiles) {
const directionViolations = this.dependencyDirectionDetector.detectViolations(
file.content,
file.path.relative,
file.layer,
)
for (const violation of directionViolations) {
violations.push({
rule: RULES.DEPENDENCY_DIRECTION,
fromLayer: violation.fromLayer,
toLayer: violation.toLayer,
importPath: violation.importPath,
file: file.path.relative,
line: violation.line,
message: violation.getMessage(),
suggestion: violation.getSuggestion(),
severity: VIOLATION_SEVERITY_MAP.DEPENDENCY_DIRECTION,
})
}
}
return violations
}
private detectRepositoryPatternViolations(
sourceFiles: SourceFile[],
): RepositoryPatternViolation[] {
const violations: RepositoryPatternViolation[] = []
for (const file of sourceFiles) {
const patternViolations = this.repositoryPatternDetector.detectViolations(
file.content,
file.path.relative,
file.layer,
)
for (const violation of patternViolations) {
violations.push({
rule: RULES.REPOSITORY_PATTERN,
violationType: violation.violationType as
| typeof REPOSITORY_VIOLATION_TYPES.ORM_TYPE_IN_INTERFACE
| typeof REPOSITORY_VIOLATION_TYPES.CONCRETE_REPOSITORY_IN_USE_CASE
| typeof REPOSITORY_VIOLATION_TYPES.NEW_REPOSITORY_IN_USE_CASE
| typeof REPOSITORY_VIOLATION_TYPES.NON_DOMAIN_METHOD_NAME,
file: file.path.relative,
layer: violation.layer,
line: violation.line,
details: violation.details,
message: violation.getMessage(),
suggestion: violation.getSuggestion(),
severity: VIOLATION_SEVERITY_MAP.REPOSITORY_PATTERN,
})
}
}
return violations
}
private detectAggregateBoundaryViolations(
sourceFiles: SourceFile[],
): AggregateBoundaryViolation[] {
const violations: AggregateBoundaryViolation[] = []
for (const file of sourceFiles) {
const boundaryViolations = this.aggregateBoundaryDetector.detectViolations(
file.content,
file.path.relative,
file.layer,
)
for (const violation of boundaryViolations) {
violations.push({
rule: RULES.AGGREGATE_BOUNDARY,
fromAggregate: violation.fromAggregate,
toAggregate: violation.toAggregate,
entityName: violation.entityName,
importPath: violation.importPath,
file: file.path.relative,
line: violation.line,
message: violation.getMessage(),
suggestion: violation.getSuggestion(),
severity: VIOLATION_SEVERITY_MAP.AGGREGATE_BOUNDARY,
})
}
}
return violations
}
private calculateMetrics(
sourceFiles: SourceFile[],
totalFunctions: number,
_dependencyGraph: DependencyGraph,
): ProjectMetrics {
const layerDistribution: Record<string, number> = {}
let totalImports = 0
for (const file of sourceFiles) {
if (file.layer) {
layerDistribution[file.layer] = (layerDistribution[file.layer] || 0) + 1
}
totalImports += file.imports.length
}
return {
totalFiles: sourceFiles.length,
totalFunctions,
totalImports,
layerDistribution,
}
}
private sortBySeverity<T extends { severity: SeverityLevel }>(violations: T[]): T[] {
return violations.sort((a, b) => {
return SEVERITY_ORDER[a.severity] - SEVERITY_ORDER[b.severity]
})
}
}
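Because the refactor keeps the public contract intact (the constructor still takes the same file scanner, code parser, and seven detector ports, and execute() still returns a ResponseDto<AnalyzeProjectResponse>), existing callers need no changes. A minimal wiring sketch follows; every concrete class name in it (NodeFileScanner, TreeSitterCodeParser, the Default* detectors) is a placeholder for whatever implementations the composition root actually provides, not a name from this repository.

// Hypothetical composition-root sketch: the constructor argument order comes from
// the diff above, the concrete class names are placeholders.
const analyzeProject = new AnalyzeProject(
    new NodeFileScanner(),
    new TreeSitterCodeParser(),
    new DefaultHardcodeDetector(),
    new DefaultNamingConventionDetector(),
    new DefaultFrameworkLeakDetector(),
    new DefaultEntityExposureDetector(),
    new DefaultDependencyDirectionDetector(),
    new DefaultRepositoryPatternDetector(),
    new DefaultAggregateBoundaryDetector(),
)

// AnalyzeProjectRequest fields (rootDir, include?, exclude?) as defined above.
const result = await analyzeProject.execute({
    rootDir: process.cwd(),
    include: ["src/**/*.ts"],
    exclude: ["**/*.spec.ts"],
})
// `result` is a ResponseDto<AnalyzeProjectResponse>; how the payload is read back
// depends on the ResponseDto API, which this diff does not show.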

pipeline/DetectionPipeline.ts

@@ -0,0 +1,373 @@
import { IHardcodeDetector } from "../../../domain/services/IHardcodeDetector"
import { INamingConventionDetector } from "../../../domain/services/INamingConventionDetector"
import { IFrameworkLeakDetector } from "../../../domain/services/IFrameworkLeakDetector"
import { IEntityExposureDetector } from "../../../domain/services/IEntityExposureDetector"
import { IDependencyDirectionDetector } from "../../../domain/services/IDependencyDirectionDetector"
import { IRepositoryPatternDetector } from "../../../domain/services/RepositoryPatternDetectorService"
import { IAggregateBoundaryDetector } from "../../../domain/services/IAggregateBoundaryDetector"
import { SourceFile } from "../../../domain/entities/SourceFile"
import { DependencyGraph } from "../../../domain/entities/DependencyGraph"
import {
LAYERS,
REPOSITORY_VIOLATION_TYPES,
RULES,
SEVERITY_ORDER,
type SeverityLevel,
VIOLATION_SEVERITY_MAP,
} from "../../../shared/constants"
import type {
AggregateBoundaryViolation,
ArchitectureViolation,
CircularDependencyViolation,
DependencyDirectionViolation,
EntityExposureViolation,
FrameworkLeakViolation,
HardcodeViolation,
NamingConventionViolation,
RepositoryPatternViolation,
} from "../AnalyzeProject"
export interface DetectionRequest {
sourceFiles: SourceFile[]
dependencyGraph: DependencyGraph
}
export interface DetectionResult {
violations: ArchitectureViolation[]
hardcodeViolations: HardcodeViolation[]
circularDependencyViolations: CircularDependencyViolation[]
namingViolations: NamingConventionViolation[]
frameworkLeakViolations: FrameworkLeakViolation[]
entityExposureViolations: EntityExposureViolation[]
dependencyDirectionViolations: DependencyDirectionViolation[]
repositoryPatternViolations: RepositoryPatternViolation[]
aggregateBoundaryViolations: AggregateBoundaryViolation[]
}
/**
* Pipeline step responsible for running all detectors
*/
export class DetectionPipeline {
constructor(
private readonly hardcodeDetector: IHardcodeDetector,
private readonly namingConventionDetector: INamingConventionDetector,
private readonly frameworkLeakDetector: IFrameworkLeakDetector,
private readonly entityExposureDetector: IEntityExposureDetector,
private readonly dependencyDirectionDetector: IDependencyDirectionDetector,
private readonly repositoryPatternDetector: IRepositoryPatternDetector,
private readonly aggregateBoundaryDetector: IAggregateBoundaryDetector,
) {}
public execute(request: DetectionRequest): DetectionResult {
return {
violations: this.sortBySeverity(this.detectViolations(request.sourceFiles)),
hardcodeViolations: this.sortBySeverity(this.detectHardcode(request.sourceFiles)),
circularDependencyViolations: this.sortBySeverity(
this.detectCircularDependencies(request.dependencyGraph),
),
namingViolations: this.sortBySeverity(
this.detectNamingConventions(request.sourceFiles),
),
frameworkLeakViolations: this.sortBySeverity(
this.detectFrameworkLeaks(request.sourceFiles),
),
entityExposureViolations: this.sortBySeverity(
this.detectEntityExposures(request.sourceFiles),
),
dependencyDirectionViolations: this.sortBySeverity(
this.detectDependencyDirections(request.sourceFiles),
),
repositoryPatternViolations: this.sortBySeverity(
this.detectRepositoryPatternViolations(request.sourceFiles),
),
aggregateBoundaryViolations: this.sortBySeverity(
this.detectAggregateBoundaryViolations(request.sourceFiles),
),
}
}
private detectViolations(sourceFiles: SourceFile[]): ArchitectureViolation[] {
const violations: ArchitectureViolation[] = []
const layerRules: Record<string, string[]> = {
[LAYERS.DOMAIN]: [LAYERS.SHARED],
[LAYERS.APPLICATION]: [LAYERS.DOMAIN, LAYERS.SHARED],
[LAYERS.INFRASTRUCTURE]: [LAYERS.DOMAIN, LAYERS.APPLICATION, LAYERS.SHARED],
[LAYERS.SHARED]: [],
}
for (const file of sourceFiles) {
if (!file.layer) {
continue
}
const allowedLayers = layerRules[file.layer]
for (const imp of file.imports) {
const importedLayer = this.detectLayerFromImport(imp)
if (
importedLayer &&
importedLayer !== file.layer &&
!allowedLayers.includes(importedLayer)
) {
violations.push({
rule: RULES.CLEAN_ARCHITECTURE,
message: `Layer "${file.layer}" cannot import from "${importedLayer}"`,
file: file.path.relative,
severity: VIOLATION_SEVERITY_MAP.ARCHITECTURE,
})
}
}
}
return violations
}
private detectLayerFromImport(importPath: string): string | undefined {
const layers = Object.values(LAYERS)
for (const layer of layers) {
if (importPath.toLowerCase().includes(layer)) {
return layer
}
}
return undefined
}
private detectHardcode(sourceFiles: SourceFile[]): HardcodeViolation[] {
const violations: HardcodeViolation[] = []
for (const file of sourceFiles) {
const hardcodedValues = this.hardcodeDetector.detectAll(
file.content,
file.path.relative,
)
for (const hardcoded of hardcodedValues) {
violations.push({
rule: RULES.HARDCODED_VALUE,
type: hardcoded.type,
value: hardcoded.value,
file: file.path.relative,
line: hardcoded.line,
column: hardcoded.column,
context: hardcoded.context,
suggestion: {
constantName: hardcoded.suggestConstantName(),
location: hardcoded.suggestLocation(file.layer),
},
severity: VIOLATION_SEVERITY_MAP.HARDCODE,
})
}
}
return violations
}
private detectCircularDependencies(
dependencyGraph: DependencyGraph,
): CircularDependencyViolation[] {
const violations: CircularDependencyViolation[] = []
const cycles = dependencyGraph.findCycles()
for (const cycle of cycles) {
const cycleChain = [...cycle, cycle[0]].join(" → ")
violations.push({
rule: RULES.CIRCULAR_DEPENDENCY,
message: `Circular dependency detected: ${cycleChain}`,
cycle,
severity: VIOLATION_SEVERITY_MAP.CIRCULAR_DEPENDENCY,
})
}
return violations
}
private detectNamingConventions(sourceFiles: SourceFile[]): NamingConventionViolation[] {
const violations: NamingConventionViolation[] = []
for (const file of sourceFiles) {
const namingViolations = this.namingConventionDetector.detectViolations(
file.path.filename,
file.layer,
file.path.relative,
)
for (const violation of namingViolations) {
violations.push({
rule: RULES.NAMING_CONVENTION,
type: violation.violationType,
fileName: violation.fileName,
layer: violation.layer,
file: violation.filePath,
expected: violation.expected,
actual: violation.actual,
message: violation.getMessage(),
suggestion: violation.suggestion,
severity: VIOLATION_SEVERITY_MAP.NAMING_CONVENTION,
})
}
}
return violations
}
private detectFrameworkLeaks(sourceFiles: SourceFile[]): FrameworkLeakViolation[] {
const violations: FrameworkLeakViolation[] = []
for (const file of sourceFiles) {
const leaks = this.frameworkLeakDetector.detectLeaks(
file.imports,
file.path.relative,
file.layer,
)
for (const leak of leaks) {
violations.push({
rule: RULES.FRAMEWORK_LEAK,
packageName: leak.packageName,
category: leak.category,
categoryDescription: leak.getCategoryDescription(),
file: file.path.relative,
layer: leak.layer,
line: leak.line,
message: leak.getMessage(),
suggestion: leak.getSuggestion(),
severity: VIOLATION_SEVERITY_MAP.FRAMEWORK_LEAK,
})
}
}
return violations
}
private detectEntityExposures(sourceFiles: SourceFile[]): EntityExposureViolation[] {
const violations: EntityExposureViolation[] = []
for (const file of sourceFiles) {
const exposures = this.entityExposureDetector.detectExposures(
file.content,
file.path.relative,
file.layer,
)
for (const exposure of exposures) {
violations.push({
rule: RULES.ENTITY_EXPOSURE,
entityName: exposure.entityName,
returnType: exposure.returnType,
file: file.path.relative,
layer: exposure.layer,
line: exposure.line,
methodName: exposure.methodName,
message: exposure.getMessage(),
suggestion: exposure.getSuggestion(),
severity: VIOLATION_SEVERITY_MAP.ENTITY_EXPOSURE,
})
}
}
return violations
}
private detectDependencyDirections(sourceFiles: SourceFile[]): DependencyDirectionViolation[] {
const violations: DependencyDirectionViolation[] = []
for (const file of sourceFiles) {
const directionViolations = this.dependencyDirectionDetector.detectViolations(
file.content,
file.path.relative,
file.layer,
)
for (const violation of directionViolations) {
violations.push({
rule: RULES.DEPENDENCY_DIRECTION,
fromLayer: violation.fromLayer,
toLayer: violation.toLayer,
importPath: violation.importPath,
file: file.path.relative,
line: violation.line,
message: violation.getMessage(),
suggestion: violation.getSuggestion(),
severity: VIOLATION_SEVERITY_MAP.DEPENDENCY_DIRECTION,
})
}
}
return violations
}
private detectRepositoryPatternViolations(
sourceFiles: SourceFile[],
): RepositoryPatternViolation[] {
const violations: RepositoryPatternViolation[] = []
for (const file of sourceFiles) {
const patternViolations = this.repositoryPatternDetector.detectViolations(
file.content,
file.path.relative,
file.layer,
)
for (const violation of patternViolations) {
violations.push({
rule: RULES.REPOSITORY_PATTERN,
violationType: violation.violationType as
| typeof REPOSITORY_VIOLATION_TYPES.ORM_TYPE_IN_INTERFACE
| typeof REPOSITORY_VIOLATION_TYPES.CONCRETE_REPOSITORY_IN_USE_CASE
| typeof REPOSITORY_VIOLATION_TYPES.NEW_REPOSITORY_IN_USE_CASE
| typeof REPOSITORY_VIOLATION_TYPES.NON_DOMAIN_METHOD_NAME,
file: file.path.relative,
layer: violation.layer,
line: violation.line,
details: violation.details,
message: violation.getMessage(),
suggestion: violation.getSuggestion(),
severity: VIOLATION_SEVERITY_MAP.REPOSITORY_PATTERN,
})
}
}
return violations
}
private detectAggregateBoundaryViolations(
sourceFiles: SourceFile[],
): AggregateBoundaryViolation[] {
const violations: AggregateBoundaryViolation[] = []
for (const file of sourceFiles) {
const boundaryViolations = this.aggregateBoundaryDetector.detectViolations(
file.content,
file.path.relative,
file.layer,
)
for (const violation of boundaryViolations) {
violations.push({
rule: RULES.AGGREGATE_BOUNDARY,
fromAggregate: violation.fromAggregate,
toAggregate: violation.toAggregate,
entityName: violation.entityName,
importPath: violation.importPath,
file: file.path.relative,
line: violation.line,
message: violation.getMessage(),
suggestion: violation.getSuggestion(),
severity: VIOLATION_SEVERITY_MAP.AGGREGATE_BOUNDARY,
})
}
}
return violations
}
private sortBySeverity<T extends { severity: SeverityLevel }>(violations: T[]): T[] {
return violations.sort((a, b) => {
return SEVERITY_ORDER[a.severity] - SEVERITY_ORDER[b.severity]
})
}
}
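Putting the detectors behind a single DetectionPipeline.execute() call is what makes the "easier to test" claim concrete: the pipeline can be driven with stub detectors and an empty graph, without touching the file scanner or parser. A minimal test sketch follows; the vitest imports and the cast-based stub are assumptions for illustration, and it assumes DependencyGraph.findCycles() returns an empty array for an empty graph.

import { describe, expect, it } from "vitest"
import { DetectionPipeline } from "./DetectionPipeline"
import { DependencyGraph } from "../../../domain/entities/DependencyGraph"
import { IHardcodeDetector } from "../../../domain/services/IHardcodeDetector"
import { INamingConventionDetector } from "../../../domain/services/INamingConventionDetector"
import { IFrameworkLeakDetector } from "../../../domain/services/IFrameworkLeakDetector"
import { IEntityExposureDetector } from "../../../domain/services/IEntityExposureDetector"
import { IDependencyDirectionDetector } from "../../../domain/services/IDependencyDirectionDetector"
import { IRepositoryPatternDetector } from "../../../domain/services/RepositoryPatternDetectorService"
import { IAggregateBoundaryDetector } from "../../../domain/services/IAggregateBoundaryDetector"

// One object that answers every detector method with "no findings"; the cast is a
// sketch-only shortcut instead of seven hand-written stub classes.
const noFindings = {
    detectAll: () => [],
    detectViolations: () => [],
    detectLeaks: () => [],
    detectExposures: () => [],
} as unknown

describe("DetectionPipeline", () => {
    it("returns empty violation lists when nothing is detected", () => {
        const pipeline = new DetectionPipeline(
            noFindings as IHardcodeDetector,
            noFindings as INamingConventionDetector,
            noFindings as IFrameworkLeakDetector,
            noFindings as IEntityExposureDetector,
            noFindings as IDependencyDirectionDetector,
            noFindings as IRepositoryPatternDetector,
            noFindings as IAggregateBoundaryDetector,
        )

        const result = pipeline.execute({
            sourceFiles: [],
            dependencyGraph: new DependencyGraph(),
        })

        expect(result.violations).toEqual([])
        expect(result.hardcodeViolations).toEqual([])
        expect(result.circularDependencyViolations).toEqual([])
    })
})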

pipeline/FileCollectionStep.ts

@@ -0,0 +1,66 @@
import { IFileScanner } from "../../../domain/services/IFileScanner"
import { SourceFile } from "../../../domain/entities/SourceFile"
import { ProjectPath } from "../../../domain/value-objects/ProjectPath"
import { REGEX_PATTERNS } from "../../../shared/constants"
export interface FileCollectionRequest {
rootDir: string
include?: string[]
exclude?: string[]
}
export interface FileCollectionResult {
sourceFiles: SourceFile[]
}
/**
* Pipeline step responsible for file collection and basic parsing
*/
export class FileCollectionStep {
constructor(private readonly fileScanner: IFileScanner) {}
public async execute(request: FileCollectionRequest): Promise<FileCollectionResult> {
const filePaths = await this.fileScanner.scan({
rootDir: request.rootDir,
include: request.include,
exclude: request.exclude,
})
const sourceFiles: SourceFile[] = []
for (const filePath of filePaths) {
const content = await this.fileScanner.readFile(filePath)
const projectPath = ProjectPath.create(filePath, request.rootDir)
const imports = this.extractImports(content)
const exports = this.extractExports(content)
const sourceFile = new SourceFile(projectPath, content, imports, exports)
sourceFiles.push(sourceFile)
}
return { sourceFiles }
}
private extractImports(content: string): string[] {
const imports: string[] = []
let match
while ((match = REGEX_PATTERNS.IMPORT_STATEMENT.exec(content)) !== null) {
imports.push(match[1])
}
return imports
}
private extractExports(content: string): string[] {
const exports: string[] = []
let match
while ((match = REGEX_PATTERNS.EXPORT_STATEMENT.exec(content)) !== null) {
exports.push(match[1])
}
return exports
}
}
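FileCollectionStep depends only on the IFileScanner port, so it can be exercised against an in-memory scanner. The sketch below assumes vitest as the runner and infers the scanner shape (scan() returning paths, readFile() returning content) from the usage above rather than from the actual interface file.

import { describe, expect, it } from "vitest"
import { FileCollectionStep } from "./FileCollectionStep"
import { IFileScanner } from "../../../domain/services/IFileScanner"

// In-memory "file system": path -> file content.
const files: Record<string, string> = {
    "/repo/src/domain/entities/User.ts":
        'import { Email } from "../value-objects/Email"\nexport class User {}\n',
}

// Stub scanner; the method shapes are inferred from FileCollectionStep.execute().
const scanner = {
    scan: async () => Object.keys(files),
    readFile: async (path: string) => files[path],
} as unknown as IFileScanner

describe("FileCollectionStep", () => {
    it("builds one SourceFile per scanned path", async () => {
        const step = new FileCollectionStep(scanner)

        const { sourceFiles } = await step.execute({ rootDir: "/repo" })

        expect(sourceFiles).toHaveLength(1)
        expect(sourceFiles[0].path.relative).toContain("User.ts")
    })
})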

pipeline/ParsingStep.ts

@@ -0,0 +1,51 @@
import { ICodeParser } from "../../../domain/services/ICodeParser"
import { SourceFile } from "../../../domain/entities/SourceFile"
import { DependencyGraph } from "../../../domain/entities/DependencyGraph"
export interface ParsingRequest {
sourceFiles: SourceFile[]
rootDir: string
}
export interface ParsingResult {
dependencyGraph: DependencyGraph
totalFunctions: number
}
/**
* Pipeline step responsible for AST parsing and dependency graph construction
*/
export class ParsingStep {
constructor(private readonly codeParser: ICodeParser) {}
public execute(request: ParsingRequest): ParsingResult {
const dependencyGraph = new DependencyGraph()
let totalFunctions = 0
for (const sourceFile of request.sourceFiles) {
dependencyGraph.addFile(sourceFile)
if (sourceFile.path.isTypeScript()) {
const tree = this.codeParser.parseTypeScript(sourceFile.content)
const functions = this.codeParser.extractFunctions(tree)
totalFunctions += functions.length
}
for (const imp of sourceFile.imports) {
dependencyGraph.addDependency(
sourceFile.path.relative,
this.resolveImportPath(imp, sourceFile.path.relative, request.rootDir),
)
}
}
return { dependencyGraph, totalFunctions }
}
private resolveImportPath(importPath: string, _currentFile: string, _rootDir: string): string {
if (importPath.startsWith(".")) {
return importPath
}
return importPath
}
}
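ParsingStep is similarly isolated behind the ICodeParser port; note that resolveImportPath is currently a pass-through, so graph edges are keyed by the raw import specifiers. A test sketch with a stub parser might look like this (vitest and the inferred ICodeParser shape are assumptions; the stub simply reports two functions per parsed file):

import { describe, expect, it } from "vitest"
import { ParsingStep } from "./ParsingStep"
import { ICodeParser } from "../../../domain/services/ICodeParser"
import { SourceFile } from "../../../domain/entities/SourceFile"
import { ProjectPath } from "../../../domain/value-objects/ProjectPath"

// Stub parser: pretends every TypeScript file contains exactly two functions.
const parser = {
    parseTypeScript: () => ({}),
    extractFunctions: () => [{ name: "a" }, { name: "b" }],
} as unknown as ICodeParser

describe("ParsingStep", () => {
    it("counts functions and registers each file in the dependency graph", () => {
        const file = new SourceFile(
            ProjectPath.create("/repo/src/application/use-cases/Example.ts", "/repo"),
            "export class Example {}",
            ["../domain/Entity"],
            ["Example"],
        )

        const { dependencyGraph, totalFunctions } = new ParsingStep(parser).execute({
            sourceFiles: [file],
            rootDir: "/repo",
        })

        expect(totalFunctions).toBe(2)
        expect(dependencyGraph).toBeDefined()
        // A fuller test would assert the "../domain/Entity" edge through whatever
        // query methods DependencyGraph exposes (not shown in this diff).
    })
})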

pipeline/ResultAggregator.ts

@@ -0,0 +1,81 @@
import { SourceFile } from "../../../domain/entities/SourceFile"
import { DependencyGraph } from "../../../domain/entities/DependencyGraph"
import type {
AggregateBoundaryViolation,
AnalyzeProjectResponse,
ArchitectureViolation,
CircularDependencyViolation,
DependencyDirectionViolation,
EntityExposureViolation,
FrameworkLeakViolation,
HardcodeViolation,
NamingConventionViolation,
ProjectMetrics,
RepositoryPatternViolation,
} from "../AnalyzeProject"
export interface AggregationRequest {
sourceFiles: SourceFile[]
dependencyGraph: DependencyGraph
totalFunctions: number
violations: ArchitectureViolation[]
hardcodeViolations: HardcodeViolation[]
circularDependencyViolations: CircularDependencyViolation[]
namingViolations: NamingConventionViolation[]
frameworkLeakViolations: FrameworkLeakViolation[]
entityExposureViolations: EntityExposureViolation[]
dependencyDirectionViolations: DependencyDirectionViolation[]
repositoryPatternViolations: RepositoryPatternViolation[]
aggregateBoundaryViolations: AggregateBoundaryViolation[]
}
/**
* Pipeline step responsible for building final response DTO
*/
export class ResultAggregator {
public execute(request: AggregationRequest): AnalyzeProjectResponse {
const metrics = this.calculateMetrics(
request.sourceFiles,
request.totalFunctions,
request.dependencyGraph,
)
return {
files: request.sourceFiles,
dependencyGraph: request.dependencyGraph,
violations: request.violations,
hardcodeViolations: request.hardcodeViolations,
circularDependencyViolations: request.circularDependencyViolations,
namingViolations: request.namingViolations,
frameworkLeakViolations: request.frameworkLeakViolations,
entityExposureViolations: request.entityExposureViolations,
dependencyDirectionViolations: request.dependencyDirectionViolations,
repositoryPatternViolations: request.repositoryPatternViolations,
aggregateBoundaryViolations: request.aggregateBoundaryViolations,
metrics,
}
}
private calculateMetrics(
sourceFiles: SourceFile[],
totalFunctions: number,
_dependencyGraph: DependencyGraph,
): ProjectMetrics {
const layerDistribution: Record<string, number> = {}
let totalImports = 0
for (const file of sourceFiles) {
if (file.layer) {
layerDistribution[file.layer] = (layerDistribution[file.layer] || 0) + 1
}
totalImports += file.imports.length
}
return {
totalFiles: sourceFiles.length,
totalFunctions,
totalImports,
layerDistribution,
}
}
}
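ResultAggregator is pure assembly plus the metrics calculation, which reads only layer and imports from each file, so the arithmetic is easy to pin down in a test. In the sketch below (vitest assumed), plain objects are cast to SourceFile because calculateMetrics touches nothing else on the entity.

import { describe, expect, it } from "vitest"
import { ResultAggregator } from "./ResultAggregator"
import { DependencyGraph } from "../../../domain/entities/DependencyGraph"
import { SourceFile } from "../../../domain/entities/SourceFile"

// Sketch-only stand-in: only `layer` and `imports` are read by calculateMetrics.
const fakeFile = (layer: string, imports: string[]): SourceFile =>
    ({ layer, imports } as unknown as SourceFile)

describe("ResultAggregator", () => {
    it("aggregates per-layer file counts and import totals", () => {
        const response = new ResultAggregator().execute({
            sourceFiles: [
                fakeFile("domain", ["a"]),
                fakeFile("domain", []),
                fakeFile("application", ["b", "c"]),
            ],
            dependencyGraph: new DependencyGraph(),
            totalFunctions: 12,
            violations: [],
            hardcodeViolations: [],
            circularDependencyViolations: [],
            namingViolations: [],
            frameworkLeakViolations: [],
            entityExposureViolations: [],
            dependencyDirectionViolations: [],
            repositoryPatternViolations: [],
            aggregateBoundaryViolations: [],
        })

        expect(response.metrics).toEqual({
            totalFiles: 3,
            totalFunctions: 12,
            totalImports: 3,
            layerDistribution: { domain: 2, application: 1 },
        })
    })
})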