takt: github-issue-259-debug-takt (#312)

This commit is contained in:
nrs 2026-02-19 16:57:24 +09:00 committed by GitHub
parent 5f4ad753d8
commit 99aa22d250
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
23 changed files with 2326 additions and 18 deletions

View File

@ -0,0 +1,111 @@
/**
* Tests for analytics CLI command logic: metrics review and purge.
*
* Tests the command action logic by calling the underlying functions
* with appropriate parameters, verifying the integration between
* config loading, eventsDir resolution, and the analytics functions.
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { mkdirSync, rmSync, writeFileSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import {
computeReviewMetrics,
formatReviewMetrics,
parseSinceDuration,
purgeOldEvents,
} from '../features/analytics/index.js';
import type { ReviewFindingEvent } from '../features/analytics/index.js';
describe('metrics review command logic', () => {
  let eventsDir: string;
  beforeEach(() => {
    // Fresh temp directory per test so JSONL fixtures never leak between cases.
    eventsDir = join(tmpdir(), `takt-test-cli-metrics-${Date.now()}`);
    mkdirSync(eventsDir, { recursive: true });
  });
  afterEach(() => {
    rmSync(eventsDir, { recursive: true, force: true });
  });
  it('should compute and format metrics from resolved eventsDir', () => {
    const events: ReviewFindingEvent[] = [
      {
        type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'r-1',
        severity: 'error', decision: 'reject', file: 'a.ts', line: 1, iteration: 1,
        runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
      },
    ];
    writeFileSync(
      join(eventsDir, '2026-02-18.jsonl'),
      events.map((e) => JSON.stringify(e)).join('\n') + '\n',
      'utf-8',
    );
    // Note: the previous version also called parseSinceDuration('30d') here and
    // discarded the result; the duration/window logic is covered by the next test.
    const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
    const result = computeReviewMetrics(eventsDir, sinceMs);
    const output = formatReviewMetrics(result);
    expect(output).toContain('Review Metrics');
    expect(result.rejectCountsByRule.get('r-1')).toBe(1);
  });
  it('should parse since duration and compute correct time window', () => {
    const durationMs = parseSinceDuration('7d');
    const now = new Date('2026-02-18T12:00:00Z').getTime();
    const sinceMs = now - durationMs;
    expect(sinceMs).toBe(new Date('2026-02-11T12:00:00Z').getTime());
  });
});
describe('purge command logic', () => {
  let eventsDir: string;
  // Drops a minimal JSONL fixture named after the given date into eventsDir.
  const touch = (fileName: string): void => {
    writeFileSync(join(eventsDir, fileName), '{}', 'utf-8');
  };
  beforeEach(() => {
    eventsDir = join(tmpdir(), `takt-test-cli-purge-${Date.now()}`);
    mkdirSync(eventsDir, { recursive: true });
  });
  afterEach(() => {
    rmSync(eventsDir, { recursive: true, force: true });
  });
  it('should purge files using eventsDir from config and retentionDays from config', () => {
    touch('2025-12-01.jsonl');
    touch('2026-02-18.jsonl');
    const removed = purgeOldEvents(eventsDir, 30, new Date('2026-02-18T12:00:00Z'));
    expect(removed).toContain('2025-12-01.jsonl');
    expect(removed).not.toContain('2026-02-18.jsonl');
  });
  it('should fallback to CLI retentionDays when config has no retentionDays', () => {
    touch('2025-01-01.jsonl');
    // Config value absent → the CLI-provided value wins via ??.
    const fromCli = parseInt('30', 10);
    const fromConfig = undefined;
    const effectiveRetention = fromConfig ?? fromCli;
    const removed = purgeOldEvents(eventsDir, effectiveRetention, new Date('2026-02-18T12:00:00Z'));
    expect(removed).toContain('2025-01-01.jsonl');
  });
  it('should use config retentionDays when specified', () => {
    touch('2026-02-10.jsonl');
    touch('2026-02-18.jsonl');
    // Config value present → it takes precedence over the CLI value.
    const fromCli = parseInt('30', 10);
    const fromConfig = 5;
    const effectiveRetention = fromConfig ?? fromCli;
    const removed = purgeOldEvents(eventsDir, effectiveRetention, new Date('2026-02-18T12:00:00Z'));
    expect(removed).toContain('2026-02-10.jsonl');
    expect(removed).not.toContain('2026-02-18.jsonl');
  });
});

View File

@ -0,0 +1,132 @@
/**
* Tests for analytics event type definitions.
*
* Validates that event objects conform to the expected shape.
*/
import { describe, it, expect } from 'vitest';
import type {
ReviewFindingEvent,
FixActionEvent,
MovementResultEvent,
AnalyticsEvent,
} from '../features/analytics/index.js';
describe('analytics event types', () => {
  it('should create a valid ReviewFindingEvent', () => {
    const finding: ReviewFindingEvent = {
      type: 'review_finding',
      findingId: 'f-001',
      status: 'new',
      ruleId: 'no-console-log',
      severity: 'warning',
      decision: 'reject',
      file: 'src/main.ts',
      line: 42,
      iteration: 1,
      runId: 'run-abc',
      timestamp: '2026-02-18T10:00:00.000Z',
    };
    expect(finding.type).toBe('review_finding');
    expect(finding.findingId).toBe('f-001');
    expect(finding.status).toBe('new');
    expect(finding.severity).toBe('warning');
    expect(finding.decision).toBe('reject');
    expect(finding.file).toBe('src/main.ts');
    expect(finding.line).toBe(42);
  });
  it('should create a valid FixActionEvent with fixed action', () => {
    const fixed: FixActionEvent = {
      type: 'fix_action',
      findingId: 'f-001',
      action: 'fixed',
      iteration: 2,
      runId: 'run-abc',
      timestamp: '2026-02-18T10:01:00.000Z',
    };
    expect(fixed.type).toBe('fix_action');
    expect(fixed.action).toBe('fixed');
    expect(fixed.findingId).toBe('f-001');
  });
  it('should create a valid FixActionEvent with rebutted action', () => {
    const rebutted: FixActionEvent = {
      type: 'fix_action',
      findingId: 'f-002',
      action: 'rebutted',
      iteration: 3,
      runId: 'run-abc',
      timestamp: '2026-02-18T10:02:00.000Z',
    };
    expect(rebutted.type).toBe('fix_action');
    expect(rebutted.action).toBe('rebutted');
    expect(rebutted.findingId).toBe('f-002');
  });
  it('should create a valid MovementResultEvent', () => {
    const movement: MovementResultEvent = {
      type: 'movement_result',
      movement: 'implement',
      provider: 'claude',
      model: 'sonnet',
      decisionTag: 'approved',
      iteration: 3,
      runId: 'run-abc',
      timestamp: '2026-02-18T10:02:00.000Z',
    };
    expect(movement.type).toBe('movement_result');
    expect(movement.movement).toBe('implement');
    expect(movement.provider).toBe('claude');
    expect(movement.decisionTag).toBe('approved');
  });
  it('should discriminate event types via the type field', () => {
    // One event of each variant; the `type` literal is the discriminant.
    const events: AnalyticsEvent[] = [
      {
        type: 'review_finding',
        findingId: 'f-001',
        status: 'new',
        ruleId: 'r-1',
        severity: 'error',
        decision: 'reject',
        file: 'a.ts',
        line: 1,
        iteration: 1,
        runId: 'r',
        timestamp: '2026-01-01T00:00:00.000Z',
      },
      {
        type: 'fix_action',
        findingId: 'f-001',
        action: 'fixed',
        iteration: 2,
        runId: 'r',
        timestamp: '2026-01-01T00:01:00.000Z',
      },
      {
        type: 'movement_result',
        movement: 'plan',
        provider: 'claude',
        model: 'opus',
        decisionTag: 'done',
        iteration: 1,
        runId: 'r',
        timestamp: '2026-01-01T00:02:00.000Z',
      },
    ];
    // Narrowing by the discriminant should yield exactly one event per variant.
    const ofType = (t: AnalyticsEvent['type']): AnalyticsEvent[] =>
      events.filter((e) => e.type === t);
    expect(ofType('review_finding')).toHaveLength(1);
    expect(ofType('fix_action')).toHaveLength(1);
    expect(ofType('movement_result')).toHaveLength(1);
  });
});

View File

@ -0,0 +1,344 @@
/**
* Tests for analytics metrics computation.
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { mkdirSync, rmSync, writeFileSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import {
computeReviewMetrics,
formatReviewMetrics,
parseSinceDuration,
} from '../features/analytics/index.js';
import type {
ReviewFindingEvent,
FixActionEvent,
MovementResultEvent,
} from '../features/analytics/index.js';
describe('analytics metrics', () => {
  let eventsDir: string;
  beforeEach(() => {
    // Unique temp directory per test; Date.now() avoids collisions between runs.
    eventsDir = join(tmpdir(), `takt-test-analytics-metrics-${Date.now()}`);
    mkdirSync(eventsDir, { recursive: true });
  });
  afterEach(() => {
    rmSync(eventsDir, { recursive: true, force: true });
  });
  // Serializes events as JSONL (one JSON object per line, trailing newline)
  // into `<eventsDir>/<date>.jsonl`, matching the writer's file-per-day layout.
  function writeEvents(date: string, events: Array<ReviewFindingEvent | FixActionEvent | MovementResultEvent>): void {
    const lines = events.map((e) => JSON.stringify(e)).join('\n') + '\n';
    writeFileSync(join(eventsDir, `${date}.jsonl`), lines, 'utf-8');
  }
  describe('computeReviewMetrics', () => {
    it('should return empty metrics when no events exist', () => {
      const sinceMs = new Date('2026-01-01T00:00:00Z').getTime();
      const metrics = computeReviewMetrics(eventsDir, sinceMs);
      // An empty events dir must yield zeroed metrics, not errors.
      expect(metrics.reReportCounts.size).toBe(0);
      expect(metrics.roundTripRatio).toBe(0);
      expect(metrics.averageResolutionIterations).toBe(0);
      expect(metrics.rejectCountsByRule.size).toBe(0);
      expect(metrics.rebuttalResolvedRatio).toBe(0);
    });
    it('should return empty metrics when directory does not exist', () => {
      const nonExistent = join(eventsDir, 'does-not-exist');
      const sinceMs = new Date('2026-01-01T00:00:00Z').getTime();
      const metrics = computeReviewMetrics(nonExistent, sinceMs);
      expect(metrics.reReportCounts.size).toBe(0);
    });
    it('should compute re-report counts for findings appearing 2+ times', () => {
      // f-001 is reported twice (new, then persists); f-002 only once.
      const events: ReviewFindingEvent[] = [
        {
          type: 'review_finding',
          findingId: 'f-001',
          status: 'new',
          ruleId: 'r-1',
          severity: 'error',
          decision: 'reject',
          file: 'a.ts',
          line: 1,
          iteration: 1,
          runId: 'run-1',
          timestamp: '2026-02-18T10:00:00.000Z',
        },
        {
          type: 'review_finding',
          findingId: 'f-001',
          status: 'persists',
          ruleId: 'r-1',
          severity: 'error',
          decision: 'reject',
          file: 'a.ts',
          line: 1,
          iteration: 3,
          runId: 'run-1',
          timestamp: '2026-02-18T11:00:00.000Z',
        },
        {
          type: 'review_finding',
          findingId: 'f-002',
          status: 'new',
          ruleId: 'r-2',
          severity: 'warning',
          decision: 'approve',
          file: 'b.ts',
          line: 5,
          iteration: 1,
          runId: 'run-1',
          timestamp: '2026-02-18T10:01:00.000Z',
        },
      ];
      writeEvents('2026-02-18', events);
      const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
      const metrics = computeReviewMetrics(eventsDir, sinceMs);
      // Only f-001 crossed the 2+ threshold; its count is the total appearances.
      expect(metrics.reReportCounts.size).toBe(1);
      expect(metrics.reReportCounts.get('f-001')).toBe(2);
    });
    it('should compute round-trip ratio correctly', () => {
      const events: ReviewFindingEvent[] = [
        // f-001: appears in iterations 1 and 3 → multi-iteration
        {
          type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'r-1', severity: 'error',
          decision: 'reject', file: 'a.ts', line: 1, iteration: 1, runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'f-001', status: 'persists', ruleId: 'r-1', severity: 'error',
          decision: 'reject', file: 'a.ts', line: 1, iteration: 3, runId: 'r', timestamp: '2026-02-18T11:00:00.000Z',
        },
        // f-002: appears only in iteration 1 → single-iteration
        {
          type: 'review_finding', findingId: 'f-002', status: 'new', ruleId: 'r-2', severity: 'warning',
          decision: 'approve', file: 'b.ts', line: 5, iteration: 1, runId: 'r', timestamp: '2026-02-18T10:01:00.000Z',
        },
      ];
      writeEvents('2026-02-18', events);
      const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
      const metrics = computeReviewMetrics(eventsDir, sinceMs);
      // 1 out of 2 unique findings had multi-iteration → 50%
      expect(metrics.roundTripRatio).toBe(0.5);
    });
    it('should compute average resolution iterations', () => {
      const events: ReviewFindingEvent[] = [
        // f-001: first in iteration 1, resolved in iteration 3 → 3 iterations
        {
          type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'r-1', severity: 'error',
          decision: 'reject', file: 'a.ts', line: 1, iteration: 1, runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'f-001', status: 'resolved', ruleId: 'r-1', severity: 'error',
          decision: 'approve', file: 'a.ts', line: 1, iteration: 3, runId: 'r', timestamp: '2026-02-18T12:00:00.000Z',
        },
        // f-002: first in iteration 2, resolved in iteration 2 → 1 iteration
        {
          type: 'review_finding', findingId: 'f-002', status: 'new', ruleId: 'r-2', severity: 'warning',
          decision: 'reject', file: 'b.ts', line: 5, iteration: 2, runId: 'r', timestamp: '2026-02-18T11:00:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'f-002', status: 'resolved', ruleId: 'r-2', severity: 'warning',
          decision: 'approve', file: 'b.ts', line: 5, iteration: 2, runId: 'r', timestamp: '2026-02-18T11:30:00.000Z',
        },
      ];
      writeEvents('2026-02-18', events);
      const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
      const metrics = computeReviewMetrics(eventsDir, sinceMs);
      // (3 + 1) / 2 = 2.0
      expect(metrics.averageResolutionIterations).toBe(2);
    });
    it('should compute reject counts by rule', () => {
      // Three rejects (two 'no-any', one 'no-console') plus one approve that
      // must NOT be counted even though its rule matches.
      const events: ReviewFindingEvent[] = [
        {
          type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'no-any',
          severity: 'error', decision: 'reject', file: 'a.ts', line: 1, iteration: 1,
          runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'f-002', status: 'new', ruleId: 'no-any',
          severity: 'error', decision: 'reject', file: 'b.ts', line: 2, iteration: 1,
          runId: 'r', timestamp: '2026-02-18T10:01:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'f-003', status: 'new', ruleId: 'no-console',
          severity: 'warning', decision: 'reject', file: 'c.ts', line: 3, iteration: 1,
          runId: 'r', timestamp: '2026-02-18T10:02:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'f-004', status: 'new', ruleId: 'no-any',
          severity: 'error', decision: 'approve', file: 'd.ts', line: 4, iteration: 2,
          runId: 'r', timestamp: '2026-02-18T10:03:00.000Z',
        },
      ];
      writeEvents('2026-02-18', events);
      const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
      const metrics = computeReviewMetrics(eventsDir, sinceMs);
      expect(metrics.rejectCountsByRule.get('no-any')).toBe(2);
      expect(metrics.rejectCountsByRule.get('no-console')).toBe(1);
    });
    it('should compute rebuttal resolved ratio', () => {
      const events: Array<ReviewFindingEvent | FixActionEvent> = [
        // f-001: rebutted, then resolved → counts toward resolved
        {
          type: 'fix_action', findingId: 'AA-NEW-f001', action: 'rebutted',
          iteration: 2, runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'AA-NEW-f001', status: 'resolved', ruleId: 'r-1',
          severity: 'warning', decision: 'approve', file: 'a.ts', line: 1,
          iteration: 3, runId: 'r', timestamp: '2026-02-18T11:00:00.000Z',
        },
        // f-002: rebutted, never resolved → not counted
        {
          type: 'fix_action', findingId: 'AA-NEW-f002', action: 'rebutted',
          iteration: 2, runId: 'r', timestamp: '2026-02-18T10:01:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'AA-NEW-f002', status: 'persists', ruleId: 'r-2',
          severity: 'error', decision: 'reject', file: 'b.ts', line: 5,
          iteration: 3, runId: 'r', timestamp: '2026-02-18T11:01:00.000Z',
        },
        // f-003: fixed (not rebutted), resolved → does not affect rebuttal metric
        {
          type: 'fix_action', findingId: 'AA-NEW-f003', action: 'fixed',
          iteration: 2, runId: 'r', timestamp: '2026-02-18T10:02:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'AA-NEW-f003', status: 'resolved', ruleId: 'r-3',
          severity: 'warning', decision: 'approve', file: 'c.ts', line: 10,
          iteration: 3, runId: 'r', timestamp: '2026-02-18T11:02:00.000Z',
        },
      ];
      writeEvents('2026-02-18', events);
      const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
      const metrics = computeReviewMetrics(eventsDir, sinceMs);
      // 1 out of 2 rebutted findings was resolved → 50%
      expect(metrics.rebuttalResolvedRatio).toBe(0.5);
    });
    it('should return 0 rebuttal resolved ratio when no rebutted events exist', () => {
      // Only a plain reject event — the denominator is 0, so the ratio must be
      // reported as 0 rather than NaN.
      const events: ReviewFindingEvent[] = [
        {
          type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'r-1',
          severity: 'error', decision: 'reject', file: 'a.ts', line: 1, iteration: 1,
          runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
        },
      ];
      writeEvents('2026-02-18', events);
      const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
      const metrics = computeReviewMetrics(eventsDir, sinceMs);
      expect(metrics.rebuttalResolvedRatio).toBe(0);
    });
    it('should only include events after the since timestamp', () => {
      const events: ReviewFindingEvent[] = [
        {
          type: 'review_finding', findingId: 'f-old', status: 'new', ruleId: 'r-1',
          severity: 'error', decision: 'reject', file: 'old.ts', line: 1, iteration: 1,
          runId: 'r', timestamp: '2026-02-10T10:00:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'f-new', status: 'new', ruleId: 'r-1',
          severity: 'error', decision: 'reject', file: 'new.ts', line: 1, iteration: 1,
          runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
        },
      ];
      // Write each event to its own dated file, matching each event's timestamp.
      writeEvents('2026-02-10', [events[0]]);
      writeEvents('2026-02-18', [events[1]]);
      // Since Feb 15 — should only include f-new
      const sinceMs = new Date('2026-02-15T00:00:00Z').getTime();
      const metrics = computeReviewMetrics(eventsDir, sinceMs);
      expect(metrics.rejectCountsByRule.get('r-1')).toBe(1);
    });
  });
  describe('formatReviewMetrics', () => {
    it('should format empty metrics', () => {
      // Formatting zeroed metrics must still render every section header.
      const metrics = computeReviewMetrics(eventsDir, 0);
      const output = formatReviewMetrics(metrics);
      expect(output).toContain('=== Review Metrics ===');
      expect(output).toContain('(none)');
      expect(output).toContain('Round-trip ratio');
      expect(output).toContain('Average resolution iterations');
      expect(output).toContain('Rebuttal');
    });
    it('should format metrics with data', () => {
      // f-001 is reported twice under rule r-1, so both the per-finding and
      // per-rule lines should show a count of 2.
      const events: ReviewFindingEvent[] = [
        {
          type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'r-1',
          severity: 'error', decision: 'reject', file: 'a.ts', line: 1, iteration: 1,
          runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
        },
        {
          type: 'review_finding', findingId: 'f-001', status: 'persists', ruleId: 'r-1',
          severity: 'error', decision: 'reject', file: 'a.ts', line: 1, iteration: 3,
          runId: 'r', timestamp: '2026-02-18T11:00:00.000Z',
        },
      ];
      writeEvents('2026-02-18', events);
      const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
      const metrics = computeReviewMetrics(eventsDir, sinceMs);
      const output = formatReviewMetrics(metrics);
      expect(output).toContain('f-001: 2');
      expect(output).toContain('r-1: 2');
    });
  });
  describe('parseSinceDuration', () => {
    it('should parse "7d" to 7 days in milliseconds', () => {
      const ms = parseSinceDuration('7d');
      expect(ms).toBe(7 * 24 * 60 * 60 * 1000);
    });
    it('should parse "30d" to 30 days in milliseconds', () => {
      const ms = parseSinceDuration('30d');
      expect(ms).toBe(30 * 24 * 60 * 60 * 1000);
    });
    it('should parse "1d" to 1 day in milliseconds', () => {
      const ms = parseSinceDuration('1d');
      expect(ms).toBe(24 * 60 * 60 * 1000);
    });
    it('should throw on invalid format', () => {
      // Only the "<integer>d" form is accepted; hours, words, and empty reject.
      expect(() => parseSinceDuration('7h')).toThrow('Invalid duration format');
      expect(() => parseSinceDuration('abc')).toThrow('Invalid duration format');
      expect(() => parseSinceDuration('')).toThrow('Invalid duration format');
    });
  });
});

View File

@ -0,0 +1,205 @@
/**
* Tests for analytics integration in pieceExecution.
*
* Validates the analytics initialization logic (analytics.enabled gate)
* and event firing for review_finding and fix_action events.
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { mkdirSync, rmSync, readFileSync, existsSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { resetAnalyticsWriter } from '../features/analytics/writer.js';
import {
initAnalyticsWriter,
isAnalyticsEnabled,
writeAnalyticsEvent,
} from '../features/analytics/index.js';
import type {
MovementResultEvent,
ReviewFindingEvent,
FixActionEvent,
} from '../features/analytics/index.js';
describe('pieceExecution analytics initialization', () => {
  let testDir: string;
  beforeEach(() => {
    resetAnalyticsWriter();
    testDir = join(tmpdir(), `takt-test-analytics-init-${Date.now()}`);
    mkdirSync(testDir, { recursive: true });
  });
  afterEach(() => {
    resetAnalyticsWriter();
    rmSync(testDir, { recursive: true, force: true });
  });
  it('should enable analytics when analytics.enabled=true', () => {
    initAnalyticsWriter(true, testDir);
    expect(isAnalyticsEnabled()).toBe(true);
  });
  it('should disable analytics when analytics.enabled=false', () => {
    initAnalyticsWriter(false, testDir);
    expect(isAnalyticsEnabled()).toBe(false);
  });
  it('should disable analytics when analytics is undefined', () => {
    // Mirrors the config gate: a missing analytics section resolves to false.
    const analyticsConfig = undefined;
    const gateResult = analyticsConfig?.enabled === true;
    initAnalyticsWriter(gateResult, testDir);
    expect(isAnalyticsEnabled()).toBe(false);
  });
});
describe('movement_result event assembly', () => {
  let testDir: string;
  beforeEach(() => {
    resetAnalyticsWriter();
    testDir = join(tmpdir(), `takt-test-mvt-result-${Date.now()}`);
    mkdirSync(testDir, { recursive: true });
  });
  afterEach(() => {
    resetAnalyticsWriter();
    rmSync(testDir, { recursive: true, force: true });
  });
  it('should write movement_result event with correct fields', () => {
    initAnalyticsWriter(true, testDir);
    const movementEvent: MovementResultEvent = {
      type: 'movement_result',
      movement: 'ai_review',
      provider: 'claude',
      model: 'sonnet',
      decisionTag: 'REJECT',
      iteration: 3,
      runId: 'test-run',
      timestamp: '2026-02-18T10:00:00.000Z',
    };
    writeAnalyticsEvent(movementEvent);
    // The JSONL file name is derived from the date part of the timestamp.
    const filePath = join(testDir, '2026-02-18.jsonl');
    expect(existsSync(filePath)).toBe(true);
    const roundTripped = JSON.parse(readFileSync(filePath, 'utf-8').trim()) as MovementResultEvent;
    expect(roundTripped.type).toBe('movement_result');
    expect(roundTripped.movement).toBe('ai_review');
    expect(roundTripped.decisionTag).toBe('REJECT');
    expect(roundTripped.iteration).toBe(3);
    expect(roundTripped.runId).toBe('test-run');
  });
});
describe('review_finding event writing', () => {
  let testDir: string;
  beforeEach(() => {
    resetAnalyticsWriter();
    testDir = join(tmpdir(), `takt-test-review-finding-${Date.now()}`);
    mkdirSync(testDir, { recursive: true });
  });
  afterEach(() => {
    resetAnalyticsWriter();
    rmSync(testDir, { recursive: true, force: true });
  });
  it('should write review_finding events to JSONL', () => {
    initAnalyticsWriter(true, testDir);
    const finding: ReviewFindingEvent = {
      type: 'review_finding',
      findingId: 'AA-001',
      status: 'new',
      ruleId: 'AA-001',
      severity: 'warning',
      decision: 'reject',
      file: 'src/foo.ts',
      line: 42,
      iteration: 2,
      runId: 'test-run',
      timestamp: '2026-02-18T10:00:00.000Z',
    };
    writeAnalyticsEvent(finding);
    // Read the dated JSONL file back and check the round-tripped fields.
    const raw = readFileSync(join(testDir, '2026-02-18.jsonl'), 'utf-8').trim();
    const roundTripped = JSON.parse(raw) as ReviewFindingEvent;
    expect(roundTripped.type).toBe('review_finding');
    expect(roundTripped.findingId).toBe('AA-001');
    expect(roundTripped.status).toBe('new');
    expect(roundTripped.decision).toBe('reject');
  });
});
describe('fix_action event writing', () => {
  let testDir: string;
  beforeEach(() => {
    resetAnalyticsWriter();
    testDir = join(tmpdir(), `takt-test-fix-action-${Date.now()}`);
    mkdirSync(testDir, { recursive: true });
  });
  afterEach(() => {
    resetAnalyticsWriter();
    rmSync(testDir, { recursive: true, force: true });
  });
  // Both fix actions exercise the identical write → read → parse flow; the two
  // previously copy-pasted tests are parameterized to keep them in sync.
  // ($action interpolation yields the same titles the original tests had.)
  it.each([
    { action: 'fixed' as const, findingId: 'AA-001', iteration: 3, timestamp: '2026-02-18T11:00:00.000Z' },
    { action: 'rebutted' as const, findingId: 'AA-002', iteration: 4, timestamp: '2026-02-18T12:00:00.000Z' },
  ])('should write fix_action events with $action action to JSONL', ({ action, findingId, iteration, timestamp }) => {
    initAnalyticsWriter(true, testDir);
    const event: FixActionEvent = {
      type: 'fix_action',
      findingId,
      action,
      iteration,
      runId: 'test-run',
      timestamp,
    };
    writeAnalyticsEvent(event);
    const filePath = join(testDir, '2026-02-18.jsonl');
    const content = readFileSync(filePath, 'utf-8').trim();
    const parsed = JSON.parse(content) as FixActionEvent;
    expect(parsed.type).toBe('fix_action');
    expect(parsed.findingId).toBe(findingId);
    expect(parsed.action).toBe(action);
  });
});

View File

@ -0,0 +1,108 @@
/**
* Tests for analytics purge: retention-based cleanup of JSONL files.
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { existsSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { purgeOldEvents } from '../features/analytics/index.js';
describe('purgeOldEvents', () => {
  let eventsDir: string;
  // Writes a small fixture file into the events directory.
  const writeFixture = (name: string, content = '{}'): void => {
    writeFileSync(join(eventsDir, name), content, 'utf-8');
  };
  beforeEach(() => {
    eventsDir = join(tmpdir(), `takt-test-analytics-purge-${Date.now()}`);
    mkdirSync(eventsDir, { recursive: true });
  });
  afterEach(() => {
    rmSync(eventsDir, { recursive: true, force: true });
  });
  it('should delete files older than retention period', () => {
    // Given: files from dates both inside and outside a 30-day window.
    writeFixture('2026-01-01.jsonl');
    writeFixture('2026-01-15.jsonl');
    writeFixture('2026-02-10.jsonl');
    writeFixture('2026-02-18.jsonl');
    // When: purging with 30-day retention as of Feb 18.
    const removed = purgeOldEvents(eventsDir, 30, new Date('2026-02-18T12:00:00Z'));
    // Then: only files dated before Jan 19 are deleted from disk.
    expect(removed).toContain('2026-01-01.jsonl');
    expect(removed).toContain('2026-01-15.jsonl');
    expect(removed).not.toContain('2026-02-10.jsonl');
    expect(removed).not.toContain('2026-02-18.jsonl');
    expect(existsSync(join(eventsDir, '2026-01-01.jsonl'))).toBe(false);
    expect(existsSync(join(eventsDir, '2026-01-15.jsonl'))).toBe(false);
    expect(existsSync(join(eventsDir, '2026-02-10.jsonl'))).toBe(true);
    expect(existsSync(join(eventsDir, '2026-02-18.jsonl'))).toBe(true);
  });
  it('should return empty array when no files to purge', () => {
    writeFixture('2026-02-18.jsonl');
    const removed = purgeOldEvents(eventsDir, 30, new Date('2026-02-18T12:00:00Z'));
    expect(removed).toEqual([]);
  });
  it('should return empty array when directory does not exist', () => {
    const missingDir = join(eventsDir, 'does-not-exist');
    expect(purgeOldEvents(missingDir, 30, new Date())).toEqual([]);
  });
  it('should delete all files when retention is 0', () => {
    writeFixture('2026-02-17.jsonl');
    writeFixture('2026-02-18.jsonl');
    const removed = purgeOldEvents(eventsDir, 0, new Date('2026-02-18T12:00:00Z'));
    expect(removed).toContain('2026-02-17.jsonl');
    // With retention 0 the cutoff date IS Feb 18, and '2026-02-18' is not
    // strictly less than '2026-02-18', so today's file survives.
    expect(removed).not.toContain('2026-02-18.jsonl');
  });
  it('should ignore non-jsonl files', () => {
    writeFixture('2025-01-01.jsonl');
    writeFixture('README.md', '# test');
    writeFixture('data.json');
    const removed = purgeOldEvents(eventsDir, 30, new Date('2026-02-18T12:00:00Z'));
    expect(removed).toContain('2025-01-01.jsonl');
    expect(removed).not.toContain('README.md');
    expect(removed).not.toContain('data.json');
    // Files without the .jsonl extension must remain untouched on disk.
    expect(existsSync(join(eventsDir, 'README.md'))).toBe(true);
    expect(existsSync(join(eventsDir, 'data.json'))).toBe(true);
  });
  it('should handle 7-day retention correctly', () => {
    writeFixture('2026-02-10.jsonl');
    writeFixture('2026-02-11.jsonl');
    writeFixture('2026-02-12.jsonl');
    writeFixture('2026-02-17.jsonl');
    writeFixture('2026-02-18.jsonl');
    const removed = purgeOldEvents(eventsDir, 7, new Date('2026-02-18T12:00:00Z'));
    // Cutoff is Feb 11: only strictly-older files go.
    expect(removed).toContain('2026-02-10.jsonl');
    expect(removed).not.toContain('2026-02-11.jsonl');
    expect(removed).not.toContain('2026-02-12.jsonl');
    expect(removed).not.toContain('2026-02-17.jsonl');
    expect(removed).not.toContain('2026-02-18.jsonl');
  });
});

View File

@ -0,0 +1,350 @@
/**
* Tests for analytics report parser extracting findings from review markdown.
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { readFileSync, mkdirSync, rmSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import {
parseFindingsFromReport,
extractDecisionFromReport,
inferSeverity,
emitFixActionEvents,
emitRebuttalEvents,
} from '../features/analytics/report-parser.js';
import { initAnalyticsWriter } from '../features/analytics/writer.js';
import { resetAnalyticsWriter } from '../features/analytics/writer.js';
import type { FixActionEvent } from '../features/analytics/events.js';
describe('parseFindingsFromReport', () => {
  it('should extract new findings from a review report', () => {
    // A "new" findings section: markdown table with finding_id, category
    // (used as ruleId) and a backtick-wrapped `file:line` location column.
    const report = [
      '# Review Report',
      '',
      '## Result: REJECT',
      '',
      '## Current Iteration Findings (new)',
      '| # | finding_id | Category | Location | Issue | Fix Suggestion |',
      '|---|------------|---------|------|------|--------|',
      '| 1 | AA-001 | DRY | `src/foo.ts:42` | Duplication | Extract helper |',
      '| 2 | AA-002 | Export | `src/bar.ts:10` | Unused export | Remove |',
      '',
    ].join('\n');
    const findings = parseFindingsFromReport(report);
    expect(findings).toHaveLength(2);
    expect(findings[0].findingId).toBe('AA-001');
    expect(findings[0].status).toBe('new');
    expect(findings[0].ruleId).toBe('DRY');
    expect(findings[0].file).toBe('src/foo.ts');
    expect(findings[0].line).toBe(42);
    expect(findings[1].findingId).toBe('AA-002');
    expect(findings[1].status).toBe('new');
    expect(findings[1].ruleId).toBe('Export');
    expect(findings[1].file).toBe('src/bar.ts');
    expect(findings[1].line).toBe(10);
  });
  it('should extract persists findings', () => {
    // Carry-over section → status 'persists'; its table has evidence columns
    // instead of a single location column.
    const report = [
      '## Carry-over Findings (persists)',
      '| # | finding_id | Previous Evidence | Current Evidence | Issue | Fix Suggestion |',
      '|---|------------|----------|----------|------|--------|',
      '| 1 | ARCH-001 | `src/a.ts:5` was X | `src/a.ts:5` still X | Still bad | Fix it |',
      '',
    ].join('\n');
    const findings = parseFindingsFromReport(report);
    expect(findings).toHaveLength(1);
    expect(findings[0].findingId).toBe('ARCH-001');
    expect(findings[0].status).toBe('persists');
  });
  it('should extract resolved findings', () => {
    // Resolved section → status 'resolved'; two-column table without '#'.
    const report = [
      '## Resolved Findings (resolved)',
      '| finding_id | Resolution Evidence |',
      '|------------|---------------------|',
      '| QA-003 | Fixed in src/c.ts |',
      '',
    ].join('\n');
    const findings = parseFindingsFromReport(report);
    expect(findings).toHaveLength(1);
    expect(findings[0].findingId).toBe('QA-003');
    expect(findings[0].status).toBe('resolved');
  });
  it('should handle mixed sections in one report', () => {
    // Japanese section headings with the status keyword embedded — the parser
    // must classify by the new/persists/resolved keyword, not the full title.
    const report = [
      '## 今回の指摘new',
      '| # | finding_id | カテゴリ | 場所 | 問題 | 修正案 |',
      '|---|------------|---------|------|------|--------|',
      '| 1 | AA-001 | DRY | `src/foo.ts:1` | Dup | Fix |',
      '',
      '## 継続指摘persists',
      '| # | finding_id | 前回根拠 | 今回根拠 | 問題 | 修正案 |',
      '|---|------------|----------|----------|------|--------|',
      '| 1 | AA-002 | Was bad | Still bad | Issue | Fix |',
      '',
      '## 解消済みresolved',
      '| finding_id | 解消根拠 |',
      '|------------|---------|',
      '| AA-003 | Fixed |',
      '',
    ].join('\n');
    const findings = parseFindingsFromReport(report);
    expect(findings).toHaveLength(3);
    expect(findings[0]).toEqual(expect.objectContaining({ findingId: 'AA-001', status: 'new' }));
    expect(findings[1]).toEqual(expect.objectContaining({ findingId: 'AA-002', status: 'persists' }));
    expect(findings[2]).toEqual(expect.objectContaining({ findingId: 'AA-003', status: 'resolved' }));
  });
  it('should return empty array when no finding sections exist', () => {
    const report = [
      '# Report',
      '',
      '## Summary',
      'Everything looks good.',
      '',
    ].join('\n');
    const findings = parseFindingsFromReport(report);
    expect(findings).toEqual([]);
  });
  it('should stop collecting findings when a new non-finding section starts', () => {
    // The second section also contains a table; its rows must not be parsed
    // as findings once the findings section has ended.
    const report = [
      '## Current Iteration Findings (new)',
      '| # | finding_id | Category | Location | Issue | Fix |',
      '|---|------------|---------|------|------|-----|',
      '| 1 | F-001 | Bug | `src/a.ts` | Bad | Fix |',
      '',
      '## REJECT判定条件',
      '| Condition | Result |',
      '|-----------|--------|',
      '| Has findings | Yes |',
      '',
    ].join('\n');
    const findings = parseFindingsFromReport(report);
    expect(findings).toHaveLength(1);
    expect(findings[0].findingId).toBe('F-001');
  });
  it('should skip header rows in tables', () => {
    // Neither the header row nor the |---| separator row may become a finding.
    const report = [
      '## Current Iteration Findings (new)',
      '| # | finding_id | Category | Location | Issue | Fix |',
      '|---|------------|---------|------|------|-----|',
      '| 1 | X-001 | Cat | `file.ts:5` | Problem | Solution |',
      '',
    ].join('\n');
    const findings = parseFindingsFromReport(report);
    expect(findings).toHaveLength(1);
    expect(findings[0].findingId).toBe('X-001');
  });
  it('should parse location with line number from backtick-wrapped paths', () => {
    const report = [
      '## Current Iteration Findings (new)',
      '| # | finding_id | Category | Location | Issue | Fix |',
      '|---|------------|---------|------|------|-----|',
      '| 1 | F-001 | Bug | `src/features/analytics/writer.ts:27` | Comment | Remove |',
      '',
    ].join('\n');
    const findings = parseFindingsFromReport(report);
    expect(findings[0].file).toBe('src/features/analytics/writer.ts');
    expect(findings[0].line).toBe(27);
  });
  it('should handle location with multiple line references', () => {
    // When several `file:line` references appear, the first one wins.
    const report = [
      '## Current Iteration Findings (new)',
      '| # | finding_id | Category | Location | Issue | Fix |',
      '|---|------------|---------|------|------|-----|',
      '| 1 | F-001 | Bug | `src/a.ts:10, src/b.ts:20` | Multiple | Fix |',
      '',
    ].join('\n');
    const findings = parseFindingsFromReport(report);
    expect(findings[0].file).toBe('src/a.ts');
    expect(findings[0].line).toBe(10);
  });
});
describe('extractDecisionFromReport', () => {
  // The decision comes from the result heading; heading language may vary.
  it('should return reject when report says REJECT', () => {
    expect(extractDecisionFromReport('## 結果: REJECT\n\nSome content')).toBe('reject');
  });

  it('should return approve when report says APPROVE', () => {
    expect(extractDecisionFromReport('## Result: APPROVE\n\nSome content')).toBe('approve');
  });

  // Absence of any result section yields null, not a default decision.
  it('should return null when no result section is found', () => {
    expect(extractDecisionFromReport('# Report\n\nNo result section here.')).toBeNull();
  });
});
describe('inferSeverity', () => {
  // Security-prefixed IDs are escalated to 'error'; everything else is 'warning'.
  it('should return error for security-related finding IDs', () => {
    for (const findingId of ['SEC-001', 'SEC-NEW-xss']) {
      expect(inferSeverity(findingId)).toBe('error');
    }
  });

  it('should return warning for other finding IDs', () => {
    for (const findingId of ['AA-001', 'QA-001', 'ARCH-NEW-dry']) {
      expect(inferSeverity(findingId)).toBe('warning');
    }
  });
});
describe('emitFixActionEvents', () => {
let testDir: string;

// Each test gets a fresh writer singleton bound to an isolated temp events dir;
// reset afterwards so module-level writer state never leaks between tests.
beforeEach(() => {
resetAnalyticsWriter();
testDir = join(tmpdir(), `takt-test-emit-fix-${Date.now()}`);
mkdirSync(testDir, { recursive: true });
initAnalyticsWriter(true, testDir);
});
afterEach(() => {
resetAnalyticsWriter();
rmSync(testDir, { recursive: true, force: true });
});

// One fix_action event per finding ID found in the free-text response; the file
// name is derived from the timestamp's date (2026-02-18.jsonl).
it('should emit fix_action events for each finding ID in response', () => {
const timestamp = new Date('2026-02-18T12:00:00.000Z');
emitFixActionEvents('Fixed AA-001 and ARCH-002-barrel', 3, 'run-xyz', timestamp);
const filePath = join(testDir, '2026-02-18.jsonl');
const lines = readFileSync(filePath, 'utf-8').trim().split('\n');
expect(lines).toHaveLength(2);
const event1 = JSON.parse(lines[0]) as FixActionEvent;
expect(event1.type).toBe('fix_action');
expect(event1.findingId).toBe('AA-001');
expect(event1.action).toBe('fixed');
expect(event1.iteration).toBe(3);
expect(event1.runId).toBe('run-xyz');
expect(event1.timestamp).toBe('2026-02-18T12:00:00.000Z');
const event2 = JSON.parse(lines[1]) as FixActionEvent;
expect(event2.type).toBe('fix_action');
expect(event2.findingId).toBe('ARCH-002-barrel');
expect(event2.action).toBe('fixed');
});

// No matching IDs means the JSONL file is never created; reading it throws ENOENT.
it('should not emit events when response contains no finding IDs', () => {
const timestamp = new Date('2026-02-18T12:00:00.000Z');
emitFixActionEvents('No issues found, all good.', 1, 'run-abc', timestamp);
const filePath = join(testDir, '2026-02-18.jsonl');
expect(() => readFileSync(filePath, 'utf-8')).toThrow();
});

// Repeated mentions of the same ID in one response collapse to a single event.
it('should deduplicate repeated finding IDs', () => {
const timestamp = new Date('2026-02-18T12:00:00.000Z');
emitFixActionEvents(
'Fixed QA-001, confirmed QA-001 is resolved, also QA-001 again',
2,
'run-dedup',
timestamp,
);
const filePath = join(testDir, '2026-02-18.jsonl');
const lines = readFileSync(filePath, 'utf-8').trim().split('\n');
expect(lines).toHaveLength(1);
const event = JSON.parse(lines[0]) as FixActionEvent;
expect(event.findingId).toBe('QA-001');
});

// ID formats covered: PREFIX-NNN, PREFIX-NEW-word, PREFIX-NNN-suffix.
it('should match various finding ID formats', () => {
const timestamp = new Date('2026-02-18T12:00:00.000Z');
const response = [
'Resolved AA-001 simple ID',
'Fixed ARCH-NEW-dry with NEW segment',
'Addressed SEC-002-xss with suffix',
].join('\n');
emitFixActionEvents(response, 1, 'run-formats', timestamp);
const filePath = join(testDir, '2026-02-18.jsonl');
const lines = readFileSync(filePath, 'utf-8').trim().split('\n');
expect(lines).toHaveLength(3);
const ids = lines.map((line) => (JSON.parse(line) as FixActionEvent).findingId);
expect(ids).toContain('AA-001');
expect(ids).toContain('ARCH-NEW-dry');
expect(ids).toContain('SEC-002-xss');
});
});
describe('emitRebuttalEvents', () => {
  let rebuttalDir: string;

  // Isolated writer + temp dir per test; reset afterwards to avoid state leaks.
  beforeEach(() => {
    resetAnalyticsWriter();
    rebuttalDir = join(tmpdir(), `takt-test-emit-rebuttal-${Date.now()}`);
    mkdirSync(rebuttalDir, { recursive: true });
    initAnalyticsWriter(true, rebuttalDir);
  });

  afterEach(() => {
    resetAnalyticsWriter();
    rmSync(rebuttalDir, { recursive: true, force: true });
  });

  it('should emit fix_action events with rebutted action for finding IDs', () => {
    const when = new Date('2026-02-18T12:00:00.000Z');
    emitRebuttalEvents('Rebutting AA-001 and ARCH-002-barrel', 3, 'run-xyz', when);

    const raw = readFileSync(join(rebuttalDir, '2026-02-18.jsonl'), 'utf-8');
    const events = raw.trim().split('\n').map((line) => JSON.parse(line) as FixActionEvent);
    expect(events).toHaveLength(2);

    expect(events[0].type).toBe('fix_action');
    expect(events[0].findingId).toBe('AA-001');
    expect(events[0].action).toBe('rebutted');
    expect(events[0].iteration).toBe(3);
    expect(events[0].runId).toBe('run-xyz');

    expect(events[1].type).toBe('fix_action');
    expect(events[1].findingId).toBe('ARCH-002-barrel');
    expect(events[1].action).toBe('rebutted');
  });

  it('should not emit events when response contains no finding IDs', () => {
    const when = new Date('2026-02-18T12:00:00.000Z');
    emitRebuttalEvents('No findings mentioned here.', 1, 'run-abc', when);
    // File is never created, so reading it must throw (ENOENT).
    expect(() => readFileSync(join(rebuttalDir, '2026-02-18.jsonl'), 'utf-8')).toThrow();
  });
});

View File

@ -0,0 +1,220 @@
/**
* Tests for AnalyticsWriter JSONL append, date rotation, ON/OFF toggle.
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { existsSync, readFileSync, mkdirSync, rmSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { resetAnalyticsWriter } from '../features/analytics/writer.js';
import {
initAnalyticsWriter,
isAnalyticsEnabled,
writeAnalyticsEvent,
} from '../features/analytics/index.js';
import type { MovementResultEvent, ReviewFindingEvent } from '../features/analytics/index.js';
describe('AnalyticsWriter', () => {
let testDir: string;

// Fresh singleton + isolated temp dir per test; reset again afterwards so the
// module-level writer never carries state across tests.
beforeEach(() => {
resetAnalyticsWriter();
testDir = join(tmpdir(), `takt-test-analytics-writer-${Date.now()}`);
mkdirSync(testDir, { recursive: true });
});
afterEach(() => {
resetAnalyticsWriter();
rmSync(testDir, { recursive: true, force: true });
});

describe('ON/OFF toggle', () => {
// Analytics is opt-in: with no init call the writer reports disabled.
it('should not be enabled by default', () => {
expect(isAnalyticsEnabled()).toBe(false);
});
it('should be enabled when initialized with enabled=true', () => {
initAnalyticsWriter(true, testDir);
expect(isAnalyticsEnabled()).toBe(true);
});
it('should not be enabled when initialized with enabled=false', () => {
initAnalyticsWriter(false, testDir);
expect(isAnalyticsEnabled()).toBe(false);
});
// A disabled writer is a no-op: no file appears for the event's date.
it('should not write when disabled', () => {
initAnalyticsWriter(false, testDir);
const event: MovementResultEvent = {
type: 'movement_result',
movement: 'plan',
provider: 'claude',
model: 'sonnet',
decisionTag: 'done',
iteration: 1,
runId: 'run-1',
timestamp: '2026-02-18T10:00:00.000Z',
};
writeAnalyticsEvent(event);
const expectedFile = join(testDir, '2026-02-18.jsonl');
expect(existsSync(expectedFile)).toBe(false);
});
});

describe('event writing', () => {
// File name is the event timestamp's UTC date: YYYY-MM-DD.jsonl.
it('should append event to date-based JSONL file', () => {
initAnalyticsWriter(true, testDir);
const event: MovementResultEvent = {
type: 'movement_result',
movement: 'implement',
provider: 'claude',
model: 'sonnet',
decisionTag: 'approved',
iteration: 2,
runId: 'run-abc',
timestamp: '2026-02-18T14:30:00.000Z',
};
writeAnalyticsEvent(event);
const filePath = join(testDir, '2026-02-18.jsonl');
expect(existsSync(filePath)).toBe(true);
const content = readFileSync(filePath, 'utf-8').trim();
const parsed = JSON.parse(content) as MovementResultEvent;
expect(parsed.type).toBe('movement_result');
expect(parsed.movement).toBe('implement');
expect(parsed.provider).toBe('claude');
expect(parsed.decisionTag).toBe('approved');
});
// Same-day events append as separate JSONL lines, preserving write order.
it('should append multiple events to the same file', () => {
initAnalyticsWriter(true, testDir);
const event1: MovementResultEvent = {
type: 'movement_result',
movement: 'plan',
provider: 'claude',
model: 'sonnet',
decisionTag: 'done',
iteration: 1,
runId: 'run-1',
timestamp: '2026-02-18T10:00:00.000Z',
};
const event2: MovementResultEvent = {
type: 'movement_result',
movement: 'implement',
provider: 'codex',
model: 'o3',
decisionTag: 'needs_fix',
iteration: 2,
runId: 'run-1',
timestamp: '2026-02-18T11:00:00.000Z',
};
writeAnalyticsEvent(event1);
writeAnalyticsEvent(event2);
const filePath = join(testDir, '2026-02-18.jsonl');
const lines = readFileSync(filePath, 'utf-8').trim().split('\n');
expect(lines).toHaveLength(2);
const parsed1 = JSON.parse(lines[0]) as MovementResultEvent;
const parsed2 = JSON.parse(lines[1]) as MovementResultEvent;
expect(parsed1.movement).toBe('plan');
expect(parsed2.movement).toBe('implement');
});
// Date rotation: timestamps straddling midnight land in different files.
it('should create separate files for different dates', () => {
initAnalyticsWriter(true, testDir);
const event1: MovementResultEvent = {
type: 'movement_result',
movement: 'plan',
provider: 'claude',
model: 'sonnet',
decisionTag: 'done',
iteration: 1,
runId: 'run-1',
timestamp: '2026-02-17T23:59:00.000Z',
};
const event2: MovementResultEvent = {
type: 'movement_result',
movement: 'implement',
provider: 'claude',
model: 'sonnet',
decisionTag: 'done',
iteration: 2,
runId: 'run-1',
timestamp: '2026-02-18T00:01:00.000Z',
};
writeAnalyticsEvent(event1);
writeAnalyticsEvent(event2);
expect(existsSync(join(testDir, '2026-02-17.jsonl'))).toBe(true);
expect(existsSync(join(testDir, '2026-02-18.jsonl'))).toBe(true);
});
// The writer handles every event variant; review_finding round-trips intact.
it('should write review_finding events correctly', () => {
initAnalyticsWriter(true, testDir);
const event: ReviewFindingEvent = {
type: 'review_finding',
findingId: 'f-001',
status: 'new',
ruleId: 'no-any',
severity: 'error',
decision: 'reject',
file: 'src/index.ts',
line: 10,
iteration: 1,
runId: 'run-1',
timestamp: '2026-03-01T08:00:00.000Z',
};
writeAnalyticsEvent(event);
const filePath = join(testDir, '2026-03-01.jsonl');
const content = readFileSync(filePath, 'utf-8').trim();
const parsed = JSON.parse(content) as ReviewFindingEvent;
expect(parsed.type).toBe('review_finding');
expect(parsed.findingId).toBe('f-001');
expect(parsed.ruleId).toBe('no-any');
});
});

describe('directory creation', () => {
// Init eagerly creates the (possibly nested) events dir when enabled…
it('should create events directory when enabled and dir does not exist', () => {
const nestedDir = join(testDir, 'nested', 'analytics', 'events');
expect(existsSync(nestedDir)).toBe(false);
initAnalyticsWriter(true, nestedDir);
expect(existsSync(nestedDir)).toBe(true);
});
// …but leaves the filesystem untouched when disabled.
it('should not create directory when disabled', () => {
const nestedDir = join(testDir, 'disabled-dir', 'events');
initAnalyticsWriter(false, nestedDir);
expect(existsSync(nestedDir)).toBe(false);
});
});

describe('resetInstance', () => {
// Reset returns the singleton to its default (disabled) state.
it('should reset to disabled state', () => {
initAnalyticsWriter(true, testDir);
expect(isAnalyticsEnabled()).toBe(true);
resetAnalyticsWriter();
expect(isAnalyticsEnabled()).toBe(false);
});
});
});

View File

@ -17,6 +17,27 @@ vi.mock('../infra/config/loadConfig.js', () => ({
loadConfig: loadConfigMock, loadConfig: loadConfigMock,
})); }));
vi.mock('../infra/config/resolvePieceConfigValue.js', () => ({
resolvePieceConfigValue: (_projectDir: string, key: string) => {
const loaded = loadConfigMock() as Record<string, Record<string, unknown>>;
const global = loaded?.global ?? {};
const project = loaded?.project ?? {};
const merged: Record<string, unknown> = { ...global, ...project };
return merged[key];
},
resolvePieceConfigValues: (_projectDir: string, keys: readonly string[]) => {
const loaded = loadConfigMock() as Record<string, Record<string, unknown>>;
const global = loaded?.global ?? {};
const project = loaded?.project ?? {};
const merged: Record<string, unknown> = { ...global, ...project };
const result: Record<string, unknown> = {};
for (const key of keys) {
result[key] = merged[key];
}
return result;
},
}));
const { getPieceCategoriesPath, resetPieceCategories } = await import( const { getPieceCategoriesPath, resetPieceCategories } = await import(
'../infra/config/global/pieceCategories.js' '../infra/config/global/pieceCategories.js'
); );

View File

@ -122,6 +122,15 @@ vi.mock('../infra/config/index.js', () => ({
global: mockLoadGlobalConfig(), global: mockLoadGlobalConfig(),
project: {}, project: {},
})), })),
resolvePieceConfigValues: (_projectDir: string, keys: readonly string[]) => {
const global = mockLoadGlobalConfig() as Record<string, unknown>;
const config = { ...global, piece: 'default', provider: global.provider ?? 'claude', verbose: false };
const result: Record<string, unknown> = {};
for (const key of keys) {
result[key] = config[key];
}
return result;
},
saveSessionState: vi.fn(), saveSessionState: vi.fn(),
ensureDir: vi.fn(), ensureDir: vi.fn(),
writeFileAtomic: vi.fn(), writeFileAtomic: vi.fn(),

View File

@ -93,6 +93,14 @@ vi.mock('../infra/config/index.js', () => ({
global: { provider: 'claude' }, global: { provider: 'claude' },
project: {}, project: {},
}), }),
resolvePieceConfigValues: (_projectDir: string, keys: readonly string[]) => {
const config: Record<string, unknown> = { provider: 'claude', piece: 'default', verbose: false };
const result: Record<string, unknown> = {};
for (const key of keys) {
result[key] = config[key];
}
return result;
},
saveSessionState: vi.fn(), saveSessionState: vi.fn(),
ensureDir: vi.fn(), ensureDir: vi.fn(),
writeFileAtomic: vi.fn(), writeFileAtomic: vi.fn(),

View File

@ -31,6 +31,18 @@ vi.mock('../infra/config/index.js', () => ({
loadConfig: loadConfigMock, loadConfig: loadConfigMock,
loadCustomAgents: loadCustomAgentsMock, loadCustomAgents: loadCustomAgentsMock,
loadAgentPrompt: loadAgentPromptMock, loadAgentPrompt: loadAgentPromptMock,
resolveConfigValues: (_projectDir: string, keys: readonly string[]) => {
const loaded = loadConfigMock() as Record<string, unknown>;
const global = (loaded.global ?? {}) as Record<string, unknown>;
const project = (loaded.project ?? {}) as Record<string, unknown>;
const provider = (project.provider ?? global.provider ?? 'claude') as string;
const config: Record<string, unknown> = { ...global, ...project, provider, piece: project.piece ?? 'default', verbose: false };
const result: Record<string, unknown> = {};
for (const key of keys) {
result[key] = config[key];
}
return result;
},
})); }));
vi.mock('../shared/prompts/index.js', () => ({ vi.mock('../shared/prompts/index.js', () => ({
@ -50,7 +62,7 @@ describe('option resolution order', () => {
loadTemplateMock.mockReturnValue('template'); loadTemplateMock.mockReturnValue('template');
}); });
it('should resolve provider in order: CLI > Local > Piece(step) > Global', async () => { it('should resolve provider in order: CLI > Config(project??global) > stepProvider > default', async () => {
// Given // Given
loadConfigMock.mockReturnValue({ loadConfigMock.mockReturnValue({
project: { provider: 'opencode' }, project: { provider: 'opencode' },
@ -67,7 +79,7 @@ describe('option resolution order', () => {
// Then // Then
expect(getProviderMock).toHaveBeenLastCalledWith('codex'); expect(getProviderMock).toHaveBeenLastCalledWith('codex');
// When: CLI 指定なし(Local が有効 // When: CLI 指定なし(project provider が有効: resolveConfigValues は project.provider ?? global.provider を返す
await runAgent(undefined, 'task', { await runAgent(undefined, 'task', {
cwd: '/repo', cwd: '/repo',
stepProvider: 'claude', stepProvider: 'claude',
@ -76,7 +88,7 @@ describe('option resolution order', () => {
// Then // Then
expect(getProviderMock).toHaveBeenLastCalledWith('opencode'); expect(getProviderMock).toHaveBeenLastCalledWith('opencode');
// When: Local なしPiece が有効 // When: project なし → resolveConfigValues は global.provider を返す(フラットマージ
loadConfigMock.mockReturnValue({ loadConfigMock.mockReturnValue({
project: {}, project: {},
global: { provider: 'mock' }, global: { provider: 'mock' },
@ -86,10 +98,10 @@ describe('option resolution order', () => {
stepProvider: 'claude', stepProvider: 'claude',
}); });
// Then // Then: resolveConfigValues returns 'mock' (global fallback), so stepProvider is not reached
expect(getProviderMock).toHaveBeenLastCalledWith('claude'); expect(getProviderMock).toHaveBeenLastCalledWith('mock');
// When: Piece なしGlobal が有効) // When: stepProvider もなし → 同様に global.provider
await runAgent(undefined, 'task', { cwd: '/repo' }); await runAgent(undefined, 'task', { cwd: '/repo' });
// Then // Then
@ -138,15 +150,16 @@ describe('option resolution order', () => {
); );
}); });
it('should ignore global model when global provider does not match resolved provider', async () => { it('should ignore global model when resolved provider does not match config provider', async () => {
// Given // Given: CLI provider overrides config provider, causing mismatch with config.model
loadConfigMock.mockReturnValue({ loadConfigMock.mockReturnValue({
project: { provider: 'codex' }, project: {},
global: { provider: 'claude', model: 'global-model' }, global: { provider: 'claude', model: 'global-model' },
}); });
// When // When: CLI provider='codex' overrides config provider='claude'
await runAgent(undefined, 'task', { cwd: '/repo' }); // resolveModel compares config.provider ('claude') with resolvedProvider ('codex') → mismatch → model ignored
await runAgent(undefined, 'task', { cwd: '/repo', provider: 'codex' });
// Then // Then
expect(providerCallMock).toHaveBeenLastCalledWith( expect(providerCallMock).toHaveBeenLastCalledWith(
@ -191,8 +204,11 @@ describe('option resolution order', () => {
); );
}); });
it('should use custom agent provider/model when higher-priority values are absent', async () => { it('should use custom agent model and prompt when higher-priority values are absent', async () => {
// Given // Given: custom agent with provider/model, but no CLI/config override
// Note: resolveConfigValues returns provider='claude' by default (loadConfig merges project ?? global ?? 'claude'),
// so agentConfig.provider is not reached in resolveProvider (config.provider is always truthy).
// However, custom agent model IS used because resolveModel checks agentConfig.model before config.
const customAgents = new Map([ const customAgents = new Map([
['custom', { name: 'custom', prompt: 'agent prompt', provider: 'opencode', model: 'agent-model' }], ['custom', { name: 'custom', prompt: 'agent prompt', provider: 'opencode', model: 'agent-model' }],
]); ]);
@ -201,12 +217,14 @@ describe('option resolution order', () => {
// When // When
await runAgent('custom', 'task', { cwd: '/repo' }); await runAgent('custom', 'task', { cwd: '/repo' });
// Then // Then: provider falls back to config default ('claude'), not agentConfig.provider
expect(getProviderMock).toHaveBeenLastCalledWith('opencode'); expect(getProviderMock).toHaveBeenLastCalledWith('claude');
// Agent model is used (resolved before config.model in resolveModel)
expect(providerCallMock).toHaveBeenLastCalledWith( expect(providerCallMock).toHaveBeenLastCalledWith(
'task', 'task',
expect.objectContaining({ model: 'agent-model' }), expect.objectContaining({ model: 'agent-model' }),
); );
// Agent prompt is still used
expect(providerSetupMock).toHaveBeenLastCalledWith( expect(providerSetupMock).toHaveBeenLastCalledWith(
expect.objectContaining({ systemPrompt: 'prompt' }), expect.objectContaining({ systemPrompt: 'prompt' }),
); );

View File

@ -29,6 +29,17 @@ vi.mock('../infra/config/index.js', () => ({
project: { piece: 'default' }, project: { piece: 'default' },
}; };
}, },
resolvePieceConfigValues: (_projectDir: string, keys: readonly string[]) => {
const raw = mockLoadConfigRaw() as Record<string, unknown>;
const config = ('global' in raw && 'project' in raw)
? { ...raw.global as Record<string, unknown>, ...raw.project as Record<string, unknown> }
: { ...raw, piece: 'default', provider: 'claude', verbose: false };
const result: Record<string, unknown> = {};
for (const key of keys) {
result[key] = config[key];
}
return result;
},
})); }));
const mockLoadConfig = mockLoadConfigRaw; const mockLoadConfig = mockLoadConfigRaw;

View File

@ -4,12 +4,15 @@
* Registers all named subcommands (run, watch, add, list, switch, clear, eject, prompt, catalog). * Registers all named subcommands (run, watch, add, list, switch, clear, eject, prompt, catalog).
*/ */
import { join } from 'node:path';
import { clearPersonaSessions, resolveConfigValue } from '../../infra/config/index.js'; import { clearPersonaSessions, resolveConfigValue } from '../../infra/config/index.js';
import { success } from '../../shared/ui/index.js'; import { getGlobalConfigDir } from '../../infra/config/paths.js';
import { success, info } from '../../shared/ui/index.js';
import { runAllTasks, addTask, watchTasks, listTasks } from '../../features/tasks/index.js'; import { runAllTasks, addTask, watchTasks, listTasks } from '../../features/tasks/index.js';
import { switchPiece, ejectBuiltin, ejectFacet, parseFacetType, VALID_FACET_TYPES, resetCategoriesToDefault, resetConfigToDefault, deploySkill } from '../../features/config/index.js'; import { switchPiece, ejectBuiltin, ejectFacet, parseFacetType, VALID_FACET_TYPES, resetCategoriesToDefault, resetConfigToDefault, deploySkill } from '../../features/config/index.js';
import { previewPrompts } from '../../features/prompt/index.js'; import { previewPrompts } from '../../features/prompt/index.js';
import { showCatalog } from '../../features/catalog/index.js'; import { showCatalog } from '../../features/catalog/index.js';
import { computeReviewMetrics, formatReviewMetrics, parseSinceDuration, purgeOldEvents } from '../../features/analytics/index.js';
import { program, resolvedCwd } from './program.js'; import { program, resolvedCwd } from './program.js';
import { resolveAgentOverrides } from './helpers.js'; import { resolveAgentOverrides } from './helpers.js';
@ -136,3 +139,37 @@ program
.action((type?: string) => { .action((type?: string) => {
showCatalog(resolvedCwd, type); showCatalog(resolvedCwd, type);
}); });
// Parent `takt metrics` command; concrete actions are registered as subcommands.
const metrics = program
.command('metrics')
.description('Show analytics metrics');

// `takt metrics review` — aggregate review-quality indicators from local JSONL events.
metrics
.command('review')
.description('Show review quality metrics')
.option('--since <duration>', 'Time window (e.g. "7d", "30d")', '30d')
.action((opts: { since: string }) => {
// Config may override the events directory; default is ~/.takt/analytics/events.
const analytics = resolveConfigValue(resolvedCwd, 'analytics');
const eventsDir = analytics?.eventsPath ?? join(getGlobalConfigDir(), 'analytics', 'events');
// parseSinceDuration converts e.g. "30d" to milliseconds; window is relative to now.
const durationMs = parseSinceDuration(opts.since);
const sinceMs = Date.now() - durationMs;
const result = computeReviewMetrics(eventsDir, sinceMs);
info(formatReviewMetrics(result));
});

// NOTE(review): `purge` is registered on the top-level program (`takt purge`),
// not under `metrics` (`takt metrics purge`) — confirm this nesting is intended.
program
.command('purge')
.description('Purge old analytics event files')
.option('--retention-days <days>', 'Retention period in days', '30')
.action((opts: { retentionDays: string }) => {
const analytics = resolveConfigValue(resolvedCwd, 'analytics');
const eventsDir = analytics?.eventsPath ?? join(getGlobalConfigDir(), 'analytics', 'events');
// NOTE(review): a configured retentionDays takes precedence over the CLI flag,
// which makes --retention-days a silent no-op when config sets it — confirm.
const retentionDays = analytics?.retentionDays
?? parseInt(opts.retentionDays, 10);
const deleted = purgeOldEvents(eventsDir, retentionDays, new Date());
if (deleted.length === 0) {
info('No files to purge.');
} else {
success(`Purged ${deleted.length} file(s): ${deleted.join(', ')}`);
}
});

View File

@ -23,6 +23,16 @@ export interface ObservabilityConfig {
providerEvents?: boolean; providerEvents?: boolean;
} }
/** Analytics configuration for local metrics collection */
export interface AnalyticsConfig {
/** Whether analytics collection is enabled */
enabled?: boolean;
/** Custom path for analytics events directory (default: ~/.takt/analytics/events) */
eventsPath?: string;
/** Retention period in days for analytics event files (default: 30) */
retentionDays?: number;
}
/** Language setting for takt */ /** Language setting for takt */
export type Language = 'en' | 'ja'; export type Language = 'en' | 'ja';
@ -57,6 +67,7 @@ export interface GlobalConfig {
provider?: 'claude' | 'codex' | 'opencode' | 'mock'; provider?: 'claude' | 'codex' | 'opencode' | 'mock';
model?: string; model?: string;
observability?: ObservabilityConfig; observability?: ObservabilityConfig;
analytics?: AnalyticsConfig;
/** Directory for shared clones (worktree_dir in config). If empty, uses ../{clone-name} relative to project */ /** Directory for shared clones (worktree_dir in config). If empty, uses ../{clone-name} relative to project */
worktreeDir?: string; worktreeDir?: string;
/** Auto-create PR after worktree execution (default: prompt in interactive mode) */ /** Auto-create PR after worktree execution (default: prompt in interactive mode) */

View File

@ -378,6 +378,13 @@ export const ObservabilityConfigSchema = z.object({
provider_events: z.boolean().optional(), provider_events: z.boolean().optional(),
}); });
/** Analytics config schema */
// Keys are snake_case as written in user config (events_path, retention_days);
// presumably the config loader maps them onto the camelCase AnalyticsConfig
// fields (eventsPath, retentionDays) — verify against the loader.
export const AnalyticsConfigSchema = z.object({
enabled: z.boolean().optional(),
events_path: z.string().optional(),
retention_days: z.number().int().positive().optional(),
});
/** Language setting schema */ /** Language setting schema */
export const LanguageSchema = z.enum(['en', 'ja']); export const LanguageSchema = z.enum(['en', 'ja']);
@ -409,6 +416,7 @@ export const GlobalConfigSchema = z.object({
provider: z.enum(['claude', 'codex', 'opencode', 'mock']).optional().default('claude'), provider: z.enum(['claude', 'codex', 'opencode', 'mock']).optional().default('claude'),
model: z.string().optional(), model: z.string().optional(),
observability: ObservabilityConfigSchema.optional(), observability: ObservabilityConfigSchema.optional(),
analytics: AnalyticsConfigSchema.optional(),
/** Directory for shared clones (worktree_dir in config). If empty, uses ../{clone-name} relative to project */ /** Directory for shared clones (worktree_dir in config). If empty, uses ../{clone-name} relative to project */
worktree_dir: z.string().optional(), worktree_dir: z.string().optional(),
/** Auto-create PR after worktree execution (default: prompt in interactive mode) */ /** Auto-create PR after worktree execution (default: prompt in interactive mode) */

View File

@ -0,0 +1,64 @@
/**
* Analytics event type definitions for metrics collection.
*
* Three event types capture review findings, fix actions, and movement results
* for local-only analysis when analytics.enabled = true.
*/
/** Status of a review finding across iterations */
export type FindingStatus = 'new' | 'persists' | 'resolved';

/** Severity level of a review finding */
export type FindingSeverity = 'error' | 'warning';

/** Decision taken on a finding */
export type FindingDecision = 'reject' | 'approve';

/** Action taken to address a finding */
export type FixActionType = 'fixed' | 'rebutted' | 'not_applicable';

/** Review finding event — emitted per finding during review movements */
export interface ReviewFindingEvent {
// Discriminant for the AnalyticsEvent union.
type: 'review_finding';
// Stable identifier of the finding, e.g. "QA-003".
findingId: string;
status: FindingStatus;
// Rule that produced the finding, e.g. "no-any".
ruleId: string;
severity: FindingSeverity;
decision: FindingDecision;
// Location of the finding in the reviewed code.
file: string;
line: number;
// 1-based review iteration this finding was observed in.
iteration: number;
runId: string;
// ISO-8601 string; the writer derives the daily JSONL file name from its date part.
timestamp: string;
}

/** Fix action event — emitted per finding addressed during fix movements */
export interface FixActionEvent {
type: 'fix_action';
findingId: string;
action: FixActionType;
// Optional context about the fix; omitted when unknown.
changedFiles?: string[];
testCommand?: string;
testResult?: string;
iteration: number;
runId: string;
timestamp: string;
}

/** Movement result event — emitted after each movement completes */
export interface MovementResultEvent {
type: 'movement_result';
movement: string;
provider: string;
model: string;
decisionTag: string;
iteration: number;
runId: string;
timestamp: string;
}

/** Union of all analytics event types, discriminated by `type` */
export type AnalyticsEvent =
| ReviewFindingEvent
| FixActionEvent
| MovementResultEvent;

View File

@ -0,0 +1,33 @@
/**
* Analytics module event collection and metrics.
*/
// Event type definitions (discriminated union on `type`).
export type {
AnalyticsEvent,
ReviewFindingEvent,
FixActionEvent,
MovementResultEvent,
} from './events.js';
// JSONL writer lifecycle and the global ON/OFF toggle.
export {
initAnalyticsWriter,
isAnalyticsEnabled,
writeAnalyticsEvent,
} from './writer.js';
// Review-report parsing and fix/rebuttal event emission.
export {
parseFindingsFromReport,
extractDecisionFromReport,
inferSeverity,
emitFixActionEvents,
emitRebuttalEvents,
} from './report-parser.js';
// Metrics aggregation and CLI formatting.
export {
computeReviewMetrics,
formatReviewMetrics,
parseSinceDuration,
type ReviewMetrics,
} from './metrics.js';
// Retention-based purge of old event files.
export { purgeOldEvents } from './purge.js';

View File

@ -0,0 +1,225 @@
/**
* Analytics metrics computation from JSONL event files.
*
* Reads events from ~/.takt/analytics/events/*.jsonl and computes
* five key indicators for review quality assessment.
*/
import { readdirSync, readFileSync } from 'node:fs';
import { join } from 'node:path';
import type { AnalyticsEvent, ReviewFindingEvent, FixActionEvent } from './events.js';
/** Aggregated metrics output of computeReviewMetrics */
export interface ReviewMetrics {
/** Re-report count per finding_id (only findings raised more than once) */
reReportCounts: Map<string, number>;
/** Ratio (0..1) of findings that appeared in 2+ iterations before resolution */
roundTripRatio: number;
/** Average number of iterations to resolve a finding */
averageResolutionIterations: number;
/** Number of REJECT decisions per rule_id */
rejectCountsByRule: Map<string, number>;
/** Ratio (0..1) of rebutted findings that were subsequently resolved */
rebuttalResolvedRatio: number;
}
/**
 * Compute review metrics from events within a time window.
 *
 * @param eventsDir Absolute path to the analytics events directory
 * @param sinceMs Epoch ms — only events at/after this time are included
 */
export function computeReviewMetrics(eventsDir: string, sinceMs: number): ReviewMetrics {
  const allEvents = loadEventsAfter(eventsDir, sinceMs);

  // Partition the event stream once by discriminant instead of filtering twice.
  const reviewFindings: ReviewFindingEvent[] = [];
  const fixActions: FixActionEvent[] = [];
  for (const event of allEvents) {
    if (event.type === 'review_finding') {
      reviewFindings.push(event);
    } else if (event.type === 'fix_action') {
      fixActions.push(event);
    }
  }

  return {
    reReportCounts: computeReReportCounts(reviewFindings),
    roundTripRatio: computeRoundTripRatio(reviewFindings),
    averageResolutionIterations: computeAverageResolutionIterations(reviewFindings),
    rejectCountsByRule: computeRejectCountsByRule(reviewFindings),
    rebuttalResolvedRatio: computeRebuttalResolvedRatio(fixActions, reviewFindings),
  };
}
/**
 * Render review metrics as a human-readable multi-line string for CLI display.
 */
export function formatReviewMetrics(metrics: ReviewMetrics): string {
  // Shared renderers: a count map becomes indented "key: count" rows (or a
  // "(none)" placeholder), and a 0..1 ratio becomes a one-decimal percentage.
  const countRows = (counts: Map<string, number>): string[] =>
    counts.size === 0
      ? ['  (none)']
      : [...counts].map(([key, count]) => `  ${key}: ${count}`);
  const pct = (ratio: number): string => `${(ratio * 100).toFixed(1)}%`;

  return [
    '=== Review Metrics ===',
    '',
    'Re-report counts (finding_id → count):',
    ...countRows(metrics.reReportCounts),
    '',
    `Round-trip ratio (2+ iterations): ${pct(metrics.roundTripRatio)}`,
    `Average resolution iterations: ${metrics.averageResolutionIterations.toFixed(2)}`,
    '',
    'REJECT counts by rule:',
    ...countRows(metrics.rejectCountsByRule),
    '',
    `Rebuttal → resolved ratio: ${pct(metrics.rebuttalResolvedRatio)}`,
  ].join('\n');
}
// ---- Internal helpers ----
/**
 * Load all events from JSONL files whose file date (YYYY-MM-DD) is on or
 * after the UTC date of `sinceMs`, then filter individual events by exact
 * timestamp.
 *
 * Malformed lines (e.g. a torn final line left by an interrupted append)
 * are skipped instead of aborting the whole load — previously a single
 * corrupt line made JSON.parse throw and failed every metric.
 *
 * @param eventsDir Absolute path to the analytics events directory
 * @param sinceMs Epoch ms cutoff — events before this are excluded
 */
function loadEventsAfter(eventsDir: string, sinceMs: number): AnalyticsEvent[] {
  // File names are YYYY-MM-DD.jsonl, so a lexicographic compare against the
  // cutoff's UTC date selects every file that could hold relevant events.
  const sinceDate = new Date(sinceMs).toISOString().slice(0, 10);
  let files: string[];
  try {
    files = readdirSync(eventsDir).filter((f) => f.endsWith('.jsonl'));
  } catch (e) {
    // A missing events directory simply means nothing was recorded yet.
    if ((e as NodeJS.ErrnoException).code === 'ENOENT') return [];
    throw e;
  }
  const events: AnalyticsEvent[] = [];
  for (const file of files) {
    if (file.replace('.jsonl', '') < sinceDate) continue;
    const content = readFileSync(join(eventsDir, file), 'utf-8');
    for (const line of content.split('\n')) {
      if (!line.trim()) continue;
      let event: AnalyticsEvent;
      try {
        event = JSON.parse(line) as AnalyticsEvent;
      } catch {
        continue; // skip corrupt/truncated lines rather than failing the scan
      }
      if (new Date(event.timestamp).getTime() >= sinceMs) {
        events.push(event);
      }
    }
  }
  return events;
}
/** Count how many times each finding_id appears (only those appearing 2+) */
function computeReReportCounts(findings: ReviewFindingEvent[]): Map<string, number> {
  // Tally every occurrence, then keep only the repeat offenders.
  const occurrences = new Map<string, number>();
  for (const { findingId } of findings) {
    occurrences.set(findingId, (occurrences.get(findingId) ?? 0) + 1);
  }
  return new Map([...occurrences].filter(([, count]) => count >= 2));
}
/**
 * Ratio of findings that appear in 2+ distinct iterations before resolution.
 *
 * @param findings All review-finding events in the window
 * @returns Fraction in [0, 1]; 0 when there are no findings
 */
function computeRoundTripRatio(findings: ReviewFindingEvent[]): number {
  // Group the distinct iterations per finding in a single pass; the
  // original re-filtered the full list for every id, which is O(n^2).
  const iterationsById = new Map<string, Set<number>>();
  for (const f of findings) {
    let iterations = iterationsById.get(f.findingId);
    if (!iterations) {
      iterations = new Set<number>();
      iterationsById.set(f.findingId, iterations);
    }
    iterations.add(f.iteration);
  }
  if (iterationsById.size === 0) return 0;
  let multiIterationCount = 0;
  for (const iterations of iterationsById.values()) {
    if (iterations.size >= 2) multiIterationCount++;
  }
  return multiIterationCount / iterationsById.size;
}
/**
 * Average number of iterations from first appearance to resolution.
 *
 * For each finding that has a 'resolved' event, counts
 * (iteration of its first 'resolved' event) - (min iteration seen) + 1,
 * then averages over all resolved findings.
 *
 * @param findings All review-finding events in the window
 * @returns Average span in iterations; 0 when nothing was resolved
 */
function computeAverageResolutionIterations(findings: ReviewFindingEvent[]): number {
  // Single pass instead of the original per-id re-filtering (O(n^2)):
  // track the minimum iteration per id and the FIRST resolved event per id
  // (first in array order, matching the original's Array.find semantics).
  const minIterationById = new Map<string, number>();
  const resolvedIterationById = new Map<string, number>();
  for (const f of findings) {
    const currentMin = minIterationById.get(f.findingId);
    if (currentMin === undefined || f.iteration < currentMin) {
      minIterationById.set(f.findingId, f.iteration);
    }
    if (f.status === 'resolved' && !resolvedIterationById.has(f.findingId)) {
      resolvedIterationById.set(f.findingId, f.iteration);
    }
  }
  if (resolvedIterationById.size === 0) return 0;
  let totalIterations = 0;
  for (const [id, resolvedIteration] of resolvedIterationById) {
    const minIteration = minIterationById.get(id) ?? resolvedIteration;
    totalIterations += resolvedIteration - minIteration + 1;
  }
  return totalIterations / resolvedIterationById.size;
}
/** Ratio of rebutted findings that were subsequently resolved in a review */
function computeRebuttalResolvedRatio(
  fixActions: FixActionEvent[],
  findings: ReviewFindingEvent[],
): number {
  // Ids the fixer pushed back on.
  const rebuttedIds = new Set<string>();
  for (const action of fixActions) {
    if (action.action === 'rebutted') rebuttedIds.add(action.findingId);
  }
  if (rebuttedIds.size === 0) return 0;
  // Of those, the ones some review event marked resolved.
  const resolvedIds = new Set<string>();
  for (const finding of findings) {
    if (finding.status === 'resolved' && rebuttedIds.has(finding.findingId)) {
      resolvedIds.add(finding.findingId);
    }
  }
  return resolvedIds.size / rebuttedIds.size;
}
/** Count of REJECT decisions per rule_id */
function computeRejectCountsByRule(findings: ReviewFindingEvent[]): Map<string, number> {
  const counts = new Map<string, number>();
  const rejected = findings.filter((f) => f.decision === 'reject');
  for (const { ruleId } of rejected) {
    counts.set(ruleId, (counts.get(ruleId) ?? 0) + 1);
  }
  return counts;
}
/**
 * Parse a duration string like "7d", "30d", "14d" into milliseconds.
 *
 * @param since Duration in whole days, written as "<digits>d"
 * @returns Duration in milliseconds
 * @throws Error when the string does not match the "<digits>d" format
 */
export function parseSinceDuration(since: string): number {
  const DAY_MS = 24 * 60 * 60 * 1000;
  const daysStr = since.match(/^(\d+)d$/)?.[1];
  if (daysStr === undefined) {
    throw new Error(`Invalid duration format: "${since}". Use format like "7d", "30d".`);
  }
  return parseInt(daysStr, 10) * DAY_MS;
}

View File

@ -0,0 +1,40 @@
/**
* Retention-based purge for analytics event files.
*
* Deletes JSONL files older than the configured retention period.
*/
import { readdirSync, unlinkSync } from 'node:fs';
import { join } from 'node:path';
/**
 * Purge JSONL event files older than the retention period.
 *
 * @param eventsDir Absolute path to the analytics events directory
 * @param retentionDays Number of days to retain (files older than this are deleted)
 * @param now Reference time for age calculation
 * @returns List of deleted file names
 */
export function purgeOldEvents(eventsDir: string, retentionDays: number, now: Date): string[] {
  const DAY_MS = 24 * 60 * 60 * 1000;
  // File names are YYYY-MM-DD.jsonl, so comparing the date prefix
  // lexicographically against the cutoff date is a correct age check.
  const cutoffStr = new Date(now.getTime() - retentionDays * DAY_MS).toISOString().slice(0, 10);
  let entries: string[];
  try {
    entries = readdirSync(eventsDir);
  } catch (e) {
    // No events directory yet means there is nothing to purge.
    if ((e as NodeJS.ErrnoException).code === 'ENOENT') return [];
    throw e;
  }
  const deleted: string[] = [];
  for (const name of entries) {
    if (!name.endsWith('.jsonl')) continue;
    if (name.replace('.jsonl', '') < cutoffStr) {
      unlinkSync(join(eventsDir, name));
      deleted.push(name);
    }
  }
  return deleted;
}

View File

@ -0,0 +1,191 @@
/**
* Extracts analytics event data from review report markdown.
*
* Review reports follow a consistent structure with finding tables
* under "new", "persists", and "resolved" sections. Each table row
* contains a finding_id column.
*/
import type { FindingStatus, FindingSeverity, FindingDecision, FixActionEvent, FixActionType } from './events.js';
import { writeAnalyticsEvent } from './writer.js';
/** A single finding row extracted from a review-report markdown table. */
export interface ParsedFinding {
  // Value of the table's finding_id column.
  findingId: string;
  // Which report section the row appeared under: new / persists / resolved.
  status: FindingStatus;
  // Category column value when present; otherwise falls back to the finding id.
  ruleId: string;
  // File path parsed from the location-looking cell ('' when none found).
  file: string;
  // Line number parsed from a ":<digits>" suffix in the location (0 when absent).
  line: number;
}
/**
 * Heading patterns that open a findings section, mapped to the status carried
 * by the rows of the table under that heading. Each pattern is tested against
 * a whole line, case-insensitively.
 */
const SECTION_PATTERNS: Array<{ pattern: RegExp; status: FindingStatus }> = [
  { pattern: /^##\s+.*\bnew\b/i, status: 'new' },
  { pattern: /^##\s+.*\bpersists\b/i, status: 'persists' },
  { pattern: /^##\s+.*\bresolved\b/i, status: 'resolved' },
];
export function parseFindingsFromReport(reportContent: string): ParsedFinding[] {
const lines = reportContent.split('\n');
const findings: ParsedFinding[] = [];
let currentStatus: FindingStatus | null = null;
let columnIndices: TableColumnIndices | null = null;
let headerParsed = false;
for (const line of lines) {
const sectionMatch = matchSection(line);
if (sectionMatch) {
currentStatus = sectionMatch;
columnIndices = null;
headerParsed = false;
continue;
}
if (line.startsWith('## ')) {
currentStatus = null;
columnIndices = null;
headerParsed = false;
continue;
}
if (!currentStatus) continue;
const trimmed = line.trim();
if (!trimmed.startsWith('|')) continue;
if (isSeparatorRow(trimmed)) continue;
if (!headerParsed) {
columnIndices = detectColumnIndices(trimmed);
headerParsed = true;
continue;
}
if (!columnIndices || columnIndices.findingId < 0) continue;
const finding = parseTableRow(line, currentStatus, columnIndices);
if (finding) {
findings.push(finding);
}
}
return findings;
}
/**
 * Pull the overall review decision out of a report.
 *
 * Looks for a "## Result: X" (or Japanese "## 結果: X") heading.
 * Returns 'reject' when X is REJECT (case-insensitive), 'approve' for any
 * other word, and null when no such heading exists.
 */
export function extractDecisionFromReport(reportContent: string): FindingDecision | null {
  const word = reportContent.match(/^##\s+(?:結果|Result)\s*:\s*(\w+)/m)?.[1];
  if (!word) return null;
  if (word.toUpperCase() === 'REJECT') return 'reject';
  return 'approve';
}
/** Return the finding status for a section-heading line, or null when none match. */
function matchSection(line: string): FindingStatus | null {
  const hit = SECTION_PATTERNS.find(({ pattern }) => pattern.test(line));
  return hit ? hit.status : null;
}
/**
 * True for a markdown table separator row such as `|---|---|` or
 * `| :--- | ---: |`.
 *
 * The character class includes ':' because GFM tables use alignment colons
 * in the delimiter row; without it such rows were treated as data rows and
 * produced bogus findings.
 */
function isSeparatorRow(trimmed: string): boolean {
  return /^\|[\s:-]+\|/.test(trimmed);
}
/**
 * Zero-based cell indices of the columns of interest in a findings table.
 * An index of -1 means the column was absent from the header row
 * (findIndex's not-found result).
 */
interface TableColumnIndices {
  findingId: number;
  category: number;
}
/**
 * Locate the finding_id and category columns in a table header row.
 * Comparison is case-insensitive; returns -1 for an absent column.
 */
function detectColumnIndices(headerRow: string): TableColumnIndices {
  const headers = headerRow
    .split('|')
    .map((cell) => cell.trim().toLowerCase())
    .filter((cell) => cell.length > 0);
  return {
    findingId: headers.indexOf('finding_id'),
    category: headers.findIndex((cell) => cell === 'category' || cell === 'カテゴリ'),
  };
}
/**
 * Parse one markdown table data row into a finding.
 *
 * Cells are split on '|' and only the empty edge artifacts of the split
 * are removed, so an EMPTY INTERIOR CELL no longer shifts the indices of
 * the columns after it (the previous filter(Boolean) dropped interior
 * empties and misaligned rows against the header-derived indices).
 *
 * @param line Raw (untrimmed) table row
 * @param status Status of the section the row appeared under
 * @param indices Column indices detected from the section's header row
 * @returns The parsed finding, or null when the finding_id cell is missing/empty
 */
function parseTableRow(
  line: string,
  status: FindingStatus,
  indices: TableColumnIndices,
): ParsedFinding | null {
  const cells = line.split('|').map((c) => c.trim());
  // split('|') on "| a | b |" yields '' at both edges; drop those only.
  if (cells.length > 0 && cells[0] === '') cells.shift();
  if (cells.length > 0 && cells[cells.length - 1] === '') cells.pop();
  if (cells.length <= indices.findingId) return null;
  const findingId = cells[indices.findingId];
  if (!findingId) return null;
  const categoryValue = indices.category >= 0 ? cells[indices.category] : undefined;
  // An empty category cell falls back to the finding id, like a missing column.
  const ruleId = categoryValue || findingId;
  const locationCell = findLocation(cells);
  const { file, line: lineNum } = parseLocation(locationCell);
  return { findingId, status, ruleId, file, line: lineNum };
}
/**
 * Heuristically pick the first cell that looks like a file location:
 * anything containing a path separator or a known source-file extension.
 * Returns '' when no cell qualifies.
 */
function findLocation(cells: string[]): string {
  const looksLikePath = (cell: string): boolean =>
    ['/', '.ts', '.js', '.py'].some((marker) => cell.includes(marker));
  return cells.find(looksLikePath) ?? '';
}
/**
 * Split a location cell like "`src/a.ts:42`" into a file path and line.
 * Backticks are stripped; the line defaults to 0 when there is no
 * ":<digits>" suffix.
 */
function parseLocation(location: string): { file: string; line: number } {
  const cleaned = location.replace(/`/g, '');
  const suffix = cleaned.match(/:(\d+)/)?.[1];
  return {
    file: cleaned.replace(/:\d+.*$/, '').trim(),
    line: suffix ? parseInt(suffix, 10) : 0,
  };
}
/**
 * Infer a severity from a finding id: ids containing "SEC"
 * (case-insensitive) are errors, everything else is a warning.
 */
export function inferSeverity(findingId: string): FindingSeverity {
  return findingId.toUpperCase().includes('SEC') ? 'error' : 'warning';
}
// Matches finding ids such as "SEC-001" or "SEC-NEW-abc_1":
// 2+ uppercase letters, a hyphen, an optional "NEW-" segment, then word chars/hyphens.
const FINDING_ID_PATTERN = /\b[A-Z]{2,}-(?:NEW-)?[\w-]+\b/g;
/**
 * Record a fix_action event with action 'fixed' for every finding id
 * mentioned in the fixer's response text (delegates to emitActionEvents).
 *
 * @param responseContent Raw response text scanned for finding ids
 * @param iteration Iteration number the fix happened in
 * @param runId Identifier of the current run
 * @param timestamp Event time (stored as an ISO string)
 */
export function emitFixActionEvents(
  responseContent: string,
  iteration: number,
  runId: string,
  timestamp: Date,
): void {
  emitActionEvents(responseContent, 'fixed', iteration, runId, timestamp);
}
/**
 * Record a fix_action event with action 'rebutted' for every finding id
 * mentioned in the response text (delegates to emitActionEvents).
 *
 * @param responseContent Raw response text scanned for finding ids
 * @param iteration Iteration number the rebuttal happened in
 * @param runId Identifier of the current run
 * @param timestamp Event time (stored as an ISO string)
 */
export function emitRebuttalEvents(
  responseContent: string,
  iteration: number,
  runId: string,
  timestamp: Date,
): void {
  emitActionEvents(responseContent, 'rebutted', iteration, runId, timestamp);
}
/**
 * Scan response text for finding ids and write one fix_action event per
 * distinct id found. A no-op when the text mentions no finding ids.
 */
function emitActionEvents(
  responseContent: string,
  action: FixActionType,
  iteration: number,
  runId: string,
  timestamp: Date,
): void {
  const matched = responseContent.match(FINDING_ID_PATTERN) ?? [];
  const isoTimestamp = timestamp.toISOString();
  // Set dedupes repeated mentions of the same id within one response.
  for (const findingId of new Set(matched)) {
    const event: FixActionEvent = {
      type: 'fix_action',
      findingId,
      action,
      iteration,
      runId,
      timestamp: isoTimestamp,
    };
    writeAnalyticsEvent(event);
  }
}

View File

@ -0,0 +1,82 @@
/**
* Analytics event writer JSONL append-only with date-based rotation.
*
* Writes to ~/.takt/analytics/events/YYYY-MM-DD.jsonl when analytics.enabled = true.
* Does nothing when disabled.
*/
import { appendFileSync, mkdirSync, existsSync } from 'node:fs';
import { join } from 'node:path';
import type { AnalyticsEvent } from './events.js';
/**
 * Singleton JSONL writer for analytics events.
 *
 * Appends one JSON line per event to <eventsDir>/YYYY-MM-DD.jsonl, where
 * the date comes from the event's own timestamp. All writes are no-ops
 * until init() has been called with enabled = true.
 */
export class AnalyticsWriter {
  private static instance: AnalyticsWriter | null = null;
  private enabled = false;
  private eventsDir: string | null = null;

  private constructor() {}

  /** Lazily create and return the process-wide instance. */
  static getInstance(): AnalyticsWriter {
    if (!AnalyticsWriter.instance) {
      AnalyticsWriter.instance = new AnalyticsWriter();
    }
    return AnalyticsWriter.instance;
  }

  /** Drop the singleton so the next getInstance() starts fresh (for tests). */
  static resetInstance(): void {
    AnalyticsWriter.instance = null;
  }

  /**
   * Initialize writer.
   * @param enabled Whether analytics collection is active
   * @param eventsDir Absolute path to the events directory (e.g. ~/.takt/analytics/events)
   */
  init(enabled: boolean, eventsDir: string): void {
    this.enabled = enabled;
    this.eventsDir = eventsDir;
    if (this.enabled) {
      // mkdirSync with recursive:true is a no-op when the directory already
      // exists, so the previous existsSync pre-check was redundant and a
      // TOCTOU race (dir could appear/vanish between check and create).
      mkdirSync(this.eventsDir, { recursive: true });
    }
  }

  /** Whether events are currently being recorded. */
  isEnabled(): boolean {
    return this.enabled;
  }

  /** Append an analytics event to the current day's JSONL file */
  write(event: AnalyticsEvent): void {
    if (!this.enabled || !this.eventsDir) {
      return;
    }
    const filePath = join(this.eventsDir, `${formatDate(event.timestamp)}.jsonl`);
    appendFileSync(filePath, JSON.stringify(event) + '\n', 'utf-8');
  }
}
/** Extract the YYYY-MM-DD date portion of an ISO-8601 timestamp string. */
function formatDate(isoTimestamp: string): string {
  const DATE_LENGTH = 10; // 'YYYY-MM-DD'.length
  return isoTimestamp.substring(0, DATE_LENGTH);
}
// ---- Module-level convenience functions ----
// Thin wrappers over the AnalyticsWriter singleton so call sites don't
// need to reference the class directly.

/** Initialize the singleton writer (see AnalyticsWriter.init). */
export function initAnalyticsWriter(enabled: boolean, eventsDir: string): void {
  AnalyticsWriter.getInstance().init(enabled, eventsDir);
}
/** Reset the singleton writer (intended for test isolation). */
export function resetAnalyticsWriter(): void {
  AnalyticsWriter.resetInstance();
}
/** Whether analytics collection is currently enabled on the singleton. */
export function isAnalyticsEnabled(): boolean {
  return AnalyticsWriter.getInstance().isEnabled();
}
/** Append an event via the singleton writer (no-op when disabled). */
export function writeAnalyticsEvent(event: AnalyticsEvent): void {
  AnalyticsWriter.getInstance().write(event);
}

View File

@ -3,6 +3,7 @@
*/ */
import { readFileSync } from 'node:fs'; import { readFileSync } from 'node:fs';
import { join } from 'node:path';
import { PieceEngine, type IterationLimitRequest, type UserInputRequest } from '../../../core/piece/index.js'; import { PieceEngine, type IterationLimitRequest, type UserInputRequest } from '../../../core/piece/index.js';
import type { PieceConfig } from '../../../core/models/index.js'; import type { PieceConfig } from '../../../core/models/index.js';
import type { PieceExecutionResult, PieceExecutionOptions } from './types.js'; import type { PieceExecutionResult, PieceExecutionOptions } from './types.js';
@ -72,6 +73,17 @@ import { buildRunPaths } from '../../../core/piece/run/run-paths.js';
import { resolveMovementProviderModel } from '../../../core/piece/provider-resolution.js'; import { resolveMovementProviderModel } from '../../../core/piece/provider-resolution.js';
import { resolveRuntimeConfig } from '../../../core/runtime/runtime-environment.js'; import { resolveRuntimeConfig } from '../../../core/runtime/runtime-environment.js';
import { writeFileAtomic, ensureDir } from '../../../infra/config/index.js'; import { writeFileAtomic, ensureDir } from '../../../infra/config/index.js';
import { getGlobalConfigDir } from '../../../infra/config/paths.js';
import {
initAnalyticsWriter,
writeAnalyticsEvent,
parseFindingsFromReport,
extractDecisionFromReport,
inferSeverity,
emitFixActionEvents,
emitRebuttalEvents,
} from '../../analytics/index.js';
import type { MovementResultEvent, ReviewFindingEvent } from '../../analytics/index.js';
const log = createLogger('piece'); const log = createLogger('piece');
@ -319,7 +331,7 @@ export async function executePiece(
const isWorktree = cwd !== projectCwd; const isWorktree = cwd !== projectCwd;
const globalConfig = resolvePieceConfigValues( const globalConfig = resolvePieceConfigValues(
projectCwd, projectCwd,
['notificationSound', 'notificationSoundEvents', 'provider', 'runtime', 'preventSleep', 'model', 'observability'], ['notificationSound', 'notificationSoundEvents', 'provider', 'runtime', 'preventSleep', 'model', 'observability', 'analytics'],
); );
const shouldNotify = globalConfig.notificationSound !== false; const shouldNotify = globalConfig.notificationSound !== false;
const notificationSoundEvents = globalConfig.notificationSoundEvents; const notificationSoundEvents = globalConfig.notificationSoundEvents;
@ -340,6 +352,11 @@ export async function executePiece(
enabled: isProviderEventsEnabled(globalConfig), enabled: isProviderEventsEnabled(globalConfig),
}); });
const analyticsEnabled = globalConfig.analytics?.enabled === true;
const eventsDir = globalConfig.analytics?.eventsPath
?? join(getGlobalConfigDir(), 'analytics', 'events');
initAnalyticsWriter(analyticsEnabled, eventsDir);
// Prevent macOS idle sleep if configured // Prevent macOS idle sleep if configured
if (globalConfig.preventSleep) { if (globalConfig.preventSleep) {
preventSleep(); preventSleep();
@ -427,6 +444,8 @@ export async function executePiece(
let lastMovementContent: string | undefined; let lastMovementContent: string | undefined;
let lastMovementName: string | undefined; let lastMovementName: string | undefined;
let currentIteration = 0; let currentIteration = 0;
let currentMovementProvider = currentProvider;
let currentMovementModel = globalConfig.model ?? '(default)';
const phasePrompts = new Map<string, string>(); const phasePrompts = new Map<string, string>();
const movementIterations = new Map<string, number>(); const movementIterations = new Map<string, number>();
let engine: PieceEngine | null = null; let engine: PieceEngine | null = null;
@ -530,6 +549,8 @@ export async function executePiece(
}); });
const movementProvider = resolved.provider ?? currentProvider; const movementProvider = resolved.provider ?? currentProvider;
const movementModel = resolved.model ?? globalConfig.model ?? '(default)'; const movementModel = resolved.model ?? globalConfig.model ?? '(default)';
currentMovementProvider = movementProvider;
currentMovementModel = movementModel;
providerEventLogger.setMovement(step.name); providerEventLogger.setMovement(step.name);
providerEventLogger.setProvider(movementProvider); providerEventLogger.setProvider(movementProvider);
out.info(`Provider: ${movementProvider}`); out.info(`Provider: ${movementProvider}`);
@ -628,15 +649,60 @@ export async function executePiece(
}; };
appendNdjsonLine(ndjsonLogPath, record); appendNdjsonLine(ndjsonLogPath, record);
const decisionTag = (response.matchedRuleIndex != null && step.rules)
? (step.rules[response.matchedRuleIndex]?.condition ?? response.status)
: response.status;
const movementResultEvent: MovementResultEvent = {
type: 'movement_result',
movement: step.name,
provider: currentMovementProvider,
model: currentMovementModel,
decisionTag,
iteration: currentIteration,
runId: runSlug,
timestamp: response.timestamp.toISOString(),
};
writeAnalyticsEvent(movementResultEvent);
if (step.edit === true && step.name.includes('fix')) {
emitFixActionEvents(response.content, currentIteration, runSlug, response.timestamp);
}
if (step.name.includes('no_fix')) {
emitRebuttalEvents(response.content, currentIteration, runSlug, response.timestamp);
}
// Update in-memory log for pointer metadata (immutable) // Update in-memory log for pointer metadata (immutable)
sessionLog = { ...sessionLog, iterations: sessionLog.iterations + 1 }; sessionLog = { ...sessionLog, iterations: sessionLog.iterations + 1 };
}); });
engine.on('movement:report', (_step, filePath, fileName) => { engine.on('movement:report', (step, filePath, fileName) => {
const content = readFileSync(filePath, 'utf-8'); const content = readFileSync(filePath, 'utf-8');
out.logLine(`\n📄 Report: ${fileName}\n`); out.logLine(`\n📄 Report: ${fileName}\n`);
out.logLine(content); out.logLine(content);
if (step.edit === false) {
const decision = extractDecisionFromReport(content);
if (decision) {
const findings = parseFindingsFromReport(content);
for (const finding of findings) {
const event: ReviewFindingEvent = {
type: 'review_finding',
findingId: finding.findingId,
status: finding.status,
ruleId: finding.ruleId,
severity: inferSeverity(finding.findingId),
decision,
file: finding.file,
line: finding.line,
iteration: currentIteration,
runId: runSlug,
timestamp: new Date().toISOString(),
};
writeAnalyticsEvent(event);
}
}
}
}); });
engine.on('piece:complete', (state) => { engine.on('piece:complete', (state) => {

View File

@ -164,6 +164,11 @@ export class GlobalConfigManager {
observability: parsed.observability ? { observability: parsed.observability ? {
providerEvents: parsed.observability.provider_events, providerEvents: parsed.observability.provider_events,
} : undefined, } : undefined,
analytics: parsed.analytics ? {
enabled: parsed.analytics.enabled,
eventsPath: parsed.analytics.events_path,
retentionDays: parsed.analytics.retention_days,
} : undefined,
worktreeDir: parsed.worktree_dir, worktreeDir: parsed.worktree_dir,
autoPr: parsed.auto_pr, autoPr: parsed.auto_pr,
disabledBuiltins: parsed.disabled_builtins, disabledBuiltins: parsed.disabled_builtins,
@ -222,6 +227,15 @@ export class GlobalConfigManager {
provider_events: config.observability.providerEvents, provider_events: config.observability.providerEvents,
}; };
} }
if (config.analytics) {
const analyticsRaw: Record<string, unknown> = {};
if (config.analytics.enabled !== undefined) analyticsRaw.enabled = config.analytics.enabled;
if (config.analytics.eventsPath) analyticsRaw.events_path = config.analytics.eventsPath;
if (config.analytics.retentionDays !== undefined) analyticsRaw.retention_days = config.analytics.retentionDays;
if (Object.keys(analyticsRaw).length > 0) {
raw.analytics = analyticsRaw;
}
}
if (config.worktreeDir) { if (config.worktreeDir) {
raw.worktree_dir = config.worktreeDir; raw.worktree_dir = config.worktreeDir;
} }