Merge branch 'develop' into takt/308/improve-retry-instruct-interac
This commit is contained in:
commit
0441ba55d1
@ -1,105 +1,91 @@
|
||||
# TAKT global configuration sample
|
||||
# Location: ~/.takt/config.yaml
|
||||
|
||||
# ---- Core ----
|
||||
language: en
|
||||
default_piece: default
|
||||
log_level: info
|
||||
# =====================================
|
||||
# General settings (piece-independent)
|
||||
# =====================================
|
||||
language: en # UI language: en | ja
|
||||
log_level: info # Log level: debug | info | warn | error
|
||||
provider: claude # Default provider: claude | codex | opencode | mock
|
||||
# model: sonnet # Optional model name passed to provider
|
||||
|
||||
# ---- Provider ----
|
||||
# provider: claude | codex | opencode | mock
|
||||
provider: claude
|
||||
# Execution control
|
||||
# worktree_dir: ~/takt-worktrees # Base directory for shared clone execution
|
||||
# auto_pr: false # Auto-create PR after worktree execution
|
||||
branch_name_strategy: ai # Branch strategy: romaji | ai
|
||||
concurrency: 2 # Concurrent task execution for takt run (1-10)
|
||||
# task_poll_interval_ms: 500 # Polling interval in ms during takt run (100-5000)
|
||||
# prevent_sleep: false # Prevent macOS idle sleep while running
|
||||
|
||||
# Model (optional)
|
||||
# Claude examples: opus, sonnet, haiku
|
||||
# Codex examples: gpt-5.2-codex, gpt-5.1-codex
|
||||
# OpenCode format: provider/model
|
||||
# model: sonnet
|
||||
|
||||
# Per-persona provider override
|
||||
# persona_providers:
|
||||
# coder: codex
|
||||
# reviewer: claude
|
||||
|
||||
# Provider-specific movement permission policy
|
||||
# Priority:
|
||||
# 1) project provider_profiles override
|
||||
# 2) global provider_profiles override
|
||||
# 3) project provider_profiles default
|
||||
# 4) global provider_profiles default
|
||||
# 5) movement.required_permission_mode (minimum floor)
|
||||
# provider_profiles:
|
||||
# codex:
|
||||
# default_permission_mode: full
|
||||
# movement_permission_overrides:
|
||||
# ai_review: readonly
|
||||
# claude:
|
||||
# default_permission_mode: edit
|
||||
|
||||
# Provider-specific runtime options
|
||||
# provider_options:
|
||||
# codex:
|
||||
# network_access: true
|
||||
# claude:
|
||||
# sandbox:
|
||||
# allow_unsandboxed_commands: true
|
||||
|
||||
# ---- API Keys ----
|
||||
# Environment variables take priority:
|
||||
# TAKT_ANTHROPIC_API_KEY / TAKT_OPENAI_API_KEY / TAKT_OPENCODE_API_KEY
|
||||
# anthropic_api_key: ""
|
||||
# openai_api_key: ""
|
||||
# opencode_api_key: ""
|
||||
|
||||
# ---- Runtime ----
|
||||
# Global runtime preparation (piece_config.runtime overrides this)
|
||||
# runtime:
|
||||
# prepare:
|
||||
# - gradle
|
||||
# - node
|
||||
|
||||
# ---- Execution ----
|
||||
# worktree_dir: ~/takt-worktrees
|
||||
# auto_pr: false
|
||||
# prevent_sleep: false
|
||||
|
||||
# ---- Run Loop ----
|
||||
# concurrency: 1
|
||||
# task_poll_interval_ms: 500
|
||||
# interactive_preview_movements: 3
|
||||
# branch_name_strategy: romaji
|
||||
|
||||
# ---- Output ----
|
||||
# minimal_output: false
|
||||
# notification_sound: true
|
||||
# notification_sound_events:
|
||||
# Output / notifications
|
||||
# minimal_output: false # Minimized output for CI logs
|
||||
# verbose: false # Verbose output mode
|
||||
# notification_sound: true # Master switch for sounds
|
||||
# notification_sound_events: # Per-event sound toggle (unset means true)
|
||||
# iteration_limit: true
|
||||
# piece_complete: true
|
||||
# piece_abort: true
|
||||
# run_complete: true
|
||||
# run_abort: true
|
||||
# observability:
|
||||
# provider_events: true
|
||||
# provider_events: false # Persist provider stream events
|
||||
|
||||
# ---- Builtins ----
|
||||
# enable_builtin_pieces: true
|
||||
# disabled_builtins:
|
||||
# - magi
|
||||
# Credentials (environment variables take priority)
|
||||
# anthropic_api_key: "sk-ant-..." # Claude API key
|
||||
# openai_api_key: "sk-..." # Codex/OpenAI API key
|
||||
# opencode_api_key: "..." # OpenCode API key
|
||||
# codex_cli_path: "/absolute/path/to/codex" # Absolute path to Codex CLI
|
||||
|
||||
# ---- Pipeline ----
|
||||
# Pipeline
|
||||
# pipeline:
|
||||
# default_branch_prefix: "takt/"
|
||||
# commit_message_template: "feat: {title} (#{issue})"
|
||||
# pr_body_template: |
|
||||
# default_branch_prefix: "takt/" # Prefix for pipeline-created branches
|
||||
# commit_message_template: "feat: {title} (#{issue})" # Commit template
|
||||
# pr_body_template: | # PR body template
|
||||
# ## Summary
|
||||
# {issue_body}
|
||||
# Closes #{issue}
|
||||
|
||||
# ---- Preferences ----
|
||||
# bookmarks_file: ~/.takt/preferences/bookmarks.yaml
|
||||
# piece_categories_file: ~/.takt/preferences/piece-categories.yaml
|
||||
# Misc
|
||||
# bookmarks_file: ~/.takt/preferences/bookmarks.yaml # Bookmark file location
|
||||
|
||||
# ---- Debug ----
|
||||
# debug:
|
||||
# enabled: false
|
||||
# log_file: ~/.takt/logs/debug.log
|
||||
# =====================================
|
||||
# Piece-related settings (global defaults)
|
||||
# =====================================
|
||||
# 1) Route provider per persona
|
||||
# persona_providers:
|
||||
# coder: codex # Run coder persona on codex
|
||||
# reviewer: claude # Run reviewer persona on claude
|
||||
|
||||
# 2) Provider options (global < project < piece)
|
||||
# provider_options:
|
||||
# codex:
|
||||
# network_access: true # Allow network access for Codex
|
||||
# opencode:
|
||||
# network_access: true # Allow network access for OpenCode
|
||||
# claude:
|
||||
# sandbox:
|
||||
# allow_unsandboxed_commands: false # true allows unsandboxed execution for listed commands
|
||||
# excluded_commands:
|
||||
# - "npm publish" # Commands excluded from sandbox
|
||||
|
||||
# 3) Movement permission policy
|
||||
# provider_profiles:
|
||||
# codex:
|
||||
# default_permission_mode: full # Base permission: readonly | edit | full
|
||||
# movement_permission_overrides:
|
||||
# ai_review: readonly # Per-movement override
|
||||
# claude:
|
||||
# default_permission_mode: edit
|
||||
|
||||
# 4) Runtime preparation before execution (recommended: enabled)
|
||||
runtime:
|
||||
prepare:
|
||||
- gradle # Prepare Gradle cache/env under .runtime
|
||||
- node # Prepare npm cache/env under .runtime
|
||||
|
||||
# 5) Piece list / categories
|
||||
# enable_builtin_pieces: true # Enable built-in pieces from builtins/{lang}/pieces
|
||||
# disabled_builtins:
|
||||
# - magi # Built-in piece names to disable
|
||||
# piece_categories_file: ~/.takt/preferences/piece-categories.yaml # Category definition file
|
||||
# interactive_preview_movements: 3 # Preview movement count in interactive mode (0-10)
|
||||
|
||||
@ -1,105 +1,91 @@
|
||||
# TAKT グローバル設定サンプル
|
||||
# 配置場所: ~/.takt/config.yaml
|
||||
|
||||
# ---- 基本 ----
|
||||
language: ja
|
||||
default_piece: default
|
||||
log_level: info
|
||||
# =====================================
|
||||
# 通常設定(ピース非依存)
|
||||
# =====================================
|
||||
language: ja # 表示言語: ja | en
|
||||
log_level: info # ログレベル: debug | info | warn | error
|
||||
provider: claude # デフォルト実行プロバイダー: claude | codex | opencode | mock
|
||||
# model: sonnet # 省略可。providerに渡すモデル名
|
||||
|
||||
# ---- プロバイダー ----
|
||||
# provider: claude | codex | opencode | mock
|
||||
provider: claude
|
||||
# 実行制御
|
||||
# worktree_dir: ~/takt-worktrees # 共有clone作成先ディレクトリ
|
||||
# auto_pr: false # worktree実行後に自動PR作成するか
|
||||
branch_name_strategy: ai # ブランチ名生成: romaji | ai
|
||||
concurrency: 2 # takt run の同時実行数(1-10)
|
||||
# task_poll_interval_ms: 500 # takt run のタスク監視間隔ms(100-5000)
|
||||
# prevent_sleep: false # macOS実行中のスリープ防止(caffeinate)
|
||||
|
||||
# モデル(任意)
|
||||
# Claude 例: opus, sonnet, haiku
|
||||
# Codex 例: gpt-5.2-codex, gpt-5.1-codex
|
||||
# OpenCode 形式: provider/model
|
||||
# model: sonnet
|
||||
|
||||
# ペルソナ別プロバイダー上書き
|
||||
# persona_providers:
|
||||
# coder: codex
|
||||
# reviewer: claude
|
||||
|
||||
# プロバイダー別 movement 権限ポリシー
|
||||
# 優先順:
|
||||
# 1) project provider_profiles override
|
||||
# 2) global provider_profiles override
|
||||
# 3) project provider_profiles default
|
||||
# 4) global provider_profiles default
|
||||
# 5) movement.required_permission_mode(下限補正)
|
||||
# provider_profiles:
|
||||
# codex:
|
||||
# default_permission_mode: full
|
||||
# movement_permission_overrides:
|
||||
# ai_review: readonly
|
||||
# claude:
|
||||
# default_permission_mode: edit
|
||||
|
||||
# プロバイダー別ランタイムオプション
|
||||
# provider_options:
|
||||
# codex:
|
||||
# network_access: true
|
||||
# claude:
|
||||
# sandbox:
|
||||
# allow_unsandboxed_commands: true
|
||||
|
||||
# ---- API キー ----
|
||||
# 環境変数が優先:
|
||||
# TAKT_ANTHROPIC_API_KEY / TAKT_OPENAI_API_KEY / TAKT_OPENCODE_API_KEY
|
||||
# anthropic_api_key: ""
|
||||
# openai_api_key: ""
|
||||
# opencode_api_key: ""
|
||||
|
||||
# ---- ランタイム ----
|
||||
# グローバルなランタイム準備(piece_config.runtime があればそちらを優先)
|
||||
# runtime:
|
||||
# prepare:
|
||||
# - gradle
|
||||
# - node
|
||||
|
||||
# ---- 実行 ----
|
||||
# worktree_dir: ~/takt-worktrees
|
||||
# auto_pr: false
|
||||
# prevent_sleep: false
|
||||
|
||||
# ---- Run Loop ----
|
||||
# concurrency: 1
|
||||
# task_poll_interval_ms: 500
|
||||
# interactive_preview_movements: 3
|
||||
# branch_name_strategy: romaji
|
||||
|
||||
# ---- 出力 ----
|
||||
# minimal_output: false
|
||||
# notification_sound: true
|
||||
# notification_sound_events:
|
||||
# 出力・通知
|
||||
# minimal_output: false # 出力を最小化(CI向け)
|
||||
# verbose: false # 詳細ログを有効化
|
||||
# notification_sound: true # 通知音全体のON/OFF
|
||||
# notification_sound_events: # イベント別通知音(未指定はtrue扱い)
|
||||
# iteration_limit: true
|
||||
# piece_complete: true
|
||||
# piece_abort: true
|
||||
# run_complete: true
|
||||
# run_abort: true
|
||||
# observability:
|
||||
# provider_events: true
|
||||
# provider_events: false # providerイベントログを記録
|
||||
|
||||
# ---- Builtins ----
|
||||
# enable_builtin_pieces: true
|
||||
# disabled_builtins:
|
||||
# - magi
|
||||
# 認証情報(環境変数優先)
|
||||
# anthropic_api_key: "sk-ant-..." # Claude APIキー
|
||||
# openai_api_key: "sk-..." # Codex APIキー
|
||||
# opencode_api_key: "..." # OpenCode APIキー
|
||||
# codex_cli_path: "/absolute/path/to/codex" # Codex CLI絶対パス
|
||||
|
||||
# ---- Pipeline ----
|
||||
# パイプライン
|
||||
# pipeline:
|
||||
# default_branch_prefix: "takt/"
|
||||
# commit_message_template: "feat: {title} (#{issue})"
|
||||
# pr_body_template: |
|
||||
# default_branch_prefix: "takt/" # pipeline作成ブランチの接頭辞
|
||||
# commit_message_template: "feat: {title} (#{issue})" # コミット文テンプレート
|
||||
# pr_body_template: | # PR本文テンプレート
|
||||
# ## Summary
|
||||
# {issue_body}
|
||||
# Closes #{issue}
|
||||
|
||||
# ---- Preferences ----
|
||||
# bookmarks_file: ~/.takt/preferences/bookmarks.yaml
|
||||
# piece_categories_file: ~/.takt/preferences/piece-categories.yaml
|
||||
# その他
|
||||
# bookmarks_file: ~/.takt/preferences/bookmarks.yaml # ブックマーク保存先
|
||||
|
||||
# ---- Debug ----
|
||||
# debug:
|
||||
# enabled: false
|
||||
# log_file: ~/.takt/logs/debug.log
|
||||
# =====================================
|
||||
# ピースにも関わる設定(global defaults)
|
||||
# =====================================
|
||||
# 1) ペルソナ単位でプロバイダーを切り替える
|
||||
# persona_providers:
|
||||
# coder: codex # coderペルソナはcodexで実行
|
||||
# reviewer: claude # reviewerペルソナはclaudeで実行
|
||||
|
||||
# 2) provider 固有オプション(global < project < piece)
|
||||
# provider_options:
|
||||
# codex:
|
||||
# network_access: true # Codex実行時のネットワークアクセス許可
|
||||
# opencode:
|
||||
# network_access: true # OpenCode実行時のネットワークアクセス許可
|
||||
# claude:
|
||||
# sandbox:
|
||||
# allow_unsandboxed_commands: false # trueで対象コマンドを非サンドボックス実行
|
||||
# excluded_commands:
|
||||
# - "npm publish" # 非サンドボックス対象コマンド
|
||||
|
||||
# 3) movement の権限ポリシー
|
||||
# provider_profiles:
|
||||
# codex:
|
||||
# default_permission_mode: full # 既定権限: readonly | edit | full
|
||||
# movement_permission_overrides:
|
||||
# ai_review: readonly # movement単位の上書き
|
||||
# claude:
|
||||
# default_permission_mode: edit
|
||||
|
||||
# 4) 実行前のランタイム準備(推奨: 有効化)
|
||||
runtime:
|
||||
prepare:
|
||||
- gradle # Gradleキャッシュ/環境を .runtime 配下に準備
|
||||
- node # npmキャッシュ/環境を .runtime 配下に準備
|
||||
|
||||
# 5) ピース一覧/カテゴリ
|
||||
# enable_builtin_pieces: true # builtins/{lang}/pieces を有効化
|
||||
# disabled_builtins:
|
||||
# - magi # 無効化するビルトインピース名
|
||||
# piece_categories_file: ~/.takt/preferences/piece-categories.yaml # カテゴリ定義ファイル
|
||||
# interactive_preview_movements: 3 # 対話モードのプレビュー件数(0-10)
|
||||
|
||||
@ -292,11 +292,11 @@ takt eject instruction plan --global
|
||||
# 各ムーブメント・フェーズの組み立て済みプロンプトをプレビュー
|
||||
takt prompt [piece]
|
||||
|
||||
# パーミッションモードを設定
|
||||
takt config
|
||||
|
||||
# ピースカテゴリをビルトインのデフォルトにリセット
|
||||
takt reset categories
|
||||
|
||||
# グローバル config をテンプレートにリセット(旧設定は .old に退避)
|
||||
takt reset config
|
||||
```
|
||||
|
||||
### おすすめピース
|
||||
@ -562,76 +562,182 @@ Claude Code はエイリアス(`opus`、`sonnet`、`haiku`、`opusplan`、`def
|
||||
|
||||
### グローバル設定
|
||||
|
||||
デフォルトのプロバイダーとモデルを `~/.takt/config.yaml` で設定:
|
||||
`~/.takt/config.yaml` のサンプルです。
|
||||
コメントで「通常設定」と「ピースにも関わる設定」を分けています。
|
||||
|
||||
```yaml
|
||||
# ~/.takt/config.yaml
|
||||
language: ja
|
||||
default_piece: default
|
||||
log_level: info
|
||||
provider: claude # デフォルトプロバイダー: claude、codex、または opencode
|
||||
model: sonnet # デフォルトモデル(オプション)
|
||||
branch_name_strategy: romaji # ブランチ名生成: 'romaji'(高速)または 'ai'(低速)
|
||||
prevent_sleep: false # macOS の実行中スリープ防止(caffeinate)
|
||||
notification_sound: true # 通知音の有効/無効
|
||||
notification_sound_events: # タイミング別の通知音制御
|
||||
iteration_limit: false
|
||||
piece_complete: true
|
||||
piece_abort: true
|
||||
run_complete: true # 未設定時は有効。false を指定すると無効
|
||||
run_abort: true # 未設定時は有効。false を指定すると無効
|
||||
concurrency: 1 # takt run の並列タスク数(1-10、デフォルト: 1 = 逐次実行)
|
||||
task_poll_interval_ms: 500 # takt run 中の新タスク検出ポーリング間隔(100-5000、デフォルト: 500)
|
||||
interactive_preview_movements: 3 # 対話モードでのムーブメントプレビュー数(0-10、デフォルト: 3)
|
||||
|
||||
# ランタイム環境デフォルト(piece_config.runtime で上書き可能)
|
||||
# runtime:
|
||||
# prepare:
|
||||
# - gradle # Gradle のキャッシュ/設定を .runtime/ に準備
|
||||
# - node # npm キャッシュを .runtime/ に準備
|
||||
# =====================================
|
||||
# 通常設定(ピース非依存)
|
||||
# =====================================
|
||||
language: ja # 表示言語: ja | en
|
||||
log_level: info # ログレベル: debug | info | warn | error
|
||||
provider: claude # デフォルト実行プロバイダー: claude | codex | opencode | mock
|
||||
# model: sonnet # 省略可。providerに渡すモデル名
|
||||
|
||||
# ペルソナ別プロバイダー設定(オプション)
|
||||
# ピースを複製せずに特定のペルソナを異なるプロバイダーにルーティング
|
||||
# 実行制御
|
||||
# worktree_dir: ~/takt-worktrees # 共有clone作成先ディレクトリ
|
||||
# auto_pr: false # worktree実行後に自動PR作成するか
|
||||
branch_name_strategy: ai # ブランチ名生成: romaji | ai
|
||||
concurrency: 2 # takt run の同時実行数(1-10)
|
||||
# task_poll_interval_ms: 500 # takt run のタスク監視間隔ms(100-5000)
|
||||
# prevent_sleep: false # macOS実行中のスリープ防止(caffeinate)
|
||||
|
||||
# 出力・通知
|
||||
# minimal_output: false # 出力を最小化(CI向け)
|
||||
# verbose: false # 詳細ログを有効化
|
||||
# notification_sound: true # 通知音全体のON/OFF
|
||||
# notification_sound_events: # イベント別通知音(未指定はtrue扱い)
|
||||
# iteration_limit: true
|
||||
# piece_complete: true
|
||||
# piece_abort: true
|
||||
# run_complete: true
|
||||
# run_abort: true
|
||||
# observability:
|
||||
# provider_events: false # providerイベントログを記録
|
||||
|
||||
# 認証情報(環境変数優先)
|
||||
# anthropic_api_key: "sk-ant-..." # Claude APIキー
|
||||
# openai_api_key: "sk-..." # Codex APIキー
|
||||
# opencode_api_key: "..." # OpenCode APIキー
|
||||
# codex_cli_path: "/absolute/path/to/codex" # Codex CLI絶対パス
|
||||
|
||||
# パイプライン
|
||||
# pipeline:
|
||||
# default_branch_prefix: "takt/" # pipeline作成ブランチの接頭辞
|
||||
# commit_message_template: "feat: {title} (#{issue})" # コミット文テンプレート
|
||||
# pr_body_template: | # PR本文テンプレート
|
||||
# ## Summary
|
||||
# {issue_body}
|
||||
# Closes #{issue}
|
||||
|
||||
# その他
|
||||
# bookmarks_file: ~/.takt/preferences/bookmarks.yaml # ブックマーク保存先
|
||||
|
||||
# =====================================
|
||||
# ピースにも関わる設定(global defaults)
|
||||
# =====================================
|
||||
# 1) ペルソナ単位でプロバイダーを切り替える
|
||||
# persona_providers:
|
||||
# coder: codex # coder を Codex で実行
|
||||
# ai-antipattern-reviewer: claude # レビュアーは Claude のまま
|
||||
# coder: codex # coderペルソナはcodexで実行
|
||||
# reviewer: claude # reviewerペルソナはclaudeで実行
|
||||
|
||||
# 2) provider 固有オプション(global < project < piece)
|
||||
# provider_options:
|
||||
# codex:
|
||||
# network_access: true # Codex実行時のネットワークアクセス許可
|
||||
# opencode:
|
||||
# network_access: true # OpenCode実行時のネットワークアクセス許可
|
||||
# claude:
|
||||
# sandbox:
|
||||
# allow_unsandboxed_commands: false # trueで対象コマンドを非サンドボックス実行
|
||||
# excluded_commands:
|
||||
# - "npm publish" # 非サンドボックス対象コマンド
|
||||
|
||||
# 3) movement の権限ポリシー
|
||||
# provider_profiles:
|
||||
# codex:
|
||||
# default_permission_mode: full # 既定権限: readonly | edit | full
|
||||
# movement_permission_overrides:
|
||||
# ai_review: readonly # movement単位の上書き
|
||||
# claude:
|
||||
# default_permission_mode: edit
|
||||
|
||||
# 4) 実行前のランタイム準備(推奨: 有効化)
|
||||
runtime:
|
||||
prepare:
|
||||
- gradle # Gradleキャッシュ/環境を .runtime 配下に準備
|
||||
- node # npmキャッシュ/環境を .runtime 配下に準備
|
||||
|
||||
# 5) ピース一覧/カテゴリ
|
||||
# enable_builtin_pieces: true # builtins/{lang}/pieces を有効化
|
||||
# disabled_builtins:
|
||||
# - magi # 無効化するビルトインピース名
|
||||
# piece_categories_file: ~/.takt/preferences/piece-categories.yaml # カテゴリ定義ファイル
|
||||
# interactive_preview_movements: 3 # 対話モードのプレビュー件数(0-10)
|
||||
```
|
||||
|
||||
主要な設定項目の説明:
|
||||
|
||||
**通常設定**
|
||||
| 項目 | 説明 |
|
||||
|------|------|
|
||||
| `language` | 表示言語(`ja` / `en`) |
|
||||
| `log_level` | ログレベル(`debug` / `info` / `warn` / `error`) |
|
||||
| `provider` | デフォルト実行プロバイダー(`claude` / `codex` / `opencode` / `mock`) |
|
||||
| `model` | モデル名(provider にそのまま渡される) |
|
||||
| `auto_pr` | worktree 実行後のPR作成挙動 |
|
||||
| `concurrency` | `takt run` の同時実行数(1-10) |
|
||||
| `task_poll_interval_ms` | `takt run` のタスク監視間隔(100-5000ms) |
|
||||
| `minimal_output` | CI向けの簡易出力モード |
|
||||
| `verbose` | 詳細ログ出力 |
|
||||
| `notification_sound` / `notification_sound_events` | 通知音のON/OFFとイベント別制御 |
|
||||
| `pipeline.*` | pipeline 実行時のブランチ/コミット/PRテンプレート |
|
||||
|
||||
**ピースにも関わる設定**
|
||||
| 項目 | 説明 |
|
||||
|------|------|
|
||||
| `persona_providers` | ペルソナ単位の provider 上書き |
|
||||
| `provider_options` | provider固有オプション(例: `codex.network_access`、`claude.sandbox.*`) |
|
||||
| `provider_profiles` | movement ごとの permission mode 解決ルール |
|
||||
| `runtime.prepare` | 実行前の環境準備(`gradle` / `node` / 任意スクリプト) |
|
||||
| `enable_builtin_pieces` / `disabled_builtins` | ビルトインピースの有効化/除外 |
|
||||
| `piece_categories_file` | ピースカテゴリ定義ファイルの場所 |
|
||||
| `interactive_preview_movements` | 対話モードで表示する movement プレビュー数 |
|
||||
|
||||
### プロジェクトローカル設定
|
||||
|
||||
`.takt/config.yaml` のサンプルです。
|
||||
チーム/リポジトリごとの既定値を置く用途です。
|
||||
|
||||
```yaml
|
||||
# .takt/config.yaml
|
||||
|
||||
# =====================================
|
||||
# 通常設定(ピース非依存)
|
||||
# =====================================
|
||||
piece: default # このプロジェクトで使う既定ピース名
|
||||
provider: claude # プロジェクト既定プロバイダー: claude | codex | opencode | mock
|
||||
# verbose: false # このプロジェクトだけ詳細ログを有効化する場合
|
||||
# auto_pr: false # worktree実行後に自動PR作成するか
|
||||
|
||||
# =====================================
|
||||
# ピースにも関わる設定(project overrides)
|
||||
# =====================================
|
||||
# provider_options:
|
||||
# codex:
|
||||
# network_access: true # グローバル設定をこのプロジェクトで上書き
|
||||
# claude:
|
||||
# sandbox:
|
||||
# allow_unsandboxed_commands: false
|
||||
# excluded_commands:
|
||||
# - "npm publish"
|
||||
|
||||
# プロバイダー別パーミッションプロファイル(オプション)
|
||||
# 優先順: project override → global override → project default → global default → required_permission_mode(下限)
|
||||
# provider_profiles:
|
||||
# codex:
|
||||
# default_permission_mode: full
|
||||
# movement_permission_overrides:
|
||||
# ai_review: readonly
|
||||
# claude:
|
||||
# default_permission_mode: edit
|
||||
|
||||
# API Key 設定(オプション)
|
||||
# 環境変数 TAKT_ANTHROPIC_API_KEY / TAKT_OPENAI_API_KEY / TAKT_OPENCODE_API_KEY で上書き可能
|
||||
anthropic_api_key: sk-ant-... # Claude (Anthropic) を使う場合
|
||||
# openai_api_key: sk-... # Codex (OpenAI) を使う場合
|
||||
# opencode_api_key: ... # OpenCode を使う場合
|
||||
|
||||
# Codex CLI パスの上書き(オプション)
|
||||
# Codex SDK が使用する CLI バイナリを上書き(実行可能ファイルの絶対パスを指定)
|
||||
# 環境変数 TAKT_CODEX_CLI_PATH で上書き可能
|
||||
# codex_cli_path: /usr/local/bin/codex
|
||||
|
||||
# ビルトインピースのフィルタリング(オプション)
|
||||
# builtin_pieces_enabled: true # false でビルトイン全体を無効化
|
||||
# disabled_builtins: [magi, passthrough] # 特定のビルトインピースを無効化
|
||||
|
||||
# パイプライン実行設定(オプション)
|
||||
# ブランチ名、コミットメッセージ、PRの本文をカスタマイズできます。
|
||||
# pipeline:
|
||||
# default_branch_prefix: "takt/"
|
||||
# commit_message_template: "feat: {title} (#{issue})"
|
||||
# pr_body_template: |
|
||||
# ## Summary
|
||||
# {issue_body}
|
||||
# Closes #{issue}
|
||||
```
|
||||
|
||||
プロジェクトローカルで使える主な項目:
|
||||
|
||||
| 項目 | 説明 |
|
||||
|------|------|
|
||||
| `piece` | プロジェクト既定のピース |
|
||||
| `provider` | プロジェクト既定のプロバイダー |
|
||||
| `verbose` | ローカル詳細ログ |
|
||||
| `auto_pr` | ローカル既定のPR作成挙動 |
|
||||
| `provider_options` | provider固有オプションのローカル上書き |
|
||||
| `provider_profiles` | movement権限ポリシーのローカル上書き |
|
||||
|
||||
設定解決の優先順位(高 → 低):
|
||||
1. 環境変数(`TAKT_*`)
|
||||
2. `.takt/config.yaml`(プロジェクトローカル)
|
||||
3. `~/.takt/config.yaml`(グローバル)
|
||||
4. デフォルト値
|
||||
|
||||
**注意:** Codex SDK は Git 管理下のディレクトリでのみ動作します。`--skip-git-repo-check` は Codex CLI 専用です。
|
||||
|
||||
**API Key の設定方法:**
|
||||
|
||||
@ -144,14 +144,6 @@ E2Eテストを追加・変更した場合は、このドキュメントも更
|
||||
- `takt list --non-interactive --action diff --branch <branch>` で差分統計が出力されることを確認する。
|
||||
- `takt list --non-interactive --action try --branch <branch>` で変更がステージされることを確認する。
|
||||
- `takt list --non-interactive --action merge --branch <branch>` でブランチがマージされ削除されることを確認する。
|
||||
- Config permission mode(`e2e/specs/cli-config.e2e.ts`)
|
||||
- 目的: `takt config` でパーミッションモードの切り替えと永続化を確認。
|
||||
- LLM: 呼び出さない(LLM不使用の操作のみ)
|
||||
- 手順(ユーザー行動/コマンド):
|
||||
- `takt config default` を実行し、`Switched to: default` が出力されることを確認する。
|
||||
- `takt config sacrifice-my-pc` を実行し、`Switched to: sacrifice-my-pc` が出力されることを確認する。
|
||||
- `takt config sacrifice-my-pc` 実行後、`.takt/config.yaml` に `permissionMode: sacrifice-my-pc` が保存されていることを確認する。
|
||||
- `takt config invalid-mode` を実行し、`Invalid mode` が出力されることを確認する。
|
||||
- Reset categories(`e2e/specs/cli-reset-categories.e2e.ts`)
|
||||
- 目的: `takt reset categories` でカテゴリオーバーレイのリセットを確認。
|
||||
- LLM: 呼び出さない(LLM不使用の操作のみ)
|
||||
@ -159,6 +151,15 @@ E2Eテストを追加・変更した場合は、このドキュメントも更
|
||||
- `takt reset categories` を実行する。
|
||||
- 出力に `reset` を含むことを確認する。
|
||||
- `$TAKT_CONFIG_DIR/preferences/piece-categories.yaml` が存在し `piece_categories: {}` を含むことを確認する。
|
||||
- Reset config(`e2e/specs/cli-reset-config.e2e.ts`)
|
||||
- 目的: `takt reset config` でグローバル設定をテンプレートへ戻し、旧設定をバックアップすることを確認。
|
||||
- LLM: 呼び出さない(LLM不使用の操作のみ)
|
||||
- 手順(ユーザー行動/コマンド):
|
||||
- `$TAKT_CONFIG_DIR/config.yaml` に任意の設定を書き込む(例: `language: ja`, `provider: mock`)。
|
||||
- `takt reset config` を実行する。
|
||||
- 出力に `reset` と `backup:` を含むことを確認する。
|
||||
- `$TAKT_CONFIG_DIR/config.yaml` がテンプレート内容(例: `branch_name_strategy: ai`, `concurrency: 2`)に置き換わっていることを確認する。
|
||||
- `$TAKT_CONFIG_DIR/` 直下に `config.yaml.YYYYMMDD-HHmmss.old` 形式のバックアップファイルが1件作成されることを確認する。
|
||||
- Export Claude Code Skill(`e2e/specs/cli-export-cc.e2e.ts`)
|
||||
- 目的: `takt export-cc` でClaude Code Skillのデプロイを確認。
|
||||
- LLM: 呼び出さない(LLM不使用の操作のみ)
|
||||
@ -168,3 +169,53 @@ E2Eテストを追加・変更した場合は、このドキュメントも更
|
||||
- 出力に `ファイルをデプロイしました` を含むことを確認する。
|
||||
- `$HOME/.claude/skills/takt/SKILL.md` が存在することを確認する。
|
||||
- `$HOME/.claude/skills/takt/pieces/` および `$HOME/.claude/skills/takt/personas/` ディレクトリが存在し、それぞれ少なくとも1ファイルを含むことを確認する。
|
||||
|
||||
## 追記シナリオ(2026-02-19)
|
||||
過去にドキュメント未反映だったシナリオを以下に追記する。
|
||||
|
||||
- Config priority(`e2e/specs/config-priority.e2e.ts`)
|
||||
- 目的: `piece` と `auto_pr` の優先順位(config/env/CLI)を検証。
|
||||
- 手順(要約):
|
||||
- `--pipeline` で `--piece` 未指定時に設定値の `piece` が使われることを確認。
|
||||
- `auto_pr` 未設定時は確認デフォルト `true` が反映されることを確認。
|
||||
- `config` と `TAKT_AUTO_PR` の優先を確認。
|
||||
- Pipeline --skip-git on local/non-git directories(`e2e/specs/pipeline-local-repo.e2e.ts`)
|
||||
- 目的: ローカルGitリポジトリおよび非Gitディレクトリで `--pipeline --skip-git` が動作することを確認。
|
||||
- Task content_file reference(`e2e/specs/task-content-file.e2e.ts`)
|
||||
- 目的: `tasks.yaml` の `content_file` 参照が解決されること、および不正参照時エラーを確認。
|
||||
- Task status persistence(`e2e/specs/task-status-persistence.e2e.ts`)
|
||||
- 目的: 成功時/失敗時の `tasks.yaml` 状態遷移(完了消込・失敗記録)を確認。
|
||||
- Run multiple tasks(`e2e/specs/run-multiple-tasks.e2e.ts`)
|
||||
- 目的: 複数pendingタスクの連続実行、途中失敗時継続、タスク空時の終了挙動を確認。
|
||||
- Session NDJSON log output(`e2e/specs/session-log.e2e.ts`)
|
||||
- 目的: NDJSONログの主要イベント(`piece_complete` / `piece_abort` 等)出力を確認。
|
||||
- Structured output rule matching(`e2e/specs/structured-output.e2e.ts`)
|
||||
- 目的: structured output によるルール判定(Phase 3)を確認。
|
||||
- Piece error handling(`e2e/specs/piece-error-handling.e2e.ts`)
|
||||
- 目的: エージェントエラー、最大反復到達、前回応答受け渡しの挙動を確認。
|
||||
- Multi-step with parallel movements(`e2e/specs/multi-step-parallel.e2e.ts`)
|
||||
- 目的: 並列ムーブメントを含む複数ステップ遷移を確認。
|
||||
- Sequential multi-step session log transitions(`e2e/specs/multi-step-sequential.e2e.ts`)
|
||||
- 目的: 逐次ステップでのセッションログ遷移を確認。
|
||||
- Cycle detection via loop_monitors(`e2e/specs/cycle-detection.e2e.ts`)
|
||||
- 目的: ループ監視設定による abort/continue の境界を確認。
|
||||
- Provider error handling(`e2e/specs/provider-error.e2e.ts`)
|
||||
- 目的: provider上書き、mockシナリオ不足時の挙動、シナリオ不在時エラーを確認。
|
||||
- Model override(`e2e/specs/model-override.e2e.ts`)
|
||||
- 目的: `--model` オプションが通常実行/`--pipeline --skip-git` で反映されることを確認。
|
||||
- Error handling edge cases(`e2e/specs/error-handling.e2e.ts`)
|
||||
- 目的: 不正引数・存在しないpiece・不正YAMLなど代表エラーケースを確認。
|
||||
- Quiet mode(`e2e/specs/quiet-mode.e2e.ts`)
|
||||
- 目的: `--quiet` でAIストリーム出力が抑制されることを確認。
|
||||
- Catalog command(`e2e/specs/cli-catalog.e2e.ts`)
|
||||
- 目的: `takt catalog` の一覧表示・型指定・不正型エラーを確認。
|
||||
- Prompt preview command(`e2e/specs/cli-prompt.e2e.ts`)
|
||||
- 目的: `takt prompt` のプレビュー出力と不正piece時エラーを確認。
|
||||
- Switch piece command(`e2e/specs/cli-switch.e2e.ts`)
|
||||
- 目的: `takt switch` の切替成功・不正piece時エラーを確認。
|
||||
- Clear sessions command(`e2e/specs/cli-clear.e2e.ts`)
|
||||
- 目的: `takt clear` でセッション情報が削除されることを確認。
|
||||
- Help command(`e2e/specs/cli-help.e2e.ts`)
|
||||
- 目的: `takt --help` と `takt run --help` の表示内容を確認。
|
||||
- Eject builtin pieces(`e2e/specs/eject.e2e.ts`)
|
||||
- 目的: `takt eject` のproject/global出力、既存時スキップ、facet個別ejectを確認。
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
provider: claude
|
||||
language: en
|
||||
log_level: info
|
||||
default_piece: default
|
||||
notification_sound: false
|
||||
notification_sound_events:
|
||||
iteration_limit: false
|
||||
|
||||
@ -1,85 +0,0 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { createIsolatedEnv, type IsolatedEnv } from '../helpers/isolated-env';
|
||||
import { runTakt } from '../helpers/takt-runner';
|
||||
import { createLocalRepo, type LocalRepo } from '../helpers/test-repo';
|
||||
|
||||
// E2E更新時は docs/testing/e2e.md も更新すること
|
||||
describe('E2E: Config command (takt config)', () => {
|
||||
let isolatedEnv: IsolatedEnv;
|
||||
let repo: LocalRepo;
|
||||
|
||||
beforeEach(() => {
|
||||
isolatedEnv = createIsolatedEnv();
|
||||
repo = createLocalRepo();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
try { repo.cleanup(); } catch { /* best-effort */ }
|
||||
try { isolatedEnv.cleanup(); } catch { /* best-effort */ }
|
||||
});
|
||||
|
||||
it('should switch to default mode with explicit argument', () => {
|
||||
// Given: a local repo with isolated env
|
||||
|
||||
// When: running takt config default
|
||||
const result = runTakt({
|
||||
args: ['config', 'default'],
|
||||
cwd: repo.path,
|
||||
env: isolatedEnv.env,
|
||||
});
|
||||
|
||||
// Then: exits successfully and outputs switched message
|
||||
expect(result.exitCode).toBe(0);
|
||||
const output = result.stdout;
|
||||
expect(output).toMatch(/Switched to: default/);
|
||||
});
|
||||
|
||||
it('should switch to sacrifice-my-pc mode with explicit argument', () => {
|
||||
// Given: a local repo with isolated env
|
||||
|
||||
// When: running takt config sacrifice-my-pc
|
||||
const result = runTakt({
|
||||
args: ['config', 'sacrifice-my-pc'],
|
||||
cwd: repo.path,
|
||||
env: isolatedEnv.env,
|
||||
});
|
||||
|
||||
// Then: exits successfully and outputs switched message
|
||||
expect(result.exitCode).toBe(0);
|
||||
const output = result.stdout;
|
||||
expect(output).toMatch(/Switched to: sacrifice-my-pc/);
|
||||
});
|
||||
|
||||
it('should persist permission mode to project config', () => {
|
||||
// Given: a local repo with isolated env
|
||||
|
||||
// When: running takt config sacrifice-my-pc
|
||||
runTakt({
|
||||
args: ['config', 'sacrifice-my-pc'],
|
||||
cwd: repo.path,
|
||||
env: isolatedEnv.env,
|
||||
});
|
||||
|
||||
// Then: .takt/config.yaml contains permissionMode: sacrifice-my-pc
|
||||
const configPath = join(repo.path, '.takt', 'config.yaml');
|
||||
const content = readFileSync(configPath, 'utf-8');
|
||||
expect(content).toMatch(/permissionMode:\s*sacrifice-my-pc/);
|
||||
});
|
||||
|
||||
it('should report error for invalid mode name', () => {
|
||||
// Given: a local repo with isolated env
|
||||
|
||||
// When: running takt config with an invalid mode
|
||||
const result = runTakt({
|
||||
args: ['config', 'invalid-mode'],
|
||||
cwd: repo.path,
|
||||
env: isolatedEnv.env,
|
||||
});
|
||||
|
||||
// Then: output contains invalid mode message
|
||||
const combined = result.stdout + result.stderr;
|
||||
expect(combined).toMatch(/Invalid mode/);
|
||||
});
|
||||
});
|
||||
48
e2e/specs/cli-reset-config.e2e.ts
Normal file
48
e2e/specs/cli-reset-config.e2e.ts
Normal file
@ -0,0 +1,48 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { readdirSync, readFileSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { createIsolatedEnv, type IsolatedEnv } from '../helpers/isolated-env';
|
||||
import { runTakt } from '../helpers/takt-runner';
|
||||
import { createLocalRepo, type LocalRepo } from '../helpers/test-repo';
|
||||
|
||||
// E2E更新時は docs/testing/e2e.md も更新すること
|
||||
describe('E2E: Reset config command (takt reset config)', () => {
|
||||
let isolatedEnv: IsolatedEnv;
|
||||
let repo: LocalRepo;
|
||||
|
||||
beforeEach(() => {
|
||||
isolatedEnv = createIsolatedEnv();
|
||||
repo = createLocalRepo();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
try { repo.cleanup(); } catch { /* best-effort */ }
|
||||
try { isolatedEnv.cleanup(); } catch { /* best-effort */ }
|
||||
});
|
||||
|
||||
it('should backup current config and replace with builtin template', () => {
|
||||
const configPath = join(isolatedEnv.taktDir, 'config.yaml');
|
||||
writeFileSync(configPath, ['language: ja', 'provider: mock'].join('\n'), 'utf-8');
|
||||
|
||||
const result = runTakt({
|
||||
args: ['reset', 'config'],
|
||||
cwd: repo.path,
|
||||
env: isolatedEnv.env,
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
const output = result.stdout;
|
||||
expect(output).toMatch(/reset/i);
|
||||
expect(output).toMatch(/backup:/i);
|
||||
|
||||
const config = readFileSync(configPath, 'utf-8');
|
||||
expect(config).toContain('language: ja');
|
||||
expect(config).toContain('branch_name_strategy: ai');
|
||||
expect(config).toContain('concurrency: 2');
|
||||
|
||||
const backups = readdirSync(isolatedEnv.taktDir).filter((name) =>
|
||||
/^config\.yaml\.\d{8}-\d{6}\.old(\.\d+)?$/.test(name),
|
||||
);
|
||||
expect(backups.length).toBe(1);
|
||||
});
|
||||
});
|
||||
152
e2e/specs/config-priority.e2e.ts
Normal file
152
e2e/specs/config-priority.e2e.ts
Normal file
@ -0,0 +1,152 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { dirname, join, resolve } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { mkdirSync, readFileSync, writeFileSync } from 'node:fs';
|
||||
import { parse as parseYaml } from 'yaml';
|
||||
import { createIsolatedEnv, updateIsolatedConfig, type IsolatedEnv } from '../helpers/isolated-env';
|
||||
import { createTestRepo, type TestRepo } from '../helpers/test-repo';
|
||||
import { runTakt } from '../helpers/takt-runner';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
function readFirstTask(repoPath: string): Record<string, unknown> {
|
||||
const tasksPath = join(repoPath, '.takt', 'tasks.yaml');
|
||||
const raw = readFileSync(tasksPath, 'utf-8');
|
||||
const parsed = parseYaml(raw) as { tasks?: Array<Record<string, unknown>> } | null;
|
||||
const first = parsed?.tasks?.[0];
|
||||
if (!first) {
|
||||
throw new Error(`No task record found in ${tasksPath}`);
|
||||
}
|
||||
return first;
|
||||
}
|
||||
|
||||
// E2E更新時は docs/testing/e2e.md も更新すること
|
||||
describe('E2E: Config priority (piece / autoPr)', () => {
|
||||
let isolatedEnv: IsolatedEnv;
|
||||
let testRepo: TestRepo;
|
||||
|
||||
beforeEach(() => {
|
||||
isolatedEnv = createIsolatedEnv();
|
||||
testRepo = createTestRepo();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
try {
|
||||
testRepo.cleanup();
|
||||
} catch {
|
||||
// best-effort
|
||||
}
|
||||
try {
|
||||
isolatedEnv.cleanup();
|
||||
} catch {
|
||||
// best-effort
|
||||
}
|
||||
});
|
||||
|
||||
it('should use configured piece in pipeline when --piece is omitted', () => {
|
||||
const configuredPiecePath = resolve(__dirname, '../fixtures/pieces/mock-single-step.yaml');
|
||||
const scenarioPath = resolve(__dirname, '../fixtures/scenarios/execute-done.json');
|
||||
const projectConfigDir = join(testRepo.path, '.takt');
|
||||
mkdirSync(projectConfigDir, { recursive: true });
|
||||
writeFileSync(
|
||||
join(projectConfigDir, 'config.yaml'),
|
||||
`piece: ${JSON.stringify(configuredPiecePath)}\n`,
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const result = runTakt({
|
||||
args: [
|
||||
'--pipeline',
|
||||
'--task', 'Pipeline run should resolve piece from config',
|
||||
'--skip-git',
|
||||
'--provider', 'mock',
|
||||
],
|
||||
cwd: testRepo.path,
|
||||
env: {
|
||||
...isolatedEnv.env,
|
||||
TAKT_MOCK_SCENARIO: scenarioPath,
|
||||
},
|
||||
timeout: 240_000,
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.stdout).toContain(`Running piece: ${configuredPiecePath}`);
|
||||
expect(result.stdout).toContain(`Piece '${configuredPiecePath}' completed`);
|
||||
}, 240_000);
|
||||
|
||||
it('should default auto_pr to true when unset in config/env', () => {
|
||||
const piecePath = resolve(__dirname, '../fixtures/pieces/mock-single-step.yaml');
|
||||
const scenarioPath = resolve(__dirname, '../fixtures/scenarios/execute-done.json');
|
||||
|
||||
const result = runTakt({
|
||||
args: [
|
||||
'--task', 'Auto PR default behavior',
|
||||
'--piece', piecePath,
|
||||
'--create-worktree', 'yes',
|
||||
'--provider', 'mock',
|
||||
],
|
||||
cwd: testRepo.path,
|
||||
env: {
|
||||
...isolatedEnv.env,
|
||||
TAKT_MOCK_SCENARIO: scenarioPath,
|
||||
},
|
||||
timeout: 240_000,
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
const task = readFirstTask(testRepo.path);
|
||||
expect(task['auto_pr']).toBe(true);
|
||||
}, 240_000);
|
||||
|
||||
it('should use auto_pr from config when set', () => {
|
||||
const piecePath = resolve(__dirname, '../fixtures/pieces/mock-single-step.yaml');
|
||||
const scenarioPath = resolve(__dirname, '../fixtures/scenarios/execute-done.json');
|
||||
updateIsolatedConfig(isolatedEnv.taktDir, { auto_pr: false });
|
||||
|
||||
const result = runTakt({
|
||||
args: [
|
||||
'--task', 'Auto PR from config',
|
||||
'--piece', piecePath,
|
||||
'--create-worktree', 'yes',
|
||||
'--provider', 'mock',
|
||||
],
|
||||
cwd: testRepo.path,
|
||||
env: {
|
||||
...isolatedEnv.env,
|
||||
TAKT_MOCK_SCENARIO: scenarioPath,
|
||||
},
|
||||
timeout: 240_000,
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
const task = readFirstTask(testRepo.path);
|
||||
expect(task['auto_pr']).toBe(false);
|
||||
}, 240_000);
|
||||
|
||||
it('should prioritize env auto_pr over config', () => {
|
||||
const piecePath = resolve(__dirname, '../fixtures/pieces/mock-single-step.yaml');
|
||||
const scenarioPath = resolve(__dirname, '../fixtures/scenarios/execute-done.json');
|
||||
updateIsolatedConfig(isolatedEnv.taktDir, { auto_pr: false });
|
||||
|
||||
const result = runTakt({
|
||||
args: [
|
||||
'--task', 'Auto PR from env override',
|
||||
'--piece', piecePath,
|
||||
'--create-worktree', 'yes',
|
||||
'--provider', 'mock',
|
||||
],
|
||||
cwd: testRepo.path,
|
||||
env: {
|
||||
...isolatedEnv.env,
|
||||
TAKT_AUTO_PR: 'true',
|
||||
TAKT_MOCK_SCENARIO: scenarioPath,
|
||||
},
|
||||
timeout: 240_000,
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
const task = readFirstTask(testRepo.path);
|
||||
expect(task['auto_pr']).toBe(true);
|
||||
}, 240_000);
|
||||
});
|
||||
111
src/__tests__/analytics-cli-commands.test.ts
Normal file
111
src/__tests__/analytics-cli-commands.test.ts
Normal file
@ -0,0 +1,111 @@
|
||||
/**
|
||||
* Tests for analytics CLI command logic — metrics review and purge.
|
||||
*
|
||||
* Tests the command action logic by calling the underlying functions
|
||||
* with appropriate parameters, verifying the integration between
|
||||
* config loading, eventsDir resolution, and the analytics functions.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import {
|
||||
computeReviewMetrics,
|
||||
formatReviewMetrics,
|
||||
parseSinceDuration,
|
||||
purgeOldEvents,
|
||||
} from '../features/analytics/index.js';
|
||||
import type { ReviewFindingEvent } from '../features/analytics/index.js';
|
||||
|
||||
describe('metrics review command logic', () => {
|
||||
let eventsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
eventsDir = join(tmpdir(), `takt-test-cli-metrics-${Date.now()}`);
|
||||
mkdirSync(eventsDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(eventsDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should compute and format metrics from resolved eventsDir', () => {
|
||||
const events: ReviewFindingEvent[] = [
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'r-1',
|
||||
severity: 'error', decision: 'reject', file: 'a.ts', line: 1, iteration: 1,
|
||||
runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
|
||||
},
|
||||
];
|
||||
writeFileSync(
|
||||
join(eventsDir, '2026-02-18.jsonl'),
|
||||
events.map((e) => JSON.stringify(e)).join('\n') + '\n',
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const durationMs = parseSinceDuration('30d');
|
||||
const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
|
||||
const result = computeReviewMetrics(eventsDir, sinceMs);
|
||||
const output = formatReviewMetrics(result);
|
||||
|
||||
expect(output).toContain('Review Metrics');
|
||||
expect(result.rejectCountsByRule.get('r-1')).toBe(1);
|
||||
});
|
||||
|
||||
it('should parse since duration and compute correct time window', () => {
|
||||
const durationMs = parseSinceDuration('7d');
|
||||
const now = new Date('2026-02-18T12:00:00Z').getTime();
|
||||
const sinceMs = now - durationMs;
|
||||
|
||||
expect(sinceMs).toBe(new Date('2026-02-11T12:00:00Z').getTime());
|
||||
});
|
||||
});
|
||||
|
||||
describe('purge command logic', () => {
|
||||
let eventsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
eventsDir = join(tmpdir(), `takt-test-cli-purge-${Date.now()}`);
|
||||
mkdirSync(eventsDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(eventsDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should purge files using eventsDir from config and retentionDays from config', () => {
|
||||
writeFileSync(join(eventsDir, '2025-12-01.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, '2026-02-18.jsonl'), '{}', 'utf-8');
|
||||
|
||||
const retentionDays = 30;
|
||||
const deleted = purgeOldEvents(eventsDir, retentionDays, new Date('2026-02-18T12:00:00Z'));
|
||||
|
||||
expect(deleted).toContain('2025-12-01.jsonl');
|
||||
expect(deleted).not.toContain('2026-02-18.jsonl');
|
||||
});
|
||||
|
||||
it('should fallback to CLI retentionDays when config has no retentionDays', () => {
|
||||
writeFileSync(join(eventsDir, '2025-01-01.jsonl'), '{}', 'utf-8');
|
||||
|
||||
const cliRetentionDays = parseInt('30', 10);
|
||||
const configRetentionDays = undefined;
|
||||
const retentionDays = configRetentionDays ?? cliRetentionDays;
|
||||
const deleted = purgeOldEvents(eventsDir, retentionDays, new Date('2026-02-18T12:00:00Z'));
|
||||
|
||||
expect(deleted).toContain('2025-01-01.jsonl');
|
||||
});
|
||||
|
||||
it('should use config retentionDays when specified', () => {
|
||||
writeFileSync(join(eventsDir, '2026-02-10.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, '2026-02-18.jsonl'), '{}', 'utf-8');
|
||||
|
||||
const cliRetentionDays = parseInt('30', 10);
|
||||
const configRetentionDays = 5;
|
||||
const retentionDays = configRetentionDays ?? cliRetentionDays;
|
||||
const deleted = purgeOldEvents(eventsDir, retentionDays, new Date('2026-02-18T12:00:00Z'));
|
||||
|
||||
expect(deleted).toContain('2026-02-10.jsonl');
|
||||
expect(deleted).not.toContain('2026-02-18.jsonl');
|
||||
});
|
||||
});
|
||||
132
src/__tests__/analytics-events.test.ts
Normal file
132
src/__tests__/analytics-events.test.ts
Normal file
@ -0,0 +1,132 @@
|
||||
/**
|
||||
* Tests for analytics event type definitions.
|
||||
*
|
||||
* Validates that event objects conform to the expected shape.
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import type {
|
||||
ReviewFindingEvent,
|
||||
FixActionEvent,
|
||||
MovementResultEvent,
|
||||
AnalyticsEvent,
|
||||
} from '../features/analytics/index.js';
|
||||
|
||||
describe('analytics event types', () => {
|
||||
it('should create a valid ReviewFindingEvent', () => {
|
||||
const event: ReviewFindingEvent = {
|
||||
type: 'review_finding',
|
||||
findingId: 'f-001',
|
||||
status: 'new',
|
||||
ruleId: 'no-console-log',
|
||||
severity: 'warning',
|
||||
decision: 'reject',
|
||||
file: 'src/main.ts',
|
||||
line: 42,
|
||||
iteration: 1,
|
||||
runId: 'run-abc',
|
||||
timestamp: '2026-02-18T10:00:00.000Z',
|
||||
};
|
||||
|
||||
expect(event.type).toBe('review_finding');
|
||||
expect(event.findingId).toBe('f-001');
|
||||
expect(event.status).toBe('new');
|
||||
expect(event.severity).toBe('warning');
|
||||
expect(event.decision).toBe('reject');
|
||||
expect(event.file).toBe('src/main.ts');
|
||||
expect(event.line).toBe(42);
|
||||
});
|
||||
|
||||
it('should create a valid FixActionEvent with fixed action', () => {
|
||||
const event: FixActionEvent = {
|
||||
type: 'fix_action',
|
||||
findingId: 'f-001',
|
||||
action: 'fixed',
|
||||
iteration: 2,
|
||||
runId: 'run-abc',
|
||||
timestamp: '2026-02-18T10:01:00.000Z',
|
||||
};
|
||||
|
||||
expect(event.type).toBe('fix_action');
|
||||
expect(event.action).toBe('fixed');
|
||||
expect(event.findingId).toBe('f-001');
|
||||
});
|
||||
|
||||
it('should create a valid FixActionEvent with rebutted action', () => {
|
||||
const event: FixActionEvent = {
|
||||
type: 'fix_action',
|
||||
findingId: 'f-002',
|
||||
action: 'rebutted',
|
||||
iteration: 3,
|
||||
runId: 'run-abc',
|
||||
timestamp: '2026-02-18T10:02:00.000Z',
|
||||
};
|
||||
|
||||
expect(event.type).toBe('fix_action');
|
||||
expect(event.action).toBe('rebutted');
|
||||
expect(event.findingId).toBe('f-002');
|
||||
});
|
||||
|
||||
it('should create a valid MovementResultEvent', () => {
|
||||
const event: MovementResultEvent = {
|
||||
type: 'movement_result',
|
||||
movement: 'implement',
|
||||
provider: 'claude',
|
||||
model: 'sonnet',
|
||||
decisionTag: 'approved',
|
||||
iteration: 3,
|
||||
runId: 'run-abc',
|
||||
timestamp: '2026-02-18T10:02:00.000Z',
|
||||
};
|
||||
|
||||
expect(event.type).toBe('movement_result');
|
||||
expect(event.movement).toBe('implement');
|
||||
expect(event.provider).toBe('claude');
|
||||
expect(event.decisionTag).toBe('approved');
|
||||
});
|
||||
|
||||
it('should discriminate event types via the type field', () => {
|
||||
const events: AnalyticsEvent[] = [
|
||||
{
|
||||
type: 'review_finding',
|
||||
findingId: 'f-001',
|
||||
status: 'new',
|
||||
ruleId: 'r-1',
|
||||
severity: 'error',
|
||||
decision: 'reject',
|
||||
file: 'a.ts',
|
||||
line: 1,
|
||||
iteration: 1,
|
||||
runId: 'r',
|
||||
timestamp: '2026-01-01T00:00:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'fix_action',
|
||||
findingId: 'f-001',
|
||||
action: 'fixed',
|
||||
iteration: 2,
|
||||
runId: 'r',
|
||||
timestamp: '2026-01-01T00:01:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'movement_result',
|
||||
movement: 'plan',
|
||||
provider: 'claude',
|
||||
model: 'opus',
|
||||
decisionTag: 'done',
|
||||
iteration: 1,
|
||||
runId: 'r',
|
||||
timestamp: '2026-01-01T00:02:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
const reviewEvents = events.filter((e) => e.type === 'review_finding');
|
||||
expect(reviewEvents).toHaveLength(1);
|
||||
|
||||
const fixEvents = events.filter((e) => e.type === 'fix_action');
|
||||
expect(fixEvents).toHaveLength(1);
|
||||
|
||||
const movementEvents = events.filter((e) => e.type === 'movement_result');
|
||||
expect(movementEvents).toHaveLength(1);
|
||||
});
|
||||
});
|
||||
344
src/__tests__/analytics-metrics.test.ts
Normal file
344
src/__tests__/analytics-metrics.test.ts
Normal file
@ -0,0 +1,344 @@
|
||||
/**
|
||||
* Tests for analytics metrics computation.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import {
|
||||
computeReviewMetrics,
|
||||
formatReviewMetrics,
|
||||
parseSinceDuration,
|
||||
} from '../features/analytics/index.js';
|
||||
import type {
|
||||
ReviewFindingEvent,
|
||||
FixActionEvent,
|
||||
MovementResultEvent,
|
||||
} from '../features/analytics/index.js';
|
||||
|
||||
describe('analytics metrics', () => {
|
||||
let eventsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
eventsDir = join(tmpdir(), `takt-test-analytics-metrics-${Date.now()}`);
|
||||
mkdirSync(eventsDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(eventsDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
function writeEvents(date: string, events: Array<ReviewFindingEvent | FixActionEvent | MovementResultEvent>): void {
|
||||
const lines = events.map((e) => JSON.stringify(e)).join('\n') + '\n';
|
||||
writeFileSync(join(eventsDir, `${date}.jsonl`), lines, 'utf-8');
|
||||
}
|
||||
|
||||
describe('computeReviewMetrics', () => {
|
||||
it('should return empty metrics when no events exist', () => {
|
||||
const sinceMs = new Date('2026-01-01T00:00:00Z').getTime();
|
||||
const metrics = computeReviewMetrics(eventsDir, sinceMs);
|
||||
|
||||
expect(metrics.reReportCounts.size).toBe(0);
|
||||
expect(metrics.roundTripRatio).toBe(0);
|
||||
expect(metrics.averageResolutionIterations).toBe(0);
|
||||
expect(metrics.rejectCountsByRule.size).toBe(0);
|
||||
expect(metrics.rebuttalResolvedRatio).toBe(0);
|
||||
});
|
||||
|
||||
it('should return empty metrics when directory does not exist', () => {
|
||||
const nonExistent = join(eventsDir, 'does-not-exist');
|
||||
const sinceMs = new Date('2026-01-01T00:00:00Z').getTime();
|
||||
const metrics = computeReviewMetrics(nonExistent, sinceMs);
|
||||
|
||||
expect(metrics.reReportCounts.size).toBe(0);
|
||||
});
|
||||
|
||||
it('should compute re-report counts for findings appearing 2+ times', () => {
|
||||
const events: ReviewFindingEvent[] = [
|
||||
{
|
||||
type: 'review_finding',
|
||||
findingId: 'f-001',
|
||||
status: 'new',
|
||||
ruleId: 'r-1',
|
||||
severity: 'error',
|
||||
decision: 'reject',
|
||||
file: 'a.ts',
|
||||
line: 1,
|
||||
iteration: 1,
|
||||
runId: 'run-1',
|
||||
timestamp: '2026-02-18T10:00:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding',
|
||||
findingId: 'f-001',
|
||||
status: 'persists',
|
||||
ruleId: 'r-1',
|
||||
severity: 'error',
|
||||
decision: 'reject',
|
||||
file: 'a.ts',
|
||||
line: 1,
|
||||
iteration: 3,
|
||||
runId: 'run-1',
|
||||
timestamp: '2026-02-18T11:00:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding',
|
||||
findingId: 'f-002',
|
||||
status: 'new',
|
||||
ruleId: 'r-2',
|
||||
severity: 'warning',
|
||||
decision: 'approve',
|
||||
file: 'b.ts',
|
||||
line: 5,
|
||||
iteration: 1,
|
||||
runId: 'run-1',
|
||||
timestamp: '2026-02-18T10:01:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
writeEvents('2026-02-18', events);
|
||||
|
||||
const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
|
||||
const metrics = computeReviewMetrics(eventsDir, sinceMs);
|
||||
|
||||
expect(metrics.reReportCounts.size).toBe(1);
|
||||
expect(metrics.reReportCounts.get('f-001')).toBe(2);
|
||||
});
|
||||
|
||||
it('should compute round-trip ratio correctly', () => {
|
||||
const events: ReviewFindingEvent[] = [
|
||||
// f-001: appears in iterations 1 and 3 → multi-iteration
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'r-1', severity: 'error',
|
||||
decision: 'reject', file: 'a.ts', line: 1, iteration: 1, runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-001', status: 'persists', ruleId: 'r-1', severity: 'error',
|
||||
decision: 'reject', file: 'a.ts', line: 1, iteration: 3, runId: 'r', timestamp: '2026-02-18T11:00:00.000Z',
|
||||
},
|
||||
// f-002: appears only in iteration 1 → single-iteration
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-002', status: 'new', ruleId: 'r-2', severity: 'warning',
|
||||
decision: 'approve', file: 'b.ts', line: 5, iteration: 1, runId: 'r', timestamp: '2026-02-18T10:01:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
writeEvents('2026-02-18', events);
|
||||
|
||||
const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
|
||||
const metrics = computeReviewMetrics(eventsDir, sinceMs);
|
||||
|
||||
// 1 out of 2 unique findings had multi-iteration → 50%
|
||||
expect(metrics.roundTripRatio).toBe(0.5);
|
||||
});
|
||||
|
||||
it('should compute average resolution iterations', () => {
|
||||
const events: ReviewFindingEvent[] = [
|
||||
// f-001: first in iteration 1, resolved in iteration 3 → 3 iterations
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'r-1', severity: 'error',
|
||||
decision: 'reject', file: 'a.ts', line: 1, iteration: 1, runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-001', status: 'resolved', ruleId: 'r-1', severity: 'error',
|
||||
decision: 'approve', file: 'a.ts', line: 1, iteration: 3, runId: 'r', timestamp: '2026-02-18T12:00:00.000Z',
|
||||
},
|
||||
// f-002: first in iteration 2, resolved in iteration 2 → 1 iteration
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-002', status: 'new', ruleId: 'r-2', severity: 'warning',
|
||||
decision: 'reject', file: 'b.ts', line: 5, iteration: 2, runId: 'r', timestamp: '2026-02-18T11:00:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-002', status: 'resolved', ruleId: 'r-2', severity: 'warning',
|
||||
decision: 'approve', file: 'b.ts', line: 5, iteration: 2, runId: 'r', timestamp: '2026-02-18T11:30:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
writeEvents('2026-02-18', events);
|
||||
|
||||
const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
|
||||
const metrics = computeReviewMetrics(eventsDir, sinceMs);
|
||||
|
||||
// (3 + 1) / 2 = 2.0
|
||||
expect(metrics.averageResolutionIterations).toBe(2);
|
||||
});
|
||||
|
||||
it('should compute reject counts by rule', () => {
|
||||
const events: ReviewFindingEvent[] = [
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'no-any',
|
||||
severity: 'error', decision: 'reject', file: 'a.ts', line: 1, iteration: 1,
|
||||
runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-002', status: 'new', ruleId: 'no-any',
|
||||
severity: 'error', decision: 'reject', file: 'b.ts', line: 2, iteration: 1,
|
||||
runId: 'r', timestamp: '2026-02-18T10:01:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-003', status: 'new', ruleId: 'no-console',
|
||||
severity: 'warning', decision: 'reject', file: 'c.ts', line: 3, iteration: 1,
|
||||
runId: 'r', timestamp: '2026-02-18T10:02:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-004', status: 'new', ruleId: 'no-any',
|
||||
severity: 'error', decision: 'approve', file: 'd.ts', line: 4, iteration: 2,
|
||||
runId: 'r', timestamp: '2026-02-18T10:03:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
writeEvents('2026-02-18', events);
|
||||
|
||||
const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
|
||||
const metrics = computeReviewMetrics(eventsDir, sinceMs);
|
||||
|
||||
expect(metrics.rejectCountsByRule.get('no-any')).toBe(2);
|
||||
expect(metrics.rejectCountsByRule.get('no-console')).toBe(1);
|
||||
});
|
||||
|
||||
it('should compute rebuttal resolved ratio', () => {
|
||||
const events: Array<ReviewFindingEvent | FixActionEvent> = [
|
||||
// f-001: rebutted, then resolved → counts toward resolved
|
||||
{
|
||||
type: 'fix_action', findingId: 'AA-NEW-f001', action: 'rebutted',
|
||||
iteration: 2, runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'AA-NEW-f001', status: 'resolved', ruleId: 'r-1',
|
||||
severity: 'warning', decision: 'approve', file: 'a.ts', line: 1,
|
||||
iteration: 3, runId: 'r', timestamp: '2026-02-18T11:00:00.000Z',
|
||||
},
|
||||
// f-002: rebutted, never resolved → not counted
|
||||
{
|
||||
type: 'fix_action', findingId: 'AA-NEW-f002', action: 'rebutted',
|
||||
iteration: 2, runId: 'r', timestamp: '2026-02-18T10:01:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'AA-NEW-f002', status: 'persists', ruleId: 'r-2',
|
||||
severity: 'error', decision: 'reject', file: 'b.ts', line: 5,
|
||||
iteration: 3, runId: 'r', timestamp: '2026-02-18T11:01:00.000Z',
|
||||
},
|
||||
// f-003: fixed (not rebutted), resolved → does not affect rebuttal metric
|
||||
{
|
||||
type: 'fix_action', findingId: 'AA-NEW-f003', action: 'fixed',
|
||||
iteration: 2, runId: 'r', timestamp: '2026-02-18T10:02:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'AA-NEW-f003', status: 'resolved', ruleId: 'r-3',
|
||||
severity: 'warning', decision: 'approve', file: 'c.ts', line: 10,
|
||||
iteration: 3, runId: 'r', timestamp: '2026-02-18T11:02:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
writeEvents('2026-02-18', events);
|
||||
|
||||
const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
|
||||
const metrics = computeReviewMetrics(eventsDir, sinceMs);
|
||||
|
||||
// 1 out of 2 rebutted findings was resolved → 50%
|
||||
expect(metrics.rebuttalResolvedRatio).toBe(0.5);
|
||||
});
|
||||
|
||||
it('should return 0 rebuttal resolved ratio when no rebutted events exist', () => {
|
||||
const events: ReviewFindingEvent[] = [
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'r-1',
|
||||
severity: 'error', decision: 'reject', file: 'a.ts', line: 1, iteration: 1,
|
||||
runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
writeEvents('2026-02-18', events);
|
||||
|
||||
const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
|
||||
const metrics = computeReviewMetrics(eventsDir, sinceMs);
|
||||
|
||||
expect(metrics.rebuttalResolvedRatio).toBe(0);
|
||||
});
|
||||
|
||||
it('should only include events after the since timestamp', () => {
|
||||
const events: ReviewFindingEvent[] = [
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-old', status: 'new', ruleId: 'r-1',
|
||||
severity: 'error', decision: 'reject', file: 'old.ts', line: 1, iteration: 1,
|
||||
runId: 'r', timestamp: '2026-02-10T10:00:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-new', status: 'new', ruleId: 'r-1',
|
||||
severity: 'error', decision: 'reject', file: 'new.ts', line: 1, iteration: 1,
|
||||
runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
|
||||
},
|
||||
];
|
||||
|
||||
// Write both events to the same date file for simplicity (old event in same file)
|
||||
writeEvents('2026-02-10', [events[0]]);
|
||||
writeEvents('2026-02-18', [events[1]]);
|
||||
|
||||
// Since Feb 15 — should only include f-new
|
||||
const sinceMs = new Date('2026-02-15T00:00:00Z').getTime();
|
||||
const metrics = computeReviewMetrics(eventsDir, sinceMs);
|
||||
|
||||
expect(metrics.rejectCountsByRule.get('r-1')).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatReviewMetrics', () => {
|
||||
it('should format empty metrics', () => {
|
||||
const metrics = computeReviewMetrics(eventsDir, 0);
|
||||
const output = formatReviewMetrics(metrics);
|
||||
|
||||
expect(output).toContain('=== Review Metrics ===');
|
||||
expect(output).toContain('(none)');
|
||||
expect(output).toContain('Round-trip ratio');
|
||||
expect(output).toContain('Average resolution iterations');
|
||||
expect(output).toContain('Rebuttal');
|
||||
});
|
||||
|
||||
it('should format metrics with data', () => {
|
||||
const events: ReviewFindingEvent[] = [
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-001', status: 'new', ruleId: 'r-1',
|
||||
severity: 'error', decision: 'reject', file: 'a.ts', line: 1, iteration: 1,
|
||||
runId: 'r', timestamp: '2026-02-18T10:00:00.000Z',
|
||||
},
|
||||
{
|
||||
type: 'review_finding', findingId: 'f-001', status: 'persists', ruleId: 'r-1',
|
||||
severity: 'error', decision: 'reject', file: 'a.ts', line: 1, iteration: 3,
|
||||
runId: 'r', timestamp: '2026-02-18T11:00:00.000Z',
|
||||
},
|
||||
];
|
||||
writeEvents('2026-02-18', events);
|
||||
|
||||
const sinceMs = new Date('2026-02-18T00:00:00Z').getTime();
|
||||
const metrics = computeReviewMetrics(eventsDir, sinceMs);
|
||||
const output = formatReviewMetrics(metrics);
|
||||
|
||||
expect(output).toContain('f-001: 2');
|
||||
expect(output).toContain('r-1: 2');
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseSinceDuration', () => {
|
||||
it('should parse "7d" to 7 days in milliseconds', () => {
|
||||
const ms = parseSinceDuration('7d');
|
||||
expect(ms).toBe(7 * 24 * 60 * 60 * 1000);
|
||||
});
|
||||
|
||||
it('should parse "30d" to 30 days in milliseconds', () => {
|
||||
const ms = parseSinceDuration('30d');
|
||||
expect(ms).toBe(30 * 24 * 60 * 60 * 1000);
|
||||
});
|
||||
|
||||
it('should parse "1d" to 1 day in milliseconds', () => {
|
||||
const ms = parseSinceDuration('1d');
|
||||
expect(ms).toBe(24 * 60 * 60 * 1000);
|
||||
});
|
||||
|
||||
it('should throw on invalid format', () => {
|
||||
expect(() => parseSinceDuration('7h')).toThrow('Invalid duration format');
|
||||
expect(() => parseSinceDuration('abc')).toThrow('Invalid duration format');
|
||||
expect(() => parseSinceDuration('')).toThrow('Invalid duration format');
|
||||
});
|
||||
});
|
||||
});
|
||||
205
src/__tests__/analytics-pieceExecution.test.ts
Normal file
205
src/__tests__/analytics-pieceExecution.test.ts
Normal file
@ -0,0 +1,205 @@
|
||||
/**
|
||||
* Tests for analytics integration in pieceExecution.
|
||||
*
|
||||
* Validates the analytics initialization logic (analytics.enabled gate)
|
||||
* and event firing for review_finding and fix_action events.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { mkdirSync, rmSync, readFileSync, existsSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { resetAnalyticsWriter } from '../features/analytics/writer.js';
|
||||
import {
|
||||
initAnalyticsWriter,
|
||||
isAnalyticsEnabled,
|
||||
writeAnalyticsEvent,
|
||||
} from '../features/analytics/index.js';
|
||||
import type {
|
||||
MovementResultEvent,
|
||||
ReviewFindingEvent,
|
||||
FixActionEvent,
|
||||
} from '../features/analytics/index.js';
|
||||
|
||||
describe('pieceExecution analytics initialization', () => {
|
||||
let testDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
testDir = join(tmpdir(), `takt-test-analytics-init-${Date.now()}`);
|
||||
mkdirSync(testDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should enable analytics when analytics.enabled=true', () => {
|
||||
const analyticsEnabled = true;
|
||||
initAnalyticsWriter(analyticsEnabled, testDir);
|
||||
expect(isAnalyticsEnabled()).toBe(true);
|
||||
});
|
||||
|
||||
it('should disable analytics when analytics.enabled=false', () => {
|
||||
const analyticsEnabled = false;
|
||||
initAnalyticsWriter(analyticsEnabled, testDir);
|
||||
expect(isAnalyticsEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it('should disable analytics when analytics is undefined', () => {
|
||||
const analytics = undefined;
|
||||
const analyticsEnabled = analytics?.enabled === true;
|
||||
initAnalyticsWriter(analyticsEnabled, testDir);
|
||||
expect(isAnalyticsEnabled()).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('movement_result event assembly', () => {
|
||||
let testDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
testDir = join(tmpdir(), `takt-test-mvt-result-${Date.now()}`);
|
||||
mkdirSync(testDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should write movement_result event with correct fields', () => {
|
||||
initAnalyticsWriter(true, testDir);
|
||||
|
||||
const event: MovementResultEvent = {
|
||||
type: 'movement_result',
|
||||
movement: 'ai_review',
|
||||
provider: 'claude',
|
||||
model: 'sonnet',
|
||||
decisionTag: 'REJECT',
|
||||
iteration: 3,
|
||||
runId: 'test-run',
|
||||
timestamp: '2026-02-18T10:00:00.000Z',
|
||||
};
|
||||
|
||||
writeAnalyticsEvent(event);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
expect(existsSync(filePath)).toBe(true);
|
||||
|
||||
const content = readFileSync(filePath, 'utf-8').trim();
|
||||
const parsed = JSON.parse(content) as MovementResultEvent;
|
||||
|
||||
expect(parsed.type).toBe('movement_result');
|
||||
expect(parsed.movement).toBe('ai_review');
|
||||
expect(parsed.decisionTag).toBe('REJECT');
|
||||
expect(parsed.iteration).toBe(3);
|
||||
expect(parsed.runId).toBe('test-run');
|
||||
});
|
||||
});
|
||||
|
||||
describe('review_finding event writing', () => {
|
||||
let testDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
testDir = join(tmpdir(), `takt-test-review-finding-${Date.now()}`);
|
||||
mkdirSync(testDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should write review_finding events to JSONL', () => {
|
||||
initAnalyticsWriter(true, testDir);
|
||||
|
||||
const event: ReviewFindingEvent = {
|
||||
type: 'review_finding',
|
||||
findingId: 'AA-001',
|
||||
status: 'new',
|
||||
ruleId: 'AA-001',
|
||||
severity: 'warning',
|
||||
decision: 'reject',
|
||||
file: 'src/foo.ts',
|
||||
line: 42,
|
||||
iteration: 2,
|
||||
runId: 'test-run',
|
||||
timestamp: '2026-02-18T10:00:00.000Z',
|
||||
};
|
||||
|
||||
writeAnalyticsEvent(event);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
const content = readFileSync(filePath, 'utf-8').trim();
|
||||
const parsed = JSON.parse(content) as ReviewFindingEvent;
|
||||
|
||||
expect(parsed.type).toBe('review_finding');
|
||||
expect(parsed.findingId).toBe('AA-001');
|
||||
expect(parsed.status).toBe('new');
|
||||
expect(parsed.decision).toBe('reject');
|
||||
});
|
||||
});
|
||||
|
||||
describe('fix_action event writing', () => {
|
||||
let testDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
testDir = join(tmpdir(), `takt-test-fix-action-${Date.now()}`);
|
||||
mkdirSync(testDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should write fix_action events with fixed action to JSONL', () => {
|
||||
initAnalyticsWriter(true, testDir);
|
||||
|
||||
const event: FixActionEvent = {
|
||||
type: 'fix_action',
|
||||
findingId: 'AA-001',
|
||||
action: 'fixed',
|
||||
iteration: 3,
|
||||
runId: 'test-run',
|
||||
timestamp: '2026-02-18T11:00:00.000Z',
|
||||
};
|
||||
|
||||
writeAnalyticsEvent(event);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
const content = readFileSync(filePath, 'utf-8').trim();
|
||||
const parsed = JSON.parse(content) as FixActionEvent;
|
||||
|
||||
expect(parsed.type).toBe('fix_action');
|
||||
expect(parsed.findingId).toBe('AA-001');
|
||||
expect(parsed.action).toBe('fixed');
|
||||
});
|
||||
|
||||
it('should write fix_action events with rebutted action to JSONL', () => {
|
||||
initAnalyticsWriter(true, testDir);
|
||||
|
||||
const event: FixActionEvent = {
|
||||
type: 'fix_action',
|
||||
findingId: 'AA-002',
|
||||
action: 'rebutted',
|
||||
iteration: 4,
|
||||
runId: 'test-run',
|
||||
timestamp: '2026-02-18T12:00:00.000Z',
|
||||
};
|
||||
|
||||
writeAnalyticsEvent(event);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
const content = readFileSync(filePath, 'utf-8').trim();
|
||||
const parsed = JSON.parse(content) as FixActionEvent;
|
||||
|
||||
expect(parsed.type).toBe('fix_action');
|
||||
expect(parsed.findingId).toBe('AA-002');
|
||||
expect(parsed.action).toBe('rebutted');
|
||||
});
|
||||
});
|
||||
108
src/__tests__/analytics-purge.test.ts
Normal file
108
src/__tests__/analytics-purge.test.ts
Normal file
@ -0,0 +1,108 @@
|
||||
/**
|
||||
* Tests for analytics purge — retention-based cleanup of JSONL files.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { existsSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { purgeOldEvents } from '../features/analytics/index.js';
|
||||
|
||||
describe('purgeOldEvents', () => {
|
||||
let eventsDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
eventsDir = join(tmpdir(), `takt-test-analytics-purge-${Date.now()}`);
|
||||
mkdirSync(eventsDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(eventsDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should delete files older than retention period', () => {
|
||||
// Given: Files from different dates
|
||||
writeFileSync(join(eventsDir, '2026-01-01.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, '2026-01-15.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, '2026-02-10.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, '2026-02-18.jsonl'), '{}', 'utf-8');
|
||||
|
||||
// When: Purge with 30-day retention from Feb 18
|
||||
const now = new Date('2026-02-18T12:00:00Z');
|
||||
const deleted = purgeOldEvents(eventsDir, 30, now);
|
||||
|
||||
// Then: Only files before Jan 19 should be deleted
|
||||
expect(deleted).toContain('2026-01-01.jsonl');
|
||||
expect(deleted).toContain('2026-01-15.jsonl');
|
||||
expect(deleted).not.toContain('2026-02-10.jsonl');
|
||||
expect(deleted).not.toContain('2026-02-18.jsonl');
|
||||
|
||||
expect(existsSync(join(eventsDir, '2026-01-01.jsonl'))).toBe(false);
|
||||
expect(existsSync(join(eventsDir, '2026-01-15.jsonl'))).toBe(false);
|
||||
expect(existsSync(join(eventsDir, '2026-02-10.jsonl'))).toBe(true);
|
||||
expect(existsSync(join(eventsDir, '2026-02-18.jsonl'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should return empty array when no files to purge', () => {
|
||||
writeFileSync(join(eventsDir, '2026-02-18.jsonl'), '{}', 'utf-8');
|
||||
|
||||
const now = new Date('2026-02-18T12:00:00Z');
|
||||
const deleted = purgeOldEvents(eventsDir, 30, now);
|
||||
|
||||
expect(deleted).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return empty array when directory does not exist', () => {
|
||||
const nonExistent = join(eventsDir, 'does-not-exist');
|
||||
const deleted = purgeOldEvents(nonExistent, 30, new Date());
|
||||
|
||||
expect(deleted).toEqual([]);
|
||||
});
|
||||
|
||||
it('should delete all files when retention is 0', () => {
|
||||
writeFileSync(join(eventsDir, '2026-02-17.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, '2026-02-18.jsonl'), '{}', 'utf-8');
|
||||
|
||||
const now = new Date('2026-02-18T12:00:00Z');
|
||||
const deleted = purgeOldEvents(eventsDir, 0, now);
|
||||
|
||||
expect(deleted).toContain('2026-02-17.jsonl');
|
||||
// The cutoff date is Feb 18, and '2026-02-18' is not < '2026-02-18'
|
||||
expect(deleted).not.toContain('2026-02-18.jsonl');
|
||||
});
|
||||
|
||||
it('should ignore non-jsonl files', () => {
|
||||
writeFileSync(join(eventsDir, '2025-01-01.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, 'README.md'), '# test', 'utf-8');
|
||||
writeFileSync(join(eventsDir, 'data.json'), '{}', 'utf-8');
|
||||
|
||||
const now = new Date('2026-02-18T12:00:00Z');
|
||||
const deleted = purgeOldEvents(eventsDir, 30, now);
|
||||
|
||||
expect(deleted).toContain('2025-01-01.jsonl');
|
||||
expect(deleted).not.toContain('README.md');
|
||||
expect(deleted).not.toContain('data.json');
|
||||
|
||||
// Non-jsonl files should still exist
|
||||
expect(existsSync(join(eventsDir, 'README.md'))).toBe(true);
|
||||
expect(existsSync(join(eventsDir, 'data.json'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle 7-day retention correctly', () => {
|
||||
writeFileSync(join(eventsDir, '2026-02-10.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, '2026-02-11.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, '2026-02-12.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, '2026-02-17.jsonl'), '{}', 'utf-8');
|
||||
writeFileSync(join(eventsDir, '2026-02-18.jsonl'), '{}', 'utf-8');
|
||||
|
||||
const now = new Date('2026-02-18T12:00:00Z');
|
||||
const deleted = purgeOldEvents(eventsDir, 7, now);
|
||||
|
||||
// Cutoff: Feb 11
|
||||
expect(deleted).toContain('2026-02-10.jsonl');
|
||||
expect(deleted).not.toContain('2026-02-11.jsonl');
|
||||
expect(deleted).not.toContain('2026-02-12.jsonl');
|
||||
expect(deleted).not.toContain('2026-02-17.jsonl');
|
||||
expect(deleted).not.toContain('2026-02-18.jsonl');
|
||||
});
|
||||
});
|
||||
350
src/__tests__/analytics-report-parser.test.ts
Normal file
350
src/__tests__/analytics-report-parser.test.ts
Normal file
@ -0,0 +1,350 @@
|
||||
/**
|
||||
* Tests for analytics report parser — extracting findings from review markdown.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { readFileSync, mkdirSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import {
|
||||
parseFindingsFromReport,
|
||||
extractDecisionFromReport,
|
||||
inferSeverity,
|
||||
emitFixActionEvents,
|
||||
emitRebuttalEvents,
|
||||
} from '../features/analytics/report-parser.js';
|
||||
import { initAnalyticsWriter } from '../features/analytics/writer.js';
|
||||
import { resetAnalyticsWriter } from '../features/analytics/writer.js';
|
||||
import type { FixActionEvent } from '../features/analytics/events.js';
|
||||
|
||||
describe('parseFindingsFromReport', () => {
|
||||
it('should extract new findings from a review report', () => {
|
||||
const report = [
|
||||
'# Review Report',
|
||||
'',
|
||||
'## Result: REJECT',
|
||||
'',
|
||||
'## Current Iteration Findings (new)',
|
||||
'| # | finding_id | Category | Location | Issue | Fix Suggestion |',
|
||||
'|---|------------|---------|------|------|--------|',
|
||||
'| 1 | AA-001 | DRY | `src/foo.ts:42` | Duplication | Extract helper |',
|
||||
'| 2 | AA-002 | Export | `src/bar.ts:10` | Unused export | Remove |',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const findings = parseFindingsFromReport(report);
|
||||
|
||||
expect(findings).toHaveLength(2);
|
||||
expect(findings[0].findingId).toBe('AA-001');
|
||||
expect(findings[0].status).toBe('new');
|
||||
expect(findings[0].ruleId).toBe('DRY');
|
||||
expect(findings[0].file).toBe('src/foo.ts');
|
||||
expect(findings[0].line).toBe(42);
|
||||
expect(findings[1].findingId).toBe('AA-002');
|
||||
expect(findings[1].status).toBe('new');
|
||||
expect(findings[1].ruleId).toBe('Export');
|
||||
expect(findings[1].file).toBe('src/bar.ts');
|
||||
expect(findings[1].line).toBe(10);
|
||||
});
|
||||
|
||||
it('should extract persists findings', () => {
|
||||
const report = [
|
||||
'## Carry-over Findings (persists)',
|
||||
'| # | finding_id | Previous Evidence | Current Evidence | Issue | Fix Suggestion |',
|
||||
'|---|------------|----------|----------|------|--------|',
|
||||
'| 1 | ARCH-001 | `src/a.ts:5` was X | `src/a.ts:5` still X | Still bad | Fix it |',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const findings = parseFindingsFromReport(report);
|
||||
|
||||
expect(findings).toHaveLength(1);
|
||||
expect(findings[0].findingId).toBe('ARCH-001');
|
||||
expect(findings[0].status).toBe('persists');
|
||||
});
|
||||
|
||||
it('should extract resolved findings', () => {
|
||||
const report = [
|
||||
'## Resolved Findings (resolved)',
|
||||
'| finding_id | Resolution Evidence |',
|
||||
'|------------|---------------------|',
|
||||
'| QA-003 | Fixed in src/c.ts |',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const findings = parseFindingsFromReport(report);
|
||||
|
||||
expect(findings).toHaveLength(1);
|
||||
expect(findings[0].findingId).toBe('QA-003');
|
||||
expect(findings[0].status).toBe('resolved');
|
||||
});
|
||||
|
||||
it('should handle mixed sections in one report', () => {
|
||||
const report = [
|
||||
'## 今回の指摘(new)',
|
||||
'| # | finding_id | カテゴリ | 場所 | 問題 | 修正案 |',
|
||||
'|---|------------|---------|------|------|--------|',
|
||||
'| 1 | AA-001 | DRY | `src/foo.ts:1` | Dup | Fix |',
|
||||
'',
|
||||
'## 継続指摘(persists)',
|
||||
'| # | finding_id | 前回根拠 | 今回根拠 | 問題 | 修正案 |',
|
||||
'|---|------------|----------|----------|------|--------|',
|
||||
'| 1 | AA-002 | Was bad | Still bad | Issue | Fix |',
|
||||
'',
|
||||
'## 解消済み(resolved)',
|
||||
'| finding_id | 解消根拠 |',
|
||||
'|------------|---------|',
|
||||
'| AA-003 | Fixed |',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const findings = parseFindingsFromReport(report);
|
||||
|
||||
expect(findings).toHaveLength(3);
|
||||
expect(findings[0]).toEqual(expect.objectContaining({ findingId: 'AA-001', status: 'new' }));
|
||||
expect(findings[1]).toEqual(expect.objectContaining({ findingId: 'AA-002', status: 'persists' }));
|
||||
expect(findings[2]).toEqual(expect.objectContaining({ findingId: 'AA-003', status: 'resolved' }));
|
||||
});
|
||||
|
||||
it('should return empty array when no finding sections exist', () => {
|
||||
const report = [
|
||||
'# Report',
|
||||
'',
|
||||
'## Summary',
|
||||
'Everything looks good.',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const findings = parseFindingsFromReport(report);
|
||||
|
||||
expect(findings).toEqual([]);
|
||||
});
|
||||
|
||||
it('should stop collecting findings when a new non-finding section starts', () => {
|
||||
const report = [
|
||||
'## Current Iteration Findings (new)',
|
||||
'| # | finding_id | Category | Location | Issue | Fix |',
|
||||
'|---|------------|---------|------|------|-----|',
|
||||
'| 1 | F-001 | Bug | `src/a.ts` | Bad | Fix |',
|
||||
'',
|
||||
'## REJECT判定条件',
|
||||
'| Condition | Result |',
|
||||
'|-----------|--------|',
|
||||
'| Has findings | Yes |',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const findings = parseFindingsFromReport(report);
|
||||
|
||||
expect(findings).toHaveLength(1);
|
||||
expect(findings[0].findingId).toBe('F-001');
|
||||
});
|
||||
|
||||
it('should skip header rows in tables', () => {
|
||||
const report = [
|
||||
'## Current Iteration Findings (new)',
|
||||
'| # | finding_id | Category | Location | Issue | Fix |',
|
||||
'|---|------------|---------|------|------|-----|',
|
||||
'| 1 | X-001 | Cat | `file.ts:5` | Problem | Solution |',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const findings = parseFindingsFromReport(report);
|
||||
|
||||
expect(findings).toHaveLength(1);
|
||||
expect(findings[0].findingId).toBe('X-001');
|
||||
});
|
||||
|
||||
it('should parse location with line number from backtick-wrapped paths', () => {
|
||||
const report = [
|
||||
'## Current Iteration Findings (new)',
|
||||
'| # | finding_id | Category | Location | Issue | Fix |',
|
||||
'|---|------------|---------|------|------|-----|',
|
||||
'| 1 | F-001 | Bug | `src/features/analytics/writer.ts:27` | Comment | Remove |',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const findings = parseFindingsFromReport(report);
|
||||
|
||||
expect(findings[0].file).toBe('src/features/analytics/writer.ts');
|
||||
expect(findings[0].line).toBe(27);
|
||||
});
|
||||
|
||||
it('should handle location with multiple line references', () => {
|
||||
const report = [
|
||||
'## Current Iteration Findings (new)',
|
||||
'| # | finding_id | Category | Location | Issue | Fix |',
|
||||
'|---|------------|---------|------|------|-----|',
|
||||
'| 1 | F-001 | Bug | `src/a.ts:10, src/b.ts:20` | Multiple | Fix |',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const findings = parseFindingsFromReport(report);
|
||||
|
||||
expect(findings[0].file).toBe('src/a.ts');
|
||||
expect(findings[0].line).toBe(10);
|
||||
});
|
||||
});
|
||||
|
||||
describe('extractDecisionFromReport', () => {
|
||||
it('should return reject when report says REJECT', () => {
|
||||
const report = '## 結果: REJECT\n\nSome content';
|
||||
expect(extractDecisionFromReport(report)).toBe('reject');
|
||||
});
|
||||
|
||||
it('should return approve when report says APPROVE', () => {
|
||||
const report = '## Result: APPROVE\n\nSome content';
|
||||
expect(extractDecisionFromReport(report)).toBe('approve');
|
||||
});
|
||||
|
||||
it('should return null when no result section is found', () => {
|
||||
const report = '# Report\n\nNo result section here.';
|
||||
expect(extractDecisionFromReport(report)).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('inferSeverity', () => {
|
||||
it('should return error for security-related finding IDs', () => {
|
||||
expect(inferSeverity('SEC-001')).toBe('error');
|
||||
expect(inferSeverity('SEC-NEW-xss')).toBe('error');
|
||||
});
|
||||
|
||||
it('should return warning for other finding IDs', () => {
|
||||
expect(inferSeverity('AA-001')).toBe('warning');
|
||||
expect(inferSeverity('QA-001')).toBe('warning');
|
||||
expect(inferSeverity('ARCH-NEW-dry')).toBe('warning');
|
||||
});
|
||||
});
|
||||
|
||||
describe('emitFixActionEvents', () => {
|
||||
let testDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
testDir = join(tmpdir(), `takt-test-emit-fix-${Date.now()}`);
|
||||
mkdirSync(testDir, { recursive: true });
|
||||
initAnalyticsWriter(true, testDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should emit fix_action events for each finding ID in response', () => {
|
||||
const timestamp = new Date('2026-02-18T12:00:00.000Z');
|
||||
|
||||
emitFixActionEvents('Fixed AA-001 and ARCH-002-barrel', 3, 'run-xyz', timestamp);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
const lines = readFileSync(filePath, 'utf-8').trim().split('\n');
|
||||
expect(lines).toHaveLength(2);
|
||||
|
||||
const event1 = JSON.parse(lines[0]) as FixActionEvent;
|
||||
expect(event1.type).toBe('fix_action');
|
||||
expect(event1.findingId).toBe('AA-001');
|
||||
expect(event1.action).toBe('fixed');
|
||||
expect(event1.iteration).toBe(3);
|
||||
expect(event1.runId).toBe('run-xyz');
|
||||
expect(event1.timestamp).toBe('2026-02-18T12:00:00.000Z');
|
||||
|
||||
const event2 = JSON.parse(lines[1]) as FixActionEvent;
|
||||
expect(event2.type).toBe('fix_action');
|
||||
expect(event2.findingId).toBe('ARCH-002-barrel');
|
||||
expect(event2.action).toBe('fixed');
|
||||
});
|
||||
|
||||
it('should not emit events when response contains no finding IDs', () => {
|
||||
const timestamp = new Date('2026-02-18T12:00:00.000Z');
|
||||
|
||||
emitFixActionEvents('No issues found, all good.', 1, 'run-abc', timestamp);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
expect(() => readFileSync(filePath, 'utf-8')).toThrow();
|
||||
});
|
||||
|
||||
it('should deduplicate repeated finding IDs', () => {
|
||||
const timestamp = new Date('2026-02-18T12:00:00.000Z');
|
||||
|
||||
emitFixActionEvents(
|
||||
'Fixed QA-001, confirmed QA-001 is resolved, also QA-001 again',
|
||||
2,
|
||||
'run-dedup',
|
||||
timestamp,
|
||||
);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
const lines = readFileSync(filePath, 'utf-8').trim().split('\n');
|
||||
expect(lines).toHaveLength(1);
|
||||
|
||||
const event = JSON.parse(lines[0]) as FixActionEvent;
|
||||
expect(event.findingId).toBe('QA-001');
|
||||
});
|
||||
|
||||
it('should match various finding ID formats', () => {
|
||||
const timestamp = new Date('2026-02-18T12:00:00.000Z');
|
||||
const response = [
|
||||
'Resolved AA-001 simple ID',
|
||||
'Fixed ARCH-NEW-dry with NEW segment',
|
||||
'Addressed SEC-002-xss with suffix',
|
||||
].join('\n');
|
||||
|
||||
emitFixActionEvents(response, 1, 'run-formats', timestamp);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
const lines = readFileSync(filePath, 'utf-8').trim().split('\n');
|
||||
expect(lines).toHaveLength(3);
|
||||
|
||||
const ids = lines.map((line) => (JSON.parse(line) as FixActionEvent).findingId);
|
||||
expect(ids).toContain('AA-001');
|
||||
expect(ids).toContain('ARCH-NEW-dry');
|
||||
expect(ids).toContain('SEC-002-xss');
|
||||
});
|
||||
});
|
||||
|
||||
describe('emitRebuttalEvents', () => {
|
||||
let testDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
testDir = join(tmpdir(), `takt-test-emit-rebuttal-${Date.now()}`);
|
||||
mkdirSync(testDir, { recursive: true });
|
||||
initAnalyticsWriter(true, testDir);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should emit fix_action events with rebutted action for finding IDs', () => {
|
||||
const timestamp = new Date('2026-02-18T12:00:00.000Z');
|
||||
|
||||
emitRebuttalEvents('Rebutting AA-001 and ARCH-002-barrel', 3, 'run-xyz', timestamp);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
const lines = readFileSync(filePath, 'utf-8').trim().split('\n');
|
||||
expect(lines).toHaveLength(2);
|
||||
|
||||
const event1 = JSON.parse(lines[0]) as FixActionEvent;
|
||||
expect(event1.type).toBe('fix_action');
|
||||
expect(event1.findingId).toBe('AA-001');
|
||||
expect(event1.action).toBe('rebutted');
|
||||
expect(event1.iteration).toBe(3);
|
||||
expect(event1.runId).toBe('run-xyz');
|
||||
|
||||
const event2 = JSON.parse(lines[1]) as FixActionEvent;
|
||||
expect(event2.type).toBe('fix_action');
|
||||
expect(event2.findingId).toBe('ARCH-002-barrel');
|
||||
expect(event2.action).toBe('rebutted');
|
||||
});
|
||||
|
||||
it('should not emit events when response contains no finding IDs', () => {
|
||||
const timestamp = new Date('2026-02-18T12:00:00.000Z');
|
||||
|
||||
emitRebuttalEvents('No findings mentioned here.', 1, 'run-abc', timestamp);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
expect(() => readFileSync(filePath, 'utf-8')).toThrow();
|
||||
});
|
||||
});
|
||||
220
src/__tests__/analytics-writer.test.ts
Normal file
220
src/__tests__/analytics-writer.test.ts
Normal file
@ -0,0 +1,220 @@
|
||||
/**
|
||||
* Tests for AnalyticsWriter — JSONL append, date rotation, ON/OFF toggle.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { existsSync, readFileSync, mkdirSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { resetAnalyticsWriter } from '../features/analytics/writer.js';
|
||||
import {
|
||||
initAnalyticsWriter,
|
||||
isAnalyticsEnabled,
|
||||
writeAnalyticsEvent,
|
||||
} from '../features/analytics/index.js';
|
||||
import type { MovementResultEvent, ReviewFindingEvent } from '../features/analytics/index.js';
|
||||
|
||||
describe('AnalyticsWriter', () => {
|
||||
let testDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
testDir = join(tmpdir(), `takt-test-analytics-writer-${Date.now()}`);
|
||||
mkdirSync(testDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
resetAnalyticsWriter();
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('ON/OFF toggle', () => {
|
||||
it('should not be enabled by default', () => {
|
||||
expect(isAnalyticsEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it('should be enabled when initialized with enabled=true', () => {
|
||||
initAnalyticsWriter(true, testDir);
|
||||
expect(isAnalyticsEnabled()).toBe(true);
|
||||
});
|
||||
|
||||
it('should not be enabled when initialized with enabled=false', () => {
|
||||
initAnalyticsWriter(false, testDir);
|
||||
expect(isAnalyticsEnabled()).toBe(false);
|
||||
});
|
||||
|
||||
it('should not write when disabled', () => {
|
||||
initAnalyticsWriter(false, testDir);
|
||||
|
||||
const event: MovementResultEvent = {
|
||||
type: 'movement_result',
|
||||
movement: 'plan',
|
||||
provider: 'claude',
|
||||
model: 'sonnet',
|
||||
decisionTag: 'done',
|
||||
iteration: 1,
|
||||
runId: 'run-1',
|
||||
timestamp: '2026-02-18T10:00:00.000Z',
|
||||
};
|
||||
|
||||
writeAnalyticsEvent(event);
|
||||
|
||||
const expectedFile = join(testDir, '2026-02-18.jsonl');
|
||||
expect(existsSync(expectedFile)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('event writing', () => {
|
||||
it('should append event to date-based JSONL file', () => {
|
||||
initAnalyticsWriter(true, testDir);
|
||||
|
||||
const event: MovementResultEvent = {
|
||||
type: 'movement_result',
|
||||
movement: 'implement',
|
||||
provider: 'claude',
|
||||
model: 'sonnet',
|
||||
decisionTag: 'approved',
|
||||
iteration: 2,
|
||||
runId: 'run-abc',
|
||||
timestamp: '2026-02-18T14:30:00.000Z',
|
||||
};
|
||||
|
||||
writeAnalyticsEvent(event);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
expect(existsSync(filePath)).toBe(true);
|
||||
|
||||
const content = readFileSync(filePath, 'utf-8').trim();
|
||||
const parsed = JSON.parse(content) as MovementResultEvent;
|
||||
expect(parsed.type).toBe('movement_result');
|
||||
expect(parsed.movement).toBe('implement');
|
||||
expect(parsed.provider).toBe('claude');
|
||||
expect(parsed.decisionTag).toBe('approved');
|
||||
});
|
||||
|
||||
it('should append multiple events to the same file', () => {
|
||||
initAnalyticsWriter(true, testDir);
|
||||
|
||||
const event1: MovementResultEvent = {
|
||||
type: 'movement_result',
|
||||
movement: 'plan',
|
||||
provider: 'claude',
|
||||
model: 'sonnet',
|
||||
decisionTag: 'done',
|
||||
iteration: 1,
|
||||
runId: 'run-1',
|
||||
timestamp: '2026-02-18T10:00:00.000Z',
|
||||
};
|
||||
|
||||
const event2: MovementResultEvent = {
|
||||
type: 'movement_result',
|
||||
movement: 'implement',
|
||||
provider: 'codex',
|
||||
model: 'o3',
|
||||
decisionTag: 'needs_fix',
|
||||
iteration: 2,
|
||||
runId: 'run-1',
|
||||
timestamp: '2026-02-18T11:00:00.000Z',
|
||||
};
|
||||
|
||||
writeAnalyticsEvent(event1);
|
||||
writeAnalyticsEvent(event2);
|
||||
|
||||
const filePath = join(testDir, '2026-02-18.jsonl');
|
||||
const lines = readFileSync(filePath, 'utf-8').trim().split('\n');
|
||||
expect(lines).toHaveLength(2);
|
||||
|
||||
const parsed1 = JSON.parse(lines[0]) as MovementResultEvent;
|
||||
const parsed2 = JSON.parse(lines[1]) as MovementResultEvent;
|
||||
expect(parsed1.movement).toBe('plan');
|
||||
expect(parsed2.movement).toBe('implement');
|
||||
});
|
||||
|
||||
it('should create separate files for different dates', () => {
|
||||
initAnalyticsWriter(true, testDir);
|
||||
|
||||
const event1: MovementResultEvent = {
|
||||
type: 'movement_result',
|
||||
movement: 'plan',
|
||||
provider: 'claude',
|
||||
model: 'sonnet',
|
||||
decisionTag: 'done',
|
||||
iteration: 1,
|
||||
runId: 'run-1',
|
||||
timestamp: '2026-02-17T23:59:00.000Z',
|
||||
};
|
||||
|
||||
const event2: MovementResultEvent = {
|
||||
type: 'movement_result',
|
||||
movement: 'implement',
|
||||
provider: 'claude',
|
||||
model: 'sonnet',
|
||||
decisionTag: 'done',
|
||||
iteration: 2,
|
||||
runId: 'run-1',
|
||||
timestamp: '2026-02-18T00:01:00.000Z',
|
||||
};
|
||||
|
||||
writeAnalyticsEvent(event1);
|
||||
writeAnalyticsEvent(event2);
|
||||
|
||||
expect(existsSync(join(testDir, '2026-02-17.jsonl'))).toBe(true);
|
||||
expect(existsSync(join(testDir, '2026-02-18.jsonl'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should write review_finding events correctly', () => {
|
||||
initAnalyticsWriter(true, testDir);
|
||||
|
||||
const event: ReviewFindingEvent = {
|
||||
type: 'review_finding',
|
||||
findingId: 'f-001',
|
||||
status: 'new',
|
||||
ruleId: 'no-any',
|
||||
severity: 'error',
|
||||
decision: 'reject',
|
||||
file: 'src/index.ts',
|
||||
line: 10,
|
||||
iteration: 1,
|
||||
runId: 'run-1',
|
||||
timestamp: '2026-03-01T08:00:00.000Z',
|
||||
};
|
||||
|
||||
writeAnalyticsEvent(event);
|
||||
|
||||
const filePath = join(testDir, '2026-03-01.jsonl');
|
||||
const content = readFileSync(filePath, 'utf-8').trim();
|
||||
const parsed = JSON.parse(content) as ReviewFindingEvent;
|
||||
expect(parsed.type).toBe('review_finding');
|
||||
expect(parsed.findingId).toBe('f-001');
|
||||
expect(parsed.ruleId).toBe('no-any');
|
||||
});
|
||||
});
|
||||
|
||||
describe('directory creation', () => {
|
||||
it('should create events directory when enabled and dir does not exist', () => {
|
||||
const nestedDir = join(testDir, 'nested', 'analytics', 'events');
|
||||
expect(existsSync(nestedDir)).toBe(false);
|
||||
|
||||
initAnalyticsWriter(true, nestedDir);
|
||||
|
||||
expect(existsSync(nestedDir)).toBe(true);
|
||||
});
|
||||
|
||||
it('should not create directory when disabled', () => {
|
||||
const nestedDir = join(testDir, 'disabled-dir', 'events');
|
||||
initAnalyticsWriter(false, nestedDir);
|
||||
|
||||
expect(existsSync(nestedDir)).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('resetInstance', () => {
|
||||
it('should reset to disabled state', () => {
|
||||
initAnalyticsWriter(true, testDir);
|
||||
expect(isAnalyticsEnabled()).toBe(true);
|
||||
|
||||
resetAnalyticsWriter();
|
||||
expect(isAnalyticsEnabled()).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -17,8 +17,17 @@ import {
|
||||
|
||||
// Mock external dependencies to isolate unit tests
|
||||
vi.mock('../infra/config/global/globalConfig.js', () => ({
|
||||
getLanguage: () => 'en',
|
||||
getBuiltinPiecesEnabled: () => true,
|
||||
loadGlobalConfig: () => ({}),
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/loadConfig.js', () => ({
|
||||
loadConfig: () => ({
|
||||
global: {
|
||||
language: 'en',
|
||||
enableBuiltinPieces: true,
|
||||
},
|
||||
project: {},
|
||||
}),
|
||||
}));
|
||||
|
||||
const mockLogError = vi.fn();
|
||||
|
||||
@ -15,7 +15,6 @@ vi.mock('../shared/ui/index.js', () => ({
|
||||
}));
|
||||
|
||||
vi.mock('../shared/prompt/index.js', () => ({
|
||||
confirm: vi.fn(() => true),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/utils/index.js', async (importOriginal) => ({
|
||||
@ -51,7 +50,6 @@ vi.mock('../features/pipeline/index.js', () => ({
|
||||
vi.mock('../features/interactive/index.js', () => ({
|
||||
interactiveMode: vi.fn(),
|
||||
selectInteractiveMode: vi.fn(() => 'assistant'),
|
||||
selectRecentSession: vi.fn(() => null),
|
||||
passthroughMode: vi.fn(),
|
||||
quietMode: vi.fn(),
|
||||
personaMode: vi.fn(),
|
||||
@ -76,7 +74,9 @@ vi.mock('../infra/task/index.js', () => ({
|
||||
|
||||
vi.mock('../infra/config/index.js', () => ({
|
||||
getPieceDescription: vi.fn(() => ({ name: 'default', description: 'test piece', pieceStructure: '', movementPreviews: [] })),
|
||||
loadGlobalConfig: vi.fn(() => ({ interactivePreviewMovements: 3 })),
|
||||
resolveConfigValue: vi.fn((_: string, key: string) => (key === 'piece' ? 'default' : false)),
|
||||
resolveConfigValues: vi.fn(() => ({ language: 'en', interactivePreviewMovements: 3, provider: 'claude' })),
|
||||
loadPersonaSessions: vi.fn(() => ({})),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/constants.js', () => ({
|
||||
@ -106,11 +106,11 @@ vi.mock('../app/cli/helpers.js', () => ({
|
||||
|
||||
import { checkGhCli, fetchIssue, formatIssueAsTask, parseIssueNumbers } from '../infra/github/issue.js';
|
||||
import { selectAndExecuteTask, determinePiece, createIssueFromTask, saveTaskFromInteractive } from '../features/tasks/index.js';
|
||||
import { interactiveMode, selectRecentSession } from '../features/interactive/index.js';
|
||||
import { loadGlobalConfig } from '../infra/config/index.js';
|
||||
import { confirm } from '../shared/prompt/index.js';
|
||||
import { interactiveMode } from '../features/interactive/index.js';
|
||||
import { resolveConfigValues, loadPersonaSessions } from '../infra/config/index.js';
|
||||
import { isDirectTask } from '../app/cli/helpers.js';
|
||||
import { executeDefaultAction } from '../app/cli/routing.js';
|
||||
import { info } from '../shared/ui/index.js';
|
||||
import type { GitHubIssue } from '../infra/github/types.js';
|
||||
|
||||
const mockCheckGhCli = vi.mocked(checkGhCli);
|
||||
@ -122,10 +122,10 @@ const mockDeterminePiece = vi.mocked(determinePiece);
|
||||
const mockCreateIssueFromTask = vi.mocked(createIssueFromTask);
|
||||
const mockSaveTaskFromInteractive = vi.mocked(saveTaskFromInteractive);
|
||||
const mockInteractiveMode = vi.mocked(interactiveMode);
|
||||
const mockSelectRecentSession = vi.mocked(selectRecentSession);
|
||||
const mockLoadGlobalConfig = vi.mocked(loadGlobalConfig);
|
||||
const mockConfirm = vi.mocked(confirm);
|
||||
const mockLoadPersonaSessions = vi.mocked(loadPersonaSessions);
|
||||
const mockResolveConfigValues = vi.mocked(resolveConfigValues);
|
||||
const mockIsDirectTask = vi.mocked(isDirectTask);
|
||||
const mockInfo = vi.mocked(info);
|
||||
const mockTaskRunnerListAllTaskItems = vi.mocked(mockListAllTaskItems);
|
||||
|
||||
function createMockIssue(number: number): GitHubIssue {
|
||||
@ -147,7 +147,6 @@ beforeEach(() => {
|
||||
// Default setup
|
||||
mockDeterminePiece.mockResolvedValue('default');
|
||||
mockInteractiveMode.mockResolvedValue({ action: 'execute', task: 'summarized task' });
|
||||
mockConfirm.mockResolvedValue(true);
|
||||
mockIsDirectTask.mockReturnValue(false);
|
||||
mockParseIssueNumbers.mockReturnValue([]);
|
||||
mockTaskRunnerListAllTaskItems.mockReturnValue([]);
|
||||
@ -480,41 +479,43 @@ describe('Issue resolution in routing', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('session selection with provider=claude', () => {
|
||||
it('should pass selected session ID to interactiveMode when provider is claude', async () => {
|
||||
describe('--continue option', () => {
|
||||
it('should load saved session and pass to interactiveMode when --continue is specified', async () => {
|
||||
// Given
|
||||
mockLoadGlobalConfig.mockReturnValue({ interactivePreviewMovements: 3, provider: 'claude' });
|
||||
mockConfirm.mockResolvedValue(true);
|
||||
mockSelectRecentSession.mockResolvedValue('session-xyz');
|
||||
mockOpts.continue = true;
|
||||
mockResolveConfigValues.mockReturnValue({ language: 'en', interactivePreviewMovements: 3, provider: 'claude' });
|
||||
mockLoadPersonaSessions.mockReturnValue({ interactive: 'saved-session-123' });
|
||||
|
||||
// When
|
||||
await executeDefaultAction();
|
||||
|
||||
// Then: selectRecentSession should be called
|
||||
expect(mockSelectRecentSession).toHaveBeenCalledWith('/test/cwd', 'en');
|
||||
// Then: loadPersonaSessions should be called with provider
|
||||
expect(mockLoadPersonaSessions).toHaveBeenCalledWith('/test/cwd', 'claude');
|
||||
|
||||
// Then: interactiveMode should receive the session ID as 4th argument
|
||||
// Then: interactiveMode should receive the saved session ID
|
||||
expect(mockInteractiveMode).toHaveBeenCalledWith(
|
||||
'/test/cwd',
|
||||
undefined,
|
||||
expect.anything(),
|
||||
'session-xyz',
|
||||
'saved-session-123',
|
||||
);
|
||||
|
||||
expect(mockConfirm).toHaveBeenCalledWith('Choose a previous session?', false);
|
||||
});
|
||||
|
||||
it('should not call selectRecentSession when user selects no in confirmation', async () => {
|
||||
it('should show message and start new session when --continue has no saved session', async () => {
|
||||
// Given
|
||||
mockLoadGlobalConfig.mockReturnValue({ interactivePreviewMovements: 3, provider: 'claude' });
|
||||
mockConfirm.mockResolvedValue(false);
|
||||
mockOpts.continue = true;
|
||||
mockResolveConfigValues.mockReturnValue({ language: 'en', interactivePreviewMovements: 3, provider: 'claude' });
|
||||
mockLoadPersonaSessions.mockReturnValue({});
|
||||
|
||||
// When
|
||||
await executeDefaultAction();
|
||||
|
||||
// Then
|
||||
expect(mockConfirm).toHaveBeenCalledWith('Choose a previous session?', false);
|
||||
expect(mockSelectRecentSession).not.toHaveBeenCalled();
|
||||
// Then: info message about no session
|
||||
expect(mockInfo).toHaveBeenCalledWith(
|
||||
'No previous assistant session found. Starting a new session.',
|
||||
);
|
||||
|
||||
// Then: interactiveMode should be called with undefined session ID
|
||||
expect(mockInteractiveMode).toHaveBeenCalledWith(
|
||||
'/test/cwd',
|
||||
undefined,
|
||||
@ -523,15 +524,12 @@ describe('Issue resolution in routing', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should not call selectRecentSession when provider is not claude', async () => {
|
||||
// Given
|
||||
mockLoadGlobalConfig.mockReturnValue({ interactivePreviewMovements: 3, provider: 'openai' });
|
||||
|
||||
it('should not load persona sessions when --continue is not specified', async () => {
|
||||
// When
|
||||
await executeDefaultAction();
|
||||
|
||||
// Then: selectRecentSession should NOT be called
|
||||
expect(mockSelectRecentSession).not.toHaveBeenCalled();
|
||||
// Then: loadPersonaSessions should NOT be called
|
||||
expect(mockLoadPersonaSessions).not.toHaveBeenCalled();
|
||||
|
||||
// Then: interactiveMode should be called with undefined session ID
|
||||
expect(mockInteractiveMode).toHaveBeenCalledWith(
|
||||
@ -543,14 +541,11 @@ describe('Issue resolution in routing', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('run session reference', () => {
|
||||
it('should not prompt run session reference in default interactive flow', async () => {
|
||||
describe('default assistant mode (no --continue)', () => {
|
||||
it('should start new session without loading saved sessions', async () => {
|
||||
await executeDefaultAction();
|
||||
|
||||
expect(mockConfirm).not.toHaveBeenCalledWith(
|
||||
"Reference a previous run's results?",
|
||||
false,
|
||||
);
|
||||
expect(mockLoadPersonaSessions).not.toHaveBeenCalled();
|
||||
expect(mockInteractiveMode).toHaveBeenCalledWith(
|
||||
'/test/cwd',
|
||||
undefined,
|
||||
|
||||
@ -66,7 +66,6 @@ vi.mock('../infra/config/index.js', () => ({
|
||||
|
||||
vi.mock('../infra/config/paths.js', () => ({
|
||||
clearPersonaSessions: vi.fn(),
|
||||
getCurrentPiece: vi.fn(() => 'default'),
|
||||
isVerboseMode: vi.fn(() => false),
|
||||
}));
|
||||
|
||||
|
||||
53
src/__tests__/config-env-overrides.test.ts
Normal file
53
src/__tests__/config-env-overrides.test.ts
Normal file
@ -0,0 +1,53 @@
|
||||
import { afterEach, describe, expect, it } from 'vitest';
|
||||
import {
|
||||
applyGlobalConfigEnvOverrides,
|
||||
applyProjectConfigEnvOverrides,
|
||||
envVarNameFromPath,
|
||||
} from '../infra/config/env/config-env-overrides.js';
|
||||
|
||||
describe('config env overrides', () => {
|
||||
const envBackup = { ...process.env };
|
||||
|
||||
afterEach(() => {
|
||||
for (const key of Object.keys(process.env)) {
|
||||
if (!(key in envBackup)) {
|
||||
delete process.env[key];
|
||||
}
|
||||
}
|
||||
for (const [key, value] of Object.entries(envBackup)) {
|
||||
process.env[key] = value;
|
||||
}
|
||||
});
|
||||
|
||||
it('should convert dotted and camelCase paths to TAKT env variable names', () => {
|
||||
expect(envVarNameFromPath('verbose')).toBe('TAKT_VERBOSE');
|
||||
expect(envVarNameFromPath('provider_options.claude.sandbox.allow_unsandboxed_commands'))
|
||||
.toBe('TAKT_PROVIDER_OPTIONS_CLAUDE_SANDBOX_ALLOW_UNSANDBOXED_COMMANDS');
|
||||
});
|
||||
|
||||
it('should apply global env overrides from generated env names', () => {
|
||||
process.env.TAKT_LOG_LEVEL = 'debug';
|
||||
process.env.TAKT_PROVIDER_OPTIONS_CLAUDE_SANDBOX_ALLOW_UNSANDBOXED_COMMANDS = 'true';
|
||||
|
||||
const raw: Record<string, unknown> = {};
|
||||
applyGlobalConfigEnvOverrides(raw);
|
||||
|
||||
expect(raw.log_level).toBe('debug');
|
||||
expect(raw.provider_options).toEqual({
|
||||
claude: {
|
||||
sandbox: {
|
||||
allow_unsandboxed_commands: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should apply project env overrides from generated env names', () => {
|
||||
process.env.TAKT_VERBOSE = 'true';
|
||||
|
||||
const raw: Record<string, unknown> = {};
|
||||
applyProjectConfigEnvOverrides(raw);
|
||||
|
||||
expect(raw.verbose).toBe(true);
|
||||
});
|
||||
});
|
||||
@ -1,5 +1,5 @@
|
||||
/**
|
||||
* Tests for takt config functions
|
||||
* Tests for config functions
|
||||
*/
|
||||
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
@ -13,7 +13,6 @@ import {
|
||||
loadPiece,
|
||||
listPieces,
|
||||
loadPersonaPromptFromPath,
|
||||
getCurrentPiece,
|
||||
setCurrentPiece,
|
||||
getProjectConfigDir,
|
||||
getBuiltinPersonasDir,
|
||||
@ -35,17 +34,19 @@ import {
|
||||
updateWorktreeSession,
|
||||
getLanguage,
|
||||
loadProjectConfig,
|
||||
isVerboseMode,
|
||||
invalidateGlobalConfigCache,
|
||||
} from '../infra/config/index.js';
|
||||
|
||||
describe('getBuiltinPiece', () => {
|
||||
it('should return builtin piece when it exists in resources', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
expect(piece).not.toBeNull();
|
||||
expect(piece!.name).toBe('default');
|
||||
});
|
||||
|
||||
it('should resolve builtin instruction_template without projectCwd', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
expect(piece).not.toBeNull();
|
||||
|
||||
const planMovement = piece!.movements.find((movement) => movement.name === 'plan');
|
||||
@ -54,15 +55,15 @@ describe('getBuiltinPiece', () => {
|
||||
});
|
||||
|
||||
it('should return null for non-existent piece names', () => {
|
||||
expect(getBuiltinPiece('nonexistent-piece')).toBeNull();
|
||||
expect(getBuiltinPiece('unknown')).toBeNull();
|
||||
expect(getBuiltinPiece('')).toBeNull();
|
||||
expect(getBuiltinPiece('nonexistent-piece', process.cwd())).toBeNull();
|
||||
expect(getBuiltinPiece('unknown', process.cwd())).toBeNull();
|
||||
expect(getBuiltinPiece('', process.cwd())).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe('default piece parallel reviewers movement', () => {
|
||||
it('should have a reviewers movement with parallel sub-movements', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
expect(piece).not.toBeNull();
|
||||
|
||||
const reviewersMovement = piece!.movements.find((s) => s.name === 'reviewers');
|
||||
@ -72,7 +73,7 @@ describe('default piece parallel reviewers movement', () => {
|
||||
});
|
||||
|
||||
it('should have arch-review and qa-review as parallel sub-movements', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
const reviewersMovement = piece!.movements.find((s) => s.name === 'reviewers')!;
|
||||
const subMovementNames = reviewersMovement.parallel!.map((s) => s.name);
|
||||
|
||||
@ -81,7 +82,7 @@ describe('default piece parallel reviewers movement', () => {
|
||||
});
|
||||
|
||||
it('should have aggregate conditions on the reviewers parent movement', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
const reviewersMovement = piece!.movements.find((s) => s.name === 'reviewers')!;
|
||||
|
||||
expect(reviewersMovement.rules).toBeDefined();
|
||||
@ -99,7 +100,7 @@ describe('default piece parallel reviewers movement', () => {
|
||||
});
|
||||
|
||||
it('should have matching conditions on sub-movements for aggregation', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
const reviewersMovement = piece!.movements.find((s) => s.name === 'reviewers')!;
|
||||
|
||||
for (const subMovement of reviewersMovement.parallel!) {
|
||||
@ -111,7 +112,7 @@ describe('default piece parallel reviewers movement', () => {
|
||||
});
|
||||
|
||||
it('should have ai_review transitioning to reviewers movement', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
const aiReviewMovement = piece!.movements.find((s) => s.name === 'ai_review')!;
|
||||
|
||||
const approveRule = aiReviewMovement.rules!.find((r) => r.next === 'reviewers');
|
||||
@ -119,7 +120,7 @@ describe('default piece parallel reviewers movement', () => {
|
||||
});
|
||||
|
||||
it('should have ai_fix transitioning to ai_review movement', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
const aiFixMovement = piece!.movements.find((s) => s.name === 'ai_fix')!;
|
||||
|
||||
const fixedRule = aiFixMovement.rules!.find((r) => r.next === 'ai_review');
|
||||
@ -127,7 +128,7 @@ describe('default piece parallel reviewers movement', () => {
|
||||
});
|
||||
|
||||
it('should have fix movement transitioning back to reviewers', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
const fixMovement = piece!.movements.find((s) => s.name === 'fix')!;
|
||||
|
||||
const fixedRule = fixMovement.rules!.find((r) => r.next === 'reviewers');
|
||||
@ -135,7 +136,7 @@ describe('default piece parallel reviewers movement', () => {
|
||||
});
|
||||
|
||||
it('should not have old separate review/security_review/improve movements', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
const movementNames = piece!.movements.map((s) => s.name);
|
||||
|
||||
expect(movementNames).not.toContain('review');
|
||||
@ -145,7 +146,7 @@ describe('default piece parallel reviewers movement', () => {
|
||||
});
|
||||
|
||||
it('should have sub-movements with correct agents', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
const reviewersMovement = piece!.movements.find((s) => s.name === 'reviewers')!;
|
||||
|
||||
const archReview = reviewersMovement.parallel!.find((s) => s.name === 'arch-review')!;
|
||||
@ -156,7 +157,7 @@ describe('default piece parallel reviewers movement', () => {
|
||||
});
|
||||
|
||||
it('should have output contracts configured on sub-movements', () => {
|
||||
const piece = getBuiltinPiece('default');
|
||||
const piece = getBuiltinPiece('default', process.cwd());
|
||||
const reviewersMovement = piece!.movements.find((s) => s.name === 'reviewers')!;
|
||||
|
||||
const archReview = reviewersMovement.parallel!.find((s) => s.name === 'arch-review')!;
|
||||
@ -288,54 +289,13 @@ describe('loadPersonaPromptFromPath (builtin paths)', () => {
|
||||
const personaPath = join(builtinPersonasDir, 'coder.md');
|
||||
|
||||
if (existsSync(personaPath)) {
|
||||
const prompt = loadPersonaPromptFromPath(personaPath);
|
||||
const prompt = loadPersonaPromptFromPath(personaPath, process.cwd());
|
||||
expect(prompt).toBeTruthy();
|
||||
expect(typeof prompt).toBe('string');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('getCurrentPiece', () => {
|
||||
let testDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
testDir = join(tmpdir(), `takt-test-${randomUUID()}`);
|
||||
mkdirSync(testDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (existsSync(testDir)) {
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it('should return default when no config exists', () => {
|
||||
const piece = getCurrentPiece(testDir);
|
||||
|
||||
expect(piece).toBe('default');
|
||||
});
|
||||
|
||||
it('should return saved piece name from config.yaml', () => {
|
||||
const configDir = getProjectConfigDir(testDir);
|
||||
mkdirSync(configDir, { recursive: true });
|
||||
writeFileSync(join(configDir, 'config.yaml'), 'piece: default\n');
|
||||
|
||||
const piece = getCurrentPiece(testDir);
|
||||
|
||||
expect(piece).toBe('default');
|
||||
});
|
||||
|
||||
it('should return default for empty config', () => {
|
||||
const configDir = getProjectConfigDir(testDir);
|
||||
mkdirSync(configDir, { recursive: true });
|
||||
writeFileSync(join(configDir, 'config.yaml'), '');
|
||||
|
||||
const piece = getCurrentPiece(testDir);
|
||||
|
||||
expect(piece).toBe('default');
|
||||
});
|
||||
});
|
||||
|
||||
describe('setCurrentPiece', () => {
|
||||
let testDir: string;
|
||||
|
||||
@ -371,12 +331,160 @@ describe('setCurrentPiece', () => {
|
||||
setCurrentPiece(testDir, 'first');
|
||||
setCurrentPiece(testDir, 'second');
|
||||
|
||||
const piece = getCurrentPiece(testDir);
|
||||
const piece = loadProjectConfig(testDir).piece;
|
||||
|
||||
expect(piece).toBe('second');
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadProjectConfig provider_options', () => {
|
||||
let testDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
testDir = join(tmpdir(), `takt-test-${randomUUID()}`);
|
||||
mkdirSync(testDir, { recursive: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (existsSync(testDir)) {
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it('should normalize provider_options into providerOptions (camelCase)', () => {
|
||||
const projectConfigDir = getProjectConfigDir(testDir);
|
||||
mkdirSync(projectConfigDir, { recursive: true });
|
||||
writeFileSync(join(projectConfigDir, 'config.yaml'), [
|
||||
'piece: default',
|
||||
'provider_options:',
|
||||
' codex:',
|
||||
' network_access: true',
|
||||
' claude:',
|
||||
' sandbox:',
|
||||
' allow_unsandboxed_commands: true',
|
||||
].join('\n'));
|
||||
|
||||
const config = loadProjectConfig(testDir);
|
||||
|
||||
expect(config.providerOptions).toEqual({
|
||||
codex: { networkAccess: true },
|
||||
claude: { sandbox: { allowUnsandboxedCommands: true } },
|
||||
});
|
||||
});
|
||||
|
||||
it('should apply TAKT_PROVIDER_OPTIONS_* env overrides for project config', () => {
|
||||
const original = process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS;
|
||||
process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS = 'false';
|
||||
|
||||
const config = loadProjectConfig(testDir);
|
||||
expect(config.providerOptions).toEqual({
|
||||
codex: { networkAccess: false },
|
||||
});
|
||||
|
||||
if (original === undefined) {
|
||||
delete process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS;
|
||||
} else {
|
||||
process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS = original;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('isVerboseMode', () => {
|
||||
let testDir: string;
|
||||
let originalTaktConfigDir: string | undefined;
|
||||
let originalTaktVerbose: string | undefined;
|
||||
|
||||
beforeEach(() => {
|
||||
testDir = join(tmpdir(), `takt-test-${randomUUID()}`);
|
||||
mkdirSync(testDir, { recursive: true });
|
||||
originalTaktConfigDir = process.env.TAKT_CONFIG_DIR;
|
||||
originalTaktVerbose = process.env.TAKT_VERBOSE;
|
||||
process.env.TAKT_CONFIG_DIR = join(testDir, 'global-takt');
|
||||
delete process.env.TAKT_VERBOSE;
|
||||
invalidateGlobalConfigCache();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (originalTaktConfigDir === undefined) {
|
||||
delete process.env.TAKT_CONFIG_DIR;
|
||||
} else {
|
||||
process.env.TAKT_CONFIG_DIR = originalTaktConfigDir;
|
||||
}
|
||||
if (originalTaktVerbose === undefined) {
|
||||
delete process.env.TAKT_VERBOSE;
|
||||
} else {
|
||||
process.env.TAKT_VERBOSE = originalTaktVerbose;
|
||||
}
|
||||
|
||||
if (existsSync(testDir)) {
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it('should return project verbose when project config has verbose: true', () => {
|
||||
const projectConfigDir = getProjectConfigDir(testDir);
|
||||
mkdirSync(projectConfigDir, { recursive: true });
|
||||
writeFileSync(join(projectConfigDir, 'config.yaml'), 'piece: default\nverbose: true\n');
|
||||
|
||||
const globalConfigDir = process.env.TAKT_CONFIG_DIR!;
|
||||
mkdirSync(globalConfigDir, { recursive: true });
|
||||
writeFileSync(join(globalConfigDir, 'config.yaml'), 'verbose: false\n');
|
||||
|
||||
expect(isVerboseMode(testDir)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return project verbose when project config has verbose: false', () => {
|
||||
const projectConfigDir = getProjectConfigDir(testDir);
|
||||
mkdirSync(projectConfigDir, { recursive: true });
|
||||
writeFileSync(join(projectConfigDir, 'config.yaml'), 'piece: default\nverbose: false\n');
|
||||
|
||||
const globalConfigDir = process.env.TAKT_CONFIG_DIR!;
|
||||
mkdirSync(globalConfigDir, { recursive: true });
|
||||
writeFileSync(join(globalConfigDir, 'config.yaml'), 'verbose: true\n');
|
||||
|
||||
expect(isVerboseMode(testDir)).toBe(false);
|
||||
});
|
||||
|
||||
it('should fallback to global verbose when project verbose is not set', () => {
|
||||
const projectConfigDir = getProjectConfigDir(testDir);
|
||||
mkdirSync(projectConfigDir, { recursive: true });
|
||||
writeFileSync(join(projectConfigDir, 'config.yaml'), 'piece: default\n');
|
||||
|
||||
const globalConfigDir = process.env.TAKT_CONFIG_DIR!;
|
||||
mkdirSync(globalConfigDir, { recursive: true });
|
||||
writeFileSync(join(globalConfigDir, 'config.yaml'), 'verbose: true\n');
|
||||
|
||||
expect(isVerboseMode(testDir)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when neither project nor global verbose is set', () => {
|
||||
expect(isVerboseMode(testDir)).toBe(false);
|
||||
});
|
||||
|
||||
it('should prioritize TAKT_VERBOSE over project and global config', () => {
|
||||
const projectConfigDir = getProjectConfigDir(testDir);
|
||||
mkdirSync(projectConfigDir, { recursive: true });
|
||||
writeFileSync(join(projectConfigDir, 'config.yaml'), 'piece: default\nverbose: false\n');
|
||||
|
||||
const globalConfigDir = process.env.TAKT_CONFIG_DIR!;
|
||||
mkdirSync(globalConfigDir, { recursive: true });
|
||||
writeFileSync(join(globalConfigDir, 'config.yaml'), 'verbose: false\n');
|
||||
|
||||
process.env.TAKT_VERBOSE = 'true';
|
||||
expect(isVerboseMode(testDir)).toBe(true);
|
||||
});
|
||||
|
||||
it('should throw on TAKT_VERBOSE=0', () => {
|
||||
process.env.TAKT_VERBOSE = '0';
|
||||
expect(() => isVerboseMode(testDir)).toThrow('TAKT_VERBOSE must be one of: true, false');
|
||||
});
|
||||
|
||||
it('should throw on invalid TAKT_VERBOSE value', () => {
|
||||
process.env.TAKT_VERBOSE = 'yes';
|
||||
expect(() => isVerboseMode(testDir)).toThrow('TAKT_VERBOSE must be one of: true, false');
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadInputHistory', () => {
|
||||
let testDir: string;
|
||||
|
||||
|
||||
215
src/__tests__/conversationLoop-resume.test.ts
Normal file
215
src/__tests__/conversationLoop-resume.test.ts
Normal file
@ -0,0 +1,215 @@
|
||||
/**
|
||||
* Tests for /resume command and initializeSession changes.
|
||||
*
|
||||
* Verifies:
|
||||
* - initializeSession returns sessionId: undefined (no implicit auto-load)
|
||||
* - /resume command calls selectRecentSession and updates sessionId
|
||||
* - /resume with cancel does not change sessionId
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import {
|
||||
setupRawStdin,
|
||||
restoreStdin,
|
||||
toRawInputs,
|
||||
createMockProvider,
|
||||
createScenarioProvider,
|
||||
type MockProviderCapture,
|
||||
} from './helpers/stdinSimulator.js';
|
||||
|
||||
// --- Infrastructure mocks ---
|
||||
|
||||
vi.mock('../infra/config/global/globalConfig.js', () => ({
|
||||
loadGlobalConfig: vi.fn(() => ({ provider: 'mock', language: 'en' })),
|
||||
getBuiltinPiecesEnabled: vi.fn().mockReturnValue(true),
|
||||
}));
|
||||
|
||||
vi.mock('../infra/providers/index.js', () => ({
|
||||
getProvider: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/utils/index.js', async (importOriginal) => ({
|
||||
...(await importOriginal<Record<string, unknown>>()),
|
||||
createLogger: () => ({ info: vi.fn(), debug: vi.fn(), error: vi.fn() }),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/context.js', () => ({
|
||||
isQuietMode: vi.fn(() => false),
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/paths.js', async (importOriginal) => ({
|
||||
...(await importOriginal<Record<string, unknown>>()),
|
||||
loadPersonaSessions: vi.fn(() => ({})),
|
||||
updatePersonaSession: vi.fn(),
|
||||
getProjectConfigDir: vi.fn(() => '/tmp'),
|
||||
loadSessionState: vi.fn(() => null),
|
||||
clearSessionState: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/ui/index.js', () => ({
|
||||
info: vi.fn(),
|
||||
error: vi.fn(),
|
||||
blankLine: vi.fn(),
|
||||
StreamDisplay: vi.fn().mockImplementation(() => ({
|
||||
createHandler: vi.fn(() => vi.fn()),
|
||||
flush: vi.fn(),
|
||||
})),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/prompt/index.js', () => ({
|
||||
selectOption: vi.fn().mockResolvedValue('execute'),
|
||||
}));
|
||||
|
||||
const mockSelectRecentSession = vi.fn<(cwd: string, lang: 'en' | 'ja') => Promise<string | null>>();
|
||||
|
||||
vi.mock('../features/interactive/sessionSelector.js', () => ({
|
||||
selectRecentSession: (...args: [string, 'en' | 'ja']) => mockSelectRecentSession(...args),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/i18n/index.js', () => ({
|
||||
getLabel: vi.fn((_key: string, _lang: string) => 'Mock label'),
|
||||
getLabelObject: vi.fn(() => ({
|
||||
intro: 'Intro',
|
||||
resume: 'Resume',
|
||||
noConversation: 'No conversation',
|
||||
summarizeFailed: 'Summarize failed',
|
||||
continuePrompt: 'Continue?',
|
||||
proposed: 'Proposed:',
|
||||
actionPrompt: 'What next?',
|
||||
playNoTask: 'No task for /play',
|
||||
cancelled: 'Cancelled',
|
||||
actions: { execute: 'Execute', saveTask: 'Save', continue: 'Continue' },
|
||||
})),
|
||||
}));
|
||||
|
||||
// --- Imports (after mocks) ---
|
||||
|
||||
import { getProvider } from '../infra/providers/index.js';
|
||||
import { selectOption } from '../shared/prompt/index.js';
|
||||
import { info as logInfo } from '../shared/ui/index.js';
|
||||
import { initializeSession, runConversationLoop, type SessionContext } from '../features/interactive/conversationLoop.js';
|
||||
|
||||
const mockGetProvider = vi.mocked(getProvider);
|
||||
const mockSelectOption = vi.mocked(selectOption);
|
||||
const mockLogInfo = vi.mocked(logInfo);
|
||||
|
||||
// --- Helpers ---
|
||||
|
||||
function setupProvider(responses: string[]): MockProviderCapture {
|
||||
const { provider, capture } = createMockProvider(responses);
|
||||
mockGetProvider.mockReturnValue(provider);
|
||||
return capture;
|
||||
}
|
||||
|
||||
function createSessionContext(overrides: Partial<SessionContext> = {}): SessionContext {
|
||||
const { provider } = createMockProvider([]);
|
||||
mockGetProvider.mockReturnValue(provider);
|
||||
return {
|
||||
provider: provider as SessionContext['provider'],
|
||||
providerType: 'mock' as SessionContext['providerType'],
|
||||
model: undefined,
|
||||
lang: 'en',
|
||||
personaName: 'interactive',
|
||||
sessionId: undefined,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
const defaultStrategy = {
|
||||
systemPrompt: 'test system prompt',
|
||||
allowedTools: ['Read'],
|
||||
transformPrompt: (msg: string) => msg,
|
||||
introMessage: 'Test intro',
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockSelectOption.mockResolvedValue('execute');
|
||||
mockSelectRecentSession.mockResolvedValue(null);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
restoreStdin();
|
||||
});
|
||||
|
||||
// =================================================================
|
||||
// initializeSession: no implicit session auto-load
|
||||
// =================================================================
|
||||
describe('initializeSession', () => {
|
||||
it('should return sessionId as undefined (no implicit auto-load)', () => {
|
||||
const ctx = initializeSession('/test/cwd', 'interactive');
|
||||
|
||||
expect(ctx.sessionId).toBeUndefined();
|
||||
expect(ctx.personaName).toBe('interactive');
|
||||
});
|
||||
});
|
||||
|
||||
// =================================================================
|
||||
// /resume command
|
||||
// =================================================================
|
||||
describe('/resume command', () => {
|
||||
it('should call selectRecentSession and update sessionId when session selected', async () => {
|
||||
// Given: /resume → select session → /cancel
|
||||
setupRawStdin(toRawInputs(['/resume', '/cancel']));
|
||||
setupProvider([]);
|
||||
mockSelectRecentSession.mockResolvedValue('selected-session-abc');
|
||||
|
||||
const ctx = createSessionContext();
|
||||
|
||||
// When
|
||||
const result = await runConversationLoop('/test', ctx, defaultStrategy, undefined, undefined);
|
||||
|
||||
// Then: selectRecentSession called
|
||||
expect(mockSelectRecentSession).toHaveBeenCalledWith('/test', 'en');
|
||||
|
||||
// Then: info about loaded session displayed
|
||||
expect(mockLogInfo).toHaveBeenCalledWith('Mock label');
|
||||
|
||||
// Then: cancelled at the end
|
||||
expect(result.action).toBe('cancel');
|
||||
});
|
||||
|
||||
it('should not change sessionId when user cancels session selection', async () => {
|
||||
// Given: /resume → cancel selection → /cancel
|
||||
setupRawStdin(toRawInputs(['/resume', '/cancel']));
|
||||
setupProvider([]);
|
||||
mockSelectRecentSession.mockResolvedValue(null);
|
||||
|
||||
const ctx = createSessionContext();
|
||||
|
||||
// When
|
||||
const result = await runConversationLoop('/test', ctx, defaultStrategy, undefined, undefined);
|
||||
|
||||
// Then: selectRecentSession called but returned null
|
||||
expect(mockSelectRecentSession).toHaveBeenCalledWith('/test', 'en');
|
||||
|
||||
// Then: cancelled
|
||||
expect(result.action).toBe('cancel');
|
||||
});
|
||||
|
||||
it('should use resumed session for subsequent AI calls', async () => {
|
||||
// Given: /resume → select session → send message → /cancel
|
||||
setupRawStdin(toRawInputs(['/resume', 'hello world', '/cancel']));
|
||||
mockSelectRecentSession.mockResolvedValue('resumed-session-xyz');
|
||||
|
||||
const { provider, capture } = createScenarioProvider([
|
||||
{ content: 'AI response' },
|
||||
]);
|
||||
|
||||
const ctx: SessionContext = {
|
||||
provider: provider as SessionContext['provider'],
|
||||
providerType: 'mock' as SessionContext['providerType'],
|
||||
model: undefined,
|
||||
lang: 'en',
|
||||
personaName: 'interactive',
|
||||
sessionId: undefined,
|
||||
};
|
||||
|
||||
// When
|
||||
const result = await runConversationLoop('/test', ctx, defaultStrategy, undefined, undefined);
|
||||
|
||||
// Then: AI call should use the resumed session ID
|
||||
expect(capture.sessionIds[0]).toBe('resumed-session-xyz');
|
||||
expect(result.action).toBe('cancel');
|
||||
});
|
||||
});
|
||||
@ -85,7 +85,6 @@ describe('createIsolatedEnv', () => {
|
||||
|
||||
expect(config.language).toBe('en');
|
||||
expect(config.log_level).toBe('info');
|
||||
expect(config.default_piece).toBe('default');
|
||||
expect(config.notification_sound).toBe(false);
|
||||
expect(config.notification_sound_events).toEqual({
|
||||
iteration_limit: false,
|
||||
@ -173,7 +172,6 @@ describe('createIsolatedEnv', () => {
|
||||
[
|
||||
'language: en',
|
||||
'log_level: info',
|
||||
'default_piece: default',
|
||||
'notification_sound: true',
|
||||
'notification_sound_events: true',
|
||||
].join('\n'),
|
||||
|
||||
135
src/__tests__/engine-provider-options.test.ts
Normal file
135
src/__tests__/engine-provider-options.test.ts
Normal file
@ -0,0 +1,135 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
import { rmSync } from 'node:fs';
|
||||
|
||||
vi.mock('../agents/runner.js', () => ({
|
||||
runAgent: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../core/piece/evaluation/index.js', () => ({
|
||||
detectMatchedRule: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../core/piece/phase-runner.js', () => ({
|
||||
needsStatusJudgmentPhase: vi.fn(),
|
||||
runReportPhase: vi.fn(),
|
||||
runStatusJudgmentPhase: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/utils/index.js', async (importOriginal) => ({
|
||||
...(await importOriginal<Record<string, unknown>>()),
|
||||
generateReportDir: vi.fn().mockReturnValue('test-report-dir'),
|
||||
}));
|
||||
|
||||
import { PieceEngine } from '../core/piece/index.js';
|
||||
import { runAgent } from '../agents/runner.js';
|
||||
import {
|
||||
applyDefaultMocks,
|
||||
cleanupPieceEngine,
|
||||
createTestTmpDir,
|
||||
makeMovement,
|
||||
makeResponse,
|
||||
makeRule,
|
||||
mockDetectMatchedRuleSequence,
|
||||
mockRunAgentSequence,
|
||||
} from './engine-test-helpers.js';
|
||||
import type { PieceConfig } from '../core/models/index.js';
|
||||
|
||||
describe('PieceEngine provider_options resolution', () => {
|
||||
let tmpDir: string;
|
||||
let engine: PieceEngine | undefined;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
applyDefaultMocks();
|
||||
tmpDir = createTestTmpDir();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (engine) {
|
||||
cleanupPieceEngine(engine);
|
||||
engine = undefined;
|
||||
}
|
||||
if (tmpDir) {
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it('should merge provider_options in order: global < project < movement', async () => {
|
||||
const movement = makeMovement('implement', {
|
||||
providerOptions: {
|
||||
codex: { networkAccess: false },
|
||||
claude: { sandbox: { excludedCommands: ['./gradlew'] } },
|
||||
},
|
||||
rules: [makeRule('done', 'COMPLETE')],
|
||||
});
|
||||
|
||||
const config: PieceConfig = {
|
||||
name: 'provider-options-priority',
|
||||
movements: [movement],
|
||||
initialMovement: 'implement',
|
||||
maxMovements: 1,
|
||||
};
|
||||
|
||||
mockRunAgentSequence([
|
||||
makeResponse({ persona: movement.persona, content: 'done' }),
|
||||
]);
|
||||
mockDetectMatchedRuleSequence([{ index: 0, method: 'phase1_tag' }]);
|
||||
|
||||
engine = new PieceEngine(config, tmpDir, 'test task', {
|
||||
projectCwd: tmpDir,
|
||||
provider: 'claude',
|
||||
providerOptions: {
|
||||
codex: { networkAccess: true },
|
||||
claude: { sandbox: { allowUnsandboxedCommands: false } },
|
||||
opencode: { networkAccess: true },
|
||||
},
|
||||
});
|
||||
|
||||
await engine.run();
|
||||
|
||||
const options = vi.mocked(runAgent).mock.calls[0]?.[2];
|
||||
expect(options?.providerOptions).toEqual({
|
||||
codex: { networkAccess: false },
|
||||
opencode: { networkAccess: true },
|
||||
claude: {
|
||||
sandbox: {
|
||||
allowUnsandboxedCommands: false,
|
||||
excludedCommands: ['./gradlew'],
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should pass global provider_options when project and movement options are absent', async () => {
|
||||
const movement = makeMovement('implement', {
|
||||
rules: [makeRule('done', 'COMPLETE')],
|
||||
});
|
||||
|
||||
const config: PieceConfig = {
|
||||
name: 'provider-options-global-only',
|
||||
movements: [movement],
|
||||
initialMovement: 'implement',
|
||||
maxMovements: 1,
|
||||
};
|
||||
|
||||
mockRunAgentSequence([
|
||||
makeResponse({ persona: movement.persona, content: 'done' }),
|
||||
]);
|
||||
mockDetectMatchedRuleSequence([{ index: 0, method: 'phase1_tag' }]);
|
||||
|
||||
engine = new PieceEngine(config, tmpDir, 'test task', {
|
||||
projectCwd: tmpDir,
|
||||
provider: 'claude',
|
||||
providerOptions: {
|
||||
codex: { networkAccess: true },
|
||||
},
|
||||
});
|
||||
|
||||
await engine.run();
|
||||
|
||||
const options = vi.mocked(runAgent).mock.calls[0]?.[2];
|
||||
expect(options?.providerOptions).toEqual({
|
||||
codex: { networkAccess: true },
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -7,14 +7,35 @@ import { tmpdir } from 'node:os';
|
||||
import { dirname, join } from 'node:path';
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
const loadGlobalConfigMock = vi.hoisted(() => vi.fn());
|
||||
const loadConfigMock = vi.hoisted(() => vi.fn());
|
||||
|
||||
vi.mock('../infra/config/paths.js', () => ({
|
||||
getGlobalConfigDir: () => '/tmp/.takt',
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/global/globalConfig.js', () => ({
|
||||
loadGlobalConfig: loadGlobalConfigMock,
|
||||
vi.mock('../infra/config/loadConfig.js', () => ({
|
||||
loadConfig: loadConfigMock,
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/resolvePieceConfigValue.js', () => ({
|
||||
resolvePieceConfigValue: (_projectDir: string, key: string) => {
|
||||
const loaded = loadConfigMock() as Record<string, Record<string, unknown>>;
|
||||
const global = loaded?.global ?? {};
|
||||
const project = loaded?.project ?? {};
|
||||
const merged: Record<string, unknown> = { ...global, ...project };
|
||||
return merged[key];
|
||||
},
|
||||
resolvePieceConfigValues: (_projectDir: string, keys: readonly string[]) => {
|
||||
const loaded = loadConfigMock() as Record<string, Record<string, unknown>>;
|
||||
const global = loaded?.global ?? {};
|
||||
const project = loaded?.project ?? {};
|
||||
const merged: Record<string, unknown> = { ...global, ...project };
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const key of keys) {
|
||||
result[key] = merged[key];
|
||||
}
|
||||
return result;
|
||||
},
|
||||
}));
|
||||
|
||||
const { getPieceCategoriesPath, resetPieceCategories } = await import(
|
||||
@ -28,17 +49,18 @@ function createTempCategoriesPath(): string {
|
||||
|
||||
describe('getPieceCategoriesPath', () => {
|
||||
beforeEach(() => {
|
||||
loadGlobalConfigMock.mockReset();
|
||||
loadConfigMock.mockReset();
|
||||
});
|
||||
|
||||
it('should return configured path when pieceCategoriesFile is set', () => {
|
||||
// Given
|
||||
loadGlobalConfigMock.mockReturnValue({
|
||||
pieceCategoriesFile: '/custom/piece-categories.yaml',
|
||||
loadConfigMock.mockReturnValue({
|
||||
global: { pieceCategoriesFile: '/custom/piece-categories.yaml' },
|
||||
project: {},
|
||||
});
|
||||
|
||||
// When
|
||||
const path = getPieceCategoriesPath();
|
||||
const path = getPieceCategoriesPath(process.cwd());
|
||||
|
||||
// Then
|
||||
expect(path).toBe('/custom/piece-categories.yaml');
|
||||
@ -46,10 +68,10 @@ describe('getPieceCategoriesPath', () => {
|
||||
|
||||
it('should return default path when pieceCategoriesFile is not set', () => {
|
||||
// Given
|
||||
loadGlobalConfigMock.mockReturnValue({});
|
||||
loadConfigMock.mockReturnValue({ global: {}, project: {} });
|
||||
|
||||
// When
|
||||
const path = getPieceCategoriesPath();
|
||||
const path = getPieceCategoriesPath(process.cwd());
|
||||
|
||||
// Then
|
||||
expect(path).toBe('/tmp/.takt/preferences/piece-categories.yaml');
|
||||
@ -57,12 +79,12 @@ describe('getPieceCategoriesPath', () => {
|
||||
|
||||
it('should rethrow when global config loading fails', () => {
|
||||
// Given
|
||||
loadGlobalConfigMock.mockImplementation(() => {
|
||||
loadConfigMock.mockImplementation(() => {
|
||||
throw new Error('invalid global config');
|
||||
});
|
||||
|
||||
// When / Then
|
||||
expect(() => getPieceCategoriesPath()).toThrow('invalid global config');
|
||||
expect(() => getPieceCategoriesPath(process.cwd())).toThrow('invalid global config');
|
||||
});
|
||||
});
|
||||
|
||||
@ -70,7 +92,7 @@ describe('resetPieceCategories', () => {
|
||||
const tempRoots: string[] = [];
|
||||
|
||||
beforeEach(() => {
|
||||
loadGlobalConfigMock.mockReset();
|
||||
loadConfigMock.mockReset();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
@ -84,12 +106,13 @@ describe('resetPieceCategories', () => {
|
||||
// Given
|
||||
const categoriesPath = createTempCategoriesPath();
|
||||
tempRoots.push(dirname(dirname(categoriesPath)));
|
||||
loadGlobalConfigMock.mockReturnValue({
|
||||
pieceCategoriesFile: categoriesPath,
|
||||
loadConfigMock.mockReturnValue({
|
||||
global: { pieceCategoriesFile: categoriesPath },
|
||||
project: {},
|
||||
});
|
||||
|
||||
// When
|
||||
resetPieceCategories();
|
||||
resetPieceCategories(process.cwd());
|
||||
|
||||
// Then
|
||||
expect(existsSync(dirname(categoriesPath))).toBe(true);
|
||||
@ -102,14 +125,15 @@ describe('resetPieceCategories', () => {
|
||||
const categoriesDir = dirname(categoriesPath);
|
||||
const tempRoot = dirname(categoriesDir);
|
||||
tempRoots.push(tempRoot);
|
||||
loadGlobalConfigMock.mockReturnValue({
|
||||
pieceCategoriesFile: categoriesPath,
|
||||
loadConfigMock.mockReturnValue({
|
||||
global: { pieceCategoriesFile: categoriesPath },
|
||||
project: {},
|
||||
});
|
||||
mkdirSync(categoriesDir, { recursive: true });
|
||||
writeFileSync(categoriesPath, 'piece_categories:\n old:\n - stale-piece\n', 'utf-8');
|
||||
|
||||
// When
|
||||
resetPieceCategories();
|
||||
resetPieceCategories(process.cwd());
|
||||
|
||||
// Then
|
||||
expect(readFileSync(categoriesPath, 'utf-8')).toBe('piece_categories: {}\n');
|
||||
|
||||
@ -39,7 +39,6 @@ describe('loadGlobalConfig', () => {
|
||||
const config = loadGlobalConfig();
|
||||
|
||||
expect(config.language).toBe('en');
|
||||
expect(config.defaultPiece).toBe('default');
|
||||
expect(config.logLevel).toBe('info');
|
||||
expect(config.provider).toBe('claude');
|
||||
expect(config.model).toBeUndefined();
|
||||
@ -79,6 +78,23 @@ describe('loadGlobalConfig', () => {
|
||||
expect(config.logLevel).toBe('debug');
|
||||
});
|
||||
|
||||
it('should apply env override for nested provider_options key', () => {
|
||||
const original = process.env.TAKT_PROVIDER_OPTIONS_CLAUDE_SANDBOX_ALLOW_UNSANDBOXED_COMMANDS;
|
||||
try {
|
||||
process.env.TAKT_PROVIDER_OPTIONS_CLAUDE_SANDBOX_ALLOW_UNSANDBOXED_COMMANDS = 'true';
|
||||
invalidateGlobalConfigCache();
|
||||
|
||||
const config = loadGlobalConfig();
|
||||
expect(config.providerOptions?.claude?.sandbox?.allowUnsandboxedCommands).toBe(true);
|
||||
} finally {
|
||||
if (original === undefined) {
|
||||
delete process.env.TAKT_PROVIDER_OPTIONS_CLAUDE_SANDBOX_ALLOW_UNSANDBOXED_COMMANDS;
|
||||
} else {
|
||||
process.env.TAKT_PROVIDER_OPTIONS_CLAUDE_SANDBOX_ALLOW_UNSANDBOXED_COMMANDS = original;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it('should load pipeline config from config.yaml', () => {
|
||||
const taktDir = join(testHomeDir, '.takt');
|
||||
mkdirSync(taktDir, { recursive: true });
|
||||
|
||||
@ -97,7 +97,6 @@ describe('GlobalConfig load/save with API keys', () => {
|
||||
it('should load config with API keys from YAML', () => {
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
'anthropic_api_key: sk-ant-from-yaml',
|
||||
@ -113,7 +112,6 @@ describe('GlobalConfig load/save with API keys', () => {
|
||||
it('should load config without API keys', () => {
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
].join('\n');
|
||||
@ -128,7 +126,6 @@ describe('GlobalConfig load/save with API keys', () => {
|
||||
// Write initial config
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
].join('\n');
|
||||
@ -147,7 +144,6 @@ describe('GlobalConfig load/save with API keys', () => {
|
||||
it('should not persist API keys when not set', () => {
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
].join('\n');
|
||||
@ -183,7 +179,6 @@ describe('resolveAnthropicApiKey', () => {
|
||||
process.env['TAKT_ANTHROPIC_API_KEY'] = 'sk-ant-from-env';
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
'anthropic_api_key: sk-ant-from-yaml',
|
||||
@ -198,7 +193,6 @@ describe('resolveAnthropicApiKey', () => {
|
||||
delete process.env['TAKT_ANTHROPIC_API_KEY'];
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
'anthropic_api_key: sk-ant-from-yaml',
|
||||
@ -213,7 +207,6 @@ describe('resolveAnthropicApiKey', () => {
|
||||
delete process.env['TAKT_ANTHROPIC_API_KEY'];
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
].join('\n');
|
||||
@ -254,7 +247,6 @@ describe('resolveOpenaiApiKey', () => {
|
||||
process.env['TAKT_OPENAI_API_KEY'] = 'sk-openai-from-env';
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
'openai_api_key: sk-openai-from-yaml',
|
||||
@ -269,7 +261,6 @@ describe('resolveOpenaiApiKey', () => {
|
||||
delete process.env['TAKT_OPENAI_API_KEY'];
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
'openai_api_key: sk-openai-from-yaml',
|
||||
@ -284,7 +275,6 @@ describe('resolveOpenaiApiKey', () => {
|
||||
delete process.env['TAKT_OPENAI_API_KEY'];
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
].join('\n');
|
||||
@ -318,7 +308,6 @@ describe('resolveCodexCliPath', () => {
|
||||
process.env['TAKT_CODEX_CLI_PATH'] = envCodexPath;
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: codex',
|
||||
`codex_cli_path: ${configCodexPath}`,
|
||||
@ -334,7 +323,6 @@ describe('resolveCodexCliPath', () => {
|
||||
const configCodexPath = createExecutableFile('config-codex');
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: codex',
|
||||
`codex_cli_path: ${configCodexPath}`,
|
||||
@ -349,7 +337,6 @@ describe('resolveCodexCliPath', () => {
|
||||
delete process.env['TAKT_CODEX_CLI_PATH'];
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: codex',
|
||||
].join('\n');
|
||||
@ -395,7 +382,6 @@ describe('resolveCodexCliPath', () => {
|
||||
delete process.env['TAKT_CODEX_CLI_PATH'];
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: codex',
|
||||
`codex_cli_path: ${join(testDir, 'missing-codex-from-config')}`,
|
||||
@ -427,7 +413,6 @@ describe('resolveOpencodeApiKey', () => {
|
||||
process.env['TAKT_OPENCODE_API_KEY'] = 'sk-opencode-from-env';
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
'opencode_api_key: sk-opencode-from-yaml',
|
||||
@ -442,7 +427,6 @@ describe('resolveOpencodeApiKey', () => {
|
||||
delete process.env['TAKT_OPENCODE_API_KEY'];
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
'opencode_api_key: sk-opencode-from-yaml',
|
||||
@ -457,7 +441,6 @@ describe('resolveOpencodeApiKey', () => {
|
||||
delete process.env['TAKT_OPENCODE_API_KEY'];
|
||||
const yaml = [
|
||||
'language: en',
|
||||
'default_piece: default',
|
||||
'log_level: info',
|
||||
'provider: claude',
|
||||
].join('\n');
|
||||
|
||||
203
src/__tests__/it-config-provider-options.test.ts
Normal file
203
src/__tests__/it-config-provider-options.test.ts
Normal file
@ -0,0 +1,203 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
import { mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { randomUUID } from 'node:crypto';
|
||||
|
||||
vi.mock('../agents/runner.js', () => ({
|
||||
runAgent: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../agents/ai-judge.js', async (importOriginal) => {
|
||||
const original = await importOriginal<typeof import('../agents/ai-judge.js')>();
|
||||
return {
|
||||
...original,
|
||||
callAiJudge: vi.fn().mockResolvedValue(-1),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../core/piece/phase-runner.js', () => ({
|
||||
needsStatusJudgmentPhase: vi.fn().mockReturnValue(false),
|
||||
runReportPhase: vi.fn().mockResolvedValue(undefined),
|
||||
runStatusJudgmentPhase: vi.fn().mockResolvedValue({ tag: '', ruleIndex: 0, method: 'auto_select' }),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/utils/index.js', async (importOriginal) => ({
|
||||
...(await importOriginal<Record<string, unknown>>()),
|
||||
generateReportDir: vi.fn().mockReturnValue('test-report-dir'),
|
||||
}));
|
||||
|
||||
import { runAgent } from '../agents/runner.js';
|
||||
import { executeTask } from '../features/tasks/execute/taskExecution.js';
|
||||
import { invalidateGlobalConfigCache } from '../infra/config/index.js';
|
||||
|
||||
interface TestEnv {
|
||||
projectDir: string;
|
||||
globalDir: string;
|
||||
}
|
||||
|
||||
function createEnv(): TestEnv {
|
||||
const root = join(tmpdir(), `takt-it-config-${randomUUID()}`);
|
||||
const projectDir = join(root, 'project');
|
||||
const globalDir = join(root, 'global');
|
||||
|
||||
mkdirSync(projectDir, { recursive: true });
|
||||
mkdirSync(join(projectDir, '.takt', 'pieces', 'personas'), { recursive: true });
|
||||
mkdirSync(globalDir, { recursive: true });
|
||||
|
||||
writeFileSync(
|
||||
join(projectDir, '.takt', 'pieces', 'config-it.yaml'),
|
||||
[
|
||||
'name: config-it',
|
||||
'description: config provider options integration test',
|
||||
'max_movements: 3',
|
||||
'initial_movement: plan',
|
||||
'movements:',
|
||||
' - name: plan',
|
||||
' persona: ./personas/planner.md',
|
||||
' instruction: "{task}"',
|
||||
' rules:',
|
||||
' - condition: done',
|
||||
' next: COMPLETE',
|
||||
].join('\n'),
|
||||
'utf-8',
|
||||
);
|
||||
writeFileSync(join(projectDir, '.takt', 'pieces', 'personas', 'planner.md'), 'You are planner.', 'utf-8');
|
||||
|
||||
return { projectDir, globalDir };
|
||||
}
|
||||
|
||||
function setGlobalConfig(globalDir: string, body: string): void {
|
||||
writeFileSync(join(globalDir, 'config.yaml'), body, 'utf-8');
|
||||
}
|
||||
|
||||
function setProjectConfig(projectDir: string, body: string): void {
|
||||
writeFileSync(join(projectDir, '.takt', 'config.yaml'), body, 'utf-8');
|
||||
}
|
||||
|
||||
function makeDoneResponse() {
|
||||
return {
|
||||
persona: 'planner',
|
||||
status: 'done',
|
||||
content: '[PLAN:1]\ndone',
|
||||
timestamp: new Date(),
|
||||
sessionId: 'session-it',
|
||||
};
|
||||
}
|
||||
|
||||
describe('IT: config provider_options reflection', () => {
|
||||
let env: TestEnv;
|
||||
let originalConfigDir: string | undefined;
|
||||
let originalEnvCodex: string | undefined;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
env = createEnv();
|
||||
originalConfigDir = process.env.TAKT_CONFIG_DIR;
|
||||
originalEnvCodex = process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS;
|
||||
|
||||
process.env.TAKT_CONFIG_DIR = env.globalDir;
|
||||
delete process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS;
|
||||
invalidateGlobalConfigCache();
|
||||
|
||||
vi.mocked(runAgent).mockResolvedValue(makeDoneResponse());
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (originalConfigDir === undefined) {
|
||||
delete process.env.TAKT_CONFIG_DIR;
|
||||
} else {
|
||||
process.env.TAKT_CONFIG_DIR = originalConfigDir;
|
||||
}
|
||||
if (originalEnvCodex === undefined) {
|
||||
delete process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS;
|
||||
} else {
|
||||
process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS = originalEnvCodex;
|
||||
}
|
||||
invalidateGlobalConfigCache();
|
||||
rmSync(join(env.projectDir, '..'), { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('global provider_options should be passed to runAgent', async () => {
|
||||
setGlobalConfig(
|
||||
env.globalDir,
|
||||
[
|
||||
'provider_options:',
|
||||
' codex:',
|
||||
' network_access: true',
|
||||
].join('\n'),
|
||||
);
|
||||
|
||||
const ok = await executeTask({
|
||||
task: 'test task',
|
||||
cwd: env.projectDir,
|
||||
projectCwd: env.projectDir,
|
||||
pieceIdentifier: 'config-it',
|
||||
});
|
||||
|
||||
expect(ok).toBe(true);
|
||||
const options = vi.mocked(runAgent).mock.calls[0]?.[2];
|
||||
expect(options?.providerOptions).toEqual({
|
||||
codex: { networkAccess: true },
|
||||
});
|
||||
});
|
||||
|
||||
it('project provider_options should override global provider_options', async () => {
|
||||
setGlobalConfig(
|
||||
env.globalDir,
|
||||
[
|
||||
'provider_options:',
|
||||
' codex:',
|
||||
' network_access: true',
|
||||
].join('\n'),
|
||||
);
|
||||
setProjectConfig(
|
||||
env.projectDir,
|
||||
[
|
||||
'provider_options:',
|
||||
' codex:',
|
||||
' network_access: false',
|
||||
].join('\n'),
|
||||
);
|
||||
|
||||
const ok = await executeTask({
|
||||
task: 'test task',
|
||||
cwd: env.projectDir,
|
||||
projectCwd: env.projectDir,
|
||||
pieceIdentifier: 'config-it',
|
||||
});
|
||||
|
||||
expect(ok).toBe(true);
|
||||
const options = vi.mocked(runAgent).mock.calls[0]?.[2];
|
||||
expect(options?.providerOptions).toEqual({
|
||||
codex: { networkAccess: false },
|
||||
});
|
||||
});
|
||||
|
||||
it('env provider_options should override yaml provider_options', async () => {
|
||||
setGlobalConfig(
|
||||
env.globalDir,
|
||||
[
|
||||
'provider_options:',
|
||||
' codex:',
|
||||
' network_access: true',
|
||||
].join('\n'),
|
||||
);
|
||||
process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS = 'false';
|
||||
invalidateGlobalConfigCache();
|
||||
|
||||
const ok = await executeTask({
|
||||
task: 'test task',
|
||||
cwd: env.projectDir,
|
||||
projectCwd: env.projectDir,
|
||||
pieceIdentifier: 'config-it',
|
||||
});
|
||||
|
||||
expect(ok).toBe(true);
|
||||
const options = vi.mocked(runAgent).mock.calls[0]?.[2];
|
||||
expect(options?.providerOptions).toEqual({
|
||||
codex: { networkAccess: false },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -118,6 +118,19 @@ vi.mock('../infra/config/index.js', () => ({
|
||||
loadWorktreeSessions: vi.fn().mockReturnValue({}),
|
||||
updateWorktreeSession: vi.fn(),
|
||||
loadGlobalConfig: mockLoadGlobalConfig,
|
||||
loadConfig: vi.fn().mockImplementation(() => ({
|
||||
global: mockLoadGlobalConfig(),
|
||||
project: {},
|
||||
})),
|
||||
resolvePieceConfigValues: (_projectDir: string, keys: readonly string[]) => {
|
||||
const global = mockLoadGlobalConfig() as Record<string, unknown>;
|
||||
const config = { ...global, piece: 'default', provider: global.provider ?? 'claude', verbose: false };
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const key of keys) {
|
||||
result[key] = config[key];
|
||||
}
|
||||
return result;
|
||||
},
|
||||
saveSessionState: vi.fn(),
|
||||
ensureDir: vi.fn(),
|
||||
writeFileAtomic: vi.fn(),
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
* Tests the 3-tier piece resolution (project-local → user → builtin)
|
||||
* and YAML parsing including special rule syntax (ai(), all(), any()).
|
||||
*
|
||||
* Mocked: globalConfig (for language/builtins)
|
||||
* Mocked: loadConfig (for language/builtins)
|
||||
* Not mocked: loadPiece, parsePiece, rule parsing
|
||||
*/
|
||||
|
||||
@ -18,9 +18,24 @@ const languageState = vi.hoisted(() => ({ value: 'en' as 'en' | 'ja' }));
|
||||
|
||||
vi.mock('../infra/config/global/globalConfig.js', () => ({
|
||||
loadGlobalConfig: vi.fn().mockReturnValue({}),
|
||||
getLanguage: vi.fn(() => languageState.value),
|
||||
getDisabledBuiltins: vi.fn().mockReturnValue([]),
|
||||
getBuiltinPiecesEnabled: vi.fn().mockReturnValue(true),
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/resolveConfigValue.js', () => ({
|
||||
resolveConfigValue: vi.fn((_cwd: string, key: string) => {
|
||||
if (key === 'language') return languageState.value;
|
||||
if (key === 'enableBuiltinPieces') return true;
|
||||
if (key === 'disabledBuiltins') return [];
|
||||
return undefined;
|
||||
}),
|
||||
resolveConfigValues: vi.fn((_cwd: string, keys: readonly string[]) => {
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const key of keys) {
|
||||
if (key === 'language') result[key] = languageState.value;
|
||||
if (key === 'enableBuiltinPieces') result[key] = true;
|
||||
if (key === 'disabledBuiltins') result[key] = [];
|
||||
}
|
||||
return result;
|
||||
}),
|
||||
}));
|
||||
|
||||
// --- Imports (after mocks) ---
|
||||
@ -38,6 +53,7 @@ function createTestDir(): string {
|
||||
|
||||
describe('Piece Loader IT: builtin piece loading', () => {
|
||||
let testDir: string;
|
||||
const builtinNames = listBuiltinPieceNames(process.cwd(), { includeDisabled: true });
|
||||
|
||||
beforeEach(() => {
|
||||
testDir = createTestDir();
|
||||
@ -48,8 +64,6 @@ describe('Piece Loader IT: builtin piece loading', () => {
|
||||
rmSync(testDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
const builtinNames = listBuiltinPieceNames({ includeDisabled: true });
|
||||
|
||||
for (const name of builtinNames) {
|
||||
it(`should load builtin piece: ${name}`, () => {
|
||||
const config = loadPiece(name, testDir);
|
||||
@ -85,7 +99,7 @@ describe('Piece Loader IT: builtin piece loading', () => {
|
||||
it('should load e2e-test as a builtin piece in ja locale', () => {
|
||||
languageState.value = 'ja';
|
||||
|
||||
const jaBuiltinNames = listBuiltinPieceNames({ includeDisabled: true });
|
||||
const jaBuiltinNames = listBuiltinPieceNames(testDir, { includeDisabled: true });
|
||||
expect(jaBuiltinNames).toContain('e2e-test');
|
||||
|
||||
const config = loadPiece('e2e-test', testDir);
|
||||
|
||||
@ -57,6 +57,24 @@ vi.mock('../infra/config/project/projectConfig.js', () => ({
|
||||
loadProjectConfig: vi.fn().mockReturnValue({}),
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/resolveConfigValue.js', () => ({
|
||||
resolveConfigValue: vi.fn((_cwd: string, key: string) => {
|
||||
if (key === 'language') return 'en';
|
||||
if (key === 'enableBuiltinPieces') return true;
|
||||
if (key === 'disabledBuiltins') return [];
|
||||
return undefined;
|
||||
}),
|
||||
resolveConfigValues: vi.fn((_cwd: string, keys: readonly string[]) => {
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const key of keys) {
|
||||
if (key === 'language') result[key] = 'en';
|
||||
if (key === 'enableBuiltinPieces') result[key] = true;
|
||||
if (key === 'disabledBuiltins') result[key] = [];
|
||||
}
|
||||
return result;
|
||||
}),
|
||||
}));
|
||||
|
||||
// --- Imports (after mocks) ---
|
||||
|
||||
import { PieceEngine } from '../core/piece/index.js';
|
||||
|
||||
@ -109,7 +109,6 @@ vi.mock('../infra/config/paths.js', async (importOriginal) => {
|
||||
updatePersonaSession: vi.fn(),
|
||||
loadWorktreeSessions: vi.fn().mockReturnValue({}),
|
||||
updateWorktreeSession: vi.fn(),
|
||||
getCurrentPiece: vi.fn().mockReturnValue('default'),
|
||||
getProjectConfigDir: vi.fn().mockImplementation((cwd: string) => join(cwd, '.takt')),
|
||||
};
|
||||
});
|
||||
@ -118,7 +117,11 @@ vi.mock('../infra/config/global/globalConfig.js', async (importOriginal) => {
|
||||
const original = await importOriginal<typeof import('../infra/config/global/globalConfig.js')>();
|
||||
return {
|
||||
...original,
|
||||
loadGlobalConfig: vi.fn().mockReturnValue({}),
|
||||
loadGlobalConfig: vi.fn().mockReturnValue({
|
||||
language: 'en',
|
||||
enableBuiltinPieces: true,
|
||||
disabledBuiltins: [],
|
||||
}),
|
||||
getLanguage: vi.fn().mockReturnValue('en'),
|
||||
getDisabledBuiltins: vi.fn().mockReturnValue([]),
|
||||
};
|
||||
|
||||
@ -91,7 +91,6 @@ vi.mock('../infra/config/paths.js', async (importOriginal) => {
|
||||
updatePersonaSession: vi.fn(),
|
||||
loadWorktreeSessions: vi.fn().mockReturnValue({}),
|
||||
updateWorktreeSession: vi.fn(),
|
||||
getCurrentPiece: vi.fn().mockReturnValue('default'),
|
||||
getProjectConfigDir: vi.fn().mockImplementation((cwd: string) => join(cwd, '.takt')),
|
||||
};
|
||||
});
|
||||
@ -100,7 +99,11 @@ vi.mock('../infra/config/global/globalConfig.js', async (importOriginal) => {
|
||||
const original = await importOriginal<typeof import('../infra/config/global/globalConfig.js')>();
|
||||
return {
|
||||
...original,
|
||||
loadGlobalConfig: vi.fn().mockReturnValue({}),
|
||||
loadGlobalConfig: vi.fn().mockReturnValue({
|
||||
language: 'en',
|
||||
enableBuiltinPieces: true,
|
||||
disabledBuiltins: [],
|
||||
}),
|
||||
getLanguage: vi.fn().mockReturnValue('en'),
|
||||
};
|
||||
});
|
||||
|
||||
170
src/__tests__/it-run-config-provider-options.test.ts
Normal file
170
src/__tests__/it-run-config-provider-options.test.ts
Normal file
@ -0,0 +1,170 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
import { mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { randomUUID } from 'node:crypto';
|
||||
|
||||
vi.mock('../agents/runner.js', () => ({
|
||||
runAgent: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../agents/ai-judge.js', async (importOriginal) => {
|
||||
const original = await importOriginal<typeof import('../agents/ai-judge.js')>();
|
||||
return {
|
||||
...original,
|
||||
callAiJudge: vi.fn().mockResolvedValue(-1),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../core/piece/phase-runner.js', () => ({
|
||||
needsStatusJudgmentPhase: vi.fn().mockReturnValue(false),
|
||||
runReportPhase: vi.fn().mockResolvedValue(undefined),
|
||||
runStatusJudgmentPhase: vi.fn().mockResolvedValue({ tag: '', ruleIndex: 0, method: 'auto_select' }),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/utils/index.js', async (importOriginal) => ({
|
||||
...(await importOriginal<Record<string, unknown>>()),
|
||||
generateReportDir: vi.fn().mockReturnValue('test-report-dir'),
|
||||
notifySuccess: vi.fn(),
|
||||
notifyError: vi.fn(),
|
||||
sendSlackNotification: vi.fn(),
|
||||
getSlackWebhookUrl: vi.fn(() => undefined),
|
||||
}));
|
||||
|
||||
import { runAllTasks } from '../features/tasks/index.js';
|
||||
import { TaskRunner } from '../infra/task/index.js';
|
||||
import { runAgent } from '../agents/runner.js';
|
||||
import { invalidateGlobalConfigCache } from '../infra/config/index.js';
|
||||
|
||||
interface TestEnv {
|
||||
root: string;
|
||||
projectDir: string;
|
||||
globalDir: string;
|
||||
}
|
||||
|
||||
function createEnv(): TestEnv {
|
||||
const root = join(tmpdir(), `takt-it-run-config-${randomUUID()}`);
|
||||
const projectDir = join(root, 'project');
|
||||
const globalDir = join(root, 'global');
|
||||
|
||||
mkdirSync(join(projectDir, '.takt', 'pieces', 'personas'), { recursive: true });
|
||||
mkdirSync(globalDir, { recursive: true });
|
||||
|
||||
writeFileSync(
|
||||
join(projectDir, '.takt', 'pieces', 'run-config-it.yaml'),
|
||||
[
|
||||
'name: run-config-it',
|
||||
'description: run config provider options integration test',
|
||||
'max_movements: 3',
|
||||
'initial_movement: plan',
|
||||
'movements:',
|
||||
' - name: plan',
|
||||
' persona: ./personas/planner.md',
|
||||
' instruction: "{task}"',
|
||||
' rules:',
|
||||
' - condition: done',
|
||||
' next: COMPLETE',
|
||||
].join('\n'),
|
||||
'utf-8',
|
||||
);
|
||||
writeFileSync(join(projectDir, '.takt', 'pieces', 'personas', 'planner.md'), 'You are planner.', 'utf-8');
|
||||
|
||||
return { root, projectDir, globalDir };
|
||||
}
|
||||
|
||||
function setGlobalConfig(globalDir: string, body: string): void {
|
||||
writeFileSync(join(globalDir, 'config.yaml'), body, 'utf-8');
|
||||
}
|
||||
|
||||
function setProjectConfig(projectDir: string, body: string): void {
|
||||
writeFileSync(join(projectDir, '.takt', 'config.yaml'), body, 'utf-8');
|
||||
}
|
||||
|
||||
function mockDoneResponse() {
|
||||
return {
|
||||
persona: 'planner',
|
||||
status: 'done',
|
||||
content: '[PLAN:1]\ndone',
|
||||
timestamp: new Date(),
|
||||
sessionId: 'session-it',
|
||||
};
|
||||
}
|
||||
|
||||
describe('IT: runAllTasks provider_options reflection', () => {
|
||||
let env: TestEnv;
|
||||
let originalConfigDir: string | undefined;
|
||||
let originalEnvCodex: string | undefined;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
env = createEnv();
|
||||
originalConfigDir = process.env.TAKT_CONFIG_DIR;
|
||||
originalEnvCodex = process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS;
|
||||
process.env.TAKT_CONFIG_DIR = env.globalDir;
|
||||
delete process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS;
|
||||
invalidateGlobalConfigCache();
|
||||
|
||||
vi.mocked(runAgent).mockResolvedValue(mockDoneResponse());
|
||||
|
||||
const runner = new TaskRunner(env.projectDir);
|
||||
runner.addTask('test task');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
if (originalConfigDir === undefined) {
|
||||
delete process.env.TAKT_CONFIG_DIR;
|
||||
} else {
|
||||
process.env.TAKT_CONFIG_DIR = originalConfigDir;
|
||||
}
|
||||
if (originalEnvCodex === undefined) {
|
||||
delete process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS;
|
||||
} else {
|
||||
process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS = originalEnvCodex;
|
||||
}
|
||||
invalidateGlobalConfigCache();
|
||||
rmSync(env.root, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('project provider_options should override global in runAllTasks flow', async () => {
|
||||
setGlobalConfig(env.globalDir, [
|
||||
'provider_options:',
|
||||
' codex:',
|
||||
' network_access: true',
|
||||
].join('\n'));
|
||||
setProjectConfig(env.projectDir, [
|
||||
'provider_options:',
|
||||
' codex:',
|
||||
' network_access: false',
|
||||
].join('\n'));
|
||||
|
||||
await runAllTasks(env.projectDir, 'run-config-it');
|
||||
|
||||
const options = vi.mocked(runAgent).mock.calls[0]?.[2];
|
||||
expect(options?.providerOptions).toEqual({
|
||||
codex: { networkAccess: false },
|
||||
});
|
||||
});
|
||||
|
||||
it('env provider_options should override yaml in runAllTasks flow', async () => {
|
||||
setGlobalConfig(env.globalDir, [
|
||||
'provider_options:',
|
||||
' codex:',
|
||||
' network_access: false',
|
||||
].join('\n'));
|
||||
setProjectConfig(env.projectDir, [
|
||||
'provider_options:',
|
||||
' codex:',
|
||||
' network_access: false',
|
||||
].join('\n'));
|
||||
process.env.TAKT_PROVIDER_OPTIONS_CODEX_NETWORK_ACCESS = 'true';
|
||||
invalidateGlobalConfigCache();
|
||||
|
||||
await runAllTasks(env.projectDir, 'run-config-it');
|
||||
|
||||
const options = vi.mocked(runAgent).mock.calls[0]?.[2];
|
||||
expect(options?.providerOptions).toEqual({
|
||||
codex: { networkAccess: true },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -89,6 +89,18 @@ vi.mock('../infra/config/index.js', () => ({
|
||||
loadWorktreeSessions: vi.fn().mockReturnValue({}),
|
||||
updateWorktreeSession: vi.fn(),
|
||||
loadGlobalConfig: vi.fn().mockReturnValue({ provider: 'claude' }),
|
||||
loadConfig: vi.fn().mockReturnValue({
|
||||
global: { provider: 'claude' },
|
||||
project: {},
|
||||
}),
|
||||
resolvePieceConfigValues: (_projectDir: string, keys: readonly string[]) => {
|
||||
const config: Record<string, unknown> = { provider: 'claude', piece: 'default', verbose: false };
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const key of keys) {
|
||||
result[key] = config[key];
|
||||
}
|
||||
return result;
|
||||
},
|
||||
saveSessionState: vi.fn(),
|
||||
ensureDir: vi.fn(),
|
||||
writeFileAtomic: vi.fn(),
|
||||
|
||||
@ -495,7 +495,6 @@ describe('GlobalConfigSchema', () => {
|
||||
const config = {};
|
||||
const result = GlobalConfigSchema.parse(config);
|
||||
|
||||
expect(result.default_piece).toBe('default');
|
||||
expect(result.log_level).toBe('info');
|
||||
expect(result.provider).toBe('claude');
|
||||
expect(result.observability).toBeUndefined();
|
||||
@ -503,7 +502,6 @@ describe('GlobalConfigSchema', () => {
|
||||
|
||||
it('should accept valid config', () => {
|
||||
const config = {
|
||||
default_piece: 'custom',
|
||||
log_level: 'debug' as const,
|
||||
observability: {
|
||||
provider_events: false,
|
||||
|
||||
@ -1,11 +1,11 @@
|
||||
/**
|
||||
* Unit tests for task naming utilities
|
||||
*
|
||||
* Tests nowIso, firstLine, and sanitizeTaskName functions.
|
||||
* Tests nowIso and firstLine functions.
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, afterEach } from 'vitest';
|
||||
import { nowIso, firstLine, sanitizeTaskName } from '../infra/task/naming.js';
|
||||
import { nowIso, firstLine } from '../infra/task/naming.js';
|
||||
|
||||
describe('nowIso', () => {
|
||||
afterEach(() => {
|
||||
@ -54,34 +54,3 @@ describe('firstLine', () => {
|
||||
expect(firstLine(' \n ')).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('sanitizeTaskName', () => {
|
||||
it('should lowercase the input', () => {
|
||||
expect(sanitizeTaskName('Hello World')).toBe('hello-world');
|
||||
});
|
||||
|
||||
it('should replace special characters with spaces then hyphens', () => {
|
||||
expect(sanitizeTaskName('task@name#123')).toBe('task-name-123');
|
||||
});
|
||||
|
||||
it('should collapse multiple hyphens', () => {
|
||||
expect(sanitizeTaskName('a---b')).toBe('a-b');
|
||||
});
|
||||
|
||||
it('should trim leading/trailing whitespace', () => {
|
||||
expect(sanitizeTaskName(' hello ')).toBe('hello');
|
||||
});
|
||||
|
||||
it('should handle typical task names', () => {
|
||||
expect(sanitizeTaskName('Fix: login bug (#42)')).toBe('fix-login-bug-42');
|
||||
});
|
||||
|
||||
it('should generate fallback name for empty result', () => {
|
||||
const result = sanitizeTaskName('!@#$%');
|
||||
expect(result).toMatch(/^task-\d+$/);
|
||||
});
|
||||
|
||||
it('should preserve numbers and lowercase letters', () => {
|
||||
expect(sanitizeTaskName('abc123def')).toBe('abc123def');
|
||||
});
|
||||
});
|
||||
|
||||
@ -2,8 +2,7 @@ import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
const {
|
||||
getProviderMock,
|
||||
loadProjectConfigMock,
|
||||
loadGlobalConfigMock,
|
||||
loadConfigMock,
|
||||
loadCustomAgentsMock,
|
||||
loadAgentPromptMock,
|
||||
loadTemplateMock,
|
||||
@ -15,8 +14,7 @@ const {
|
||||
|
||||
return {
|
||||
getProviderMock: vi.fn(() => ({ setup: providerSetup })),
|
||||
loadProjectConfigMock: vi.fn(),
|
||||
loadGlobalConfigMock: vi.fn(),
|
||||
loadConfigMock: vi.fn(),
|
||||
loadCustomAgentsMock: vi.fn(),
|
||||
loadAgentPromptMock: vi.fn(),
|
||||
loadTemplateMock: vi.fn(),
|
||||
@ -30,10 +28,21 @@ vi.mock('../infra/providers/index.js', () => ({
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/index.js', () => ({
|
||||
loadProjectConfig: loadProjectConfigMock,
|
||||
loadGlobalConfig: loadGlobalConfigMock,
|
||||
loadConfig: loadConfigMock,
|
||||
loadCustomAgents: loadCustomAgentsMock,
|
||||
loadAgentPrompt: loadAgentPromptMock,
|
||||
resolveConfigValues: (_projectDir: string, keys: readonly string[]) => {
|
||||
const loaded = loadConfigMock() as Record<string, unknown>;
|
||||
const global = (loaded.global ?? {}) as Record<string, unknown>;
|
||||
const project = (loaded.project ?? {}) as Record<string, unknown>;
|
||||
const provider = (project.provider ?? global.provider ?? 'claude') as string;
|
||||
const config: Record<string, unknown> = { ...global, ...project, provider, piece: project.piece ?? 'default', verbose: false };
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const key of keys) {
|
||||
result[key] = config[key];
|
||||
}
|
||||
return result;
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('../shared/prompts/index.js', () => ({
|
||||
@ -47,17 +56,18 @@ describe('option resolution order', () => {
|
||||
vi.clearAllMocks();
|
||||
|
||||
providerCallMock.mockResolvedValue({ content: 'ok' });
|
||||
loadProjectConfigMock.mockReturnValue({});
|
||||
loadGlobalConfigMock.mockReturnValue({});
|
||||
loadConfigMock.mockReturnValue({ global: {}, project: {} });
|
||||
loadCustomAgentsMock.mockReturnValue(new Map());
|
||||
loadAgentPromptMock.mockReturnValue('prompt');
|
||||
loadTemplateMock.mockReturnValue('template');
|
||||
});
|
||||
|
||||
it('should resolve provider in order: CLI > Local > Piece(step) > Global', async () => {
|
||||
it('should resolve provider in order: CLI > Config(project??global) > stepProvider > default', async () => {
|
||||
// Given
|
||||
loadProjectConfigMock.mockReturnValue({ provider: 'opencode' });
|
||||
loadGlobalConfigMock.mockReturnValue({ provider: 'mock' });
|
||||
loadConfigMock.mockReturnValue({
|
||||
project: { provider: 'opencode' },
|
||||
global: { provider: 'mock' },
|
||||
});
|
||||
|
||||
// When: CLI provider が指定される
|
||||
await runAgent(undefined, 'task', {
|
||||
@ -69,7 +79,7 @@ describe('option resolution order', () => {
|
||||
// Then
|
||||
expect(getProviderMock).toHaveBeenLastCalledWith('codex');
|
||||
|
||||
// When: CLI 指定なし(Local が有効)
|
||||
// When: CLI 指定なし(project provider が有効: resolveConfigValues は project.provider ?? global.provider を返す)
|
||||
await runAgent(undefined, 'task', {
|
||||
cwd: '/repo',
|
||||
stepProvider: 'claude',
|
||||
@ -78,17 +88,20 @@ describe('option resolution order', () => {
|
||||
// Then
|
||||
expect(getProviderMock).toHaveBeenLastCalledWith('opencode');
|
||||
|
||||
// When: Local なし(Piece が有効)
|
||||
loadProjectConfigMock.mockReturnValue({});
|
||||
// When: project なし → resolveConfigValues は global.provider を返す(フラットマージ)
|
||||
loadConfigMock.mockReturnValue({
|
||||
project: {},
|
||||
global: { provider: 'mock' },
|
||||
});
|
||||
await runAgent(undefined, 'task', {
|
||||
cwd: '/repo',
|
||||
stepProvider: 'claude',
|
||||
});
|
||||
|
||||
// Then
|
||||
expect(getProviderMock).toHaveBeenLastCalledWith('claude');
|
||||
// Then: resolveConfigValues returns 'mock' (global fallback), so stepProvider is not reached
|
||||
expect(getProviderMock).toHaveBeenLastCalledWith('mock');
|
||||
|
||||
// When: Piece なし(Global が有効)
|
||||
// When: stepProvider もなし → 同様に global.provider
|
||||
await runAgent(undefined, 'task', { cwd: '/repo' });
|
||||
|
||||
// Then
|
||||
@ -97,8 +110,10 @@ describe('option resolution order', () => {
|
||||
|
||||
it('should resolve model in order: CLI > Piece(step) > Global(matching provider)', async () => {
|
||||
// Given
|
||||
loadProjectConfigMock.mockReturnValue({ provider: 'claude' });
|
||||
loadGlobalConfigMock.mockReturnValue({ provider: 'claude', model: 'global-model' });
|
||||
loadConfigMock.mockReturnValue({
|
||||
project: { provider: 'claude' },
|
||||
global: { provider: 'claude', model: 'global-model' },
|
||||
});
|
||||
|
||||
// When: CLI model あり
|
||||
await runAgent(undefined, 'task', {
|
||||
@ -135,13 +150,16 @@ describe('option resolution order', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should ignore global model when global provider does not match resolved provider', async () => {
|
||||
// Given
|
||||
loadProjectConfigMock.mockReturnValue({ provider: 'codex' });
|
||||
loadGlobalConfigMock.mockReturnValue({ provider: 'claude', model: 'global-model' });
|
||||
it('should ignore global model when resolved provider does not match config provider', async () => {
|
||||
// Given: CLI provider overrides config provider, causing mismatch with config.model
|
||||
loadConfigMock.mockReturnValue({
|
||||
project: {},
|
||||
global: { provider: 'claude', model: 'global-model' },
|
||||
});
|
||||
|
||||
// When
|
||||
await runAgent(undefined, 'task', { cwd: '/repo' });
|
||||
// When: CLI provider='codex' overrides config provider='claude'
|
||||
// resolveModel compares config.provider ('claude') with resolvedProvider ('codex') → mismatch → model ignored
|
||||
await runAgent(undefined, 'task', { cwd: '/repo', provider: 'codex' });
|
||||
|
||||
// Then
|
||||
expect(providerCallMock).toHaveBeenLastCalledWith(
|
||||
@ -160,16 +178,15 @@ describe('option resolution order', () => {
|
||||
},
|
||||
};
|
||||
|
||||
loadProjectConfigMock.mockReturnValue({
|
||||
provider: 'claude',
|
||||
provider_options: {
|
||||
claude: { sandbox: { allow_unsandboxed_commands: true } },
|
||||
loadConfigMock.mockReturnValue({
|
||||
project: {
|
||||
provider: 'claude',
|
||||
},
|
||||
});
|
||||
loadGlobalConfigMock.mockReturnValue({
|
||||
provider: 'claude',
|
||||
providerOptions: {
|
||||
claude: { sandbox: { allowUnsandboxedCommands: true } },
|
||||
global: {
|
||||
provider: 'claude',
|
||||
providerOptions: {
|
||||
claude: { sandbox: { allowUnsandboxedCommands: true } },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
@ -187,8 +204,11 @@ describe('option resolution order', () => {
|
||||
);
|
||||
});
|
||||
|
||||
it('should use custom agent provider/model when higher-priority values are absent', async () => {
|
||||
// Given
|
||||
it('should use custom agent model and prompt when higher-priority values are absent', async () => {
|
||||
// Given: custom agent with provider/model, but no CLI/config override
|
||||
// Note: resolveConfigValues returns provider='claude' by default (loadConfig merges project ?? global ?? 'claude'),
|
||||
// so agentConfig.provider is not reached in resolveProvider (config.provider is always truthy).
|
||||
// However, custom agent model IS used because resolveModel checks agentConfig.model before config.
|
||||
const customAgents = new Map([
|
||||
['custom', { name: 'custom', prompt: 'agent prompt', provider: 'opencode', model: 'agent-model' }],
|
||||
]);
|
||||
@ -197,12 +217,14 @@ describe('option resolution order', () => {
|
||||
// When
|
||||
await runAgent('custom', 'task', { cwd: '/repo' });
|
||||
|
||||
// Then
|
||||
expect(getProviderMock).toHaveBeenLastCalledWith('opencode');
|
||||
// Then: provider falls back to config default ('claude'), not agentConfig.provider
|
||||
expect(getProviderMock).toHaveBeenLastCalledWith('claude');
|
||||
// Agent model is used (resolved before config.model in resolveModel)
|
||||
expect(providerCallMock).toHaveBeenLastCalledWith(
|
||||
'task',
|
||||
expect.objectContaining({ model: 'agent-model' }),
|
||||
);
|
||||
// Agent prompt is still used
|
||||
expect(providerSetupMock).toHaveBeenLastCalledWith(
|
||||
expect.objectContaining({ systemPrompt: 'prompt' }),
|
||||
);
|
||||
|
||||
@ -16,8 +16,8 @@ function createMovement(overrides: Partial<PieceMovement> = {}): PieceMovement {
|
||||
function createBuilder(step: PieceMovement, engineOverrides: Partial<PieceEngineOptions> = {}): OptionsBuilder {
|
||||
const engineOptions: PieceEngineOptions = {
|
||||
projectCwd: '/project',
|
||||
globalProvider: 'codex',
|
||||
globalProviderProfiles: {
|
||||
provider: 'codex',
|
||||
providerProfiles: {
|
||||
codex: {
|
||||
defaultPermissionMode: 'full',
|
||||
},
|
||||
@ -60,15 +60,57 @@ describe('OptionsBuilder.buildBaseOptions', () => {
|
||||
it('uses default profile when provider_profiles are not provided', () => {
|
||||
const step = createMovement();
|
||||
const builder = createBuilder(step, {
|
||||
globalProvider: undefined,
|
||||
globalProviderProfiles: undefined,
|
||||
projectProvider: undefined,
|
||||
provider: undefined,
|
||||
providerProfiles: undefined,
|
||||
});
|
||||
|
||||
const options = builder.buildBaseOptions(step);
|
||||
expect(options.permissionMode).toBe('edit');
|
||||
});
|
||||
|
||||
it('merges provider options with precedence: global < project < movement', () => {
|
||||
const step = createMovement({
|
||||
providerOptions: {
|
||||
codex: { networkAccess: false },
|
||||
claude: { sandbox: { excludedCommands: ['./gradlew'] } },
|
||||
},
|
||||
});
|
||||
const builder = createBuilder(step, {
|
||||
providerOptions: {
|
||||
codex: { networkAccess: true },
|
||||
claude: { sandbox: { allowUnsandboxedCommands: true } },
|
||||
opencode: { networkAccess: true },
|
||||
},
|
||||
});
|
||||
|
||||
const options = builder.buildBaseOptions(step);
|
||||
|
||||
expect(options.providerOptions).toEqual({
|
||||
codex: { networkAccess: false },
|
||||
opencode: { networkAccess: true },
|
||||
claude: {
|
||||
sandbox: {
|
||||
allowUnsandboxedCommands: true,
|
||||
excludedCommands: ['./gradlew'],
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('falls back to global/project provider options when movement has none', () => {
|
||||
const step = createMovement();
|
||||
const builder = createBuilder(step, {
|
||||
providerOptions: {
|
||||
codex: { networkAccess: false },
|
||||
},
|
||||
});
|
||||
|
||||
const options = builder.buildBaseOptions(step);
|
||||
|
||||
expect(options.providerOptions).toEqual({
|
||||
codex: { networkAccess: false },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('OptionsBuilder.buildResumeOptions', () => {
|
||||
|
||||
@ -17,6 +17,24 @@ vi.mock('../infra/config/global/globalConfig.js', async (importOriginal) => {
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../infra/config/resolveConfigValue.js', () => ({
|
||||
resolveConfigValue: (_cwd: string, key: string) => {
|
||||
if (key === 'language') return 'en';
|
||||
if (key === 'enableBuiltinPieces') return false;
|
||||
if (key === 'disabledBuiltins') return [];
|
||||
return undefined;
|
||||
},
|
||||
resolveConfigValues: (_cwd: string, keys: readonly string[]) => {
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const key of keys) {
|
||||
if (key === 'language') result[key] = 'en';
|
||||
if (key === 'enableBuiltinPieces') result[key] = false;
|
||||
if (key === 'disabledBuiltins') result[key] = [];
|
||||
}
|
||||
return result;
|
||||
},
|
||||
}));
|
||||
|
||||
const { listPieces } = await import('../infra/config/loaders/pieceLoader.js');
|
||||
|
||||
const SAMPLE_PIECE = `name: test-piece
|
||||
|
||||
@ -22,12 +22,28 @@ vi.mock('../infra/config/global/globalConfig.js', async (importOriginal) => {
|
||||
const original = await importOriginal() as Record<string, unknown>;
|
||||
return {
|
||||
...original,
|
||||
getLanguage: () => languageState.value,
|
||||
getBuiltinPiecesEnabled: () => true,
|
||||
getDisabledBuiltins: () => [],
|
||||
loadGlobalConfig: () => ({}),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../infra/config/resolveConfigValue.js', () => ({
|
||||
resolveConfigValue: (_cwd: string, key: string) => {
|
||||
if (key === 'language') return languageState.value;
|
||||
if (key === 'enableBuiltinPieces') return true;
|
||||
if (key === 'disabledBuiltins') return [];
|
||||
return undefined;
|
||||
},
|
||||
resolveConfigValues: (_cwd: string, keys: readonly string[]) => {
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const key of keys) {
|
||||
if (key === 'language') result[key] = languageState.value;
|
||||
if (key === 'enableBuiltinPieces') result[key] = true;
|
||||
if (key === 'disabledBuiltins') result[key] = [];
|
||||
}
|
||||
return result;
|
||||
},
|
||||
}));
|
||||
|
||||
vi.mock('../infra/resources/index.js', async (importOriginal) => {
|
||||
const original = await importOriginal() as Record<string, unknown>;
|
||||
return {
|
||||
@ -45,6 +61,7 @@ vi.mock('../infra/config/global/pieceCategories.js', async (importOriginal) => {
|
||||
});
|
||||
|
||||
const {
|
||||
BUILTIN_CATEGORY_NAME,
|
||||
getPieceCategories,
|
||||
loadDefaultCategories,
|
||||
buildCategorizedPieces,
|
||||
@ -92,7 +109,7 @@ describe('piece category config loading', () => {
|
||||
});
|
||||
|
||||
it('should return null when builtin categories file is missing', () => {
|
||||
const config = getPieceCategories();
|
||||
const config = getPieceCategories(testDir);
|
||||
expect(config).toBeNull();
|
||||
});
|
||||
|
||||
@ -104,7 +121,7 @@ piece_categories:
|
||||
- default
|
||||
`);
|
||||
|
||||
const config = loadDefaultCategories();
|
||||
const config = loadDefaultCategories(testDir);
|
||||
expect(config).not.toBeNull();
|
||||
expect(config!.pieceCategories).toEqual([
|
||||
{ name: 'Quick Start', pieces: ['default'], children: [] },
|
||||
@ -113,6 +130,7 @@ piece_categories:
|
||||
{ name: 'Quick Start', pieces: ['default'], children: [] },
|
||||
]);
|
||||
expect(config!.userPieceCategories).toEqual([]);
|
||||
expect(config!.hasUserCategories).toBe(false);
|
||||
});
|
||||
|
||||
it('should use builtin categories when user overlay file is missing', () => {
|
||||
@ -125,17 +143,18 @@ show_others_category: true
|
||||
others_category_name: Others
|
||||
`);
|
||||
|
||||
const config = getPieceCategories();
|
||||
const config = getPieceCategories(testDir);
|
||||
expect(config).not.toBeNull();
|
||||
expect(config!.pieceCategories).toEqual([
|
||||
{ name: 'Main', pieces: ['default'], children: [] },
|
||||
]);
|
||||
expect(config!.userPieceCategories).toEqual([]);
|
||||
expect(config!.hasUserCategories).toBe(false);
|
||||
expect(config!.showOthersCategory).toBe(true);
|
||||
expect(config!.othersCategoryName).toBe('Others');
|
||||
});
|
||||
|
||||
it('should merge user overlay categories with builtin categories', () => {
|
||||
it('should separate user categories from builtin categories with builtin wrapper', () => {
|
||||
writeYaml(join(resourcesDir, 'piece-categories.yaml'), `
|
||||
piece_categories:
|
||||
Main:
|
||||
@ -165,18 +184,25 @@ show_others_category: false
|
||||
others_category_name: Unclassified
|
||||
`);
|
||||
|
||||
const config = getPieceCategories();
|
||||
const config = getPieceCategories(testDir);
|
||||
expect(config).not.toBeNull();
|
||||
expect(config!.pieceCategories).toEqual([
|
||||
{ name: 'Main', pieces: ['custom'], children: [] },
|
||||
{ name: 'My Team', pieces: ['team-flow'], children: [] },
|
||||
{
|
||||
name: 'Main',
|
||||
pieces: ['custom'],
|
||||
name: BUILTIN_CATEGORY_NAME,
|
||||
pieces: [],
|
||||
children: [
|
||||
{ name: 'Child', pieces: ['nested'], children: [] },
|
||||
{
|
||||
name: 'Main',
|
||||
pieces: ['default', 'coding'],
|
||||
children: [
|
||||
{ name: 'Child', pieces: ['nested'], children: [] },
|
||||
],
|
||||
},
|
||||
{ name: 'Review', pieces: ['review-only', 'e2e-test'], children: [] },
|
||||
],
|
||||
},
|
||||
{ name: 'Review', pieces: ['review-only', 'e2e-test'], children: [] },
|
||||
{ name: 'My Team', pieces: ['team-flow'], children: [] },
|
||||
]);
|
||||
expect(config!.builtinPieceCategories).toEqual([
|
||||
{
|
||||
@ -192,6 +218,7 @@ others_category_name: Unclassified
|
||||
{ name: 'Main', pieces: ['custom'], children: [] },
|
||||
{ name: 'My Team', pieces: ['team-flow'], children: [] },
|
||||
]);
|
||||
expect(config!.hasUserCategories).toBe(true);
|
||||
expect(config!.showOthersCategory).toBe(false);
|
||||
expect(config!.othersCategoryName).toBe('Unclassified');
|
||||
});
|
||||
@ -207,7 +234,7 @@ piece_categories:
|
||||
- e2e-test
|
||||
`);
|
||||
|
||||
const config = getPieceCategories();
|
||||
const config = getPieceCategories(testDir);
|
||||
expect(config).not.toBeNull();
|
||||
expect(config!.pieceCategories).toEqual([
|
||||
{ name: 'レビュー', pieces: ['review-only', 'e2e-test'], children: [] },
|
||||
@ -232,7 +259,7 @@ show_others_category: false
|
||||
others_category_name: Unclassified
|
||||
`);
|
||||
|
||||
const config = getPieceCategories();
|
||||
const config = getPieceCategories(testDir);
|
||||
expect(config).not.toBeNull();
|
||||
expect(config!.pieceCategories).toEqual([
|
||||
{ name: 'Main', pieces: ['default'], children: [] },
|
||||
@ -243,6 +270,7 @@ others_category_name: Unclassified
|
||||
{ name: 'Review', pieces: ['review-only'], children: [] },
|
||||
]);
|
||||
expect(config!.userPieceCategories).toEqual([]);
|
||||
expect(config!.hasUserCategories).toBe(false);
|
||||
expect(config!.showOthersCategory).toBe(false);
|
||||
expect(config!.othersCategoryName).toBe('Unclassified');
|
||||
});
|
||||
@ -274,11 +302,12 @@ describe('buildCategorizedPieces', () => {
|
||||
userPieceCategories: [
|
||||
{ name: 'My Team', pieces: ['missing-user-piece'], children: [] },
|
||||
],
|
||||
hasUserCategories: true,
|
||||
showOthersCategory: true,
|
||||
othersCategoryName: 'Others',
|
||||
};
|
||||
|
||||
const categorized = buildCategorizedPieces(allPieces, config);
|
||||
const categorized = buildCategorizedPieces(allPieces, config, process.cwd());
|
||||
expect(categorized.categories).toEqual([
|
||||
{
|
||||
name: 'Main',
|
||||
@ -306,11 +335,12 @@ describe('buildCategorizedPieces', () => {
|
||||
{ name: 'Main', pieces: ['default'], children: [] },
|
||||
],
|
||||
userPieceCategories: [],
|
||||
hasUserCategories: false,
|
||||
showOthersCategory: true,
|
||||
othersCategoryName: 'Others',
|
||||
};
|
||||
|
||||
const categorized = buildCategorizedPieces(allPieces, config);
|
||||
const categorized = buildCategorizedPieces(allPieces, config, process.cwd());
|
||||
expect(categorized.categories).toEqual([
|
||||
{ name: 'Main', pieces: ['default'], children: [] },
|
||||
{ name: 'Others', pieces: ['extra'], children: [] },
|
||||
@ -330,13 +360,60 @@ describe('buildCategorizedPieces', () => {
|
||||
{ name: 'Main', pieces: ['default'], children: [] },
|
||||
],
|
||||
userPieceCategories: [],
|
||||
hasUserCategories: false,
|
||||
showOthersCategory: false,
|
||||
othersCategoryName: 'Others',
|
||||
};
|
||||
|
||||
const categorized = buildCategorizedPieces(allPieces, config, process.cwd());
|
||||
expect(categorized.categories).toEqual([
|
||||
{ name: 'Main', pieces: ['default'], children: [] },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should categorize pieces through builtin wrapper node', () => {
|
||||
const allPieces = createPieceMap([
|
||||
{ name: 'custom', source: 'user' },
|
||||
{ name: 'default', source: 'builtin' },
|
||||
{ name: 'review-only', source: 'builtin' },
|
||||
{ name: 'extra', source: 'builtin' },
|
||||
]);
|
||||
const config = {
|
||||
pieceCategories: [
|
||||
{ name: 'My Team', pieces: ['custom'], children: [] },
|
||||
{
|
||||
name: BUILTIN_CATEGORY_NAME,
|
||||
pieces: [],
|
||||
children: [
|
||||
{ name: 'Quick Start', pieces: ['default'], children: [] },
|
||||
{ name: 'Review', pieces: ['review-only'], children: [] },
|
||||
],
|
||||
},
|
||||
],
|
||||
builtinPieceCategories: [
|
||||
{ name: 'Quick Start', pieces: ['default'], children: [] },
|
||||
{ name: 'Review', pieces: ['review-only'], children: [] },
|
||||
],
|
||||
userPieceCategories: [
|
||||
{ name: 'My Team', pieces: ['custom'], children: [] },
|
||||
],
|
||||
hasUserCategories: true,
|
||||
showOthersCategory: true,
|
||||
othersCategoryName: 'Others',
|
||||
};
|
||||
|
||||
const categorized = buildCategorizedPieces(allPieces, config);
|
||||
expect(categorized.categories).toEqual([
|
||||
{ name: 'Main', pieces: ['default'], children: [] },
|
||||
{ name: 'My Team', pieces: ['custom'], children: [] },
|
||||
{
|
||||
name: BUILTIN_CATEGORY_NAME,
|
||||
pieces: [],
|
||||
children: [
|
||||
{ name: 'Quick Start', pieces: ['default'], children: [] },
|
||||
{ name: 'Review', pieces: ['review-only'], children: [] },
|
||||
],
|
||||
},
|
||||
{ name: 'Others', pieces: ['extra'], children: [] },
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
@ -40,7 +40,7 @@ const configMock = vi.hoisted(() => ({
|
||||
getPieceCategories: vi.fn(),
|
||||
buildCategorizedPieces: vi.fn(),
|
||||
getCurrentPiece: vi.fn(),
|
||||
findPieceCategories: vi.fn(() => []),
|
||||
resolveConfigValue: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/index.js', () => configMock);
|
||||
@ -242,6 +242,65 @@ describe('selectPieceFromCategorizedPieces', () => {
|
||||
// Should NOT contain the parent category again
|
||||
expect(labels.some((l) => l.includes('Dev'))).toBe(false);
|
||||
});
|
||||
|
||||
it('should navigate into builtin wrapper category and select a piece', async () => {
|
||||
const categorized: CategorizedPieces = {
|
||||
categories: [
|
||||
{ name: 'My Team', pieces: ['custom'], children: [] },
|
||||
{
|
||||
name: 'builtin',
|
||||
pieces: [],
|
||||
children: [
|
||||
{ name: 'Quick Start', pieces: ['default'], children: [] },
|
||||
],
|
||||
},
|
||||
],
|
||||
allPieces: createPieceMap([
|
||||
{ name: 'custom', source: 'user' },
|
||||
{ name: 'default', source: 'builtin' },
|
||||
]),
|
||||
missingPieces: [],
|
||||
};
|
||||
|
||||
// Select builtin category → Quick Start subcategory → piece
|
||||
selectOptionMock
|
||||
.mockResolvedValueOnce('__custom_category__:builtin')
|
||||
.mockResolvedValueOnce('__category__:Quick Start')
|
||||
.mockResolvedValueOnce('default');
|
||||
|
||||
const selected = await selectPieceFromCategorizedPieces(categorized, '');
|
||||
expect(selected).toBe('default');
|
||||
expect(selectOptionMock).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it('should show builtin wrapper as a folder in top-level options', async () => {
|
||||
const categorized: CategorizedPieces = {
|
||||
categories: [
|
||||
{ name: 'My Team', pieces: ['custom'], children: [] },
|
||||
{
|
||||
name: 'builtin',
|
||||
pieces: [],
|
||||
children: [
|
||||
{ name: 'Quick Start', pieces: ['default'], children: [] },
|
||||
],
|
||||
},
|
||||
],
|
||||
allPieces: createPieceMap([
|
||||
{ name: 'custom', source: 'user' },
|
||||
{ name: 'default', source: 'builtin' },
|
||||
]),
|
||||
missingPieces: [],
|
||||
};
|
||||
|
||||
selectOptionMock.mockResolvedValueOnce(null);
|
||||
|
||||
await selectPieceFromCategorizedPieces(categorized, '');
|
||||
|
||||
const firstCallOptions = selectOptionMock.mock.calls[0]![1] as { label: string; value: string }[];
|
||||
const labels = firstCallOptions.map((o) => o.label);
|
||||
expect(labels.some((l) => l.includes('My Team'))).toBe(true);
|
||||
expect(labels.some((l) => l.includes('builtin'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('selectPiece', () => {
|
||||
@ -258,13 +317,13 @@ describe('selectPiece', () => {
|
||||
configMock.loadAllPiecesWithSources.mockReset();
|
||||
configMock.getPieceCategories.mockReset();
|
||||
configMock.buildCategorizedPieces.mockReset();
|
||||
configMock.getCurrentPiece.mockReset();
|
||||
configMock.resolveConfigValue.mockReset();
|
||||
});
|
||||
|
||||
it('should return default piece when no pieces found and fallbackToDefault is true', async () => {
|
||||
configMock.getPieceCategories.mockReturnValue(null);
|
||||
configMock.listPieces.mockReturnValue([]);
|
||||
configMock.getCurrentPiece.mockReturnValue('default');
|
||||
configMock.resolveConfigValue.mockReturnValue('default');
|
||||
|
||||
const result = await selectPiece('/cwd');
|
||||
|
||||
@ -274,7 +333,7 @@ describe('selectPiece', () => {
|
||||
it('should return null when no pieces found and fallbackToDefault is false', async () => {
|
||||
configMock.getPieceCategories.mockReturnValue(null);
|
||||
configMock.listPieces.mockReturnValue([]);
|
||||
configMock.getCurrentPiece.mockReturnValue('default');
|
||||
configMock.resolveConfigValue.mockReturnValue('default');
|
||||
|
||||
const result = await selectPiece('/cwd', { fallbackToDefault: false });
|
||||
|
||||
@ -287,7 +346,7 @@ describe('selectPiece', () => {
|
||||
configMock.listPieceEntries.mockReturnValue([
|
||||
{ name: 'only-piece', path: '/tmp/only-piece.yaml', source: 'user' },
|
||||
]);
|
||||
configMock.getCurrentPiece.mockReturnValue('only-piece');
|
||||
configMock.resolveConfigValue.mockReturnValue('only-piece');
|
||||
selectOptionMock.mockResolvedValueOnce('only-piece');
|
||||
|
||||
const result = await selectPiece('/cwd');
|
||||
@ -307,7 +366,7 @@ describe('selectPiece', () => {
|
||||
configMock.getPieceCategories.mockReturnValue({ categories: ['Dev'] });
|
||||
configMock.loadAllPiecesWithSources.mockReturnValue(pieceMap);
|
||||
configMock.buildCategorizedPieces.mockReturnValue(categorized);
|
||||
configMock.getCurrentPiece.mockReturnValue('my-piece');
|
||||
configMock.resolveConfigValue.mockReturnValue('my-piece');
|
||||
|
||||
selectOptionMock.mockResolvedValueOnce('__current__');
|
||||
|
||||
@ -321,7 +380,7 @@ describe('selectPiece', () => {
|
||||
configMock.getPieceCategories.mockReturnValue(null);
|
||||
configMock.listPieces.mockReturnValue(['piece-a', 'piece-b']);
|
||||
configMock.listPieceEntries.mockReturnValue(entries);
|
||||
configMock.getCurrentPiece.mockReturnValue('piece-a');
|
||||
configMock.resolveConfigValue.mockReturnValue('piece-a');
|
||||
|
||||
selectOptionMock
|
||||
.mockResolvedValueOnce('custom')
|
||||
|
||||
@ -90,7 +90,15 @@ vi.mock('../infra/config/index.js', () => ({
|
||||
updatePersonaSession: vi.fn(),
|
||||
loadWorktreeSessions: vi.fn().mockReturnValue({}),
|
||||
updateWorktreeSession: vi.fn(),
|
||||
loadGlobalConfig: vi.fn().mockReturnValue({ provider: 'claude' }),
|
||||
resolvePieceConfigValues: vi.fn().mockReturnValue({
|
||||
notificationSound: true,
|
||||
notificationSoundEvents: {},
|
||||
provider: 'claude',
|
||||
runtime: undefined,
|
||||
preventSleep: false,
|
||||
model: undefined,
|
||||
observability: undefined,
|
||||
}),
|
||||
saveSessionState: vi.fn(),
|
||||
ensureDir: vi.fn(),
|
||||
writeFileAtomic: vi.fn(),
|
||||
|
||||
@ -59,7 +59,15 @@ vi.mock('../infra/config/index.js', () => ({
|
||||
updatePersonaSession: vi.fn(),
|
||||
loadWorktreeSessions: mockLoadWorktreeSessions,
|
||||
updateWorktreeSession: vi.fn(),
|
||||
loadGlobalConfig: vi.fn().mockReturnValue({ provider: 'claude' }),
|
||||
resolvePieceConfigValues: vi.fn().mockReturnValue({
|
||||
notificationSound: true,
|
||||
notificationSoundEvents: {},
|
||||
provider: 'claude',
|
||||
runtime: undefined,
|
||||
preventSleep: false,
|
||||
model: undefined,
|
||||
observability: undefined,
|
||||
}),
|
||||
saveSessionState: vi.fn(),
|
||||
ensureDir: vi.fn(),
|
||||
writeFileAtomic: vi.fn(),
|
||||
|
||||
@ -37,12 +37,13 @@ describe('generateReportDir', () => {
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
it('should preserve Japanese characters in summary', () => {
|
||||
it('should strip CJK characters from summary', () => {
|
||||
vi.useFakeTimers();
|
||||
vi.setSystemTime(new Date('2025-06-01T12:00:00.000Z'));
|
||||
|
||||
const result = generateReportDir('タスク指示書の実装');
|
||||
expect(result).toContain('タスク指示書の実装');
|
||||
// CJK characters are removed by slugify, leaving empty → falls back to 'task'
|
||||
expect(result).toBe('20250601-120000-task');
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
@ -53,7 +54,7 @@ describe('generateReportDir', () => {
|
||||
|
||||
const result = generateReportDir('Fix: bug (#42)');
|
||||
const slug = result.replace(/^20250101-000000-/, '');
|
||||
expect(slug).not.toMatch(/[^a-z0-9\u3040-\u309f\u30a0-\u30ff\u4e00-\u9faf-]/);
|
||||
expect(slug).not.toMatch(/[^a-z0-9-]/);
|
||||
|
||||
vi.useRealTimers();
|
||||
});
|
||||
|
||||
54
src/__tests__/reset-global-config.test.ts
Normal file
54
src/__tests__/reset-global-config.test.ts
Normal file
@ -0,0 +1,54 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { mkdtempSync, mkdirSync, readFileSync, writeFileSync, existsSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { resetGlobalConfigToTemplate } from '../infra/config/global/resetConfig.js';
|
||||
|
||||
describe('resetGlobalConfigToTemplate', () => {
|
||||
const originalEnv = process.env;
|
||||
let testRoot: string;
|
||||
let taktDir: string;
|
||||
let configPath: string;
|
||||
|
||||
beforeEach(() => {
|
||||
testRoot = mkdtempSync(join(tmpdir(), 'takt-reset-config-'));
|
||||
taktDir = join(testRoot, '.takt');
|
||||
mkdirSync(taktDir, { recursive: true });
|
||||
configPath = join(taktDir, 'config.yaml');
|
||||
process.env = { ...originalEnv, TAKT_CONFIG_DIR: taktDir };
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.env = originalEnv;
|
||||
rmSync(testRoot, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('should backup existing config and replace with language-matched template', () => {
|
||||
writeFileSync(configPath, ['language: ja', 'provider: mock'].join('\n'), 'utf-8');
|
||||
|
||||
const result = resetGlobalConfigToTemplate(new Date('2026-02-19T12:00:00Z'));
|
||||
|
||||
expect(result.language).toBe('ja');
|
||||
expect(result.backupPath).toBeDefined();
|
||||
expect(existsSync(result.backupPath!)).toBe(true);
|
||||
expect(readFileSync(result.backupPath!, 'utf-8')).toContain('provider: mock');
|
||||
|
||||
const newConfig = readFileSync(configPath, 'utf-8');
|
||||
expect(newConfig).toContain('language: ja');
|
||||
expect(newConfig).toContain('branch_name_strategy: ai');
|
||||
expect(newConfig).toContain('concurrency: 2');
|
||||
});
|
||||
|
||||
it('should create config from default language template when config does not exist', () => {
|
||||
rmSync(configPath, { force: true });
|
||||
|
||||
const result = resetGlobalConfigToTemplate(new Date('2026-02-19T12:00:00Z'));
|
||||
|
||||
expect(result.backupPath).toBeUndefined();
|
||||
expect(result.language).toBe('en');
|
||||
expect(existsSync(configPath)).toBe(true);
|
||||
const newConfig = readFileSync(configPath, 'utf-8');
|
||||
expect(newConfig).toContain('language: en');
|
||||
expect(newConfig).toContain('branch_name_strategy: ai');
|
||||
});
|
||||
});
|
||||
@ -31,13 +31,14 @@ describe('resetCategoriesToDefault', () => {
|
||||
|
||||
it('should reset user category overlay and show updated message', async () => {
|
||||
// Given
|
||||
const cwd = '/tmp/test-cwd';
|
||||
|
||||
// When
|
||||
await resetCategoriesToDefault();
|
||||
await resetCategoriesToDefault(cwd);
|
||||
|
||||
// Then
|
||||
expect(mockHeader).toHaveBeenCalledWith('Reset Categories');
|
||||
expect(mockResetPieceCategories).toHaveBeenCalledTimes(1);
|
||||
expect(mockResetPieceCategories).toHaveBeenCalledWith(cwd);
|
||||
expect(mockSuccess).toHaveBeenCalledWith('User category overlay reset.');
|
||||
expect(mockInfo).toHaveBeenCalledWith(' /tmp/user-piece-categories.yaml');
|
||||
});
|
||||
|
||||
@ -5,25 +5,44 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import type { TaskInfo } from '../infra/task/index.js';
|
||||
|
||||
// Mock dependencies before importing the module under test
|
||||
vi.mock('../infra/config/index.js', () => ({
|
||||
loadPieceByIdentifier: vi.fn(),
|
||||
isPiecePath: vi.fn(() => false),
|
||||
loadGlobalConfig: vi.fn(() => ({
|
||||
const { mockLoadConfigRaw } = vi.hoisted(() => ({
|
||||
mockLoadConfigRaw: vi.fn(() => ({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
concurrency: 1,
|
||||
taskPollIntervalMs: 500,
|
||||
})),
|
||||
loadProjectConfig: vi.fn(() => ({
|
||||
piece: 'default',
|
||||
permissionMode: 'default',
|
||||
})),
|
||||
}));
|
||||
|
||||
import { loadGlobalConfig } from '../infra/config/index.js';
|
||||
const mockLoadGlobalConfig = vi.mocked(loadGlobalConfig);
|
||||
// Mock dependencies before importing the module under test
|
||||
vi.mock('../infra/config/index.js', () => ({
|
||||
loadPieceByIdentifier: vi.fn(),
|
||||
isPiecePath: vi.fn(() => false),
|
||||
loadConfig: (...args: unknown[]) => {
|
||||
const raw = mockLoadConfigRaw(...args) as Record<string, unknown>;
|
||||
if ('global' in raw && 'project' in raw) {
|
||||
return raw;
|
||||
}
|
||||
return {
|
||||
global: raw,
|
||||
project: { piece: 'default' },
|
||||
};
|
||||
},
|
||||
resolvePieceConfigValues: (_projectDir: string, keys: readonly string[]) => {
|
||||
const raw = mockLoadConfigRaw() as Record<string, unknown>;
|
||||
const config = ('global' in raw && 'project' in raw)
|
||||
? { ...raw.global as Record<string, unknown>, ...raw.project as Record<string, unknown> }
|
||||
: { ...raw, piece: 'default', provider: 'claude', verbose: false };
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const key of keys) {
|
||||
result[key] = config[key];
|
||||
}
|
||||
return result;
|
||||
},
|
||||
}));
|
||||
|
||||
const mockLoadConfig = mockLoadConfigRaw;
|
||||
|
||||
const {
|
||||
mockClaimNextTasks,
|
||||
@ -167,7 +186,7 @@ beforeEach(() => {
|
||||
describe('runAllTasks concurrency', () => {
|
||||
describe('sequential execution (concurrency=1)', () => {
|
||||
beforeEach(() => {
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -210,7 +229,7 @@ describe('runAllTasks concurrency', () => {
|
||||
|
||||
describe('parallel execution (concurrency>1)', () => {
|
||||
beforeEach(() => {
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -288,7 +307,7 @@ describe('runAllTasks concurrency', () => {
|
||||
describe('default concurrency', () => {
|
||||
it('should default to sequential when concurrency is not set', async () => {
|
||||
// Given: Config without explicit concurrency (defaults to 1)
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -324,7 +343,7 @@ describe('runAllTasks concurrency', () => {
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -371,7 +390,7 @@ describe('runAllTasks concurrency', () => {
|
||||
|
||||
it('should fill slots immediately when a task completes (no batch waiting)', async () => {
|
||||
// Given: 3 tasks, concurrency=2, task1 finishes quickly, task2 takes longer
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -413,7 +432,7 @@ describe('runAllTasks concurrency', () => {
|
||||
|
||||
it('should count partial failures correctly', async () => {
|
||||
// Given: 3 tasks, 1 fails, 2 succeed
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -495,7 +514,7 @@ describe('runAllTasks concurrency', () => {
|
||||
|
||||
it('should pass abortSignal but not taskPrefix in sequential mode', async () => {
|
||||
// Given: Sequential mode
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -525,7 +544,7 @@ describe('runAllTasks concurrency', () => {
|
||||
});
|
||||
|
||||
it('should only notify once at run completion when multiple tasks succeed', async () => {
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -550,7 +569,7 @@ describe('runAllTasks concurrency', () => {
|
||||
});
|
||||
|
||||
it('should not notify run completion when runComplete is explicitly false', async () => {
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -572,7 +591,7 @@ describe('runAllTasks concurrency', () => {
|
||||
});
|
||||
|
||||
it('should notify run completion by default when notification_sound_events is not set', async () => {
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -594,7 +613,7 @@ describe('runAllTasks concurrency', () => {
|
||||
});
|
||||
|
||||
it('should notify run abort by default when notification_sound_events is not set', async () => {
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -617,7 +636,7 @@ describe('runAllTasks concurrency', () => {
|
||||
});
|
||||
|
||||
it('should not notify run abort when runAbort is explicitly false', async () => {
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -640,7 +659,7 @@ describe('runAllTasks concurrency', () => {
|
||||
});
|
||||
|
||||
it('should notify run abort and rethrow when worker pool throws', async () => {
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
@ -675,7 +694,7 @@ describe('runAllTasks concurrency', () => {
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockLoadConfig.mockReturnValue({
|
||||
language: 'en',
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
|
||||
@ -66,6 +66,8 @@ describe('saveTaskFile', () => {
|
||||
expect(tasks).toHaveLength(1);
|
||||
expect(tasks[0]?.content).toBeUndefined();
|
||||
expect(tasks[0]?.task_dir).toBeTypeOf('string');
|
||||
expect(tasks[0]?.slug).toBeTypeOf('string');
|
||||
expect(tasks[0]?.summary).toBe('Implement feature X');
|
||||
const taskDir = path.join(testDir, String(tasks[0]?.task_dir));
|
||||
expect(fs.existsSync(path.join(taskDir, 'order.md'))).toBe(true);
|
||||
expect(fs.readFileSync(path.join(taskDir, 'order.md'), 'utf-8')).toContain('Implement feature X');
|
||||
|
||||
@ -9,6 +9,7 @@ const {
|
||||
mockCompleteTask,
|
||||
mockFailTask,
|
||||
mockExecuteTask,
|
||||
mockResolvePieceConfigValue,
|
||||
} = vi.hoisted(() => ({
|
||||
mockAddTask: vi.fn(() => ({
|
||||
name: 'test-task',
|
||||
@ -21,6 +22,7 @@ const {
|
||||
mockCompleteTask: vi.fn(),
|
||||
mockFailTask: vi.fn(),
|
||||
mockExecuteTask: vi.fn(),
|
||||
mockResolvePieceConfigValue: vi.fn((_: string, key: string) => (key === 'autoPr' ? undefined : 'default')),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/prompt/index.js', () => ({
|
||||
@ -28,11 +30,10 @@ vi.mock('../shared/prompt/index.js', () => ({
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/index.js', () => ({
|
||||
getCurrentPiece: vi.fn(),
|
||||
resolvePieceConfigValue: (...args: unknown[]) => mockResolvePieceConfigValue(...args),
|
||||
listPieces: vi.fn(() => ['default']),
|
||||
listPieceEntries: vi.fn(() => []),
|
||||
isPiecePath: vi.fn(() => false),
|
||||
loadGlobalConfig: vi.fn(() => ({})),
|
||||
}));
|
||||
|
||||
vi.mock('../infra/task/index.js', () => ({
|
||||
@ -102,7 +103,7 @@ beforeEach(() => {
|
||||
|
||||
describe('resolveAutoPr default in selectAndExecuteTask', () => {
|
||||
it('should call auto-PR confirm with default true when no CLI option or config', async () => {
|
||||
// Given: worktree is enabled via override, no autoPr option, no global config autoPr
|
||||
// Given: worktree is enabled via override, no autoPr option, no config autoPr
|
||||
mockConfirm.mockResolvedValue(true);
|
||||
mockSummarizeTaskName.mockResolvedValue('test-task');
|
||||
mockCreateSharedClone.mockReturnValue({
|
||||
@ -121,10 +122,7 @@ describe('resolveAutoPr default in selectAndExecuteTask', () => {
|
||||
createWorktree: true,
|
||||
});
|
||||
|
||||
// Then: the 'Create pull request?' confirm is called with default true
|
||||
const autoPrCall = mockConfirm.mock.calls.find(
|
||||
(call) => call[0] === 'Create pull request?',
|
||||
);
|
||||
const autoPrCall = mockConfirm.mock.calls.find((call) => call[0] === 'Create pull request?');
|
||||
expect(autoPrCall).toBeDefined();
|
||||
expect(autoPrCall![1]).toBe(true);
|
||||
});
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
/**
|
||||
* Unit tests for slugify utility
|
||||
*
|
||||
* Tests URL/filename-safe slug generation with CJK support.
|
||||
* Tests URL/filename-safe slug generation (a-z 0-9 hyphen, max 30 chars).
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
@ -25,17 +25,17 @@ describe('slugify', () => {
|
||||
expect(slugify(' hello ')).toBe('hello');
|
||||
});
|
||||
|
||||
it('should truncate to 50 characters', () => {
|
||||
it('should truncate to 30 characters', () => {
|
||||
const long = 'a'.repeat(100);
|
||||
expect(slugify(long).length).toBeLessThanOrEqual(50);
|
||||
expect(slugify(long).length).toBeLessThanOrEqual(30);
|
||||
});
|
||||
|
||||
it('should preserve CJK characters', () => {
|
||||
expect(slugify('タスク指示書')).toBe('タスク指示書');
|
||||
it('should strip CJK characters', () => {
|
||||
expect(slugify('タスク指示書')).toBe('');
|
||||
});
|
||||
|
||||
it('should handle mixed ASCII and CJK', () => {
|
||||
expect(slugify('Add タスク Feature')).toBe('add-タスク-feature');
|
||||
expect(slugify('Add タスク Feature')).toBe('add-feature');
|
||||
});
|
||||
|
||||
it('should handle numbers', () => {
|
||||
@ -43,11 +43,18 @@ describe('slugify', () => {
|
||||
});
|
||||
|
||||
it('should handle empty result after stripping', () => {
|
||||
// All special characters → becomes empty string
|
||||
expect(slugify('!@#$%')).toBe('');
|
||||
});
|
||||
|
||||
it('should handle typical GitHub issue titles', () => {
|
||||
expect(slugify('Fix: login not working (#42)')).toBe('fix-login-not-working-42');
|
||||
});
|
||||
|
||||
it('should strip trailing hyphen after truncation', () => {
|
||||
// 30 chars of slug that ends with a hyphen after slice
|
||||
const input = 'abcdefghijklmnopqrstuvwxyz-abc-xyz';
|
||||
const result = slugify(input);
|
||||
expect(result.length).toBeLessThanOrEqual(30);
|
||||
expect(result).not.toMatch(/-$/);
|
||||
});
|
||||
});
|
||||
|
||||
@ -6,7 +6,7 @@ import { beforeEach, describe, expect, it, vi } from 'vitest';
|
||||
|
||||
vi.mock('../infra/config/index.js', () => ({
|
||||
loadPiece: vi.fn(() => null),
|
||||
getCurrentPiece: vi.fn(() => 'default'),
|
||||
resolveConfigValue: vi.fn(() => 'default'),
|
||||
setCurrentPiece: vi.fn(),
|
||||
}));
|
||||
|
||||
@ -20,11 +20,11 @@ vi.mock('../shared/ui/index.js', () => ({
|
||||
error: vi.fn(),
|
||||
}));
|
||||
|
||||
import { getCurrentPiece, loadPiece, setCurrentPiece } from '../infra/config/index.js';
|
||||
import { resolveConfigValue, loadPiece, setCurrentPiece } from '../infra/config/index.js';
|
||||
import { selectPiece } from '../features/pieceSelection/index.js';
|
||||
import { switchPiece } from '../features/config/switchPiece.js';
|
||||
|
||||
const mockGetCurrentPiece = vi.mocked(getCurrentPiece);
|
||||
const mockResolveConfigValue = vi.mocked(resolveConfigValue);
|
||||
const mockLoadPiece = vi.mocked(loadPiece);
|
||||
const mockSetCurrentPiece = vi.mocked(setCurrentPiece);
|
||||
const mockSelectPiece = vi.mocked(selectPiece);
|
||||
@ -32,6 +32,7 @@ const mockSelectPiece = vi.mocked(selectPiece);
|
||||
describe('switchPiece', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockResolveConfigValue.mockReturnValue('default');
|
||||
});
|
||||
|
||||
it('should call selectPiece with fallbackToDefault: false', async () => {
|
||||
|
||||
@ -5,15 +5,15 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import type { TaskInfo } from '../infra/task/index.js';
|
||||
|
||||
const { mockResolveTaskExecution, mockExecutePiece, mockLoadPieceByIdentifier, mockLoadGlobalConfig, mockLoadProjectConfig, mockBuildTaskResult, mockPersistTaskResult, mockPostExecutionFlow } =
|
||||
const { mockResolveTaskExecution, mockExecutePiece, mockLoadPieceByIdentifier, mockResolvePieceConfigValues, mockBuildTaskResult, mockPersistTaskResult, mockPersistTaskError, mockPostExecutionFlow } =
|
||||
vi.hoisted(() => ({
|
||||
mockResolveTaskExecution: vi.fn(),
|
||||
mockExecutePiece: vi.fn(),
|
||||
mockLoadPieceByIdentifier: vi.fn(),
|
||||
mockLoadGlobalConfig: vi.fn(),
|
||||
mockLoadProjectConfig: vi.fn(),
|
||||
mockResolvePieceConfigValues: vi.fn(),
|
||||
mockBuildTaskResult: vi.fn(),
|
||||
mockPersistTaskResult: vi.fn(),
|
||||
mockPersistTaskError: vi.fn(),
|
||||
mockPostExecutionFlow: vi.fn(),
|
||||
}));
|
||||
|
||||
@ -28,6 +28,7 @@ vi.mock('../features/tasks/execute/pieceExecution.js', () => ({
|
||||
vi.mock('../features/tasks/execute/taskResultHandler.js', () => ({
|
||||
buildTaskResult: (...args: unknown[]) => mockBuildTaskResult(...args),
|
||||
persistTaskResult: (...args: unknown[]) => mockPersistTaskResult(...args),
|
||||
persistTaskError: (...args: unknown[]) => mockPersistTaskError(...args),
|
||||
}));
|
||||
|
||||
vi.mock('../features/tasks/execute/postExecution.js', () => ({
|
||||
@ -37,8 +38,7 @@ vi.mock('../features/tasks/execute/postExecution.js', () => ({
|
||||
vi.mock('../infra/config/index.js', () => ({
|
||||
loadPieceByIdentifier: (...args: unknown[]) => mockLoadPieceByIdentifier(...args),
|
||||
isPiecePath: () => false,
|
||||
loadGlobalConfig: () => mockLoadGlobalConfig(),
|
||||
loadProjectConfig: () => mockLoadProjectConfig(),
|
||||
resolvePieceConfigValues: (...args: unknown[]) => mockResolvePieceConfigValues(...args),
|
||||
}));
|
||||
|
||||
vi.mock('../shared/ui/index.js', () => ({
|
||||
@ -83,15 +83,19 @@ describe('executeAndCompleteTask', () => {
|
||||
name: 'default',
|
||||
movements: [],
|
||||
});
|
||||
mockLoadGlobalConfig.mockReturnValue({
|
||||
mockResolvePieceConfigValues.mockReturnValue({
|
||||
language: 'en',
|
||||
provider: 'claude',
|
||||
model: undefined,
|
||||
personaProviders: {},
|
||||
providerProfiles: {},
|
||||
});
|
||||
mockLoadProjectConfig.mockReturnValue({
|
||||
provider: 'claude',
|
||||
providerProfiles: {},
|
||||
providerOptions: {
|
||||
claude: { sandbox: { allowUnsandboxedCommands: true } },
|
||||
},
|
||||
notificationSound: true,
|
||||
notificationSoundEvents: {},
|
||||
concurrency: 1,
|
||||
taskPollIntervalMs: 500,
|
||||
});
|
||||
mockBuildTaskResult.mockReturnValue({ success: true });
|
||||
mockResolveTaskExecution.mockResolvedValue({
|
||||
@ -130,8 +134,12 @@ describe('executeAndCompleteTask', () => {
|
||||
const pieceExecutionOptions = mockExecutePiece.mock.calls[0]?.[3] as {
|
||||
taskDisplayLabel?: string;
|
||||
taskPrefix?: string;
|
||||
providerOptions?: unknown;
|
||||
};
|
||||
expect(pieceExecutionOptions?.taskDisplayLabel).toBe(taskDisplayLabel);
|
||||
expect(pieceExecutionOptions?.taskPrefix).toBe(taskDisplayLabel);
|
||||
expect(pieceExecutionOptions?.providerOptions).toEqual({
|
||||
claude: { sandbox: { allowUnsandboxedCommands: true } },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -48,7 +48,7 @@ vi.mock('../infra/task/index.js', () => ({
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/index.js', () => ({
|
||||
loadGlobalConfig: vi.fn(() => ({ interactivePreviewMovements: 3, language: 'en' })),
|
||||
resolvePieceConfigValues: vi.fn(() => ({ interactivePreviewMovements: 3, language: 'en' })),
|
||||
getPieceDescription: vi.fn(() => ({
|
||||
name: 'default',
|
||||
description: 'desc',
|
||||
|
||||
@ -4,7 +4,7 @@ const {
|
||||
mockExistsSync,
|
||||
mockSelectPiece,
|
||||
mockSelectOption,
|
||||
mockLoadGlobalConfig,
|
||||
mockResolvePieceConfigValue,
|
||||
mockLoadPieceByIdentifier,
|
||||
mockGetPieceDescription,
|
||||
mockRunRetryMode,
|
||||
@ -16,7 +16,7 @@ const {
|
||||
mockExistsSync: vi.fn(() => true),
|
||||
mockSelectPiece: vi.fn(),
|
||||
mockSelectOption: vi.fn(),
|
||||
mockLoadGlobalConfig: vi.fn(),
|
||||
mockResolvePieceConfigValue: vi.fn(),
|
||||
mockLoadPieceByIdentifier: vi.fn(),
|
||||
mockGetPieceDescription: vi.fn(() => ({
|
||||
name: 'default',
|
||||
@ -60,7 +60,7 @@ vi.mock('../shared/utils/index.js', async (importOriginal) => ({
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/index.js', () => ({
|
||||
loadGlobalConfig: (...args: unknown[]) => mockLoadGlobalConfig(...args),
|
||||
resolvePieceConfigValue: (...args: unknown[]) => mockResolvePieceConfigValue(...args),
|
||||
loadPieceByIdentifier: (...args: unknown[]) => mockLoadPieceByIdentifier(...args),
|
||||
getPieceDescription: (...args: unknown[]) => mockGetPieceDescription(...args),
|
||||
}));
|
||||
@ -127,7 +127,7 @@ beforeEach(() => {
|
||||
mockExistsSync.mockReturnValue(true);
|
||||
|
||||
mockSelectPiece.mockResolvedValue('default');
|
||||
mockLoadGlobalConfig.mockReturnValue({ defaultPiece: 'default' });
|
||||
mockResolvePieceConfigValue.mockReturnValue(3);
|
||||
mockLoadPieceByIdentifier.mockReturnValue(defaultPieceConfig);
|
||||
mockSelectOption.mockResolvedValue('plan');
|
||||
mockRunRetryMode.mockResolvedValue({ action: 'execute', task: '追加指示A' });
|
||||
|
||||
@ -1,39 +1,50 @@
|
||||
import { describe, expect, it } from 'vitest';
|
||||
import { formatTaskStatusLabel } from '../features/tasks/list/taskStatusLabel.js';
|
||||
import { formatTaskStatusLabel, formatShortDate } from '../features/tasks/list/taskStatusLabel.js';
|
||||
import type { TaskListItem } from '../infra/task/types.js';
|
||||
|
||||
function makeTask(overrides: Partial<TaskListItem>): TaskListItem {
|
||||
return {
|
||||
kind: 'pending',
|
||||
name: 'test-task',
|
||||
createdAt: '2026-02-11T00:00:00.000Z',
|
||||
filePath: '/tmp/task.md',
|
||||
content: 'content',
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
describe('formatTaskStatusLabel', () => {
|
||||
it("should format pending task as '[pending] name'", () => {
|
||||
// Given: pending タスク
|
||||
const task: TaskListItem = {
|
||||
kind: 'pending',
|
||||
name: 'implement test',
|
||||
createdAt: '2026-02-11T00:00:00.000Z',
|
||||
filePath: '/tmp/task.md',
|
||||
content: 'content',
|
||||
};
|
||||
|
||||
// When: ステータスラベルを生成する
|
||||
const result = formatTaskStatusLabel(task);
|
||||
|
||||
// Then: pending は pending 表示になる
|
||||
expect(result).toBe('[pending] implement test');
|
||||
const task = makeTask({ kind: 'pending', name: 'implement-test' });
|
||||
expect(formatTaskStatusLabel(task)).toBe('[pending] implement-test');
|
||||
});
|
||||
|
||||
it("should format failed task as '[failed] name'", () => {
|
||||
// Given: failed タスク
|
||||
const task: TaskListItem = {
|
||||
kind: 'failed',
|
||||
name: 'retry payment',
|
||||
createdAt: '2026-02-11T00:00:00.000Z',
|
||||
filePath: '/tmp/task.md',
|
||||
content: 'content',
|
||||
};
|
||||
const task = makeTask({ kind: 'failed', name: 'retry-payment' });
|
||||
expect(formatTaskStatusLabel(task)).toBe('[failed] retry-payment');
|
||||
});
|
||||
|
||||
// When: ステータスラベルを生成する
|
||||
const result = formatTaskStatusLabel(task);
|
||||
it('should include branch when present', () => {
|
||||
const task = makeTask({
|
||||
kind: 'completed',
|
||||
name: 'fix-login-bug',
|
||||
branch: 'takt/284/fix-login-bug',
|
||||
});
|
||||
expect(formatTaskStatusLabel(task)).toBe('[completed] fix-login-bug (takt/284/fix-login-bug)');
|
||||
});
|
||||
|
||||
// Then: failed は failed 表示になる
|
||||
expect(result).toBe('[failed] retry payment');
|
||||
it('should not include branch when absent', () => {
|
||||
const task = makeTask({ kind: 'running', name: 'my-task' });
|
||||
expect(formatTaskStatusLabel(task)).toBe('[running] my-task');
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatShortDate', () => {
|
||||
it('should format ISO string to MM/DD HH:mm', () => {
|
||||
expect(formatShortDate('2025-02-18T14:30:00.000Z')).toBe('02/18 14:30');
|
||||
});
|
||||
|
||||
it('should zero-pad single digit values', () => {
|
||||
expect(formatShortDate('2025-01-05T03:07:00.000Z')).toBe('01/05 03:07');
|
||||
});
|
||||
});
|
||||
|
||||
@ -14,7 +14,7 @@ const {
|
||||
mockSuccess,
|
||||
mockWarn,
|
||||
mockError,
|
||||
mockGetCurrentPiece,
|
||||
mockResolveConfigValue,
|
||||
} = vi.hoisted(() => ({
|
||||
mockRecoverInterruptedRunningTasks: vi.fn(),
|
||||
mockGetTasksDir: vi.fn(),
|
||||
@ -28,7 +28,7 @@ const {
|
||||
mockSuccess: vi.fn(),
|
||||
mockWarn: vi.fn(),
|
||||
mockError: vi.fn(),
|
||||
mockGetCurrentPiece: vi.fn(),
|
||||
mockResolveConfigValue: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock('../infra/task/index.js', () => ({
|
||||
@ -61,7 +61,7 @@ vi.mock('../shared/i18n/index.js', () => ({
|
||||
}));
|
||||
|
||||
vi.mock('../infra/config/index.js', () => ({
|
||||
getCurrentPiece: mockGetCurrentPiece,
|
||||
resolveConfigValue: mockResolveConfigValue,
|
||||
}));
|
||||
|
||||
import { watchTasks } from '../features/tasks/watch/index.js';
|
||||
@ -69,7 +69,7 @@ import { watchTasks } from '../features/tasks/watch/index.js';
|
||||
describe('watchTasks', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockGetCurrentPiece.mockReturnValue('default');
|
||||
mockResolveConfigValue.mockReturnValue('default');
|
||||
mockRecoverInterruptedRunningTasks.mockReturnValue(0);
|
||||
mockGetTasksDir.mockReturnValue('/project/.takt/tasks.yaml');
|
||||
mockExecuteAndCompleteTask.mockResolvedValue(true);
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
|
||||
import { existsSync, readFileSync } from 'node:fs';
|
||||
import { basename, dirname } from 'node:path';
|
||||
import { loadCustomAgents, loadAgentPrompt, loadGlobalConfig, loadProjectConfig } from '../infra/config/index.js';
|
||||
import { loadCustomAgents, loadAgentPrompt, resolveConfigValues } from '../infra/config/index.js';
|
||||
import { getProvider, type ProviderType, type ProviderCallOptions } from '../infra/providers/index.js';
|
||||
import type { AgentResponse, CustomAgentConfig } from '../core/models/index.js';
|
||||
import { createLogger } from '../shared/utils/index.js';
|
||||
@ -29,16 +29,10 @@ export class AgentRunner {
|
||||
agentConfig?: CustomAgentConfig,
|
||||
): ProviderType {
|
||||
if (options?.provider) return options.provider;
|
||||
const projectConfig = loadProjectConfig(cwd);
|
||||
if (projectConfig.provider) return projectConfig.provider;
|
||||
const config = resolveConfigValues(cwd, ['provider']);
|
||||
if (config.provider) return config.provider;
|
||||
if (options?.stepProvider) return options.stepProvider;
|
||||
if (agentConfig?.provider) return agentConfig.provider;
|
||||
try {
|
||||
const globalConfig = loadGlobalConfig();
|
||||
if (globalConfig.provider) return globalConfig.provider;
|
||||
} catch (error) {
|
||||
log.debug('Global config not available for provider resolution', { error });
|
||||
}
|
||||
return 'claude';
|
||||
}
|
||||
|
||||
@ -55,14 +49,11 @@ export class AgentRunner {
|
||||
if (options?.model) return options.model;
|
||||
if (options?.stepModel) return options.stepModel;
|
||||
if (agentConfig?.model) return agentConfig.model;
|
||||
try {
|
||||
const globalConfig = loadGlobalConfig();
|
||||
if (globalConfig.model) {
|
||||
const globalProvider = globalConfig.provider ?? 'claude';
|
||||
if (globalProvider === resolvedProvider) return globalConfig.model;
|
||||
}
|
||||
} catch (error) {
|
||||
log.debug('Global config not available for model resolution', { error });
|
||||
if (!options?.cwd) return undefined;
|
||||
const config = resolveConfigValues(options.cwd, ['provider', 'model']);
|
||||
if (config.model) {
|
||||
const defaultProvider = config.provider ?? 'claude';
|
||||
if (defaultProvider === resolvedProvider) return config.model;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
@ -131,7 +122,7 @@ export class AgentRunner {
|
||||
name: agentConfig.name,
|
||||
systemPrompt: agentConfig.claudeAgent || agentConfig.claudeSkill
|
||||
? undefined
|
||||
: loadAgentPrompt(agentConfig),
|
||||
: loadAgentPrompt(agentConfig, options.cwd),
|
||||
claudeAgent: agentConfig.claudeAgent,
|
||||
claudeSkill: agentConfig.claudeSkill,
|
||||
});
|
||||
|
||||
@ -1,15 +1,18 @@
|
||||
/**
|
||||
* CLI subcommand definitions
|
||||
*
|
||||
* Registers all named subcommands (run, watch, add, list, switch, clear, eject, config, prompt, catalog).
|
||||
* Registers all named subcommands (run, watch, add, list, switch, clear, eject, prompt, catalog).
|
||||
*/
|
||||
|
||||
import { clearPersonaSessions, getCurrentPiece } from '../../infra/config/index.js';
|
||||
import { success } from '../../shared/ui/index.js';
|
||||
import { join } from 'node:path';
|
||||
import { clearPersonaSessions, resolveConfigValue } from '../../infra/config/index.js';
|
||||
import { getGlobalConfigDir } from '../../infra/config/paths.js';
|
||||
import { success, info } from '../../shared/ui/index.js';
|
||||
import { runAllTasks, addTask, watchTasks, listTasks } from '../../features/tasks/index.js';
|
||||
import { switchPiece, switchConfig, ejectBuiltin, ejectFacet, parseFacetType, VALID_FACET_TYPES, resetCategoriesToDefault, deploySkill } from '../../features/config/index.js';
|
||||
import { switchPiece, ejectBuiltin, ejectFacet, parseFacetType, VALID_FACET_TYPES, resetCategoriesToDefault, resetConfigToDefault, deploySkill } from '../../features/config/index.js';
|
||||
import { previewPrompts } from '../../features/prompt/index.js';
|
||||
import { showCatalog } from '../../features/catalog/index.js';
|
||||
import { computeReviewMetrics, formatReviewMetrics, parseSinceDuration, purgeOldEvents } from '../../features/analytics/index.js';
|
||||
import { program, resolvedCwd } from './program.js';
|
||||
import { resolveAgentOverrides } from './helpers.js';
|
||||
|
||||
@ -17,7 +20,7 @@ program
|
||||
.command('run')
|
||||
.description('Run all pending tasks from .takt/tasks.yaml')
|
||||
.action(async () => {
|
||||
const piece = getCurrentPiece(resolvedCwd);
|
||||
const piece = resolveConfigValue(resolvedCwd, 'piece');
|
||||
await runAllTasks(resolvedCwd, piece, resolveAgentOverrides(program));
|
||||
});
|
||||
|
||||
@ -96,23 +99,22 @@ program
|
||||
}
|
||||
});
|
||||
|
||||
program
|
||||
.command('config')
|
||||
.description('Configure settings (permission mode)')
|
||||
.argument('[key]', 'Configuration key')
|
||||
.action(async (key?: string) => {
|
||||
await switchConfig(resolvedCwd, key);
|
||||
});
|
||||
|
||||
const reset = program
|
||||
.command('reset')
|
||||
.description('Reset settings to defaults');
|
||||
|
||||
reset
|
||||
.command('config')
|
||||
.description('Reset global config to builtin template (with backup)')
|
||||
.action(async () => {
|
||||
await resetConfigToDefault();
|
||||
});
|
||||
|
||||
reset
|
||||
.command('categories')
|
||||
.description('Reset piece categories to builtin defaults')
|
||||
.action(async () => {
|
||||
await resetCategoriesToDefault();
|
||||
await resetCategoriesToDefault(resolvedCwd);
|
||||
});
|
||||
|
||||
program
|
||||
@ -137,3 +139,37 @@ program
|
||||
.action((type?: string) => {
|
||||
showCatalog(resolvedCwd, type);
|
||||
});
|
||||
|
||||
const metrics = program
|
||||
.command('metrics')
|
||||
.description('Show analytics metrics');
|
||||
|
||||
metrics
|
||||
.command('review')
|
||||
.description('Show review quality metrics')
|
||||
.option('--since <duration>', 'Time window (e.g. "7d", "30d")', '30d')
|
||||
.action((opts: { since: string }) => {
|
||||
const analytics = resolveConfigValue(resolvedCwd, 'analytics');
|
||||
const eventsDir = analytics?.eventsPath ?? join(getGlobalConfigDir(), 'analytics', 'events');
|
||||
const durationMs = parseSinceDuration(opts.since);
|
||||
const sinceMs = Date.now() - durationMs;
|
||||
const result = computeReviewMetrics(eventsDir, sinceMs);
|
||||
info(formatReviewMetrics(result));
|
||||
});
|
||||
|
||||
program
|
||||
.command('purge')
|
||||
.description('Purge old analytics event files')
|
||||
.option('--retention-days <days>', 'Retention period in days', '30')
|
||||
.action((opts: { retentionDays: string }) => {
|
||||
const analytics = resolveConfigValue(resolvedCwd, 'analytics');
|
||||
const eventsDir = analytics?.eventsPath ?? join(getGlobalConfigDir(), 'analytics', 'events');
|
||||
const retentionDays = analytics?.retentionDays
|
||||
?? parseInt(opts.retentionDays, 10);
|
||||
const deleted = purgeOldEvents(eventsDir, retentionDays, new Date());
|
||||
if (deleted.length === 0) {
|
||||
info('No files to purge.');
|
||||
} else {
|
||||
success(`Purged ${deleted.length} file(s): ${deleted.join(', ')}`);
|
||||
}
|
||||
});
|
||||
|
||||
@ -11,7 +11,7 @@ import { resolve } from 'node:path';
|
||||
import {
|
||||
initGlobalDirs,
|
||||
initProjectDirs,
|
||||
loadGlobalConfig,
|
||||
resolveConfigValues,
|
||||
isVerboseMode,
|
||||
} from '../../infra/config/index.js';
|
||||
import { setQuietMode } from '../../shared/context.js';
|
||||
@ -51,7 +51,8 @@ program
|
||||
.option('--pipeline', 'Pipeline mode: non-interactive, no worktree, direct branch creation')
|
||||
.option('--skip-git', 'Skip branch creation, commit, and push (pipeline mode)')
|
||||
.option('--create-worktree <yes|no>', 'Skip the worktree prompt by explicitly specifying yes or no')
|
||||
.option('-q, --quiet', 'Minimal output mode: suppress AI output (for CI)');
|
||||
.option('-q, --quiet', 'Minimal output mode: suppress AI output (for CI)')
|
||||
.option('-c, --continue', 'Continue from the last assistant session');
|
||||
|
||||
/**
|
||||
* Run pre-action hook: common initialization for all commands.
|
||||
@ -69,7 +70,7 @@ export async function runPreActionHook(): Promise<void> {
|
||||
const verbose = isVerboseMode(resolvedCwd);
|
||||
initDebugLogger(verbose ? { enabled: true } : undefined, resolvedCwd);
|
||||
|
||||
const config = loadGlobalConfig();
|
||||
const config = resolveConfigValues(resolvedCwd, ['logLevel', 'minimalOutput']);
|
||||
|
||||
if (verbose) {
|
||||
setVerboseConsole(true);
|
||||
|
||||
@ -6,7 +6,6 @@
|
||||
*/
|
||||
|
||||
import { info, error as logError, withProgress } from '../../shared/ui/index.js';
|
||||
import { confirm } from '../../shared/prompt/index.js';
|
||||
import { getErrorMessage } from '../../shared/utils/index.js';
|
||||
import { getLabel } from '../../shared/i18n/index.js';
|
||||
import { fetchIssue, formatIssueAsTask, checkGhCli, parseIssueNumbers, type GitHubIssue } from '../../infra/github/index.js';
|
||||
@ -15,7 +14,6 @@ import { executePipeline } from '../../features/pipeline/index.js';
|
||||
import {
|
||||
interactiveMode,
|
||||
selectInteractiveMode,
|
||||
selectRecentSession,
|
||||
passthroughMode,
|
||||
quietMode,
|
||||
personaMode,
|
||||
@ -23,8 +21,7 @@ import {
|
||||
dispatchConversationAction,
|
||||
type InteractiveModeResult,
|
||||
} from '../../features/interactive/index.js';
|
||||
import { getPieceDescription, loadGlobalConfig } from '../../infra/config/index.js';
|
||||
import { DEFAULT_PIECE_NAME } from '../../shared/constants.js';
|
||||
import { getPieceDescription, resolveConfigValue, resolveConfigValues, loadPersonaSessions } from '../../infra/config/index.js';
|
||||
import { program, resolvedCwd, pipelineMode } from './program.js';
|
||||
import { resolveAgentOverrides, parseCreateWorktreeOption, isDirectTask } from './helpers.js';
|
||||
import { loadTaskHistory } from './taskHistory.js';
|
||||
@ -85,8 +82,12 @@ export async function executeDefaultAction(task?: string): Promise<void> {
|
||||
const opts = program.opts();
|
||||
const agentOverrides = resolveAgentOverrides(program);
|
||||
const createWorktreeOverride = parseCreateWorktreeOption(opts.createWorktree as string | undefined);
|
||||
const resolvedPipelinePiece = (opts.piece as string | undefined) ?? resolveConfigValue(resolvedCwd, 'piece');
|
||||
const resolvedPipelineAutoPr = opts.autoPr === true
|
||||
? true
|
||||
: (resolveConfigValue(resolvedCwd, 'autoPr') ?? false);
|
||||
const selectOptions: SelectAndExecuteOptions = {
|
||||
autoPr: opts.autoPr === true,
|
||||
autoPr: opts.autoPr === true ? true : undefined,
|
||||
repo: opts.repo as string | undefined,
|
||||
piece: opts.piece as string | undefined,
|
||||
createWorktree: createWorktreeOverride,
|
||||
@ -97,9 +98,9 @@ export async function executeDefaultAction(task?: string): Promise<void> {
|
||||
const exitCode = await executePipeline({
|
||||
issueNumber: opts.issue as number | undefined,
|
||||
task: opts.task as string | undefined,
|
||||
piece: (opts.piece as string | undefined) ?? DEFAULT_PIECE_NAME,
|
||||
piece: resolvedPipelinePiece,
|
||||
branch: opts.branch as string | undefined,
|
||||
autoPr: opts.autoPr === true,
|
||||
autoPr: resolvedPipelineAutoPr,
|
||||
repo: opts.repo as string | undefined,
|
||||
skipGit: opts.skipGit === true,
|
||||
cwd: resolvedCwd,
|
||||
@ -137,7 +138,7 @@ export async function executeDefaultAction(task?: string): Promise<void> {
|
||||
}
|
||||
|
||||
// All paths below go through interactive mode
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const globalConfig = resolveConfigValues(resolvedCwd, ['language', 'interactivePreviewMovements', 'provider']);
|
||||
const lang = resolveLanguage(globalConfig.language);
|
||||
|
||||
const pieceId = await determinePiece(resolvedCwd, selectOptions.piece);
|
||||
@ -169,17 +170,14 @@ export async function executeDefaultAction(task?: string): Promise<void> {
|
||||
switch (selectedMode) {
|
||||
case 'assistant': {
|
||||
let selectedSessionId: string | undefined;
|
||||
const provider = globalConfig.provider;
|
||||
if (provider === 'claude') {
|
||||
const shouldSelectSession = await confirm(
|
||||
getLabel('interactive.sessionSelector.confirm', lang),
|
||||
false,
|
||||
);
|
||||
if (shouldSelectSession) {
|
||||
const sessionId = await selectRecentSession(resolvedCwd, lang);
|
||||
if (sessionId) {
|
||||
selectedSessionId = sessionId;
|
||||
}
|
||||
if (opts.continue === true) {
|
||||
const providerType = globalConfig.provider;
|
||||
const savedSessions = loadPersonaSessions(resolvedCwd, providerType);
|
||||
const savedSessionId = savedSessions['interactive'];
|
||||
if (savedSessionId) {
|
||||
selectedSessionId = savedSessionId;
|
||||
} else {
|
||||
info(getLabel('interactive.continueNoSession', lang));
|
||||
}
|
||||
}
|
||||
result = await interactiveMode(resolvedCwd, initialInput, pieceContext, selectedSessionId);
|
||||
|
||||
@ -23,6 +23,16 @@ export interface ObservabilityConfig {
|
||||
providerEvents?: boolean;
|
||||
}
|
||||
|
||||
/** Analytics configuration for local metrics collection */
|
||||
export interface AnalyticsConfig {
|
||||
/** Whether analytics collection is enabled */
|
||||
enabled?: boolean;
|
||||
/** Custom path for analytics events directory (default: ~/.takt/analytics/events) */
|
||||
eventsPath?: string;
|
||||
/** Retention period in days for analytics event files (default: 30) */
|
||||
retentionDays?: number;
|
||||
}
|
||||
|
||||
/** Language setting for takt */
|
||||
export type Language = 'en' | 'ja';
|
||||
|
||||
@ -53,11 +63,11 @@ export interface NotificationSoundEventsConfig {
|
||||
/** Global configuration for takt */
|
||||
export interface GlobalConfig {
|
||||
language: Language;
|
||||
defaultPiece: string;
|
||||
logLevel: 'debug' | 'info' | 'warn' | 'error';
|
||||
provider?: 'claude' | 'codex' | 'opencode' | 'mock';
|
||||
model?: string;
|
||||
observability?: ObservabilityConfig;
|
||||
analytics?: AnalyticsConfig;
|
||||
/** Directory for shared clones (worktree_dir in config). If empty, uses ../{clone-name} relative to project */
|
||||
worktreeDir?: string;
|
||||
/** Auto-create PR after worktree execution (default: prompt in interactive mode) */
|
||||
@ -100,6 +110,8 @@ export interface GlobalConfig {
|
||||
notificationSoundEvents?: NotificationSoundEventsConfig;
|
||||
/** Number of movement previews to inject into interactive mode (0 to disable, max 10) */
|
||||
interactivePreviewMovements?: number;
|
||||
/** Verbose output mode */
|
||||
verbose?: boolean;
|
||||
/** Number of tasks to run concurrently in takt run (default: 1 = sequential) */
|
||||
concurrency: number;
|
||||
/** Polling interval in ms for picking up new tasks during takt run (default: 500, range: 100-5000) */
|
||||
@ -109,7 +121,6 @@ export interface GlobalConfig {
|
||||
/** Project-level configuration */
|
||||
export interface ProjectConfig {
|
||||
piece?: string;
|
||||
agents?: CustomAgentConfig[];
|
||||
provider?: 'claude' | 'codex' | 'opencode' | 'mock';
|
||||
providerOptions?: MovementProviderOptions;
|
||||
/** Provider-specific permission profiles */
|
||||
|
||||
@ -378,6 +378,13 @@ export const ObservabilityConfigSchema = z.object({
|
||||
provider_events: z.boolean().optional(),
|
||||
});
|
||||
|
||||
/** Analytics config schema */
|
||||
export const AnalyticsConfigSchema = z.object({
|
||||
enabled: z.boolean().optional(),
|
||||
events_path: z.string().optional(),
|
||||
retention_days: z.number().int().positive().optional(),
|
||||
});
|
||||
|
||||
/** Language setting schema */
|
||||
export const LanguageSchema = z.enum(['en', 'ja']);
|
||||
|
||||
@ -405,11 +412,11 @@ export const PieceCategoryConfigSchema = z.record(z.string(), PieceCategoryConfi
|
||||
/** Global config schema */
|
||||
export const GlobalConfigSchema = z.object({
|
||||
language: LanguageSchema.optional().default(DEFAULT_LANGUAGE),
|
||||
default_piece: z.string().optional().default('default'),
|
||||
log_level: z.enum(['debug', 'info', 'warn', 'error']).optional().default('info'),
|
||||
provider: z.enum(['claude', 'codex', 'opencode', 'mock']).optional().default('claude'),
|
||||
model: z.string().optional(),
|
||||
observability: ObservabilityConfigSchema.optional(),
|
||||
analytics: AnalyticsConfigSchema.optional(),
|
||||
/** Directory for shared clones (worktree_dir in config). If empty, uses ../{clone-name} relative to project */
|
||||
worktree_dir: z.string().optional(),
|
||||
/** Auto-create PR after worktree execution (default: prompt in interactive mode) */
|
||||
@ -458,6 +465,8 @@ export const GlobalConfigSchema = z.object({
|
||||
}).optional(),
|
||||
/** Number of movement previews to inject into interactive mode (0 to disable, max 10) */
|
||||
interactive_preview_movements: z.number().int().min(0).max(10).optional().default(3),
|
||||
/** Verbose output mode */
|
||||
verbose: z.boolean().optional(),
|
||||
/** Number of tasks to run concurrently in takt run (default: 1 = sequential, max: 10) */
|
||||
concurrency: z.number().int().min(1).max(10).optional().default(1),
|
||||
/** Polling interval in ms for picking up new tasks during takt run (default: 500, range: 100-5000) */
|
||||
@ -467,7 +476,6 @@ export const GlobalConfigSchema = z.object({
|
||||
/** Project config schema */
|
||||
export const ProjectConfigSchema = z.object({
|
||||
piece: z.string().optional(),
|
||||
agents: z.array(CustomAgentConfigSchema).optional(),
|
||||
provider: z.enum(['claude', 'codex', 'opencode', 'mock']).optional(),
|
||||
provider_options: MovementProviderOptionsSchema,
|
||||
provider_profiles: ProviderPermissionProfilesSchema,
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
import { join } from 'node:path';
|
||||
import type { PieceMovement, PieceState, Language } from '../../models/types.js';
|
||||
import type { MovementProviderOptions } from '../../models/piece-types.js';
|
||||
import type { RunAgentOptions } from '../../../agents/runner.js';
|
||||
import type { PhaseRunnerContext } from '../phase-runner.js';
|
||||
import type { PieceEngineOptions, PhaseName } from '../types.js';
|
||||
@ -7,6 +8,27 @@ import { buildSessionKey } from '../session-key.js';
|
||||
import { resolveMovementProviderModel } from '../provider-resolution.js';
|
||||
import { DEFAULT_PROVIDER_PERMISSION_PROFILES, resolveMovementPermissionMode } from '../permission-profile-resolution.js';
|
||||
|
||||
function mergeProviderOptions(
|
||||
...layers: (MovementProviderOptions | undefined)[]
|
||||
): MovementProviderOptions | undefined {
|
||||
const result: MovementProviderOptions = {};
|
||||
for (const layer of layers) {
|
||||
if (!layer) continue;
|
||||
if (layer.codex) {
|
||||
result.codex = { ...result.codex, ...layer.codex };
|
||||
}
|
||||
if (layer.opencode) {
|
||||
result.opencode = { ...result.opencode, ...layer.opencode };
|
||||
}
|
||||
if (layer.claude?.sandbox) {
|
||||
result.claude = {
|
||||
sandbox: { ...result.claude?.sandbox, ...layer.claude.sandbox },
|
||||
};
|
||||
}
|
||||
}
|
||||
return Object.keys(result).length > 0 ? result : undefined;
|
||||
}
|
||||
|
||||
export class OptionsBuilder {
|
||||
constructor(
|
||||
private readonly engineOptions: PieceEngineOptions,
|
||||
@ -34,9 +56,7 @@ export class OptionsBuilder {
|
||||
|
||||
const resolvedProviderForPermissions =
|
||||
this.engineOptions.provider
|
||||
?? this.engineOptions.projectProvider
|
||||
?? resolved.provider
|
||||
?? this.engineOptions.globalProvider
|
||||
?? 'claude';
|
||||
|
||||
return {
|
||||
@ -51,10 +71,13 @@ export class OptionsBuilder {
|
||||
movementName: step.name,
|
||||
requiredPermissionMode: step.requiredPermissionMode,
|
||||
provider: resolvedProviderForPermissions,
|
||||
projectProviderProfiles: this.engineOptions.projectProviderProfiles,
|
||||
globalProviderProfiles: this.engineOptions.globalProviderProfiles ?? DEFAULT_PROVIDER_PERMISSION_PROFILES,
|
||||
projectProviderProfiles: this.engineOptions.providerProfiles,
|
||||
globalProviderProfiles: DEFAULT_PROVIDER_PERMISSION_PROFILES,
|
||||
}),
|
||||
providerOptions: step.providerOptions,
|
||||
providerOptions: mergeProviderOptions(
|
||||
this.engineOptions.providerOptions,
|
||||
step.providerOptions,
|
||||
),
|
||||
language: this.getLanguage(),
|
||||
onStream: this.engineOptions.onStream,
|
||||
onPermissionRequest: this.engineOptions.onPermissionRequest,
|
||||
|
||||
@ -8,6 +8,7 @@
|
||||
import type { PermissionResult, PermissionUpdate } from '@anthropic-ai/claude-agent-sdk';
|
||||
import type { PieceMovement, AgentResponse, PieceState, Language, LoopMonitorConfig } from '../models/types.js';
|
||||
import type { ProviderPermissionProfiles } from '../models/provider-profiles.js';
|
||||
import type { MovementProviderOptions } from '../models/piece-types.js';
|
||||
|
||||
export type ProviderType = 'claude' | 'codex' | 'opencode' | 'mock';
|
||||
|
||||
@ -171,24 +172,20 @@ export interface PieceEngineOptions {
|
||||
onAskUserQuestion?: AskUserQuestionHandler;
|
||||
/** Callback when iteration limit is reached - returns additional iterations or null to stop */
|
||||
onIterationLimit?: IterationLimitCallback;
|
||||
/** Bypass all permission checks (sacrifice-my-pc mode) */
|
||||
/** Bypass all permission checks */
|
||||
bypassPermissions?: boolean;
|
||||
/** Project root directory (where .takt/ lives). */
|
||||
projectCwd: string;
|
||||
/** Language for instruction metadata. Defaults to 'en'. */
|
||||
language?: Language;
|
||||
provider?: ProviderType;
|
||||
/** Project config provider (used for provider/profile resolution parity with AgentRunner) */
|
||||
projectProvider?: ProviderType;
|
||||
/** Global config provider (used for provider/profile resolution parity with AgentRunner) */
|
||||
globalProvider?: ProviderType;
|
||||
model?: string;
|
||||
/** Resolved provider options */
|
||||
providerOptions?: MovementProviderOptions;
|
||||
/** Per-persona provider overrides (e.g., { coder: 'codex' }) */
|
||||
personaProviders?: Record<string, ProviderType>;
|
||||
/** Project-level provider permission profiles */
|
||||
projectProviderProfiles?: ProviderPermissionProfiles;
|
||||
/** Global-level provider permission profiles */
|
||||
globalProviderProfiles?: ProviderPermissionProfiles;
|
||||
/** Resolved provider permission profiles */
|
||||
providerProfiles?: ProviderPermissionProfiles;
|
||||
/** Enable interactive-only rules and user-input transitions */
|
||||
interactive?: boolean;
|
||||
/** Rule tag index detector (required for rules evaluation) */
|
||||
|
||||
64
src/features/analytics/events.ts
Normal file
64
src/features/analytics/events.ts
Normal file
@ -0,0 +1,64 @@
|
||||
/**
|
||||
* Analytics event type definitions for metrics collection.
|
||||
*
|
||||
* Three event types capture review findings, fix actions, and movement results
|
||||
* for local-only analysis when analytics.enabled = true.
|
||||
*/
|
||||
|
||||
/** Status of a review finding across iterations */
|
||||
export type FindingStatus = 'new' | 'persists' | 'resolved';
|
||||
|
||||
/** Severity level of a review finding */
|
||||
export type FindingSeverity = 'error' | 'warning';
|
||||
|
||||
/** Decision taken on a finding */
|
||||
export type FindingDecision = 'reject' | 'approve';
|
||||
|
||||
/** Action taken to address a finding */
|
||||
export type FixActionType = 'fixed' | 'rebutted' | 'not_applicable';
|
||||
|
||||
/** Review finding event — emitted per finding during review movements */
|
||||
export interface ReviewFindingEvent {
|
||||
type: 'review_finding';
|
||||
findingId: string;
|
||||
status: FindingStatus;
|
||||
ruleId: string;
|
||||
severity: FindingSeverity;
|
||||
decision: FindingDecision;
|
||||
file: string;
|
||||
line: number;
|
||||
iteration: number;
|
||||
runId: string;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
/** Fix action event — emitted per finding addressed during fix movements */
|
||||
export interface FixActionEvent {
|
||||
type: 'fix_action';
|
||||
findingId: string;
|
||||
action: FixActionType;
|
||||
changedFiles?: string[];
|
||||
testCommand?: string;
|
||||
testResult?: string;
|
||||
iteration: number;
|
||||
runId: string;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
/** Movement result event — emitted after each movement completes */
|
||||
export interface MovementResultEvent {
|
||||
type: 'movement_result';
|
||||
movement: string;
|
||||
provider: string;
|
||||
model: string;
|
||||
decisionTag: string;
|
||||
iteration: number;
|
||||
runId: string;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
/** Union of all analytics event types */
|
||||
export type AnalyticsEvent =
|
||||
| ReviewFindingEvent
|
||||
| FixActionEvent
|
||||
| MovementResultEvent;
|
||||
33
src/features/analytics/index.ts
Normal file
33
src/features/analytics/index.ts
Normal file
@ -0,0 +1,33 @@
|
||||
/**
|
||||
* Analytics module — event collection and metrics.
|
||||
*/
|
||||
|
||||
export type {
|
||||
AnalyticsEvent,
|
||||
ReviewFindingEvent,
|
||||
FixActionEvent,
|
||||
MovementResultEvent,
|
||||
} from './events.js';
|
||||
|
||||
export {
|
||||
initAnalyticsWriter,
|
||||
isAnalyticsEnabled,
|
||||
writeAnalyticsEvent,
|
||||
} from './writer.js';
|
||||
|
||||
export {
|
||||
parseFindingsFromReport,
|
||||
extractDecisionFromReport,
|
||||
inferSeverity,
|
||||
emitFixActionEvents,
|
||||
emitRebuttalEvents,
|
||||
} from './report-parser.js';
|
||||
|
||||
export {
|
||||
computeReviewMetrics,
|
||||
formatReviewMetrics,
|
||||
parseSinceDuration,
|
||||
type ReviewMetrics,
|
||||
} from './metrics.js';
|
||||
|
||||
export { purgeOldEvents } from './purge.js';
|
||||
225
src/features/analytics/metrics.ts
Normal file
225
src/features/analytics/metrics.ts
Normal file
@ -0,0 +1,225 @@
|
||||
/**
|
||||
* Analytics metrics computation from JSONL event files.
|
||||
*
|
||||
* Reads events from ~/.takt/analytics/events/*.jsonl and computes
|
||||
* five key indicators for review quality assessment.
|
||||
*/
|
||||
|
||||
import { readdirSync, readFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import type { AnalyticsEvent, ReviewFindingEvent, FixActionEvent } from './events.js';
|
||||
|
||||
/** Aggregated metrics output */
|
||||
export interface ReviewMetrics {
|
||||
/** Re-report count per finding_id (same finding raised more than once) */
|
||||
reReportCounts: Map<string, number>;
|
||||
/** Ratio of findings that required 2+ round-trips before resolution */
|
||||
roundTripRatio: number;
|
||||
/** Average number of iterations to resolve a finding */
|
||||
averageResolutionIterations: number;
|
||||
/** Number of REJECT decisions per rule_id */
|
||||
rejectCountsByRule: Map<string, number>;
|
||||
/** Ratio of rebutted findings that were subsequently resolved */
|
||||
rebuttalResolvedRatio: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute review metrics from events within a time window.
|
||||
*
|
||||
* @param eventsDir Absolute path to the analytics events directory
|
||||
* @param sinceMs Epoch ms — only events after this time are included
|
||||
*/
|
||||
export function computeReviewMetrics(eventsDir: string, sinceMs: number): ReviewMetrics {
|
||||
const events = loadEventsAfter(eventsDir, sinceMs);
|
||||
const reviewFindings = events.filter(
|
||||
(e): e is ReviewFindingEvent => e.type === 'review_finding',
|
||||
);
|
||||
const fixActions = events.filter(
|
||||
(e): e is FixActionEvent => e.type === 'fix_action',
|
||||
);
|
||||
|
||||
return {
|
||||
reReportCounts: computeReReportCounts(reviewFindings),
|
||||
roundTripRatio: computeRoundTripRatio(reviewFindings),
|
||||
averageResolutionIterations: computeAverageResolutionIterations(reviewFindings),
|
||||
rejectCountsByRule: computeRejectCountsByRule(reviewFindings),
|
||||
rebuttalResolvedRatio: computeRebuttalResolvedRatio(fixActions, reviewFindings),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Format review metrics for CLI display.
|
||||
*/
|
||||
export function formatReviewMetrics(metrics: ReviewMetrics): string {
|
||||
const lines: string[] = [];
|
||||
lines.push('=== Review Metrics ===');
|
||||
lines.push('');
|
||||
|
||||
lines.push('Re-report counts (finding_id → count):');
|
||||
if (metrics.reReportCounts.size === 0) {
|
||||
lines.push(' (none)');
|
||||
} else {
|
||||
for (const [findingId, count] of metrics.reReportCounts) {
|
||||
lines.push(` ${findingId}: ${count}`);
|
||||
}
|
||||
}
|
||||
lines.push('');
|
||||
|
||||
lines.push(`Round-trip ratio (2+ iterations): ${(metrics.roundTripRatio * 100).toFixed(1)}%`);
|
||||
lines.push(`Average resolution iterations: ${metrics.averageResolutionIterations.toFixed(2)}`);
|
||||
lines.push('');
|
||||
|
||||
lines.push('REJECT counts by rule:');
|
||||
if (metrics.rejectCountsByRule.size === 0) {
|
||||
lines.push(' (none)');
|
||||
} else {
|
||||
for (const [ruleId, count] of metrics.rejectCountsByRule) {
|
||||
lines.push(` ${ruleId}: ${count}`);
|
||||
}
|
||||
}
|
||||
lines.push('');
|
||||
|
||||
lines.push(`Rebuttal → resolved ratio: ${(metrics.rebuttalResolvedRatio * 100).toFixed(1)}%`);
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
// ---- Internal helpers ----
|
||||
|
||||
/** Load all events from JSONL files whose date >= since */
|
||||
function loadEventsAfter(eventsDir: string, sinceMs: number): AnalyticsEvent[] {
|
||||
const sinceDate = new Date(sinceMs).toISOString().slice(0, 10);
|
||||
|
||||
let files: string[];
|
||||
try {
|
||||
files = readdirSync(eventsDir).filter((f) => f.endsWith('.jsonl'));
|
||||
} catch (e) {
|
||||
if ((e as NodeJS.ErrnoException).code === 'ENOENT') return [];
|
||||
throw e;
|
||||
}
|
||||
|
||||
const relevantFiles = files.filter((f) => {
|
||||
const dateStr = f.replace('.jsonl', '');
|
||||
return dateStr >= sinceDate;
|
||||
});
|
||||
|
||||
const events: AnalyticsEvent[] = [];
|
||||
for (const file of relevantFiles) {
|
||||
const content = readFileSync(join(eventsDir, file), 'utf-8');
|
||||
for (const line of content.split('\n')) {
|
||||
if (!line.trim()) continue;
|
||||
const event = JSON.parse(line) as AnalyticsEvent;
|
||||
if (new Date(event.timestamp).getTime() >= sinceMs) {
|
||||
events.push(event);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return events;
|
||||
}
|
||||
|
||||
/** Count how many times each finding_id appears (only those appearing 2+) */
|
||||
function computeReReportCounts(findings: ReviewFindingEvent[]): Map<string, number> {
|
||||
const counts = new Map<string, number>();
|
||||
for (const f of findings) {
|
||||
counts.set(f.findingId, (counts.get(f.findingId) ?? 0) + 1);
|
||||
}
|
||||
|
||||
const result = new Map<string, number>();
|
||||
for (const [id, count] of counts) {
|
||||
if (count >= 2) {
|
||||
result.set(id, count);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Ratio of findings that appear in 2+ iterations before resolution */
|
||||
function computeRoundTripRatio(findings: ReviewFindingEvent[]): number {
|
||||
const findingIds = new Set(findings.map((f) => f.findingId));
|
||||
if (findingIds.size === 0) return 0;
|
||||
|
||||
let multiIterationCount = 0;
|
||||
for (const id of findingIds) {
|
||||
const iterations = new Set(
|
||||
findings.filter((f) => f.findingId === id).map((f) => f.iteration),
|
||||
);
|
||||
if (iterations.size >= 2) {
|
||||
multiIterationCount++;
|
||||
}
|
||||
}
|
||||
|
||||
return multiIterationCount / findingIds.size;
|
||||
}
|
||||
|
||||
/** Average number of iterations from first appearance to resolution */
|
||||
function computeAverageResolutionIterations(findings: ReviewFindingEvent[]): number {
|
||||
const findingIds = new Set(findings.map((f) => f.findingId));
|
||||
if (findingIds.size === 0) return 0;
|
||||
|
||||
let totalIterations = 0;
|
||||
let resolvedCount = 0;
|
||||
|
||||
for (const id of findingIds) {
|
||||
const related = findings.filter((f) => f.findingId === id);
|
||||
const minIteration = Math.min(...related.map((f) => f.iteration));
|
||||
const resolved = related.find((f) => f.status === 'resolved');
|
||||
if (resolved) {
|
||||
totalIterations += resolved.iteration - minIteration + 1;
|
||||
resolvedCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (resolvedCount === 0) return 0;
|
||||
return totalIterations / resolvedCount;
|
||||
}
|
||||
|
||||
/** Ratio of rebutted findings that were subsequently resolved in a review */
|
||||
function computeRebuttalResolvedRatio(
|
||||
fixActions: FixActionEvent[],
|
||||
findings: ReviewFindingEvent[],
|
||||
): number {
|
||||
const rebuttedIds = new Set(
|
||||
fixActions.filter((a) => a.action === 'rebutted').map((a) => a.findingId),
|
||||
);
|
||||
if (rebuttedIds.size === 0) return 0;
|
||||
|
||||
let resolvedCount = 0;
|
||||
for (const id of rebuttedIds) {
|
||||
const wasResolved = findings.some(
|
||||
(f) => f.findingId === id && f.status === 'resolved',
|
||||
);
|
||||
if (wasResolved) {
|
||||
resolvedCount++;
|
||||
}
|
||||
}
|
||||
|
||||
return resolvedCount / rebuttedIds.size;
|
||||
}
|
||||
|
||||
/** Count of REJECT decisions per rule_id */
|
||||
function computeRejectCountsByRule(findings: ReviewFindingEvent[]): Map<string, number> {
|
||||
const counts = new Map<string, number>();
|
||||
for (const f of findings) {
|
||||
if (f.decision === 'reject') {
|
||||
counts.set(f.ruleId, (counts.get(f.ruleId) ?? 0) + 1);
|
||||
}
|
||||
}
|
||||
return counts;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a duration string like "7d", "30d", "14d" into milliseconds.
|
||||
*/
|
||||
export function parseSinceDuration(since: string): number {
|
||||
const match = since.match(/^(\d+)d$/);
|
||||
if (!match) {
|
||||
throw new Error(`Invalid duration format: "${since}". Use format like "7d", "30d".`);
|
||||
}
|
||||
const daysStr = match[1];
|
||||
if (!daysStr) {
|
||||
throw new Error(`Invalid duration format: "${since}". Use format like "7d", "30d".`);
|
||||
}
|
||||
const days = parseInt(daysStr, 10);
|
||||
return days * 24 * 60 * 60 * 1000;
|
||||
}
|
||||
40
src/features/analytics/purge.ts
Normal file
40
src/features/analytics/purge.ts
Normal file
@ -0,0 +1,40 @@
|
||||
/**
|
||||
* Retention-based purge for analytics event files.
|
||||
*
|
||||
* Deletes JSONL files older than the configured retention period.
|
||||
*/
|
||||
|
||||
import { readdirSync, unlinkSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
|
||||
/**
|
||||
* Purge JSONL event files older than the retention period.
|
||||
*
|
||||
* @param eventsDir Absolute path to the analytics events directory
|
||||
* @param retentionDays Number of days to retain (files older than this are deleted)
|
||||
* @param now Reference time for age calculation
|
||||
* @returns List of deleted file names
|
||||
*/
|
||||
export function purgeOldEvents(eventsDir: string, retentionDays: number, now: Date): string[] {
|
||||
const cutoffDate = new Date(now.getTime() - retentionDays * 24 * 60 * 60 * 1000);
|
||||
const cutoffStr = cutoffDate.toISOString().slice(0, 10);
|
||||
|
||||
let files: string[];
|
||||
try {
|
||||
files = readdirSync(eventsDir).filter((f) => f.endsWith('.jsonl'));
|
||||
} catch (e) {
|
||||
if ((e as NodeJS.ErrnoException).code === 'ENOENT') return [];
|
||||
throw e;
|
||||
}
|
||||
|
||||
const deleted: string[] = [];
|
||||
for (const file of files) {
|
||||
const dateStr = file.replace('.jsonl', '');
|
||||
if (dateStr < cutoffStr) {
|
||||
unlinkSync(join(eventsDir, file));
|
||||
deleted.push(file);
|
||||
}
|
||||
}
|
||||
|
||||
return deleted;
|
||||
}
|
||||
191
src/features/analytics/report-parser.ts
Normal file
191
src/features/analytics/report-parser.ts
Normal file
@ -0,0 +1,191 @@
|
||||
/**
|
||||
* Extracts analytics event data from review report markdown.
|
||||
*
|
||||
* Review reports follow a consistent structure with finding tables
|
||||
* under "new", "persists", and "resolved" sections. Each table row
|
||||
* contains a finding_id column.
|
||||
*/
|
||||
|
||||
import type { FindingStatus, FindingSeverity, FindingDecision, FixActionEvent, FixActionType } from './events.js';
|
||||
import { writeAnalyticsEvent } from './writer.js';
|
||||
|
||||
/**
 * One finding row extracted from a review report's markdown tables.
 */
export interface ParsedFinding {
  // Value of the table's finding_id column.
  findingId: string;
  // Which report section the row appeared under: new / persists / resolved.
  status: FindingStatus;
  // Category cell value when present; otherwise falls back to the finding ID.
  ruleId: string;
  // File path parsed from a path-like cell ('' when no such cell exists).
  file: string;
  // Line number parsed from a ":<digits>" suffix in the location (0 when absent).
  line: number;
}
|
||||
|
||||
// Markdown section headings ("## ... new|persists|resolved ...") mapped to the
// finding status they introduce. Matched case-insensitively, first hit wins
// (see matchSection).
const SECTION_PATTERNS: Array<{ pattern: RegExp; status: FindingStatus }> = [
  { pattern: /^##\s+.*\bnew\b/i, status: 'new' },
  { pattern: /^##\s+.*\bpersists\b/i, status: 'persists' },
  { pattern: /^##\s+.*\bresolved\b/i, status: 'resolved' },
];
|
||||
|
||||
export function parseFindingsFromReport(reportContent: string): ParsedFinding[] {
|
||||
const lines = reportContent.split('\n');
|
||||
const findings: ParsedFinding[] = [];
|
||||
let currentStatus: FindingStatus | null = null;
|
||||
let columnIndices: TableColumnIndices | null = null;
|
||||
let headerParsed = false;
|
||||
|
||||
for (const line of lines) {
|
||||
const sectionMatch = matchSection(line);
|
||||
if (sectionMatch) {
|
||||
currentStatus = sectionMatch;
|
||||
columnIndices = null;
|
||||
headerParsed = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (line.startsWith('## ')) {
|
||||
currentStatus = null;
|
||||
columnIndices = null;
|
||||
headerParsed = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!currentStatus) continue;
|
||||
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed.startsWith('|')) continue;
|
||||
if (isSeparatorRow(trimmed)) continue;
|
||||
|
||||
if (!headerParsed) {
|
||||
columnIndices = detectColumnIndices(trimmed);
|
||||
headerParsed = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!columnIndices || columnIndices.findingId < 0) continue;
|
||||
|
||||
const finding = parseTableRow(line, currentStatus, columnIndices);
|
||||
if (finding) {
|
||||
findings.push(finding);
|
||||
}
|
||||
}
|
||||
|
||||
return findings;
|
||||
}
|
||||
|
||||
export function extractDecisionFromReport(reportContent: string): FindingDecision | null {
|
||||
const resultMatch = reportContent.match(/^##\s+(?:結果|Result)\s*:\s*(\w+)/m);
|
||||
const decision = resultMatch?.[1];
|
||||
if (!decision) return null;
|
||||
return decision.toUpperCase() === 'REJECT' ? 'reject' : 'approve';
|
||||
}
|
||||
|
||||
function matchSection(line: string): FindingStatus | null {
|
||||
for (const { pattern, status } of SECTION_PATTERNS) {
|
||||
if (pattern.test(line)) return status;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function isSeparatorRow(trimmed: string): boolean {
|
||||
return /^\|[\s-]+\|/.test(trimmed);
|
||||
}
|
||||
|
||||
/** Column positions detected in a table header row (-1 when the column is absent). */
interface TableColumnIndices {
  // Index of the "finding_id" column.
  findingId: number;
  // Index of the "category" / "カテゴリ" column.
  category: number;
}
|
||||
|
||||
function detectColumnIndices(headerRow: string): TableColumnIndices {
|
||||
const cells = headerRow.split('|').map((c) => c.trim()).filter(Boolean);
|
||||
const findingId = cells.findIndex((c) => c.toLowerCase() === 'finding_id');
|
||||
const category = cells.findIndex((c) => {
|
||||
const lower = c.toLowerCase();
|
||||
return lower === 'category' || lower === 'カテゴリ';
|
||||
});
|
||||
return { findingId, category };
|
||||
}
|
||||
|
||||
function parseTableRow(
|
||||
line: string,
|
||||
status: FindingStatus,
|
||||
indices: TableColumnIndices,
|
||||
): ParsedFinding | null {
|
||||
const cells = line.split('|').map((c) => c.trim()).filter(Boolean);
|
||||
if (cells.length <= indices.findingId) return null;
|
||||
|
||||
const findingId = cells[indices.findingId];
|
||||
if (!findingId) return null;
|
||||
|
||||
const categoryValue = indices.category >= 0 ? cells[indices.category] : undefined;
|
||||
const ruleId = categoryValue ?? findingId;
|
||||
|
||||
const locationCell = findLocation(cells);
|
||||
const { file, line: lineNum } = parseLocation(locationCell);
|
||||
|
||||
return { findingId, status, ruleId, file, line: lineNum };
|
||||
}
|
||||
|
||||
function findLocation(cells: string[]): string {
|
||||
for (const cell of cells) {
|
||||
if (cell.includes('/') || cell.includes('.ts') || cell.includes('.js') || cell.includes('.py')) {
|
||||
return cell;
|
||||
}
|
||||
}
|
||||
return '';
|
||||
}
|
||||
|
||||
function parseLocation(location: string): { file: string; line: number } {
|
||||
const cleaned = location.replace(/`/g, '');
|
||||
const lineMatch = cleaned.match(/:(\d+)/);
|
||||
const lineStr = lineMatch?.[1];
|
||||
const lineNum = lineStr ? parseInt(lineStr, 10) : 0;
|
||||
const file = cleaned.replace(/:\d+.*$/, '').trim();
|
||||
return { file, line: lineNum };
|
||||
}
|
||||
|
||||
export function inferSeverity(findingId: string): FindingSeverity {
|
||||
const id = findingId.toUpperCase();
|
||||
if (id.includes('SEC')) return 'error';
|
||||
return 'warning';
|
||||
}
|
||||
|
||||
// Matches finding IDs such as "SEC-001" or "ARCH-NEW-some_id": an uppercase
// prefix of 2+ letters, an optional "NEW-" segment, then word chars/hyphens.
const FINDING_ID_PATTERN = /\b[A-Z]{2,}-(?:NEW-)?[\w-]+\b/g;
|
||||
|
||||
export function emitFixActionEvents(
|
||||
responseContent: string,
|
||||
iteration: number,
|
||||
runId: string,
|
||||
timestamp: Date,
|
||||
): void {
|
||||
emitActionEvents(responseContent, 'fixed', iteration, runId, timestamp);
|
||||
}
|
||||
|
||||
export function emitRebuttalEvents(
|
||||
responseContent: string,
|
||||
iteration: number,
|
||||
runId: string,
|
||||
timestamp: Date,
|
||||
): void {
|
||||
emitActionEvents(responseContent, 'rebutted', iteration, runId, timestamp);
|
||||
}
|
||||
|
||||
function emitActionEvents(
|
||||
responseContent: string,
|
||||
action: FixActionType,
|
||||
iteration: number,
|
||||
runId: string,
|
||||
timestamp: Date,
|
||||
): void {
|
||||
const matches = responseContent.match(FINDING_ID_PATTERN);
|
||||
if (!matches) return;
|
||||
|
||||
const uniqueIds = [...new Set(matches)];
|
||||
for (const findingId of uniqueIds) {
|
||||
const event: FixActionEvent = {
|
||||
type: 'fix_action',
|
||||
findingId,
|
||||
action,
|
||||
iteration,
|
||||
runId,
|
||||
timestamp: timestamp.toISOString(),
|
||||
};
|
||||
writeAnalyticsEvent(event);
|
||||
}
|
||||
}
|
||||
82
src/features/analytics/writer.ts
Normal file
82
src/features/analytics/writer.ts
Normal file
@ -0,0 +1,82 @@
|
||||
/**
|
||||
* Analytics event writer — JSONL append-only with date-based rotation.
|
||||
*
|
||||
* Writes to ~/.takt/analytics/events/YYYY-MM-DD.jsonl when analytics.enabled = true.
|
||||
* Does nothing when disabled.
|
||||
*/
|
||||
|
||||
import { appendFileSync, mkdirSync, existsSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import type { AnalyticsEvent } from './events.js';
|
||||
|
||||
export class AnalyticsWriter {
|
||||
private static instance: AnalyticsWriter | null = null;
|
||||
|
||||
private enabled = false;
|
||||
private eventsDir: string | null = null;
|
||||
|
||||
private constructor() {}
|
||||
|
||||
static getInstance(): AnalyticsWriter {
|
||||
if (!AnalyticsWriter.instance) {
|
||||
AnalyticsWriter.instance = new AnalyticsWriter();
|
||||
}
|
||||
return AnalyticsWriter.instance;
|
||||
}
|
||||
|
||||
static resetInstance(): void {
|
||||
AnalyticsWriter.instance = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize writer.
|
||||
* @param enabled Whether analytics collection is active
|
||||
* @param eventsDir Absolute path to the events directory (e.g. ~/.takt/analytics/events)
|
||||
*/
|
||||
init(enabled: boolean, eventsDir: string): void {
|
||||
this.enabled = enabled;
|
||||
this.eventsDir = eventsDir;
|
||||
|
||||
if (this.enabled) {
|
||||
if (!existsSync(this.eventsDir)) {
|
||||
mkdirSync(this.eventsDir, { recursive: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
isEnabled(): boolean {
|
||||
return this.enabled;
|
||||
}
|
||||
|
||||
/** Append an analytics event to the current day's JSONL file */
|
||||
write(event: AnalyticsEvent): void {
|
||||
if (!this.enabled || !this.eventsDir) {
|
||||
return;
|
||||
}
|
||||
|
||||
const filePath = join(this.eventsDir, `${formatDate(event.timestamp)}.jsonl`);
|
||||
appendFileSync(filePath, JSON.stringify(event) + '\n', 'utf-8');
|
||||
}
|
||||
}
|
||||
|
||||
function formatDate(isoTimestamp: string): string {
|
||||
return isoTimestamp.slice(0, 10);
|
||||
}
|
||||
|
||||
// ---- Module-level convenience functions ----
|
||||
|
||||
export function initAnalyticsWriter(enabled: boolean, eventsDir: string): void {
|
||||
AnalyticsWriter.getInstance().init(enabled, eventsDir);
|
||||
}
|
||||
|
||||
export function resetAnalyticsWriter(): void {
|
||||
AnalyticsWriter.resetInstance();
|
||||
}
|
||||
|
||||
export function isAnalyticsEnabled(): boolean {
|
||||
return AnalyticsWriter.getInstance().isEnabled();
|
||||
}
|
||||
|
||||
export function writeAnalyticsEvent(event: AnalyticsEvent): void {
|
||||
AnalyticsWriter.getInstance().write(event);
|
||||
}
|
||||
@ -11,7 +11,7 @@ import chalk from 'chalk';
|
||||
import type { PieceSource } from '../../infra/config/loaders/pieceResolver.js';
|
||||
import { getLanguageResourcesDir } from '../../infra/resources/index.js';
|
||||
import { getGlobalConfigDir, getProjectConfigDir } from '../../infra/config/paths.js';
|
||||
import { getLanguage, getBuiltinPiecesEnabled } from '../../infra/config/global/globalConfig.js';
|
||||
import { resolvePieceConfigValues } from '../../infra/config/index.js';
|
||||
import { section, error as logError, info } from '../../shared/ui/index.js';
|
||||
|
||||
const FACET_TYPES = [
|
||||
@ -62,10 +62,11 @@ function getFacetDirs(
|
||||
facetType: FacetType,
|
||||
cwd: string,
|
||||
): { dir: string; source: PieceSource }[] {
|
||||
const config = resolvePieceConfigValues(cwd, ['enableBuiltinPieces', 'language']);
|
||||
const dirs: { dir: string; source: PieceSource }[] = [];
|
||||
|
||||
if (getBuiltinPiecesEnabled()) {
|
||||
const lang = getLanguage();
|
||||
if (config.enableBuiltinPieces !== false) {
|
||||
const lang = config.language;
|
||||
dirs.push({ dir: join(getLanguageResourcesDir(lang), facetType), source: 'builtin' });
|
||||
}
|
||||
|
||||
|
||||
@ -3,7 +3,7 @@
|
||||
*/
|
||||
|
||||
export { switchPiece } from './switchPiece.js';
|
||||
export { switchConfig, getCurrentPermissionMode, setPermissionMode, type PermissionMode } from './switchConfig.js';
|
||||
export { ejectBuiltin, ejectFacet, parseFacetType, VALID_FACET_TYPES } from './ejectBuiltin.js';
|
||||
export { resetCategoriesToDefault } from './resetCategories.js';
|
||||
export { resetConfigToDefault } from './resetConfig.js';
|
||||
export { deploySkill } from './deploySkill.js';
|
||||
|
||||
@ -5,12 +5,12 @@
|
||||
import { resetPieceCategories, getPieceCategoriesPath } from '../../infra/config/global/pieceCategories.js';
|
||||
import { header, success, info } from '../../shared/ui/index.js';
|
||||
|
||||
export async function resetCategoriesToDefault(): Promise<void> {
|
||||
export async function resetCategoriesToDefault(cwd: string): Promise<void> {
|
||||
header('Reset Categories');
|
||||
|
||||
resetPieceCategories();
|
||||
resetPieceCategories(cwd);
|
||||
|
||||
const userPath = getPieceCategoriesPath();
|
||||
const userPath = getPieceCategoriesPath(cwd);
|
||||
success('User category overlay reset.');
|
||||
info(` ${userPath}`);
|
||||
}
|
||||
|
||||
13
src/features/config/resetConfig.ts
Normal file
13
src/features/config/resetConfig.ts
Normal file
@ -0,0 +1,13 @@
|
||||
import { resetGlobalConfigToTemplate } from '../../infra/config/global/index.js';
|
||||
import { header, info, success } from '../../shared/ui/index.js';
|
||||
|
||||
export async function resetConfigToDefault(): Promise<void> {
|
||||
header('Reset Config');
|
||||
|
||||
const result = resetGlobalConfigToTemplate();
|
||||
success('Global config reset from builtin template.');
|
||||
info(` config: ${result.configPath}`);
|
||||
if (result.backupPath) {
|
||||
info(` backup: ${result.backupPath}`);
|
||||
}
|
||||
}
|
||||
@ -1,134 +0,0 @@
|
||||
/**
|
||||
* Config switching command (like piece switching)
|
||||
*
|
||||
* Permission mode selection that works from CLI.
|
||||
* Uses selectOption for prompt selection, same pattern as switchPiece.
|
||||
*/
|
||||
|
||||
import chalk from 'chalk';
|
||||
import { info, success } from '../../shared/ui/index.js';
|
||||
import { selectOption } from '../../shared/prompt/index.js';
|
||||
import {
|
||||
loadProjectConfig,
|
||||
updateProjectConfig,
|
||||
} from '../../infra/config/index.js';
|
||||
import type { PermissionMode } from '../../infra/config/index.js';
|
||||
|
||||
// Re-export for convenience
|
||||
export type { PermissionMode } from '../../infra/config/index.js';
|
||||
|
||||
/**
 * Common permission mode option definitions shared by the selection prompt.
 * Each entry carries the key persisted to config plus the user-facing
 * label/description/details and icon shown during selection.
 */
export const PERMISSION_MODE_OPTIONS: {
  key: PermissionMode;
  label: string;
  description: string;
  details: string[];
  icon: string;
}[] = [
  {
    key: 'default',
    label: 'デフォルト (default)',
    description: 'Agent SDK標準モード(ファイル編集自動承認、最小限の確認)',
    details: [
      'Claude Agent SDKの標準設定(acceptEdits)を使用',
      'ファイル編集は自動承認され、確認プロンプトなしで実行',
      'Bash等の危険な操作は権限確認が表示される',
      '通常の開発作業に推奨',
    ],
    icon: '📋',
  },
  {
    key: 'sacrifice-my-pc',
    label: 'SACRIFICE-MY-PC',
    description: '全ての権限リクエストが自動承認されます',
    details: [
      '⚠️ 警告: 全ての操作が確認なしで実行されます',
      'Bash, ファイル削除, システム操作も自動承認',
      'ブロック状態(判断待ち)も自動スキップ',
      '完全自動化が必要な場合のみ使用してください',
    ],
    icon: '💀',
  },
];
|
||||
|
||||
function getPermissionModeOptions(currentMode: PermissionMode): {
|
||||
label: string;
|
||||
value: PermissionMode;
|
||||
description: string;
|
||||
details: string[];
|
||||
}[] {
|
||||
return PERMISSION_MODE_OPTIONS.map((opt) => ({
|
||||
label: currentMode === opt.key
|
||||
? (opt.key === 'sacrifice-my-pc' ? chalk.red : chalk.blue)(`${opt.icon} ${opt.label}`) + ' (current)'
|
||||
: (opt.key === 'sacrifice-my-pc' ? chalk.red : chalk.blue)(`${opt.icon} ${opt.label}`),
|
||||
value: opt.key,
|
||||
description: opt.description,
|
||||
details: opt.details,
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current permission mode from project config
|
||||
*/
|
||||
export function getCurrentPermissionMode(cwd: string): PermissionMode {
|
||||
const config = loadProjectConfig(cwd);
|
||||
if (config.permissionMode) {
|
||||
return config.permissionMode as PermissionMode;
|
||||
}
|
||||
return 'default';
|
||||
}
|
||||
|
||||
/**
|
||||
* Set permission mode in project config
|
||||
*/
|
||||
export function setPermissionMode(cwd: string, mode: PermissionMode): void {
|
||||
updateProjectConfig(cwd, 'permissionMode', mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Switch permission mode (like switchPiece)
|
||||
* @returns true if switch was successful
|
||||
*/
|
||||
export async function switchConfig(cwd: string, modeName?: string): Promise<boolean> {
|
||||
const currentMode = getCurrentPermissionMode(cwd);
|
||||
|
||||
// No mode specified - show selection prompt
|
||||
if (!modeName) {
|
||||
info(`Current mode: ${currentMode}`);
|
||||
|
||||
const options = getPermissionModeOptions(currentMode);
|
||||
const selected = await selectOption('Select permission mode:', options);
|
||||
|
||||
if (!selected) {
|
||||
info('Cancelled');
|
||||
return false;
|
||||
}
|
||||
|
||||
modeName = selected;
|
||||
}
|
||||
|
||||
// Validate mode name
|
||||
if (modeName !== 'default' && modeName !== 'sacrifice-my-pc') {
|
||||
info(`Invalid mode: ${modeName}`);
|
||||
info('Available modes: default, sacrifice-my-pc');
|
||||
return false;
|
||||
}
|
||||
|
||||
const finalMode: PermissionMode = modeName as PermissionMode;
|
||||
|
||||
// Save to project config
|
||||
setPermissionMode(cwd, finalMode);
|
||||
|
||||
if (finalMode === 'sacrifice-my-pc') {
|
||||
success('Switched to: sacrifice-my-pc 💀');
|
||||
info('All permission requests will be auto-approved.');
|
||||
} else {
|
||||
success('Switched to: default 📋');
|
||||
info('Using Agent SDK default mode (acceptEdits - minimal permission prompts).');
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
@ -4,7 +4,7 @@
|
||||
|
||||
import {
|
||||
loadPiece,
|
||||
getCurrentPiece,
|
||||
resolveConfigValue,
|
||||
setCurrentPiece,
|
||||
} from '../../infra/config/index.js';
|
||||
import { info, success, error } from '../../shared/ui/index.js';
|
||||
@ -16,7 +16,7 @@ import { selectPiece } from '../pieceSelection/index.js';
|
||||
*/
|
||||
export async function switchPiece(cwd: string, pieceName?: string): Promise<boolean> {
|
||||
if (!pieceName) {
|
||||
const current = getCurrentPiece(cwd);
|
||||
const current = resolveConfigValue(cwd, 'piece');
|
||||
info(`Current piece: ${current}`);
|
||||
|
||||
const selected = await selectPiece(cwd, { fallbackToDefault: false });
|
||||
|
||||
@ -10,7 +10,7 @@
|
||||
|
||||
import chalk from 'chalk';
|
||||
import {
|
||||
loadGlobalConfig,
|
||||
resolveConfigValues,
|
||||
loadPersonaSessions,
|
||||
updatePersonaSession,
|
||||
loadSessionState,
|
||||
@ -22,6 +22,7 @@ import { createLogger, getErrorMessage } from '../../shared/utils/index.js';
|
||||
import { info, error, blankLine, StreamDisplay } from '../../shared/ui/index.js';
|
||||
import { getLabel, getLabelObject } from '../../shared/i18n/index.js';
|
||||
import { readMultilineInput } from './lineEditor.js';
|
||||
import { selectRecentSession } from './sessionSelector.js';
|
||||
import { EXIT_SIGINT } from '../../shared/exitCodes.js';
|
||||
import {
|
||||
type PieceContext,
|
||||
@ -55,10 +56,14 @@ export interface SessionContext {
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize provider, session, and language for interactive conversation.
|
||||
* Initialize provider and language for interactive conversation.
|
||||
*
|
||||
* Session ID is always undefined (new session).
|
||||
* Callers that need session continuity pass sessionId explicitly
|
||||
* (e.g., --continue flag or /resume command).
|
||||
*/
|
||||
export function initializeSession(cwd: string, personaName: string): SessionContext {
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const globalConfig = resolveConfigValues(cwd, ['language', 'provider', 'model']);
|
||||
const lang = resolveLanguage(globalConfig.language);
|
||||
if (!globalConfig.provider) {
|
||||
throw new Error('Provider is not configured.');
|
||||
@ -66,10 +71,8 @@ export function initializeSession(cwd: string, personaName: string): SessionCont
|
||||
const providerType = globalConfig.provider as ProviderType;
|
||||
const provider = getProvider(providerType);
|
||||
const model = globalConfig.model as string | undefined;
|
||||
const savedSessions = loadPersonaSessions(cwd, providerType);
|
||||
const sessionId: string | undefined = savedSessions[personaName];
|
||||
|
||||
return { provider, providerType, model, lang, personaName, sessionId };
|
||||
return { provider, providerType, model, lang, personaName, sessionId: undefined };
|
||||
}
|
||||
|
||||
/**
|
||||
@ -317,6 +320,15 @@ export async function runConversationLoop(
|
||||
return { action: 'cancel', task: '' };
|
||||
}
|
||||
|
||||
if (trimmed === '/resume') {
|
||||
const selectedId = await selectRecentSession(cwd, ctx.lang);
|
||||
if (selectedId) {
|
||||
sessionId = selectedId;
|
||||
info(getLabel('interactive.resumeSessionLoaded', ctx.lang));
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
history.push({ role: 'user', content: trimmed });
|
||||
log.debug('Sending to AI', { messageCount: history.length, sessionId });
|
||||
process.stdin.pause();
|
||||
|
||||
@ -21,7 +21,7 @@ import {
|
||||
import { resolveLanguage } from './interactive.js';
|
||||
import { loadTemplate } from '../../shared/prompts/index.js';
|
||||
import { getLabelObject } from '../../shared/i18n/index.js';
|
||||
import { loadGlobalConfig } from '../../infra/config/index.js';
|
||||
import { resolveConfigValues } from '../../infra/config/index.js';
|
||||
import type { InstructModeResult, InstructUIText } from '../tasks/list/instructMode.js';
|
||||
|
||||
/** Failure information for a retry task */
|
||||
@ -103,7 +103,7 @@ export async function runRetryMode(
|
||||
retryContext: RetryContext,
|
||||
previousOrderContent: string | null,
|
||||
): Promise<InstructModeResult> {
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const globalConfig = resolveConfigValues(cwd, ['language', 'provider']);
|
||||
const lang = resolveLanguage(globalConfig.language);
|
||||
|
||||
if (!globalConfig.provider) {
|
||||
|
||||
@ -11,13 +11,12 @@ import {
|
||||
removeBookmark,
|
||||
} from '../../infra/config/global/index.js';
|
||||
import {
|
||||
findPieceCategories,
|
||||
listPieces,
|
||||
listPieceEntries,
|
||||
loadAllPiecesWithSources,
|
||||
getPieceCategories,
|
||||
buildCategorizedPieces,
|
||||
getCurrentPiece,
|
||||
resolveConfigValue,
|
||||
type PieceDirEntry,
|
||||
type PieceCategoryNode,
|
||||
type CategorizedPieces,
|
||||
@ -160,8 +159,6 @@ function buildCategoryLevelOptions(
|
||||
categories: PieceCategoryNode[],
|
||||
pieces: string[],
|
||||
currentPiece: string,
|
||||
rootCategories: PieceCategoryNode[],
|
||||
currentPathLabel: string,
|
||||
): {
|
||||
options: SelectionOption[];
|
||||
categoryMap: Map<string, PieceCategoryNode>;
|
||||
@ -181,19 +178,7 @@ function buildCategoryLevelOptions(
|
||||
|
||||
for (const pieceName of pieces) {
|
||||
const isCurrent = pieceName === currentPiece;
|
||||
const alsoIn = findPieceCategories(pieceName, rootCategories)
|
||||
.filter((path) => path !== currentPathLabel);
|
||||
const alsoInLabel = alsoIn.length > 0 ? `also in ${alsoIn.join(', ')}` : '';
|
||||
|
||||
let label = `🎼 ${pieceName}`;
|
||||
if (isCurrent && alsoInLabel) {
|
||||
label = `🎼 ${pieceName} (current, ${alsoInLabel})`;
|
||||
} else if (isCurrent) {
|
||||
label = `🎼 ${pieceName} (current)`;
|
||||
} else if (alsoInLabel) {
|
||||
label = `🎼 ${pieceName} (${alsoInLabel})`;
|
||||
}
|
||||
|
||||
const label = isCurrent ? `🎼 ${pieceName} (current)` : `🎼 ${pieceName}`;
|
||||
options.push({ label, value: pieceName });
|
||||
}
|
||||
|
||||
@ -223,8 +208,6 @@ async function selectPieceFromCategoryTree(
|
||||
currentCategories,
|
||||
currentPieces,
|
||||
currentPiece,
|
||||
categories,
|
||||
currentPathLabel,
|
||||
);
|
||||
|
||||
if (options.length === 0) {
|
||||
@ -521,8 +504,8 @@ export async function selectPiece(
|
||||
options?: SelectPieceOptions,
|
||||
): Promise<string | null> {
|
||||
const fallbackToDefault = options?.fallbackToDefault !== false;
|
||||
const categoryConfig = getPieceCategories();
|
||||
const currentPiece = getCurrentPiece(cwd);
|
||||
const categoryConfig = getPieceCategories(cwd);
|
||||
const currentPiece = resolveConfigValue(cwd, 'piece');
|
||||
|
||||
if (categoryConfig) {
|
||||
const allPieces = loadAllPiecesWithSources(cwd);
|
||||
@ -534,7 +517,7 @@ export async function selectPiece(
|
||||
info('No pieces found.');
|
||||
return null;
|
||||
}
|
||||
const categorized = buildCategorizedPieces(allPieces, categoryConfig);
|
||||
const categorized = buildCategorizedPieces(allPieces, categoryConfig, cwd);
|
||||
warnMissingPieces(categorized.missingPieces.filter((missing) => missing.source === 'user'));
|
||||
return selectPieceFromCategorizedPieces(categorized, currentPiece);
|
||||
}
|
||||
|
||||
@ -21,7 +21,7 @@ import {
|
||||
} from '../../infra/github/index.js';
|
||||
import { stageAndCommit, getCurrentBranch } from '../../infra/task/index.js';
|
||||
import { executeTask, type TaskExecutionOptions, type PipelineExecutionOptions } from '../tasks/index.js';
|
||||
import { loadGlobalConfig } from '../../infra/config/index.js';
|
||||
import { resolveConfigValues } from '../../infra/config/index.js';
|
||||
import { info, error, success, status, blankLine } from '../../shared/ui/index.js';
|
||||
import { createLogger, getErrorMessage } from '../../shared/utils/index.js';
|
||||
import type { PipelineConfig } from '../../core/models/index.js';
|
||||
@ -106,7 +106,7 @@ function buildPipelinePrBody(
|
||||
*/
|
||||
export async function executePipeline(options: PipelineExecutionOptions): Promise<number> {
|
||||
const { cwd, piece, autoPr, skipGit } = options;
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const globalConfig = resolveConfigValues(cwd, ['pipeline']);
|
||||
const pipelineConfig = globalConfig.pipeline;
|
||||
let issue: GitHubIssue | undefined;
|
||||
let task: string;
|
||||
|
||||
@ -5,7 +5,7 @@
|
||||
* Useful for debugging and understanding what prompts agents will receive.
|
||||
*/
|
||||
|
||||
import { loadPieceByIdentifier, getCurrentPiece, loadGlobalConfig } from '../../infra/config/index.js';
|
||||
import { loadPieceByIdentifier, resolvePieceConfigValue } from '../../infra/config/index.js';
|
||||
import { InstructionBuilder } from '../../core/piece/instruction/InstructionBuilder.js';
|
||||
import { ReportInstructionBuilder } from '../../core/piece/instruction/ReportInstructionBuilder.js';
|
||||
import { StatusJudgmentBuilder } from '../../core/piece/instruction/StatusJudgmentBuilder.js';
|
||||
@ -21,7 +21,7 @@ import { header, info, error, blankLine } from '../../shared/ui/index.js';
|
||||
* the Phase 1, Phase 2, and Phase 3 prompts with sample variable values.
|
||||
*/
|
||||
export async function previewPrompts(cwd: string, pieceIdentifier?: string): Promise<void> {
|
||||
const identifier = pieceIdentifier ?? getCurrentPiece(cwd);
|
||||
const identifier = pieceIdentifier ?? resolvePieceConfigValue(cwd, 'piece');
|
||||
const config = loadPieceByIdentifier(identifier, cwd);
|
||||
|
||||
if (!config) {
|
||||
@ -29,8 +29,7 @@ export async function previewPrompts(cwd: string, pieceIdentifier?: string): Pro
|
||||
return;
|
||||
}
|
||||
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const language: Language = globalConfig.language ?? 'en';
|
||||
const language = resolvePieceConfigValue(cwd, 'language') as Language;
|
||||
|
||||
header(`Prompt Preview: ${config.name}`);
|
||||
info(`Movements: ${config.movements.length}`);
|
||||
|
||||
@ -8,10 +8,11 @@ import * as path from 'node:path';
|
||||
import * as fs from 'node:fs';
|
||||
import { promptInput, confirm } from '../../../shared/prompt/index.js';
|
||||
import { success, info, error, withProgress } from '../../../shared/ui/index.js';
|
||||
import { TaskRunner, type TaskFileData } from '../../../infra/task/index.js';
|
||||
import { TaskRunner, type TaskFileData, summarizeTaskName } from '../../../infra/task/index.js';
|
||||
import { determinePiece } from '../execute/selectAndExecute.js';
|
||||
import { createLogger, getErrorMessage, generateReportDir } from '../../../shared/utils/index.js';
|
||||
import { isIssueReference, resolveIssueTask, parseIssueNumbers, createIssue } from '../../../infra/github/index.js';
|
||||
import { firstLine } from '../../../infra/task/naming.js';
|
||||
|
||||
const log = createLogger('add-task');
|
||||
|
||||
@ -39,9 +40,11 @@ export async function saveTaskFile(
|
||||
options?: { piece?: string; issue?: number; worktree?: boolean | string; branch?: string; autoPr?: boolean },
|
||||
): Promise<{ taskName: string; tasksFile: string }> {
|
||||
const runner = new TaskRunner(cwd);
|
||||
const taskSlug = resolveUniqueTaskSlug(cwd, generateReportDir(taskContent));
|
||||
const taskDir = path.join(cwd, '.takt', 'tasks', taskSlug);
|
||||
const taskDirRelative = `.takt/tasks/${taskSlug}`;
|
||||
const slug = await summarizeTaskName(taskContent, { cwd });
|
||||
const summary = firstLine(taskContent);
|
||||
const taskDirSlug = resolveUniqueTaskSlug(cwd, generateReportDir(taskContent));
|
||||
const taskDir = path.join(cwd, '.takt', 'tasks', taskDirSlug);
|
||||
const taskDirRelative = `.takt/tasks/${taskDirSlug}`;
|
||||
const orderPath = path.join(taskDir, 'order.md');
|
||||
fs.mkdirSync(taskDir, { recursive: true });
|
||||
fs.writeFileSync(orderPath, taskContent, 'utf-8');
|
||||
@ -55,6 +58,8 @@ export async function saveTaskFile(
|
||||
const created = runner.addTask(taskContent, {
|
||||
...config,
|
||||
task_dir: taskDirRelative,
|
||||
slug,
|
||||
summary,
|
||||
});
|
||||
const tasksFile = path.join(cwd, '.takt', 'tasks.yaml');
|
||||
log.info('Task created', { taskName: created.name, tasksFile, config });
|
||||
@ -69,8 +74,8 @@ export async function saveTaskFile(
|
||||
*/
|
||||
export function createIssueFromTask(task: string): number | undefined {
|
||||
info('Creating GitHub Issue...');
|
||||
const firstLine = task.split('\n')[0] || task;
|
||||
const title = firstLine.length > 100 ? `${firstLine.slice(0, 97)}...` : firstLine;
|
||||
const titleLine = task.split('\n')[0] || task;
|
||||
const title = titleLine.length > 100 ? `${titleLine.slice(0, 97)}...` : titleLine;
|
||||
const issueResult = createIssue({ title, body: task });
|
||||
if (issueResult.success) {
|
||||
success(`Issue created: ${issueResult.url}`);
|
||||
|
||||
@ -3,6 +3,7 @@
|
||||
*/
|
||||
|
||||
import { readFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { PieceEngine, type IterationLimitRequest, type UserInputRequest } from '../../../core/piece/index.js';
|
||||
import type { PieceConfig } from '../../../core/models/index.js';
|
||||
import type { PieceExecutionResult, PieceExecutionOptions } from './types.js';
|
||||
@ -17,7 +18,7 @@ import {
|
||||
updatePersonaSession,
|
||||
loadWorktreeSessions,
|
||||
updateWorktreeSession,
|
||||
loadGlobalConfig,
|
||||
resolvePieceConfigValues,
|
||||
saveSessionState,
|
||||
type SessionState,
|
||||
} from '../../../infra/config/index.js';
|
||||
@ -72,6 +73,17 @@ import { buildRunPaths } from '../../../core/piece/run/run-paths.js';
|
||||
import { resolveMovementProviderModel } from '../../../core/piece/provider-resolution.js';
|
||||
import { resolveRuntimeConfig } from '../../../core/runtime/runtime-environment.js';
|
||||
import { writeFileAtomic, ensureDir } from '../../../infra/config/index.js';
|
||||
import { getGlobalConfigDir } from '../../../infra/config/paths.js';
|
||||
import {
|
||||
initAnalyticsWriter,
|
||||
writeAnalyticsEvent,
|
||||
parseFindingsFromReport,
|
||||
extractDecisionFromReport,
|
||||
inferSeverity,
|
||||
emitFixActionEvents,
|
||||
emitRebuttalEvents,
|
||||
} from '../../analytics/index.js';
|
||||
import type { MovementResultEvent, ReviewFindingEvent } from '../../analytics/index.js';
|
||||
|
||||
const log = createLogger('piece');
|
||||
|
||||
@ -317,13 +329,16 @@ export async function executePiece(
|
||||
|
||||
// Load saved agent sessions only on retry; normal runs start with empty sessions
|
||||
const isWorktree = cwd !== projectCwd;
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const globalConfig = resolvePieceConfigValues(
|
||||
projectCwd,
|
||||
['notificationSound', 'notificationSoundEvents', 'provider', 'runtime', 'preventSleep', 'model', 'observability', 'analytics'],
|
||||
);
|
||||
const shouldNotify = globalConfig.notificationSound !== false;
|
||||
const notificationSoundEvents = globalConfig.notificationSoundEvents;
|
||||
const shouldNotifyIterationLimit = shouldNotify && notificationSoundEvents?.iterationLimit !== false;
|
||||
const shouldNotifyPieceComplete = shouldNotify && notificationSoundEvents?.pieceComplete !== false;
|
||||
const shouldNotifyPieceAbort = shouldNotify && notificationSoundEvents?.pieceAbort !== false;
|
||||
const currentProvider = globalConfig.provider ?? 'claude';
|
||||
const currentProvider = globalConfig.provider;
|
||||
const effectivePieceConfig: PieceConfig = {
|
||||
...pieceConfig,
|
||||
runtime: resolveRuntimeConfig(globalConfig.runtime, pieceConfig.runtime),
|
||||
@ -337,6 +352,11 @@ export async function executePiece(
|
||||
enabled: isProviderEventsEnabled(globalConfig),
|
||||
});
|
||||
|
||||
const analyticsEnabled = globalConfig.analytics?.enabled === true;
|
||||
const eventsDir = globalConfig.analytics?.eventsPath
|
||||
?? join(getGlobalConfigDir(), 'analytics', 'events');
|
||||
initAnalyticsWriter(analyticsEnabled, eventsDir);
|
||||
|
||||
// Prevent macOS idle sleep if configured
|
||||
if (globalConfig.preventSleep) {
|
||||
preventSleep();
|
||||
@ -424,6 +444,8 @@ export async function executePiece(
|
||||
let lastMovementContent: string | undefined;
|
||||
let lastMovementName: string | undefined;
|
||||
let currentIteration = 0;
|
||||
let currentMovementProvider = currentProvider;
|
||||
let currentMovementModel = globalConfig.model ?? '(default)';
|
||||
const phasePrompts = new Map<string, string>();
|
||||
const movementIterations = new Map<string, number>();
|
||||
let engine: PieceEngine | null = null;
|
||||
@ -443,12 +465,10 @@ export async function executePiece(
|
||||
projectCwd,
|
||||
language: options.language,
|
||||
provider: options.provider,
|
||||
projectProvider: options.projectProvider,
|
||||
globalProvider: options.globalProvider,
|
||||
model: options.model,
|
||||
providerOptions: options.providerOptions,
|
||||
personaProviders: options.personaProviders,
|
||||
projectProviderProfiles: options.projectProviderProfiles,
|
||||
globalProviderProfiles: options.globalProviderProfiles,
|
||||
providerProfiles: options.providerProfiles,
|
||||
interactive: interactiveUserInput,
|
||||
detectRuleIndex,
|
||||
callAiJudge,
|
||||
@ -529,6 +549,8 @@ export async function executePiece(
|
||||
});
|
||||
const movementProvider = resolved.provider ?? currentProvider;
|
||||
const movementModel = resolved.model ?? globalConfig.model ?? '(default)';
|
||||
currentMovementProvider = movementProvider;
|
||||
currentMovementModel = movementModel;
|
||||
providerEventLogger.setMovement(step.name);
|
||||
providerEventLogger.setProvider(movementProvider);
|
||||
out.info(`Provider: ${movementProvider}`);
|
||||
@ -627,15 +649,60 @@ export async function executePiece(
|
||||
};
|
||||
appendNdjsonLine(ndjsonLogPath, record);
|
||||
|
||||
const decisionTag = (response.matchedRuleIndex != null && step.rules)
|
||||
? (step.rules[response.matchedRuleIndex]?.condition ?? response.status)
|
||||
: response.status;
|
||||
const movementResultEvent: MovementResultEvent = {
|
||||
type: 'movement_result',
|
||||
movement: step.name,
|
||||
provider: currentMovementProvider,
|
||||
model: currentMovementModel,
|
||||
decisionTag,
|
||||
iteration: currentIteration,
|
||||
runId: runSlug,
|
||||
timestamp: response.timestamp.toISOString(),
|
||||
};
|
||||
writeAnalyticsEvent(movementResultEvent);
|
||||
|
||||
if (step.edit === true && step.name.includes('fix')) {
|
||||
emitFixActionEvents(response.content, currentIteration, runSlug, response.timestamp);
|
||||
}
|
||||
|
||||
if (step.name.includes('no_fix')) {
|
||||
emitRebuttalEvents(response.content, currentIteration, runSlug, response.timestamp);
|
||||
}
|
||||
|
||||
// Update in-memory log for pointer metadata (immutable)
|
||||
sessionLog = { ...sessionLog, iterations: sessionLog.iterations + 1 };
|
||||
});
|
||||
|
||||
engine.on('movement:report', (_step, filePath, fileName) => {
|
||||
engine.on('movement:report', (step, filePath, fileName) => {
|
||||
const content = readFileSync(filePath, 'utf-8');
|
||||
out.logLine(`\n📄 Report: ${fileName}\n`);
|
||||
out.logLine(content);
|
||||
|
||||
if (step.edit === false) {
|
||||
const decision = extractDecisionFromReport(content);
|
||||
if (decision) {
|
||||
const findings = parseFindingsFromReport(content);
|
||||
for (const finding of findings) {
|
||||
const event: ReviewFindingEvent = {
|
||||
type: 'review_finding',
|
||||
findingId: finding.findingId,
|
||||
status: finding.status,
|
||||
ruleId: finding.ruleId,
|
||||
severity: inferSeverity(finding.findingId),
|
||||
decision,
|
||||
file: finding.file,
|
||||
line: finding.line,
|
||||
iteration: currentIteration,
|
||||
runId: runSlug,
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
writeAnalyticsEvent(event);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
engine.on('piece:complete', (state) => {
|
||||
|
||||
@ -5,7 +5,7 @@
|
||||
* instructBranch (instruct mode from takt list).
|
||||
*/
|
||||
|
||||
import { loadGlobalConfig } from '../../../infra/config/index.js';
|
||||
import { resolvePieceConfigValue } from '../../../infra/config/index.js';
|
||||
import { confirm } from '../../../shared/prompt/index.js';
|
||||
import { autoCommitAndPush } from '../../../infra/task/index.js';
|
||||
import { info, error, success } from '../../../shared/ui/index.js';
|
||||
@ -18,16 +18,15 @@ const log = createLogger('postExecution');
|
||||
/**
|
||||
* Resolve auto-PR setting with priority: CLI option > config > prompt.
|
||||
*/
|
||||
export async function resolveAutoPr(optionAutoPr: boolean | undefined): Promise<boolean> {
|
||||
export async function resolveAutoPr(optionAutoPr: boolean | undefined, cwd: string): Promise<boolean> {
|
||||
if (typeof optionAutoPr === 'boolean') {
|
||||
return optionAutoPr;
|
||||
}
|
||||
|
||||
const globalConfig = loadGlobalConfig();
|
||||
if (typeof globalConfig.autoPr === 'boolean') {
|
||||
return globalConfig.autoPr;
|
||||
const autoPr = resolvePieceConfigValue(cwd, 'autoPr');
|
||||
if (typeof autoPr === 'boolean') {
|
||||
return autoPr;
|
||||
}
|
||||
|
||||
return confirm('Create pull request?', true);
|
||||
}
|
||||
|
||||
|
||||
@ -4,7 +4,7 @@
|
||||
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
import { loadGlobalConfig } from '../../../infra/config/index.js';
|
||||
import { resolvePieceConfigValue } from '../../../infra/config/index.js';
|
||||
import { type TaskInfo, createSharedClone, summarizeTaskName, getCurrentBranch } from '../../../infra/task/index.js';
|
||||
import { withProgress } from '../../../shared/ui/index.js';
|
||||
import { getTaskSlugFromTaskDir } from '../../../shared/utils/taskPaths.js';
|
||||
@ -104,7 +104,7 @@ export async function resolveTaskExecution(
|
||||
worktreePath = task.worktreePath;
|
||||
isWorktree = true;
|
||||
} else {
|
||||
const taskSlug = await withProgress(
|
||||
const taskSlug = task.slug ?? await withProgress(
|
||||
'Generating branch name...',
|
||||
(slug) => `Branch name generated: ${slug}`,
|
||||
() => summarizeTaskName(task.content, { cwd: defaultCwd }),
|
||||
@ -141,8 +141,7 @@ export async function resolveTaskExecution(
|
||||
if (data.auto_pr !== undefined) {
|
||||
autoPr = data.auto_pr;
|
||||
} else {
|
||||
const globalConfig = loadGlobalConfig();
|
||||
autoPr = globalConfig.autoPr ?? false;
|
||||
autoPr = resolvePieceConfigValue(defaultCwd, 'autoPr') ?? false;
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@ -72,7 +72,7 @@ export async function confirmAndCreateWorktree(
|
||||
}),
|
||||
);
|
||||
|
||||
return { execCwd: result.path, isWorktree: true, branch: result.branch, baseBranch };
|
||||
return { execCwd: result.path, isWorktree: true, branch: result.branch, baseBranch, taskSlug };
|
||||
}
|
||||
|
||||
/**
|
||||
@ -92,7 +92,7 @@ export async function selectAndExecuteTask(
|
||||
return;
|
||||
}
|
||||
|
||||
const { execCwd, isWorktree, branch, baseBranch } = await confirmAndCreateWorktree(
|
||||
const { execCwd, isWorktree, branch, baseBranch, taskSlug } = await confirmAndCreateWorktree(
|
||||
cwd,
|
||||
task,
|
||||
options?.createWorktree,
|
||||
@ -101,7 +101,7 @@ export async function selectAndExecuteTask(
|
||||
// Ask for PR creation BEFORE execution (only if worktree is enabled)
|
||||
let shouldCreatePr = false;
|
||||
if (isWorktree) {
|
||||
shouldCreatePr = await resolveAutoPr(options?.autoPr);
|
||||
shouldCreatePr = await resolveAutoPr(options?.autoPr, cwd);
|
||||
}
|
||||
|
||||
log.info('Starting task execution', { piece: pieceIdentifier, worktree: isWorktree, autoPr: shouldCreatePr });
|
||||
@ -112,6 +112,7 @@ export async function selectAndExecuteTask(
|
||||
...(branch ? { branch } : {}),
|
||||
...(isWorktree ? { worktree_path: execCwd } : {}),
|
||||
auto_pr: shouldCreatePr,
|
||||
...(taskSlug ? { slug: taskSlug } : {}),
|
||||
});
|
||||
const startedAt = new Date().toISOString();
|
||||
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
* Session management helpers for agent execution
|
||||
*/
|
||||
|
||||
import { loadPersonaSessions, updatePersonaSession, loadGlobalConfig } from '../../../infra/config/index.js';
|
||||
import { loadPersonaSessions, updatePersonaSession, resolvePieceConfigValue } from '../../../infra/config/index.js';
|
||||
import type { AgentResponse } from '../../../core/models/index.js';
|
||||
|
||||
/**
|
||||
@ -15,7 +15,7 @@ export async function withPersonaSession(
|
||||
fn: (sessionId?: string) => Promise<AgentResponse>,
|
||||
provider?: string
|
||||
): Promise<AgentResponse> {
|
||||
const resolvedProvider = provider ?? loadGlobalConfig().provider ?? 'claude';
|
||||
const resolvedProvider = provider ?? resolvePieceConfigValue(cwd, 'provider');
|
||||
const sessions = loadPersonaSessions(cwd, resolvedProvider);
|
||||
const sessionId = sessions[personaName];
|
||||
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
* Task execution logic
|
||||
*/
|
||||
|
||||
import { loadPieceByIdentifier, isPiecePath, loadGlobalConfig, loadProjectConfig } from '../../../infra/config/index.js';
|
||||
import { loadPieceByIdentifier, isPiecePath, resolvePieceConfigValues } from '../../../infra/config/index.js';
|
||||
import { TaskRunner, type TaskInfo } from '../../../infra/task/index.js';
|
||||
import {
|
||||
header,
|
||||
@ -86,18 +86,22 @@ async function executeTaskWithResult(options: ExecuteTaskOptions): Promise<Piece
|
||||
movements: pieceConfig.movements.map((s: { name: string }) => s.name),
|
||||
});
|
||||
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const projectConfig = loadProjectConfig(projectCwd);
|
||||
const config = resolvePieceConfigValues(projectCwd, [
|
||||
'language',
|
||||
'provider',
|
||||
'model',
|
||||
'providerOptions',
|
||||
'personaProviders',
|
||||
'providerProfiles',
|
||||
]);
|
||||
return await executePiece(pieceConfig, task, cwd, {
|
||||
projectCwd,
|
||||
language: globalConfig.language,
|
||||
provider: agentOverrides?.provider,
|
||||
projectProvider: projectConfig.provider,
|
||||
globalProvider: globalConfig.provider,
|
||||
model: agentOverrides?.model,
|
||||
personaProviders: globalConfig.personaProviders,
|
||||
projectProviderProfiles: projectConfig.providerProfiles,
|
||||
globalProviderProfiles: globalConfig.providerProfiles,
|
||||
language: config.language,
|
||||
provider: agentOverrides?.provider ?? config.provider,
|
||||
model: agentOverrides?.model ?? config.model,
|
||||
providerOptions: config.providerOptions,
|
||||
personaProviders: config.personaProviders,
|
||||
providerProfiles: config.providerProfiles,
|
||||
interactiveUserInput,
|
||||
interactiveMetadata,
|
||||
startMovement,
|
||||
@ -234,7 +238,10 @@ export async function runAllTasks(
|
||||
options?: TaskExecutionOptions,
|
||||
): Promise<void> {
|
||||
const taskRunner = new TaskRunner(cwd);
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const globalConfig = resolvePieceConfigValues(
|
||||
cwd,
|
||||
['notificationSound', 'notificationSoundEvents', 'concurrency', 'taskPollIntervalMs'],
|
||||
);
|
||||
const shouldNotifyRunComplete = globalConfig.notificationSound !== false
|
||||
&& globalConfig.notificationSoundEvents?.runComplete !== false;
|
||||
const shouldNotifyRunAbort = globalConfig.notificationSound !== false
|
||||
|
||||
@ -4,6 +4,7 @@
|
||||
|
||||
import type { Language } from '../../../core/models/index.js';
|
||||
import type { ProviderPermissionProfiles } from '../../../core/models/provider-profiles.js';
|
||||
import type { MovementProviderOptions } from '../../../core/models/piece-types.js';
|
||||
import type { ProviderType } from '../../../infra/providers/index.js';
|
||||
import type { GitHubIssue } from '../../../infra/github/index.js';
|
||||
|
||||
@ -32,17 +33,13 @@ export interface PieceExecutionOptions {
|
||||
/** Language for instruction metadata */
|
||||
language?: Language;
|
||||
provider?: ProviderType;
|
||||
/** Project config provider */
|
||||
projectProvider?: ProviderType;
|
||||
/** Global config provider */
|
||||
globalProvider?: ProviderType;
|
||||
model?: string;
|
||||
/** Resolved provider options */
|
||||
providerOptions?: MovementProviderOptions;
|
||||
/** Per-persona provider overrides (e.g., { coder: 'codex' }) */
|
||||
personaProviders?: Record<string, ProviderType>;
|
||||
/** Project-level provider permission profiles */
|
||||
projectProviderProfiles?: ProviderPermissionProfiles;
|
||||
/** Global-level provider permission profiles */
|
||||
globalProviderProfiles?: ProviderPermissionProfiles;
|
||||
/** Resolved provider permission profiles */
|
||||
providerProfiles?: ProviderPermissionProfiles;
|
||||
/** Enable interactive user input during step transitions */
|
||||
interactiveUserInput?: boolean;
|
||||
/** Interactive mode result metadata for NDJSON logging */
|
||||
@ -125,6 +122,7 @@ export interface WorktreeConfirmationResult {
|
||||
isWorktree: boolean;
|
||||
branch?: string;
|
||||
baseBranch?: string;
|
||||
taskSlug?: string;
|
||||
}
|
||||
|
||||
export interface SelectAndExecuteOptions {
|
||||
|
||||
@ -26,7 +26,7 @@ import {
|
||||
import { deletePendingTask, deleteFailedTask, deleteCompletedTask } from './taskDeleteActions.js';
|
||||
import { retryFailedTask } from './taskRetryActions.js';
|
||||
import { listTasksNonInteractive, type ListNonInteractiveOptions } from './listNonInteractive.js';
|
||||
import { formatTaskStatusLabel } from './taskStatusLabel.js';
|
||||
import { formatTaskStatusLabel, formatShortDate } from './taskStatusLabel.js';
|
||||
|
||||
export type { ListNonInteractiveOptions } from './listNonInteractive.js';
|
||||
|
||||
@ -130,7 +130,7 @@ export async function listTasks(
|
||||
const menuOptions = tasks.map((task, idx) => ({
|
||||
label: formatTaskStatusLabel(task),
|
||||
value: `${task.kind}:${idx}`,
|
||||
description: `${task.content} | ${task.createdAt}`,
|
||||
description: `${task.summary ?? task.content} | ${formatShortDate(task.createdAt)}`,
|
||||
}));
|
||||
|
||||
const selected = await selectOption<string>(
|
||||
|
||||
@ -21,7 +21,7 @@ import { createSelectActionWithoutExecute, buildReplayHint } from '../../interac
|
||||
import { type RunSessionContext, formatRunSessionForPrompt } from '../../interactive/runSessionReader.js';
|
||||
import { loadTemplate } from '../../../shared/prompts/index.js';
|
||||
import { getLabelObject } from '../../../shared/i18n/index.js';
|
||||
import { loadGlobalConfig } from '../../../infra/config/index.js';
|
||||
import { resolvePieceConfigValues } from '../../../infra/config/index.js';
|
||||
|
||||
export type InstructModeAction = 'execute' | 'save_task' | 'cancel';
|
||||
|
||||
@ -96,7 +96,7 @@ export async function runInstructMode(
|
||||
runSessionContext?: RunSessionContext,
|
||||
previousOrderContent?: string | null,
|
||||
): Promise<InstructModeResult> {
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const globalConfig = resolvePieceConfigValues(cwd, ['language', 'provider']);
|
||||
const lang = resolveLanguage(globalConfig.language);
|
||||
|
||||
if (!globalConfig.provider) {
|
||||
|
||||
@ -18,7 +18,7 @@ import {
|
||||
mergeBranch,
|
||||
deleteBranch,
|
||||
} from './taskActions.js';
|
||||
import { formatTaskStatusLabel } from './taskStatusLabel.js';
|
||||
import { formatTaskStatusLabel, formatShortDate } from './taskStatusLabel.js';
|
||||
|
||||
export interface ListNonInteractiveOptions {
|
||||
enabled: boolean;
|
||||
@ -43,7 +43,7 @@ function printNonInteractiveList(tasks: TaskListItem[], format?: string): void {
|
||||
}
|
||||
|
||||
for (const task of tasks) {
|
||||
info(`${formatTaskStatusLabel(task)} - ${task.content} (${task.createdAt})`);
|
||||
info(`${formatTaskStatusLabel(task)} - ${task.summary ?? task.content} (${formatShortDate(task.createdAt)})`);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -11,7 +11,7 @@ import {
|
||||
TaskRunner,
|
||||
detectDefaultBranch,
|
||||
} from '../../../infra/task/index.js';
|
||||
import { loadGlobalConfig, getPieceDescription } from '../../../infra/config/index.js';
|
||||
import { resolvePieceConfigValues, getPieceDescription } from '../../../infra/config/index.js';
|
||||
import { info, error as logError } from '../../../shared/ui/index.js';
|
||||
import { createLogger, getErrorMessage } from '../../../shared/utils/index.js';
|
||||
import { runInstructMode } from './instructMode.js';
|
||||
@ -93,7 +93,7 @@ export async function instructBranch(
|
||||
return false;
|
||||
}
|
||||
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const globalConfig = resolvePieceConfigValues(projectDir, ['interactivePreviewMovements', 'language']);
|
||||
const pieceDesc = getPieceDescription(selectedPiece, projectDir, globalConfig.interactivePreviewMovements);
|
||||
const pieceContext: PieceContext = {
|
||||
name: pieceDesc.name,
|
||||
|
||||
@ -8,7 +8,7 @@
|
||||
import * as fs from 'node:fs';
|
||||
import type { TaskListItem } from '../../../infra/task/index.js';
|
||||
import { TaskRunner } from '../../../infra/task/index.js';
|
||||
import { loadPieceByIdentifier, loadGlobalConfig, getPieceDescription } from '../../../infra/config/index.js';
|
||||
import { loadPieceByIdentifier, resolvePieceConfigValue, getPieceDescription } from '../../../infra/config/index.js';
|
||||
import { selectPiece } from '../../pieceSelection/index.js';
|
||||
import { selectOption } from '../../../shared/prompt/index.js';
|
||||
import { info, header, blankLine, status } from '../../../shared/ui/index.js';
|
||||
@ -134,7 +134,7 @@ export async function retryFailedTask(
|
||||
return false;
|
||||
}
|
||||
|
||||
const globalConfig = loadGlobalConfig();
|
||||
const previewCount = resolvePieceConfigValue(projectDir, 'interactivePreviewMovements');
|
||||
const pieceConfig = loadPieceByIdentifier(selectedPiece, projectDir);
|
||||
|
||||
if (!pieceConfig) {
|
||||
@ -146,7 +146,7 @@ export async function retryFailedTask(
|
||||
return false;
|
||||
}
|
||||
|
||||
const pieceDesc = getPieceDescription(selectedPiece, projectDir, globalConfig.interactivePreviewMovements);
|
||||
const pieceDesc = getPieceDescription(selectedPiece, projectDir, previewCount);
|
||||
const pieceContext = {
|
||||
name: pieceDesc.name,
|
||||
description: pieceDesc.description,
|
||||
|
||||
@ -8,5 +8,18 @@ const TASK_STATUS_BY_KIND: Record<TaskListItem['kind'], string> = {
|
||||
};
|
||||
|
||||
export function formatTaskStatusLabel(task: TaskListItem): string {
|
||||
return `[${TASK_STATUS_BY_KIND[task.kind]}] ${task.name}`;
|
||||
const status = `[${TASK_STATUS_BY_KIND[task.kind]}] ${task.name}`;
|
||||
if (task.branch) {
|
||||
return `${status} (${task.branch})`;
|
||||
}
|
||||
return status;
|
||||
}
|
||||
|
||||
export function formatShortDate(isoString: string): string {
|
||||
const date = new Date(isoString);
|
||||
const month = String(date.getUTCMonth() + 1).padStart(2, '0');
|
||||
const day = String(date.getUTCDate()).padStart(2, '0');
|
||||
const hours = String(date.getUTCHours()).padStart(2, '0');
|
||||
const minutes = String(date.getUTCMinutes()).padStart(2, '0');
|
||||
return `${month}/${day} ${hours}:${minutes}`;
|
||||
}
|
||||
|
||||
@ -6,7 +6,7 @@
|
||||
*/
|
||||
|
||||
import { TaskRunner, type TaskInfo, TaskWatcher } from '../../../infra/task/index.js';
|
||||
import { getCurrentPiece } from '../../../infra/config/index.js';
|
||||
import { resolveConfigValue } from '../../../infra/config/index.js';
|
||||
import {
|
||||
header,
|
||||
info,
|
||||
@ -15,7 +15,6 @@ import {
|
||||
blankLine,
|
||||
} from '../../../shared/ui/index.js';
|
||||
import { executeAndCompleteTask } from '../execute/taskExecution.js';
|
||||
import { DEFAULT_PIECE_NAME } from '../../../shared/constants.js';
|
||||
import { EXIT_SIGINT } from '../../../shared/exitCodes.js';
|
||||
import { ShutdownManager } from '../execute/shutdownManager.js';
|
||||
import type { TaskExecutionOptions } from '../execute/types.js';
|
||||
@ -25,7 +24,7 @@ import type { TaskExecutionOptions } from '../execute/types.js';
|
||||
* Runs until Ctrl+C.
|
||||
*/
|
||||
export async function watchTasks(cwd: string, options?: TaskExecutionOptions): Promise<void> {
|
||||
const pieceName = getCurrentPiece(cwd) || DEFAULT_PIECE_NAME;
|
||||
const pieceName = resolveConfigValue(cwd, 'piece');
|
||||
const taskRunner = new TaskRunner(cwd);
|
||||
const watcher = new TaskWatcher(cwd);
|
||||
const recovered = taskRunner.recoverInterruptedRunningTasks();
|
||||
|
||||
@ -30,10 +30,11 @@ export {
|
||||
} from './infra/config/loaders/index.js';
|
||||
export type { PieceSource, PieceWithSource, PieceDirEntry } from './infra/config/loaders/index.js';
|
||||
export {
|
||||
loadProjectConfig,
|
||||
loadConfig,
|
||||
} from './infra/config/loadConfig.js';
|
||||
export {
|
||||
saveProjectConfig,
|
||||
updateProjectConfig,
|
||||
getCurrentPiece,
|
||||
setCurrentPiece,
|
||||
isVerboseMode,
|
||||
type ProjectLocalConfig,
|
||||
|
||||
@ -141,7 +141,7 @@ export interface ClaudeCallOptions {
|
||||
onPermissionRequest?: PermissionHandler;
|
||||
/** Custom handler for AskUserQuestion tool */
|
||||
onAskUserQuestion?: AskUserQuestionHandler;
|
||||
/** Bypass all permission checks (sacrifice-my-pc mode) */
|
||||
/** Bypass all permission checks */
|
||||
bypassPermissions?: boolean;
|
||||
/** Anthropic API key to inject via env (bypasses CLI auth) */
|
||||
anthropicApiKey?: string;
|
||||
@ -172,7 +172,7 @@ export interface ClaudeSpawnOptions {
|
||||
onPermissionRequest?: PermissionHandler;
|
||||
/** Custom handler for AskUserQuestion tool */
|
||||
onAskUserQuestion?: AskUserQuestionHandler;
|
||||
/** Bypass all permission checks (sacrifice-my-pc mode) */
|
||||
/** Bypass all permission checks */
|
||||
bypassPermissions?: boolean;
|
||||
/** Anthropic API key to inject via env (bypasses CLI auth) */
|
||||
anthropicApiKey?: string;
|
||||
|
||||
142
src/infra/config/env/config-env-overrides.ts
vendored
Normal file
142
src/infra/config/env/config-env-overrides.ts
vendored
Normal file
@ -0,0 +1,142 @@
|
||||
type EnvValueType = 'string' | 'boolean' | 'number' | 'json';
|
||||
|
||||
interface EnvSpec {
|
||||
path: string;
|
||||
type: EnvValueType;
|
||||
}
|
||||
|
||||
function normalizeEnvSegment(segment: string): string {
|
||||
return segment
|
||||
.replace(/([a-z0-9])([A-Z])/g, '$1_$2')
|
||||
.replace(/[^a-zA-Z0-9]+/g, '_')
|
||||
.replace(/_+/g, '_')
|
||||
.replace(/^_|_$/g, '')
|
||||
.toUpperCase();
|
||||
}
|
||||
|
||||
export function envVarNameFromPath(path: string): string {
|
||||
const key = path
|
||||
.split('.')
|
||||
.map(normalizeEnvSegment)
|
||||
.filter((segment) => segment.length > 0)
|
||||
.join('_');
|
||||
return `TAKT_${key}`;
|
||||
}
|
||||
|
||||
function parseEnvValue(envKey: string, raw: string, type: EnvValueType): unknown {
|
||||
if (type === 'string') {
|
||||
return raw;
|
||||
}
|
||||
if (type === 'boolean') {
|
||||
const normalized = raw.trim().toLowerCase();
|
||||
if (normalized === 'true') return true;
|
||||
if (normalized === 'false') return false;
|
||||
throw new Error(`${envKey} must be one of: true, false`);
|
||||
}
|
||||
if (type === 'number') {
|
||||
const trimmed = raw.trim();
|
||||
const value = Number(trimmed);
|
||||
if (!Number.isFinite(value)) {
|
||||
throw new Error(`${envKey} must be a number`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
try {
|
||||
return JSON.parse(raw);
|
||||
} catch {
|
||||
throw new Error(`${envKey} must be valid JSON`);
|
||||
}
|
||||
}
|
||||
|
||||
function setNested(target: Record<string, unknown>, path: string, value: unknown): void {
|
||||
const parts = path.split('.');
|
||||
let current: Record<string, unknown> = target;
|
||||
for (let i = 0; i < parts.length - 1; i++) {
|
||||
const part = parts[i];
|
||||
if (!part) continue;
|
||||
const next = current[part];
|
||||
if (typeof next !== 'object' || next === null || Array.isArray(next)) {
|
||||
current[part] = {};
|
||||
}
|
||||
current = current[part] as Record<string, unknown>;
|
||||
}
|
||||
const leaf = parts[parts.length - 1];
|
||||
if (!leaf) return;
|
||||
current[leaf] = value;
|
||||
}
|
||||
|
||||
function applyEnvOverrides(target: Record<string, unknown>, specs: readonly EnvSpec[]): void {
|
||||
for (const spec of specs) {
|
||||
const envKey = envVarNameFromPath(spec.path);
|
||||
const raw = process.env[envKey];
|
||||
if (raw === undefined) continue;
|
||||
const parsedValue = parseEnvValue(envKey, raw, spec.type);
|
||||
setNested(target, spec.path, parsedValue);
|
||||
}
|
||||
}
|
||||
|
||||
const GLOBAL_ENV_SPECS: readonly EnvSpec[] = [
|
||||
{ path: 'language', type: 'string' },
|
||||
{ path: 'log_level', type: 'string' },
|
||||
{ path: 'provider', type: 'string' },
|
||||
{ path: 'model', type: 'string' },
|
||||
{ path: 'observability', type: 'json' },
|
||||
{ path: 'observability.provider_events', type: 'boolean' },
|
||||
{ path: 'worktree_dir', type: 'string' },
|
||||
{ path: 'auto_pr', type: 'boolean' },
|
||||
{ path: 'disabled_builtins', type: 'json' },
|
||||
{ path: 'enable_builtin_pieces', type: 'boolean' },
|
||||
{ path: 'anthropic_api_key', type: 'string' },
|
||||
{ path: 'openai_api_key', type: 'string' },
|
||||
{ path: 'codex_cli_path', type: 'string' },
|
||||
{ path: 'opencode_api_key', type: 'string' },
|
||||
{ path: 'pipeline', type: 'json' },
|
||||
{ path: 'pipeline.default_branch_prefix', type: 'string' },
|
||||
{ path: 'pipeline.commit_message_template', type: 'string' },
|
||||
{ path: 'pipeline.pr_body_template', type: 'string' },
|
||||
{ path: 'minimal_output', type: 'boolean' },
|
||||
{ path: 'bookmarks_file', type: 'string' },
|
||||
{ path: 'piece_categories_file', type: 'string' },
|
||||
{ path: 'persona_providers', type: 'json' },
|
||||
{ path: 'provider_options', type: 'json' },
|
||||
{ path: 'provider_options.codex.network_access', type: 'boolean' },
|
||||
{ path: 'provider_options.opencode.network_access', type: 'boolean' },
|
||||
{ path: 'provider_options.claude.sandbox.allow_unsandboxed_commands', type: 'boolean' },
|
||||
{ path: 'provider_options.claude.sandbox.excluded_commands', type: 'json' },
|
||||
{ path: 'provider_profiles', type: 'json' },
|
||||
{ path: 'runtime', type: 'json' },
|
||||
{ path: 'runtime.prepare', type: 'json' },
|
||||
{ path: 'branch_name_strategy', type: 'string' },
|
||||
{ path: 'prevent_sleep', type: 'boolean' },
|
||||
{ path: 'notification_sound', type: 'boolean' },
|
||||
{ path: 'notification_sound_events', type: 'json' },
|
||||
{ path: 'notification_sound_events.iteration_limit', type: 'boolean' },
|
||||
{ path: 'notification_sound_events.piece_complete', type: 'boolean' },
|
||||
{ path: 'notification_sound_events.piece_abort', type: 'boolean' },
|
||||
{ path: 'notification_sound_events.run_complete', type: 'boolean' },
|
||||
{ path: 'notification_sound_events.run_abort', type: 'boolean' },
|
||||
{ path: 'interactive_preview_movements', type: 'number' },
|
||||
{ path: 'verbose', type: 'boolean' },
|
||||
{ path: 'concurrency', type: 'number' },
|
||||
{ path: 'task_poll_interval_ms', type: 'number' },
|
||||
];
|
||||
|
||||
const PROJECT_ENV_SPECS: readonly EnvSpec[] = [
|
||||
{ path: 'piece', type: 'string' },
|
||||
{ path: 'provider', type: 'string' },
|
||||
{ path: 'verbose', type: 'boolean' },
|
||||
{ path: 'provider_options', type: 'json' },
|
||||
{ path: 'provider_options.codex.network_access', type: 'boolean' },
|
||||
{ path: 'provider_options.opencode.network_access', type: 'boolean' },
|
||||
{ path: 'provider_options.claude.sandbox.allow_unsandboxed_commands', type: 'boolean' },
|
||||
{ path: 'provider_options.claude.sandbox.excluded_commands', type: 'json' },
|
||||
{ path: 'provider_profiles', type: 'json' },
|
||||
];
|
||||
|
||||
export function applyGlobalConfigEnvOverrides(target: Record<string, unknown>): void {
|
||||
applyEnvOverrides(target, GLOBAL_ENV_SPECS);
|
||||
}
|
||||
|
||||
export function applyProjectConfigEnvOverrides(target: Record<string, unknown>): void {
|
||||
applyEnvOverrides(target, PROJECT_ENV_SPECS);
|
||||
}
|
||||
@ -15,6 +15,7 @@ import { normalizeProviderOptions } from '../loaders/pieceParser.js';
|
||||
import { getGlobalConfigPath } from '../paths.js';
|
||||
import { DEFAULT_LANGUAGE } from '../../../shared/constants.js';
|
||||
import { parseProviderModel } from '../../../shared/utils/providerModel.js';
|
||||
import { applyGlobalConfigEnvOverrides, envVarNameFromPath } from '../env/config-env-overrides.js';
|
||||
|
||||
/** Claude-specific model aliases that are not valid for other providers */
|
||||
const CLAUDE_MODEL_ALIASES = new Set(['opus', 'sonnet', 'haiku']);
|
||||
@ -107,20 +108,6 @@ function denormalizeProviderProfiles(
|
||||
}])) as Record<string, { default_permission_mode: string; movement_permission_overrides?: Record<string, string> }>;
|
||||
}
|
||||
|
||||
/** Create default global configuration (fresh instance each call) */
|
||||
function createDefaultGlobalConfig(): GlobalConfig {
|
||||
return {
|
||||
language: DEFAULT_LANGUAGE,
|
||||
defaultPiece: 'default',
|
||||
logLevel: 'info',
|
||||
provider: 'claude',
|
||||
enableBuiltinPieces: true,
|
||||
interactivePreviewMovements: 3,
|
||||
concurrency: 1,
|
||||
taskPollIntervalMs: 500,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Manages global configuration loading and caching.
|
||||
* Singleton — use GlobalConfigManager.getInstance().
|
||||
@ -154,23 +141,34 @@ export class GlobalConfigManager {
|
||||
return this.cachedConfig;
|
||||
}
|
||||
const configPath = getGlobalConfigPath();
|
||||
if (!existsSync(configPath)) {
|
||||
const defaultConfig = createDefaultGlobalConfig();
|
||||
this.cachedConfig = defaultConfig;
|
||||
return defaultConfig;
|
||||
|
||||
const rawConfig: Record<string, unknown> = {};
|
||||
if (existsSync(configPath)) {
|
||||
const content = readFileSync(configPath, 'utf-8');
|
||||
const parsedRaw = parseYaml(content);
|
||||
if (parsedRaw && typeof parsedRaw === 'object' && !Array.isArray(parsedRaw)) {
|
||||
Object.assign(rawConfig, parsedRaw as Record<string, unknown>);
|
||||
} else if (parsedRaw != null) {
|
||||
throw new Error('Configuration error: ~/.takt/config.yaml must be a YAML object.');
|
||||
}
|
||||
}
|
||||
const content = readFileSync(configPath, 'utf-8');
|
||||
const raw = parseYaml(content);
|
||||
const parsed = GlobalConfigSchema.parse(raw);
|
||||
|
||||
applyGlobalConfigEnvOverrides(rawConfig);
|
||||
|
||||
const parsed = GlobalConfigSchema.parse(rawConfig);
|
||||
const config: GlobalConfig = {
|
||||
language: parsed.language,
|
||||
defaultPiece: parsed.default_piece,
|
||||
logLevel: parsed.log_level,
|
||||
provider: parsed.provider,
|
||||
model: parsed.model,
|
||||
observability: parsed.observability ? {
|
||||
providerEvents: parsed.observability.provider_events,
|
||||
} : undefined,
|
||||
analytics: parsed.analytics ? {
|
||||
enabled: parsed.analytics.enabled,
|
||||
eventsPath: parsed.analytics.events_path,
|
||||
retentionDays: parsed.analytics.retention_days,
|
||||
} : undefined,
|
||||
worktreeDir: parsed.worktree_dir,
|
||||
autoPr: parsed.auto_pr,
|
||||
disabledBuiltins: parsed.disabled_builtins,
|
||||
@ -204,6 +202,7 @@ export class GlobalConfigManager {
|
||||
runAbort: parsed.notification_sound_events.run_abort,
|
||||
} : undefined,
|
||||
interactivePreviewMovements: parsed.interactive_preview_movements,
|
||||
verbose: parsed.verbose,
|
||||
concurrency: parsed.concurrency,
|
||||
taskPollIntervalMs: parsed.task_poll_interval_ms,
|
||||
};
|
||||
@ -217,7 +216,6 @@ export class GlobalConfigManager {
|
||||
const configPath = getGlobalConfigPath();
|
||||
const raw: Record<string, unknown> = {
|
||||
language: config.language,
|
||||
default_piece: config.defaultPiece,
|
||||
log_level: config.logLevel,
|
||||
provider: config.provider,
|
||||
};
|
||||
@ -229,6 +227,15 @@ export class GlobalConfigManager {
|
||||
provider_events: config.observability.providerEvents,
|
||||
};
|
||||
}
|
||||
if (config.analytics) {
|
||||
const analyticsRaw: Record<string, unknown> = {};
|
||||
if (config.analytics.enabled !== undefined) analyticsRaw.enabled = config.analytics.enabled;
|
||||
if (config.analytics.eventsPath) analyticsRaw.events_path = config.analytics.eventsPath;
|
||||
if (config.analytics.retentionDays !== undefined) analyticsRaw.retention_days = config.analytics.retentionDays;
|
||||
if (Object.keys(analyticsRaw).length > 0) {
|
||||
raw.analytics = analyticsRaw;
|
||||
}
|
||||
}
|
||||
if (config.worktreeDir) {
|
||||
raw.worktree_dir = config.worktreeDir;
|
||||
}
|
||||
@ -316,6 +323,9 @@ export class GlobalConfigManager {
|
||||
if (config.interactivePreviewMovements !== undefined) {
|
||||
raw.interactive_preview_movements = config.interactivePreviewMovements;
|
||||
}
|
||||
if (config.verbose !== undefined) {
|
||||
raw.verbose = config.verbose;
|
||||
}
|
||||
if (config.concurrency !== undefined && config.concurrency > 1) {
|
||||
raw.concurrency = config.concurrency;
|
||||
}
|
||||
@ -383,7 +393,7 @@ export function setProvider(provider: 'claude' | 'codex' | 'opencode'): void {
|
||||
* Priority: TAKT_ANTHROPIC_API_KEY env var > config.yaml > undefined (CLI auth fallback)
|
||||
*/
|
||||
export function resolveAnthropicApiKey(): string | undefined {
|
||||
const envKey = process.env['TAKT_ANTHROPIC_API_KEY'];
|
||||
const envKey = process.env[envVarNameFromPath('anthropic_api_key')];
|
||||
if (envKey) return envKey;
|
||||
|
||||
try {
|
||||
@ -399,7 +409,7 @@ export function resolveAnthropicApiKey(): string | undefined {
|
||||
* Priority: TAKT_OPENAI_API_KEY env var > config.yaml > undefined (CLI auth fallback)
|
||||
*/
|
||||
export function resolveOpenaiApiKey(): string | undefined {
|
||||
const envKey = process.env['TAKT_OPENAI_API_KEY'];
|
||||
const envKey = process.env[envVarNameFromPath('openai_api_key')];
|
||||
if (envKey) return envKey;
|
||||
|
||||
try {
|
||||
@ -415,7 +425,7 @@ export function resolveOpenaiApiKey(): string | undefined {
|
||||
* Priority: TAKT_CODEX_CLI_PATH env var > config.yaml > undefined (SDK vendored binary fallback)
|
||||
*/
|
||||
export function resolveCodexCliPath(): string | undefined {
|
||||
const envPath = process.env['TAKT_CODEX_CLI_PATH'];
|
||||
const envPath = process.env[envVarNameFromPath('codex_cli_path')];
|
||||
if (envPath !== undefined) {
|
||||
return validateCodexCliPath(envPath, 'TAKT_CODEX_CLI_PATH');
|
||||
}
|
||||
@ -437,7 +447,7 @@ export function resolveCodexCliPath(): string | undefined {
|
||||
* Priority: TAKT_OPENCODE_API_KEY env var > config.yaml > undefined
|
||||
*/
|
||||
export function resolveOpencodeApiKey(): string | undefined {
|
||||
const envKey = process.env['TAKT_OPENCODE_API_KEY'];
|
||||
const envKey = process.env[envVarNameFromPath('opencode_api_key')];
|
||||
if (envKey) return envKey;
|
||||
|
||||
try {
|
||||
@ -447,4 +457,3 @@ export function resolveOpencodeApiKey(): string | undefined {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user