From 889bf4ce1772ad78ede864da906fd34b6e396c61 Mon Sep 17 00:00:00 2001 From: "Arnaud (Martient) Leherpeur" Date: Wed, 18 Mar 2026 18:43:52 +0100 Subject: [PATCH 1/2] Multi-package repository support and update dependencies (#37) * feat: add multi-package repository support and update dependencies Add comprehensive multi-package repository configuration system with scope detection, workflow orchestration, and version management. * chore(fmt): format code with rustfmt * refactor: optimize path comparisons by using Path::new instead of PathBuf::from Replace PathBuf::from conversions with Path::new for more efficient path comparisons across package status, sync, and versioning modules. * fix(core): packages mapping * test: improve cli response * ci: remove mcp checks --- .committy/config.toml | 63 + .dockerignore | 6 + .github/workflows/lint.yml | 73 +- .github/workflows/native-git-verification.yml | 12 + AGENTS.md | 165 ++ CLAUDE.md | 165 ++ Cargo.toml | 31 +- README.md | 28 +- docs/CONFIGURATION.md | 652 +++++++ docs/EXAMPLES.md | 1071 +++++++++++ docs/MULTI_PACKAGE_README.md | 666 +++++++ docs/NATIVE_GIT_E2E.md | 38 + docs/QUICKSTART.md | 462 +++++ docs/USER_GUIDE.md | 706 +++++++ docs/src/content/docs/index.mdx | 6 +- .../docs/reference/agent-workflows.mdx | 120 ++ docs/src/content/docs/reference/ai-flags.mdx | 4 - docs/src/content/docs/reference/amend.mdx | 28 +- docs/src/content/docs/reference/commit.mdx | 31 +- .../content/docs/reference/group-commit.mdx | 5 +- docs/src/content/docs/reference/mcp.mdx | 129 -- docs/src/content/docs/reference/tag.mdx | 35 +- mcp-server-committy/.gitignore | 2 - mcp-server-committy/.npmignore | 5 - mcp-server-committy/LICENSE | 201 -- mcp-server-committy/README.md | 126 -- mcp-server-committy/package-lock.json | 1687 ----------------- mcp-server-committy/package.json | 51 - mcp-server-committy/scripts/fake-committy.mjs | 83 - mcp-server-committy/scripts/test.mjs | 165 -- mcp-server-committy/src/commit_groups.ts | 151 -- 
mcp-server-committy/src/committy.ts | 302 --- mcp-server-committy/src/git.ts | 55 - mcp-server-committy/src/index.ts | 294 --- mcp-server-committy/tsconfig.json | 18 - scripts/verify_native_git_e2e.sh | 12 + src/cli/commands/amend.rs | 70 +- src/cli/commands/branch.rs | 299 ++- src/cli/commands/commit.rs | 329 +++- src/cli/commands/config.rs | 418 ++++ src/cli/commands/group_commit.rs | 135 +- src/cli/commands/init.rs | 309 +++ src/cli/commands/lint.rs | 6 + src/cli/commands/lint_message.rs | 16 +- src/cli/commands/mod.rs | 3 + src/cli/commands/packages.rs | 940 +++++++++ src/cli/commands/tag.rs | 475 ++++- src/cli/mod.rs | 13 +- src/clock.rs | 57 + src/config.rs | 4 + src/config/hierarchy.rs | 142 ++ src/config/repository.rs | 712 +++++++ src/dependency/handlers/dockerfile.rs | 182 ++ src/dependency/handlers/json.rs | 162 ++ src/dependency/handlers/mod.rs | 6 + src/dependency/handlers/toml.rs | 156 ++ src/dependency/handlers/yaml.rs | 165 ++ src/dependency/mod.rs | 4 + src/dependency/updater.rs | 340 ++++ src/git/branch.rs | 52 +- src/git/commit.rs | 112 +- src/git/mod.rs | 15 +- src/git/repository.rs | 37 +- src/git/tag.rs | 246 ++- src/input/prompts.rs | 32 + src/lib.rs | 6 + src/linter/mod.rs | 251 ++- src/main.rs | 17 +- src/packages/cargo.rs | 244 +++ src/packages/detector.rs | 393 ++++ src/packages/mod.rs | 8 + src/packages/npm.rs | 308 +++ src/packages/types.rs | 145 ++ src/scope/detector.rs | 343 ++++ src/scope/matcher.rs | 216 +++ src/scope/mod.rs | 4 + src/versioning/hybrid.rs | 389 ++++ src/versioning/independent.rs | 242 +++ src/versioning/manager.rs | 101 + src/versioning/mod.rs | 6 + src/versioning/unified.rs | 197 ++ src/workflow/mod.rs | 3 + src/workflow/orchestrator.rs | 414 ++++ tests/agent_cli_tests.rs | 417 ++++ tests/common/assert.rs | 0 tests/common/mod.rs | 7 + tests/git_tests.rs | 232 ++- tests/group_commit_tests.rs | 196 +- tests/integration_tests.rs | 38 +- tests/lint_message_cli_tests.rs | 73 +- tests/machine_readable_cli_tests.rs | 272 
+++ tests/main_tests.rs | 20 +- tests/repository_tests.rs | 28 +- tests/tag_tests.rs | 13 +- tests/tag_version_bump_tests.rs | 42 +- tests/test_linter.rs | 95 + tools/native-git-e2e/Dockerfile | 21 + tools/native-git-e2e/entrypoint.sh | 209 ++ 98 files changed, 14192 insertions(+), 3843 deletions(-) create mode 100644 .committy/config.toml create mode 100644 .dockerignore create mode 100644 .github/workflows/native-git-verification.yml create mode 100644 AGENTS.md create mode 100644 CLAUDE.md create mode 100644 docs/CONFIGURATION.md create mode 100644 docs/EXAMPLES.md create mode 100644 docs/MULTI_PACKAGE_README.md create mode 100644 docs/NATIVE_GIT_E2E.md create mode 100644 docs/QUICKSTART.md create mode 100644 docs/USER_GUIDE.md create mode 100644 docs/src/content/docs/reference/agent-workflows.mdx delete mode 100644 docs/src/content/docs/reference/mcp.mdx delete mode 100644 mcp-server-committy/.gitignore delete mode 100644 mcp-server-committy/.npmignore delete mode 100644 mcp-server-committy/LICENSE delete mode 100644 mcp-server-committy/README.md delete mode 100644 mcp-server-committy/package-lock.json delete mode 100644 mcp-server-committy/package.json delete mode 100644 mcp-server-committy/scripts/fake-committy.mjs delete mode 100644 mcp-server-committy/scripts/test.mjs delete mode 100644 mcp-server-committy/src/commit_groups.ts delete mode 100644 mcp-server-committy/src/committy.ts delete mode 100644 mcp-server-committy/src/git.ts delete mode 100644 mcp-server-committy/src/index.ts delete mode 100644 mcp-server-committy/tsconfig.json create mode 100755 scripts/verify_native_git_e2e.sh create mode 100644 src/cli/commands/config.rs create mode 100644 src/cli/commands/init.rs create mode 100644 src/cli/commands/packages.rs create mode 100644 src/clock.rs create mode 100644 src/config/hierarchy.rs create mode 100644 src/config/repository.rs create mode 100644 src/dependency/handlers/dockerfile.rs create mode 100644 src/dependency/handlers/json.rs create mode 
100644 src/dependency/handlers/mod.rs create mode 100644 src/dependency/handlers/toml.rs create mode 100644 src/dependency/handlers/yaml.rs create mode 100644 src/dependency/mod.rs create mode 100644 src/dependency/updater.rs create mode 100644 src/packages/cargo.rs create mode 100644 src/packages/detector.rs create mode 100644 src/packages/mod.rs create mode 100644 src/packages/npm.rs create mode 100644 src/packages/types.rs create mode 100644 src/scope/detector.rs create mode 100644 src/scope/matcher.rs create mode 100644 src/scope/mod.rs create mode 100644 src/versioning/hybrid.rs create mode 100644 src/versioning/independent.rs create mode 100644 src/versioning/manager.rs create mode 100644 src/versioning/mod.rs create mode 100644 src/versioning/unified.rs create mode 100644 src/workflow/mod.rs create mode 100644 src/workflow/orchestrator.rs create mode 100644 tests/agent_cli_tests.rs create mode 100644 tests/common/assert.rs create mode 100644 tests/machine_readable_cli_tests.rs create mode 100644 tools/native-git-e2e/Dockerfile create mode 100755 tools/native-git-e2e/entrypoint.sh diff --git a/.committy/config.toml b/.committy/config.toml new file mode 100644 index 0000000..3bd2a6a --- /dev/null +++ b/.committy/config.toml @@ -0,0 +1,63 @@ +[repository] +name = "committy" +type = "multi-package" +description = "Generate clear, concise, and structured commit messages effortlessly" + +[versioning] +strategy = "hybrid" + +[[packages]] +name = "committy-cli" +type = "rust-cargo" +path = "." 
+version_file = "Cargo.toml" +version_field = "package.version" +primary = true +description = "Main CLI application" + +[[packages]] +name = "mcp-server" +type = "node-npm" +path = "mcp-server-committy" +version_file = "package.json" +version_field = "version" +sync_with = "committy-cli" +description = "MCP server wrapper for Committy" + +[[packages]] +name = "docs" +type = "node-npm" +path = "docs" +version_file = "package.json" +version_field = "version" +independent = true +description = "Documentation site" + +[scopes] +auto_detect = true +require_scope_for_multi_package = true +allow_multiple_scopes = true +scope_separator = "," + +[[scopes.mappings]] +pattern = "src/**" +scope = "core" +package = "committy-cli" +description = "Core CLI code" + +[[scopes.mappings]] +pattern = "mcp-server-committy/**" +scope = "mcp" +package = "mcp-server" +description = "MCP server code" + +[[scopes.mappings]] +pattern = "docs/**" +scope = "docs" +package = "docs" +description = "Documentation" + +[commit_rules] +max_subject_length = 72 +max_body_line_length = 100 +require_body = false diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..a611bde --- /dev/null +++ b/.dockerignore @@ -0,0 +1,6 @@ +.git +target +mcp-server-committy/node_modules +mcp-server-committy/dist +docs/node_modules +docs/dist diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 22f4a14..3c35008 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -2,9 +2,9 @@ name: Rust on: push: - branches: [ main, develop ] + branches: [main, develop] pull_request: - branches: [ "*"] + branches: ["*"] env: CARGO_TERM_COLOR: always @@ -12,60 +12,21 @@ env: jobs: lint-and-test: runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Install Rust - uses: actions-rs/toolchain@v1.0.7 - with: - profile: minimal - toolchain: stable - override: true - components: rustfmt, clippy - - name: Check formatting - run: cargo fmt -- --check - - name: 
Lint with clippy - run: cargo clippy -- -D warnings - - name: Run tests - run: cargo test -- --test-threads=1 - - name: Smoke test lint-message CLI - run: | - cargo run -- --non-interactive lint-message --message "feat: CI smoke test for lint-message" - - mcp_ts: - runs-on: ubuntu-latest - defaults: - run: - working-directory: mcp-server-committy steps: - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 + - name: Install Rust + uses: actions-rs/toolchain@v1.0.7 with: - node-version: 20 - cache: npm - cache-dependency-path: mcp-server-committy/package-lock.json - - name: Install dependencies - run: npm ci - - name: Typecheck - run: npm run typecheck - - name: Build - run: npm run build - - name: Run unit tests - run: npm test --if-present - - # docs_build: - # runs-on: ubuntu-latest - # defaults: - # run: - # working-directory: docs - # steps: - # - uses: actions/checkout@v4 - # - uses: actions/setup-node@v4 - # with: - # node-version: 20 - # registry-url: 'https://registry.npmjs.org' - # cache: npm - # cache-dependency-path: docs/package-lock.json - # - name: Install dependencies - # run: npm install - # - name: Build docs - # run: npm run build \ No newline at end of file + profile: minimal + toolchain: stable + override: true + components: rustfmt, clippy + - name: Check formatting + run: cargo fmt -- --check + - name: Lint with clippy + run: cargo clippy -- -D warnings + - name: Run tests + run: cargo test -- --test-threads=1 + - name: Smoke test lint-message CLI + run: | + cargo run -- --non-interactive lint-message --message "feat: CI smoke test for lint-message" diff --git a/.github/workflows/native-git-verification.yml b/.github/workflows/native-git-verification.yml new file mode 100644 index 0000000..3710d50 --- /dev/null +++ b/.github/workflows/native-git-verification.yml @@ -0,0 +1,12 @@ +name: Native Git Verification + +on: + workflow_dispatch: + +jobs: + native-git-e2e: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: 
Run native git Docker verification + run: ./scripts/verify_native_git_e2e.sh diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000..a825b9e --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,165 @@ +# AGENTS.md + +This file provides guidance to Codex (Codex.ai/code) when working with code in this repository. + +## Overview + +Committy is a Rust CLI tool for generating structured, conventional commit messages compatible with SemVer. It supports interactive and non-interactive modes, commit linting, semantic versioning via tags, AI-assisted commit messages, and group commits. + +## Essential Commands + +### Build and Test +```bash +# Build the project +cargo build + +# Run all tests +cargo test + +# Run a specific test +cargo test test_name + +# Run tests with verbose output +cargo test -- --nocapture +``` + +### Running the CLI +```bash +# Run from source +cargo run + +# Run with arguments +cargo run -- [options] + +# Examples +cargo run -- commit +cargo run -- amend +cargo run -- tag --dry-run +cargo run -- lint --output json + +# NEW: Interactive TUI mode +cargo run -- tui +cargo run -- tui --ai # With AI assistance +``` + +### Development +```bash +# Check code without building +cargo check + +# Format code +cargo fmt + +# Run linter +cargo clippy +``` + +## Architecture + +### Module Structure + +**TUI Layer** (`src/tui/`) - NEW! 
+- `app.rs`: Main TUI application loop and event handling +- `state.rs`: Application state management (files, commits, groups) +- `event.rs`: Keyboard/mouse event handling +- `ui/`: UI components + - `file_list.rs`: File staging/selection interface + - `commit_form.rs`: Commit message input form + - `group_view.rs`: Auto-grouped commits view + - `help.rs`: Help overlay +- Built with `ratatui` (modern fork of `tui-rs 0.19`) +- Features: + - Interactive file staging/unstaging + - Auto-grouping changes by type (docs, tests, ci, deps, code) + - Multi-commit workflow support + - Diff preview + - Real-time UI updates + +**CLI Layer** (`src/cli/`) +- `commands/`: Individual command implementations (commit, amend, tag, lint, lint_message, branch, group_commit, tui) +- Each command implements the `Command` trait with `execute(&self, non_interactive: bool)` method +- Commands are defined via `StructOpt` for argument parsing + +**Git Operations** (`src/git/`) +- `repository.rs`: Core git operations (staged changes, file listing, config validation) +- `commit.rs`: Commit creation and message formatting +- `tag.rs`: Tag generation with `TagGenerator` and `TagGeneratorOptions` +- `branch.rs`: Branch operations + +**Configuration** (`src/config.rs`) +- Config file location: `~/.config/committy/config.toml` +- Override via `COMMITTY_CONFIG_DIR` environment variable +- Contains: + - `major_regex`, `minor_regex`, `patch_regex`: Configurable regex patterns for semver bump detection + - `metrics_enabled`: Telemetry toggle + - `last_update_check`, `last_metrics_reminder`: Timestamps + - `user_id`: Anonymous UUID for metrics + +**Linting** (`src/linter/`) +- Validates conventional commit format +- Used by `lint` and `lint_message` commands +- Returns structured error information + +**Version Management** (`src/version/`) +- `VersionManager`: Handles version updates across multiple file types +- Supports Cargo.toml, package.json, pyproject.toml, composer.json, pom.xml, *.csproj +- Called 
during tag creation to automatically bump version files + +**AI Integration** (`src/ai/`) +- Supports OpenRouter and Ollama providers +- Used in `group_commit` command when `--ai` flag is provided +- Generates commit message suggestions based on diffs + +**Input/Prompts** (`src/input/`) +- Interactive prompt handling via `inquire` crate +- Input validation for commit messages, scopes, ticket names +- Handles non-interactive mode gracefully + +**Error Handling** (`src/error.rs`) +- `CliError`: Central error type for all CLI operations +- Special exit code `3` for lint issues (distinct from general errors) + +### Key Flow: Tag Command + +1. `TagCommand::execute()` in `src/cli/commands/tag.rs` +2. Creates `TagGenerator` with options (dry-run, fetch, publish) +3. `TagGenerator::generate_and_create_tag()`: + - Fetches tags from remote (unless `--no-fetch`) + - Gets latest tag + - Analyzes commits since last tag using regex patterns from config + - Determines version bump (major/minor/patch) + - Updates version files via `VersionManager` + - Creates git tag (unless `--dry-run`) + - Publishes tag to remote (unless `--not-publish`) + +### Key Flow: Group Commit + +1. `GroupCommitCommand::execute()` in `src/cli/commands/group_commit.rs` +2. Groups changed files by type (docs, tests, ci, deps, build, chore, code) +3. In "plan" mode: outputs JSON with suggested commits per group +4. In "apply" mode: creates commits for each group (optionally with AI-generated messages) +5. 
Optionally pushes commits after creation + +### Non-Interactive Mode + +- Enabled via `--non-interactive` flag, `COMMITTY_NONINTERACTIVE=1`, or `CI=1` +- All commands support non-interactive mode +- Prompts are skipped; sensible defaults or errors are used + +### Build System + +- `build.rs`: Injects `SENTRY_DSN` and `POSTHOG_API_KEY` at compile time from environment variables +- Defaults to "undefined" if not set + +## Testing + +- Tests located in `tests/` directory +- Integration tests use `assert_cmd` for CLI testing +- Many tests require git repositories, use `tempfile` for temporary test repos +- Tests using shared state are marked with `serial_test::serial` + +## Contributing Notes + +- PRs should target the `develop` branch, not `main` +- Commit messages should follow conventional commit format +- Version is managed in `Cargo.toml` and bumped via the `tag` command \ No newline at end of file diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..6f4d1e0 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,165 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Overview + +Committy is a Rust CLI tool for generating structured, conventional commit messages compatible with SemVer. It supports interactive and non-interactive modes, commit linting, semantic versioning via tags, AI-assisted commit messages, and group commits. 
+ +## Essential Commands + +### Build and Test +```bash +# Build the project +cargo build + +# Run all tests +cargo test + +# Run a specific test +cargo test test_name + +# Run tests with verbose output +cargo test -- --nocapture +``` + +### Running the CLI +```bash +# Run from source +cargo run + +# Run with arguments +cargo run -- [options] + +# Examples +cargo run -- commit +cargo run -- amend +cargo run -- tag --dry-run +cargo run -- lint --output json + +# NEW: Interactive TUI mode +cargo run -- tui +cargo run -- tui --ai # With AI assistance +``` + +### Development +```bash +# Check code without building +cargo check + +# Format code +cargo fmt + +# Run linter +cargo clippy +``` + +## Architecture + +### Module Structure + +**TUI Layer** (`src/tui/`) - NEW! +- `app.rs`: Main TUI application loop and event handling +- `state.rs`: Application state management (files, commits, groups) +- `event.rs`: Keyboard/mouse event handling +- `ui/`: UI components + - `file_list.rs`: File staging/selection interface + - `commit_form.rs`: Commit message input form + - `group_view.rs`: Auto-grouped commits view + - `help.rs`: Help overlay +- Built with `ratatui` (modern fork of `tui-rs 0.19`) +- Features: + - Interactive file staging/unstaging + - Auto-grouping changes by type (docs, tests, ci, deps, code) + - Multi-commit workflow support + - Diff preview + - Real-time UI updates + +**CLI Layer** (`src/cli/`) +- `commands/`: Individual command implementations (commit, amend, tag, lint, lint_message, branch, group_commit, tui) +- Each command implements the `Command` trait with `execute(&self, non_interactive: bool)` method +- Commands are defined via `StructOpt` for argument parsing + +**Git Operations** (`src/git/`) +- `repository.rs`: Core git operations (staged changes, file listing, config validation) +- `commit.rs`: Commit creation and message formatting +- `tag.rs`: Tag generation with `TagGenerator` and `TagGeneratorOptions` +- `branch.rs`: Branch operations + 
+**Configuration** (`src/config.rs`) +- Config file location: `~/.config/committy/config.toml` +- Override via `COMMITTY_CONFIG_DIR` environment variable +- Contains: + - `major_regex`, `minor_regex`, `patch_regex`: Configurable regex patterns for semver bump detection + - `metrics_enabled`: Telemetry toggle + - `last_update_check`, `last_metrics_reminder`: Timestamps + - `user_id`: Anonymous UUID for metrics + +**Linting** (`src/linter/`) +- Validates conventional commit format +- Used by `lint` and `lint_message` commands +- Returns structured error information + +**Version Management** (`src/version/`) +- `VersionManager`: Handles version updates across multiple file types +- Supports Cargo.toml, package.json, pyproject.toml, composer.json, pom.xml, *.csproj +- Called during tag creation to automatically bump version files + +**AI Integration** (`src/ai/`) +- Supports OpenRouter and Ollama providers +- Used in `group_commit` command when `--ai` flag is provided +- Generates commit message suggestions based on diffs + +**Input/Prompts** (`src/input/`) +- Interactive prompt handling via `inquire` crate +- Input validation for commit messages, scopes, ticket names +- Handles non-interactive mode gracefully + +**Error Handling** (`src/error.rs`) +- `CliError`: Central error type for all CLI operations +- Special exit code `3` for lint issues (distinct from general errors) + +### Key Flow: Tag Command + +1. `TagCommand::execute()` in `src/cli/commands/tag.rs` +2. Creates `TagGenerator` with options (dry-run, fetch, publish) +3. `TagGenerator::generate_and_create_tag()`: + - Fetches tags from remote (unless `--no-fetch`) + - Gets latest tag + - Analyzes commits since last tag using regex patterns from config + - Determines version bump (major/minor/patch) + - Updates version files via `VersionManager` + - Creates git tag (unless `--dry-run`) + - Publishes tag to remote (unless `--not-publish`) + +### Key Flow: Group Commit + +1. 
`GroupCommitCommand::execute()` in `src/cli/commands/group_commit.rs` +2. Groups changed files by type (docs, tests, ci, deps, build, chore, code) +3. In "plan" mode: outputs JSON with suggested commits per group +4. In "apply" mode: creates commits for each group (optionally with AI-generated messages) +5. Optionally pushes commits after creation + +### Non-Interactive Mode + +- Enabled via `--non-interactive` flag, `COMMITTY_NONINTERACTIVE=1`, or `CI=1` +- All commands support non-interactive mode +- Prompts are skipped; sensible defaults or errors are used + +### Build System + +- `build.rs`: Injects `SENTRY_DSN` and `POSTHOG_API_KEY` at compile time from environment variables +- Defaults to "undefined" if not set + +## Testing + +- Tests located in `tests/` directory +- Integration tests use `assert_cmd` for CLI testing +- Many tests require git repositories, use `tempfile` for temporary test repos +- Tests using shared state are marked with `serial_test::serial` + +## Contributing Notes + +- PRs should target the `develop` branch, not `main` +- Commit messages should follow conventional commit format +- Version is managed in `Cargo.toml` and bumped via the `tag` command \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index 6318615..5c3cd6f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,15 +11,15 @@ include = ["/src", "LICENSE"] [dependencies] structopt = "0.3.26" -inquire = "0.7.5" +inquire = "0.9.1" git2 = { version = "0.20.2", features = ["vendored-openssl"] } -thiserror = "2.0.16" -anyhow = "1.0.99" -sentry = "0.42.0" -chrono = { version = "0.4.41", features = ["serde"] } -semver = "1.0.24" -regex = "1.11.1" -log = "0.4.25" +thiserror = "2.0.17" +anyhow = "1.0.100" +sentry = "0.46.0" +chrono = { version = "0.4.42", features = ["serde"] } +semver = "1.0.27" +regex = "1.12.2" +log = "0.4.29" env_logger = "0.11.6" strsim = "0.11.1" self_update = { version = "0.42.0", features = [ @@ -28,19 +28,22 @@ self_update = { version = "0.42.0", 
features = [ "archive-zip", ] } colored = "3.0.0" -indicatif = "0.18.0" +indicatif = "0.18.3" serde = { version = "1.0.219", features = ["derive"] } serde_json = "1.0.145" -toml = "0.9.7" +serde_norway = "0.9.42" +toml = "0.9.8" +toml_edit = "0.23.7" +glob = "0.3" dirs = "6.0.0" -reqwest = { version = "0.12.23", features = ["json", "blocking"] } -tokio = { version = "1.47.1", features = ["full"] } +reqwest = { version = "0.12.24", features = ["json", "blocking"] } +tokio = { version = "1.48.0", features = ["full"] } once_cell = "1.21.3" -uuid = {version = "1.18.1", features = ["v4"]} +uuid = {version = "1.19.0", features = ["v4"]} async-trait = "0.1.83" [dev-dependencies] -assert_cmd = "2.0.17" +assert_cmd = "2.1.1" predicates = "3.1.3" tempfile = "3.23.0" mockall = "0.13.1" diff --git a/README.md b/README.md index 557d2c4..e4110ce 100644 --- a/README.md +++ b/README.md @@ -46,6 +46,7 @@ committy - Full docs live in `docs/` (Astro + Starlight): `docs/src/content/docs/` - Key references: + - Agent Workflows: `docs/src/content/docs/reference/agent-workflows.mdx` - Group Commit: `docs/src/content/docs/reference/group-commit.mdx` - AI Flags & Security: `docs/src/content/docs/reference/ai-flags.mdx` @@ -55,6 +56,12 @@ committy ```shell committy amend + +# Non-interactive amend preview +committy --non-interactive amend --type fix --message "adjust release note wording" --dry-run --output json + +# The same amend flow also works through commit flags +committy --non-interactive commit --amend --type fix --message "adjust release note wording" --dry-run --output json ``` #### Demo @@ -78,8 +85,11 @@ committy -s "change the api version" amend ### Output format - Use `--output json|text` on commands that support it. +- `branch --dry-run --output json` returns a machine-readable branch plan. +- `commit --dry-run --output json` returns the resolved commit message and workflow preview. - `lint --output json` prints `{ ok, count, issues }`. 
- `tag --output json` (with `--dry-run`) prints `{ ok, new_tag }`. +- `--repo-path` lets agents target another checkout without changing `cwd`. ### Verbosity @@ -96,12 +106,23 @@ committy -s "change the api version" amend - `--fetch` / `--no-fetch` controls whether tags are fetched from remote before calculation. - Default: fetch is enabled unless `--no-fetch` is provided. +- Remote publishing is explicit: use `--publish --confirm-publish` to push tags or version-bump commits. - Example (no remote access): ```bash committy --non-interactive tag --no-fetch --dry-run --not-publish --output json ``` +### Agent-friendly preview flow + +- Preview first, then apply: + - `committy --non-interactive branch --type feat --ticket AI42 --subject "agent flow" --dry-run --output json` + - `committy --non-interactive commit --type feat --message "add agent flow" --dry-run --output json` + - `committy --non-interactive group-commit --mode plan --output json` +- Remote mutations require confirmation: + - `committy --non-interactive group-commit --mode apply --push --confirm-push --output json` + - `committy --non-interactive tag --publish --confirm-publish --output json` + ### Stable lint exit codes (for CI) - `0` = OK, no issues @@ -116,6 +137,11 @@ committy --non-interactive lint --repo-path . --output json || { } ``` +### Native git verification + +- For signed commit/tag and SSH identity smoke testing, run `./scripts/verify_native_git_e2e.sh`. +- The harness is documented in `docs/NATIVE_GIT_E2E.md` and can also be triggered from the manual GitHub Actions workflow `Native Git Verification`. + ### Configurable version bump rules Committy determines semantic version bumps using regex patterns loaded from `config.toml`. 
@@ -187,4 +213,4 @@ This project is licensed under the Apache 2.0 License - see the [LICENSE](LICENS ## 🙏 Acknowledgments - Inspired by the need for consistent commit messages -- Built with love using Rust 🦀 \ No newline at end of file +- Built with love using Rust 🦀 diff --git a/docs/CONFIGURATION.md b/docs/CONFIGURATION.md new file mode 100644 index 0000000..0d6feb7 --- /dev/null +++ b/docs/CONFIGURATION.md @@ -0,0 +1,652 @@ +# Committy Configuration Reference + +Complete reference for all configuration options in Committy. + +## Table of Contents + +1. [Configuration Files](#configuration-files) +2. [Repository Configuration](#repository-configuration) +3. [User Configuration](#user-configuration) +4. [Configuration Hierarchy](#configuration-hierarchy) +5. [Environment Variables](#environment-variables) + +## Configuration Files + +### Repository Configuration + +**Location**: `.committy/config.toml` (in repository root) + +This file is **committed** to the repository and shared across all contributors. + +### User Configuration + +**Location**: `~/.config/committy/config.toml` (user home directory) + +This file is **not committed** and contains user-specific settings. + +## Repository Configuration + +### `[repository]` Section + +Defines repository metadata. 
+ +```toml +[repository] +name = "my-project" # Required: Repository name +type = "multi-package" # Required: "single-package" or "multi-package" +description = "Description" # Optional: Repository description +max_depth = 3 # Optional: Max directory depth for package detection (default: 3) +``` + +**Fields:** + +- `name` (string, required): Repository name +- `type` (string, required): Repository type + - `"single-package"`: Single package repository + - `"multi-package"`: Multi-package repository (monorepo) +- `description` (string, optional): Human-readable description +- `max_depth` (integer, optional): Maximum directory depth for package detection (default: 3) + +### `[versioning]` Section + +Defines versioning strategy and rules. + +```toml +[versioning] +strategy = "independent" # Required: Versioning strategy +unified_version = "1.0.0" # Optional: For unified strategy + +[versioning.rules] +breaking_change_bumps_major = true # Optional: Default true +feat_bumps_minor = true # Optional: Default true +fix_bumps_patch = true # Optional: Default true +``` + +**Fields:** + +- `strategy` (string, required): Versioning strategy + - `"independent"`: Each package has its own version + - `"unified"`: All packages share one version + - `"hybrid"`: Mix of primary, synced, and independent packages +- `unified_version` (string, optional): Version for unified strategy +- `rules` (table, optional): Version bump rules + - `breaking_change_bumps_major` (boolean): Breaking changes bump major version + - `feat_bumps_minor` (boolean): Features bump minor version + - `fix_bumps_patch` (boolean): Fixes bump patch version + +### `[[packages]]` Section + +Defines packages in the repository. Can have multiple `[[packages]]` entries. 
+ +```toml +[[packages]] +name = "package-name" # Required: Package name +type = "rust-cargo" # Required: Package type +path = "path/to/package" # Required: Relative path from repo root +version_file = "Cargo.toml" # Optional: Auto-detected +version_field = "package.version" # Optional: Auto-detected +primary = false # Optional: For hybrid strategy +sync_with = "" # Optional: For hybrid strategy +independent = true # Optional: For hybrid strategy +workspace_member = false # Optional: Is workspace member +description = "Description" # Optional: Package description +``` + +**Fields:** + +- `name` (string, required): Package name (must be unique) +- `type` (string, required): Package manager type + - `"rust-cargo"`: Rust with Cargo + - `"node-npm"`: Node.js with npm + - `"node-pnpm"`: Node.js with pnpm + - `"node-yarn"`: Node.js with yarn +- `path` (string, required): Relative path from repository root +- `version_file` (string, optional): File containing version (auto-detected) +- `version_field` (string, optional): Field path to version (auto-detected) +- `primary` (boolean, optional): Is primary package (hybrid strategy only) +- `sync_with` (string, optional): Sync version with this package (hybrid strategy only) +- `independent` (boolean, optional): Has independent version (hybrid strategy only) +- `workspace_member` (boolean, optional): Is part of a workspace +- `description` (string, optional): Human-readable description + +**Package Types:** + +| Type | File | Field | Example | +|------|------|-------|---------| +| `rust-cargo` | `Cargo.toml` | `package.version` | `version = "1.0.0"` | +| `node-npm` | `package.json` | `version` | `"version": "1.0.0"` | +| `node-pnpm` | `package.json` | `version` | `"version": "1.0.0"` | +| `node-yarn` | `package.json` | `version` | `"version": "1.0.0"` | + +### `[scopes]` Section + +Defines scope detection behavior. 
+ +```toml +[scopes] +auto_detect = true # Optional: Enable auto-detection (default: true) +require_scope_for_multi_package = true # Optional: Require scope in multi-package (default: true) +allow_multiple_scopes = true # Optional: Allow multiple scopes (default: true) +scope_separator = "," # Optional: Separator for multiple scopes (default: ",") +``` + +**Fields:** + +- `auto_detect` (boolean, optional): Enable automatic scope detection +- `require_scope_for_multi_package` (boolean, optional): Require scope in multi-package repos +- `allow_multiple_scopes` (boolean, optional): Allow multiple scopes in one commit +- `scope_separator` (string, optional): Separator for multiple scopes (`,`, `/`, `-`, etc.) + +### `[[scopes.mappings]]` Section + +Defines file pattern to scope mappings. Can have multiple `[[scopes.mappings]]` entries. + +```toml +[[scopes.mappings]] +pattern = "path/**/*" # Required: Glob pattern +scope = "scope-name" # Required: Scope name +package = "package-name" # Optional: Associated package +description = "Description" # Optional: Human-readable description +``` + +**Fields:** + +- `pattern` (string, required): Glob pattern for file matching + - Supports `**` for recursive matching + - Supports `*` for single-level matching + - Examples: `src/**/*.rs`, `packages/cli/**/*`, `*.md` +- `scope` (string, required): Scope name to use when pattern matches +- `package` (string, optional): Associated package name +- `description` (string, optional): Human-readable description + +**Pattern Examples:** + +```toml +# Match all files in a directory +pattern = "packages/cli/**/*" + +# Match specific file types +pattern = "**/*.test.ts" + +# Match specific files +pattern = "**/Dockerfile" + +# Match top-level directory only +pattern = "src/*" + +# Match documentation +pattern = "docs/**/*.md" +``` + +### `[[dependencies]]` Section + +Defines dependency version references. Can have multiple `[[dependencies]]` entries. 
+ +```toml +[[dependencies]] +source = "package-name" # Required: Source package +description = "Description" # Optional: Human-readable description +``` + +**Fields:** + +- `source` (string, required): Name of the source package whose version is referenced +- `description` (string, optional): Human-readable description + +### `[[dependencies.targets]]` Section + +Defines where a dependency version is referenced. Each `[[dependencies]]` can have multiple targets. + +```toml +[[dependencies.targets]] +file = "path/to/file.yaml" # Required: File path +field = "nested.field.path" # Required: Field path (dot notation) +strategy = "auto" # Required: Update strategy +format = "" # Optional: Format string +``` + +**Fields:** + +- `file` (string, required): Relative path to file from repository root +- `field` (string, required): Field path using dot notation + - YAML/JSON/TOML: `image.tag`, `dependencies.mypackage` + - Dockerfile: Package name (e.g., `myapp`) +- `strategy` (string, required): Update strategy + - `"auto"`: Automatically update + - `"prompt"`: Prompt user for confirmation + - `"manual"`: Manual update only +- `format` (string, optional): Format string for version (future use) + +**File Type Support:** + +| File Type | Extension | Field Format | Example | +|-----------|-----------|--------------|---------| +| YAML | `.yaml`, `.yml` | Dot notation | `image.tag` | +| JSON | `.json` | Dot notation | `dependencies.pkg` | +| TOML | `.toml` | Dot notation | `dependencies.lib` | +| Dockerfile | `Dockerfile` | Package name | `myapp` | + +**Examples:** + +```toml +# YAML file (HELM chart) +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "api.image.tag" +strategy = "auto" + +# JSON file (package.json) +[[dependencies.targets]] +file = "packages/web/package.json" +field = "dependencies.@myorg/shared" +strategy = "auto" + +# TOML file (Cargo.toml) +[[dependencies.targets]] +file = "crates/cli/Cargo.toml" +field = "dependencies.mylib-core" +strategy = 
"auto" + +# Dockerfile +[[dependencies.targets]] +file = "Dockerfile" +field = "base-image" +strategy = "auto" +``` + +### `[commit_rules]` Section + +Defines commit message rules and validation. + +```toml +[commit_rules] +max_subject_length = 72 # Optional: Max subject length (default: 72) +max_body_line_length = 100 # Optional: Max body line length (default: 100) +require_body = false # Optional: Require body (default: false) +allowed_types = [] # Optional: Allowed commit types (empty = all) +custom_types = [] # Optional: Custom commit types +``` + +**Fields:** + +- `max_subject_length` (integer, optional): Maximum subject line length (default: 72) +- `max_body_line_length` (integer, optional): Maximum body line length (default: 100) +- `require_body` (boolean, optional): Require commit body (default: false) +- `allowed_types` (array, optional): Allowed commit types (empty = all allowed) +- `custom_types` (array, optional): Custom commit type definitions + +**Default Commit Types:** + +- `feat`: New feature +- `fix`: Bug fix +- `docs`: Documentation +- `style`: Code style +- `refactor`: Code refactoring +- `perf`: Performance improvement +- `test`: Tests +- `chore`: Maintenance + +### `[[commit_rules.custom_types]]` Section + +Defines custom commit types. + +```toml +[[commit_rules.custom_types]] +name = "custom" # Required: Type name +description = "Description" # Required: Type description +``` + +**Fields:** + +- `name` (string, required): Custom type name (lowercase, no spaces) +- `description` (string, required): Human-readable description + +**Example:** + +```toml +[[commit_rules.custom_types]] +name = "security" +description = "Security improvements" + +[[commit_rules.custom_types]] +name = "deps" +description = "Dependency updates" +``` + +### `[workspace]` Section + +Defines workspace-specific settings (optional). + +```toml +[workspace] +root = "." 
# Optional: Workspace root (default: ".") +members = [] # Optional: Workspace members +``` + +**Fields:** + +- `root` (string, optional): Workspace root directory (default: ".") +- `members` (array, optional): List of workspace member paths + +## User Configuration + +User-level configuration at `~/.config/committy/config.toml`. + +```toml +last_update_check = "2024-01-01T00:00:00Z" # Last update check timestamp +metrics_enabled = true # Enable metrics collection +last_metrics_reminder = "2024-01-01T00:00:00Z" # Last metrics reminder +user_id = "unique-user-id" # Unique user identifier + +# Regex patterns for version bump detection +major_regex = "BREAKING CHANGE:|!:" # Major version bump pattern +minor_regex = "^feat" # Minor version bump pattern +patch_regex = "^fix" # Patch version bump pattern +``` + +**Fields:** + +- `last_update_check` (datetime): Last time update was checked +- `metrics_enabled` (boolean): Enable anonymous metrics collection +- `last_metrics_reminder` (datetime): Last time metrics reminder was shown +- `user_id` (string): Unique user identifier for metrics +- `major_regex` (string): Regex pattern for major version bumps +- `minor_regex` (string): Regex pattern for minor version bumps +- `patch_regex` (string): Regex pattern for patch version bumps + +## Configuration Hierarchy + +Configuration is merged in this order (later overrides earlier): + +1. **Default values**: Built-in defaults +2. **User configuration**: `~/.config/committy/config.toml` +3. 
**Repository configuration**: `.committy/config.toml` + +### Merge Behavior + +- **Scalars** (strings, numbers, booleans): Repository config overrides user config +- **Arrays**: Repository config replaces user config (no merging) +- **Tables**: Merged recursively + +**Example:** + +User config: +```toml +major_regex = "BREAKING CHANGE:" +minor_regex = "^feat" +``` + +Repository config: +```toml +major_regex = "!:" +``` + +Result: +```toml +major_regex = "!:" # From repository +minor_regex = "^feat" # From user +``` + +## Environment Variables + +### `COMMITTY_CONFIG_DIR` + +Override configuration directory. + +```bash +export COMMITTY_CONFIG_DIR=/custom/path +committy commit +``` + +### `COMMITTY_NO_UPDATE_CHECK` + +Disable update checks. + +```bash +export COMMITTY_NO_UPDATE_CHECK=1 +committy commit +``` + +### `COMMITTY_NO_METRICS` + +Disable metrics collection. + +```bash +export COMMITTY_NO_METRICS=1 +committy commit +``` + +### `COMMITTY_LOG_LEVEL` + +Set log level. + +```bash +export COMMITTY_LOG_LEVEL=debug # trace, debug, info, warn, error +committy commit +``` + +## Validation + +### Validate Configuration + +```bash +# Validate repository config +committy config validate + +# Validate with verbose output +committy config validate --verbose + +# Show merged configuration +committy config show + +# Show as JSON +committy config show --json +``` + +### Common Validation Errors + +**Duplicate package names:** +``` +Error: Duplicate package name 'cli' found +``` + +**Circular dependency:** +``` +Error: Circular dependency detected: cli -> server -> cli +``` + +**Invalid pattern:** +``` +Error: Invalid glob pattern in scope mapping: '[invalid' +``` + +**Missing required field:** +``` +Error: Missing required field 'name' in package configuration +``` + +## Best Practices + +### 1. Commit Repository Config + +```bash +git add .committy/config.toml +git commit -m "chore: add committy configuration" +``` + +### 2. 
Document Custom Scopes + +Add comments in config: + +```toml +[[scopes.mappings]] +pattern = "packages/cli/**/*" +scope = "cli" +package = "cli" +description = "CLI application - user-facing command-line tool" +``` + +### 3. Use Descriptive Package Names + +```toml +# Good +name = "api-server" +name = "@myorg/shared-utils" + +# Avoid +name = "pkg1" +name = "temp" +``` + +### 4. Group Related Configurations + +```toml +# Core packages +[[packages]] +name = "core" +# ... + +[[packages]] +name = "shared" +# ... + +# Applications +[[packages]] +name = "cli" +# ... + +[[packages]] +name = "server" +# ... +``` + +### 5. Test Configuration Changes + +```bash +# After editing config +committy config validate +committy packages list +committy config show +``` + +## Complete Example + +```toml +# .committy/config.toml +[repository] +name = "my-monorepo" +type = "multi-package" +description = "Full-stack application monorepo" +max_depth = 4 + +[versioning] +strategy = "hybrid" + +[versioning.rules] +breaking_change_bumps_major = true +feat_bumps_minor = true +fix_bumps_patch = true + +[[packages]] +name = "shared" +type = "rust-cargo" +path = "packages/shared" +primary = true +description = "Shared library" + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "packages/cli" +sync_with = "shared" +description = "CLI application" + +[[packages]] +name = "server" +type = "node-npm" +path = "packages/server" +sync_with = "shared" +description = "API server" + +[[packages]] +name = "utils" +type = "rust-cargo" +path = "packages/utils" +independent = true +description = "Utility functions" + +[scopes] +auto_detect = true +require_scope_for_multi_package = true +allow_multiple_scopes = true +scope_separator = "," + +[[scopes.mappings]] +pattern = "packages/shared/**/*" +scope = "shared" +package = "shared" +description = "Shared library" + +[[scopes.mappings]] +pattern = "packages/cli/**/*" +scope = "cli" +package = "cli" +description = "CLI application" + +[[scopes.mappings]] 
+pattern = "packages/server/**/*" +scope = "server" +package = "server" +description = "API server" + +[[scopes.mappings]] +pattern = "packages/utils/**/*" +scope = "utils" +package = "utils" +description = "Utility functions" + +[[scopes.mappings]] +pattern = "docs/**/*" +scope = "docs" +package = "" +description = "Documentation" + +[[dependencies]] +source = "shared" +description = "Shared library version in dependents" + +[[dependencies.targets]] +file = "packages/cli/Cargo.toml" +field = "dependencies.shared" +strategy = "auto" + +[[dependencies.targets]] +file = "packages/server/package.json" +field = "dependencies.shared" +strategy = "auto" + +[[dependencies]] +source = "server" +description = "Server version in deployment" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "server.image.tag" +strategy = "auto" + +[commit_rules] +max_subject_length = 72 +max_body_line_length = 100 +require_body = false +allowed_types = ["feat", "fix", "docs", "style", "refactor", "perf", "test", "chore"] + +[[commit_rules.custom_types]] +name = "security" +description = "Security improvements" +``` + +## Next Steps + +- Review [User Guide](USER_GUIDE.md) for usage instructions +- Check [Examples](EXAMPLES.md) for real-world configurations +- Read [Quick Start](QUICKSTART.md) for getting started diff --git a/docs/EXAMPLES.md b/docs/EXAMPLES.md new file mode 100644 index 0000000..99de194 --- /dev/null +++ b/docs/EXAMPLES.md @@ -0,0 +1,1071 @@ +# Committy Configuration Examples + +This document provides real-world configuration examples for different repository types and use cases. + +## Table of Contents + +1. [Single Package Examples](#single-package-examples) +2. [Multi-Package Examples](#multi-package-examples) +3. [Versioning Strategy Examples](#versioning-strategy-examples) +4. [Scope Detection Examples](#scope-detection-examples) +5. [Dependency Management Examples](#dependency-management-examples) +6. 
[Complete Real-World Examples](#complete-real-world-examples) + +## Single Package Examples + +### Basic Rust Project + +```toml +# .committy/config.toml +[repository] +name = "my-rust-app" +type = "single-package" +description = "A Rust CLI application" + +[versioning] +strategy = "independent" + +[[packages]] +name = "my-rust-app" +type = "rust-cargo" +path = "." +version_file = "Cargo.toml" +version_field = "package.version" + +[scopes] +auto_detect = false + +[commit_rules] +max_subject_length = 72 +max_body_line_length = 100 +``` + +### Basic Node.js Project + +```toml +# .committy/config.toml +[repository] +name = "my-node-app" +type = "single-package" + +[versioning] +strategy = "independent" + +[[packages]] +name = "my-node-app" +type = "node-npm" +path = "." + +[scopes] +auto_detect = true + +[[scopes.mappings]] +pattern = "src/api/**/*" +scope = "api" +package = "my-node-app" + +[[scopes.mappings]] +pattern = "src/ui/**/*" +scope = "ui" +package = "my-node-app" + +[[scopes.mappings]] +pattern = "docs/**/*" +scope = "docs" +package = "" +``` + +## Multi-Package Examples + +### Rust Workspace with Multiple Crates + +```toml +# .committy/config.toml +[repository] +name = "rust-workspace" +type = "multi-package" +description = "Rust workspace with multiple crates" + +[versioning] +strategy = "independent" + +[[packages]] +name = "core" +type = "rust-cargo" +path = "crates/core" +independent = true +description = "Core library" + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "crates/cli" +independent = true +description = "CLI application" + +[[packages]] +name = "server" +type = "rust-cargo" +path = "crates/server" +independent = true +description = "Server application" + +[scopes] +auto_detect = true +require_scope_for_multi_package = true +allow_multiple_scopes = true + +[[scopes.mappings]] +pattern = "crates/core/**/*" +scope = "core" +package = "core" + +[[scopes.mappings]] +pattern = "crates/cli/**/*" +scope = "cli" +package = "cli" + 
+[[scopes.mappings]] +pattern = "crates/server/**/*" +scope = "server" +package = "server" +``` + +### Node.js Monorepo with npm Workspaces + +```toml +# .committy/config.toml +[repository] +name = "node-monorepo" +type = "multi-package" + +[versioning] +strategy = "independent" + +[[packages]] +name = "@myorg/shared" +type = "node-npm" +path = "packages/shared" +independent = true + +[[packages]] +name = "@myorg/web" +type = "node-npm" +path = "packages/web" +independent = true + +[[packages]] +name = "@myorg/mobile" +type = "node-npm" +path = "packages/mobile" +independent = true + +[scopes] +auto_detect = true +allow_multiple_scopes = true + +[[scopes.mappings]] +pattern = "packages/shared/**/*" +scope = "shared" +package = "@myorg/shared" + +[[scopes.mappings]] +pattern = "packages/web/**/*" +scope = "web" +package = "@myorg/web" + +[[scopes.mappings]] +pattern = "packages/mobile/**/*" +scope = "mobile" +package = "@myorg/mobile" +``` + +### Mixed Language Monorepo + +```toml +# .committy/config.toml +[repository] +name = "mixed-monorepo" +type = "multi-package" +description = "Rust backend + Node.js frontend" + +[versioning] +strategy = "independent" + +[[packages]] +name = "api" +type = "rust-cargo" +path = "backend/api" +independent = true + +[[packages]] +name = "worker" +type = "rust-cargo" +path = "backend/worker" +independent = true + +[[packages]] +name = "web" +type = "node-npm" +path = "frontend/web" +independent = true + +[[packages]] +name = "admin" +type = "node-npm" +path = "frontend/admin" +independent = true + +[scopes] +auto_detect = true +allow_multiple_scopes = true + +[[scopes.mappings]] +pattern = "backend/api/**/*" +scope = "api" +package = "api" + +[[scopes.mappings]] +pattern = "backend/worker/**/*" +scope = "worker" +package = "worker" + +[[scopes.mappings]] +pattern = "frontend/web/**/*" +scope = "web" +package = "web" + +[[scopes.mappings]] +pattern = "frontend/admin/**/*" +scope = "admin" +package = "admin" + +[[scopes.mappings]] 
+pattern = "infrastructure/**/*" +scope = "infra" +package = "" +description = "Infrastructure and deployment" +``` + +## Versioning Strategy Examples + +### Independent Versioning + +Each package has its own version that evolves independently. + +```toml +[versioning] +strategy = "independent" + +[[packages]] +name = "utils" +type = "rust-cargo" +path = "packages/utils" +independent = true +# Current version: 1.2.3 + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "packages/cli" +independent = true +# Current version: 2.0.1 + +[[packages]] +name = "server" +type = "rust-cargo" +path = "packages/server" +independent = true +# Current version: 1.5.0 +``` + +**Use case**: Packages have different release cycles and evolve at different rates. + +### Unified Versioning + +All packages share the same version number. + +```toml +[versioning] +strategy = "unified" +unified_version = "1.0.0" + +[[packages]] +name = "core" +type = "rust-cargo" +path = "packages/core" +# Version: 1.0.0 + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "packages/cli" +# Version: 1.0.0 + +[[packages]] +name = "server" +type = "rust-cargo" +path = "packages/server" +# Version: 1.0.0 +``` + +**Use case**: Tightly coupled packages, always released together. + +### Hybrid Versioning + +Mix of primary, synced, and independent packages.
+ +```toml +[versioning] +strategy = "hybrid" + +[[packages]] +name = "core" +type = "rust-cargo" +path = "packages/core" +primary = true +# Drives the version: 2.0.0 + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "packages/cli" +sync_with = "core" +# Syncs with core: 2.0.0 + +[[packages]] +name = "server" +type = "rust-cargo" +path = "packages/server" +sync_with = "core" +# Syncs with core: 2.0.0 + +[[packages]] +name = "utils" +type = "rust-cargo" +path = "packages/utils" +independent = true +# Independent: 1.5.0 + +[[packages]] +name = "dev-tools" +type = "rust-cargo" +path = "packages/dev-tools" +independent = true +# Independent: 0.3.0 +``` + +**Use case**: Core packages drive version, some packages sync, utilities independent. + +## Scope Detection Examples + +### Feature-Based Scopes + +```toml +[scopes] +auto_detect = true + +[[scopes.mappings]] +pattern = "src/auth/**/*" +scope = "auth" +package = "myapp" +description = "Authentication and authorization" + +[[scopes.mappings]] +pattern = "src/api/**/*" +scope = "api" +package = "myapp" +description = "API endpoints" + +[[scopes.mappings]] +pattern = "src/database/**/*" +scope = "db" +package = "myapp" +description = "Database layer" + +[[scopes.mappings]] +pattern = "src/ui/**/*" +scope = "ui" +package = "myapp" +description = "User interface" + +[[scopes.mappings]] +pattern = "tests/**/*" +scope = "test" +package = "" +description = "Tests" + +[[scopes.mappings]] +pattern = "docs/**/*" +scope = "docs" +package = "" +description = "Documentation" +``` + +### Package-Based Scopes with Submodules + +```toml +[scopes] +auto_detect = true +allow_multiple_scopes = true + +# Main packages +[[scopes.mappings]] +pattern = "packages/core/**/*" +scope = "core" +package = "core" + +[[scopes.mappings]] +pattern = "packages/cli/**/*" +scope = "cli" +package = "cli" + +# Shared resources +[[scopes.mappings]] +pattern = "shared/types/**/*" +scope = "types" +package = "" +description = "Shared type 
definitions" + +[[scopes.mappings]] +pattern = "shared/utils/**/*" +scope = "utils" +package = "" +description = "Shared utilities" + +# Infrastructure +[[scopes.mappings]] +pattern = "deploy/**/*" +scope = "deploy" +package = "" +description = "Deployment configurations" + +[[scopes.mappings]] +pattern = ".github/**/*" +scope = "ci" +package = "" +description = "CI/CD workflows" +``` + +### Complex Pattern Matching + +```toml +[scopes] +auto_detect = true + +# Match specific file types +[[scopes.mappings]] +pattern = "**/*.test.ts" +scope = "test" +package = "" + +[[scopes.mappings]] +pattern = "**/*.spec.ts" +scope = "test" +package = "" + +# Match configuration files +[[scopes.mappings]] +pattern = "**/tsconfig.json" +scope = "config" +package = "" + +[[scopes.mappings]] +pattern = "**/package.json" +scope = "deps" +package = "" + +# Match documentation +[[scopes.mappings]] +pattern = "**/*.md" +scope = "docs" +package = "" + +# Match specific directories +[[scopes.mappings]] +pattern = "src/components/**/*" +scope = "components" +package = "web" + +[[scopes.mappings]] +pattern = "src/hooks/**/*" +scope = "hooks" +package = "web" +``` + +## Dependency Management Examples + +### Kubernetes/HELM Chart Updates + +```toml +[[dependencies]] +source = "api" +description = "API version in HELM chart" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "api.image.tag" +strategy = "auto" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "api.version" +strategy = "auto" + +[[dependencies]] +source = "worker" +description = "Worker version in HELM chart" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "worker.image.tag" +strategy = "auto" +``` + +Example `values.yaml`: +```yaml +api: + image: + repository: myorg/api + tag: "1.2.3" # Auto-updated + version: "1.2.3" # Auto-updated + +worker: + image: + repository: myorg/worker + tag: "2.0.1" # Auto-updated +``` + +### Docker Compose Updates + +```toml 
+[[dependencies]] +source = "web" + +[[dependencies.targets]] +file = "docker-compose.yml" +field = "services.web.image" +strategy = "auto" + +[[dependencies]] +source = "api" + +[[dependencies.targets]] +file = "docker-compose.yml" +field = "services.api.image" +strategy = "auto" +``` + +Example `docker-compose.yml`: +```yaml +services: + web: + image: "myorg/web:1.0.0" # Auto-updated + ports: + - "3000:3000" + + api: + image: "myorg/api:2.1.0" # Auto-updated + ports: + - "8080:8080" +``` + +### Dockerfile Updates + +```toml +[[dependencies]] +source = "base-image" + +[[dependencies.targets]] +file = "Dockerfile" +field = "base-image" +strategy = "auto" +``` + +Example `Dockerfile`: +```dockerfile +ARG BASE_IMAGE_VERSION=1.2.3 +FROM myorg/base-image:${BASE_IMAGE_VERSION} + +# Or direct FROM +FROM myorg/base-image:1.2.3 + +COPY . /app +WORKDIR /app +``` + +### Package.json Dependency Updates + +```toml +[[dependencies]] +source = "@myorg/shared" + +[[dependencies.targets]] +file = "packages/web/package.json" +field = "dependencies.@myorg/shared" +strategy = "auto" + +[[dependencies.targets]] +file = "packages/mobile/package.json" +field = "dependencies.@myorg/shared" +strategy = "auto" +``` + +### Cargo.toml Dependency Updates + +```toml +[[dependencies]] +source = "mylib-core" + +[[dependencies.targets]] +file = "crates/cli/Cargo.toml" +field = "dependencies.mylib-core" +strategy = "auto" + +[[dependencies.targets]] +file = "crates/server/Cargo.toml" +field = "dependencies.mylib-core" +strategy = "auto" +``` + +## Complete Real-World Examples + +### Example 1: Full-Stack TypeScript Monorepo + +```toml +# .committy/config.toml +[repository] +name = "fullstack-app" +type = "multi-package" +description = "Full-stack TypeScript application" + +[versioning] +strategy = "hybrid" + +[versioning.rules] +breaking_change_bumps_major = true +feat_bumps_minor = true +fix_bumps_patch = true + +# Core shared library (primary) +[[packages]] +name = "@myapp/shared" +type = 
"node-pnpm" +path = "packages/shared" +primary = true +description = "Shared types and utilities" + +# Frontend (synced with shared) +[[packages]] +name = "@myapp/web" +type = "node-pnpm" +path = "packages/web" +sync_with = "@myapp/shared" +description = "Web application" + +# Backend (synced with shared) +[[packages]] +name = "@myapp/api" +type = "node-pnpm" +path = "packages/api" +sync_with = "@myapp/shared" +description = "API server" + +# Mobile (synced with shared) +[[packages]] +name = "@myapp/mobile" +type = "node-pnpm" +path = "packages/mobile" +sync_with = "@myapp/shared" +description = "Mobile application" + +# Dev tools (independent) +[[packages]] +name = "@myapp/dev-tools" +type = "node-pnpm" +path = "packages/dev-tools" +independent = true +description = "Development tools" + +[scopes] +auto_detect = true +require_scope_for_multi_package = true +allow_multiple_scopes = true +scope_separator = "," + +[[scopes.mappings]] +pattern = "packages/shared/**/*" +scope = "shared" +package = "@myapp/shared" + +[[scopes.mappings]] +pattern = "packages/web/**/*" +scope = "web" +package = "@myapp/web" + +[[scopes.mappings]] +pattern = "packages/api/**/*" +scope = "api" +package = "@myapp/api" + +[[scopes.mappings]] +pattern = "packages/mobile/**/*" +scope = "mobile" +package = "@myapp/mobile" + +[[scopes.mappings]] +pattern = "packages/dev-tools/**/*" +scope = "dev" +package = "@myapp/dev-tools" + +[[scopes.mappings]] +pattern = "docs/**/*" +scope = "docs" +package = "" + +[[scopes.mappings]] +pattern = ".github/**/*" +scope = "ci" +package = "" + +# Dependency management +[[dependencies]] +source = "@myapp/shared" +description = "Shared library version in dependents" + +[[dependencies.targets]] +file = "packages/web/package.json" +field = "dependencies.@myapp/shared" +strategy = "auto" + +[[dependencies.targets]] +file = "packages/api/package.json" +field = "dependencies.@myapp/shared" +strategy = "auto" + +[[dependencies.targets]] +file = 
"packages/mobile/package.json" +field = "dependencies.@myapp/shared" +strategy = "auto" + +[[dependencies]] +source = "@myapp/api" +description = "API version in deployment" + +[[dependencies.targets]] +file = "deploy/k8s/values.yaml" +field = "api.image.tag" +strategy = "auto" + +[[dependencies]] +source = "@myapp/web" +description = "Web version in deployment" + +[[dependencies.targets]] +file = "deploy/k8s/values.yaml" +field = "web.image.tag" +strategy = "auto" + +[commit_rules] +max_subject_length = 72 +max_body_line_length = 100 +require_body = false +``` + +### Example 2: Rust CLI Tool with Plugins + +```toml +# .committy/config.toml +[repository] +name = "rust-cli-tool" +type = "multi-package" +description = "Rust CLI tool with plugin system" + +[versioning] +strategy = "hybrid" + +# Core CLI (primary) +[[packages]] +name = "mycli" +type = "rust-cargo" +path = "crates/mycli" +primary = true +description = "Main CLI application" + +# Core library (synced) +[[packages]] +name = "mycli-core" +type = "rust-cargo" +path = "crates/core" +sync_with = "mycli" +description = "Core library" + +# Plugin API (synced) +[[packages]] +name = "mycli-plugin-api" +type = "rust-cargo" +path = "crates/plugin-api" +sync_with = "mycli" +description = "Plugin API" + +# Plugins (independent) +[[packages]] +name = "mycli-plugin-git" +type = "rust-cargo" +path = "plugins/git" +independent = true +description = "Git integration plugin" + +[[packages]] +name = "mycli-plugin-docker" +type = "rust-cargo" +path = "plugins/docker" +independent = true +description = "Docker integration plugin" + +[scopes] +auto_detect = true +allow_multiple_scopes = true + +[[scopes.mappings]] +pattern = "crates/mycli/**/*" +scope = "cli" +package = "mycli" + +[[scopes.mappings]] +pattern = "crates/core/**/*" +scope = "core" +package = "mycli-core" + +[[scopes.mappings]] +pattern = "crates/plugin-api/**/*" +scope = "plugin-api" +package = "mycli-plugin-api" + +[[scopes.mappings]] +pattern = 
"plugins/git/**/*" +scope = "plugin-git" +package = "mycli-plugin-git" + +[[scopes.mappings]] +pattern = "plugins/docker/**/*" +scope = "plugin-docker" +package = "mycli-plugin-docker" + +# Dependency management +[[dependencies]] +source = "mycli-core" + +[[dependencies.targets]] +file = "crates/mycli/Cargo.toml" +field = "dependencies.mycli-core" +strategy = "auto" + +[[dependencies]] +source = "mycli-plugin-api" + +[[dependencies.targets]] +file = "plugins/git/Cargo.toml" +field = "dependencies.mycli-plugin-api" +strategy = "auto" + +[[dependencies.targets]] +file = "plugins/docker/Cargo.toml" +field = "dependencies.mycli-plugin-api" +strategy = "auto" + +[commit_rules] +max_subject_length = 72 +allowed_types = ["feat", "fix", "docs", "style", "refactor", "perf", "test", "chore"] +``` + +### Example 3: Microservices with Shared Libraries + +```toml +# .committy/config.toml +[repository] +name = "microservices" +type = "multi-package" + +[versioning] +strategy = "independent" + +# Shared libraries +[[packages]] +name = "common" +type = "rust-cargo" +path = "libs/common" +independent = true + +[[packages]] +name = "proto" +type = "rust-cargo" +path = "libs/proto" +independent = true + +# Services +[[packages]] +name = "auth-service" +type = "rust-cargo" +path = "services/auth" +independent = true + +[[packages]] +name = "user-service" +type = "rust-cargo" +path = "services/user" +independent = true + +[[packages]] +name = "payment-service" +type = "rust-cargo" +path = "services/payment" +independent = true + +# API Gateway +[[packages]] +name = "api-gateway" +type = "node-npm" +path = "gateway" +independent = true + +[scopes] +auto_detect = true +allow_multiple_scopes = true + +[[scopes.mappings]] +pattern = "libs/common/**/*" +scope = "common" +package = "common" + +[[scopes.mappings]] +pattern = "libs/proto/**/*" +scope = "proto" +package = "proto" + +[[scopes.mappings]] +pattern = "services/auth/**/*" +scope = "auth" +package = "auth-service" + 
+[[scopes.mappings]] +pattern = "services/user/**/*" +scope = "user" +package = "user-service" + +[[scopes.mappings]] +pattern = "services/payment/**/*" +scope = "payment" +package = "payment-service" + +[[scopes.mappings]] +pattern = "gateway/**/*" +scope = "gateway" +package = "api-gateway" + +[[scopes.mappings]] +pattern = "deploy/**/*" +scope = "deploy" +package = "" + +# Dependency management for HELM charts +[[dependencies]] +source = "auth-service" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "services.auth.image.tag" +strategy = "auto" + +[[dependencies]] +source = "user-service" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "services.user.image.tag" +strategy = "auto" + +[[dependencies]] +source = "payment-service" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "services.payment.image.tag" +strategy = "auto" + +[[dependencies]] +source = "api-gateway" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "gateway.image.tag" +strategy = "auto" +``` + +## Tips and Tricks + +### 1. Testing Configuration + +```bash +# Validate configuration +committy config validate --verbose + +# Show merged configuration +committy config show + +# Test scope detection +git add path/to/file +committy commit --dry-run +``` + +### 2. Gradual Migration + +Start with minimal config and add features incrementally: + +```toml +# Step 1: Basic setup +[repository] +name = "my-project" +type = "multi-package" + +# Step 2: Add packages +[[packages]] +name = "package1" +type = "rust-cargo" +path = "packages/package1" + +# Step 3: Add scope detection +[scopes] +auto_detect = true + +# Step 4: Add dependency management +[[dependencies]] +source = "package1" +``` + +### 3. Custom Commit Types + +```toml +[commit_rules] +allowed_types = ["feat", "fix", "docs", "chore", "custom"] + +[[commit_rules.custom_types]] +name = "custom" +description = "Custom commit type for special cases" +``` + +### 4. 
Multiple Scope Separators + +```toml +[scopes] +allow_multiple_scopes = true +scope_separator = "," # or "/" or "-" +``` + +Results in: `feat(web,api): add feature` or `feat(web/api): add feature` + +## Next Steps + +- Review [User Guide](USER_GUIDE.md) for detailed usage +- Check [Configuration Reference](CONFIGURATION.md) for all options +- See [API Documentation](API.md) for programmatic usage diff --git a/docs/MULTI_PACKAGE_README.md b/docs/MULTI_PACKAGE_README.md new file mode 100644 index 0000000..af28540 --- /dev/null +++ b/docs/MULTI_PACKAGE_README.md @@ -0,0 +1,666 @@ +# Committy - Multi-Package Repository Support + +Complete guide to using Committy with multi-package repositories (monorepos). + +## Overview + +Committy provides comprehensive support for managing multiple packages in a single repository, with features including: + +- 📦 **Multi-Package Manager Support**: Cargo, npm, pnpm, yarn +- 🔄 **Flexible Versioning**: Independent, unified, or hybrid strategies +- 🎯 **Automatic Scope Detection**: Based on changed files +- 🔗 **Dependency Management**: Auto-update version references +- ✅ **Package Validation**: Ensure consistency across packages + +## Quick Start + +### 1. Initialize Configuration + +```bash +mkdir -p .committy +cat > .committy/config.toml << 'EOF' +[repository] +name = "my-monorepo" +type = "multi-package" + +[versioning] +strategy = "independent" + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "packages/cli" + +[[packages]] +name = "server" +type = "node-npm" +path = "packages/server" + +[scopes] +auto_detect = true + +[[scopes.mappings]] +pattern = "packages/cli/**/*" +scope = "cli" +package = "cli" + +[[scopes.mappings]] +pattern = "packages/server/**/*" +scope = "server" +package = "server" +EOF +``` + +### 2. Verify Setup + +```bash +committy config validate +committy packages list +``` + +### 3. 
Make Changes and Commit + +```bash +# Edit files +vim packages/cli/src/main.rs +vim packages/server/src/index.ts + +# Stage and commit +git add packages/ +committy commit +# Scopes auto-detected: cli, server +``` + +## Features + +### Package Detection + +Committy automatically detects packages in your repository: + +```bash +$ committy packages list --verbose + +Found 4 packages: + + cli (rust-cargo) + Path: packages/cli + Version: 1.2.0 + File: packages/cli/Cargo.toml + + server (node-npm) + Path: packages/server + Version: 2.1.0 + File: packages/server/package.json + + shared (rust-cargo) + Path: packages/shared + Version: 1.0.0 + File: packages/shared/Cargo.toml + Workspace: true + + utils (node-pnpm) + Path: packages/utils + Version: 0.5.0 + File: packages/utils/package.json +``` + +**Supported Package Managers:** + +- **Cargo** (Rust): Detects `Cargo.toml` with workspace support +- **npm** (Node.js): Detects `package.json` with workspaces +- **pnpm** (Node.js): Detects `pnpm-workspace.yaml` and `package.json` +- **yarn** (Node.js): Detects `package.json` with workspaces + +### Versioning Strategies + +#### Independent Versioning + +Each package maintains its own version independently. + +```toml +[versioning] +strategy = "independent" + +[[packages]] +name = "cli" +independent = true +# Version: 1.2.0 + +[[packages]] +name = "server" +independent = true +# Version: 2.0.1 +``` + +**Best for**: Packages that evolve at different rates. + +#### Unified Versioning + +All packages share the same version number. + +```toml +[versioning] +strategy = "unified" +unified_version = "1.0.0" + +[[packages]] +name = "cli" +# Version: 1.0.0 + +[[packages]] +name = "server" +# Version: 1.0.0 +``` + +**Best for**: Tightly coupled packages that should always be released together. + +#### Hybrid Versioning + +Mix of primary, synced, and independent packages. 
+ +```toml +[versioning] +strategy = "hybrid" + +[[packages]] +name = "core" +primary = true +# Drives version: 2.0.0 + +[[packages]] +name = "cli" +sync_with = "core" +# Syncs with core: 2.0.0 + +[[packages]] +name = "utils" +independent = true +# Independent: 1.5.0 +``` + +**Best for**: Complex monorepos with mixed coupling. + +### Scope Detection + +Committy automatically detects scopes based on changed files: + +```toml +[scopes] +auto_detect = true +allow_multiple_scopes = true + +[[scopes.mappings]] +pattern = "packages/cli/**/*" +scope = "cli" +package = "cli" + +[[scopes.mappings]] +pattern = "packages/server/**/*" +scope = "server" +package = "server" +``` + +**Example:** + +```bash +# Edit files in multiple packages +vim packages/cli/src/main.rs +vim packages/server/src/index.ts + +# Stage changes +git add packages/ + +# Commit +committy commit +``` + +Committy detects: +``` +Detected scopes: cli, server +``` + +Result: +``` +feat(cli,server): add health check endpoint + +- CLI: Add health check command +- Server: Implement /health endpoint +``` + +### Dependency Management + +Automatically update version references across files: + +```toml +[[dependencies]] +source = "cli" +description = "CLI version in HELM chart" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "cli.image.tag" +strategy = "auto" + +[[dependencies.targets]] +file = "Dockerfile" +field = "cli" +strategy = "auto" +``` + +**Supported File Types:** + +- **YAML**: HELM charts, configs (dot notation: `image.tag`) +- **JSON**: package.json, configs (dot notation: `dependencies.pkg`) +- **TOML**: Cargo.toml, configs (dot notation: `dependencies.lib`) +- **Dockerfile**: FROM/ARG patterns + +**Example:** + +When `cli` version changes from `1.0.0` to `1.1.0`: + +```yaml +# deploy/helm/values.yaml (before) +cli: + image: + tag: "1.0.0" + +# deploy/helm/values.yaml (after) +cli: + image: + tag: "1.1.0" # Auto-updated! 
+``` + +### Package Management Commands + +#### List Packages + +```bash +# Basic list +committy packages list + +# With details +committy packages list --verbose + +# JSON output +committy packages list --json +``` + +#### Check Status + +```bash +# Check package consistency +committy packages status +``` + +Output: +``` +Package Status: + cli: v1.2.0 ✓ + server: v2.0.1 ✓ + shared: v1.0.0 ✓ + +All packages are in sync +``` + +#### Sync Versions + +```bash +# Preview changes +committy packages sync --dry-run + +# Apply sync +committy packages sync +``` + +For hybrid strategy, this syncs packages with `sync_with` to their primary package. + +## Workflows + +### Basic Workflow + +```bash +# 1. Check status +committy packages status + +# 2. Make changes +vim packages/cli/src/main.rs + +# 3. Stage and commit +git add packages/cli/ +committy commit + +# 4. Verify +committy packages status +``` + +### Multi-Package Change + +```bash +# 1. Make changes to multiple packages +vim packages/cli/src/main.rs +vim packages/server/src/index.ts +vim packages/shared/src/lib.rs + +# 2. Stage all changes +git add packages/ + +# 3. Commit (scopes auto-detected) +committy commit +# Suggests: cli, server, shared + +# 4. Check if sync needed +committy packages status + +# 5. Sync if using hybrid strategy +committy packages sync +``` + +### Release Workflow + +```bash +# 1. Ensure clean state +git status +committy packages status + +# 2. Lint commits +committy lint + +# 3. Create tags (for each package or unified) +committy tag + +# 4. 
Push with tags +git push --follow-tags +``` + +## Configuration Examples + +### Rust Workspace + +```toml +[repository] +name = "rust-workspace" +type = "multi-package" + +[versioning] +strategy = "independent" + +[[packages]] +name = "core" +type = "rust-cargo" +path = "crates/core" + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "crates/cli" + +[[packages]] +name = "server" +type = "rust-cargo" +path = "crates/server" + +[scopes] +auto_detect = true + +[[scopes.mappings]] +pattern = "crates/core/**/*" +scope = "core" +package = "core" + +[[scopes.mappings]] +pattern = "crates/cli/**/*" +scope = "cli" +package = "cli" + +[[scopes.mappings]] +pattern = "crates/server/**/*" +scope = "server" +package = "server" +``` + +### Node.js Monorepo (pnpm) + +```toml +[repository] +name = "node-monorepo" +type = "multi-package" + +[versioning] +strategy = "unified" +unified_version = "1.0.0" + +[[packages]] +name = "@myorg/shared" +type = "node-pnpm" +path = "packages/shared" + +[[packages]] +name = "@myorg/web" +type = "node-pnpm" +path = "packages/web" + +[[packages]] +name = "@myorg/mobile" +type = "node-pnpm" +path = "packages/mobile" + +[scopes] +auto_detect = true + +[[scopes.mappings]] +pattern = "packages/shared/**/*" +scope = "shared" +package = "@myorg/shared" + +[[scopes.mappings]] +pattern = "packages/web/**/*" +scope = "web" +package = "@myorg/web" + +[[scopes.mappings]] +pattern = "packages/mobile/**/*" +scope = "mobile" +package = "@myorg/mobile" +``` + +### Mixed Language Monorepo + +```toml +[repository] +name = "fullstack" +type = "multi-package" + +[versioning] +strategy = "hybrid" + +[[packages]] +name = "api" +type = "rust-cargo" +path = "backend/api" +primary = true + +[[packages]] +name = "worker" +type = "rust-cargo" +path = "backend/worker" +sync_with = "api" + +[[packages]] +name = "web" +type = "node-npm" +path = "frontend/web" +sync_with = "api" + +[[packages]] +name = "shared" +type = "rust-cargo" +path = "shared" +independent = true + 
+[scopes] +auto_detect = true + +[[scopes.mappings]] +pattern = "backend/api/**/*" +scope = "api" +package = "api" + +[[scopes.mappings]] +pattern = "backend/worker/**/*" +scope = "worker" +package = "worker" + +[[scopes.mappings]] +pattern = "frontend/web/**/*" +scope = "web" +package = "web" + +[[scopes.mappings]] +pattern = "shared/**/*" +scope = "shared" +package = "shared" + +# Dependency management +[[dependencies]] +source = "api" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "api.image.tag" +strategy = "auto" + +[[dependencies]] +source = "web" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "web.image.tag" +strategy = "auto" +``` + +## Best Practices + +### 1. Use Descriptive Package Names + +```toml +# Good +name = "api-server" +name = "@myorg/shared-utils" + +# Avoid +name = "pkg1" +name = "temp" +``` + +### 2. Group Related Packages + +```toml +# Core packages +[[packages]] +name = "core" +path = "packages/core" + +[[packages]] +name = "shared" +path = "packages/shared" + +# Applications +[[packages]] +name = "cli" +path = "packages/cli" + +[[packages]] +name = "server" +path = "packages/server" +``` + +### 3. Document Scope Mappings + +```toml +[[scopes.mappings]] +pattern = "packages/cli/**/*" +scope = "cli" +package = "cli" +description = "CLI application - user-facing command-line tool" +``` + +### 4. Choose Appropriate Versioning Strategy + +- **Independent**: Loosely coupled packages +- **Unified**: Tightly coupled packages +- **Hybrid**: Mixed coupling + +### 5. Automate Dependency Updates + +```toml +[[dependencies.targets]] +strategy = "auto" # For CI/CD +# or +strategy = "prompt" # For manual review +``` + +### 6. 
Validate Configuration + +```bash +# After editing config +committy config validate +committy packages list +committy config show +``` + +## Troubleshooting + +### Packages Not Detected + +```bash +# Check detection +committy packages list --verbose + +# Verify package files exist +ls packages/*/Cargo.toml +ls packages/*/package.json + +# Check max depth +committy config show | grep max_depth +``` + +### Scope Not Auto-Detected + +```bash +# Check patterns +committy config show + +# Test with staged files +git add path/to/file +committy commit +``` + +### Version Sync Issues + +```bash +# Check status +committy packages status + +# Preview sync +committy packages sync --dry-run + +# Apply sync +committy packages sync +``` + +## Documentation + +- 📖 [User Guide](USER_GUIDE.md) - Complete usage guide +- 🚀 [Quick Start](QUICKSTART.md) - Get started in 5 minutes +- 💡 [Examples](EXAMPLES.md) - Real-world configurations +- 🔧 [Configuration Reference](CONFIGURATION.md) - All configuration options + +## Support + +- **Issues**: https://github.com/yourusername/committy/issues +- **Discussions**: https://github.com/yourusername/committy/discussions +- **Documentation**: https://github.com/yourusername/committy/docs + +--- + +**Happy committing! 🎉** diff --git a/docs/NATIVE_GIT_E2E.md b/docs/NATIVE_GIT_E2E.md new file mode 100644 index 0000000..8c0472f --- /dev/null +++ b/docs/NATIVE_GIT_E2E.md @@ -0,0 +1,38 @@ +# Native Git E2E Verification + +This repository includes a Docker-based smoke test for the native git integration paths that matter for signed commits, signed tags, and SSH identity selection. + +## What it verifies + +- `committy commit` creates a signed commit when git signing is enabled. +- `committy amend` and `committy commit --amend` use the same native git commit flow. +- `committy tag` creates a signed annotated tag when git tag signing is enabled. +- `committy tag --publish --confirm-publish` pushes through normal git and ssh resolution. 
+- Different repositories can use different SSH identities through normal `~/.ssh/config` host aliases. +- `committy tag --fetch` exercises the native git fetch path. + +## How to run it + +```bash +./scripts/verify_native_git_e2e.sh +``` + +The script builds a Docker image, compiles `committy`, starts a local `sshd`, generates: + +- one OpenPGP signing key +- two distinct SSH keys +- two distinct git users on the local ssh server + +It then runs real `committy` operations against those remotes and fails fast if signing or transport behavior does not work. + +## When to use it + +Run this before shipping changes that affect: + +- commit creation +- amend behavior +- tag creation +- remote fetch and push +- repository path handling +- signing support +- SSH transport behavior diff --git a/docs/QUICKSTART.md b/docs/QUICKSTART.md new file mode 100644 index 0000000..42d8b92 --- /dev/null +++ b/docs/QUICKSTART.md @@ -0,0 +1,462 @@ +# Committy Quick Start Guide + +Get up and running with Committy in 5 minutes! + +## Installation + +```bash +cargo install committy +``` + +## Single Package Repository + +### 1. Create Your First Commit + +```bash +# Stage your changes +git add . + +# Create a commit +committy commit +``` + +Follow the prompts: +- **Type**: Select `feat` for new features, `fix` for bug fixes +- **Scope**: (optional) Enter a scope like `api`, `ui`, `auth` +- **Message**: Brief description (e.g., "add user authentication") +- **Body**: (optional) Detailed description +- **Breaking**: (optional) Mark as breaking change + +Result: +``` +feat(auth): add user authentication + +Implements JWT-based authentication with refresh tokens +``` + +### 2. Create a Version Tag + +```bash +committy tag +``` + +This will: +- Analyze commits since last tag +- Suggest version bump (major/minor/patch) +- Create a new git tag + +## Multi-Package Repository (Monorepo) + +### 1. 
Initialize Configuration + +```bash +mkdir -p .committy +cat > .committy/config.toml << 'EOF' +[repository] +name = "my-monorepo" +type = "multi-package" + +[versioning] +strategy = "independent" + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "packages/cli" + +[[packages]] +name = "server" +type = "node-npm" +path = "packages/server" + +[scopes] +auto_detect = true + +[[scopes.mappings]] +pattern = "packages/cli/**/*" +scope = "cli" +package = "cli" + +[[scopes.mappings]] +pattern = "packages/server/**/*" +scope = "server" +package = "server" +EOF +``` + +### 2. Verify Setup + +```bash +# Validate configuration +committy config validate + +# List detected packages +committy packages list +``` + +Output: +``` +Found 2 packages: + cli (rust-cargo) at packages/cli - v1.0.0 + server (node-npm) at packages/server - v1.2.0 +``` + +### 3. Make Changes and Commit + +```bash +# Edit files in multiple packages +vim packages/cli/src/main.rs +vim packages/server/src/index.ts + +# Stage changes +git add packages/ + +# Commit (scopes auto-detected!) +committy commit +``` + +Committy will detect that you changed both `cli` and `server` and suggest: +``` +Detected scopes: cli, server +``` + +### 4. Check Package Status + +```bash +committy packages status +``` + +Output: +``` +Package Status: + cli: v1.0.0 ✓ + server: v1.2.0 ✓ +All packages are in sync +``` + +### 5. Sync Versions (if needed) + +```bash +# Preview changes +committy packages sync --dry-run + +# Apply sync +committy packages sync +``` + +## Common Workflows + +### Feature Development + +```bash +# 1. Create feature branch +git checkout -b feature/new-feature + +# 2. Make changes +vim src/feature.rs + +# 3. Commit with committy +git add src/feature.rs +committy commit +# Select: feat +# Scope: feature +# Message: add new feature + +# 4. Push +git push origin feature/new-feature +``` + +### Bug Fix + +```bash +# 1. Make fix +vim src/bug.rs + +# 2. 
Commit +git add src/bug.rs +committy commit +# Select: fix +# Scope: bug +# Message: resolve memory leak + +# 3. Tag patch version +committy tag +# Suggests: v1.0.1 (patch bump) +``` + +### Breaking Change + +```bash +# 1. Make breaking change +vim src/api.rs + +# 2. Commit with breaking flag +git add src/api.rs +committy commit --breaking +# Select: feat +# Message: redesign API + +# 3. Tag major version +committy tag +# Suggests: v2.0.0 (major bump) +``` + +## Configuration Templates + +### Rust Workspace + +```toml +[repository] +name = "rust-workspace" +type = "multi-package" + +[versioning] +strategy = "independent" + +[[packages]] +name = "core" +type = "rust-cargo" +path = "crates/core" + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "crates/cli" + +[scopes] +auto_detect = true + +[[scopes.mappings]] +pattern = "crates/core/**/*" +scope = "core" +package = "core" + +[[scopes.mappings]] +pattern = "crates/cli/**/*" +scope = "cli" +package = "cli" +``` + +### Node.js Monorepo (pnpm) + +```toml +[repository] +name = "node-monorepo" +type = "multi-package" + +[versioning] +strategy = "independent" + +[[packages]] +name = "@myorg/shared" +type = "node-pnpm" +path = "packages/shared" + +[[packages]] +name = "@myorg/web" +type = "node-pnpm" +path = "packages/web" + +[scopes] +auto_detect = true + +[[scopes.mappings]] +pattern = "packages/shared/**/*" +scope = "shared" +package = "@myorg/shared" + +[[scopes.mappings]] +pattern = "packages/web/**/*" +scope = "web" +package = "@myorg/web" +``` + +### Mixed Language Monorepo + +```toml +[repository] +name = "fullstack" +type = "multi-package" + +[versioning] +strategy = "hybrid" + +[[packages]] +name = "api" +type = "rust-cargo" +path = "backend/api" +primary = true + +[[packages]] +name = "web" +type = "node-npm" +path = "frontend/web" +sync_with = "api" + +[scopes] +auto_detect = true + +[[scopes.mappings]] +pattern = "backend/**/*" +scope = "api" +package = "api" + +[[scopes.mappings]] +pattern = 
"frontend/**/*" +scope = "web" +package = "web" +``` + +## Versioning Strategies + +### Independent (Default) + +Each package has its own version. + +```toml +[versioning] +strategy = "independent" +``` + +**Best for**: Loosely coupled packages + +### Unified + +All packages share one version. + +```toml +[versioning] +strategy = "unified" +unified_version = "1.0.0" +``` + +**Best for**: Tightly coupled packages + +### Hybrid + +Mix of primary, synced, and independent. + +```toml +[versioning] +strategy = "hybrid" + +[[packages]] +name = "core" +primary = true + +[[packages]] +name = "cli" +sync_with = "core" + +[[packages]] +name = "utils" +independent = true +``` + +**Best for**: Complex monorepos + +## Tips + +### 1. Auto-detect Scopes + +Enable automatic scope detection: + +```toml +[scopes] +auto_detect = true +``` + +Committy will suggest scopes based on changed files. + +### 2. Multiple Scopes + +Allow commits to span multiple packages: + +```toml +[scopes] +allow_multiple_scopes = true +scope_separator = "," +``` + +Result: `feat(cli,server): add feature` + +### 3. Validate Before Commit + +```bash +# Check what would be committed +committy lint + +# Validate configuration +committy config validate +``` + +### 4. Non-Interactive Mode + +For CI/CD: + +```bash +committy commit --non-interactive \ + --type feat \ + --scope api \ + --message "add endpoint" +``` + +### 5. 
JSON Output + +For scripting: + +```bash +committy tag --json --dry-run +committy packages list --json +``` + +## Troubleshooting + +### Packages Not Detected + +```bash +# Check detection +committy packages list --verbose + +# Verify package files exist +ls packages/*/Cargo.toml +ls packages/*/package.json +``` + +### Scope Not Auto-Detected + +```bash +# Check patterns +committy config show + +# Test with staged files +git add path/to/file +committy commit +``` + +### Version Sync Issues + +```bash +# Check status +committy packages status + +# Force sync +committy packages sync +``` + +## Next Steps + +- 📖 Read the [User Guide](USER_GUIDE.md) for detailed documentation +- 💡 Check [Examples](EXAMPLES.md) for real-world configurations +- 🔧 Review [Configuration Reference](CONFIGURATION.md) for all options +- 🚀 Set up CI/CD integration + +## Getting Help + +- **Issues**: https://github.com/yourusername/committy/issues +- **Discussions**: https://github.com/yourusername/committy/discussions +- **Documentation**: https://github.com/yourusername/committy/docs + +--- + +**Happy committing! 🎉** diff --git a/docs/USER_GUIDE.md b/docs/USER_GUIDE.md new file mode 100644 index 0000000..30aebb7 --- /dev/null +++ b/docs/USER_GUIDE.md @@ -0,0 +1,706 @@ +# Committy User Guide + +## Table of Contents + +1. [Introduction](#introduction) +2. [Installation](#installation) +3. [Getting Started](#getting-started) +4. [Multi-Package Repository Support](#multi-package-repository-support) +5. [Configuration](#configuration) +6. [Commands](#commands) +7. [Workflows](#workflows) +8. [Best Practices](#best-practices) + +## Introduction + +Committy is a powerful CLI tool for creating conventional commits with support for multi-package repositories (monorepos). It helps teams maintain consistent commit messages, manage versions across multiple packages, and automate dependency updates. 
+ +### Key Features + +- 🎯 **Conventional Commits**: Enforces conventional commit format +- 📦 **Multi-Package Support**: Manages Cargo, npm, pnpm, and yarn workspaces +- 🔄 **Version Management**: Independent, unified, or hybrid versioning strategies +- 🎨 **Scope Detection**: Automatic scope detection from changed files +- 🔗 **Dependency Management**: Automatic version updates across files +- 🤖 **AI Integration**: Optional AI-powered commit message generation +- ✅ **Validation**: Built-in linting and validation + +## Installation + +### From Source + +```bash +git clone https://github.com/yourusername/committy.git +cd committy +cargo install --path . +``` + +### Using Cargo + +```bash +cargo install committy +``` + +## Getting Started + +### Single Package Repository + +For a simple single-package repository, committy works out of the box: + +```bash +# Stage your changes +git add . + +# Create a commit +committy commit +``` + +You'll be prompted to: +1. Select a commit type (feat, fix, chore, etc.) +2. Enter a scope (optional) +3. Write a short description +4. Add a longer description (optional) +5. 
Mark as breaking change (optional) + +### Multi-Package Repository + +For monorepos with multiple packages, you'll want to set up configuration: + +```bash +# Initialize committy configuration +mkdir -p .committy +cat > .committy/config.toml << 'EOF' +[repository] +name = "my-monorepo" +type = "multi-package" + +[versioning] +strategy = "independent" + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "packages/cli" + +[[packages]] +name = "server" +type = "node-npm" +path = "packages/server" +EOF + +# Validate configuration +committy config validate + +# List detected packages +committy packages list +``` + +## Multi-Package Repository Support + +### Package Managers + +Committy supports multiple package managers: + +- **Cargo** (Rust): `Cargo.toml` with workspaces +- **npm** (Node.js): `package.json` with workspaces +- **pnpm** (Node.js): `pnpm-workspace.yaml` +- **yarn** (Node.js): `package.json` with workspaces + +### Versioning Strategies + +#### Independent Versioning + +Each package maintains its own version independently. + +```toml +[versioning] +strategy = "independent" + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "packages/cli" +independent = true + +[[packages]] +name = "server" +type = "node-npm" +path = "packages/server" +independent = true +``` + +**Use case**: Packages evolve at different rates, have different release cycles. + +#### Unified Versioning + +All packages share the same version number. + +```toml +[versioning] +strategy = "unified" +unified_version = "1.0.0" + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "packages/cli" + +[[packages]] +name = "server" +type = "node-npm" +path = "packages/server" +``` + +**Use case**: Tightly coupled packages that should always be released together. + +#### Hybrid Versioning + +Mix of primary, synced, and independent packages. 
+ +```toml +[versioning] +strategy = "hybrid" + +[[packages]] +name = "core" +type = "rust-cargo" +path = "packages/core" +primary = true + +[[packages]] +name = "cli" +type = "rust-cargo" +path = "packages/cli" +sync_with = "core" + +[[packages]] +name = "utils" +type = "rust-cargo" +path = "packages/utils" +independent = true +``` + +**Use case**: Core packages drive version, some packages sync, others independent. + +### Scope Detection + +Committy can automatically detect scopes based on changed files: + +```toml +[scopes] +auto_detect = true +require_scope_for_multi_package = true +allow_multiple_scopes = true +scope_separator = "," + +[[scopes.mappings]] +pattern = "packages/cli/**/*" +scope = "cli" +package = "cli" +description = "CLI application" + +[[scopes.mappings]] +pattern = "packages/server/**/*" +scope = "server" +package = "server" +description = "Server application" + +[[scopes.mappings]] +pattern = "docs/**/*" +scope = "docs" +package = "" +description = "Documentation" +``` + +When you commit, committy will: +1. Check which files are staged +2. Match them against patterns +3. Suggest appropriate scopes +4. 
Allow multiple scopes if changes span packages + +### Dependency Management + +Automatically update version references across files: + +```toml +[[dependencies]] +source = "cli" +description = "CLI version in HELM chart" + +[[dependencies.targets]] +file = "deploy/helm/values.yaml" +field = "image.tag" +strategy = "auto" + +[[dependencies.targets]] +file = "deploy/docker-compose.yml" +field = "services.cli.image" +strategy = "auto" + +[[dependencies]] +source = "server" + +[[dependencies.targets]] +file = "Dockerfile" +field = "server" +strategy = "auto" +``` + +Supported file types: +- **YAML**: HELM charts, configs (dot notation: `image.tag`) +- **JSON**: package.json, configs (dot notation: `dependencies.mypackage`) +- **TOML**: Cargo.toml, configs (dot notation: `dependencies.mylib`) +- **Dockerfile**: FROM/ARG patterns + +## Configuration + +### Repository Configuration + +Create `.committy/config.toml` in your repository root: + +```toml +[repository] +name = "my-project" +type = "multi-package" # or "single-package" +description = "My awesome project" + +[versioning] +strategy = "independent" # or "unified" or "hybrid" +unified_version = "1.0.0" # only for unified strategy + +[versioning.rules] +breaking_change_bumps_major = true +feat_bumps_minor = true +fix_bumps_patch = true + +[[packages]] +name = "package-name" +type = "rust-cargo" # or "node-npm", "node-pnpm", "node-yarn" +path = "path/to/package" +version_file = "Cargo.toml" # auto-detected +version_field = "package.version" # auto-detected +primary = false +sync_with = "" # for hybrid strategy +independent = true +workspace_member = false +description = "Package description" + +[scopes] +auto_detect = true +require_scope_for_multi_package = true +allow_multiple_scopes = true +scope_separator = "," + +[[scopes.mappings]] +pattern = "path/**/*" +scope = "scope-name" +package = "package-name" +description = "Description" + +[[dependencies]] +source = "package-name" +description = "Where this package 
version is referenced" + +[[dependencies.targets]] +file = "path/to/file.yaml" +field = "nested.field.path" +strategy = "auto" # or "prompt" or "manual" +format = "" # optional format string + +[commit_rules] +max_subject_length = 72 +max_body_line_length = 100 +require_body = false +allowed_types = [] # empty = all types allowed +custom_types = [] + +[[commit_rules.custom_types]] +name = "custom" +description = "Custom commit type" +``` + +### User Configuration + +User-level config at `~/.config/committy/config.toml`: + +```toml +last_update_check = "2024-01-01T00:00:00Z" +metrics_enabled = true +last_metrics_reminder = "2024-01-01T00:00:00Z" +user_id = "unique-id" + +# Regex patterns for version bump detection +major_regex = "BREAKING CHANGE:|!:" +minor_regex = "^feat" +patch_regex = "^fix" +``` + +## Commands + +### commit + +Create a new conventional commit. + +```bash +# Interactive mode +committy commit + +# Non-interactive mode +committy commit --non-interactive \ + --type feat \ + --scope api \ + --message "add user authentication" \ + --body "Implements JWT-based authentication" + +# With breaking change +committy commit --breaking + +# Skip hooks +committy commit --no-verify +``` + +### amend + +Amend the previous commit. + +```bash +committy amend +``` + +### tag + +Create a version tag. + +```bash +# Interactive mode +committy tag + +# With specific version +committy tag --version 1.2.3 + +# With message +committy tag --message "Release v1.2.3" + +# Dry run +committy tag --dry-run + +# JSON output +committy tag --json +``` + +### lint + +Check commits since last tag for conventional format. + +```bash +# Lint all commits since last tag +committy lint + +# JSON output +committy lint --json + +# Verbose output +committy lint --verbose +``` + +### lint-message + +Lint a single commit message. 
+ +```bash +# From text +committy lint-message --text "feat: add feature" + +# From file +committy lint-message --file commit-msg.txt + +# JSON output +committy lint-message --json +``` + +### config + +Manage repository configuration. + +```bash +# Show merged configuration +committy config show + +# Show as JSON +committy config show --json + +# Validate configuration +committy config validate + +# Validate with verbose output +committy config validate --verbose +``` + +### packages + +Manage packages in multi-package repositories. + +```bash +# List all packages +committy packages list + +# List with details +committy packages list --verbose + +# Check package status +committy packages status + +# Sync package versions +committy packages sync + +# Dry run sync +committy packages sync --dry-run +``` + +### branch + +Create a new branch. + +```bash +committy branch +``` + +### group-commit + +Group changes and create commits (with optional AI). + +```bash +# Plan commits +committy group-commit plan + +# Apply planned commits +committy group-commit apply + +# With AI +committy group-commit plan --ai +``` + +## Workflows + +### Basic Workflow + +```bash +# 1. Make changes +vim src/main.rs + +# 2. Stage changes +git add src/main.rs + +# 3. Create commit +committy commit + +# 4. Push +git push +``` + +### Multi-Package Workflow + +```bash +# 1. Check current package status +committy packages status + +# 2. Make changes to multiple packages +vim packages/cli/src/main.rs +vim packages/server/src/index.ts + +# 3. Stage changes +git add packages/ + +# 4. Commit with auto-detected scopes +committy commit +# Committy detects: cli, server + +# 5. Check if versions need syncing +committy packages status + +# 6. Sync versions if needed +committy packages sync + +# 7. Create tags +committy tag + +# 8. Push with tags +git push --follow-tags +``` + +### Release Workflow + +```bash +# 1. Ensure clean state +git status +committy packages status + +# 2. 
Lint commits since last release +committy lint + +# 3. Create release tag +committy tag --message "Release v1.2.0" + +# 4. Verify tag +git tag -l -n9 v1.2.0 + +# 5. Push release +git push origin main --follow-tags + +# 6. Create GitHub release (manual or CI) +``` + +### CI/CD Integration + +```yaml +# .github/workflows/commit-lint.yml +name: Commit Lint + +on: [pull_request] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Install committy + run: cargo install committy + + - name: Lint commits + run: committy lint --json +``` + +## Best Practices + +### Commit Messages + +✅ **Good:** +``` +feat(auth): add JWT authentication + +Implements JWT-based authentication with refresh tokens. +Includes middleware for protected routes. + +BREAKING CHANGE: Auth header format changed from Basic to Bearer +``` + +❌ **Bad:** +``` +updated stuff +``` + +### Scopes + +- Use **lowercase** scopes +- Keep scopes **short** and **descriptive** +- Use **package names** for multi-package repos +- Use **feature areas** for single-package repos + +Examples: +- `auth`, `api`, `ui`, `docs` +- `cli`, `server`, `shared` + +### Multi-Package Commits + +When changes span multiple packages: + +``` +feat(cli,server): add health check endpoint + +- CLI: Add health check command +- Server: Implement /health endpoint +``` + +### Version Management + +1. **Independent**: Use for loosely coupled packages +2. **Unified**: Use for tightly coupled packages +3. **Hybrid**: Use for mixed coupling scenarios + +### Configuration + +1. **Commit** `.committy/config.toml` to repository +2. **Don't commit** user config (`~/.config/committy/config.toml`) +3. **Validate** config after changes: `committy config validate` +4. **Document** custom scopes and patterns in README + +### Dependency Updates + +1. **Define** all version references in config +2. **Use** `auto` strategy for CI/CD +3. **Use** `prompt` strategy for manual review +4. 
**Test** after dependency updates + +## Troubleshooting + +### Committy doesn't detect my packages + +1. Check package manager files exist: + - Cargo: `Cargo.toml` + - npm: `package.json` with `workspaces` + - pnpm: `pnpm-workspace.yaml` + - yarn: `package.json` with `workspaces` + +2. Verify paths in config: + ```bash + committy packages list --verbose + ``` + +3. Check max depth (default: 3): + ```toml + [repository] + max_depth = 5 + ``` + +### Scope detection not working + +1. Verify patterns in config: + ```bash + committy config show + ``` + +2. Check file paths match patterns: + ```toml + [[scopes.mappings]] + pattern = "packages/cli/**/*" # Matches all files in packages/cli/ + scope = "cli" + ``` + +3. Test with staged files: + ```bash + git add packages/cli/src/main.rs + committy commit # Should suggest "cli" scope + ``` + +### Version sync issues + +1. Check versioning strategy: + ```bash + committy config show | grep strategy + ``` + +2. Verify package configuration: + ```bash + committy packages status + ``` + +3. Manually sync: + ```bash + committy packages sync --dry-run # Preview + committy packages sync # Apply + ``` + +## Getting Help + +- **Documentation**: https://github.com/yourusername/committy/docs +- **Issues**: https://github.com/yourusername/committy/issues +- **Discussions**: https://github.com/yourusername/committy/discussions + +## Next Steps + +- Read [Configuration Guide](CONFIGURATION.md) for detailed config options +- Check [Examples](examples/) for real-world configurations +- Review [API Documentation](API.md) for programmatic usage diff --git a/docs/src/content/docs/index.mdx b/docs/src/content/docs/index.mdx index 0204804..7e090ac 100644 --- a/docs/src/content/docs/index.mdx +++ b/docs/src/content/docs/index.mdx @@ -43,10 +43,10 @@ import { Card, CardGrid } from '@astrojs/starlight/components'; [Read the guide](/reference/group-commit/) - - Use Committy via Model Context Protocol tools. 
+ + Use dry-run JSON previews and confirmation gates from system prompts or skills. - [Explore MCP tools](/reference/mcp/) + [Read the recipes](/reference/agent-workflows/) Configure AI providers, models, and privacy safeguards. diff --git a/docs/src/content/docs/reference/agent-workflows.mdx b/docs/src/content/docs/reference/agent-workflows.mdx new file mode 100644 index 0000000..f645e07 --- /dev/null +++ b/docs/src/content/docs/reference/agent-workflows.mdx @@ -0,0 +1,120 @@ +--- +title: Agent Workflows +description: Prompt-first recipes for AI coding agents using Committy +sidebar: + label: agent workflows + order: 55 +--- + +## Overview + +Committy can be embedded directly in system prompts or skills when you want an agent to manage branches, commits, and history quality without scraping human-oriented terminal text. + +Preferred sequence: + +1. Run a `--dry-run --output json` preview. +2. Inspect the JSON payload. +3. Rerun without `--dry-run` only if the plan looks correct. +4. Require explicit confirmation for any remote mutation. +5. Use `--repo-path` when the agent should operate on a checkout other than its current shell directory. + +## Branch recipe + +Preview a branch creation: + +```bash +committy --non-interactive branch \ + --type feat \ + --ticket AI42 \ + --subject "agent hardening" \ + --dry-run \ + --output json +``` + +Apply it: + +```bash +committy --non-interactive branch \ + --type feat \ + --ticket AI42 \ + --subject "agent hardening" \ + --output json +``` + +Suggested prompt snippet: + +```text +Use Committy for branch creation. Prefer structured flags: `committy --non-interactive branch --type --ticket --subject --dry-run --output json`. +If `ok` is true and `branch_name` matches the intended scope, rerun without `--dry-run`. +Do not create the branch if the JSON payload contains `errors`. 
+``` + +## Single-commit recipe + +Preview a conventional commit: + +```bash +committy --non-interactive commit \ + --type feat \ + --scope agent \ + --message "add prompt-first branch planning" \ + --dry-run \ + --output json +``` + +Apply it: + +```bash +committy --non-interactive commit \ + --type feat \ + --scope agent \ + --message "add prompt-first branch planning" \ + --output json +``` + +Suggested prompt snippet: + +```text +Use Committy to compose commits. Always preview with `--dry-run --output json` first. +Inspect `message`, `commit_type`, `scope`, and `workflow`. +Only create the commit if `ok` is true and the planned message matches the diff. +``` + +## Log-quality recipe + +Audit commits before a push or release: + +```bash +committy --non-interactive lint --repo-path . --output json +``` + +Preview grouped commits for dirty worktrees: + +```bash +committy --non-interactive group-commit --mode plan --output json +``` + +Publish only with explicit confirmation: + +```bash +committy --non-interactive group-commit --mode apply --push --confirm-push --output json +committy --non-interactive tag --publish --confirm-publish --output json +``` + +Suggested prompt snippet: + +```text +Before any push or release, run `committy lint --output json`. +If lint reports issues, stop and summarize them. +For grouped commits or tags, require explicit confirmation flags before any remote push. +``` + +## JSON fields to rely on + +- `command`: the command that produced the payload. +- `ok`: whether the command-level operation succeeded. +- `dry_run`: whether the payload is only a preview. +- `errors`: command-level errors or warnings when present. +- `mode`: present on multi-mode commands such as `group-commit`. + +Command-specific fields remain stable on top of that envelope. 
diff --git a/docs/src/content/docs/reference/ai-flags.mdx b/docs/src/content/docs/reference/ai-flags.mdx index bff0e22..f290cf4 100644 --- a/docs/src/content/docs/reference/ai-flags.mdx +++ b/docs/src/content/docs/reference/ai-flags.mdx @@ -48,10 +48,6 @@ committy --non-interactive group-commit --mode plan --output json \ - AI suggestions are validated by Committy's linter. If invalid or timed out, Committy falls back to safe default messages per group. - JSON parsing is strict; malformed outputs are rejected and replaced with defaults. -## MCP integration - -When using the MCP server, equivalent fields are forwarded to the Rust CLI. See the [MCP server](/reference/mcp/) page for tools, inputs, and examples. - ## Provider examples ### OpenRouter diff --git a/docs/src/content/docs/reference/amend.mdx b/docs/src/content/docs/reference/amend.mdx index a5276e9..d199d2a 100644 --- a/docs/src/content/docs/reference/amend.mdx +++ b/docs/src/content/docs/reference/amend.mdx @@ -5,7 +5,7 @@ description: How to amend a commit ## Overview -Interactively amend the previous commit using conventional commit prompts. This command is interactive-only. +Amend the previous commit using conventional commit prompts or non-interactive flags. Amends the previous commit message and optionally content. ## Usage @@ -13,22 +13,38 @@ Amends the previous commit message and optionally content. ```bash # Interactive prompts for type, breaking-change, scope, short and long messages committy amend + +# Non-interactive amend without staged changes +committy --non-interactive amend \ + --repo-path /path/to/repo \ + --type fix \ + --message "adjust release note wording" + +# Agent preview +committy --non-interactive amend \ + --type fix \ + --message "adjust release note wording" \ + --dry-run \ + --output json ``` ## Behavior -- __Interactive only__: running with `--non-interactive` returns an error. -- Prompts for: type, breaking change, scope, short message, and long message. 
+- Prompts for: type, breaking change, scope, short message, and long message when flags are omitted interactively.
+- Non-interactive mode requires `--type` and `--message`.
 - Formats final message as `<type>(<scope>)!: <short>\n\n<long>` as applicable, then amends the last commit.
 - __No staged changes required__: you can amend just the message. If there are staged changes, they will be included.
 - Uses the current index (if any) for the amended commit tree.
-- For non-interactive amend, use `committy commit --amend` with flags; note that path __requires staged changes__.
+- `--dry-run --output json` returns `command`, `ok`, `dry_run`, `message`, `commit_type`, `scope`, `breaking_change`, and `errors`.
+- `--repo-path` lets agents amend a repository without changing the shell working directory.
+- Repo `commit_rules` are enforced the same way as `commit` and `lint`.
 
 ## Exit codes
 
 - __0__: success
-- __1__: generic error (e.g., non-interactive usage, git errors)
+- __1__: generic error (e.g., git errors)
+- __3__: lint issues found
 
 ## Notes
 
-- Use regular `committy commit` for new commits. Use `--amend` when you only want to adjust the previous commit message/content.
\ No newline at end of file
+- Use regular `committy commit` for new commits. `committy amend` and `committy commit --amend` now share the same amend behavior.
diff --git a/docs/src/content/docs/reference/commit.mdx b/docs/src/content/docs/reference/commit.mdx
index 2c4c8f2..e954137 100644
--- a/docs/src/content/docs/reference/commit.mdx
+++ b/docs/src/content/docs/reference/commit.mdx
@@ -9,6 +9,8 @@ Create a conventional commit for your staged changes. Interactive by default; su
 
 Requires staged changes. If no files are staged, the CLI returns an error.
 
+For agent workflows, prefer `--dry-run --output json` first, then rerun without `--dry-run` once the payload looks correct.
+
 ## Usage
 
 ```bash
@@ -26,8 +28,19 @@ committy --non-interactive commit \
 # Amend previous commit (interactive, message-only allowed)
 committy amend
 
-# Amend previous commit via commit flags (requires staged changes)
+# Amend previous commit via amend flags (no staged changes required)
+committy --non-interactive amend --type fix --message "adjust release note wording"
+
+# Amend previous commit via commit flags (message-only allowed)
 committy commit --amend
+
+# Agent preview
+committy --non-interactive commit \
+  --type feat \
+  --scope web \
+  --message "add login flow" \
+  --dry-run \
+  --output json
 ```
 
 ## Flags
@@ -53,13 +66,23 @@ committy commit --amend
 - __--amend__
   Amend the previous commit using the composed message.
 
+- __--dry-run__
+  Resolve the commit message and workflow side effects without creating a git commit.
+
+- __--output__ `<format>`
+  Output format. Use `json` for agent/system-prompt integrations.
+
+- __--repo-path__ `<path>`
+  Resolve the target repository without changing the process working directory.
+
 ## Behavior
 
-- Validates git config first; errors if misconfigured.
-- Errors if no staged changes.
+- Errors if no staged changes, unless `--amend` is used.
+- Validates git config before mutating git state.
 - Non-interactive mode requires `--type` and `--message`.
 - Message is formatted as `<type>(<scope>)!: <short>\n\n<long>` depending on provided options.
-- To amend the last commit without staged changes, use `committy amend` (interactive). Using `committy commit --amend` requires staged changes.
+- `--dry-run --output json` returns `command`, `ok`, `dry_run`, `message`, `commit_type`, `scope`, `breaking_change`, and an optional `workflow` preview.
+- `committy amend` and `committy commit --amend` now use the same validation and output flow, including message-only amend when nothing is staged.
## Examples diff --git a/docs/src/content/docs/reference/group-commit.mdx b/docs/src/content/docs/reference/group-commit.mdx index cedddc8..e0b5837 100644 --- a/docs/src/content/docs/reference/group-commit.mdx +++ b/docs/src/content/docs/reference/group-commit.mdx @@ -21,12 +21,13 @@ committy --non-interactive group-commit --mode plan --output json [--include-uns ```bash committy --non-interactive group-commit --mode apply --output json \ - [--include-unstaged] [--auto-stage] [--push] + [--include-unstaged] [--auto-stage] [--push --confirm-push] ``` - Creates one commit per group using validated messages. - `--auto-stage`: stages only the files of the current group before committing. - `--push`: push commits after creation. +- `--confirm-push`: required whenever `--push` is used. ## Grouping categories @@ -53,6 +54,7 @@ Plan result (shape): "command": "group-commit", "mode": "plan", "ok": true, + "dry_run": true, "groups": [ {"name": "docs", "commit_type": "docs", "files": ["README.md"], "suggested_message": "docs: update README"} ], @@ -67,6 +69,7 @@ Apply result (shape): "command": "group-commit", "mode": "apply", "ok": true, + "dry_run": false, "groups": [...], "commits": [ {"group": "docs", "message": "docs: update README", "ok": true, "sha": ""} diff --git a/docs/src/content/docs/reference/mcp.mdx b/docs/src/content/docs/reference/mcp.mdx deleted file mode 100644 index 33731a0..0000000 --- a/docs/src/content/docs/reference/mcp.mdx +++ /dev/null @@ -1,129 +0,0 @@ ---- -title: MCP server -description: Use Committy via Model Context Protocol tools -sidebar: - label: MCP server - order: 65 ---- - -## Overview - -The Committy MCP server is a Node.js/TypeScript stdio server that wraps the Rust `committy` CLI and exposes tools for linting, tagging, formatting conventional commits, and grouping/applying changes (optionally with AI). 
- -- Package path: `mcp-server-committy/` -- Server type: stdio (started by an MCP client) - -## Requirements - -- Node.js 18+ -- The Rust `committy` binary available on PATH as `committy`, or set `COMMITTY_BIN` to its absolute path. - -## Install and run - -```bash -# In mcp-server-committy/ -npm ci -npm run build - -# Development (watch mode) -npm run dev - -# Start (built) -npm start -# or -node dist/index.js -``` - -Optional: link a local CLI binary for convenience. - -```bash -npm link -mcp-server-committy # stdio server -``` - -If `committy` isn’t on PATH, set: - -```bash -export COMMITTY_BIN=/absolute/path/to/committy -``` - -## Configure your MCP client - -- Configure your MCP client to launch the stdio server command in `mcp-server-committy/`: - - `mcp-server-committy` (after `npm link`) or `node dist/index.js` -- Environment variables to consider: - - `COMMITTY_BIN` — absolute path to the Rust CLI if not on PATH - - For AI usage, provide provider keys via an env var named by the tool input `ai_api_key_env` (e.g., `OPENROUTER_API_KEY`). Set this on the MCP server process. - -## Tools - -- mcp0_lint_repo_since_last_tag - - Input: `{ repo_path: string }` - - Lints commits since the last tag. - -- mcp0_lint_message - - Input: `{ message: string }` - - Lints a single commit message. - -- mcp0_compute_next_tag - - Input: `{ repo_path: string, fetch?: boolean, prerelease?: boolean, prerelease_suffix?: string, release_branches?: string[] }` - - Computes the next tag without mutating the repo. - -- mcp0_apply_tag - - Input: `{ repo_path: string, name?: string, fetch?: boolean, prerelease?: boolean, prerelease_suffix?: string, release_branches?: string[], bump_files?: boolean, tag_message?: string, confirm_push: boolean }` - - Creates/pushes a tag. Requires `confirm_push: true`. - -- mcp0_format_message - - Input: `{ commit_type: string, short: string, scope?: string, long?: string, breaking?: boolean }` - - Returns a conventional commit message string. 
- -- mcp0_generate_guidelines - - Input: `{ repo_path: string, additional_files?: string[], max_bytes?: number }` - - Reads and summarizes repo guidelines (README, CONTRIBUTING, changelog config). - -- mcp0_group_commit_plan - - Input: `{ repo_path: string, include_unstaged?: boolean, ai?: boolean, ai_provider?: "openrouter"|"ollama", ai_model?: string, ai_api_key_env?: string, ai_base_url?: string, ai_max_tokens?: number, ai_temperature?: number, ai_timeout_ms?: number, no_ai_json_mode?: boolean, ai_system_prompt?: string, ai_system_prompt_file?: string, ai_file_limit?: number, ai_allow_sensitive?: boolean }` - - Plans grouped commits and returns JSON plan: `{ command: "group-commit", mode: "plan", ok, groups, errors? }`. - -- mcp0_group_commit_apply - - Same inputs as plan plus apply behavior; executes grouped commits and optionally pushes. - - Returns `{ command: "group-commit", mode: "apply", ok, groups, commits, pushed?, errors? }`. - -## AI flags & security - -- AI-related inputs mirror the Rust CLI `group-commit` flags. See [AI flags & security](/reference/ai-flags/). -- By default, sensitive file content is not sent. Only set `ai_allow_sensitive=true` if you accept the risk. -- Provide API keys via an env var set on the MCP server process, referenced by `ai_api_key_env`. - -## Examples - -- Lint repo since last tag: - -```json -{ - "tool": "mcp0_lint_repo_since_last_tag", - "params": { "repo_path": "/path/to/repo" } -} -``` - -- Group-commit plan with OpenRouter: - -```json -{ - "tool": "mcp0_group_commit_plan", - "params": { - "repo_path": "/path/to/repo", - "ai": true, - "ai_provider": "openrouter", - "ai_model": "openrouter/anthropic/claude-3.5-sonnet", - "ai_api_key_env": "OPENROUTER_API_KEY", - "ai_max_tokens": 600, - "ai_temperature": 0.2 - } -} -``` - -## Troubleshooting - -- "committy not found": set `COMMITTY_BIN` to the Rust CLI path. -- Tool outputs are JSON strings in `text` content; your client should parse the JSON. 
diff --git a/docs/src/content/docs/reference/tag.mdx b/docs/src/content/docs/reference/tag.mdx index 31fd4d2..4018418 100644 --- a/docs/src/content/docs/reference/tag.mdx +++ b/docs/src/content/docs/reference/tag.mdx @@ -5,14 +5,14 @@ description: How the tag works ## Overview -Create and push a new semantic version tag based on commit history. Supports explicit tag names or automatic calculation with rich options. +Create a new semantic version tag based on commit history. Remote publishing is explicit and requires confirmation. Rejects execution when there are staged changes. ## Usage ```bash -# Interactive: confirm and create next tag +# Interactive: confirm and create next tag locally committy tag # Non-interactive: auto-calculate and act based on flags @@ -21,6 +21,9 @@ committy --non-interactive tag --fetch --default-bump minor # Create a tag with an explicit name (skips calculation) committy tag --name v1.2.3 +# Publish a tag to origin (explicit + confirmed) +committy --non-interactive tag --publish --confirm-publish + # Pre-release flow committy --non-interactive tag --prerelease --prerelease-suffix beta @@ -58,15 +61,20 @@ committy --non-interactive tag --bump-files - __--force-without-change__ - __--tag-message__ `` - __--not-publish__ +- __--publish__ + Request publishing to the remote after local tag creation. +- __--confirm-publish__ + Required together with `--publish` before any remote push is attempted. - __--fetch__ - __--no-fetch__ ## Behavior - Errors if there are staged changes. -- When `--name` is provided: creates and (unless `--not-publish`) pushes that tag immediately. -- Non-interactive without `--name`: calculates new tag from commit log using regex rules in config and options above, then creates/pushes it. -- `--bump-files`: updates common version files, commits them (message: `chore: bump version to `), and pushes commit (unless `--not-publish`). +- When `--name` is provided: creates that tag locally. 
+- Non-interactive without `--name`: calculates the new tag from commit log using regex rules in config and options above, then creates it locally. +- `--publish --confirm-publish` is required before the tag or any version-bump commit is pushed to `origin`. +- `--bump-files`: updates common version files and commits them (message: `chore: bump version to `). Remote push still requires `--publish --confirm-publish`. - Fetch behavior: default is fetch unless `--no-fetch`; `--fetch` forces fetch. ## JSON output @@ -74,7 +82,13 @@ committy --non-interactive tag --bump-files - With `--name`: ```json -{ "ok": true, "new_tag": "v1.2.3" } +{ + "command": "tag", + "ok": true, + "dry_run": false, + "new_tag": "v1.2.3", + "published": false +} ``` - Calculated tag (interactive or non-interactive when `--output json`): @@ -82,9 +96,12 @@ committy --non-interactive tag --bump-files ```json { "ok": true, + "command": "tag", + "dry_run": true, "old_tag": "v1.2.2", "new_tag": "v1.2.3", - "pre_release": false + "pre_release": false, + "published": false } ``` @@ -104,3 +121,7 @@ committy --non-interactive tag --prerelease --prerelease-suffix rc # Tag without leading 'v' committy --non-interactive tag --not-with-v + +# Publish after preview +committy --non-interactive tag --dry-run --output json +committy --non-interactive tag --publish --confirm-publish --output json diff --git a/mcp-server-committy/.gitignore b/mcp-server-committy/.gitignore deleted file mode 100644 index de4d1f0..0000000 --- a/mcp-server-committy/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -dist -node_modules diff --git a/mcp-server-committy/.npmignore b/mcp-server-committy/.npmignore deleted file mode 100644 index 088d339..0000000 --- a/mcp-server-committy/.npmignore +++ /dev/null @@ -1,5 +0,0 @@ -scripts -scripts -node_modules -.gitignore -tsconfig.json \ No newline at end of file diff --git a/mcp-server-committy/LICENSE b/mcp-server-committy/LICENSE deleted file mode 100644 index 08699b5..0000000 --- 
a/mcp-server-committy/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. 
Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative 
Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/mcp-server-committy/README.md b/mcp-server-committy/README.md deleted file mode 100644 index add54a7..0000000 --- a/mcp-server-committy/README.md +++ /dev/null @@ -1,126 +0,0 @@ -# MCP Server: Committy Wrapper - -A Node.js/TypeScript MCP server that wraps the Rust `committy` CLI to provide conventional-commit linting and release tooling via MCP tools. - -## Requirements - -- Node.js 18+ -- Rust `committy` CLI installed and available as `committy` in PATH, or set `COMMITTY_BIN` to its absolute path. - -## Install / Build - -```bash -# In mcp-server-committy/ -npm ci -npm run build -``` - -## Development - -```bash -# Start in watch mode (stdio server) -npm run dev -``` - -This starts an MCP stdio server and waits for an MCP client to connect. 
- -## Local CLI (npm link) - -You can link the package locally to get the CLI binary in your PATH: - -```bash -# In mcp-server-committy/ -npm link - -# Now you can run the stdio MCP server directly -mcp-server-committy -``` - -Note: This is a stdio server intended to be started by an MCP client. - -## Run (built) - -```bash -# After build -npm start -# or -node dist/index.js -``` - -If the `committy` binary is not available, set: - -```bash -export COMMITTY_BIN=/absolute/path/to/committy -``` - -## Tools - -- mcp0_lint_repo_since_last_tag - - Input: `{ repo_path: string }` - - Lints repository commits since the last tag. - -- mcp0_lint_message - - Input: `{ message: string }` - - Lints a single commit message. - -- mcp0_compute_next_tag - - Input: `{ repo_path: string, fetch?: boolean, prerelease?: boolean, prerelease_suffix?: string, release_branches?: string[] }` - - Computes the next tag without mutating the repo. - -- mcp0_apply_tag - - Input: `{ repo_path: string, name?: string, fetch?: boolean, prerelease?: boolean, prerelease_suffix?: string, release_branches?: string[], bump_files?: boolean, tag_message?: string, confirm_push: boolean }` - - Creates/pushes a tag. Refuses unless `confirm_push` is `true`. - -- mcp0_format_message - - Input: `{ commit_type: string, short: string, scope?: string, long?: string, breaking?: boolean }` - - Returns a conventional commit message from parts. - -- mcp0_generate_guidelines - - Input: `{ repo_path: string, additional_files?: string[], max_bytes?: number }` - - Reads `README.md`, `CONTRIBUTING.md`, and changelog config if present. 
- -- mcp0_commit_grouped_changes - - Input: `{ repo_path: string, dry_run?: boolean, include_unstaged?: boolean, auto_stage?: boolean, push?: boolean, signoff?: boolean, confirm?: boolean, group_overrides?: { [group]: { commit_type?: string, scope?: string, short?: string, long?: string } } }` - - Analyzes pending changes, groups by kind (docs/tests/ci/deps/build/chore/code), returns a plan with suggested conventional commit messages. When `confirm=true` (and `dry_run=false`), stages per group and creates one commit per group. If `push=true`, pushes after committing. - -- mcp0_group_commit_plan - - Input: `{ repo_path: string, include_unstaged?: boolean, ai?: boolean, ai_provider?: "openrouter"|"ollama", ai_model?: string, ai_api_key_env?: string, ai_base_url?: string, ai_max_tokens?: number, ai_temperature?: number, ai_timeout_ms?: number, no_ai_json_mode?: boolean, ai_system_prompt?: string, ai_system_prompt_file?: string, ai_file_limit?: number, ai_allow_sensitive?: boolean }` - - Wraps Rust `committy group-commit --mode plan --output json`. Returns `{ command: "group-commit", mode: "plan", ok, groups, errors? }`. - -- mcp0_group_commit_apply - - Input: `{ repo_path: string, include_unstaged?: boolean, auto_stage?: boolean, push?: boolean, ai?: boolean, ai_provider?: "openrouter"|"ollama", ai_model?: string, ai_api_key_env?: string, ai_base_url?: string, ai_max_tokens?: number, ai_temperature?: number, ai_timeout_ms?: number, no_ai_json_mode?: boolean, ai_system_prompt?: string, ai_system_prompt_file?: string, ai_file_limit?: number, ai_allow_sensitive?: boolean }` - - Wraps Rust `committy group-commit --mode apply --output json` and returns `{ command: "group-commit", mode: "apply", ok, groups, commits, pushed?, errors? }`. 
- -## Testing - -A simple Node-based test script validates behavior without extra deps: - -```bash -npm test -``` - -This checks: -- `formatMessage()` formatting -- Friendly error when `committy` binary is missing -- `generateGuidelines()` reads common repo files -- `computeNextTag()` and `applyTag()` success flows using a fake CLI script -- `groupCommitPlan()` and `groupCommitApply()` success flows using a fake CLI script - -For internal tests, a fake CLI is used by setting: - -```bash -export COMMITTY_BIN=node -export COMMITTY_SCRIPT=$(pwd)/scripts/fake-committy.mjs -``` - -## AI flags & security - -- These tools pass AI-related inputs through to the Rust CLI. See `group-commit` flags in the Rust README. -- Key options: `ai`, `ai_provider`, `ai_model`, `ai_api_key_env`, `ai_base_url`, `ai_max_tokens`, `ai_temperature`, `ai_timeout_ms`, `no_ai_json_mode`, `ai_system_prompt`, `ai_system_prompt_file`, `ai_file_limit`, `ai_allow_sensitive`. -- Security: by default, sensitive file contents are not sent. Set `ai_allow_sensitive=true` only if you understand the risks. -- API keys: provide via environment variable named by `ai_api_key_env` on the MCP server process. - -## Notes - -- Returns tool outputs as `text` (JSON string) to conform with the SDK content types. -- CLI binary name: `mcp-server-committy` (after `npm link`). 
diff --git a/mcp-server-committy/package-lock.json b/mcp-server-committy/package-lock.json deleted file mode 100644 index bfed61f..0000000 --- a/mcp-server-committy/package-lock.json +++ /dev/null @@ -1,1687 +0,0 @@ -{ - "name": "mcp-server-committy", - "version": "0.1.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "mcp-server-committy", - "version": "0.1.0", - "license": "Apache-2.0", - "dependencies": { - "@modelcontextprotocol/sdk": "^1.17.3", - "ajv": "^8.17.1", - "yaml": "^2.5.1", - "zod": "^3.23.8" - }, - "bin": { - "mcp-server-committy": "dist/index.js" - }, - "devDependencies": { - "@types/node": "^22.5.0", - "tsx": "^4.19.2", - "typescript": "^5.6.3" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.9.tgz", - "integrity": "sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.9.tgz", - "integrity": "sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.9.tgz", - "integrity": "sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - 
"node_modules/@esbuild/android-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.9.tgz", - "integrity": "sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.9.tgz", - "integrity": "sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.9.tgz", - "integrity": "sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.9.tgz", - "integrity": "sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.9.tgz", - "integrity": "sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - 
"optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.9.tgz", - "integrity": "sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==", - "cpu": [ - "arm" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.9.tgz", - "integrity": "sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.9.tgz", - "integrity": "sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.9.tgz", - "integrity": "sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==", - "cpu": [ - "loong64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.9.tgz", - "integrity": 
"sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==", - "cpu": [ - "mips64el" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.9.tgz", - "integrity": "sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==", - "cpu": [ - "ppc64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.9.tgz", - "integrity": "sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==", - "cpu": [ - "riscv64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.9.tgz", - "integrity": "sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==", - "cpu": [ - "s390x" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.9.tgz", - "integrity": "sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.9", - "resolved": 
"https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.9.tgz", - "integrity": "sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.9.tgz", - "integrity": "sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.9.tgz", - "integrity": "sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.9.tgz", - "integrity": "sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openharmony-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.9.tgz", - "integrity": "sha512-4Xd0xNiMVXKh6Fa7HEJQbrpP3m3DDn43jKxMjxLLRjWnRsfxjORYJlXPO4JNcXtOyfajXorRKY9NkOpTHptErg==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ], - "engines": 
{ - "node": ">=18" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.9.tgz", - "integrity": "sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.9.tgz", - "integrity": "sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.9.tgz", - "integrity": "sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==", - "cpu": [ - "ia32" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.9.tgz", - "integrity": "sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@modelcontextprotocol/sdk": { - "version": "1.17.3", - "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.3.tgz", - "integrity": "sha512-JPwUKWSsbzx+DLFznf/QZ32Qa+ptfbUlHhRLrBQBAFu9iI1iYvizM4p+zhhRDceSsPutXp4z+R/HPVphlIiclg==", - "license": "MIT", - "dependencies": { - "ajv": "^6.12.6", - 
"content-type": "^1.0.5", - "cors": "^2.8.5", - "cross-spawn": "^7.0.5", - "eventsource": "^3.0.2", - "eventsource-parser": "^3.0.0", - "express": "^5.0.1", - "express-rate-limit": "^7.5.0", - "pkce-challenge": "^5.0.0", - "raw-body": "^3.0.0", - "zod": "^3.23.8", - "zod-to-json-schema": "^3.24.1" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "license": "MIT" - }, - "node_modules/@types/node": { - "version": "22.17.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.17.2.tgz", - "integrity": "sha512-gL6z5N9Jm9mhY+U2KXZpteb+09zyffliRkZyZOHODGATyC5B1Jt/7TzuuiLkFsSUMLbS1OLmlj/E+/3KF4Q/4w==", - "dev": true, - "license": "MIT", - "dependencies": { - "undici-types": "~6.21.0" - } - }, - "node_modules/accepts": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", - "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", - "license": "MIT", - "dependencies": { - "mime-types": "^3.0.0", - "negotiator": "^1.0.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/ajv": { - "version": "8.17.1", - "resolved": 
"https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/body-parser": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", - "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", - "license": "MIT", - "dependencies": { - "bytes": "^3.1.2", - "content-type": "^1.0.5", - "debug": "^4.4.0", - "http-errors": "^2.0.0", - "iconv-lite": "^0.6.3", - "on-finished": "^2.4.1", - "qs": "^6.14.0", - "raw-body": "^3.0.0", - "type-is": "^2.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/call-bound": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", - "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", - "license": "MIT", - "dependencies": { - 
"call-bind-apply-helpers": "^1.0.2", - "get-intrinsic": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/content-disposition": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", - "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", - "license": "MIT", - "dependencies": { - "safe-buffer": "5.2.1" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/content-type": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", - "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/cookie": { - "version": "0.7.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", - "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/cookie-signature": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", - "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", - "license": "MIT", - "engines": { - "node": ">=6.6.0" - } - }, - "node_modules/cors": { - "version": "2.8.5", - "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", - "integrity": "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", - "license": "MIT", - "dependencies": { - "object-assign": "^4", - "vary": "^1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - 
"integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", - "license": "MIT" - }, - "node_modules/encodeurl": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", - "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - 
"node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-object-atoms": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/esbuild": { - "version": "0.25.9", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.9.tgz", - "integrity": "sha512-CRbODhYyQx3qp7ZEwzxOk4JBqmD/seJrzPa/cGjY1VtIn5E09Oi9/dB4JwctnfZ8Q8iT7rioVv5k/FNT/uf54g==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.9", - "@esbuild/android-arm": "0.25.9", - "@esbuild/android-arm64": "0.25.9", - "@esbuild/android-x64": "0.25.9", - "@esbuild/darwin-arm64": "0.25.9", - "@esbuild/darwin-x64": "0.25.9", - "@esbuild/freebsd-arm64": "0.25.9", - "@esbuild/freebsd-x64": "0.25.9", - "@esbuild/linux-arm": "0.25.9", - "@esbuild/linux-arm64": "0.25.9", - "@esbuild/linux-ia32": "0.25.9", - "@esbuild/linux-loong64": "0.25.9", - "@esbuild/linux-mips64el": "0.25.9", - "@esbuild/linux-ppc64": "0.25.9", - "@esbuild/linux-riscv64": "0.25.9", - "@esbuild/linux-s390x": "0.25.9", - 
"@esbuild/linux-x64": "0.25.9", - "@esbuild/netbsd-arm64": "0.25.9", - "@esbuild/netbsd-x64": "0.25.9", - "@esbuild/openbsd-arm64": "0.25.9", - "@esbuild/openbsd-x64": "0.25.9", - "@esbuild/openharmony-arm64": "0.25.9", - "@esbuild/sunos-x64": "0.25.9", - "@esbuild/win32-arm64": "0.25.9", - "@esbuild/win32-ia32": "0.25.9", - "@esbuild/win32-x64": "0.25.9" - } - }, - "node_modules/escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", - "license": "MIT" - }, - "node_modules/etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/eventsource": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.7.tgz", - "integrity": "sha512-CRT1WTyuQoD771GW56XEZFQ/ZoSfWid1alKGDYMmkt2yl8UXrVR4pspqWNEcqKvVIzg6PAltWjxcSSPrboA4iA==", - "license": "MIT", - "dependencies": { - "eventsource-parser": "^3.0.1" - }, - "engines": { - "node": ">=18.0.0" - } - }, - "node_modules/eventsource-parser": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.5.tgz", - "integrity": "sha512-bSRG85ZrMdmWtm7qkF9He9TNRzc/Bm99gEJMaQoHJ9E6Kv9QBbsldh2oMj7iXmYNEAVvNgvv5vPorG6W+XtBhQ==", - "license": "MIT", - "engines": { - "node": ">=20.0.0" - } - }, - "node_modules/express": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", - "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", - "license": "MIT", - "dependencies": { - "accepts": "^2.0.0", - "body-parser": "^2.2.0", - "content-disposition": "^1.0.0", - 
"content-type": "^1.0.5", - "cookie": "^0.7.1", - "cookie-signature": "^1.2.1", - "debug": "^4.4.0", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "etag": "^1.8.1", - "finalhandler": "^2.1.0", - "fresh": "^2.0.0", - "http-errors": "^2.0.0", - "merge-descriptors": "^2.0.0", - "mime-types": "^3.0.0", - "on-finished": "^2.4.1", - "once": "^1.4.0", - "parseurl": "^1.3.3", - "proxy-addr": "^2.0.7", - "qs": "^6.14.0", - "range-parser": "^1.2.1", - "router": "^2.2.0", - "send": "^1.1.0", - "serve-static": "^2.2.0", - "statuses": "^2.0.1", - "type-is": "^2.0.1", - "vary": "^1.1.2" - }, - "engines": { - "node": ">= 18" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/express-rate-limit": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz", - "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==", - "license": "MIT", - "engines": { - "node": ">= 16" - }, - "funding": { - "url": "https://github.com/sponsors/express-rate-limit" - }, - "peerDependencies": { - "express": ">= 4.11" - } - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "license": "MIT" - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "license": "MIT" - }, - "node_modules/fast-uri": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz", - "integrity": 
"sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fastify" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/fastify" - } - ], - "license": "BSD-3-Clause" - }, - "node_modules/finalhandler": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", - "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", - "license": "MIT", - "dependencies": { - "debug": "^4.4.0", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "on-finished": "^2.4.1", - "parseurl": "^1.3.3", - "statuses": "^2.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/forwarded": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", - "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fresh": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", - "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": 
"sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-intrinsic": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", - "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/get-tsconfig": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.1.tgz", - "integrity": "sha512-auHyJ4AgMz7vgS8Hp3N6HXSmlMdUyhSUrfBF16w153rxtLIEOE+HGqaBppczZvnHLqQJfiHotCYpNhl0lUROFQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "resolve-pkg-maps": "^1.0.0" - }, - "funding": { - "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" - } - }, - "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - 
"funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "license": "MIT", - "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "license": "ISC" - }, - "node_modules/ipaddr.js": { - "version": "1.9.1", - "resolved": 
"https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", - "license": "MIT", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/is-promise": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", - "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", - "license": "MIT" - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "license": "ISC" - }, - "node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "license": "MIT" - }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/media-typer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", - "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/merge-descriptors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", - "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", - "license": "MIT", - "engines": { - 
"node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/mime-db": { - "version": "1.54.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", - "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", - "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", - "license": "MIT", - "dependencies": { - "mime-db": "^1.54.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "license": "MIT" - }, - "node_modules/negotiator": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", - "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": 
"https://github.com/sponsors/ljharb" - } - }, - "node_modules/on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "license": "MIT", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "license": "ISC", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/parseurl": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-to-regexp": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", - "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", - "license": "MIT", - "engines": { - "node": ">=16" - } - }, - "node_modules/pkce-challenge": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/pkce-challenge/-/pkce-challenge-5.0.0.tgz", - "integrity": "sha512-ueGLflrrnvwB3xuo/uGob5pd5FN7l0MsLf0Z87o/UQmRtwjvfylfc9MurIxRAWywCYTgrvpXBcqjV4OfCYGCIQ==", - "license": "MIT", - "engines": { - "node": ">=16.20.0" - } - }, - "node_modules/proxy-addr": { - "version": "2.0.7", - 
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", - "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", - "license": "MIT", - "dependencies": { - "forwarded": "0.2.0", - "ipaddr.js": "1.9.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/qs": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", - "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.1.0" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/raw-body": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", - "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.6.3", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": 
"sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/resolve-pkg-maps": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", - "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" - } - }, - "node_modules/router": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", - "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", - "license": "MIT", - "dependencies": { - "debug": "^4.4.0", - "depd": "^2.0.0", - "is-promise": "^4.0.0", - "parseurl": "^1.3.3", - "path-to-regexp": "^8.0.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "license": "MIT" - }, - "node_modules/send": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", - "integrity": 
"sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", - "license": "MIT", - "dependencies": { - "debug": "^4.3.5", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "etag": "^1.8.1", - "fresh": "^2.0.0", - "http-errors": "^2.0.0", - "mime-types": "^3.0.1", - "ms": "^2.1.3", - "on-finished": "^2.4.1", - "range-parser": "^1.2.1", - "statuses": "^2.0.1" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/serve-static": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", - "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", - "license": "MIT", - "dependencies": { - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "parseurl": "^1.3.3", - "send": "^1.2.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", - "license": "ISC" - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/side-channel": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", - 
"integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3", - "side-channel-list": "^1.0.0", - "side-channel-map": "^1.0.1", - "side-channel-weakmap": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-list": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", - "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-map": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", - "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-weakmap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", - "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3", - "side-channel-map": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - 
"node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", - "license": "MIT", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/tsx": { - "version": "4.20.4", - "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.20.4.tgz", - "integrity": "sha512-yyxBKfORQ7LuRt/BQKBXrpcq59ZvSW0XxwfjAt3w2/8PmdxaFzijtMhTawprSHhpzeM5BgU2hXHG3lklIERZXg==", - "dev": true, - "license": "MIT", - "dependencies": { - "esbuild": "~0.25.0", - "get-tsconfig": "^4.7.5" - }, - "bin": { - "tsx": "dist/cli.mjs" - }, - "engines": { - "node": ">=18.0.0" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - } - }, - "node_modules/type-is": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", - "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", - "license": "MIT", - "dependencies": { - "content-type": "^1.0.5", - "media-typer": "^1.1.0", - "mime-types": "^3.0.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/typescript": { - "version": "5.9.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz", - "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==", - "dev": true, - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/undici-types": { - "version": "6.21.0", - "resolved": 
"https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", - "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", - "dev": true, - "license": "MIT" - }, - "node_modules/unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "license": "ISC" - }, - "node_modules/yaml": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", - "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", - 
"license": "ISC", - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14.6" - } - }, - "node_modules/zod": { - "version": "3.25.76", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", - "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } - }, - "node_modules/zod-to-json-schema": { - "version": "3.24.6", - "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz", - "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==", - "license": "ISC", - "peerDependencies": { - "zod": "^3.24.1" - } - } - } -} diff --git a/mcp-server-committy/package.json b/mcp-server-committy/package.json deleted file mode 100644 index 6bc002c..0000000 --- a/mcp-server-committy/package.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "name": "mcp-server-committy", - "version": "0.1.0", - "type": "module", - "main": "dist/index.js", - "bin": { - "mcp-server-committy": "dist/index.js" - }, - "description": "MCP server wrapper for the Committy Rust CLI", - "license": "Apache-2.0", - "scripts": { - "build": "tsc -p tsconfig.json", - "dev": "tsx watch src/index.ts", - "typecheck": "tsc -p tsconfig.json --noEmit", - "clean": "rm -rf dist", - "start": "node dist/index.js", - "prepare": "npm run build", - "prepublishOnly": "npm run build", - "test": "node scripts/test.mjs" - }, - "engines": { - "node": ">=18" - }, - "files": [ - "dist", - "README.md", - "LICENSE" - ], - "publishConfig": { - "access": "public" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/martient/committy.git" - }, - "bugs": { - "url": "https://github.com/martient/committy/issues" - }, - "homepage": "https://github.com/martient/committy#readme", - "dependencies": { - "@modelcontextprotocol/sdk": "^1.17.3", - "ajv": "^8.17.1", - "yaml": "^2.5.1", - "zod": 
"^3.23.8" - }, - "devDependencies": { - "@types/node": "^22.5.0", - "tsx": "^4.19.2", - "typescript": "^5.6.3" - } -} diff --git a/mcp-server-committy/scripts/fake-committy.mjs b/mcp-server-committy/scripts/fake-committy.mjs deleted file mode 100644 index cccab71..0000000 --- a/mcp-server-committy/scripts/fake-committy.mjs +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env node -// A minimal fake committy CLI to simulate JSON outputs for tests - -function getArg(flag) { - const i = process.argv.indexOf(flag); - if (i !== -1 && i + 1 < process.argv.length) return process.argv[i + 1]; - return undefined; -} - -function has(flag) { - return process.argv.includes(flag); -} - -async function main() { - const args = process.argv.slice(2); - // Expected forms: - // committy --non-interactive lint-message --message --output json - // committy --non-interactive tag --output json --source --dry-run (computeNextTag) - // committy --non-interactive tag --output json --source (applyTag) - // committy --non-interactive group-commit --mode --output json [flags] - - if (args.includes('lint-message')) { - const msg = getArg('--message') || ''; - const out = { command: 'lint-message', valid: true, issues: [], message: msg }; - process.stdout.write(JSON.stringify(out)); - process.exit(0); - } - - if (args.includes('group-commit')) { - const mode = getArg('--mode') || 'plan'; - const bad = process.env.FAKE_BAD_JSON; - const forcedPlanExit = process.env.FAKE_GC_EXIT_PLAN ? parseInt(process.env.FAKE_GC_EXIT_PLAN, 10) : 0; - const forcedApplyExit = process.env.FAKE_GC_EXIT_APPLY ? 
parseInt(process.env.FAKE_GC_EXIT_APPLY, 10) : 0; - const groups = [ - { name: 'docs', commit_type: 'docs', files: ['README.md'], suggested_message: 'docs: update README' }, - { name: 'code', commit_type: 'feat', files: ['src/app.ts'], suggested_message: 'feat: add feature' }, - ]; - if (mode === 'plan') { - if (bad === 'plan') { - process.stdout.write('{ not-json: true '); - process.exit(0); - } - const out = { command: 'group-commit', mode: 'plan', ok: true, groups }; - process.stdout.write(JSON.stringify(out)); - process.exit(forcedPlanExit || 0); - } else { - if (bad === 'apply') { - process.stdout.write('not-json'); - process.exit(0); - } - const commits = [ - { group: 'docs', message: 'docs: update README', ok: true, sha: 'abc123' }, - { group: 'code', message: 'feat: add feature', ok: true, sha: 'def456' }, - ]; - const pushed = has('--push'); - const out = { command: 'group-commit', mode: 'apply', ok: true, groups, commits, pushed }; - process.stdout.write(JSON.stringify(out)); - process.exit(forcedApplyExit || 0); - } - } - - if (args.includes('tag')) { - const source = getArg('--source') || ''; - if (has('--dry-run')) { - const out = { command: 'compute-next-tag', source, next_tag: 'v1.2.3' }; - process.stdout.write(JSON.stringify(out)); - process.exit(0); - } - const name = getArg('--name') || 'v1.2.3'; - const out = { command: 'apply-tag', source, name, pushed: true }; - process.stdout.write(JSON.stringify(out)); - process.exit(0); - } - - // Default: unknown command - process.stderr.write('Unknown command'); - process.exit(1); -} - -main().catch((e) => { - console.error(e); - process.exit(1); -}); diff --git a/mcp-server-committy/scripts/test.mjs b/mcp-server-committy/scripts/test.mjs deleted file mode 100644 index 5704c7c..0000000 --- a/mcp-server-committy/scripts/test.mjs +++ /dev/null @@ -1,165 +0,0 @@ -import assert from 'node:assert/strict'; -import { mkdtemp, writeFile, mkdir } from 'node:fs/promises'; -import path from 'node:path'; -import os 
from 'node:os'; -import { formatMessage, lintMessage, generateGuidelines, computeNextTag, applyTag, groupCommitPlan, groupCommitApply } from '../dist/committy.js'; - -async function testFormatMessage() { - const msg = formatMessage({ - commit_type: 'feat', - short: 'add new API', - scope: 'core', - long: 'This introduces a new API.', - breaking: true, - }); - const expected = 'feat(core)!: add new API\n\nThis introduces a new API.\n'; - assert.equal(msg, expected, 'formatMessage should build proper conventional commit'); -} - -async function testNonZeroExitWithJson() { - const oldBin = process.env.COMMITTY_BIN; - const oldScript = process.env.COMMITTY_SCRIPT; - const oldPlanExit = process.env.FAKE_GC_EXIT_PLAN; - const oldApplyExit = process.env.FAKE_GC_EXIT_APPLY; - try { - process.env.COMMITTY_BIN = 'node'; - const dirname = path.dirname(new URL(import.meta.url).pathname); - process.env.COMMITTY_SCRIPT = path.join(dirname, 'fake-committy.mjs'); - - const tmpRepo = await mkdtemp(path.join(os.tmpdir(), 'committy-gc-nonzero-')); - - process.env.FAKE_GC_EXIT_PLAN = '3'; - const plan = await groupCommitPlan(tmpRepo, {}); - assert.equal(plan.ok, false, 'plan ok should be false when exit code is non-zero'); - assert.ok(plan.result && plan.result.mode === 'plan', 'plan should still parse JSON result'); - - process.env.FAKE_GC_EXIT_APPLY = '2'; - const apply = await groupCommitApply(tmpRepo, {}); - assert.equal(apply.ok, false, 'apply ok should be false when exit code is non-zero'); - assert.ok(apply.result && apply.result.mode === 'apply', 'apply should still parse JSON result'); - } finally { - if (oldBin === undefined) delete process.env.COMMITTY_BIN; else process.env.COMMITTY_BIN = oldBin; - if (oldScript === undefined) delete process.env.COMMITTY_SCRIPT; else process.env.COMMITTY_SCRIPT = oldScript; - if (oldPlanExit === undefined) delete process.env.FAKE_GC_EXIT_PLAN; else process.env.FAKE_GC_EXIT_PLAN = oldPlanExit; - if (oldApplyExit === undefined) delete 
process.env.FAKE_GC_EXIT_APPLY; else process.env.FAKE_GC_EXIT_APPLY = oldApplyExit; - } -} - -async function testBadJsonGroupCommit() { - const oldBin = process.env.COMMITTY_BIN; - const oldScript = process.env.COMMITTY_SCRIPT; - const oldBad = process.env.FAKE_BAD_JSON; - try { - process.env.COMMITTY_BIN = 'node'; - const dirname = path.dirname(new URL(import.meta.url).pathname); - process.env.COMMITTY_SCRIPT = path.join(dirname, 'fake-committy.mjs'); - - const tmpRepo = await mkdtemp(path.join(os.tmpdir(), 'committy-gc-bad-')); - - process.env.FAKE_BAD_JSON = 'plan'; - const plan = await groupCommitPlan(tmpRepo, { includeUnstaged: true }); - assert.equal(plan.ok, false, 'plan ok should be false when JSON is malformed'); - assert.equal(plan.result, undefined, 'plan result should be undefined on JSON parse failure'); - - process.env.FAKE_BAD_JSON = 'apply'; - const apply = await groupCommitApply(tmpRepo, { autoStage: true }); - assert.equal(apply.ok, false, 'apply ok should be false when JSON is malformed'); - assert.equal(apply.result, undefined, 'apply result should be undefined on JSON parse failure'); - } finally { - if (oldBin === undefined) delete process.env.COMMITTY_BIN; else process.env.COMMITTY_BIN = oldBin; - if (oldScript === undefined) delete process.env.COMMITTY_SCRIPT; else process.env.COMMITTY_SCRIPT = oldScript; - if (oldBad === undefined) delete process.env.FAKE_BAD_JSON; else process.env.FAKE_BAD_JSON = oldBad; - } -} - -async function testFakeCliGroupCommitPlanApply() { - const oldBin = process.env.COMMITTY_BIN; - const oldScript = process.env.COMMITTY_SCRIPT; - try { - process.env.COMMITTY_BIN = 'node'; - const dirname = path.dirname(new URL(import.meta.url).pathname); - process.env.COMMITTY_SCRIPT = path.join(dirname, 'fake-committy.mjs'); - - const tmpRepo = await mkdtemp(path.join(os.tmpdir(), 'committy-gc-')); - - const plan = await groupCommitPlan(tmpRepo, { includeUnstaged: true, ai: false }); - assert.equal(plan.ok, true, 
'groupCommitPlan should succeed with fake CLI'); - assert.ok(plan.result && plan.result.command === 'group-commit', 'plan result has command'); - assert.equal(plan.result.mode, 'plan', 'plan mode is plan'); - assert.ok(Array.isArray(plan.result.groups) && plan.result.groups.length >= 1, 'plan has groups'); - - const apply = await groupCommitApply(tmpRepo, { autoStage: true, push: true }); - assert.equal(apply.ok, true, 'groupCommitApply should succeed with fake CLI'); - assert.equal(apply.result?.mode, 'apply', 'apply mode is apply'); - assert.ok(Array.isArray(apply.result?.commits) && apply.result.commits.length >= 1, 'apply has commits'); - assert.equal(apply.result?.pushed, true, 'apply pushed should be true when --push is passed'); - } finally { - if (oldBin === undefined) delete process.env.COMMITTY_BIN; else process.env.COMMITTY_BIN = oldBin; - if (oldScript === undefined) delete process.env.COMMITTY_SCRIPT; else process.env.COMMITTY_SCRIPT = oldScript; - } -} - -async function testFakeCliComputeAndApplyTag() { - const oldBin = process.env.COMMITTY_BIN; - const oldScript = process.env.COMMITTY_SCRIPT; - try { - // Use node to run the fake CLI script - process.env.COMMITTY_BIN = 'node'; - const dirname = path.dirname(new URL(import.meta.url).pathname); - process.env.COMMITTY_SCRIPT = path.join(dirname, 'fake-committy.mjs'); - - const comp = await computeNextTag({ source: '/tmp/repo', fetch: false }); - assert.equal(comp.ok, true, 'computeNextTag should succeed with fake CLI'); - assert.ok(comp.result && comp.result.next_tag === 'v1.2.3', 'computeNextTag returns next_tag'); - - const appl = await applyTag({ source: '/tmp/repo', name: 'v1.2.3' }); - assert.equal(appl.ok, true, 'applyTag should succeed with fake CLI'); - assert.ok(appl.result && appl.result.name === 'v1.2.3', 'applyTag returns name'); - } finally { - if (oldBin === undefined) delete process.env.COMMITTY_BIN; else process.env.COMMITTY_BIN = oldBin; - if (oldScript === undefined) delete 
process.env.COMMITTY_SCRIPT; else process.env.COMMITTY_SCRIPT = oldScript; - } -} - -async function testMissingBinary() { - const old = process.env.COMMITTY_BIN; - try { - process.env.COMMITTY_BIN = '/nonexistent/committy-binary'; - const res = await lintMessage('feat: test'); - assert.equal(res.ok, false, 'ok should be false when binary is missing'); - assert.equal(res.raw.code, 127, 'exit code should be 127 on spawn error'); - assert.match(res.raw.stderr || '', /Failed to spawn committy binary/, 'stderr should mention spawn failure'); - } finally { - if (old === undefined) delete process.env.COMMITTY_BIN; else process.env.COMMITTY_BIN = old; - } -} - -async function testGenerateGuidelines() { - const tmp = await mkdtemp(path.join(os.tmpdir(), 'committy-guidelines-')); - // Create files - await writeFile(path.join(tmp, 'README.md'), '# Test Repo\n'); - await mkdir(path.join(tmp, '.github'), { recursive: true }); - await writeFile(path.join(tmp, 'CONTRIBUTING.md'), 'Contribute here'); - await writeFile(path.join(tmp, '.github', 'changelog-config.json'), '{"preset":"conventional"}'); - - const res = await generateGuidelines(tmp); - assert.ok(res.readme && res.readme.includes('# Test Repo'), 'should read README.md'); - assert.ok(res.contributing && res.contributing.includes('Contribute'), 'should read CONTRIBUTING.md'); - assert.ok(res.changelogConfig && res.changelogConfig.includes('conventional'), 'should read changelog config'); -} - -(async function main() { - try { - await testFormatMessage(); - await testMissingBinary(); - await testGenerateGuidelines(); - await testFakeCliComputeAndApplyTag(); - await testFakeCliGroupCommitPlanApply(); - await testBadJsonGroupCommit(); - await testNonZeroExitWithJson(); - console.log('All tests passed'); - } catch (err) { - console.error('Test failed:', err); - process.exit(1); - } -})(); diff --git a/mcp-server-committy/src/commit_groups.ts b/mcp-server-committy/src/commit_groups.ts deleted file mode 100644 index 
03430a6..0000000 --- a/mcp-server-committy/src/commit_groups.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { formatMessage } from "./committy.js"; -import { listChangedFiles, stageFiles, commit as gitCommit, push as gitPush } from "./git.js"; - -export type ChangeGroupName = "docs" | "tests" | "ci" | "deps" | "build" | "chore" | "code"; - -export interface GroupOverride { - commit_type?: string; - scope?: string; - short?: string; - long?: string; -} - -export interface GroupPlan { - name: ChangeGroupName; - commit_type: string; - files: string[]; - suggested_message: string; -} - -export interface CommitRecord { - group: ChangeGroupName; - message: string; - sha?: string; - ok: boolean; - error?: string; -} - -export interface CommitGroupedChangesOptions { - repoPath: string; - includeUnstaged?: boolean; // default true - autoStage?: boolean; // default true - dryRun?: boolean; // default true - push?: boolean; // default false - signoff?: boolean; // default false - confirm?: boolean; // default false - must be true to commit - groupOverrides?: Partial>; -} - -export interface CommitGroupedChangesResult { - ok: boolean; - groups: GroupPlan[]; - commits?: CommitRecord[]; - pushed?: boolean; - errors?: string[]; -} - -export async function commitGroupedChanges(opts: CommitGroupedChangesOptions): Promise { - const includeUnstaged = opts.includeUnstaged ?? true; - const autoStage = opts.autoStage ?? true; - const dryRun = opts.dryRun ?? true; - const confirm = opts.confirm ?? 
false; - - const { all } = await listChangedFiles(opts.repoPath, includeUnstaged); - - const byGroup: Record = { - docs: [], tests: [], ci: [], deps: [], build: [], chore: [], code: [], - }; - - for (const f of all) { - byGroup[classifyFile(f)]?.push(f); - } - - const groups: GroupPlan[] = []; - (Object.keys(byGroup) as ChangeGroupName[]).forEach((name) => { - const files = byGroup[name]; - if (!files.length) return; - const defType = defaultTypeFor(name); - const override = opts.groupOverrides?.[name]; - const commit_type = override?.commit_type || defType; - const short = override?.short || defaultShortFor(name); - const scope = override?.scope; - const long = override?.long; - const message = formatMessage({ commit_type, short, scope, long }); - groups.push({ name, commit_type, files, suggested_message: message }); - }); - - if (dryRun || !confirm) { - return { ok: true, groups }; - } - - const commits: CommitRecord[] = []; - const errors: string[] = []; - - for (const g of groups) { - if (autoStage) { - const addRes = await stageFiles(opts.repoPath, g.files); - if (addRes.code !== 0) { - const err = `git add failed for group ${g.name}: ${addRes.stderr || addRes.stdout}`; - errors.push(err); - commits.push({ group: g.name, message: g.suggested_message, ok: false, error: err }); - continue; - } - } - const c = await gitCommit(opts.repoPath, g.suggested_message, opts.signoff); - commits.push({ group: g.name, message: g.suggested_message, ok: c.ok, sha: c.sha, error: c.ok ? undefined : (c.raw.stderr || c.raw.stdout) }); - if (!c.ok) { - errors.push(`commit failed for group ${g.name}: ${c.raw.stderr || c.raw.stdout}`); - } - } - - let pushed = false; - if (opts.push) { - const pr = await gitPush(opts.repoPath); - pushed = pr.code === 0; - if (!pushed) errors.push(`git push failed: ${pr.stderr || pr.stdout}`); - } - - return { ok: errors.length === 0, groups, commits, pushed, errors: errors.length ? 
errors : undefined }; -} - -function defaultTypeFor(name: ChangeGroupName): string { - switch (name) { - case "docs": return "docs"; - case "tests": return "test"; - case "ci": return "ci"; - case "deps": return "chore"; - case "build": return "build"; - case "chore": return "chore"; - case "code": return "chore"; // conservative default - } -} - -function defaultShortFor(name: ChangeGroupName): string { - switch (name) { - case "docs": return "update docs"; - case "tests": return "update tests"; - case "ci": return "update CI"; - case "deps": return "update dependencies"; - case "build": return "update build config"; - case "chore": return "misc maintenance"; - case "code": return "update code"; - } -} - -export function classifyFile(file: string): ChangeGroupName { - const f = file.replace(/^\.\/?/, ""); - // CI - if (f.startsWith(".github/")) return "ci"; - // Docs - if (f.startsWith("docs/") || /(^|\/)README\.md$/i.test(f) || /\.mdx?$/i.test(f)) return "docs"; - // Tests - if (f.startsWith("tests/") || /\.(test|spec)\.[jt]s$/i.test(f) || /_test\.rs$/i.test(f)) return "tests"; - // Deps (lockfiles) - if (/(^|\/)package-lock\.json$/.test(f) || /(^|\/)npm-shrinkwrap\.json$/.test(f) || /(^|\/)pnpm-lock\.yaml$/.test(f) || /(^|\/)yarn\.lock$/.test(f) || /(^|\/)Cargo\.lock$/.test(f)) return "deps"; - // Build/config - if (/(^|\/)Cargo\.toml$/.test(f) || /(^|\/)build\.rs$/.test(f) || /(^|\/)package\.json$/.test(f) || /(^|\/)tsconfig\.json$/.test(f) || /(^|\/)eslint\.(json|js|cjs|yml|yaml|config\.js)$/.test(f) || /(^|\/)\.eslintrc(\..*)?$/.test(f) || /(^|\/)vite\.(config\.)?\w+$/.test(f) || /(^|\/)rollup\.config\.[cm]?js$/.test(f)) return "build"; - // Chore (editor/config meta) - if (f.startsWith(".vscode/") || /(^|\/)\.editorconfig$/.test(f) || /(^|\/)\.gitignore$/.test(f) || /(^|\/)\.npmrc$/.test(f)) return "chore"; - // Everything else - return "code"; -} diff --git a/mcp-server-committy/src/committy.ts b/mcp-server-committy/src/committy.ts deleted file mode 100644 
index c21a1d4..0000000 --- a/mcp-server-committy/src/committy.ts +++ /dev/null @@ -1,302 +0,0 @@ -import { spawn } from "node:child_process"; -import { readFile } from "node:fs/promises"; -import path from "node:path"; - -export type Json = any; - -export interface RunResult { - code: number | null; - stdout: string; - stderr: string; -} - -export function getCommittyBin(): string { - return process.env.COMMITTY_BIN || "committy"; -} - -async function runCommittyRaw(args: string[], opts?: { cwd?: string; env?: NodeJS.ProcessEnv }): Promise { - return new Promise((resolve) => { - const script = process.env.COMMITTY_SCRIPT; - const finalArgs = script ? [script, ...args] : args; - const child = spawn(getCommittyBin(), finalArgs, { - cwd: opts?.cwd, - env: { - ...process.env, - CI: "1", - COMMITTY_NONINTERACTIVE: "1", - ...(opts?.env || {}), - }, - stdio: ["ignore", "pipe", "pipe"], - }); - - let stdout = ""; - let stderr = ""; - - child.stdout.on("data", (d) => (stdout += d.toString())); - child.stderr.on("data", (d) => (stderr += d.toString())); - - child.on("error", (err: NodeJS.ErrnoException) => { - // Common when binary is missing: err.code === 'ENOENT' - const bin = getCommittyBin(); - const msg = `Failed to spawn committy binary (\"${bin}\"): ${err.code || err.name || "ERROR"}`; - resolve({ code: 127, stdout, stderr: stderr ? 
`${stderr}\n${msg}` : msg }); - }); - - child.on("close", (code) => resolve({ code, stdout, stderr })); - }); -} - -async function runCommittyJson(args: string[], opts?: { cwd?: string; env?: NodeJS.ProcessEnv }): Promise<{ result?: T; raw: RunResult; ok: boolean }> { - const raw = await runCommittyRaw(args, opts); - if (raw.stdout.trim().length === 0) { - return { raw, ok: raw.code === 0 }; - } - try { - const parsed = JSON.parse(raw.stdout) as T; - return { result: parsed, raw, ok: raw.code === 0 }; - } catch (e) { - return { raw, ok: false }; - } -} - -// Lint commits in repo since last tag -export async function lintRepoSinceLastTag(repoPath: string, opts?: { cwd?: string }) { - const args = ["--non-interactive", "lint", "--repo-path", repoPath, "--output", "json"]; - return runCommittyJson(args, { cwd: opts?.cwd }); -} - -// Lint a single commit message string -export async function lintMessage(message: string, opts?: { cwd?: string }) { - const args = [ - "--non-interactive", - "lint-message", - "--message", - message, - "--output", - "json", - ]; - return runCommittyJson(args, { cwd: opts?.cwd }); -} - -export interface ComputeNextTagOptions { - source: string; // repo path - fetch?: boolean; // default: false - prerelease?: boolean; - prereleaseSuffix?: string; - releaseBranches?: string[]; -} - -export async function computeNextTag(options: ComputeNextTagOptions, opts?: { cwd?: string }) { - const args = [ - "--non-interactive", - "tag", - "--output", - "json", - "--source", - options.source, - "--dry-run", - ]; - if (options.fetch === true) args.push("--fetch"); - if (options.fetch === false) args.push("--no-fetch"); - if (options.prerelease) args.push("--prerelease"); - if (options.prereleaseSuffix) args.push("--prerelease-suffix", options.prereleaseSuffix); - if (options.releaseBranches && options.releaseBranches.length > 0) { - args.push("--release-branches", options.releaseBranches.join(",")); - } - return runCommittyJson(args, { cwd: opts?.cwd }); -} 
- -export interface ApplyTagOptions { - source: string; // repo path - name?: string; - fetch?: boolean; - prerelease?: boolean; - prereleaseSuffix?: string; - releaseBranches?: string[]; - bumpFiles?: boolean; - tagMessage?: string; -} - -export async function applyTag(options: ApplyTagOptions, opts?: { cwd?: string }) { - const args = [ - "--non-interactive", - "tag", - "--output", - "json", - "--source", - options.source, - ]; - if (options.name) args.push("--name", options.name); - if (options.bumpFiles) args.push("--bump-files"); - if (options.tagMessage) args.push("--tag-message", options.tagMessage); - if (options.fetch) args.push("--fetch"); - if (options.prerelease) args.push("--prerelease"); - if (options.prereleaseSuffix) args.push("--prerelease-suffix", options.prereleaseSuffix); - if (options.releaseBranches && options.releaseBranches.length > 0) { - args.push("--release-branches", options.releaseBranches.join(",")); - } - return runCommittyJson(args, { cwd: opts?.cwd }); -} - -export interface FormatMessageInput { - commit_type: string; // feat, fix, chore, docs, refactor, etc. - short: string; // short description - scope?: string; - long?: string; // body - breaking?: boolean; -} - -export function formatMessage(input: FormatMessageInput): string { - const scope = input.scope ? `(${input.scope})` : ""; - const bang = input.breaking ? "!" : ""; - const header = `${input.commit_type}${scope}${bang}: ${input.short}`; - const body = input.long ? 
`\n\n${input.long}\n` : "\n"; - return header + body; -} - -export interface GenerateGuidelinesResult { - readme?: string; - contributing?: string; - changelogConfig?: string; -} - -export async function generateGuidelines(repoPath: string): Promise { - const tryRead = async (p: string) => { - try { - return await readFile(p, "utf8"); - } catch { - return undefined; - } - }; - const candidates = { - readme: ["README.md", "readme.md"].map((f) => path.join(repoPath, f)), - contributing: ["CONTRIBUTING.md", ".github/CONTRIBUTING.md"].map((f) => path.join(repoPath, f)), - changelog: [".github/changelog-config.json", "changelog-config.json", ".github/changelog.json"].map((f) => path.join(repoPath, f)), - }; - - const [readme, contributing, changelogConfig] = await Promise.all([ - (async () => { - for (const p of candidates.readme) { - const c = await tryRead(p); - if (c) return c; - } - return undefined; - })(), - (async () => { - for (const p of candidates.contributing) { - const c = await tryRead(p); - if (c) return c; - } - return undefined; - })(), - (async () => { - for (const p of candidates.changelog) { - const c = await tryRead(p); - if (c) return c; - } - return undefined; - })(), - ]); - - return { readme, contributing, changelogConfig }; -} - -// ------------------------- -// group-commit (Rust CLI) -// ------------------------- - -export type GroupName = "docs" | "tests" | "ci" | "deps" | "build" | "chore" | "code"; - -export interface PlanGroup { - name: GroupName; - commit_type: string; - files: string[]; - suggested_message: string; -} - -export interface CommitRecord { - group: GroupName; - message: string; - ok: boolean; - sha?: string; - error?: string; -} - -export interface GroupCommitPlanResult { - command: "group-commit"; - mode: "plan"; - ok: boolean; - groups: PlanGroup[]; - errors?: string[]; -} - -export interface GroupCommitApplyResult { - command: "group-commit"; - mode: "apply"; - ok: boolean; - groups: PlanGroup[]; - commits: 
CommitRecord[]; - pushed?: boolean; - errors?: string[]; -} - -export interface GroupCommitCommonOptions { - includeUnstaged?: boolean; - // AI options (must match Rust flags) - ai?: boolean; - aiProvider?: "openrouter" | "ollama"; - aiModel?: string; - aiApiKeyEnv?: string; - aiBaseUrl?: string; - aiMaxTokens?: number; - aiTemperature?: number; - aiTimeoutMs?: number; - noAiJsonMode?: boolean; - aiSystemPrompt?: string; - aiSystemPromptFile?: string; - aiFileLimit?: number; - aiAllowSensitive?: boolean; -} - -function buildGroupCommitArgs(mode: "plan" | "apply", options?: GroupCommitCommonOptions & { autoStage?: boolean; push?: boolean }): string[] { - const args: string[] = [ - "--non-interactive", - "group-commit", - "--mode", - mode, - "--output", - "json", - ]; - if (options?.includeUnstaged) args.push("--include-unstaged"); - if (mode === "apply" && options?.autoStage) args.push("--auto-stage"); - if (mode === "apply" && options?.push) args.push("--push"); - if (options?.ai) { - args.push("--ai"); - if (options.aiProvider) args.push("--ai-provider", options.aiProvider); - if (options.aiModel) args.push("--ai-model", options.aiModel); - if (options.aiApiKeyEnv) args.push("--ai-api-key-env", options.aiApiKeyEnv); - if (options.aiBaseUrl) args.push("--ai-base-url", options.aiBaseUrl); - if (typeof options.aiMaxTokens === "number") args.push("--ai-max-tokens", String(options.aiMaxTokens)); - if (typeof options.aiTemperature === "number") args.push("--ai-temperature", String(options.aiTemperature)); - if (typeof options.aiTimeoutMs === "number") args.push("--ai-timeout-ms", String(options.aiTimeoutMs)); - if (options.noAiJsonMode) args.push("--no-ai-json-mode"); - if (options.aiSystemPrompt) args.push("--ai-system-prompt", options.aiSystemPrompt); - if (options.aiSystemPromptFile) args.push("--ai-system-prompt-file", options.aiSystemPromptFile); - if (typeof options.aiFileLimit === "number") args.push("--ai-file-limit", String(options.aiFileLimit)); - if 
(options.aiAllowSensitive) args.push("--ai-allow-sensitive"); - } - return args; -} - -export async function groupCommitPlan(repoPath: string, options?: GroupCommitCommonOptions) { - const args = buildGroupCommitArgs("plan", options); - return runCommittyJson(args, { cwd: repoPath }); -} - -export async function groupCommitApply( - repoPath: string, - options?: GroupCommitCommonOptions & { autoStage?: boolean; push?: boolean } -) { - const args = buildGroupCommitArgs("apply", options); - return runCommittyJson(args, { cwd: repoPath }); -} diff --git a/mcp-server-committy/src/git.ts b/mcp-server-committy/src/git.ts deleted file mode 100644 index 7793cea..0000000 --- a/mcp-server-committy/src/git.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { spawn } from "node:child_process"; - -export interface GitRunResult { - code: number | null; - stdout: string; - stderr: string; -} - -export function runGit(cwd: string, args: string[]): Promise { - return new Promise((resolve) => { - const child = spawn("git", args, { cwd, stdio: ["ignore", "pipe", "pipe"] }); - let stdout = ""; - let stderr = ""; - child.stdout.setEncoding("utf8"); - child.stderr.setEncoding("utf8"); - child.stdout.on("data", (d) => (stdout += d)); - child.stderr.on("data", (d) => (stderr += d)); - child.on("close", (code) => resolve({ code, stdout, stderr })); - child.on("error", (err: any) => resolve({ code: 127, stdout: "", stderr: String(err?.message || err) })); - }); -} - -export async function listChangedFiles(cwd: string, includeUnstaged: boolean): Promise<{ staged: string[]; unstaged: string[]; all: string[]; }>{ - const stagedRes = await runGit(cwd, ["diff", "--name-only", "--cached", "-z"]); - const unstagedRes = await runGit(cwd, ["diff", "--name-only", "-z"]); - const staged = splitNullList(stagedRes.stdout); - const unstaged = splitNullList(unstagedRes.stdout); - const all = Array.from(new Set([ ...staged, ...(includeUnstaged ? 
unstaged : []) ])); - return { staged, unstaged, all }; -} - -function splitNullList(out: string): string[] { - if (!out) return []; - // Some git versions may output with trailing null - return out.split("\u0000").filter(Boolean); -} - -export async function stageFiles(cwd: string, files: string[]): Promise { - if (!files.length) return { code: 0, stdout: "", stderr: "" }; - return runGit(cwd, ["add", "--", ...files]); -} - -export async function commit(cwd: string, message: string, signoff?: boolean): Promise<{ ok: boolean; sha?: string; raw: GitRunResult }>{ - const args = ["commit", "-m", message]; - if (signoff) args.push("-s"); - const res = await runGit(cwd, args); - if (res.code !== 0) return { ok: false, raw: res }; - const shaRes = await runGit(cwd, ["rev-parse", "HEAD"]); - const sha = shaRes.stdout.trim(); - return { ok: true, sha, raw: res }; -} - -export async function push(cwd: string): Promise { - return runGit(cwd, ["push"]); -} diff --git a/mcp-server-committy/src/index.ts b/mcp-server-committy/src/index.ts deleted file mode 100644 index 79bb578..0000000 --- a/mcp-server-committy/src/index.ts +++ /dev/null @@ -1,294 +0,0 @@ -#!/usr/bin/env node -import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; -import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; -import { z } from "zod"; -import { - lintRepoSinceLastTag, - lintMessage, - computeNextTag, - applyTag, - formatMessage, - generateGuidelines, - groupCommitPlan, - groupCommitApply, -} from "./committy.js"; -import { commitGroupedChanges } from "./commit_groups.js"; - -const server = new McpServer({ name: "committy", version: "0.1.0" }); - -// mcp0_lint_repo_since_last_tag -server.registerTool( - "mcp0_lint_repo_since_last_tag", - { - description: "Lint repository commits since last tag using committy CLI", - inputSchema: { - repo_path: z.string().describe("Path to the git repository"), - }, - }, - async ({ repo_path }) => { - const res = await 
lintRepoSinceLastTag(repo_path); - const payload = res.result ?? { stdout: res.raw.stdout, stderr: res.raw.stderr, code: res.raw.code, ok: res.ok }; - return { content: [{ type: "text", text: JSON.stringify(payload) }] }; - } -); - -// mcp0_lint_message -server.registerTool( - "mcp0_lint_message", - { - description: "Lint a single commit message for conventional commit compliance", - inputSchema: { - message: z.string().describe("Commit message to validate"), - }, - }, - async ({ message }) => { - const res = await lintMessage(message); - const payload = res.result ?? { stdout: res.raw.stdout, stderr: res.raw.stderr, code: res.raw.code, ok: res.ok }; - return { content: [{ type: "text", text: JSON.stringify(payload) }] }; - } -); - -// mcp0_compute_next_tag -server.registerTool( - "mcp0_compute_next_tag", - { - description: "Compute the next tag without mutating the repo", - inputSchema: { - repo_path: z.string().describe("Path to the git repository"), - fetch: z.boolean().optional().describe("Whether to git fetch before computing"), - prerelease: z.boolean().optional().describe("Use prerelease semantics"), - prerelease_suffix: z.string().optional().describe("Suffix for prerelease, e.g. beta.1"), - release_branches: z.array(z.string()).optional().describe("Branches considered release branches") - }, - }, - async ({ repo_path, fetch, prerelease, prerelease_suffix, release_branches }) => { - const res = await computeNextTag({ - source: repo_path, - fetch, - prerelease, - prereleaseSuffix: prerelease_suffix, - releaseBranches: release_branches, - }); - const payload = res.result ?? { stdout: res.raw.stdout, stderr: res.raw.stderr, code: res.raw.code, ok: res.ok }; - return { content: [{ type: "text", text: JSON.stringify(payload) }] }; - } -); - -// mcp0_apply_tag -server.registerTool( - "mcp0_apply_tag", - { - description: "Creates and pushes a new tag. 
Requires confirm_push and allowlist.", - inputSchema: { - repo_path: z.string().describe("Path to the git repository"), - name: z.string().optional().describe("Tag name to create, otherwise computed"), - fetch: z.boolean().optional(), - prerelease: z.boolean().optional(), - prerelease_suffix: z.string().optional(), - release_branches: z.array(z.string()).optional(), - bump_files: z.boolean().optional(), - tag_message: z.string().optional(), - confirm_push: z.boolean().describe("Must be true to proceed"), - }, - }, - async ({ repo_path, name, fetch, prerelease, prerelease_suffix, release_branches, bump_files, tag_message, confirm_push }) => { - if (!confirm_push) { - return { content: [{ type: "text", text: "confirm_push is false; refusing to mutate repo." }] }; - } - const res = await applyTag({ - source: repo_path, - name, - fetch, - prerelease, - prereleaseSuffix: prerelease_suffix, - releaseBranches: release_branches, - bumpFiles: bump_files, - tagMessage: tag_message, - }); - const payload = res.result ?? 
{ stdout: res.raw.stdout, stderr: res.raw.stderr, code: res.raw.code, ok: res.ok }; - return { content: [{ type: "text", text: JSON.stringify(payload) }] }; - } -); - -// mcp0_format_message -server.registerTool( - "mcp0_format_message", - { - description: "Return a conventional commit message from parts", - inputSchema: { - commit_type: z.string().describe("Type: feat, fix, docs, chore, refactor, etc."), - short: z.string().describe("Short description"), - scope: z.string().optional(), - long: z.string().optional().describe("Body / long description"), - breaking: z.boolean().optional(), - }, - }, - async ({ commit_type, short, scope, long, breaking }) => { - const text = formatMessage({ commit_type, short, scope, long, breaking }); - return { content: [{ type: "text", text }] }; - } -); - -// mcp0_generate_guidelines -server.registerTool( - "mcp0_generate_guidelines", - { - description: "Reads common guideline files (CONTRIBUTING.md, README.md, changelog config) and returns a combined summary.", - inputSchema: { - repo_path: z.string().describe("Path to the git repository"), - additional_files: z.array(z.string()).optional(), - max_bytes: z.number().optional(), - }, - }, - async ({ repo_path }) => { - const result = await generateGuidelines(repo_path); - return { content: [{ type: "text", text: JSON.stringify(result) }] }; - } -); - -// mcp0_commit_grouped_changes -server.registerTool( - "mcp0_commit_grouped_changes", - { - description: "Analyze pending changes, group by kind (docs/tests/ci/deps/build/chore/code), and optionally commit and push.", - inputSchema: { - repo_path: z.string().describe("Path to the git repository"), - dry_run: z.boolean().optional().describe("Only return plan; default true"), - include_unstaged: z.boolean().optional().describe("Include unstaged changes; default true"), - auto_stage: z.boolean().optional().describe("Stage files per group before committing; default true"), - push: z.boolean().optional().describe("Push after committing; 
default false"), - signoff: z.boolean().optional().describe("Add Signed-off-by; default false"), - confirm: z.boolean().optional().describe("Must be true to mutate repo (commit/push)"), - group_overrides: z - .record( - z.object({ - commit_type: z.string().optional(), - scope: z.string().optional(), - short: z.string().optional(), - long: z.string().optional(), - }) - ) - .optional() - .describe("Per-group message/type overrides (keys: docs,tests,ci,deps,build,chore,code)"), - }, - }, - async ({ repo_path, dry_run, include_unstaged, auto_stage, push, signoff, confirm, group_overrides }) => { - const result = await commitGroupedChanges({ - repoPath: repo_path, - dryRun: dry_run, - includeUnstaged: include_unstaged, - autoStage: auto_stage, - push, - signoff, - confirm, - groupOverrides: group_overrides as any, - }); - return { content: [{ type: "text", text: JSON.stringify(result) }] }; - } -); - -// mcp0_group_commit_plan (Rust CLI) -server.registerTool( - "mcp0_group_commit_plan", - { - description: "Plan grouped commits using Rust committy CLI (no mutations).", - inputSchema: { - repo_path: z.string().describe("Path to the git repository"), - include_unstaged: z.boolean().optional(), - // AI flags - ai: z.boolean().optional(), - ai_provider: z.enum(["openrouter", "ollama"]).optional(), - ai_model: z.string().optional(), - ai_api_key_env: z.string().optional(), - ai_base_url: z.string().optional(), - ai_max_tokens: z.number().optional(), - ai_temperature: z.number().optional(), - ai_timeout_ms: z.number().optional(), - no_ai_json_mode: z.boolean().optional(), - ai_system_prompt: z.string().optional(), - ai_system_prompt_file: z.string().optional(), - ai_file_limit: z.number().optional(), - ai_allow_sensitive: z.boolean().optional(), - }, - }, - async (input) => { - const res = await groupCommitPlan(input.repo_path, { - includeUnstaged: input.include_unstaged, - ai: input.ai, - aiProvider: input.ai_provider, - aiModel: input.ai_model, - aiApiKeyEnv: 
input.ai_api_key_env, - aiBaseUrl: input.ai_base_url, - aiMaxTokens: input.ai_max_tokens, - aiTemperature: input.ai_temperature, - aiTimeoutMs: input.ai_timeout_ms, - noAiJsonMode: input.no_ai_json_mode, - aiSystemPrompt: input.ai_system_prompt, - aiSystemPromptFile: input.ai_system_prompt_file, - aiFileLimit: input.ai_file_limit, - aiAllowSensitive: input.ai_allow_sensitive, - }); - const payload = res.result ?? { stdout: res.raw.stdout, stderr: res.raw.stderr, code: res.raw.code, ok: res.ok }; - return { content: [{ type: "text", text: JSON.stringify(payload) }] }; - } -); - -// mcp0_group_commit_apply (Rust CLI) -server.registerTool( - "mcp0_group_commit_apply", - { - description: "Apply grouped commits using Rust committy CLI (mutating).", - inputSchema: { - repo_path: z.string().describe("Path to the git repository"), - include_unstaged: z.boolean().optional(), - auto_stage: z.boolean().optional(), - push: z.boolean().optional(), - // AI flags - ai: z.boolean().optional(), - ai_provider: z.enum(["openrouter", "ollama"]).optional(), - ai_model: z.string().optional(), - ai_api_key_env: z.string().optional(), - ai_base_url: z.string().optional(), - ai_max_tokens: z.number().optional(), - ai_temperature: z.number().optional(), - ai_timeout_ms: z.number().optional(), - no_ai_json_mode: z.boolean().optional(), - ai_system_prompt: z.string().optional(), - ai_system_prompt_file: z.string().optional(), - ai_file_limit: z.number().optional(), - ai_allow_sensitive: z.boolean().optional(), - }, - }, - async (input) => { - const res = await groupCommitApply(input.repo_path, { - includeUnstaged: input.include_unstaged, - autoStage: input.auto_stage, - push: input.push, - ai: input.ai, - aiProvider: input.ai_provider, - aiModel: input.ai_model, - aiApiKeyEnv: input.ai_api_key_env, - aiBaseUrl: input.ai_base_url, - aiMaxTokens: input.ai_max_tokens, - aiTemperature: input.ai_temperature, - aiTimeoutMs: input.ai_timeout_ms, - noAiJsonMode: input.no_ai_json_mode, - 
aiSystemPrompt: input.ai_system_prompt, - aiSystemPromptFile: input.ai_system_prompt_file, - aiFileLimit: input.ai_file_limit, - aiAllowSensitive: input.ai_allow_sensitive, - }); - const payload = res.result ?? { stdout: res.raw.stdout, stderr: res.raw.stderr, code: res.raw.code, ok: res.ok }; - return { content: [{ type: "text", text: JSON.stringify(payload) }] }; - } -); - -async function main() { - const transport = new StdioServerTransport(); - await server.connect(transport); -} - -main().catch((err) => { - console.error(err); - process.exit(1); -}); diff --git a/mcp-server-committy/tsconfig.json b/mcp-server-committy/tsconfig.json deleted file mode 100644 index 39a89d8..0000000 --- a/mcp-server-committy/tsconfig.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "compilerOptions": { - "target": "ES2022", - "module": "NodeNext", - "moduleResolution": "NodeNext", - "outDir": "dist", - "rootDir": "src", - "esModuleInterop": true, - "forceConsistentCasingInFileNames": true, - "strict": true, - "skipLibCheck": true, - "resolveJsonModule": true, - "declaration": false, - "sourceMap": true - }, - "include": ["src/**/*.ts"], - "exclude": ["node_modules", "dist"] -} diff --git a/scripts/verify_native_git_e2e.sh b/scripts/verify_native_git_e2e.sh new file mode 100755 index 0000000..dfdbcb9 --- /dev/null +++ b/scripts/verify_native_git_e2e.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +set -euo pipefail + +repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" +image_name="committy-native-git-e2e" + +docker build \ + --file "$repo_root/tools/native-git-e2e/Dockerfile" \ + --tag "$image_name" \ + "$repo_root" + +docker run --rm --tty "$image_name" diff --git a/src/cli/commands/amend.rs b/src/cli/commands/amend.rs index 37d6ffd..3e5c802 100644 --- a/src/cli/commands/amend.rs +++ b/src/cli/commands/amend.rs @@ -1,38 +1,50 @@ +use crate::cli::commands::commit::CommitCommand; use crate::cli::Command; use crate::error::CliError; -use crate::git; -use crate::input; -use log::info; +use std::path::PathBuf; use structopt::StructOpt; -#[derive(StructOpt)] -pub struct AmendCommand {} +#[derive(Debug, StructOpt, Default)] +pub struct AmendCommand { + #[structopt(long = "type", help = "Type of commit (e.g., feat, fix, docs)")] + commit_type: Option, + + #[structopt(long, help = "Scope of the commit")] + scope: Option, + + #[structopt(long, help = "Short commit message")] + message: Option, + + #[structopt(long, help = "Long/detailed commit message")] + long_message: Option, + + #[structopt(long, help = "Mark this as a breaking change")] + breaking_change: bool, + + #[structopt(long, help = "Preview the amend without writing to git")] + dry_run: bool, + + #[structopt(long, default_value = "text", possible_values = &["text", "json"])] + output: String, + + #[structopt(long, default_value = ".", parse(from_os_str))] + repo_path: PathBuf, +} impl Command for AmendCommand { fn execute(&self, non_interactive: bool) -> Result<(), CliError> { - if non_interactive { - return Err(CliError::InputError( - "Amend command is not supported in non-interactive mode".to_string(), - )); - } - - let commit_type = input::select_commit_type()?; - let breaking_change = input::confirm_breaking_change()?; - let scope = input::input_scope()?; - let short_message = input::input_short_message()?; - let long_message = input::input_long_message()?; - - let full_message = git::format_commit_message( - &commit_type, - breaking_change, - &scope, - 
&short_message, - &long_message, - ); - - git::commit_changes(&full_message, true)?; - - info!("Previous commit amended successfully! 🎉"); - Ok(()) + let commit = CommitCommand { + commit_type: self.commit_type.clone(), + scope: self.scope.clone(), + message: self.message.clone(), + long_message: self.long_message.clone(), + breaking_change: self.breaking_change, + amend: true, + dry_run: self.dry_run, + output: self.output.clone(), + repo_path: self.repo_path.clone(), + }; + + commit.execute_with_command_name(non_interactive, "amend") } } diff --git a/src/cli/commands/branch.rs b/src/cli/commands/branch.rs index 9f16683..f23f45d 100644 --- a/src/cli/commands/branch.rs +++ b/src/cli/commands/branch.rs @@ -1,13 +1,17 @@ use std::collections::HashMap; use crate::cli::Command; +use crate::config::BRANCH_TYPES; use crate::error::CliError; use crate::git; use crate::input; +use crate::input::validation::validate_section; use crate::telemetry; use log::debug; use log::info; +use serde::Serialize; use serde_json::Value; +use std::path::PathBuf; use structopt::StructOpt; #[derive(Debug, StructOpt, Default)] @@ -15,61 +19,119 @@ pub struct BranchCommand { #[structopt(short, long, help = "Name of the branch to create")] name: Option, + #[structopt(long = "type", help = "Structured branch type (e.g., feat, fix, docs)")] + branch_type: Option, + + #[structopt(long, help = "Structured ticket identifier")] + ticket: Option, + + #[structopt(long, help = "Structured branch subject")] + subject: Option, + #[structopt(short, long, help = "Force create branch")] force: bool, #[structopt(short, long, help = "Validate branch name")] validate: bool, + + #[structopt(long, help = "Preview the branch operation without creating it")] + dry_run: bool, + + #[structopt(long, default_value = "text", possible_values = &["text", "json"])] + output: String, + + #[structopt(long, default_value = ".", parse(from_os_str))] + repo_path: PathBuf, +} + +#[derive(Debug, Serialize)] +struct 
BranchCommandOutput { + command: String, + ok: bool, + dry_run: bool, + branch_name: String, + branch_type: String, + ticket: String, + subject: String, + would_create: bool, + would_checkout: bool, + errors: Option>, +} + +struct BranchPlan { + branch_name: String, + branch_type: String, + ticket: String, + subject: String, + would_checkout: bool, } impl Command for BranchCommand { fn execute(&self, non_interactive: bool) -> Result<(), CliError> { - git::validate_git_config()?; + let Some(plan) = self.build_plan(non_interactive)? else { + return Ok(()); + }; + git::validate_branch_name(&plan.branch_name)?; - if let Some(name) = &self.name { - git::create_branch(name, self.force)?; - println!("Branch {name} created successfully!"); - } else { - if non_interactive { - return Err(CliError::InputError( - "Branch name is required in non-interactive mode".to_string(), - )); - } - - let branch_type = input::select_branch_type()?; - let ticket = input::input_ticket()?; - let subject = input::input_subject()?; + let mut errors = Vec::new(); + if git::branch_exists_in(&self.repo_path, &plan.branch_name)? && !self.force { + errors.push(format!( + "Branch '{}' already exists. Use --force to recreate it.", + plan.branch_name + )); + } - let branch_name = if ticket.is_empty() { - format!("{branch_type}-{subject}") + let output = BranchCommandOutput { + command: "branch".into(), + ok: errors.is_empty(), + dry_run: self.dry_run, + branch_name: plan.branch_name.clone(), + branch_type: plan.branch_type.clone(), + ticket: plan.ticket.clone(), + subject: plan.subject.clone(), + would_create: errors.is_empty(), + would_checkout: errors.is_empty() && plan.would_checkout, + errors: if errors.is_empty() { + None } else { - format!("{branch_type}-{ticket}-{subject}") - }; + Some(errors.clone()) + }, + }; - let validate = if !self.validate { - input::ask_want_create_new_branch(&branch_name)? 
- } else { - true - }; - if !validate { - info!("Abort"); - return Ok(()); - } - git::create_branch(&branch_name, self.force)?; - println!("Branch {branch_name} created successfully!"); - git::checkout_branch(&branch_name)?; - println!("Switched to branch {branch_name}"); + if !errors.is_empty() { + return Err(CliError::InputError(errors.join(" "))); + } + + if self.dry_run { + self.print_output(&output); + return Ok(()); + } + + git::create_branch_in(&self.repo_path, &plan.branch_name, self.force)?; + if plan.would_checkout { + git::checkout_branch_in(&self.repo_path, &plan.branch_name)?; + } + + self.print_output(&output); + + if self.name.is_none() { if let Err(e) = tokio::runtime::Runtime::new() .unwrap() .block_on(telemetry::posthog::publish_event( "branch_created", HashMap::from([ - ("branch_type", Value::from(branch_type.as_str())), - ("as_ticket", Value::from((!ticket.is_empty()).to_string())), - ("len_ticket", Value::from(ticket.len())), - ("as_subject", Value::from((!subject.is_empty()).to_string())), - ("len_subject", Value::from(subject.len())), + ("branch_type", Value::from(plan.branch_type.as_str())), + ( + "as_ticket", + Value::from((!plan.ticket.is_empty()).to_string()), + ), + ("len_ticket", Value::from(plan.ticket.len())), + ( + "as_subject", + Value::from((!plan.subject.is_empty()).to_string()), + ), + ("len_subject", Value::from(plan.subject.len())), ]), )) { @@ -80,3 +142,168 @@ impl Command for BranchCommand { Ok(()) } } + +impl BranchCommand { + fn build_plan(&self, non_interactive: bool) -> Result, CliError> { + if self.name.is_some() + && (self.branch_type.is_some() || self.ticket.is_some() || self.subject.is_some()) + { + return Err(CliError::InputError( + "Use either --name or structured branch flags (--type/--ticket/--subject), not both" + .to_string(), + )); + } + + if let Some(name) = &self.name { + let (branch_type, ticket, subject) = parse_branch_name(name); + return Ok(Some(BranchPlan { + branch_name: name.clone(), + branch_type, + 
ticket, + subject, + would_checkout: false, + })); + } + + if self.branch_type.is_some() || self.ticket.is_some() || self.subject.is_some() { + return self.build_structured_plan(non_interactive); + } + + if non_interactive { + return Err(CliError::InputError( + "Branch name is required in non-interactive mode. Use --name or --type with --subject." + .to_string(), + )); + } + + let branch_type = input::select_branch_type()?; + let ticket = input::input_ticket()?; + let subject = input::input_subject()?; + + let branch_name = if ticket.is_empty() { + format!("{branch_type}-{subject}") + } else { + format!("{branch_type}-{ticket}-{subject}") + }; + + let validate = if !self.validate && !self.dry_run { + input::ask_want_create_new_branch(&branch_name)? + } else { + true + }; + if !validate { + info!("Abort"); + return Ok(None); + } + + Ok(Some(BranchPlan { + branch_name, + branch_type, + ticket, + subject, + would_checkout: true, + })) + } + + fn build_structured_plan(&self, non_interactive: bool) -> Result, CliError> { + let branch_type = match &self.branch_type { + Some(branch_type) => validate_branch_type(branch_type)?, + None if !non_interactive => input::select_branch_type()?, + None => { + return Err(CliError::InputError( + "Branch type is required when using structured branch flags in non-interactive mode" + .to_string(), + )) + } + }; + + let ticket = match &self.ticket { + Some(ticket) => validate_structured_section(ticket, "ticket")?, + None if !non_interactive => input::input_ticket()?, + None => String::new(), + }; + + let subject = match &self.subject { + Some(subject) => validate_structured_section(subject, "subject")?, + None if !non_interactive => input::input_subject()?, + None => return Err(CliError::InputError( + "Subject is required when using structured branch flags in non-interactive mode" + .to_string(), + )), + }; + + let branch_name = build_branch_name(&branch_type, &ticket, &subject); + let would_checkout = !non_interactive; + + if 
!non_interactive && !self.validate && !self.dry_run { + let validate = input::ask_want_create_new_branch(&branch_name)?; + if !validate { + info!("Abort"); + return Ok(None); + } + } + + Ok(Some(BranchPlan { + branch_name, + branch_type, + ticket, + subject, + would_checkout, + })) + } + + fn print_output(&self, output: &BranchCommandOutput) { + if self.output == "json" { + println!("{}", serde_json::to_string(output).unwrap()); + return; + } + + if output.dry_run { + if output.would_checkout { + println!("Would create and switch to branch {}", output.branch_name); + } else { + println!("Would create branch {}", output.branch_name); + } + return; + } + + println!("Branch {} created successfully!", output.branch_name); + if output.would_checkout { + println!("Switched to branch {}", output.branch_name); + } + } +} + +fn build_branch_name(branch_type: &str, ticket: &str, subject: &str) -> String { + if ticket.is_empty() { + format!("{branch_type}-{subject}") + } else { + format!("{branch_type}-{ticket}-{subject}") + } +} + +fn validate_branch_type(branch_type: &str) -> Result { + if BRANCH_TYPES.iter().any(|known| known == &branch_type) { + Ok(branch_type.to_string()) + } else { + Err(CliError::InputError(format!( + "Invalid branch type '{}'. 
Valid branch types are: {}", + branch_type, + BRANCH_TYPES.join(", ") + ))) + } +} + +fn validate_structured_section(value: &str, field_name: &str) -> Result { + validate_section(value).map_err(|error| { + CliError::InputError(format!("Invalid {field_name} value '{}': {error}", value)) + }) +} + +fn parse_branch_name(name: &str) -> (String, String, String) { + let mut branch_parts = name.splitn(2, '-'); + let branch_type = branch_parts.next().unwrap_or("unknown").to_string(); + let ticket = String::new(); + let subject = branch_parts.next().unwrap_or("").to_string(); + (branch_type, ticket, subject) +} diff --git a/src/cli/commands/commit.rs b/src/cli/commands/commit.rs index 2ae62cd..3eefaa7 100644 --- a/src/cli/commands/commit.rs +++ b/src/cli/commands/commit.rs @@ -1,44 +1,88 @@ use serde_json::Value; use std::collections::HashMap; +use std::path::{Path, PathBuf}; use crate::cli::Command; +use crate::config::hierarchy::MergedConfig; +use crate::config::repository::RepositoryConfig; use crate::error::CliError; use crate::git; use crate::input; use crate::input::validation::{auto_correct_scope, suggest_commit_type}; +use crate::linter::{allowed_commit_types_for_repo, check_message_format_for_repo}; +use crate::scope::detector::ScopeDetector; use crate::telemetry; -use log::{debug, info}; +use crate::workflow::orchestrator::WorkflowOrchestrator; +use log::{debug, info, warn}; +use serde::Serialize; use structopt::StructOpt; #[derive(Debug, StructOpt, Default)] pub struct CommitCommand { #[structopt(long = "type", help = "Type of commit (e.g., feat, fix, docs)")] - commit_type: Option, + pub(crate) commit_type: Option, #[structopt(long, help = "Scope of the commit")] - scope: Option, + pub(crate) scope: Option, #[structopt(long, help = "Short commit message")] - message: Option, + pub(crate) message: Option, #[structopt(long, help = "Long/detailed commit message")] - long_message: Option, + pub(crate) long_message: Option, #[structopt(long, help = "Mark this as a 
breaking change")] - breaking_change: bool, + pub(crate) breaking_change: bool, #[structopt(long, help = "Amend the previous commit")] - amend: bool, + pub(crate) amend: bool, + + #[structopt(long, help = "Preview the commit without writing to git")] + pub(crate) dry_run: bool, + + #[structopt(long, default_value = "text", possible_values = &["text", "json"])] + pub(crate) output: String, + + #[structopt(long, default_value = ".", parse(from_os_str))] + pub(crate) repo_path: PathBuf, +} + +#[derive(Debug, Serialize)] +struct CommitCommandOutput { + command: String, + ok: bool, + dry_run: bool, + message: String, + commit_type: String, + scope: String, + breaking_change: bool, + errors: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + workflow: Option, } impl Command for CommitCommand { fn execute(&self, non_interactive: bool) -> Result<(), CliError> { - // Validate git configuration first - git::validate_git_config()?; + self.execute_with_command_name(non_interactive, "commit") + } +} - if !git::has_staged_changes()? 
{ +impl CommitCommand { + pub(crate) fn execute_with_command_name( + &self, + non_interactive: bool, + command_name: &str, + ) -> Result<(), CliError> { + let repo = git::discover_repository_from(&self.repo_path)?; + let repo_path = repo + .workdir() + .ok_or_else(|| CliError::GitError(git2::Error::from_str("No working directory")))?; + let has_staged_changes = git::has_staged_changes_from(repo_path)?; + if !self.amend && !has_staged_changes { return Err(CliError::NoStagedChanges); } + let allowed_types = allowed_commit_types_for_repo(repo_path) + .map_err(|e| CliError::Generic(e.to_string()))?; // In non-interactive mode (from the command root), all required fields must be provided if non_interactive { @@ -50,19 +94,37 @@ impl Command for CommitCommand { } } + // Detect scopes from staged files early (for multi-package repositories) + let (detected_scopes, available_scopes, allow_multiple_scopes, require_scope) = + self.detect_scopes_from_config(repo_path, has_staged_changes)?; + // Handle commit type with auto-correction let commit_type = if let Some(commit_type) = &self.commit_type { if let Some(suggested) = suggest_commit_type(commit_type) { - if suggested != commit_type { + if allowed_types.iter().any(|allowed| allowed == suggested) + && suggested != commit_type + { info!("Auto-correcting commit type from '{commit_type}' to '{suggested}'"); debug!("Auto-corrected commit type from '{commit_type}' to '{suggested}'"); + suggested.to_string() + } else if allowed_types.iter().any(|allowed| allowed == commit_type) { + commit_type.clone() + } else if allowed_types.iter().any(|allowed| allowed == suggested) { + suggested.to_string() + } else { + return Err(CliError::InputError(format!( + "Invalid commit type '{}'. 
Valid types are: {}", + commit_type, + allowed_types.join(", ") + ))); } - suggested.to_string() + } else if allowed_types.iter().any(|allowed| allowed == commit_type) { + commit_type.clone() } else { return Err(CliError::InputError(format!( "Invalid commit type '{}'. Valid types are: {}", commit_type, - crate::config::COMMIT_TYPES.join(", ") + allowed_types.join(", ") ))); } } else { @@ -79,8 +141,9 @@ impl Command for CommitCommand { false }; - // Handle scope with auto-correction + // Handle scope with auto-detection and correction let scope = if let Some(scope) = &self.scope { + // CLI flag provided - use it if !non_interactive { // In interactive mode, validate and potentially correct the scope input::validate_scope_input(scope)? @@ -93,7 +156,20 @@ impl Command for CommitCommand { corrected } } else if !non_interactive { - input::input_scope()? + // Interactive mode - use smart scope detection + input::select_detected_scopes( + &detected_scopes, + &available_scopes, + allow_multiple_scopes, + )? + } else if detected_scopes.len() == 1 { + // Non-interactive mode - auto-select single detected scope + detected_scopes[0].clone() + } else if detected_scopes.is_empty() && require_scope { + // Multi-package repo requires scope but none detected + return Err(CliError::InputError( + "Scope is required for multi-package repository but could not be auto-detected. 
Please provide --scope flag.".to_string(), + )); } else { String::new() }; @@ -124,7 +200,52 @@ impl Command for CommitCommand { ); debug!("Formatted commit message: {full_message}"); - git::commit_changes(&full_message, self.amend)?; + + let validation_issues = check_message_format_for_repo(repo_path, &full_message) + .map_err(|e| CliError::Generic(e.to_string()))?; + if !validation_issues.is_empty() { + let output = CommitCommandOutput { + command: command_name.into(), + ok: false, + dry_run: self.dry_run, + message: full_message, + commit_type, + scope, + breaking_change, + errors: Some(validation_issues.clone()), + workflow: None, + }; + self.print_output(&output, command_name); + return Err(CliError::LintIssues(validation_issues.len())); + } + + if !self.dry_run { + // Validate git configuration before mutating git state + git::validate_git_config_from(repo_path)?; + } + + let workflow = + self.execute_workflow_if_configured(repo_path, &full_message, !self.dry_run)?; + + let output = CommitCommandOutput { + command: command_name.into(), + ok: true, + dry_run: self.dry_run, + message: full_message.clone(), + commit_type: commit_type.clone(), + scope: scope.clone(), + breaking_change, + errors: None, + workflow, + }; + + if self.dry_run { + self.print_output(&output, command_name); + return Ok(()); + } + + git::commit_changes_in(repo_path, &full_message, self.amend)?; + self.print_output(&output, command_name); // fire off telemetry without making this function async if let Err(e) = tokio::runtime::Runtime::new() @@ -154,7 +275,181 @@ impl Command for CommitCommand { { debug!("Telemetry error: {e:?}"); } - info!("Changes committed successfully! 🎉"); + if self.output != "json" { + if self.amend { + info!("Previous commit amended successfully! 🎉"); + } else { + info!("Changes committed successfully! 
🎉"); + } + } Ok(()) } + /// Detect scopes from staged files and load available scopes from config + fn detect_scopes_from_config( + &self, + repo_path: &Path, + has_staged_changes: bool, + ) -> Result<(Vec, Vec, bool, bool), CliError> { + // Try to load merged config + let merged_config = match MergedConfig::load(repo_path) { + Ok(config) => config, + Err(_) => { + // No config - return empty detected scopes + debug!("No repository config found for scope detection"); + return Ok((vec![], vec![], false, false)); + } + }; + + // Get repository config if it exists + if let Some(repo_config) = &merged_config.repository { + // Check if multi-package mode and auto_detect is enabled + if !repo_config.scopes.auto_detect { + debug!("Scope auto-detection disabled in config"); + return Ok((vec![], vec![], false, false)); + } + + debug!("Attempting to auto-detect scopes from staged files"); + + // Create scope detector + let detector = ScopeDetector::new(repo_config.clone(), repo_path); + + // Detect scopes from staged files + let detected_scopes = if has_staged_changes { + detector.detect_from_staged().unwrap_or_else(|e| { + warn!("Scope detection failed: {}", e); + vec![] + }) + } else { + vec![] + }; + + // Get available scopes + let available_scopes = detector.suggest_scopes().unwrap_or_else(|e| { + warn!("Failed to get scope suggestions: {}", e); + vec![] + }); + + let allow_multiple = repo_config.scopes.allow_multiple_scopes; + let require_scope = repo_config.scopes.require_scope_for_multi_package; + + if !detected_scopes.is_empty() { + info!( + "Auto-detected {} scope(s): {}", + detected_scopes.len(), + detected_scopes.join(", ") + ); + } + + Ok(( + detected_scopes, + available_scopes, + allow_multiple, + require_scope, + )) + } else { + // No repository config + Ok((vec![], vec![], false, false)) + } + } + + /// Run workflow orchestrator if repository config exists + fn execute_workflow_if_configured( + &self, + repo_path: &Path, + commit_message: &str, + 
apply_changes: bool, + ) -> Result, CliError> { + // Try to load repository config + let config = match RepositoryConfig::try_load(repo_path) { + Ok(Some(config)) => config, + Ok(None) => { + debug!("No repository config found, skipping workflow orchestrator"); + return Ok(None); + } + Err(e) => { + warn!("Failed to load repository config: {}", e); + return Ok(None); + } + }; + + debug!("Repository config loaded, running workflow orchestrator"); + + // Create and configure orchestrator + let orchestrator = WorkflowOrchestrator::new(repo_path, config); + + // Run workflow (detect scopes, calculate updates, apply changes) + match orchestrator.run_workflow(commit_message, apply_changes) { + Ok(result) => { + if !result.scopes.is_empty() { + info!("Workflow detected scopes: {:?}", result.scopes); + } + if apply_changes && result.has_changes() { + info!( + "Applied {} version update(s) and {} dependency update(s)", + result.version_updates.len(), + result.dependency_updates.len() + ); + + // Stage the updated files + for file in &result.modified_files { + if let Err(e) = git::stage_file(file) { + warn!("Failed to stage {}: {}", file.display(), e); + } + } + } + Ok(Some(result)) + } + Err(e) => { + warn!("Workflow orchestrator failed: {}", e); + // Don't fail the commit if workflow fails + Ok(None) + } + } + } + + fn print_output(&self, output: &CommitCommandOutput, command_name: &str) { + if self.output == "json" { + println!("{}", serde_json::to_string(output).unwrap()); + return; + } + + if let Some(errors) = &output.errors { + if command_name == "amend" { + println!("Amend message validation failed:"); + } else { + println!("Commit message validation failed:"); + } + for issue in errors { + println!("- {issue}"); + } + println!(); + println!("{}", output.message); + return; + } + + if output.dry_run { + if command_name == "amend" { + println!("Would amend commit to:\n\n{}", output.message); + } else { + println!("Would create commit:\n\n{}", output.message); + } + if 
let Some(workflow) = &output.workflow { + if !workflow.scopes.is_empty() { + println!("Detected scopes: {}", workflow.scopes.join(", ")); + } + if workflow.has_changes() { + println!( + "Would apply {} version update(s) and {} dependency update(s)", + workflow.version_updates.len(), + workflow.dependency_updates.len() + ); + } + } + return; + } + + if command_name == "amend" { + println!("Commit amended successfully!"); + } + } } diff --git a/src/cli/commands/config.rs b/src/cli/commands/config.rs new file mode 100644 index 0000000..805c94f --- /dev/null +++ b/src/cli/commands/config.rs @@ -0,0 +1,418 @@ +// Config command for validating and showing repository configuration + +use crate::cli::Command; +use crate::config::hierarchy::MergedConfig; +use crate::config::repository::RepositoryConfig; +use crate::error::CliError; +use anyhow::Result; +use colored::Colorize; +use serde_json::json; +use std::path::Path; +use structopt::StructOpt; + +#[derive(StructOpt)] +pub struct ConfigCommand { + #[structopt(subcommand)] + pub subcommand: ConfigSubcommand, +} + +#[derive(StructOpt)] +pub enum ConfigSubcommand { + #[structopt(about = "Validate repository configuration")] + Validate { + #[structopt(short, long, help = "Show detailed information")] + verbose: bool, + #[structopt(long, default_value = "text", possible_values = &["text", "json"])] + output: String, + #[structopt( + long, + help = "Repository path", + default_value = ".", + parse(from_os_str) + )] + repo_path: std::path::PathBuf, + }, + #[structopt(about = "Show merged configuration (repository + user)")] + Show { + #[structopt(short, long, help = "Show detailed information including sources")] + verbose: bool, + #[structopt(long, default_value = "text", possible_values = &["text", "json"])] + output: String, + #[structopt( + long, + help = "Repository path", + default_value = ".", + parse(from_os_str) + )] + repo_path: std::path::PathBuf, + }, +} + +impl Command for ConfigCommand { + fn execute(&self, 
_non_interactive: bool) -> Result<(), CliError> { + match &self.subcommand { + ConfigSubcommand::Validate { + verbose, + output, + repo_path, + } => { + validate(repo_path, *verbose, output).map_err(|e| CliError::Generic(e.to_string())) + } + ConfigSubcommand::Show { + verbose, + output, + repo_path, + } => show(repo_path, *verbose, output).map_err(|e| CliError::Generic(e.to_string())), + } + } +} + +/// Validate repository configuration +pub fn validate(repo_path: &Path, verbose: bool, output: &str) -> Result<()> { + let config_path = RepositoryConfig::get_config_path(repo_path)?; + let repo_config = RepositoryConfig::try_load(repo_path)?; + + if output == "json" { + let warnings = repo_config + .as_ref() + .map(|config| collect_validation_warnings(repo_path, config)) + .unwrap_or_default(); + + let payload = if let Some(config) = repo_config { + json!({ + "command": "config", + "mode": "validate", + "ok": true, + "dry_run": false, + "errors": serde_json::Value::Null, + "config_found": true, + "single_package_mode": false, + "repo_path": repo_path.display().to_string(), + "config_path": config_path.display().to_string(), + "repository": { + "name": config.repository.name, + "type": config.repository.repo_type, + "versioning_strategy": config.versioning.strategy, + "package_count": config.packages.len(), + "dependency_count": config.dependencies.len(), + "scope_mapping_count": config.scopes.mappings.len(), + }, + "packages": config.packages, + "dependencies": config.dependencies, + "scope_mappings": config.scopes.mappings, + "warnings": warnings, + "verbose": verbose, + }) + } else { + json!({ + "command": "config", + "mode": "validate", + "ok": true, + "dry_run": false, + "errors": serde_json::Value::Null, + "config_found": false, + "single_package_mode": true, + "repo_path": repo_path.display().to_string(), + "config_path": config_path.display().to_string(), + "repository": serde_json::Value::Null, + "packages": Vec::::new(), + "dependencies": Vec::::new(), + 
"scope_mappings": Vec::::new(), + "warnings": Vec::::new(), + "verbose": verbose, + }) + }; + + println!("{}", serde_json::to_string(&payload)?); + return Ok(()); + } + + println!("{}", "Validating repository configuration...".bold()); + println!(); + + // Try to load repository config + let repo_config = match repo_config { + Some(config) => config, + None => { + println!("{}", "✓ No .committy/config.toml found".green()); + println!("{}", " Running in single-package mode".dimmed()); + return Ok(()); + } + }; + + println!("{}", "✓ Configuration file found".green()); + println!("{}", "✓ Configuration is valid".green()); + println!(); + + // Show summary + println!("{}", "Configuration Summary:".bold()); + println!(" Repository: {}", repo_config.repository.name); + println!(" Type: {:?}", repo_config.repository.repo_type); + println!( + " Versioning Strategy: {:?}", + repo_config.versioning.strategy + ); + println!(" Packages: {}", repo_config.packages.len()); + println!(" Dependencies: {}", repo_config.dependencies.len()); + println!(" Scope Mappings: {}", repo_config.scopes.mappings.len()); + println!(); + + // Show packages + if !repo_config.packages.is_empty() { + println!("{}", "Packages:".bold()); + for pkg in &repo_config.packages { + let mut flags = Vec::new(); + if pkg.primary { + flags.push("primary".yellow().to_string()); + } + if pkg.independent { + flags.push("independent".cyan().to_string()); + } + if let Some(ref sync_with) = pkg.sync_with { + flags.push(format!("syncs with {}", sync_with).blue().to_string()); + } + if pkg.workspace_member { + flags.push("workspace member".magenta().to_string()); + } + + let flags_str = if flags.is_empty() { + String::new() + } else { + format!(" ({})", flags.join(", ")) + }; + + println!( + " {} {} - {} at {}{}", + "✓".green(), + pkg.name.bold(), + pkg.package_type, + pkg.path, + flags_str + ); + + if verbose { + println!(" Version file: {}", pkg.version_file); + println!(" Version field: {}", pkg.version_field); + if 
let Some(ref desc) = pkg.description { + println!(" Description: {}", desc); + } + } + } + println!(); + } + + // Show dependencies + if !repo_config.dependencies.is_empty() { + println!("{}", "Dependencies:".bold()); + for dep in &repo_config.dependencies { + println!(" {} → {} targets", dep.source.bold(), dep.targets.len()); + if verbose { + for target in &dep.targets { + println!( + " - {} (field: {}, strategy: {:?})", + target.file, target.field, target.strategy + ); + } + } + } + println!(); + } + + // Show scope mappings + if !repo_config.scopes.mappings.is_empty() && verbose { + println!("{}", "Scope Mappings:".bold()); + for mapping in &repo_config.scopes.mappings { + println!( + " {} → {} (package: {})", + mapping.pattern, + mapping.scope.cyan(), + mapping.package + ); + } + println!(); + } + + // Warnings + let warnings = collect_validation_warnings(repo_path, &repo_config); + + if !warnings.is_empty() { + println!("{}", "Warnings:".yellow().bold()); + for warning in warnings { + println!(" {} {}", "⚠".yellow(), warning); + } + println!(); + } + + println!("{}", "✓ Validation complete".green().bold()); + Ok(()) +} + +/// Show merged configuration (repository + user) +pub fn show(repo_path: &Path, verbose: bool, output: &str) -> Result<()> { + let merged = MergedConfig::load(repo_path)?; + + if output == "json" { + let payload = json!({ + "command": "config", + "mode": "show", + "ok": true, + "dry_run": false, + "errors": serde_json::Value::Null, + "repo_path": repo_path.display().to_string(), + "multi_package": merged.is_multi_package(), + "effective_patterns": { + "major_regex": merged.get_major_regex(), + "minor_regex": merged.get_minor_regex(), + "patch_regex": merged.get_patch_regex(), + }, + "repository_config": merged.repository_config(), + "user_config": merged.user_config(), + "verbose": verbose, + }); + println!("{}", serde_json::to_string(&payload)?); + return Ok(()); + } + + println!("{}", "Configuration Hierarchy:".bold()); + println!(); + + 
// Repository config + if let Some(repo_config) = merged.repository_config() { + println!( + "{}", + "Repository Config (.committy/config.toml):".green().bold() + ); + println!( + " Location: {}", + RepositoryConfig::get_config_path(repo_path)?.display() + ); + println!(" Repository: {}", repo_config.repository.name); + println!(" Type: {:?}", repo_config.repository.repo_type); + println!(" Versioning: {:?}", repo_config.versioning.strategy); + + if verbose { + if let Some(ref rules) = repo_config.versioning.rules { + println!(" Custom Regex Patterns:"); + if let Some(ref major) = rules.major_regex { + println!(" Major: {}", major); + } + if let Some(ref minor) = rules.minor_regex { + println!(" Minor: {}", minor); + } + if let Some(ref patch) = rules.patch_regex { + println!(" Patch: {}", patch); + } + } + } + println!(); + } else { + println!("{}", "Repository Config: Not found".dimmed()); + println!(" Running in single-package mode"); + println!(); + } + + // User config + println!( + "{}", + "User Config (~/.config/committy/config.toml):" + .cyan() + .bold() + ); + let user_config = merged.user_config(); + println!(" Metrics Enabled: {}", user_config.metrics_enabled); + + if verbose { + println!(" Regex Patterns:"); + println!(" Major: {}", user_config.major_regex); + println!(" Minor: {}", user_config.minor_regex); + println!(" Patch: {}", user_config.patch_regex); + } + println!(); + + // Effective config + println!("{}", "Effective Configuration:".yellow().bold()); + println!(" Multi-package mode: {}", merged.is_multi_package()); + println!(" Major regex: {}", merged.get_major_regex()); + println!(" Minor regex: {}", merged.get_minor_regex()); + println!(" Patch regex: {}", merged.get_patch_regex()); + + Ok(()) +} + +fn collect_validation_warnings(repo_path: &Path, repo_config: &RepositoryConfig) -> Vec { + let mut warnings = Vec::new(); + + for pkg in &repo_config.packages { + let pkg_path = repo_path.join(&pkg.path); + let version_file_path = 
pkg_path.join(&pkg.version_file); + if !version_file_path.exists() { + warnings.push(format!( + "Version file not found: {} (package: {})", + pkg.version_file, pkg.name + )); + } + } + + for dep in &repo_config.dependencies { + for target in &dep.targets { + let target_path = repo_path.join(&target.file); + if !target_path.exists() { + warnings.push(format!("Dependency target not found: {}", target.file)); + } + } + } + + warnings +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::config::repository::{ + RepositoryConfig, RepositoryMetadata, RepositoryType, VersioningConfig, VersioningStrategy, + }; + use tempfile::TempDir; + + #[test] + fn test_validate_without_config() { + let temp_dir = TempDir::new().unwrap(); + let result = validate(temp_dir.path(), false, "text"); + assert!(result.is_ok()); + } + + #[test] + fn test_validate_with_config() { + let temp_dir = TempDir::new().unwrap(); + + // Create a minimal valid config + let config = RepositoryConfig { + repository: RepositoryMetadata { + name: "test".to_string(), + repo_type: RepositoryType::SinglePackage, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Independent, + unified_version: None, + rules: None, + }, + packages: vec![], + dependencies: vec![], + scopes: Default::default(), + commit_rules: Default::default(), + workspace: None, + }; + + config.save(temp_dir.path()).unwrap(); + + let result = validate(temp_dir.path(), false, "text"); + assert!(result.is_ok()); + } + + #[test] + fn test_show_config() { + let temp_dir = TempDir::new().unwrap(); + let result = show(temp_dir.path(), false, "text"); + assert!(result.is_ok()); + } +} diff --git a/src/cli/commands/group_commit.rs b/src/cli/commands/group_commit.rs index 8fcc6f2..d030dce 100644 --- a/src/cli/commands/group_commit.rs +++ b/src/cli/commands/group_commit.rs @@ -2,12 +2,13 @@ use crate::ai::{AiCommitSuggestion, LlmClient, LlmError, OllamaClient, OpenRoute use crate::cli::Command; use 
crate::error::CliError; use crate::git::format_commit_message; -use crate::git::list_changed_files; -use crate::linter::check_message_format; +use crate::git::{discover_repository_from, list_changed_files_from}; +use crate::linter::check_message_format_for_repo; use git2::Repository; use serde::Serialize; use std::env; use std::fs; +use std::path::{Path, PathBuf}; use std::process::Command as ProcCommand; use structopt::StructOpt; @@ -29,6 +30,8 @@ pub struct PlanGroup { pub commit_type: String, pub files: Vec, pub suggested_message: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub issues: Option>, } #[derive(Debug, Serialize)] @@ -36,6 +39,7 @@ pub struct GroupCommitPlanResult { pub command: String, pub mode: String, pub ok: bool, + pub dry_run: bool, pub groups: Vec, pub errors: Option>, } @@ -54,6 +58,7 @@ pub struct GroupCommitApplyResult { pub command: String, pub mode: String, pub ok: bool, + pub dry_run: bool, pub groups: Vec, pub commits: Vec, pub pushed: Option, @@ -78,6 +83,10 @@ pub struct GroupCommitCommand { #[structopt(long)] push: bool, + /// Confirm the remote push when --push is used + #[structopt(long)] + confirm_push: bool, + /// Output format: text or json #[structopt(long, default_value = "json", possible_values = &["text", "json"])] output: String, @@ -138,6 +147,9 @@ pub struct GroupCommitCommand { /// Allow sending sensitive content to external AI providers #[structopt(long = "ai-allow-sensitive")] ai_allow_sensitive: bool, + + #[structopt(long, default_value = ".", parse(from_os_str))] + repo_path: PathBuf, } impl Default for GroupCommitCommand { @@ -147,6 +159,7 @@ impl Default for GroupCommitCommand { include_unstaged: false, auto_stage: false, push: false, + confirm_push: false, output: "json".into(), ai: false, ai_provider: "openrouter".into(), @@ -162,6 +175,7 @@ impl Default for GroupCommitCommand { ai_file_limit: 20, _ai_diff_lines_per_file: 80, ai_allow_sensitive: false, + repo_path: PathBuf::from("."), } } } @@ 
-283,7 +297,16 @@ impl Command for GroupCommitCommand { fn execute(&self, _non_interactive: bool) -> Result<(), CliError> { match self.mode.as_str() { "plan" => { - let files = list_changed_files(self.include_unstaged)?; + if self.push { + return Err(CliError::InputError( + "--push is only valid in apply mode".to_string(), + )); + } + let files = list_changed_files_from(&self.repo_path, self.include_unstaged)?; + let repo = discover_repository_from(&self.repo_path)?; + let repo_path = repo.workdir().ok_or_else(|| { + CliError::GitError(git2::Error::from_str("No working directory")) + })?; let mut by_group: std::collections::BTreeMap> = [ (GroupName::Docs, vec![]), (GroupName::Tests, vec![]), @@ -316,6 +339,7 @@ impl Command for GroupCommitCommand { commit_type, files, suggested_message: message, + issues: None, }); } @@ -457,7 +481,8 @@ impl Command for GroupCommitCommand { .to_string() }; // Lint and fallback - let issues = check_message_format(&candidate); + let issues = check_message_format_for_repo(repo_path, &candidate) + .map_err(|e| CliError::Generic(e.to_string()))?; if issues.is_empty() { g.suggested_message = candidate; } else { @@ -471,10 +496,14 @@ impl Command for GroupCommitCommand { } } + validate_group_messages(repo_path, &mut groups, &mut errors) + .map_err(|e| CliError::Generic(e.to_string()))?; + let res = GroupCommitPlanResult { command: "group-commit".into(), mode: "plan".into(), - ok: true, + ok: errors.is_empty(), + dry_run: true, groups, errors: if errors.is_empty() { None @@ -490,8 +519,17 @@ impl Command for GroupCommitCommand { Ok(()) } "apply" => { + if self.push && !self.confirm_push { + return Err(CliError::InputError( + "Remote push requires --confirm-push".to_string(), + )); + } + let repo = discover_repository_from(&self.repo_path)?; + let repo_path = repo.workdir().ok_or_else(|| { + CliError::GitError(git2::Error::from_str("No working directory")) + })?; // Build groups as in plan - let files = 
list_changed_files(self.include_unstaged)?; + let files = list_changed_files_from(&self.repo_path, self.include_unstaged)?; let mut by_group: std::collections::BTreeMap> = [ (GroupName::Docs, vec![]), (GroupName::Tests, vec![]), @@ -524,6 +562,7 @@ impl Command for GroupCommitCommand { commit_type, files, suggested_message: message, + issues: None, }); } @@ -657,7 +696,8 @@ impl Command for GroupCommitCommand { .trim() .to_string() }; - let issues = check_message_format(&candidate); + let issues = check_message_format_for_repo(repo_path, &candidate) + .map_err(|e| CliError::Generic(e.to_string()))?; if issues.is_empty() { g.suggested_message = candidate; } else { @@ -672,9 +712,9 @@ impl Command for GroupCommitCommand { } // Helper: quietly run `git` command - fn run_git(args: &[&str]) -> Result<(), CliError> { + fn run_git(repo_path: &Path, args: &[&str]) -> Result<(), CliError> { let mut cmd = ProcCommand::new("git"); - cmd.args(args); + cmd.current_dir(repo_path).args(args); let status = cmd .stdout(std::process::Stdio::null()) .stderr(std::process::Stdio::null()) @@ -689,8 +729,8 @@ impl Command for GroupCommitCommand { } } - fn last_commit_sha() -> Option { - if let Ok(repo) = Repository::discover(std::env::current_dir().ok()?) 
{ + fn last_commit_sha(repo_path: &Path) -> Option { + if let Ok(repo) = Repository::discover(repo_path) { if let Ok(head) = repo.head() { if let Ok(commit) = head.peel_to_commit() { return Some(commit.id().to_string()); @@ -703,21 +743,33 @@ impl Command for GroupCommitCommand { let mut commits: Vec = Vec::new(); // Commit per group - for g in &groups { - // Validate message again and fallback to default formatting - let candidate = g.suggested_message.trim().to_string(); - let final_msg = if check_message_format(&candidate).is_empty() { - candidate - } else { - // Rebuild from defaults - let short = default_short_for(&g.name); - format_commit_message(&g.commit_type, false, "", short, "") - }; + for g in &mut groups { + let final_msg = g.suggested_message.trim().to_string(); + let issues = check_message_format_for_repo(repo_path, &final_msg) + .map_err(|e| CliError::Generic(e.to_string()))?; + if !issues.is_empty() { + g.issues = Some(issues.clone()); + let error_message = format!( + "group {} message failed commit rules: {}", + group_name_str(g.name), + issues.join("; ") + ); + errors.push(error_message.clone()); + commits.push(CommitRecord { + group: g.name, + message: final_msg, + ok: false, + sha: None, + error: Some(error_message), + }); + continue; + } + g.issues = None; // Stage only this group's files if requested if self.auto_stage { // Unstage everything back to HEAD, then stage only the group's files - if let Err(e) = run_git(&["reset", "-q", "HEAD", "--"]) { + if let Err(e) = run_git(repo_path, &["reset", "-q", "HEAD", "--"]) { errors.push(format!( "git reset failed before staging {}: {}", group_name_str(g.name), @@ -729,7 +781,7 @@ impl Command for GroupCommitCommand { for f in &g.files { args.push(f.as_str()); } - if let Err(e) = run_git(&args) { + if let Err(e) = run_git(repo_path, &args) { errors.push(format!( "git add failed for group {}: {}", group_name_str(g.name), @@ -747,9 +799,9 @@ impl Command for GroupCommitCommand { } // Create commit - 
match crate::git::commit_changes(&final_msg, false) { + match crate::git::commit_changes_in(repo_path, &final_msg, false) { Ok(_) => { - let sha = last_commit_sha(); + let sha = last_commit_sha(repo_path); commits.push(CommitRecord { group: g.name, message: final_msg.clone(), @@ -778,14 +830,19 @@ impl Command for GroupCommitCommand { // Optional push let mut pushed: Option = None; if self.push { - pushed = Some(run_git(&["push"]).is_ok()); + let push_ok = run_git(repo_path, &["push"]).is_ok(); + if !push_ok { + errors.push("git push failed".to_string()); + } + pushed = Some(push_ok); } - let ok = commits.iter().all(|c| c.ok); + let ok = commits.iter().all(|c| c.ok) && errors.is_empty(); let res = GroupCommitApplyResult { command: "group-commit".into(), mode: "apply".into(), ok, + dry_run: false, groups: groups.clone(), commits, pushed, @@ -806,3 +863,27 @@ impl Command for GroupCommitCommand { } } } + +fn validate_group_messages( + repo_path: &Path, + groups: &mut [PlanGroup], + errors: &mut Vec, +) -> Result<(), CliError> { + for group in groups.iter_mut() { + let issues = check_message_format_for_repo(repo_path, &group.suggested_message) + .map_err(|e| CliError::Generic(e.to_string()))?; + if issues.is_empty() { + group.issues = None; + continue; + } + + group.issues = Some(issues.clone()); + errors.push(format!( + "group {} message failed commit rules: {}", + group_name_str(group.name), + issues.join("; ") + )); + } + + Ok(()) +} diff --git a/src/cli/commands/init.rs b/src/cli/commands/init.rs new file mode 100644 index 0000000..fd5b7d1 --- /dev/null +++ b/src/cli/commands/init.rs @@ -0,0 +1,309 @@ +use crate::cli::Command; +use crate::config::repository::{ + CommitRulesConfig, PackageConfig, RepositoryConfig, RepositoryMetadata, RepositoryType, + ScopeConfig, VersioningConfig, VersioningStrategy, +}; +use crate::error::CliError; +use crate::packages::detector::MultiPackageDetector; +use colored::Colorize; +use log::{debug, info}; +use serde_json::json; +use 
std::fs; +use std::path::Path; +use structopt::StructOpt; + +#[derive(Debug, StructOpt)] +pub struct InitCommand { + #[structopt(long, help = "Initialize multi-package support")] + #[allow(dead_code)] + pub multi_package: bool, + + #[structopt(short, long, help = "Show what would be created (don't write files)")] + pub dry_run: bool, + + #[structopt( + long, + help = "Output format: text or json", + default_value = "text", + possible_values = &["text", "json"] + )] + pub output: String, +} + +impl Command for InitCommand { + fn execute(&self, non_interactive: bool) -> Result<(), CliError> { + debug!("InitCommand::execute called"); + + // Check if already initialized + if Path::new(".committy/config.toml").exists() { + if non_interactive { + return Err(CliError::Generic( + "Repository already initialized with .committy/config.toml".to_string(), + )); + } else if self.output != "json" { + println!( + "{}", + "Warning: .committy/config.toml already exists".yellow() + ); + // In interactive mode, we still proceed but will prompt for confirmation later + } + } + + if self.output != "json" { + println!("{}", "Detecting packages in repository...".bold()); + } + let current_dir = std::env::current_dir() + .map_err(|e| CliError::InputError(format!("Failed to get current directory: {}", e)))?; + let detector = MultiPackageDetector::new().with_max_depth(5); + let detected_packages = detector + .detect_all(¤t_dir) + .map_err(|e| CliError::Generic(format!("Package detection failed: {}", e)))?; + + if detected_packages.is_empty() { + if self.output == "json" { + self.output_json_result( + None, + false, + false, + false, + Some(vec![ + "No packages detected. Create at least one package before initializing." + .to_string(), + ]), + )?; + } else { + println!( + "{}", + "No packages detected. Create at least one package before initializing." 
+ .yellow() + ); + } + return Ok(()); + } + + if self.output != "json" { + println!( + "{}", + format!("Detected {} package(s):", detected_packages.len()).green() + ); + for pkg in &detected_packages { + println!(" ● {} ({})", pkg.name.bold(), pkg.manager.name()); + } + println!(); + } + + // Gather configuration + let repo_name = if non_interactive { + "my-repo".to_string() + } else { + println!("Repository name (default: my-repo):"); + let mut input = String::new(); + std::io::stdin() + .read_line(&mut input) + .map_err(|e| CliError::InputError(format!("Failed to read input: {}", e)))?; + let trimmed = input.trim().to_string(); + if trimmed.is_empty() { + "my-repo".to_string() + } else { + trimmed + } + }; + + let strategy = if non_interactive { + VersioningStrategy::Independent + } else { + println!("Versioning strategy (independent/unified/hybrid) [default: independent]:"); + let mut input = String::new(); + std::io::stdin() + .read_line(&mut input) + .map_err(|e| CliError::InputError(format!("Failed to read input: {}", e)))?; + let trimmed = input.trim().to_lowercase(); + match trimmed.as_str() { + "unified" => VersioningStrategy::Unified, + "hybrid" => VersioningStrategy::Hybrid, + _ => VersioningStrategy::Independent, + } + }; + + let auto_detect_scopes = if non_interactive { + true + } else { + println!("Enable automatic scope detection? 
[default: yes]:"); + let mut input = String::new(); + std::io::stdin() + .read_line(&mut input) + .map_err(|e| CliError::InputError(format!("Failed to read input: {}", e)))?; + let trimmed = input.trim().to_lowercase(); + trimmed != "no" && trimmed != "n" + }; + + // Build configuration + let config = RepositoryConfig { + repository: RepositoryMetadata { + name: repo_name.clone(), + description: Some("Multi-package repository".to_string()), + repo_type: RepositoryType::MultiPackage, + }, + versioning: VersioningConfig { + strategy: strategy.clone(), + unified_version: if strategy == VersioningStrategy::Unified { + Some("0.1.0".to_string()) + } else { + None + }, + rules: None, + }, + packages: detected_packages + .iter() + .map(|pkg| PackageConfig { + name: pkg.name.clone(), + package_type: pkg.manager.package_type().to_string(), + path: pkg.path.to_string_lossy().to_string(), + version_file: pkg.version_file.clone(), + version_field: pkg.version_field.clone(), + primary: false, + sync_with: None, + independent: false, + workspace_member: false, + description: None, + }) + .collect(), + dependencies: vec![], + scopes: ScopeConfig { + auto_detect: auto_detect_scopes, + require_scope_for_multi_package: false, + allow_multiple_scopes: false, + scope_separator: ",".to_string(), + mappings: vec![], + }, + commit_rules: CommitRulesConfig::default(), + workspace: None, + }; + + // Validate configuration + config + .validate(¤t_dir) + .map_err(|e| CliError::Generic(format!("Configuration validation failed: {}", e)))?; + + // Serialize to TOML + let toml_string = toml::to_string_pretty(&config) + .map_err(|e| CliError::Generic(format!("Failed to serialize config: {}", e)))?; + + // Display configuration + if self.output != "json" { + println!("{}", "Configuration to be created:".bold()); + println!("{}", toml_string); + println!(); + } + + // Handle dry-run + if self.dry_run { + if self.output == "json" { + self.output_json_result(Some(&config), false, true, false, None)?; 
+ } else { + println!("{}", "Dry run - no files created".yellow()); + } + return Ok(()); + } + + // Confirm before writing (interactive mode) + if !non_interactive { + println!("Create .committy/config.toml? (yes/no) [default: yes]:"); + let mut input = String::new(); + std::io::stdin() + .read_line(&mut input) + .map_err(|e| CliError::InputError(format!("Failed to read input: {}", e)))?; + let trimmed = input.trim().to_lowercase(); + if trimmed == "no" || trimmed == "n" { + if self.output == "json" { + self.output_json_result(Some(&config), false, false, true, None)?; + } else { + println!("{}", "Cancelled".yellow()); + } + return Ok(()); + } + } + + // Write file + fs::create_dir_all(".committy").map_err(|e| { + CliError::Generic(format!("Failed to create .committy directory: {}", e)) + })?; + + fs::write(".committy/config.toml", &toml_string).map_err(|e| { + CliError::Generic(format!("Failed to write .committy/config.toml: {}", e)) + })?; + + if self.output == "json" { + self.output_json_result(Some(&config), true, true, false, None)?; + } else { + println!( + "{}", + "✓ Configuration created: .committy/config.toml" + .green() + .bold() + ); + println!( + "{}", + "Tip: Run 'committy config validate' to verify".dimmed() + ); + } + + info!("Repository initialized successfully"); + Ok(()) + } +} + +impl InitCommand { + fn output_json_result( + &self, + config: Option<&RepositoryConfig>, + created: bool, + ok: bool, + cancelled: bool, + errors: Option>, + ) -> Result<(), CliError> { + let config_json = config.map(|config| { + json!({ + "repository": config.repository.name, + "repository_type": config.repository.repo_type, + "versioning_strategy": config.versioning.strategy, + "packages": config.packages, + "scope_mappings": config.scopes.mappings, + }) + }); + + let result = json!({ + "command": "init", + "ok": ok, + "dry_run": self.dry_run, + "errors": errors, + "created": created, + "cancelled": cancelled, + "config": config_json, + "path": 
".committy/config.toml", + }); + println!( + "{}", + serde_json::to_string(&result) + .map_err(|e| CliError::Generic(format!("Failed to serialize JSON: {}", e)))? + ); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_init_command_creation() { + let cmd = InitCommand { + multi_package: true, + dry_run: true, + output: "text".to_string(), + }; + assert!(cmd.multi_package); + assert!(cmd.dry_run); + assert_eq!(cmd.output, "text"); + } +} diff --git a/src/cli/commands/lint.rs b/src/cli/commands/lint.rs index ee1ec2a..fd12c47 100644 --- a/src/cli/commands/lint.rs +++ b/src/cli/commands/lint.rs @@ -25,14 +25,20 @@ impl Command for LintCommand { if self.output == "json" { #[derive(Serialize)] struct LintOutput<'a> { + command: &'static str, ok: bool, + dry_run: bool, count: usize, issues: &'a [crate::linter::CommitIssue], + errors: Option>, } let payload = LintOutput { + command: "lint", ok: issues.is_empty(), + dry_run: false, count: issues.len(), issues: &issues, + errors: None, }; println!("{}", serde_json::to_string(&payload).unwrap()); } else if issues.is_empty() { diff --git a/src/cli/commands/lint_message.rs b/src/cli/commands/lint_message.rs index 42d03ae..880036b 100644 --- a/src/cli/commands/lint_message.rs +++ b/src/cli/commands/lint_message.rs @@ -1,9 +1,10 @@ use crate::cli::Command; use crate::error::CliError; -use crate::linter::check_message_format; +use crate::linter::check_message_format_for_repo; use serde::Serialize; use std::fs; use std::io::{self, Read}; +use std::path::PathBuf; use structopt::StructOpt; #[derive(Debug, StructOpt)] @@ -23,6 +24,10 @@ pub struct LintMessageCommand { /// Output format: text or json #[structopt(long, default_value = "text", possible_values = &["text", "json"])] output: String, + + /// Repository path for loading repository lint rules + #[structopt(long, default_value = ".", parse(from_os_str))] + repo_path: PathBuf, } impl Command for LintMessageCommand { @@ -40,19 +45,26 @@ impl 
Command for LintMessageCommand { buf }; - let issues = check_message_format(&msg); + let issues = check_message_format_for_repo(&self.repo_path, &msg) + .map_err(|e| CliError::Generic(e.to_string()))?; if self.output == "json" { #[derive(Serialize)] struct LintMessageOutput<'a> { + command: &'static str, ok: bool, + dry_run: bool, count: usize, issues: &'a [String], + errors: Option>, } let payload = LintMessageOutput { + command: "lint-message", ok: issues.is_empty(), + dry_run: false, count: issues.len(), issues: &issues, + errors: None, }; println!("{}", serde_json::to_string(&payload).unwrap()); } else if issues.is_empty() { diff --git a/src/cli/commands/mod.rs b/src/cli/commands/mod.rs index cb21cbf..9f4948b 100644 --- a/src/cli/commands/mod.rs +++ b/src/cli/commands/mod.rs @@ -1,7 +1,10 @@ pub mod amend; pub mod branch; pub mod commit; +pub mod config; pub mod group_commit; +pub mod init; pub mod lint; pub mod lint_message; +pub mod packages; pub mod tag; diff --git a/src/cli/commands/packages.rs b/src/cli/commands/packages.rs new file mode 100644 index 0000000..4ddb051 --- /dev/null +++ b/src/cli/commands/packages.rs @@ -0,0 +1,940 @@ +// Packages command for listing and managing detected packages + +use crate::cli::Command; +use crate::config::hierarchy::MergedConfig; +use crate::config::repository::RepositoryConfig; +use crate::error::CliError; +use crate::packages::MultiPackageDetector; +use anyhow::Result; +use colored::Colorize; +use serde::Serialize; +use serde_json::json; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use structopt::StructOpt; + +#[derive(StructOpt)] +pub struct PackagesCommand { + #[structopt(subcommand)] + pub subcommand: PackagesSubcommand, +} + +#[derive(StructOpt)] +pub enum PackagesSubcommand { + #[structopt(about = "List all detected packages")] + List { + #[structopt(short, long, help = "Show detailed information")] + verbose: bool, + #[structopt(long, default_value = "text", possible_values = &["text", 
"json"])] + output: String, + #[structopt( + long, + help = "Repository path", + default_value = ".", + parse(from_os_str) + )] + repo_path: std::path::PathBuf, + #[structopt( + long, + help = "Maximum depth to search for packages", + default_value = "5" + )] + max_depth: usize, + }, + #[structopt(about = "Show package version status and detect inconsistencies")] + Status { + #[structopt(short, long, help = "Show detailed information")] + verbose: bool, + #[structopt(long, default_value = "text", possible_values = &["text", "json"])] + output: String, + #[structopt( + long, + help = "Repository path", + default_value = ".", + parse(from_os_str) + )] + repo_path: std::path::PathBuf, + #[structopt(long, help = "Exit with error code if issues found (for CI)")] + check: bool, + }, + #[structopt(about = "Synchronize package versions according to configuration")] + Sync { + #[structopt(short, long, help = "Show what would be done without making changes")] + dry_run: bool, + #[structopt(long, default_value = "text", possible_values = &["text", "json"])] + output: String, + #[structopt( + long, + help = "Repository path", + default_value = ".", + parse(from_os_str) + )] + repo_path: std::path::PathBuf, + }, + #[structopt(about = "Show packages changed in a git range")] + Changed { + #[structopt(long, help = "Git range (e.g., main..HEAD, HEAD~1)")] + range: Option, + #[structopt(long, default_value = "text", possible_values = &["text", "json"])] + output: String, + #[structopt(short, long)] + verbose: bool, + #[structopt( + long, + help = "Repository path", + default_value = ".", + parse(from_os_str) + )] + repo_path: std::path::PathBuf, + }, +} + +impl Command for PackagesCommand { + fn execute(&self, _non_interactive: bool) -> Result<(), CliError> { + match &self.subcommand { + PackagesSubcommand::List { + verbose, + output, + repo_path, + max_depth, + } => list(repo_path, *verbose, *max_depth, output) + .map_err(|e| CliError::Generic(e.to_string())), + 
PackagesSubcommand::Status { + verbose, + output, + repo_path, + check, + } => status(repo_path, *verbose, *check, output) + .map_err(|e| CliError::Generic(e.to_string())), + PackagesSubcommand::Sync { + dry_run, + output, + repo_path, + } => sync(repo_path, *dry_run, output).map_err(|e| CliError::Generic(e.to_string())), + PackagesSubcommand::Changed { + range, + output, + verbose, + repo_path, + } => changed(repo_path, range.as_deref(), output, *verbose) + .map_err(|e| CliError::Generic(e.to_string())), + } + } +} + +#[derive(Debug, Serialize)] +struct PackageListItem { + name: String, + manager: String, + package_type: String, + path: String, + version: String, + version_file: String, + version_field: String, + workspace: bool, + workspace_members: Vec, +} + +#[derive(Debug, Serialize)] +struct ConfiguredPackageStatus { + name: String, + path: String, + package_type: String, + found: bool, + version: Option, + manager: Option, + primary: bool, + sync_with: Option, + issues: Vec, +} + +#[derive(Debug, Serialize)] +struct SyncOperation { + name: String, + path: String, + from_version: String, + to_version: String, + manager: String, +} + +#[derive(Debug, Serialize)] +struct ChangedPackageJson { + name: String, + path: String, + version: String, + files_changed: usize, + files: Vec, +} + +/// List all detected packages +pub fn list(repo_path: &Path, verbose: bool, max_depth: usize, output: &str) -> Result<()> { + let detector = MultiPackageDetector::new().with_max_depth(max_depth); + let packages = detector.detect_all(repo_path)?; + + if output == "json" { + let package_items: Vec<_> = packages.iter().map(package_list_item).collect(); + let payload = json!({ + "command": "packages", + "mode": "list", + "ok": true, + "dry_run": false, + "errors": serde_json::Value::Null, + "repo_path": repo_path.display().to_string(), + "max_depth": max_depth, + "total_packages": package_items.len(), + "packages": package_items, + "verbose": verbose, + }); + println!("{}", 
serde_json::to_string(&payload)?); + return Ok(()); + } + + println!("{}", "Detecting packages...".bold()); + println!(); + + if packages.is_empty() { + println!("{}", "No packages detected".yellow()); + println!(" Try running with --max-depth to search deeper"); + return Ok(()); + } + + println!("{}", format!("Found {} package(s):", packages.len()).bold()); + println!(); + + for pkg in &packages { + let workspace_indicator = if pkg.is_workspace() { + " [workspace]".magenta().to_string() + } else { + String::new() + }; + + println!( + " {} {} - {} at {}{}", + "●".green(), + pkg.name.bold(), + pkg.manager.name().cyan(), + pkg.path.display(), + workspace_indicator + ); + + if verbose { + println!(" Version: {}", pkg.version); + println!(" Version file: {}", pkg.version_file); + println!(" Version field: {}", pkg.version_field); + println!(" Package type: {}", pkg.manager.package_type()); + + if !pkg.workspace_members.is_empty() { + println!(" Workspace members:"); + for member in &pkg.workspace_members { + println!(" - {}", member); + } + } + println!(); + } + } + + if !verbose { + println!(); + println!("{}", "Tip: Use --verbose for more details".dimmed()); + } + + Ok(()) +} + +/// Show package version status +pub fn status(repo_path: &Path, verbose: bool, check: bool, output: &str) -> Result<()> { + // Try to load repository config + let repo_config = RepositoryConfig::try_load(repo_path)?; + + if repo_config.is_none() { + if output == "json" { + let payload = json!({ + "command": "packages", + "mode": "status", + "ok": true, + "dry_run": false, + "errors": serde_json::Value::Null, + "configured": false, + "repo_path": repo_path.display().to_string(), + "issues": Vec::::new(), + "packages": Vec::::new(), + "unconfigured_packages": Vec::::new(), + "verbose": verbose, + "check": check, + }); + println!("{}", serde_json::to_string(&payload)?); + return Ok(()); + } + + println!("{}", "Checking package status...".bold()); + println!(); + println!("{}", "No 
.committy/config.toml found".yellow()); + println!(" Running in single-package mode"); + println!(" Use 'committy init --multi-package' to enable multi-package support"); + return Ok(()); + } + + let config = repo_config.unwrap(); + + // Detect actual packages + let detector = MultiPackageDetector::new(); + let detected_packages = detector.detect_all(repo_path)?; + + let mut package_statuses = Vec::new(); + let mut unconfigured_packages = Vec::new(); + if output != "json" { + println!("{}", "Checking package status...".bold()); + println!(); + println!("{}", "Package Status:".bold()); + println!(); + } + + let mut issues = Vec::new(); + + // Check each configured package + for cfg_pkg in &config.packages { + let _pkg_path = repo_path.join(&cfg_pkg.path); + + // Find matching detected package + let detected = detected_packages + .iter() + .find(|p| p.path == std::path::Path::new(&cfg_pkg.path)); + + if let Some(detected_pkg) = detected { + // Package found + if output != "json" { + let status_icon = "✓".green(); + println!( + " {} {} - {} ({})", + status_icon, + cfg_pkg.name.bold(), + detected_pkg.version, + detected_pkg.manager.name() + ); + + if verbose { + println!(" Path: {}", cfg_pkg.path); + println!(" Type: {}", cfg_pkg.package_type); + if cfg_pkg.primary { + println!(" {}", "Primary package".yellow()); + } + if let Some(ref sync_with) = cfg_pkg.sync_with { + println!(" Syncs with: {}", sync_with.cyan()); + } + println!(); + } + } + + // Check for version sync issues + if let Some(ref sync_with) = cfg_pkg.sync_with { + // Find the package we should sync with + if let Some(sync_pkg) = config.packages.iter().find(|p| &p.name == sync_with) { + let _sync_pkg_path = repo_path.join(&sync_pkg.path); + if let Some(sync_detected) = detected_packages + .iter() + .find(|p| p.path == std::path::Path::new(&sync_pkg.path)) + { + if detected_pkg.version != sync_detected.version { + issues.push(format!( + "{} version ({}) does not match {} version ({})", + cfg_pkg.name, + 
detected_pkg.version, + sync_with, + sync_detected.version + )); + } + } + } + } + + let mut package_issues = Vec::new(); + if let Some(ref sync_with) = cfg_pkg.sync_with { + if let Some(sync_pkg) = config.packages.iter().find(|p| &p.name == sync_with) { + if let Some(sync_detected) = detected_packages + .iter() + .find(|p| p.path == std::path::Path::new(&sync_pkg.path)) + { + if detected_pkg.version != sync_detected.version { + package_issues.push(format!( + "{} version ({}) does not match {} version ({})", + cfg_pkg.name, + detected_pkg.version, + sync_with, + sync_detected.version + )); + } + } + } + } + + package_statuses.push(ConfiguredPackageStatus { + name: cfg_pkg.name.clone(), + path: cfg_pkg.path.clone(), + package_type: cfg_pkg.package_type.clone(), + found: true, + version: Some(detected_pkg.version.clone()), + manager: Some(detected_pkg.manager.name().to_string()), + primary: cfg_pkg.primary, + sync_with: cfg_pkg.sync_with.clone(), + issues: package_issues, + }); + } else { + // Package not found + if output != "json" { + let status_icon = "✗".red(); + println!( + " {} {} - {}", + status_icon, + cfg_pkg.name.bold(), + "NOT FOUND".red() + ); + } + issues.push(format!( + "Package '{}' not found at {}", + cfg_pkg.name, cfg_pkg.path + )); + package_statuses.push(ConfiguredPackageStatus { + name: cfg_pkg.name.clone(), + path: cfg_pkg.path.clone(), + package_type: cfg_pkg.package_type.clone(), + found: false, + version: None, + manager: None, + primary: cfg_pkg.primary, + sync_with: cfg_pkg.sync_with.clone(), + issues: vec![format!( + "Package '{}' not found at {}", + cfg_pkg.name, cfg_pkg.path + )], + }); + } + } + + // Check for detected packages not in config + for detected_pkg in &detected_packages { + if !config + .packages + .iter() + .any(|p| std::path::Path::new(&p.path) == detected_pkg.path) + { + unconfigured_packages.push(package_list_item(detected_pkg)); + if output != "json" { + println!( + " {} {} - {} ({})", + "⚠".yellow(), + 
detected_pkg.name.bold(), + detected_pkg.version, + "not in config".yellow() + ); + if verbose { + println!(" Path: {}", detected_pkg.path.display()); + println!(); + } + } + } + } + + if output == "json" { + let has_issues = !issues.is_empty(); + let payload = json!({ + "command": "packages", + "mode": "status", + "ok": !has_issues, + "dry_run": false, + "errors": serde_json::Value::Null, + "configured": true, + "repo_path": repo_path.display().to_string(), + "issues": issues, + "packages": package_statuses, + "unconfigured_packages": unconfigured_packages, + "verbose": verbose, + "check": check, + }); + println!("{}", serde_json::to_string(&payload)?); + if check && has_issues { + return Err(anyhow::anyhow!("Package status check failed")); + } + return Ok(()); + } + + println!(); + + // Show issues + if !issues.is_empty() { + println!("{}", "Issues Found:".red().bold()); + for issue in &issues { + println!(" {} {}", "✗".red(), issue); + } + println!(); + + if check { + return Err(anyhow::anyhow!("Package status check failed")); + } + } else { + println!("{}", "✓ All packages are in sync".green().bold()); + } + + Ok(()) +} + +/// Synchronize package versions according to configuration (Q30: Essential - support --dry-run) +pub fn sync(repo_path: &Path, dry_run: bool, output: &str) -> Result<()> { + // Load repository config + let config = match RepositoryConfig::try_load(repo_path)? 
{ + Some(c) => c, + None => { + if output == "json" { + let payload = json!({ + "command": "packages", + "mode": "sync", + "ok": true, + "dry_run": dry_run, + "errors": serde_json::Value::Null, + "config_found": false, + "repo_path": repo_path.display().to_string(), + "operations": Vec::::new(), + "updated": Vec::::new(), + "failed": Vec::::new(), + }); + println!("{}", serde_json::to_string(&payload)?); + return Ok(()); + } + + println!("{}", "Synchronizing package versions...".bold()); + println!(); + println!("{}", "No .committy/config.toml found".yellow()); + println!(" Use 'committy init --multi-package' to enable multi-package support"); + return Ok(()); + } + }; + + // Detect current packages + let detector = MultiPackageDetector::new(); + let detected_packages = detector.detect_all(repo_path)?; + + let mut sync_operations = Vec::new(); + + // Find packages that need syncing + for cfg_pkg in &config.packages { + if let Some(ref sync_with) = cfg_pkg.sync_with { + // Find the package we should sync with + let sync_target = config + .packages + .iter() + .find(|p| &p.name == sync_with) + .ok_or_else(|| { + anyhow::anyhow!( + "Package '{}' syncs with non-existent package '{}'", + cfg_pkg.name, + sync_with + ) + })?; + + // Get current versions + let current_pkg = detected_packages + .iter() + .find(|p| p.path == std::path::Path::new(&cfg_pkg.path)); + + let target_pkg = detected_packages + .iter() + .find(|p| p.path == std::path::Path::new(&sync_target.path)); + + if let (Some(current), Some(target)) = (current_pkg, target_pkg) { + if current.version != target.version { + sync_operations.push(( + cfg_pkg.name.clone(), + cfg_pkg.path.clone(), + current.version.clone(), + target.version.clone(), + current.manager.clone(), + )); + } + } + } + } + + let operations_json: Vec<_> = sync_operations + .iter() + .map(|(name, path, old_ver, new_ver, manager)| SyncOperation { + name: name.clone(), + path: path.clone(), + from_version: old_ver.clone(), + to_version: 
new_ver.clone(), + manager: manager.name().to_string(), + }) + .collect(); + + if sync_operations.is_empty() { + if output == "json" { + let payload = json!({ + "command": "packages", + "mode": "sync", + "ok": true, + "dry_run": dry_run, + "errors": serde_json::Value::Null, + "config_found": true, + "repo_path": repo_path.display().to_string(), + "operations": operations_json, + "updated": Vec::::new(), + "failed": Vec::::new(), + }); + println!("{}", serde_json::to_string(&payload)?); + return Ok(()); + } + + println!("{}", "Synchronizing package versions...".bold()); + println!(); + println!("{}", "✓ All packages are already in sync".green().bold()); + return Ok(()); + } + + if output == "json" && dry_run { + let payload = json!({ + "command": "packages", + "mode": "sync", + "ok": true, + "dry_run": true, + "errors": serde_json::Value::Null, + "config_found": true, + "repo_path": repo_path.display().to_string(), + "operations": operations_json, + "updated": Vec::::new(), + "failed": Vec::::new(), + }); + println!("{}", serde_json::to_string(&payload)?); + return Ok(()); + } + + if output != "json" { + println!("{}", "Synchronizing package versions...".bold()); + println!(); + println!("{}", "Packages to sync:".bold()); + for (name, _path, old_ver, new_ver, _manager) in &sync_operations { + println!( + " {} {} → {}", + name.bold(), + old_ver.dimmed(), + new_ver.green() + ); + } + println!(); + } + + if dry_run { + if output != "json" { + println!("{}", "Dry run - no changes made".yellow()); + } + return Ok(()); + } + + // Q29: Prompt user to commit + if output != "json" { + println!("{}", "Applying version updates...".bold()); + } + + // Apply updates + use crate::packages::cargo::CargoDetector; + use crate::packages::npm::NpmDetector; + use crate::packages::types::PackageDetector; + + let mut updated = Vec::new(); + let mut failed = Vec::new(); + + for (name, path, _old_ver, new_ver, manager) in &sync_operations { + let pkg_path = repo_path.join(path); + + let 
result = match manager { + crate::packages::types::PackageManager::Cargo { .. } => { + CargoDetector.set_version(&pkg_path, new_ver) + } + crate::packages::types::PackageManager::Npm { .. } + | crate::packages::types::PackageManager::Pnpm { .. } + | crate::packages::types::PackageManager::Yarn { .. } => { + NpmDetector.set_version(&pkg_path, new_ver) + } + _ => { + let message = format!("{name} - unsupported package manager"); + failed.push(message.clone()); + if output != "json" { + println!(" {} {}", "⚠".yellow(), message); + } + continue; + } + }; + + match result { + Ok(_) => { + updated.push(name.clone()); + if output != "json" { + println!(" {} {} updated", "✓".green(), name); + } + } + Err(e) => { + failed.push(format!("{name} failed: {e}")); + if output != "json" { + println!(" {} {} failed: {}", "✗".red(), name, e); + } + } + } + } + + if output == "json" { + let payload = json!({ + "command": "packages", + "mode": "sync", + "ok": failed.is_empty(), + "dry_run": false, + "errors": if failed.is_empty() { serde_json::Value::Null } else { json!(failed.clone()) }, + "config_found": true, + "repo_path": repo_path.display().to_string(), + "operations": operations_json, + "updated": updated, + "failed": failed, + }); + println!("{}", serde_json::to_string(&payload)?); + return Ok(()); + } + + println!(); + println!("{}", "✓ Synchronization complete".green().bold()); + println!(); + println!("{}", "Remember to commit these changes:".dimmed()); + println!(" {}", "git add .".dimmed()); + println!( + " {}", + "git commit -m \"chore: sync package versions\"".dimmed() + ); + + Ok(()) +} + +/// Show packages changed in a git range +pub fn changed( + repo_path: &Path, + range: Option<&str>, + output_format: &str, + verbose: bool, +) -> Result<()> { + // Load repository config (for potential future use) + let _merged_config = MergedConfig::load(repo_path) + .map_err(|e| anyhow::anyhow!("Failed to load config: {}", e)) + .ok(); + + // Get changed files in the git range + 
let default_range = "HEAD~1..HEAD"; + let range_to_use = range.unwrap_or(default_range); + let changed_files = get_changed_files(repo_path, range_to_use)?; + + if changed_files.is_empty() { + if output_format == "json" { + let result = json!({ + "command": "packages", + "mode": "changed", + "ok": true, + "dry_run": false, + "errors": serde_json::Value::Null, + "packages": [], + "total_packages": 0, + "total_files": 0, + }); + println!("{}", serde_json::to_string_pretty(&result)?); + } else { + println!("{}", "No files changed in this range".yellow()); + } + return Ok(()); + } + + // Detect packages and map files to them + let detector = MultiPackageDetector::new(); + let all_packages = detector.detect_all(repo_path)?; + + let mut changed_packages: HashMap = HashMap::new(); + + for file in &changed_files { + // Find which package this file belongs to + for pkg in &all_packages { + if file.starts_with(&pkg.path) { + changed_packages + .entry(pkg.name.clone()) + .or_insert_with(|| ChangedPackageInfo { + name: pkg.name.clone(), + path: pkg.path.to_string_lossy().to_string(), + version: pkg.version.clone(), + files_changed: vec![], + }) + .files_changed + .push(file.clone()); + break; + } + } + } + + // Output results + if output_format == "json" { + output_json(&changed_packages, &changed_files)?; + } else { + output_text(&changed_packages, &changed_files, verbose)?; + } + + Ok(()) +} + +/// Information about a changed package +#[derive(Debug, Clone)] +pub struct ChangedPackageInfo { + pub name: String, + pub path: String, + pub version: String, + pub files_changed: Vec, +} + +/// Get changed files in a git range +fn get_changed_files(repo_path: &Path, range: &str) -> Result> { + let output = std::process::Command::new("git") + .args(["diff", range, "--name-only"]) + .current_dir(repo_path) + .output() + .map_err(|e| anyhow::anyhow!("Failed to get git diff: {}", e))?; + + if !output.status.success() { + return Err(anyhow::anyhow!( + "Git diff failed: {}", + 
String::from_utf8_lossy(&output.stderr) + )); + } + + let files = String::from_utf8(output.stdout)? + .lines() + .map(PathBuf::from) + .collect(); + + Ok(files) +} + +/// Output changed packages in text format +fn output_text( + packages: &HashMap, + files: &[PathBuf], + verbose: bool, +) -> Result<()> { + if packages.is_empty() { + println!("{}", "No packages changed".yellow()); + return Ok(()); + } + + println!( + "{}", + format!("Packages changed: {}", packages.len()) + .bold() + .green() + ); + println!(); + + for pkg in packages.values() { + println!( + " {} {} - {}", + "●".green(), + pkg.name.bold(), + pkg.version.cyan() + ); + if verbose { + println!(" Path: {}", pkg.path); + println!(" Files changed: {}", pkg.files_changed.len()); + for file in &pkg.files_changed { + println!(" - {}", file.display().to_string().dimmed()); + } + } + } + + println!(); + println!("Total files changed: {}", files.len()); + + Ok(()) +} + +/// Output changed packages in JSON format +fn output_json(packages: &HashMap, files: &[PathBuf]) -> Result<()> { + let packages_json: Vec<_> = packages.values().map(changed_package_json).collect(); + + let result = json!({ + "command": "packages", + "mode": "changed", + "ok": true, + "dry_run": false, + "errors": serde_json::Value::Null, + "packages": packages_json, + "total_packages": packages.len(), + "total_files": files.len(), + }); + + println!("{}", serde_json::to_string_pretty(&result)?); + + Ok(()) +} + +fn package_list_item(pkg: &crate::packages::types::PackageInfo) -> PackageListItem { + PackageListItem { + name: pkg.name.clone(), + manager: pkg.manager.name().to_string(), + package_type: pkg.manager.package_type().to_string(), + path: pkg.path.display().to_string(), + version: pkg.version.clone(), + version_file: pkg.version_file.clone(), + version_field: pkg.version_field.clone(), + workspace: pkg.is_workspace(), + workspace_members: pkg.workspace_members.clone(), + } +} + +fn changed_package_json(pkg: &ChangedPackageInfo) -> 
ChangedPackageJson { + ChangedPackageJson { + name: pkg.name.clone(), + path: pkg.path.clone(), + version: pkg.version.clone(), + files_changed: pkg.files_changed.len(), + files: pkg + .files_changed + .iter() + .map(|file| file.to_string_lossy().to_string()) + .collect(), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use tempfile::TempDir; + + #[test] + fn test_list_no_packages() { + let temp_dir = TempDir::new().unwrap(); + let result = list(temp_dir.path(), false, 5, "text"); + assert!(result.is_ok()); + } + + #[test] + fn test_list_with_package() { + let temp_dir = TempDir::new().unwrap(); + fs::write( + temp_dir.path().join("Cargo.toml"), + r#" +[package] +name = "test-package" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + let result = list(temp_dir.path(), false, 5, "text"); + assert!(result.is_ok()); + } + + #[test] + fn test_status_without_config() { + let temp_dir = TempDir::new().unwrap(); + let result = status(temp_dir.path(), false, false, "text"); + assert!(result.is_ok()); + } +} diff --git a/src/cli/commands/tag.rs b/src/cli/commands/tag.rs index e6d0dd8..ccdfd33 100644 --- a/src/cli/commands/tag.rs +++ b/src/cli/commands/tag.rs @@ -1,13 +1,24 @@ use std::collections::HashMap; use crate::cli::Command; +use crate::config::hierarchy::MergedConfig; +use crate::config::repository::{RepositoryConfig, VersioningStrategy}; +use crate::dependency::updater::DependencyUpdater; use crate::error::CliError; use crate::git; use crate::input; use crate::telemetry; +use crate::versioning::hybrid::HybridVersioning; +use crate::versioning::independent::IndependentVersioning; +use crate::versioning::manager::{BumpType, VersionManager}; +use crate::versioning::unified::UnifiedVersioning; use log::debug; use log::info; +use regex::Regex; +use serde::Serialize; use serde_json::Value; +use std::fs; +use std::path::{Path, PathBuf}; use structopt::StructOpt; #[derive(Debug, StructOpt)] @@ -31,41 +42,113 @@ pub struct TagCommand { /// 
Output format: text or json #[structopt(long, default_value = "text", possible_values = &["text", "json"])] output: String, + + /// Update dependency references during tagging + #[structopt(long, help = "Update dependency references in other packages")] + update_deps: bool, + + #[structopt(long, default_value = ".", parse(from_os_str))] + repo_path: PathBuf, +} + +#[derive(Debug, Serialize)] +struct TagCommandOutput { + command: String, + ok: bool, + dry_run: bool, + errors: Option>, + old_tag: Option, + new_tag: Option, + pre_release: Option, + published: bool, } impl Command for TagCommand { fn execute(&self, non_interactive: bool) -> Result<(), CliError> { - if git::has_staged_changes()? { + if self.tag_options.publish_requested() && !self.tag_options.confirm_publish() { + return Err(CliError::InputError( + "Publishing a tag requires --confirm-publish".to_string(), + )); + } + + let default_repo_path = PathBuf::from("."); + let effective_repo_path = if self.repo_path != default_repo_path { + self.repo_path.clone() + } else { + PathBuf::from(&self.tag_options.source) + }; + + if self.tag_options.source != "." + && self.repo_path != default_repo_path + && Path::new(&self.tag_options.source) != self.repo_path.as_path() + { + return Err(CliError::InputError( + "Use either --repo-path or --source for tag repository selection, not both" + .to_string(), + )); + } + + if git::has_staged_changes_from(&effective_repo_path)? 
{ return Err(CliError::StagedChanges); } + // Load merged config to detect multi-package mode + let merged_config = MergedConfig::load(&effective_repo_path).ok(); + + // Check if multi-package mode + let is_multi_package = merged_config + .as_ref() + .map(|c| c.is_multi_package()) + .unwrap_or(false); + + let mut tag_options = self.tag_options.clone(); + tag_options.source = effective_repo_path.display().to_string(); + if let Some(name) = &self.name { + // Explicit tag name provided - use legacy flow let version_manager = - git::TagGenerator::new(self.tag_options.clone(), self.bump_config_files); + git::TagGenerator::new(tag_options.clone(), self.bump_config_files); version_manager.create_and_push_tag(&version_manager.open_repository()?, name)?; + let payload = TagCommandOutput { + command: "tag".into(), + ok: true, + dry_run: self.tag_options.dry_run(), + errors: None, + old_tag: None, + new_tag: Some(name.clone()), + pre_release: None, + published: self.tag_options.will_publish_remote(), + }; if self.output == "json" { - let payload = serde_json::json!({ - "ok": true, - "new_tag": name, - }); println!("{}", serde_json::to_string(&payload).unwrap()); } else { println!("Tag {name} created successfully!"); } + } else if is_multi_package { + // Multi-package mode - use new strategy-aware flow + self.execute_multi_package( + non_interactive, + merged_config.as_ref().unwrap(), + &effective_repo_path, + )?; } else if non_interactive { // In non-interactive mode, auto-calculate and act based on options let mut version_manager = - git::TagGenerator::new(self.tag_options.clone(), self.bump_config_files); + git::TagGenerator::new(tag_options.clone(), self.bump_config_files); version_manager.run()?; // Print the calculated tag so callers/tests can consume it + let payload = TagCommandOutput { + command: "tag".into(), + ok: true, + dry_run: self.tag_options.dry_run(), + errors: None, + old_tag: Some(version_manager.current_tag.clone()), + new_tag: 
Some(version_manager.new_tag.clone()), + pre_release: Some(version_manager.is_pre_release), + published: self.tag_options.will_publish_remote(), + }; if self.output == "json" { - let payload = serde_json::json!({ - "ok": true, - "old_tag": version_manager.current_tag, - "new_tag": version_manager.new_tag, - "pre_release": version_manager.is_pre_release, - }); println!("{}", serde_json::to_string(&payload).unwrap()); } else { println!("{}", version_manager.new_tag); @@ -80,16 +163,19 @@ impl Command for TagCommand { info!("Abort"); return Ok(()); } - let mut version_manager = - git::TagGenerator::new(self.tag_options.clone(), self.bump_config_files); + let mut version_manager = git::TagGenerator::new(tag_options, self.bump_config_files); version_manager.run()?; + let payload = TagCommandOutput { + command: "tag".into(), + ok: true, + dry_run: self.tag_options.dry_run(), + errors: None, + old_tag: Some(version_manager.current_tag.clone()), + new_tag: Some(version_manager.new_tag.clone()), + pre_release: Some(version_manager.is_pre_release), + published: self.tag_options.will_publish_remote(), + }; if self.output == "json" { - let payload = serde_json::json!({ - "ok": true, - "old_tag": version_manager.current_tag, - "new_tag": version_manager.new_tag, - "pre_release": version_manager.is_pre_release, - }); println!("{}", serde_json::to_string(&payload).unwrap()); } else { println!("Tag {} created successfully!", version_manager.new_tag); @@ -117,3 +203,350 @@ impl Command for TagCommand { Ok(()) } } + +impl TagCommand { + /// Execute multi-package tag operation + fn execute_multi_package( + &self, + _non_interactive: bool, + config: &MergedConfig, + repo_path: &Path, + ) -> Result<(), CliError> { + let repo_config = config + .repository + .as_ref() + .ok_or_else(|| CliError::Generic("Repository config not found".to_string()))?; + + info!( + "🏢 Multi-package mode detected, using {:?} strategy", + repo_config.versioning.strategy + ); + + // Step 1: Get commit log since 
last tag + let repo = git::discover_repository_from(repo_path)?; + let commit_log = self.get_commit_log_since_last_tag(&repo)?; + debug!("Commit log since last tag:\n{}", commit_log); + + // Step 2: Detect scopes (affected packages) from commit log + let affected_packages = self.detect_affected_packages(&commit_log, repo_config)?; + if affected_packages.is_empty() { + info!("ℹ️ No packages affected by commits. Skipping tag creation."); + return Ok(()); + } + info!("📦 Affected packages: {}", affected_packages.join(", ")); + + // Step 3: Determine version bump type from commit messages + let bump_type = self.determine_bump_type(&commit_log, config)?; + info!("📈 Version bump type: {:?}", bump_type); + + // Step 4: Route through versioning strategy + let version_updates = + self.calculate_version_updates(repo_config, repo_path, &affected_packages, bump_type)?; + + if version_updates.is_empty() { + info!("ℹ️ No version updates calculated. Skipping tag creation."); + return Ok(()); + } + + // Step 5: Update version files per package + if self.bump_config_files { + self.apply_version_updates(repo_path, &version_updates)?; + info!( + "✅ Updated version files for {} package(s)", + version_updates.len() + ); + } + + // Step 6: Update dependencies if requested + if self.update_deps { + self.apply_dependency_updates(repo_config, repo_path, &version_updates)?; + info!("✅ Updated dependency references"); + } + + // Step 7: Create tags based on versioning strategy + self.create_multi_package_tags(&repo, repo_config, &version_updates)?; + info!("✅ Tags created successfully"); + + Ok(()) + } + + /// Get commit log since last tag + fn get_commit_log_since_last_tag(&self, repo: &git2::Repository) -> Result { + // Try to find the latest tag + let latest_tag = self.find_latest_tag(repo)?; + let range = if latest_tag.is_empty() { + "HEAD".to_string() + } else { + format!("{}..HEAD", latest_tag) + }; + + // Use git log to get commit messages + let repo_path = repo.path(); + + let output 
= std::process::Command::new("git")
            .args(["log", "--pretty=%B", &range])
            .current_dir(repo_path)
            .output()
            .map_err(|e| CliError::Generic(format!("Failed to get commit log: {}", e)))?;

        String::from_utf8(output.stdout)
            .map_err(|e| CliError::Generic(format!("Invalid UTF-8 in commit log: {}", e)))
    }

    /// Find the latest tag in the repository.
    ///
    /// `Repository::tag_names` yields tags in lexical order, so the
    /// previous `last()` implementation preferred "v9.0.0" over "v10.0.0".
    /// Prefer the highest tag by numeric semver components when tags parse
    /// as versions; fall back to the lexically last tag otherwise, and to
    /// "" when there are no tags at all (callers treat "" as "no tag").
    fn find_latest_tag(&self, repo: &git2::Repository) -> Result<String, CliError> {
        let tags = repo.tag_names(None).map_err(CliError::from)?;

        // Parse "1.2.3" / "v1.2.3" (ignoring any -pre/+build suffix) into
        // numeric components; None when the tag is not version-like.
        fn parse_version(tag: &str) -> Option<Vec<u64>> {
            let core = tag.strip_prefix('v').unwrap_or(tag);
            let core = core.split(|c| c == '-' || c == '+').next().unwrap_or(core);
            let parts = core
                .split('.')
                .map(|p| p.parse::<u64>().ok())
                .collect::<Option<Vec<u64>>>()?;
            if parts.is_empty() {
                None
            } else {
                Some(parts)
            }
        }

        let mut best: Option<(Vec<u64>, String)> = None;
        let mut last = String::new();
        for tag in tags.iter().flatten() {
            last = tag.to_string();
            if let Some(v) = parse_version(tag) {
                // Vec<u64> compares lexicographically, i.e. component-wise.
                if best.as_ref().map_or(true, |(bv, _)| v > *bv) {
                    best = Some((v, tag.to_string()));
                }
            }
        }

        Ok(best.map(|(_, t)| t).unwrap_or(last))
    }

    /// Detect affected packages from commit log using scopes
    fn detect_affected_packages(
        &self,
        commit_log: &str,
        config: &RepositoryConfig,
    ) -> Result<Vec<String>, CliError> {
        let mut packages = std::collections::HashSet::new();

        // Conventional-commit headers look like "type(scope): message" or,
        // for breaking changes, "type(scope)!: message" — the optional "!"
        // was previously rejected, silently dropping breaking commits.
        let scope_regex = Regex::new(r"^[a-z]+\(([^)]+)\)!?:")
            .map_err(|e| CliError::Generic(format!("Regex error: {}", e)))?;

        for line in commit_log.lines() {
            if let Some(caps) = scope_regex.captures(line) {
                let scope = caps.get(1).map(|m| m.as_str()).unwrap_or("");
                // Only scopes that name a configured package count.
                if config.packages.iter().any(|p| p.name == scope) {
                    packages.insert(scope.to_string());
                }
            }
        }

        Ok(packages.into_iter().collect())
    }

    /// Determine version bump type from commit messages
    fn determine_bump_type(
        &self,
        commit_log: &str,
        config: &MergedConfig,
    ) -> Result<BumpType, CliError> {
        let major_regex = config.get_major_regex();
        let minor_regex = config.get_minor_regex();

        let major_re = Regex::new(major_regex)
            .map_err(|e| CliError::Generic(format!("Invalid major regex: {}", e)))?;
        let minor_re = Regex::new(minor_regex)
            .map_err(|e| CliError::Generic(format!("Invalid minor regex: {}", e)))?;

        // NOTE(review): the patterns are matched against the whole
        // multi-line log, so a `^` anchor in a configured pattern only hits
        // the first line unless compiled with `(?m)` — confirm the default
        // patterns account for this.
        if major_re.is_match(commit_log) {
            Ok(BumpType::Major)
        } else if minor_re.is_match(commit_log) {
            Ok(BumpType::Minor)
        } else {
            Ok(BumpType::Patch)
        }
    }

    /// Calculate version updates
using appropriate strategy + fn calculate_version_updates( + &self, + config: &RepositoryConfig, + repo_path: &Path, + affected_packages: &[String], + bump_type: BumpType, + ) -> Result, CliError> { + let strategy = &config.versioning.strategy; + + let updates = match strategy { + VersioningStrategy::Independent => { + let strat = IndependentVersioning::new(config.clone(), repo_path); + strat + .calculate_updates(affected_packages, bump_type) + .map_err(|e| CliError::Generic(format!("Failed to calculate updates: {}", e)))? + } + VersioningStrategy::Unified => { + let strat = UnifiedVersioning::new(config.clone(), repo_path); + strat + .calculate_updates(affected_packages, bump_type) + .map_err(|e| CliError::Generic(format!("Failed to calculate updates: {}", e)))? + } + VersioningStrategy::Hybrid => { + let strat = HybridVersioning::new(config.clone(), repo_path); + strat + .calculate_updates(affected_packages, bump_type) + .map_err(|e| CliError::Generic(format!("Failed to calculate updates: {}", e)))? + } + }; + + Ok(updates) + } + + /// Apply version updates to package files + fn apply_version_updates( + &self, + repo_path: &Path, + updates: &[crate::versioning::manager::VersionUpdate], + ) -> Result<(), CliError> { + for update in updates { + // Find package config to get version file + let config = RepositoryConfig::try_load(repo_path) + .map_err(|e| CliError::Generic(format!("Failed to load config: {}", e)))? 
+ .ok_or_else(|| CliError::Generic("No repository config".to_string()))?; + + let pkg_config = config + .packages + .iter() + .find(|p| p.name == update.package_name) + .ok_or_else(|| { + CliError::Generic(format!( + "Package '{}' not found in config", + update.package_name + )) + })?; + + // Update version file + let version_file = repo_path + .join(&pkg_config.path) + .join(&pkg_config.version_file); + + let content = fs::read_to_string(&version_file) + .map_err(|e| CliError::Generic(format!("Failed to read version file: {}", e)))?; + + let updated = content.replace(&update.old_version, &update.new_version); + + fs::write(&version_file, updated) + .map_err(|e| CliError::Generic(format!("Failed to write version file: {}", e)))?; + + // Stage the updated file + if let Err(e) = git::stage_file(&version_file) { + debug!("Failed to stage {}: {}", version_file.display(), e); + } + } + + Ok(()) + } + + /// Apply dependency updates to other packages + fn apply_dependency_updates( + &self, + config: &RepositoryConfig, + repo_path: &Path, + updates: &[crate::versioning::manager::VersionUpdate], + ) -> Result<(), CliError> { + let updater = DependencyUpdater::new(config.clone(), repo_path); + + for update in updates { + match updater.calculate_updates(&update.package_name, &update.new_version) { + Ok(dep_updates) => { + match updater.apply_updates(&dep_updates) { + Ok(updated_files) => { + for file in updated_files { + // Stage the updated dependency file + if let Err(e) = git::stage_file(Path::new(&file)) { + debug!("Failed to stage {}: {}", file, e); + } + } + } + Err(e) => { + debug!("Failed to apply dependency updates: {}", e); + } + } + } + Err(e) => { + debug!("Failed to calculate dependency updates: {}", e); + } + } + } + + Ok(()) + } + + /// Create appropriate tags based on versioning strategy + fn create_multi_package_tags( + &self, + repo: &git2::Repository, + config: &RepositoryConfig, + updates: &[crate::versioning::manager::VersionUpdate], + ) -> Result<(), 
CliError> { + let strategy = &config.versioning.strategy; + + match strategy { + VersioningStrategy::Unified => { + // Single tag for all packages + if let Some(update) = updates.first() { + let tag_name = format!("v{}", update.new_version); + self.create_and_push_tag(repo, &tag_name)?; + info!("📌 Created unified tag: {}", tag_name); + } + } + VersioningStrategy::Independent | VersioningStrategy::Hybrid => { + // Per-package tags + for update in updates { + let tag_name = format!("{}-v{}", update.package_name, update.new_version); + self.create_and_push_tag(repo, &tag_name)?; + info!("📌 Created tag: {}", tag_name); + } + } + } + + Ok(()) + } + + /// Create a git tag and optionally push to remote + fn create_and_push_tag(&self, repo: &git2::Repository, tag_name: &str) -> Result<(), CliError> { + let repo_path = repo + .workdir() + .ok_or_else(|| CliError::GitError(git2::Error::from_str("No working directory")))?; + self.run_git( + repo_path, + &["tag", "-a", tag_name, "-m", tag_name], + "create tag", + )?; + + info!("✅ Tag '{}' created locally", tag_name); + + // Try to push to remote + if self.tag_options.will_publish_remote() { + if let Err(e) = self.push_tag_to_remote(repo, tag_name) { + debug!("Failed to push tag to remote: {}", e); + } else { + info!("📤 Tag '{}' pushed to remote", tag_name); + } + } + + Ok(()) + } + + /// Push tag to remote repository + fn push_tag_to_remote(&self, repo: &git2::Repository, tag_name: &str) -> Result<(), CliError> { + repo.find_remote("origin").map_err(CliError::from)?; + let repo_path = repo + .workdir() + .ok_or_else(|| CliError::GitError(git2::Error::from_str("No working directory")))?; + self.run_git( + repo_path, + &["push", "origin", &format!("refs/tags/{tag_name}")], + "push tag to remote", + ) + } + + fn run_git(&self, repo_path: &Path, args: &[&str], action: &str) -> Result<(), CliError> { + let output = std::process::Command::new("git") + .current_dir(repo_path) + .args(args) + .output() + .map_err(CliError::IoError)?; 
+ + if output.status.success() { + return Ok(()); + } + + let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); + let detail = if stderr.is_empty() { + format!("git {:?} failed", args) + } else { + stderr + }; + Err(CliError::Generic(format!("Failed to {action}: {detail}"))) + } +} diff --git a/src/cli/mod.rs b/src/cli/mod.rs index 2e7e889..ef99f13 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -1,6 +1,8 @@ pub mod commands; -use self::commands::{amend, branch, commit, group_commit, lint, lint_message, tag}; +use self::commands::{ + amend, branch, commit, config, group_commit, init, lint, lint_message, packages, tag, +}; use crate::error::CliError; use structopt::StructOpt; @@ -24,6 +26,12 @@ pub enum CliCommand { Branch(branch::BranchCommand), #[structopt(about = "Group changes and optionally commit/apply them (with optional AI)")] GroupCommit(group_commit::GroupCommitCommand), + #[structopt(about = "Initialize multi-package support")] + Init(init::InitCommand), + #[structopt(about = "Manage repository configuration")] + Config(config::ConfigCommand), + #[structopt(about = "Manage packages in multi-package repositories")] + Packages(packages::PackagesCommand), } impl CliCommand { @@ -36,6 +44,9 @@ impl CliCommand { CliCommand::LintMessage(cmd) => cmd.execute(non_interactive), CliCommand::Branch(cmd) => cmd.execute(non_interactive), CliCommand::GroupCommit(cmd) => cmd.execute(non_interactive), + CliCommand::Init(cmd) => cmd.execute(non_interactive), + CliCommand::Config(cmd) => cmd.execute(non_interactive), + CliCommand::Packages(cmd) => cmd.execute(non_interactive), } } } diff --git a/src/clock.rs b/src/clock.rs new file mode 100644 index 0000000..2e65c6b --- /dev/null +++ b/src/clock.rs @@ -0,0 +1,57 @@ +use anyhow::{anyhow, Result}; +use chrono::{DateTime, Duration, FixedOffset, Local}; + +const FIXED_NOW_ENV: &str = "COMMITTY_FIXED_NOW"; + +pub fn current_time() -> Result> { + if let Ok(value) = std::env::var(FIXED_NOW_ENV) { + return 
DateTime::parse_from_rfc3339(&value)
            .map_err(|e| anyhow!("Invalid {FIXED_NOW_ENV} value: {e}"));
    }

    Ok(Local::now().fixed_offset())
}

/// True once at least one full day has elapsed since `last_check`.
pub fn should_check_update(
    last_check: DateTime<FixedOffset>,
    current_time: DateTime<FixedOffset>,
) -> bool {
    current_time - last_check >= Duration::days(1)
}

/// True once at least one full week has elapsed since `last_reminder`.
pub fn should_remind_metrics(
    last_reminder: DateTime<FixedOffset>,
    current_time: DateTime<FixedOffset>,
) -> bool {
    current_time - last_reminder >= Duration::days(7)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_current_time_uses_override() {
        // NOTE(review): mutating process-wide env vars is racy when the test
        // harness runs tests in parallel — consider serializing this test.
        let expected = "2026-03-15T11:22:33+01:00";
        std::env::set_var(FIXED_NOW_ENV, expected);

        let current = current_time().unwrap();

        std::env::remove_var(FIXED_NOW_ENV);
        assert_eq!(current.to_rfc3339(), expected);
    }

    #[test]
    fn test_should_check_update() {
        let current = DateTime::parse_from_rfc3339("2026-03-15T12:00:00+01:00").unwrap();
        // 23h elapsed: not yet; exactly 24h: due.
        assert!(!should_check_update(current - Duration::hours(23), current));
        assert!(should_check_update(current - Duration::hours(24), current));
    }

    #[test]
    fn test_should_remind_metrics() {
        let current = DateTime::parse_from_rfc3339("2026-03-15T12:00:00+01:00").unwrap();
        // 6 days elapsed: not yet; exactly 7 days: due.
        assert!(!should_remind_metrics(current - Duration::days(6), current));
        assert!(should_remind_metrics(current - Duration::days(7), current));
    }
}
diff --git a/src/config.rs b/src/config.rs
index a000d73..14bda71 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -1,3 +1,7 @@
+// Multi-package support modules
+pub mod hierarchy;
+pub mod repository;
+
 pub const COMMIT_TYPES: &[&str] = &[
     "feat", "fix", "build", "chore", "ci", "cd", "docs", "perf", "refactor", "revert", "style",
     "test", "security", "config",
diff --git a/src/config/hierarchy.rs b/src/config/hierarchy.rs
new file mode 100644
index 0000000..0513e9b
--- /dev/null
+++ b/src/config/hierarchy.rs
@@ -0,0 +1,142 @@
// Configuration hierarchy: repository config > user config > defaults
// Q4: Repository config always wins

use super::repository::RepositoryConfig;
use super::Config as UserConfig;
use anyhow::Result;
use std::path::Path;

/// Merged view over the repository-level and user-level configurations.
pub struct MergedConfig {
    /// Repository-level config (from .committy/config.toml)
    pub repository: Option<RepositoryConfig>,
    /// User-level config (from ~/.config/committy/config.toml)
    pub user: UserConfig,
}

impl MergedConfig {
    /// Load both layers and combine them.
    /// Priority: repository > user > defaults.
    pub fn load(repo_path: &Path) -> Result<Self> {
        let repository = RepositoryConfig::try_load(repo_path)?;
        let user = UserConfig::load()?;

        Ok(Self { repository, user })
    }

    /// Whether multi-package mode is enabled (false without a repo config).
    pub fn is_multi_package(&self) -> bool {
        match self.repository.as_ref() {
            Some(repo) => repo.is_multi_package(),
            None => false,
        }
    }

    /// Regex that triggers a major bump; repository rules win over the
    /// user-level pattern.
    pub fn get_major_regex(&self) -> &str {
        let repo_rule = self
            .repository
            .as_ref()
            .and_then(|r| r.versioning.rules.as_ref())
            .and_then(|rules| rules.major_regex.as_deref());
        repo_rule.unwrap_or(&self.user.major_regex)
    }

    /// Regex that triggers a minor bump; repository rules win over the
    /// user-level pattern.
    pub fn get_minor_regex(&self) -> &str {
        let repo_rule = self
            .repository
            .as_ref()
            .and_then(|r| r.versioning.rules.as_ref())
            .and_then(|rules| rules.minor_regex.as_deref());
        repo_rule.unwrap_or(&self.user.minor_regex)
    }

    /// Regex that triggers a patch bump; repository rules win over the
    /// user-level pattern.
    pub fn get_patch_regex(&self) -> &str {
        let repo_rule = self
            .repository
            .as_ref()
            .and_then(|r| r.versioning.rules.as_ref())
            .and_then(|rules| rules.patch_regex.as_deref());
        repo_rule.unwrap_or(&self.user.patch_regex)
    }

    /// Borrow the repository config, if one was found.
    pub fn repository_config(&self) -> Option<&RepositoryConfig> {
        self.repository.as_ref()
    }

    /// Borrow the user config.
    pub fn user_config(&self) -> &UserConfig {
        &self.user
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::repository::{
        RepositoryMetadata, RepositoryType, VersioningConfig, VersioningRules, VersioningStrategy,
    };
    use tempfile::TempDir;

    #[test]
    fn test_merged_config_without_repository() {
        let temp_dir = TempDir::new().unwrap();
        let merged = MergedConfig::load(temp_dir.path()).unwrap();

        assert!(merged.repository.is_none());
        assert!(!merged.is_multi_package());
    }

    #[test]
    fn test_regex_fallback_to_user_config() {
        let temp_dir = TempDir::new().unwrap();
        let merged = MergedConfig::load(temp_dir.path()).unwrap();

        // Should use user config patterns
        assert!(!merged.get_major_regex().is_empty());
        assert!(!merged.get_minor_regex().is_empty());
        assert!(!merged.get_patch_regex().is_empty());
    }

    #[test]
    fn test_repository_config_overrides_user() {
        let temp_dir = TempDir::new().unwrap();

        // Create a repository config with custom regex
        let repo_config = RepositoryConfig {
            repository: RepositoryMetadata {
                name: "test".to_string(),
                repo_type: RepositoryType::MultiPackage,
                description: None,
            },
            versioning: VersioningConfig {
                strategy: VersioningStrategy::Independent,
                unified_version: None,
                rules: Some(VersioningRules {
                    major_regex: Some("custom_major".to_string()),
                    minor_regex: Some("custom_minor".to_string()),
                    patch_regex: Some("custom_patch".to_string()),
                }),
            },
            packages: vec![],
            dependencies: vec![],
            scopes: Default::default(),
            commit_rules: Default::default(),
            workspace: None,
        };

        // Save it
        repo_config.save(temp_dir.path()).unwrap();

        // Load merged config
        let merged = MergedConfig::load(temp_dir.path()).unwrap();

        // Should use repository config patterns
        assert_eq!(merged.get_major_regex(), "custom_major");
        assert_eq!(merged.get_minor_regex(), "custom_minor");
        assert_eq!(merged.get_patch_regex(), "custom_patch");
    }
}
diff --git a/src/config/repository.rs b/src/config/repository.rs
new file mode
100644 index 0000000..9716d24 --- /dev/null +++ b/src/config/repository.rs @@ -0,0 +1,712 @@ +// Repository-level configuration for multi-package support +// Loaded from .committy/config.toml + +use anyhow::{Context, Result}; +use serde::{Deserialize, Serialize}; +use std::collections::{HashMap, HashSet}; +use std::fs; +use std::path::{Path, PathBuf}; + +/// Repository-level configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RepositoryConfig { + pub repository: RepositoryMetadata, + pub versioning: VersioningConfig, + pub packages: Vec, + #[serde(default)] + pub dependencies: Vec, + #[serde(default)] + pub scopes: ScopeConfig, + #[serde(default)] + pub commit_rules: CommitRulesConfig, + #[serde(default)] + pub workspace: Option, +} + +/// Repository metadata +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RepositoryMetadata { + pub name: String, + #[serde(rename = "type")] + pub repo_type: RepositoryType, + #[serde(default)] + pub description: Option, +} + +/// Repository type +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "kebab-case")] +pub enum RepositoryType { + SinglePackage, + MultiPackage, + Monorepo, +} + +/// Versioning configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct VersioningConfig { + pub strategy: VersioningStrategy, + #[serde(default)] + pub unified_version: Option, + #[serde(default)] + pub rules: Option, +} + +/// Versioning strategy +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum VersioningStrategy { + Independent, + Unified, + Hybrid, +} + +/// Custom versioning rules (regex patterns) +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct VersioningRules { + #[serde(default)] + pub major_regex: Option, + #[serde(default)] + pub minor_regex: Option, + #[serde(default)] + pub patch_regex: Option, +} + +/// Package configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct 
PackageConfig { + pub name: String, + #[serde(rename = "type")] + pub package_type: String, + pub path: String, + pub version_file: String, + pub version_field: String, + #[serde(default)] + pub primary: bool, + #[serde(default)] + pub sync_with: Option, + #[serde(default)] + pub independent: bool, + #[serde(default)] + pub workspace_member: bool, + #[serde(default)] + pub description: Option, +} + +/// Dependency configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DependencyConfig { + pub source: String, + #[serde(default)] + pub description: Option, + pub targets: Vec, +} + +/// Dependency target +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DependencyTarget { + pub file: String, + pub field: String, + pub strategy: UpdateStrategy, + #[serde(default)] + pub format: Option, +} + +/// Update strategy for dependencies +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum UpdateStrategy { + Auto, + Prompt, + Manual, +} + +/// Scope configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ScopeConfig { + #[serde(default = "default_true")] + pub auto_detect: bool, + #[serde(default = "default_true")] + pub require_scope_for_multi_package: bool, + #[serde(default = "default_true")] + pub allow_multiple_scopes: bool, + #[serde(default = "default_comma")] + pub scope_separator: String, + #[serde(default)] + pub mappings: Vec, +} + +impl Default for ScopeConfig { + fn default() -> Self { + Self { + auto_detect: true, + require_scope_for_multi_package: true, + allow_multiple_scopes: true, + scope_separator: ",".to_string(), + mappings: vec![], + } + } +} + +/// Scope mapping (file pattern -> scope) +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ScopeMapping { + pub pattern: String, + pub scope: String, + pub package: String, + #[serde(default)] + pub description: Option, +} + +/// Commit rules configuration +#[derive(Debug, Clone, Serialize, 
Deserialize)] +pub struct CommitRulesConfig { + #[serde(default = "default_72")] + pub max_subject_length: usize, + #[serde(default = "default_100")] + pub max_body_line_length: usize, + #[serde(default)] + pub require_body: bool, + #[serde(default)] + pub allowed_types: Vec, + #[serde(default)] + pub custom_types: Vec, +} + +impl Default for CommitRulesConfig { + fn default() -> Self { + Self { + max_subject_length: 72, + max_body_line_length: 100, + require_body: false, + allowed_types: vec![], + custom_types: vec![], + } + } +} + +/// Custom commit type +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CustomCommitType { + pub name: String, + pub description: String, + pub bump: BumpType, +} + +/// Version bump type +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum BumpType { + Major, + Minor, + Patch, + None, +} + +/// Workspace configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct WorkspaceConfig { + #[serde(rename = "type")] + pub workspace_type: WorkspaceType, + pub root: String, + #[serde(default)] + pub members: Vec, +} + +/// Workspace type +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum WorkspaceType { + Cargo, + Npm, + Pnpm, + Yarn, + Go, + None, +} + +// Helper functions for serde defaults +fn default_true() -> bool { + true +} + +fn default_comma() -> String { + ",".to_string() +} + +fn default_72() -> usize { + 72 +} + +fn default_100() -> usize { + 100 +} + +impl RepositoryConfig { + /// Load repository configuration from .committy/config.toml + pub fn load(repo_path: &Path) -> Result { + let config_path = Self::get_config_path(repo_path)?; + + if !config_path.exists() { + return Err(anyhow::anyhow!( + "No .committy/config.toml found at {}", + config_path.display() + )); + } + + let content = fs::read_to_string(&config_path) + .with_context(|| format!("Failed to read {}", config_path.display()))?; + + let 
config: Self = toml::from_str(&content) + .with_context(|| format!("Failed to parse {}", config_path.display()))?; + + config.validate(repo_path)?; + Ok(config) + } + + /// Try to load repository configuration, returns None if not found + pub fn try_load(repo_path: &Path) -> Result> { + match Self::load(repo_path) { + Ok(config) => Ok(Some(config)), + Err(e) => { + if e.to_string().contains("No .committy/config.toml found") { + Ok(None) + } else { + Err(e) + } + } + } + } + + /// Get the path to the repository config file + pub fn get_config_path(repo_path: &Path) -> Result { + Ok(repo_path.join(".committy").join("config.toml")) + } + + /// Validate the configuration + pub fn validate(&self, repo_path: &Path) -> Result<()> { + // Validate package names are unique + let mut names = HashSet::new(); + for pkg in &self.packages { + if !names.insert(&pkg.name) { + return Err(anyhow::anyhow!("Duplicate package name: {}", pkg.name)); + } + } + + // Validate primary packages for hybrid strategy + if self.versioning.strategy == VersioningStrategy::Hybrid { + let primary_count = self.packages.iter().filter(|p| p.primary).count(); + if primary_count == 0 { + return Err(anyhow::anyhow!( + "Hybrid strategy requires at least one primary package" + )); + } + + // Q13: Multiple primaries allowed, but they must sync + let primaries: Vec<_> = self.packages.iter().filter(|p| p.primary).collect(); + if primaries.len() > 1 { + // Check that all primaries sync with each other + for primary in &primaries { + if let Some(ref sync_with) = primary.sync_with { + if !primaries.iter().any(|p| &p.name == sync_with) { + return Err(anyhow::anyhow!( + "Primary package '{}' must sync with another primary package, but syncs with '{}'", + primary.name, sync_with + )); + } + } + } + } + } + + // Validate unified version if strategy is unified + if self.versioning.strategy == VersioningStrategy::Unified + && self.versioning.unified_version.is_none() + { + return Err(anyhow::anyhow!( + "Unified strategy 
requires unified_version to be set" + )); + } + + // Validate sync_with references + for pkg in &self.packages { + if let Some(ref sync_with) = pkg.sync_with { + if !self.packages.iter().any(|p| &p.name == sync_with) { + return Err(anyhow::anyhow!( + "Package '{}' syncs with non-existent package '{}'", + pkg.name, + sync_with + )); + } + } + } + + // Q51: Full DAG support - validate no circular dependencies + self.validate_no_cycles()?; + + // Validate dependency sources exist + for dep in &self.dependencies { + if !self.packages.iter().any(|p| p.name == dep.source) { + return Err(anyhow::anyhow!( + "Dependency source '{}' does not exist", + dep.source + )); + } + + // Q48: Missing targets are errors - validate all target files exist + for target in &dep.targets { + let target_path = repo_path.join(&target.file); + if !target_path.exists() { + return Err(anyhow::anyhow!( + "Dependency target file not found: {}", + target.file + )); + } + } + } + + // Validate package paths and version files exist + for pkg in &self.packages { + let pkg_path = repo_path.join(&pkg.path); + if !pkg_path.exists() { + return Err(anyhow::anyhow!( + "Package path not found: {} (package: {})", + pkg.path, + pkg.name + )); + } + + let version_file_path = pkg_path.join(&pkg.version_file); + if !version_file_path.exists() { + return Err(anyhow::anyhow!( + "Version file not found: {} (package: {})", + pkg.version_file, + pkg.name + )); + } + } + + Ok(()) + } + + /// Validate no circular dependencies in sync_with relationships + fn validate_no_cycles(&self) -> Result<()> { + let mut graph: HashMap<&str, &str> = HashMap::new(); + for pkg in &self.packages { + if let Some(ref sync_with) = pkg.sync_with { + graph.insert(&pkg.name, sync_with); + } + } + + // DFS to detect cycles + for start in graph.keys() { + let mut visited = HashSet::new(); + let mut current = *start; + + while let Some(next) = graph.get(current) { + if !visited.insert(current) { + return Err(anyhow::anyhow!( + "Circular 
dependency detected: package '{}' has a cycle in sync_with chain", + start + )); + } + current = next; + } + } + + Ok(()) + } + + /// Check if multi-package mode is enabled + pub fn is_multi_package(&self) -> bool { + self.repository.repo_type != RepositoryType::SinglePackage || self.packages.len() > 1 + } +} + +#[cfg(test)] +impl RepositoryConfig { + /// Check if repository config exists + pub fn exists(repo_path: &Path) -> bool { + Self::get_config_path(repo_path) + .map(|p| p.exists()) + .unwrap_or(false) + } + + /// Save repository configuration to .committy/config.toml + pub fn save(&self, repo_path: &Path) -> Result<()> { + let config_path = Self::get_config_path(repo_path)?; + + if let Some(parent) = config_path.parent() { + fs::create_dir_all(parent) + .with_context(|| format!("Failed to create directory {}", parent.display()))?; + } + + let content = toml::to_string_pretty(self).context("Failed to serialize configuration")?; + + fs::write(&config_path, content) + .with_context(|| format!("Failed to write {}", config_path.display()))?; + + Ok(()) + } + + /// Get the primary package (for hybrid strategy) + pub fn get_primary_package(&self) -> Option<&PackageConfig> { + self.packages.iter().find(|p| p.primary) + } + + /// Get all packages that sync with a given package + pub fn get_synced_packages(&self, package_name: &str) -> Vec<&PackageConfig> { + self.packages + .iter() + .filter(|p| { + p.sync_with + .as_ref() + .map(|s| s == package_name) + .unwrap_or(false) + }) + .collect() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_config_path() { + let temp_dir = TempDir::new().unwrap(); + let config_path = RepositoryConfig::get_config_path(temp_dir.path()).unwrap(); + assert_eq!( + config_path, + temp_dir.path().join(".committy").join("config.toml") + ); + } + + #[test] + fn test_config_not_exists() { + let temp_dir = TempDir::new().unwrap(); + assert!(!RepositoryConfig::exists(temp_dir.path())); + } + + 
#[test] + fn test_get_primary_package() { + let config = RepositoryConfig { + repository: RepositoryMetadata { + name: "test".to_string(), + repo_type: RepositoryType::MultiPackage, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Independent, + unified_version: None, + rules: None, + }, + packages: vec![ + PackageConfig { + name: "pkg1".to_string(), + package_type: "rust-cargo".to_string(), + path: "packages/pkg1".to_string(), + version_file: "packages/pkg1/Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: true, + sync_with: None, + independent: false, + workspace_member: true, + description: None, + }, + PackageConfig { + name: "pkg2".to_string(), + package_type: "rust-cargo".to_string(), + path: "packages/pkg2".to_string(), + version_file: "packages/pkg2/Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: Some("pkg1".to_string()), + independent: false, + workspace_member: true, + description: None, + }, + ], + dependencies: vec![], + scopes: ScopeConfig::default(), + commit_rules: CommitRulesConfig::default(), + workspace: None, + }; + + let primary = config.get_primary_package(); + assert!(primary.is_some()); + assert_eq!(primary.unwrap().name, "pkg1"); + } + + #[test] + fn test_get_synced_packages() { + let config = RepositoryConfig { + repository: RepositoryMetadata { + name: "test".to_string(), + repo_type: RepositoryType::MultiPackage, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Independent, + unified_version: None, + rules: None, + }, + packages: vec![ + PackageConfig { + name: "pkg1".to_string(), + package_type: "rust-cargo".to_string(), + path: "packages/pkg1".to_string(), + version_file: "packages/pkg1/Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: true, + sync_with: None, + independent: false, + workspace_member: true, + description: None, + }, 
+ PackageConfig { + name: "pkg2".to_string(), + package_type: "rust-cargo".to_string(), + path: "packages/pkg2".to_string(), + version_file: "packages/pkg2/Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: Some("pkg1".to_string()), + independent: false, + workspace_member: true, + description: None, + }, + PackageConfig { + name: "pkg3".to_string(), + package_type: "rust-cargo".to_string(), + path: "packages/pkg3".to_string(), + version_file: "packages/pkg3/Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: Some("pkg1".to_string()), + independent: false, + workspace_member: true, + description: None, + }, + ], + dependencies: vec![], + scopes: ScopeConfig::default(), + commit_rules: CommitRulesConfig::default(), + workspace: None, + }; + + let synced = config.get_synced_packages("pkg1"); + let names: Vec<_> = synced.iter().map(|p| p.name.as_str()).collect(); + assert_eq!(names, vec!["pkg2", "pkg3"]); + } + + #[test] + fn test_validate_duplicate_package_names() { + let config = RepositoryConfig { + repository: RepositoryMetadata { + name: "test".to_string(), + repo_type: RepositoryType::MultiPackage, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Independent, + unified_version: None, + rules: None, + }, + packages: vec![ + PackageConfig { + name: "pkg1".to_string(), + package_type: "rust-cargo".to_string(), + path: ".".to_string(), + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: None, + independent: false, + workspace_member: false, + description: None, + }, + PackageConfig { + name: "pkg1".to_string(), // Duplicate! 
+ package_type: "rust-cargo".to_string(), + path: ".".to_string(), + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: None, + independent: false, + workspace_member: false, + description: None, + }, + ], + dependencies: vec![], + scopes: ScopeConfig::default(), + commit_rules: CommitRulesConfig::default(), + workspace: None, + }; + + let temp_dir = TempDir::new().unwrap(); + let result = config.validate(temp_dir.path()); + assert!(result.is_err()); + assert!(result + .unwrap_err() + .to_string() + .contains("Duplicate package name")); + } + + #[test] + fn test_validate_circular_dependency() { + let temp_dir = TempDir::new().unwrap(); + + let config = RepositoryConfig { + repository: RepositoryMetadata { + name: "test".to_string(), + repo_type: RepositoryType::MultiPackage, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Independent, + unified_version: None, + rules: None, + }, + packages: vec![ + PackageConfig { + name: "pkg1".to_string(), + package_type: "rust-cargo".to_string(), + path: ".".to_string(), + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: Some("pkg2".to_string()), + independent: false, + workspace_member: false, + description: None, + }, + PackageConfig { + name: "pkg2".to_string(), + package_type: "rust-cargo".to_string(), + path: ".".to_string(), + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: Some("pkg1".to_string()), // Circular! 
+ independent: false, + workspace_member: false, + description: None, + }, + ], + dependencies: vec![], + scopes: ScopeConfig::default(), + commit_rules: CommitRulesConfig::default(), + workspace: None, + }; + + let result = config.validate(temp_dir.path()); + assert!(result.is_err()); + assert!(result + .unwrap_err() + .to_string() + .contains("Circular dependency")); + } +} diff --git a/src/dependency/handlers/dockerfile.rs b/src/dependency/handlers/dockerfile.rs new file mode 100644 index 0000000..d65d584 --- /dev/null +++ b/src/dependency/handlers/dockerfile.rs @@ -0,0 +1,182 @@ +// Dockerfile handler for dependency updates + +use anyhow::{Context, Result}; +use regex::Regex; +use std::fs; +use std::path::Path; + +/// Read version from a Dockerfile by finding FROM or ARG lines with the package name +pub fn read_version(file_path: &Path, package_name: &str) -> Result { + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read {}", file_path.display()))?; + + // Try to find version in FROM line: FROM package:version + let from_pattern = format!(r"FROM\s+{}:([^\s]+)", regex::escape(package_name)); + let from_re = Regex::new(&from_pattern)?; + + if let Some(caps) = from_re.captures(&content) { + return Ok(caps[1].to_string()); + } + + // Try to find version in ARG line: ARG PACKAGE_VERSION=version + let arg_name = package_name.to_uppercase().replace('-', "_") + "_VERSION"; + let arg_pattern = format!(r"ARG\s+{}=([^\s]+)", regex::escape(&arg_name)); + let arg_re = Regex::new(&arg_pattern)?; + + if let Some(caps) = arg_re.captures(&content) { + return Ok(caps[1].to_string()); + } + + Err(anyhow::anyhow!( + "Version for package '{}' not found in Dockerfile", + package_name + )) +} + +/// Update version in a Dockerfile +pub fn update_version(file_path: &Path, package_name: &str, new_version: &str) -> Result<()> { + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read {}", file_path.display()))?; + + let 
mut updated_content = content.clone(); + let mut updated = false; + + // Update FROM line: FROM package:version + let from_pattern = format!(r"FROM\s+{}:([^\s]+)", regex::escape(package_name)); + let from_re = Regex::new(&from_pattern)?; + + if from_re.is_match(&content) { + let replacement = format!("FROM {}:{}", package_name, new_version); + updated_content = from_re + .replace(&updated_content, replacement.as_str()) + .to_string(); + updated = true; + } + + // Update ARG line: ARG PACKAGE_VERSION=version + let arg_name = package_name.to_uppercase().replace('-', "_") + "_VERSION"; + let arg_pattern = format!(r"ARG\s+{}=([^\s]+)", regex::escape(&arg_name)); + let arg_re = Regex::new(&arg_pattern)?; + + if arg_re.is_match(&content) { + let replacement = format!("ARG {}={}", arg_name, new_version); + updated_content = arg_re + .replace(&updated_content, replacement.as_str()) + .to_string(); + updated = true; + } + + if !updated { + return Err(anyhow::anyhow!( + "No version reference for package '{}' found in Dockerfile", + package_name + )); + } + + fs::write(file_path, updated_content) + .with_context(|| format!("Failed to write {}", file_path.display()))?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_read_version_from_line() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("Dockerfile"); + + fs::write( + &file_path, + r#"FROM node:18 +FROM myapp:1.0.0 +RUN echo "test" +"#, + ) + .unwrap(); + + let version = read_version(&file_path, "myapp").unwrap(); + assert_eq!(version, "1.0.0"); + } + + #[test] + fn test_read_version_from_arg() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("Dockerfile"); + + fs::write( + &file_path, + r#"ARG MYAPP_VERSION=1.5.0 +FROM node:18 +"#, + ) + .unwrap(); + + let version = read_version(&file_path, "myapp").unwrap(); + assert_eq!(version, "1.5.0"); + } + + #[test] + fn test_update_version_from_line() { + let 
temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("Dockerfile"); + + fs::write( + &file_path, + r#"FROM node:18 +FROM myapp:1.0.0 +RUN echo "test" +"#, + ) + .unwrap(); + + update_version(&file_path, "myapp", "2.0.0").unwrap(); + + let version = read_version(&file_path, "myapp").unwrap(); + assert_eq!(version, "2.0.0"); + } + + #[test] + fn test_update_version_from_arg() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("Dockerfile"); + + fs::write( + &file_path, + r#"ARG MYAPP_VERSION=1.5.0 +FROM myapp:${MYAPP_VERSION} +"#, + ) + .unwrap(); + + update_version(&file_path, "myapp", "2.5.0").unwrap(); + + let version = read_version(&file_path, "myapp").unwrap(); + assert_eq!(version, "2.5.0"); + } + + #[test] + fn test_package_with_hyphen() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("Dockerfile"); + + fs::write( + &file_path, + r#"ARG MY_APP_VERSION=1.0.0 +FROM node:18 +"#, + ) + .unwrap(); + + let version = read_version(&file_path, "my-app").unwrap(); + assert_eq!(version, "1.0.0"); + + update_version(&file_path, "my-app", "1.1.0").unwrap(); + + let version = read_version(&file_path, "my-app").unwrap(); + assert_eq!(version, "1.1.0"); + } +} diff --git a/src/dependency/handlers/json.rs b/src/dependency/handlers/json.rs new file mode 100644 index 0000000..0bf0373 --- /dev/null +++ b/src/dependency/handlers/json.rs @@ -0,0 +1,162 @@ +// JSON file handler for dependency updates + +use anyhow::{Context, Result}; +use serde_json::Value; +use std::fs; +use std::path::Path; + +/// Read version from a JSON file using a dot-notation field path +pub fn read_version(file_path: &Path, field: &str) -> Result { + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read {}", file_path.display()))?; + + let json: Value = serde_json::from_str(&content) + .with_context(|| format!("Failed to parse JSON in {}", file_path.display()))?; + + let version = 
get_nested_value(&json, field)?; + + Ok(version + .as_str() + .ok_or_else(|| anyhow::anyhow!("Version field '{}' is not a string", field))? + .to_string()) +} + +/// Update version in a JSON file using a dot-notation field path +pub fn update_version(file_path: &Path, field: &str, new_version: &str) -> Result<()> { + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read {}", file_path.display()))?; + + let mut json: Value = serde_json::from_str(&content) + .with_context(|| format!("Failed to parse JSON in {}", file_path.display()))?; + + set_nested_value(&mut json, field, Value::String(new_version.to_string()))?; + + let updated_content = + serde_json::to_string_pretty(&json).context("Failed to serialize JSON")?; + + fs::write(file_path, updated_content + "\n") + .with_context(|| format!("Failed to write {}", file_path.display()))?; + + Ok(()) +} + +/// Get a nested value from JSON using dot notation +fn get_nested_value<'a>(json: &'a Value, path: &str) -> Result<&'a Value> { + let parts: Vec<&str> = path.split('.').collect(); + let mut current = json; + + for part in parts { + current = current + .get(part) + .ok_or_else(|| anyhow::anyhow!("Field '{}' not found in path '{}'", part, path))?; + } + + Ok(current) +} + +/// Set a nested value in JSON using dot notation +fn set_nested_value(json: &mut Value, path: &str, value: Value) -> Result<()> { + let parts: Vec<&str> = path.split('.').collect(); + + if parts.is_empty() { + return Err(anyhow::anyhow!("Empty path")); + } + + let mut current = json; + + for (i, part) in parts.iter().enumerate() { + if i == parts.len() - 1 { + // Last part - set the value + if let Value::Object(map) = current { + map.insert(part.to_string(), value); + return Ok(()); + } else { + return Err(anyhow::anyhow!("Cannot set field '{}' on non-object", part)); + } + } else { + // Intermediate part - navigate deeper + if let Value::Object(map) = current { + current = map + .get_mut(*part) + .ok_or_else(|| 
anyhow::anyhow!("Field '{}' not found", part))?; + } else { + return Err(anyhow::anyhow!( + "Cannot navigate through non-object at '{}'", + part + )); + } + } + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_read_version() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("config.json"); + + fs::write( + &file_path, + r#"{ + "app": { + "version": "1.0.0" + } +}"#, + ) + .unwrap(); + + let version = read_version(&file_path, "app.version").unwrap(); + assert_eq!(version, "1.0.0"); + } + + #[test] + fn test_update_version() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("config.json"); + + fs::write( + &file_path, + r#"{ + "app": { + "version": "1.0.0" + } +}"#, + ) + .unwrap(); + + update_version(&file_path, "app.version", "2.0.0").unwrap(); + + let version = read_version(&file_path, "app.version").unwrap(); + assert_eq!(version, "2.0.0"); + } + + #[test] + fn test_nested_path() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("package.json"); + + fs::write( + &file_path, + r#"{ + "dependencies": { + "mypackage": "1.0.0" + } +}"#, + ) + .unwrap(); + + let version = read_version(&file_path, "dependencies.mypackage").unwrap(); + assert_eq!(version, "1.0.0"); + + update_version(&file_path, "dependencies.mypackage", "1.5.0").unwrap(); + + let version = read_version(&file_path, "dependencies.mypackage").unwrap(); + assert_eq!(version, "1.5.0"); + } +} diff --git a/src/dependency/handlers/mod.rs b/src/dependency/handlers/mod.rs new file mode 100644 index 0000000..5fa95dc --- /dev/null +++ b/src/dependency/handlers/mod.rs @@ -0,0 +1,6 @@ +// File type handlers for dependency updates + +pub mod dockerfile; +pub mod json; +pub mod toml; +pub mod yaml; diff --git a/src/dependency/handlers/toml.rs b/src/dependency/handlers/toml.rs new file mode 100644 index 0000000..0f35fae --- /dev/null +++ 
b/src/dependency/handlers/toml.rs @@ -0,0 +1,156 @@ +// TOML file handler for dependency updates + +use anyhow::{Context, Result}; +use std::fs; +use std::path::Path; +use toml_edit::{value, DocumentMut, Item}; + +/// Read version from a TOML file using a dot-notation field path +pub fn read_version(file_path: &Path, field: &str) -> Result { + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read {}", file_path.display()))?; + + let doc: DocumentMut = content + .parse() + .with_context(|| format!("Failed to parse TOML in {}", file_path.display()))?; + + let version = get_nested_value(&doc, field)?; + + Ok(version + .as_str() + .ok_or_else(|| anyhow::anyhow!("Version field '{}' is not a string", field))? + .to_string()) +} + +/// Update version in a TOML file using a dot-notation field path +pub fn update_version(file_path: &Path, field: &str, new_version: &str) -> Result<()> { + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read {}", file_path.display()))?; + + let mut doc: DocumentMut = content + .parse() + .with_context(|| format!("Failed to parse TOML in {}", file_path.display()))?; + + set_nested_value(&mut doc, field, value(new_version))?; + + fs::write(file_path, doc.to_string()) + .with_context(|| format!("Failed to write {}", file_path.display()))?; + + Ok(()) +} + +/// Get a nested value from TOML using dot notation +fn get_nested_value<'a>(doc: &'a DocumentMut, path: &str) -> Result<&'a Item> { + let parts: Vec<&str> = path.split('.').collect(); + let mut current = doc.as_item(); + + for part in parts { + current = current + .get(part) + .ok_or_else(|| anyhow::anyhow!("Field '{}' not found in path '{}'", part, path))?; + } + + Ok(current) +} + +/// Set a nested value in TOML using dot notation +fn set_nested_value(doc: &mut DocumentMut, path: &str, value: Item) -> Result<()> { + let parts: Vec<&str> = path.split('.').collect(); + + if parts.is_empty() { + return 
Err(anyhow::anyhow!("Empty path")); + } + + let mut current = doc.as_item_mut(); + + for (i, part) in parts.iter().enumerate() { + if i == parts.len() - 1 { + // Last part - set the value + if let Some(table) = current.as_table_mut() { + table[part] = value; + return Ok(()); + } else { + return Err(anyhow::anyhow!("Cannot set field '{}' on non-table", part)); + } + } else { + // Intermediate part - navigate deeper + current = current + .get_mut(part) + .ok_or_else(|| anyhow::anyhow!("Field '{}' not found", part))?; + } + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_read_version() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("config.toml"); + + fs::write( + &file_path, + r#" +[package] +version = "1.0.0" +"#, + ) + .unwrap(); + + let version = read_version(&file_path, "package.version").unwrap(); + assert_eq!(version, "1.0.0"); + } + + #[test] + fn test_update_version() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("config.toml"); + + fs::write( + &file_path, + r#" +[package] +version = "1.0.0" +name = "test" +"#, + ) + .unwrap(); + + update_version(&file_path, "package.version", "2.0.0").unwrap(); + + let version = read_version(&file_path, "package.version").unwrap(); + assert_eq!(version, "2.0.0"); + } + + #[test] + fn test_nested_path() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("Cargo.toml"); + + fs::write( + &file_path, + r#" +[package] +name = "myapp" +version = "1.0.0" + +[dependencies] +mylib = "1.0.0" +"#, + ) + .unwrap(); + + let version = read_version(&file_path, "dependencies.mylib").unwrap(); + assert_eq!(version, "1.0.0"); + + update_version(&file_path, "dependencies.mylib", "1.5.0").unwrap(); + + let version = read_version(&file_path, "dependencies.mylib").unwrap(); + assert_eq!(version, "1.5.0"); + } +} diff --git a/src/dependency/handlers/yaml.rs 
b/src/dependency/handlers/yaml.rs new file mode 100644 index 0000000..d5bc2e0 --- /dev/null +++ b/src/dependency/handlers/yaml.rs @@ -0,0 +1,165 @@ +// YAML file handler for dependency updates + +use anyhow::{Context, Result}; +use serde_norway::Value; +use std::fs; +use std::path::Path; + +/// Read version from a YAML file using a dot-notation field path +/// Example: "image.tag" reads yaml["image"]["tag"] +pub fn read_version(file_path: &Path, field: &str) -> Result { + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read {}", file_path.display()))?; + + let yaml: Value = serde_norway::from_str(&content) + .with_context(|| format!("Failed to parse YAML in {}", file_path.display()))?; + + let version = get_nested_value(&yaml, field)?; + + Ok(version + .as_str() + .ok_or_else(|| anyhow::anyhow!("Version field '{}' is not a string", field))? + .to_string()) +} + +/// Update version in a YAML file using a dot-notation field path +pub fn update_version(file_path: &Path, field: &str, new_version: &str) -> Result<()> { + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read {}", file_path.display()))?; + + let mut yaml: Value = serde_norway::from_str(&content) + .with_context(|| format!("Failed to parse YAML in {}", file_path.display()))?; + + set_nested_value(&mut yaml, field, Value::String(new_version.to_string()))?; + + let updated_content = serde_norway::to_string(&yaml).context("Failed to serialize YAML")?; + + fs::write(file_path, updated_content) + .with_context(|| format!("Failed to write {}", file_path.display()))?; + + Ok(()) +} + +/// Get a nested value from YAML using dot notation +fn get_nested_value<'a>(yaml: &'a Value, path: &str) -> Result<&'a Value> { + let parts: Vec<&str> = path.split('.').collect(); + let mut current = yaml; + + for part in parts { + current = current + .get(part) + .ok_or_else(|| anyhow::anyhow!("Field '{}' not found in path '{}'", part, path))?; + } + + Ok(current) 
+} + +/// Set a nested value in YAML using dot notation +fn set_nested_value(yaml: &mut Value, path: &str, value: Value) -> Result<()> { + let parts: Vec<&str> = path.split('.').collect(); + + if parts.is_empty() { + return Err(anyhow::anyhow!("Empty path")); + } + + let mut current = yaml; + + for (i, part) in parts.iter().enumerate() { + if i == parts.len() - 1 { + // Last part - set the value + if let Value::Mapping(map) = current { + map.insert(Value::String(part.to_string()), value); + return Ok(()); + } else { + return Err(anyhow::anyhow!( + "Cannot set field '{}' on non-mapping", + part + )); + } + } else { + // Intermediate part - navigate deeper + if let Value::Mapping(map) = current { + current = map + .get_mut(Value::String(part.to_string())) + .ok_or_else(|| anyhow::anyhow!("Field '{}' not found", part))?; + } else { + return Err(anyhow::anyhow!( + "Cannot navigate through non-mapping at '{}'", + part + )); + } + } + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_read_version() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("values.yaml"); + + fs::write( + &file_path, + r#" +image: + tag: "1.0.0" + repository: "myapp" +"#, + ) + .unwrap(); + + let version = read_version(&file_path, "image.tag").unwrap(); + assert_eq!(version, "1.0.0"); + } + + #[test] + fn test_update_version() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("values.yaml"); + + fs::write( + &file_path, + r#" +image: + tag: "1.0.0" + repository: "myapp" +"#, + ) + .unwrap(); + + update_version(&file_path, "image.tag", "2.0.0").unwrap(); + + let version = read_version(&file_path, "image.tag").unwrap(); + assert_eq!(version, "2.0.0"); + } + + #[test] + fn test_nested_path() { + let temp_dir = TempDir::new().unwrap(); + let file_path = temp_dir.path().join("config.yaml"); + + fs::write( + &file_path, + r#" +app: + backend: + version: "1.0.0" +"#, + ) + 
.unwrap(); + + let version = read_version(&file_path, "app.backend.version").unwrap(); + assert_eq!(version, "1.0.0"); + + update_version(&file_path, "app.backend.version", "1.5.0").unwrap(); + + let version = read_version(&file_path, "app.backend.version").unwrap(); + assert_eq!(version, "1.5.0"); + } +} diff --git a/src/dependency/mod.rs b/src/dependency/mod.rs new file mode 100644 index 0000000..da63ccc --- /dev/null +++ b/src/dependency/mod.rs @@ -0,0 +1,4 @@ +// Dependency management for multi-package repositories + +pub mod handlers; +pub mod updater; diff --git a/src/dependency/updater.rs b/src/dependency/updater.rs new file mode 100644 index 0000000..9f464c3 --- /dev/null +++ b/src/dependency/updater.rs @@ -0,0 +1,340 @@ +// Dependency updater for managing version references + +use crate::config::repository::RepositoryConfig; +use anyhow::Result; +use log::debug; +use serde::Serialize; +use std::path::{Path, PathBuf}; + +/// Information about a dependency update +#[derive(Debug, Clone, Serialize)] +pub struct DependencyUpdate { + /// File that needs updating + pub file_path: PathBuf, + /// Package whose version is referenced + pub package_name: String, + /// Old version reference + pub old_version: String, + /// New version reference + pub new_version: String, + /// Field/key where the version is stored + pub field: String, +} + +impl DependencyUpdate { + /// Create a new dependency update + pub fn new( + file_path: PathBuf, + package_name: String, + old_version: String, + new_version: String, + field: String, + ) -> Self { + Self { + file_path, + package_name, + old_version, + new_version, + field, + } + } +} + +/// Dependency updater for managing version references across files +pub struct DependencyUpdater { + config: RepositoryConfig, + repo_path: PathBuf, +} + +impl DependencyUpdater { + /// Create a new dependency updater + pub fn new(config: RepositoryConfig, repo_path: &Path) -> Self { + Self { + config, + repo_path: repo_path.to_path_buf(), + } + } 
+ + /// Calculate dependency updates needed when a package version changes + /// + /// # Arguments + /// * `package_name` - Name of the package whose version changed + /// * `new_version` - New version of the package + /// + /// # Returns + /// List of dependency updates to apply + pub fn calculate_updates( + &self, + package_name: &str, + new_version: &str, + ) -> Result> { + let mut updates = Vec::new(); + + debug!( + "Calculating dependency updates for package '{}' with version '{}'", + package_name, new_version + ); + + // Find all dependencies that reference this package + for dep in &self.config.dependencies { + if dep.source == package_name { + debug!( + "Found dependency: source '{}' has {} target(s)", + dep.source, + dep.targets.len() + ); + + // Process each target + for target in &dep.targets { + let file_path = self.repo_path.join(&target.file); + + if let Ok(old_version) = self.read_version_from_file(&file_path, target) { + if old_version != new_version { + updates.push(DependencyUpdate::new( + file_path, + package_name.to_string(), + old_version, + new_version.to_string(), + target.field.clone(), + )); + } + } else { + debug!("Could not read version from {}", target.file); + } + } + } + } + + Ok(updates) + } + + /// Apply dependency updates to files + pub fn apply_updates(&self, updates: &[DependencyUpdate]) -> Result> { + let mut updated_files = Vec::new(); + + for update in updates { + debug!( + "Applying update to {} (field '{}'): {} -> {}", + update.file_path.display(), + update.field, + update.old_version, + update.new_version + ); + + // Find the dependency target for this file + let target = self.find_target_for_update(update)?; + + // Update the file based on its type + self.update_file(&update.file_path, target, &update.new_version)?; + + updated_files.push(update.file_path.display().to_string()); + } + + Ok(updated_files) + } + + /// Find the dependency target for an update + fn find_target_for_update( + &self, + update: &DependencyUpdate, + 
) -> Result<&crate::config::repository::DependencyTarget> { + for dep in &self.config.dependencies { + if dep.source == update.package_name { + for target in &dep.targets { + if self.repo_path.join(&target.file) == update.file_path { + return Ok(target); + } + } + } + } + Err(anyhow::anyhow!( + "Dependency target not found for {}", + update.file_path.display() + )) + } + + /// Read version from a file based on dependency target + fn read_version_from_file( + &self, + file_path: &Path, + target: &crate::config::repository::DependencyTarget, + ) -> Result { + use super::handlers; + + let file_type = self.detect_file_type(file_path)?; + + match file_type.as_str() { + "yaml" | "yml" => handlers::yaml::read_version(file_path, &target.field), + "json" => handlers::json::read_version(file_path, &target.field), + "toml" => handlers::toml::read_version(file_path, &target.field), + "dockerfile" => { + // For dockerfile, extract package name from field (e.g., "myapp" from field) + handlers::dockerfile::read_version(file_path, &target.field) + } + _ => Err(anyhow::anyhow!("Unsupported file type: {}", file_type)), + } + } + + /// Update version in a file based on dependency target + fn update_file( + &self, + file_path: &Path, + target: &crate::config::repository::DependencyTarget, + new_version: &str, + ) -> Result<()> { + use super::handlers; + + let file_type = self.detect_file_type(file_path)?; + + match file_type.as_str() { + "yaml" | "yml" => handlers::yaml::update_version(file_path, &target.field, new_version), + "json" => handlers::json::update_version(file_path, &target.field, new_version), + "toml" => handlers::toml::update_version(file_path, &target.field, new_version), + "dockerfile" => { + // For dockerfile, use field as package name + handlers::dockerfile::update_version(file_path, &target.field, new_version) + } + _ => Err(anyhow::anyhow!("Unsupported file type: {}", file_type)), + } + } + + /// Detect file type from extension + fn detect_file_type(&self, 
file_path: &Path) -> Result { + let file_name = file_path + .file_name() + .and_then(|n| n.to_str()) + .ok_or_else(|| anyhow::anyhow!("Invalid file name"))?; + + if file_name.to_lowercase() == "dockerfile" || file_name.starts_with("Dockerfile") { + return Ok("dockerfile".to_string()); + } + + let extension = file_path + .extension() + .and_then(|e| e.to_str()) + .ok_or_else(|| anyhow::anyhow!("No file extension"))?; + + Ok(extension.to_lowercase()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::config::repository::{ + DependencyConfig, PackageConfig, RepositoryConfig, RepositoryMetadata, RepositoryType, + VersioningConfig, VersioningStrategy, + }; + use std::fs; + use tempfile::TempDir; + + fn create_test_config() -> RepositoryConfig { + use crate::config::repository::{DependencyTarget, UpdateStrategy}; + + RepositoryConfig { + repository: RepositoryMetadata { + name: "test".to_string(), + repo_type: RepositoryType::MultiPackage, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Independent, + unified_version: None, + rules: None, + }, + packages: vec![PackageConfig { + name: "cli".to_string(), + package_type: "rust-cargo".to_string(), + path: ".".to_string(), + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: None, + independent: true, + workspace_member: false, + description: None, + }], + dependencies: vec![DependencyConfig { + source: "cli".to_string(), + description: Some("CLI version in HELM chart".to_string()), + targets: vec![DependencyTarget { + file: "chart/values.yaml".to_string(), + field: "image.tag".to_string(), + strategy: UpdateStrategy::Auto, + format: None, + }], + }], + scopes: Default::default(), + commit_rules: Default::default(), + workspace: None, + } + } + + #[test] + fn test_detect_file_type() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + let updater = 
DependencyUpdater::new(config, temp_dir.path()); + + assert_eq!( + updater.detect_file_type(Path::new("values.yaml")).unwrap(), + "yaml" + ); + assert_eq!( + updater.detect_file_type(Path::new("package.json")).unwrap(), + "json" + ); + assert_eq!( + updater.detect_file_type(Path::new("Cargo.toml")).unwrap(), + "toml" + ); + assert_eq!( + updater.detect_file_type(Path::new("Dockerfile")).unwrap(), + "dockerfile" + ); + } + + #[test] + fn test_calculate_updates() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + + // Create the values.yaml file + let chart_dir = temp_dir.path().join("chart"); + fs::create_dir(&chart_dir).unwrap(); + fs::write( + chart_dir.join("values.yaml"), + r#" +image: + tag: "1.0.0" +"#, + ) + .unwrap(); + + let updater = DependencyUpdater::new(config, temp_dir.path()); + let updates = updater.calculate_updates("cli", "1.1.0").unwrap(); + + assert_eq!(updates.len(), 1); + assert_eq!(updates[0].package_name, "cli"); + assert_eq!(updates[0].old_version, "1.0.0"); + assert_eq!(updates[0].new_version, "1.1.0"); + assert_eq!(updates[0].field, "image.tag"); + } + + #[test] + fn test_dependency_update_new() { + let update = DependencyUpdate::new( + PathBuf::from("values.yaml"), + "cli".to_string(), + "1.0.0".to_string(), + "1.1.0".to_string(), + "image.tag".to_string(), + ); + + assert_eq!(update.file_path, PathBuf::from("values.yaml")); + assert_eq!(update.package_name, "cli"); + assert_eq!(update.old_version, "1.0.0"); + assert_eq!(update.new_version, "1.1.0"); + assert_eq!(update.field, "image.tag"); + } +} diff --git a/src/git/branch.rs b/src/git/branch.rs index c103f43..adaf9d7 100644 --- a/src/git/branch.rs +++ b/src/git/branch.rs @@ -1,18 +1,66 @@ -use git2::{build::CheckoutBuilder, BranchType, Error as GitError}; +use git2::{build::CheckoutBuilder, BranchType, Error as GitError, Reference}; -use super::repository::discover_repository; +use super::repository::{discover_repository, discover_repository_from}; use 
crate::error::CliError; +use std::path::Path; +#[allow(dead_code)] pub fn create_branch(name: &str, force: bool) -> Result<(), CliError> { let repo = discover_repository()?; + create_branch_in_repo(&repo, name, force) +} + +pub fn create_branch_in(path: &Path, name: &str, force: bool) -> Result<(), CliError> { + let repo = discover_repository_from(path)?; + create_branch_in_repo(&repo, name, force) +} + +fn create_branch_in_repo(repo: &git2::Repository, name: &str, force: bool) -> Result<(), CliError> { let head = repo.head()?; let head_commit = head.peel_to_commit()?; repo.branch(name, &head_commit, force)?; Ok(()) } +#[allow(dead_code)] +pub fn branch_exists(name: &str) -> Result { + let repo = discover_repository()?; + branch_exists_in_repo(&repo, name) +} + +pub fn branch_exists_in(path: &Path, name: &str) -> Result { + let repo = discover_repository_from(path)?; + branch_exists_in_repo(&repo, name) +} + +fn branch_exists_in_repo(repo: &git2::Repository, name: &str) -> Result { + let exists = repo.find_branch(name, BranchType::Local).is_ok(); + Ok(exists) +} + +pub fn validate_branch_name(name: &str) -> Result<(), CliError> { + let ref_name = format!("refs/heads/{name}"); + if Reference::is_valid_name(&ref_name) { + Ok(()) + } else { + Err(CliError::InputError(format!( + "Invalid branch name '{name}'" + ))) + } +} + +#[allow(dead_code)] pub fn checkout_branch(name: &str) -> Result<(), CliError> { let repo = discover_repository()?; + checkout_branch_in_repo(&repo, name) +} + +pub fn checkout_branch_in(path: &Path, name: &str) -> Result<(), CliError> { + let repo = discover_repository_from(path)?; + checkout_branch_in_repo(&repo, name) +} + +fn checkout_branch_in_repo(repo: &git2::Repository, name: &str) -> Result<(), CliError> { let mut checkout_builder = CheckoutBuilder::default(); // Find the branch reference diff --git a/src/git/commit.rs b/src/git/commit.rs index e369a03..6dae536 100644 --- a/src/git/commit.rs +++ b/src/git/commit.rs @@ -1,47 +1,91 @@ -use 
super::repository::discover_repository; +use super::repository::{discover_repository, discover_repository_from}; use crate::error::CliError; +use std::path::Path; +use std::process::{Command, Stdio}; +/// Stage a file for commit +pub fn stage_file(file_path: &Path) -> Result<(), CliError> { + let repo = if file_path.is_absolute() { + discover_repository_from(file_path)? + } else { + discover_repository()? + }; + let mut index = repo.index()?; + + // Convert absolute path to relative path from repo root + let repo_path = repo + .workdir() + .ok_or_else(|| CliError::GitError(git2::Error::from_str("No working directory")))?; + let relative_path = file_path.strip_prefix(repo_path).unwrap_or(file_path); + + index.add_path(relative_path)?; + index.write()?; + Ok(()) +} + +#[allow(dead_code)] pub fn commit_changes(message: &str, amend: bool) -> Result<(), CliError> { let repo = discover_repository()?; - let signature = repo.signature()?; - let mut index = repo.index()?; - let oid = index.write_tree()?; - let tree = repo.find_tree(oid)?; + commit_changes_in_repo(&repo, message, amend) +} + +pub fn commit_changes_in(path: &Path, message: &str, amend: bool) -> Result<(), CliError> { + let repo = discover_repository_from(path)?; + commit_changes_in_repo(&repo, message, amend) +} +fn commit_changes_in_repo( + repo: &git2::Repository, + message: &str, + amend: bool, +) -> Result<(), CliError> { + let repo_path = repo + .workdir() + .ok_or_else(|| CliError::GitError(git2::Error::from_str("No working directory")))?; + let mut args = vec!["commit", "-F", "-"]; if amend { - let head = repo.head()?; - let parent_commit = head.peel_to_commit()?; - parent_commit.amend( - Some("HEAD"), - Some(&signature), - Some(&signature), - None, - Some(message), - Some(&tree), - )?; - } else { - let parents = match repo.head() { - Ok(head) => { - let commit = head.peel_to_commit()?; - vec![commit] - } - Err(_) => vec![], - }; + args.push("--amend"); + } + + run_git_with_input(repo_path, &args, 
message, "create commit")?; + + Ok(()) +} - let parents_refs: Vec<&git2::Commit> = parents.iter().collect(); +fn run_git_with_input( + repo_path: &Path, + args: &[&str], + input: &str, + action: &str, +) -> Result<(), CliError> { + let mut child = Command::new("git") + .current_dir(repo_path) + .args(args) + .stdin(Stdio::piped()) + .stdout(Stdio::null()) + .stderr(Stdio::piped()) + .spawn() + .map_err(CliError::IoError)?; - // Create the commit - repo.commit( - Some("HEAD"), - &signature, - &signature, - message, - &tree, - &parents_refs, - )?; + if let Some(mut stdin) = child.stdin.take() { + use std::io::Write; + stdin + .write_all(input.as_bytes()) + .map_err(CliError::IoError)?; } - Ok(()) + let output = child.wait_with_output().map_err(CliError::IoError)?; + if output.status.success() { + return Ok(()); + } + + let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); + let detail = if stderr.is_empty() { + format!("git {:?} failed", args) + } else { + stderr + }; + Err(CliError::Generic(format!("Failed to {action}: {detail}"))) } pub fn format_commit_message( diff --git a/src/git/mod.rs b/src/git/mod.rs index b593b52..97b475c 100644 --- a/src/git/mod.rs +++ b/src/git/mod.rs @@ -3,7 +3,16 @@ mod commit; mod repository; mod tag; -pub use branch::{checkout_branch, create_branch}; -pub use commit::{commit_changes, format_commit_message}; -pub use repository::{has_staged_changes, list_changed_files, validate_git_config}; +#[allow(unused_imports)] +pub use branch::{ + branch_exists, branch_exists_in, checkout_branch, checkout_branch_in, create_branch, + create_branch_in, validate_branch_name, +}; +#[allow(unused_imports)] +pub use commit::{commit_changes, commit_changes_in, format_commit_message, stage_file}; +#[allow(unused_imports)] +pub use repository::{ + discover_repository, discover_repository_from, has_staged_changes, has_staged_changes_from, + list_changed_files, list_changed_files_from, validate_git_config, validate_git_config_from, +}; pub 
use tag::{TagGenerator, TagGeneratorOptions}; diff --git a/src/git/repository.rs b/src/git/repository.rs index c7b09ac..682f274 100644 --- a/src/git/repository.rs +++ b/src/git/repository.rs @@ -1,12 +1,17 @@ use crate::error::CliError; use git2::{Config, Repository, StatusOptions, StatusShow}; use std::env; +use std::path::Path; pub fn discover_repository() -> Result<Repository, CliError> { let current_dir = env::current_dir()?; - log::debug!("Starting repository discovery from: {current_dir:?}"); + discover_repository_from(&current_dir) +} + +pub fn discover_repository_from(path: &Path) -> Result<Repository, CliError> { + log::debug!("Starting repository discovery from: {path:?}"); - match Repository::discover(&current_dir) { + match Repository::discover(path) { Ok(repo) => { // Get the absolute path to the repository root let repo_path = repo @@ -29,7 +34,7 @@ pub fn discover_repository() -> Result<Repository, CliError> { } } Err(e) => { - log::error!("Failed to discover repository from {current_dir:?}: {e}"); + log::error!("Failed to discover repository from {path:?}: {e}"); Err(CliError::GitError(git2::Error::from_str( "Could not find Git repository in current directory or any parent directories", ))) @@ -38,7 +43,12 @@ pub fn discover_repository() -> Result<Repository, CliError> { } pub fn has_staged_changes() -> Result<bool, CliError> { - let repo = discover_repository()?; + let current_dir = env::current_dir()?; + has_staged_changes_from(&current_dir) +} + +pub fn has_staged_changes_from(path: &Path) -> Result<bool, CliError> { + let repo = discover_repository_from(path)?; let mut opts = StatusOptions::new(); opts.include_ignored(false) .include_untracked(false) @@ -67,8 +77,17 @@ pub fn has_staged_changes() -> Result<bool, CliError> { /// List changed files in the repository. If `include_unstaged` is true, /// include workdir modifications in addition to index changes.
+#[allow(dead_code)] pub fn list_changed_files(include_unstaged: bool) -> Result<Vec<String>, CliError> { - let repo = discover_repository()?; + let current_dir = env::current_dir()?; + list_changed_files_from(&current_dir, include_unstaged) +} + +pub fn list_changed_files_from( + path: &Path, + include_unstaged: bool, +) -> Result<Vec<String>, CliError> { + let repo = discover_repository_from(path)?; let mut opts = StatusOptions::new(); opts.include_ignored(false) .include_untracked(true) @@ -109,8 +128,14 @@ fn get_config_value(config: &Config, key: &str) -> Option<String> { } } +#[allow(dead_code)] pub fn validate_git_config() -> Result<(), CliError> { - let repo = discover_repository()?; + let current_dir = env::current_dir()?; + validate_git_config_from(&current_dir) +} + +pub fn validate_git_config_from(path: &Path) -> Result<(), CliError> { + let repo = discover_repository_from(path)?; let config = repo.config()?; // Try to get user.name from local or global config diff --git a/src/git/tag.rs b/src/git/tag.rs index a8af36b..b794163 100644 --- a/src/git/tag.rs +++ b/src/git/tag.rs @@ -1,11 +1,11 @@ -use std::env; - use crate::version::VersionManager; use crate::{config, error::CliError}; -use git2::{FetchOptions, Oid, PushOptions, RemoteCallbacks, Repository}; +use git2::{Oid, Repository}; use log::{debug, error, info}; use regex::Regex; use semver::Version; +use std::path::Path; +use std::process::Command; use structopt::StructOpt; #[derive(Clone, Debug, StructOpt)] @@ -24,7 +24,7 @@ pub struct TagGeneratorOptions { release_branches: String, #[structopt(long, default_value = ".", help = "Source directory")] - source: String, + pub(crate) source: String, #[structopt(long, help = "Perform a dry run without creating tags")] dry_run: bool, @@ -58,6 +58,12 @@ pub struct TagGeneratorOptions { #[structopt(long, help = "Do not publish the new tag")] not_publish: bool, + #[structopt(long, help = "Publish the new tag after calculation")] + publish: bool, + + #[structopt(long, help = "Confirm publishing the tag 
to remote")] + confirm_publish: bool, + #[structopt(long, help = "Fetch tags from remote before calculation")] fetch: bool, @@ -68,6 +74,24 @@ pub struct TagGeneratorOptions { no_fetch: bool, } +impl TagGeneratorOptions { + pub fn dry_run(&self) -> bool { + self.dry_run + } + + pub fn publish_requested(&self) -> bool { + self.publish + } + + pub fn confirm_publish(&self) -> bool { + self.confirm_publish + } + + pub fn will_publish_remote(&self) -> bool { + self.publish && self.confirm_publish && !self.not_publish + } +} + pub struct TagGenerator { default_bump: String, not_with_v: bool, @@ -81,6 +105,7 @@ pub struct TagGenerator { force_without_change: bool, tag_message: String, not_publish: bool, + publish_remote: bool, fetch: bool, bump_config_files: bool, pub current_tag: String, @@ -106,7 +131,8 @@ impl TagGenerator { none_string_token: options.none_string_token, force_without_change: options.force_without_change, tag_message: options.tag_message.unwrap_or_default(), - not_publish: options.not_publish, + not_publish: options.not_publish || !options.publish, + publish_remote: options.publish && options.confirm_publish && !options.not_publish, // default to fetching unless --no-fetch is explicitly passed; --fetch enforces true fetch: if options.fetch { true @@ -124,6 +150,10 @@ impl TagGenerator { self.fetch } + fn should_publish_remote(&self) -> bool { + self.publish_remote && !self.not_publish + } + pub fn run(&mut self) -> Result<(), CliError> { info!("🚀 Starting tag generation process"); let repo = self.open_repository()?; @@ -206,39 +236,14 @@ impl TagGenerator { fn fetch_tags(&self, repo: &Repository) -> Result<(), CliError> { debug!("Fetching tags from remote"); match repo.find_remote("origin") { - Ok(mut remote) => { - let mut callbacks = RemoteCallbacks::new(); - - callbacks.credentials(|_url, username_from_url, _allowed_types| { - git2::Cred::ssh_key( - username_from_url.unwrap_or("git"), - None, - std::path::Path::new(&format!( - "{}/.ssh/id_rsa", - 
std::env::var("HOME").unwrap() - )), - None, - ) - }); - - let mut fetch_options = FetchOptions::new(); - fetch_options.remote_callbacks(callbacks); - - remote.fetch(&["refs/tags/*:refs/tags/*"], Some(&mut fetch_options), None) - .map_err(|e| { - error!("Failed to fetch tags from remote: {e}"); - match e.code() { - git2::ErrorCode::Auth => { - error!("Authentication error. Please ensure your credentials are set up correctly."); - error!("For SSH: Ensure your SSH key is added to the ssh-agent or located at ~/.ssh/id_rsa"); - error!("For HTTPS: Check your Git credential helper or use a personal access token."); - error!("Debug info: SSH_AUTH_SOCK={:?}, HOME={:?}", env::var("SSH_AUTH_SOCK"), env::var("HOME")); - error!("Remote URL: {:?}", remote.url()); - }, - _ => error!("Unexpected error occurred. Please check your network connection and repository permissions."), - } - CliError::from(e) - }) + Ok(_) => { + let repo_path = self.repo_root(repo)?; + run_git( + repo_path, + &["fetch", "origin", "refs/tags/*:refs/tags/*"], + "fetch tags from remote", + ) + .inspect_err(|e| error!("{e}")) } Err(e) if e.code() == git2::ErrorCode::NotFound => { debug!("No remote 'origin' found, skipping tag fetch"); @@ -547,72 +552,38 @@ impl TagGenerator { return Ok(()); } - let signature = repo.signature()?; - let tree_id = { - let mut index = repo.index()?; - for file in updated_files { - index.add_path(std::path::Path::new(file))?; - } - index.write()?; - index.write_tree()? 
- }; + let repo_path = self.repo_root(repo)?; + let mut add_args = vec!["add", "--"]; + for file in updated_files { + add_args.push(file.as_str()); + } + run_git(repo_path, &add_args, "stage version updates")?; - let tree = repo.find_tree(tree_id)?; - let parent_commit = repo.head()?.peel_to_commit()?; let version_without_v = new_version.trim_start_matches('v'); let message = format!("chore: bump version to {version_without_v}"); - - repo.commit( - Some("HEAD"), - &signature, - &signature, - &message, - &tree, - &[&parent_commit], + run_git( + repo_path, + &["commit", "-m", &message], + "create version bump commit", )?; - // Push the commit to remote if we're not in dry run mode and not set to not publish - if !self.dry_run && !self.not_publish { + // Push the commit to remote only when publishing has been explicitly confirmed + if !self.dry_run && self.should_publish_remote() { info!("🔄 Pushing version bump commit to remote"); match repo.find_remote("origin") { - Ok(mut remote) => { - let mut callbacks = RemoteCallbacks::new(); - callbacks.credentials(|_url, username_from_url, _allowed_types| { - git2::Cred::ssh_key( - username_from_url.unwrap_or("git"), - None, - std::path::Path::new(&format!( - "{}/.ssh/id_rsa", - std::env::var("HOME").unwrap() - )), - None, - ) - }); - - let mut push_options = PushOptions::new(); - push_options.remote_callbacks(callbacks); - + Ok(_) => { let current_branch = self.get_current_branch(repo)?; - let refspec = format!("refs/heads/{current_branch}"); - - match remote.push(&[&refspec], Some(&mut push_options)) { - Ok(_) => { - debug!("Successfully pushed commit to remote branch {current_branch}"); - info!( - "✅ Pushed version bump commit to remote branch {current_branch}" - ); - } - Err(e) => { - error!("Failed to push commit to remote: {e}"); - if e.code() == git2::ErrorCode::Auth { - error!( - "Authentication error. Please ensure your SSH key is set up correctly." 
- ); - error!("You may need to add your SSH key to the ssh-agent or use HTTPS with a personal access token."); - } - return Err(e.into()); - } - } + run_git( + repo_path, + &[ + "push", + "origin", + &format!("HEAD:refs/heads/{current_branch}"), + ], + "push version bump commit to remote", + )?; + debug!("Successfully pushed commit to remote branch {current_branch}"); + info!("✅ Pushed version bump commit to remote branch {current_branch}"); } Err(e) if e.code() == git2::ErrorCode::NotFound => { debug!("Remote 'origin' not found, skipping push"); @@ -626,8 +597,7 @@ impl TagGenerator { pub fn create_and_push_tag(&self, repo: &Repository, new_tag: &str) -> Result<(), CliError> { debug!("Creating and pushing new tag: {new_tag}"); - let head = repo.head()?.peel_to_commit()?; - let signature = repo.signature()?; + let repo_path = self.repo_root(repo)?; let tag_message = if !self.tag_message.is_empty() { &self.tag_message @@ -635,43 +605,22 @@ impl TagGenerator { new_tag }; - // Create tag - repo.tag(new_tag, head.as_object(), &signature, tag_message, false)?; + run_git( + repo_path, + &["tag", "-a", new_tag, "-m", tag_message], + "create tag", + )?; - // Only try to push if not in dry run mode and not explicitly set to not publish - if !self.dry_run && !self.not_publish { + // Only try to push when publishing has been explicitly confirmed + if !self.dry_run && self.should_publish_remote() { match repo.find_remote("origin") { - Ok(mut remote) => { - let mut callbacks = RemoteCallbacks::new(); - callbacks.credentials(|_url, username_from_url, _allowed_types| { - git2::Cred::ssh_key( - username_from_url.unwrap_or("git"), - None, - std::path::Path::new(&format!( - "{}/.ssh/id_rsa", - std::env::var("HOME").unwrap() - )), - None, - ) - }); - - let mut push_options = PushOptions::new(); - push_options.remote_callbacks(callbacks); - - let refspec = format!("refs/tags/{new_tag}"); - match remote.push(&[&refspec], Some(&mut push_options)) { - Ok(_) => debug!("Successfully pushed 
tag {new_tag} to remote"), - Err(e) => { - error!("Failed to push tag {new_tag} to remote: {e}"); - if e.code() == git2::ErrorCode::Auth { - error!( - "Authentication error. Please ensure your SSH key is set up correctly." - ); - error!("You may need to add your SSH key to the ssh-agent or use HTTPS with a personal access token."); - } - return Err(e.into()); - } - } + Ok(_) => { + run_git( + repo_path, + &["push", "origin", &format!("refs/tags/{new_tag}")], + "push tag to remote", + )?; + debug!("Successfully pushed tag {new_tag} to remote"); } Err(e) if e.code() == git2::ErrorCode::NotFound => { debug!("Remote 'origin' not found, skipping push"); @@ -682,6 +631,31 @@ impl TagGenerator { Ok(()) } + + fn repo_root<'a>(&self, repo: &'a Repository) -> Result<&'a Path, CliError> { + repo.workdir() + .ok_or_else(|| CliError::GitError(git2::Error::from_str("No working directory"))) + } +} + +fn run_git(repo_path: &Path, args: &[&str], action: &str) -> Result<(), CliError> { + let output = Command::new("git") + .current_dir(repo_path) + .args(args) + .output() + .map_err(CliError::IoError)?; + + if output.status.success() { + return Ok(()); + } + + let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string(); + let detail = if stderr.is_empty() { + format!("git {:?} failed", args) + } else { + stderr + }; + Err(CliError::Generic(format!("Failed to {action}: {detail}"))) } #[cfg(test)] @@ -713,7 +687,7 @@ mod tests { // Tag v8.3.2 (regular) repo.tag( "v8.3.2", - &repo.head().unwrap().peel_to_commit().unwrap().as_object(), + repo.head().unwrap().peel_to_commit().unwrap().as_object(), &signature, "Regular release", false, @@ -722,7 +696,7 @@ mod tests { // Tag v10.0.0-beta.1 (pre-release) repo.tag( "v10.0.0-beta.1", - &repo.head().unwrap().peel_to_commit().unwrap().as_object(), + repo.head().unwrap().peel_to_commit().unwrap().as_object(), &signature, "Pre-release", false, @@ -756,6 +730,8 @@ mod tests { none_string_token: "#none".to_string(), 
force_without_change: false, tag_message: None, + publish: false, + confirm_publish: false, not_publish: true, fetch: false, no_fetch: true, diff --git a/src/input/prompts.rs b/src/input/prompts.rs index 7694f17..a4a9dab 100644 --- a/src/input/prompts.rs +++ b/src/input/prompts.rs @@ -192,6 +192,38 @@ pub fn input_scope() -> Result { } } +pub fn select_detected_scopes( + detected: &[String], + _all_available: &[String], + _allow_multiple: bool, +) -> Result { + if non_interactive_env() { + return Err(CliError::InputError( + "Non-interactive environment: cannot prompt for scope".to_string(), + )); + } + + // If no detected scopes: fall back to generic input + if detected.is_empty() { + return input_scope(); + } + + // Single detected scope: auto-select with confirmation + if detected.len() == 1 { + use colored::Colorize; + println!("✓ Detected scope: {}", detected[0].green()); + return Ok(detected[0].clone()); + } + + // Multiple detected: ask user to select one + let scope = Select::new("Select scope:", detected.to_vec()) + .with_help_message("Use arrow keys to navigate, Enter to select") + .prompt() + .map_err(|e| CliError::InputError(e.to_string()))?; + + Ok(scope.to_string()) +} + pub fn input_short_message() -> Result { if non_interactive_env() { return Err(CliError::InputError( diff --git a/src/lib.rs b/src/lib.rs index a50bb4f..e7f402e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,12 +1,18 @@ pub mod ai; pub mod cli; +pub mod clock; pub mod config; +pub mod dependency; pub mod error; pub mod git; pub mod input; pub mod linter; pub mod logger; +pub mod packages; pub mod release; +pub mod scope; pub mod telemetry; pub mod update; pub mod version; +pub mod versioning; +pub mod workflow; diff --git a/src/linter/mod.rs b/src/linter/mod.rs index d07432a..83c14b9 100644 --- a/src/linter/mod.rs +++ b/src/linter/mod.rs @@ -1,10 +1,16 @@ use anyhow::Result; -use git2::{ObjectType, Repository, Tag}; +use git2::{Oid, Repository}; use regex::Regex; use serde::Serialize; 
+use std::collections::HashMap; +use std::fs; +use std::path::{Path, PathBuf}; + +use crate::config::repository::{CommitRulesConfig, RepositoryConfig}; pub struct CommitLinter { repo: Repository, + rules: CommitRulesConfig, } #[derive(Debug, Serialize)] @@ -17,7 +23,12 @@ pub struct CommitIssue { impl CommitLinter { pub fn new(repo_path: &str) -> Result { let repo = Repository::open(repo_path)?; - Ok(CommitLinter { repo }) + let rules = repo + .workdir() + .map(load_commit_rules) + .transpose()? + .unwrap_or_default(); + Ok(CommitLinter { repo, rules }) } pub fn check_commits_since_last_tag(&self) -> Result> { @@ -40,113 +51,139 @@ impl CommitLinter { revwalk.push(head_commit.id())?; // If there's a tag, only check commits since that tag - if let Ok(Some(tag)) = self.get_last_tag() { - let tag_commit = tag.target()?.peel_to_commit()?; - revwalk.hide(tag_commit.id())?; + if let Some(tag_commit) = self.get_last_tag_commit(head_commit.id())? { + revwalk.hide(tag_commit)?; } - // Conventional commit regex parts - let type_pattern = format!(r"(?:{})", crate::config::COMMIT_TYPES.join("|")); - let scope_pattern = r"(?:\([a-z0-9-]+\))?"; - let breaking_change = r"(?:!)?"; // Optional breaking change indicator - let separator = r"\: "; - let description = r".+"; - let full_pattern = - format!("^{type_pattern}{scope_pattern}{breaking_change}{separator}{description}$"); - let commit_regex = Regex::new(&full_pattern).unwrap(); - // Check each commit for commit_id in revwalk { let commit_id = commit_id?; let commit = self.repo.find_commit(commit_id)?; let message = commit.message().unwrap_or("").trim(); - let first_line = message.lines().next().unwrap_or(""); - - // Check if commit message follows conventional commit format - if !commit_regex.is_match(first_line) { - let issue = if !first_line.contains(": ") { - "Missing ': ' separator between type/scope and description".to_string() - } else if !crate::config::COMMIT_TYPES - .iter() - .any(|t| first_line.starts_with(t)) - { - 
let types = crate::config::COMMIT_TYPES.join(", "); - format!("Commit type must be one of: {types}") - } else if first_line.contains("(") && !first_line.contains(")") { - "Unclosed scope parenthesis".to_string() - } else if first_line.contains(")") && !first_line.contains("(") { - "Unopened scope parenthesis".to_string() - } else if first_line.contains("()") { - "Empty scope parenthesis".to_string() - } else { - "Commit message format should be: (): ".to_string() - }; - + for issue in check_message_format_with_rules(message, &self.rules) { issues.push(CommitIssue { commit_id: commit_id.to_string(), message: message.to_string(), issue, }); - continue; - } - - // Check minimum length - if first_line.len() < 10 { - let len = first_line.len(); - issues.push(CommitIssue { - commit_id: commit_id.to_string(), - message: message.to_string(), - issue: format!( - "Commit message is too short (got {len} characters, minimum is 10)" - ), - }); - } - - // Check maximum length of first line - if first_line.len() > 72 { - let len = first_line.len(); - issues.push(CommitIssue { - commit_id: commit_id.to_string(), - message: message.to_string(), - issue: format!( - "First line of commit message is too long (got {len} characters, maximum is 72)" - ), - }); } } Ok(issues) } - fn get_last_tag(&'_ self) -> Result>> { - let mut tags = Vec::new(); - self.repo.tag_foreach(|id, _| { - if let Ok(obj) = self.repo.find_object(id, Some(ObjectType::Tag)) { - if let Ok(tag) = obj.into_tag() { - tags.push(tag); - } + fn get_last_tag_commit(&self, head_commit: Oid) -> Result> { + let mut tag_commits: HashMap> = HashMap::new(); + let tag_names = self.repo.tag_names(None)?; + + for tag_name in tag_names.iter().flatten() { + let ref_name = format!("refs/tags/{tag_name}"); + let obj = match self.repo.revparse_single(&ref_name) { + Ok(obj) => obj, + Err(_) => continue, + }; + let commit = match obj.peel_to_commit() { + Ok(commit) => commit, + Err(_) => continue, + }; + + if commit.id() == head_commit + 
|| self.repo.graph_descendant_of(head_commit, commit.id())? + { + tag_commits + .entry(commit.id()) + .or_default() + .push(tag_name.to_string()); } - true - })?; + } + + if tag_commits.is_empty() { + return Ok(None); + } - // Sort tags by time - tags.sort_by_key(|b| b.tagger().unwrap().when()); + let mut revwalk = self.repo.revwalk()?; + revwalk.push(head_commit)?; - Ok(tags.into_iter().next()) + for commit_id in revwalk { + let commit_id = commit_id?; + if tag_commits.contains_key(&commit_id) { + return Ok(Some(commit_id)); + } + } + + Ok(None) } } /// Lint a single commit message string using the same rules as repository linting. /// Returns a list of issue descriptions; empty if the message passes all checks. +#[allow(dead_code)] pub fn check_message_format(message: &str) -> Vec { + check_message_format_with_rules(message, &CommitRulesConfig::default()) +} + +pub fn check_message_format_for_repo(repo_path: &Path, message: &str) -> Result> { + let rules = load_commit_rules(repo_path)?; + Ok(check_message_format_with_rules(message, &rules)) +} + +pub fn allowed_commit_types_for_repo(repo_path: &Path) -> Result> { + let rules = load_commit_rules(repo_path)?; + Ok(allowed_commit_types(&rules)) +} + +fn load_commit_rules(repo_path: &Path) -> Result { + let config_root = discover_config_root(repo_path); + let config_path = RepositoryConfig::get_config_path(&config_root)?; + + if !config_path.exists() { + return Ok(CommitRulesConfig::default()); + } + + let content = fs::read_to_string(&config_path)?; + let config: RepositoryConfig = toml::from_str(&content)?; + Ok(config.commit_rules) +} + +fn discover_config_root(repo_path: &Path) -> PathBuf { + let mut current = if repo_path.is_file() { + repo_path + .parent() + .map(Path::to_path_buf) + .unwrap_or_else(|| repo_path.to_path_buf()) + } else { + repo_path.to_path_buf() + }; + + loop { + if current.join(".committy").join("config.toml").exists() || current.join(".git").exists() { + return current; + } + + let 
Some(parent) = current.parent() else { + return repo_path.to_path_buf(); + }; + current = parent.to_path_buf(); + } +} + +fn check_message_format_with_rules(message: &str, rules: &CommitRulesConfig) -> Vec { let mut issues = Vec::new(); let message = message.trim(); let first_line = message.lines().next().unwrap_or(""); + let allowed_types = allowed_commit_types(rules); + let type_pattern = format!( + r"(?:{})", + allowed_types + .iter() + .map(|item| regex::escape(item)) + .collect::>() + .join("|") + ); // Conventional commit regex parts - let type_pattern = format!(r"(?:{})", crate::config::COMMIT_TYPES.join("|")); let scope_pattern = r"(?:\([a-z0-9-]+\))?"; let breaking_change = r"(?:!)?"; // Optional breaking change indicator let separator = r"\: "; @@ -159,11 +196,15 @@ pub fn check_message_format(message: &str) -> Vec { if !commit_regex.is_match(first_line) { let issue = if !first_line.contains(": ") { "Missing ': ' separator between type/scope and description".to_string() - } else if !crate::config::COMMIT_TYPES - .iter() - .any(|t| first_line.starts_with(t)) + } else if !extract_commit_type(first_line) + .map(|candidate| { + allowed_types + .iter() + .any(|commit_type| commit_type == candidate) + }) + .unwrap_or(false) { - let types = crate::config::COMMIT_TYPES.join(", "); + let types = allowed_types.join(", "); format!("Commit type must be one of: {types}") } else if first_line.contains("(") && !first_line.contains(")") { "Unclosed scope parenthesis".to_string() @@ -187,16 +228,64 @@ pub fn check_message_format(message: &str) -> Vec { } // Check maximum length of first line - if first_line.len() > 72 { + if first_line.len() > rules.max_subject_length { let len = first_line.len(); issues.push(format!( - "First line of commit message is too long (got {len} characters, maximum is 72)" + "First line of commit message is too long (got {len} characters, maximum is {})", + rules.max_subject_length )); } + let body_lines: Vec<_> = 
message.lines().skip(1).collect(); + let has_body_content = body_lines.iter().any(|line| !line.trim().is_empty()); + + if rules.require_body && !has_body_content { + issues.push("Commit body is required by repository configuration".to_string()); + } + + for (idx, line) in body_lines.iter().enumerate() { + if line.trim().is_empty() { + continue; + } + if line.len() > rules.max_body_line_length { + issues.push(format!( + "Body line {} is too long (got {} characters, maximum is {})", + idx + 2, + line.len(), + rules.max_body_line_length + )); + } + } + issues } +fn allowed_commit_types(rules: &CommitRulesConfig) -> Vec { + let mut types = if rules.allowed_types.is_empty() { + crate::config::COMMIT_TYPES + .iter() + .map(|item| item.to_string()) + .collect::>() + } else { + rules.allowed_types.clone() + }; + + for custom in &rules.custom_types { + if !types.iter().any(|item| item == &custom.name) { + types.push(custom.name.clone()); + } + } + + types +} + +fn extract_commit_type(first_line: &str) -> Option<&str> { + let separator_index = first_line.find(": ")?; + let header = &first_line[..separator_index]; + let type_end = header.find(['(', '!']).unwrap_or(header.len()); + Some(&header[..type_end]) +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/main.rs b/src/main.rs index 642cdad..2ae48f2 100644 --- a/src/main.rs +++ b/src/main.rs @@ -2,16 +2,22 @@ include!(concat!(env!("OUT_DIR"), "/sentry_dsn.rs")); mod ai; mod cli; +mod clock; mod config; +mod dependency; mod error; mod git; mod input; mod linter; mod logger; +mod packages; mod release; +mod scope; mod telemetry; mod update; mod version; +mod versioning; +mod workflow; use anyhow::Result; use env_logger::{Builder, Env}; @@ -21,10 +27,11 @@ use structopt::StructOpt; use crate::cli::commands::commit::CommitCommand; use crate::cli::{CliCommand, Command}; +use crate::clock::{current_time, should_check_update, should_remind_metrics}; use crate::config::Config; use crate::error::CliError; use 
crate::update::Updater; -use chrono::{DateTime, Duration}; +use chrono::DateTime; #[derive(StructOpt)] #[structopt( @@ -127,13 +134,11 @@ fn run(config: &mut Config) -> Result<()> { return Ok(()); } - let current_time = DateTime::parse_from_rfc3339("2025-01-08T17:49:53+01:00").unwrap(); - let one_week = Duration::days(7); - let one_day = Duration::days(1); + let current_time: DateTime<_> = current_time()?; let mut config_updated = false; // Show metrics reminder if enabled and it's been a week - if config.metrics_enabled && current_time - config.last_metrics_reminder >= one_week { + if config.metrics_enabled && should_remind_metrics(config.last_metrics_reminder, current_time) { logger::info( " Metrics collection is enabled to help improve Committy. You can opt-out anytime with --metrics-toggle", ); @@ -184,7 +189,7 @@ fn run(config: &mut Config) -> Result<()> { if !non_interactive && !opt.check_update && !opt.update - && current_time - config.last_update_check >= one_day + && should_check_update(config.last_update_check, current_time) { let mut updater = Updater::new(env!("CARGO_PKG_VERSION"))?; updater.with_prerelease(true); diff --git a/src/packages/cargo.rs b/src/packages/cargo.rs new file mode 100644 index 0000000..9d48402 --- /dev/null +++ b/src/packages/cargo.rs @@ -0,0 +1,244 @@ +// Cargo (Rust) package detector + +use super::types::{PackageDetector, PackageInfo, PackageManager}; +use anyhow::{Context, Result}; +use std::fs; +use std::path::Path; +use toml_edit::{value, DocumentMut}; + +pub struct CargoDetector; + +impl PackageDetector for CargoDetector { + fn detect(&self, path: &Path) -> Result> { + let cargo_toml = path.join("Cargo.toml"); + if !cargo_toml.exists() { + return Ok(None); + } + + let content = fs::read_to_string(&cargo_toml) + .with_context(|| format!("Failed to read {}", cargo_toml.display()))?; + + let doc: DocumentMut = content + .parse() + .with_context(|| format!("Failed to parse {}", cargo_toml.display()))?; + + // Check if it's a 
workspace + let is_workspace = doc.get("workspace").is_some(); + let has_package = doc.get("package").is_some(); + + // Get workspace members if it's a workspace + let mut workspace_members = Vec::new(); + if is_workspace { + if let Some(workspace) = doc.get("workspace") { + if let Some(members) = workspace.get("members") { + if let Some(members_array) = members.as_array() { + for member in members_array.iter() { + if let Some(member_str) = member.as_str() { + workspace_members.push(member_str.to_string()); + } + } + } + } + } + } + + // If it's a workspace without a package section, we'll detect it but note it's workspace-only + let name = if has_package { + doc.get("package") + .and_then(|p| p.get("name")) + .and_then(|n| n.as_str()) + .ok_or_else(|| anyhow::anyhow!("No package name in Cargo.toml"))? + .to_string() + } else if is_workspace { + // Workspace-only Cargo.toml + path.file_name() + .and_then(|n| n.to_str()) + .unwrap_or("workspace") + .to_string() + } else { + return Err(anyhow::anyhow!( + "Cargo.toml has neither [package] nor [workspace] section" + )); + }; + + let version = if has_package { + self.get_version(path)? + } else { + "0.0.0".to_string() // Workspace-only has no version + }; + + Ok(Some(PackageInfo { + name, + manager: PackageManager::Cargo { + workspace: is_workspace, + }, + path: path.to_path_buf(), + version, + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + workspace_members, + })) + } + + fn get_version(&self, path: &Path) -> Result { + let cargo_toml = path.join("Cargo.toml"); + let content = fs::read_to_string(&cargo_toml) + .with_context(|| format!("Failed to read {}", cargo_toml.display()))?; + + let doc: DocumentMut = content + .parse() + .with_context(|| format!("Failed to parse {}", cargo_toml.display()))?; + + let version = doc + .get("package") + .and_then(|p| p.get("version")) + .and_then(|v| v.as_str()) + .ok_or_else(|| anyhow::anyhow!("No version in Cargo.toml"))? 
+ .to_string(); + + Ok(version) + } + + fn set_version(&self, path: &Path, version: &str) -> Result<()> { + let cargo_toml = path.join("Cargo.toml"); + let content = fs::read_to_string(&cargo_toml) + .with_context(|| format!("Failed to read {}", cargo_toml.display()))?; + + let mut doc: DocumentMut = content + .parse() + .with_context(|| format!("Failed to parse {}", cargo_toml.display()))?; + + // Update version + if let Some(package) = doc.get_mut("package") { + if let Some(package_table) = package.as_table_mut() { + package_table["version"] = value(version); + } + } else { + return Err(anyhow::anyhow!("No [package] section in Cargo.toml")); + } + + fs::write(&cargo_toml, doc.to_string()) + .with_context(|| format!("Failed to write {}", cargo_toml.display()))?; + + Ok(()) + } + + fn name(&self) -> &str { + "Cargo" + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_detect_cargo_package() { + let temp_dir = TempDir::new().unwrap(); + let cargo_toml = temp_dir.path().join("Cargo.toml"); + + fs::write( + &cargo_toml, + r#" +[package] +name = "test-package" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + let detector = CargoDetector; + let result = detector.detect(temp_dir.path()).unwrap(); + + assert!(result.is_some()); + let pkg = result.unwrap(); + assert_eq!(pkg.name, "test-package"); + assert_eq!(pkg.version, "1.0.0"); + assert_eq!(pkg.version_file, "Cargo.toml"); + assert!(!pkg.is_workspace()); + } + + #[test] + fn test_detect_cargo_workspace() { + let temp_dir = TempDir::new().unwrap(); + let cargo_toml = temp_dir.path().join("Cargo.toml"); + + fs::write( + &cargo_toml, + r#" +[workspace] +members = ["cli", "lib", "server"] + +[package] +name = "workspace-root" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + let detector = CargoDetector; + let result = detector.detect(temp_dir.path()).unwrap(); + + assert!(result.is_some()); + let pkg = result.unwrap(); + assert_eq!(pkg.name, 
"workspace-root"); + assert!(pkg.is_workspace()); + assert_eq!(pkg.workspace_members.len(), 3); + assert!(pkg.workspace_members.contains(&"cli".to_string())); + } + + #[test] + fn test_get_version() { + let temp_dir = TempDir::new().unwrap(); + let cargo_toml = temp_dir.path().join("Cargo.toml"); + + fs::write( + &cargo_toml, + r#" +[package] +name = "test-package" +version = "2.5.3" +edition = "2021" + "#, + ) + .unwrap(); + + let detector = CargoDetector; + let version = detector.get_version(temp_dir.path()).unwrap(); + assert_eq!(version, "2.5.3"); + } + + #[test] + fn test_set_version() { + let temp_dir = TempDir::new().unwrap(); + let cargo_toml = temp_dir.path().join("Cargo.toml"); + + fs::write( + &cargo_toml, + r#" +[package] +name = "test-package" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + let detector = CargoDetector; + detector.set_version(temp_dir.path(), "2.0.0").unwrap(); + + let version = detector.get_version(temp_dir.path()).unwrap(); + assert_eq!(version, "2.0.0"); + } + + #[test] + fn test_no_cargo_toml() { + let temp_dir = TempDir::new().unwrap(); + let detector = CargoDetector; + let result = detector.detect(temp_dir.path()).unwrap(); + assert!(result.is_none()); + } +} diff --git a/src/packages/detector.rs b/src/packages/detector.rs new file mode 100644 index 0000000..47bf107 --- /dev/null +++ b/src/packages/detector.rs @@ -0,0 +1,393 @@ +// Multi-package detector that orchestrates all package manager detectors + +use super::cargo::CargoDetector; +use super::npm::NpmDetector; +use super::types::{PackageDetector, PackageInfo}; +use anyhow::Result; +use log::debug; +use std::collections::HashSet; +use std::fs; +use std::path::{Path, PathBuf}; + +/// Multi-package detector that can detect various package managers +pub struct MultiPackageDetector { + detectors: Vec>, + max_depth: usize, +} + +impl MultiPackageDetector { + /// Create a new multi-package detector with default detectors + pub fn new() -> Self { + Self { + 
detectors: vec![ + Box::new(CargoDetector), + Box::new(NpmDetector), + // Add more detectors here as they're implemented + ], + max_depth: 5, // Q5: Default max depth of 5 + } + } + + /// Create a new multi-package detector with custom max depth + pub fn with_max_depth(mut self, max_depth: usize) -> Self { + self.max_depth = max_depth; + self + } + + /// Detect all packages in the repository + pub fn detect_all(&self, repo_path: &Path) -> Result> { + debug!("Detecting packages in {}", repo_path.display()); + let mut packages = Vec::new(); + let mut visited = HashSet::new(); + + self.detect_recursive(repo_path, repo_path, 0, &mut packages, &mut visited)?; + + debug!("Found {} packages", packages.len()); + Ok(packages) + } + + /// Recursively detect packages + fn detect_recursive( + &self, + repo_root: &Path, + current_path: &Path, + depth: usize, + packages: &mut Vec, + visited: &mut HashSet, + ) -> Result<()> { + // Check max depth (Q5: Configurable max depth) + if depth > self.max_depth { + return Ok(()); + } + + // Avoid infinite loops + let canonical = current_path + .canonicalize() + .unwrap_or_else(|_| current_path.to_path_buf()); + if !visited.insert(canonical) { + return Ok(()); + } + + debug!("Scanning {} (depth: {})", current_path.display(), depth); + + // Try each detector + for detector in &self.detectors { + if let Some(mut pkg) = detector.detect(current_path)? { + debug!( + "Detected {} package: {} at {}", + detector.name(), + pkg.name, + current_path.display() + ); + + // Make path relative to repo root and normalize root to "." 
for display + let relative_path = current_path.strip_prefix(repo_root).unwrap_or(current_path); + + pkg.path = if relative_path.as_os_str().is_empty() { + PathBuf::from(".") + } else { + relative_path.to_path_buf() + }; + + // Q6: Detect both workspace root and members, mark relationship + packages.push(pkg.clone()); + + // If it's a workspace, detect members + if pkg.is_workspace() && !pkg.workspace_members.is_empty() { + self.detect_workspace_members( + repo_root, + current_path, + &pkg.workspace_members, + depth, + packages, + visited, + )?; + } + + // Only allow deeper traversal from the repo root; otherwise stop here + if current_path != repo_root { + return Ok(()); + } + + break; + } + } + + // Recurse into subdirectories when appropriate. If a package was detected at the + // repository root we still want to scan siblings (other top-level packages). For + // detected non-root packages we return early above. + if current_path.is_dir() { + for entry in fs::read_dir(current_path)? { + let entry = entry?; + let path = entry.path(); + + // Skip hidden directories and common ignore patterns + if let Some(name) = path.file_name().and_then(|n| n.to_str()) { + if self.should_skip(name) { + continue; + } + } + + if path.is_dir() { + self.detect_recursive(repo_root, &path, depth + 1, packages, visited)?; + } + } + } + + Ok(()) + } + + /// Detect workspace members + fn detect_workspace_members( + &self, + repo_root: &Path, + workspace_root: &Path, + members: &[String], + depth: usize, + packages: &mut Vec, + visited: &mut HashSet, + ) -> Result<()> { + for member_pattern in members { + // Handle glob patterns (simple implementation) + if member_pattern.contains('*') { + // Expand glob pattern + let base_path = member_pattern.split('*').next().unwrap_or(""); + let search_path = workspace_root.join(base_path); + + if search_path.exists() && search_path.is_dir() { + for entry in fs::read_dir(&search_path)? 
{ + let entry = entry?; + let path = entry.path(); + if path.is_dir() { + self.detect_recursive(repo_root, &path, depth + 1, packages, visited)?; + } + } + } + } else { + // Direct member path + let member_path = workspace_root.join(member_pattern); + if member_path.exists() { + self.detect_recursive(repo_root, &member_path, depth + 1, packages, visited)?; + } + } + } + + Ok(()) + } + + /// Check if a directory should be skipped + fn should_skip(&self, name: &str) -> bool { + // Skip hidden directories + if name.starts_with('.') { + return true; + } + + // Skip common ignore patterns + matches!( + name, + "node_modules" | "target" | "dist" | "build" | ".git" | ".svn" | ".hg" + ) + } +} + +impl Default for MultiPackageDetector { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_detect_single_cargo_package() { + let temp_dir = TempDir::new().unwrap(); + fs::write( + temp_dir.path().join("Cargo.toml"), + r#" +[package] +name = "test-package" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + let detector = MultiPackageDetector::new(); + let packages = detector.detect_all(temp_dir.path()).unwrap(); + + assert_eq!(packages.len(), 1); + assert_eq!(packages[0].name, "test-package"); + } + + #[test] + fn test_detect_multiple_packages() { + let temp_dir = TempDir::new().unwrap(); + + // Create two separate directories with packages + let rust_dir = temp_dir.path().join("rust-app"); + fs::create_dir(&rust_dir).unwrap(); + fs::write( + rust_dir.join("Cargo.toml"), + r#" +[package] +name = "rust-package" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + // Create npm package in separate directory + let ui_dir = temp_dir.path().join("ui"); + fs::create_dir(&ui_dir).unwrap(); + fs::write( + ui_dir.join("package.json"), + r#" +{ + "name": "ui-package", + "version": "1.0.0" +} + "#, + ) + .unwrap(); + + let detector = MultiPackageDetector::new(); + let packages = 
detector.detect_all(temp_dir.path()).unwrap(); + + assert_eq!(packages.len(), 2); + assert!(packages.iter().any(|p| p.name == "rust-package")); + assert!(packages.iter().any(|p| p.name == "ui-package")); + } + + #[test] + fn test_detect_cargo_workspace() { + let temp_dir = TempDir::new().unwrap(); + + // Create workspace root + fs::write( + temp_dir.path().join("Cargo.toml"), + r#" +[workspace] +members = ["cli", "lib"] + +[package] +name = "workspace-root" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + // Create cli member + let cli_dir = temp_dir.path().join("cli"); + fs::create_dir(&cli_dir).unwrap(); + fs::write( + cli_dir.join("Cargo.toml"), + r#" +[package] +name = "cli" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + // Create lib member + let lib_dir = temp_dir.path().join("lib"); + fs::create_dir(&lib_dir).unwrap(); + fs::write( + lib_dir.join("Cargo.toml"), + r#" +[package] +name = "lib" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + let detector = MultiPackageDetector::new(); + let packages = detector.detect_all(temp_dir.path()).unwrap(); + + // Should detect workspace root + 2 members = 3 packages + assert_eq!(packages.len(), 3); + assert!(packages.iter().any(|p| p.name == "workspace-root")); + assert!(packages.iter().any(|p| p.name == "cli")); + assert!(packages.iter().any(|p| p.name == "lib")); + } + + #[test] + fn test_max_depth() { + let temp_dir = TempDir::new().unwrap(); + + // Create nested structure + let mut current = temp_dir.path().to_path_buf(); + for i in 0..10 { + current = current.join(format!("level{}", i)); + fs::create_dir(¤t).unwrap(); + } + + // Add package at depth 10 + fs::write( + current.join("package.json"), + r#" +{ + "name": "deep-package", + "version": "1.0.0" +} + "#, + ) + .unwrap(); + + // With max_depth = 5, should not find the package + let detector = MultiPackageDetector::new().with_max_depth(5); + let packages = detector.detect_all(temp_dir.path()).unwrap(); + 
assert_eq!(packages.len(), 0); + + // With max_depth = 15, should find it + let detector = MultiPackageDetector::new().with_max_depth(15); + let packages = detector.detect_all(temp_dir.path()).unwrap(); + assert_eq!(packages.len(), 1); + } + + #[test] + fn test_skip_node_modules() { + let temp_dir = TempDir::new().unwrap(); + + // Create package at root + fs::write( + temp_dir.path().join("package.json"), + r#" +{ + "name": "root-package", + "version": "1.0.0" +} + "#, + ) + .unwrap(); + + // Create package in node_modules (should be skipped) + let node_modules = temp_dir.path().join("node_modules").join("some-dep"); + fs::create_dir_all(&node_modules).unwrap(); + fs::write( + node_modules.join("package.json"), + r#" +{ + "name": "dependency", + "version": "1.0.0" +} + "#, + ) + .unwrap(); + + let detector = MultiPackageDetector::new(); + let packages = detector.detect_all(temp_dir.path()).unwrap(); + + // Should only find root package, not the one in node_modules + assert_eq!(packages.len(), 1); + assert_eq!(packages[0].name, "root-package"); + } +} diff --git a/src/packages/mod.rs b/src/packages/mod.rs new file mode 100644 index 0000000..eebc8d6 --- /dev/null +++ b/src/packages/mod.rs @@ -0,0 +1,8 @@ +// Package detection and management + +pub mod cargo; +pub mod detector; +pub mod npm; +pub mod types; + +pub use detector::MultiPackageDetector; diff --git a/src/packages/npm.rs b/src/packages/npm.rs new file mode 100644 index 0000000..af33b8c --- /dev/null +++ b/src/packages/npm.rs @@ -0,0 +1,308 @@ +// npm/pnpm/yarn (Node.js) package detector + +use super::types::{PackageDetector, PackageInfo, PackageManager}; +use anyhow::{Context, Result}; +use serde_json::{json, Value}; +use std::fs; +use std::path::Path; + +pub struct NpmDetector; + +impl PackageDetector for NpmDetector { + fn detect(&self, path: &Path) -> Result> { + let package_json = path.join("package.json"); + if !package_json.exists() { + return Ok(None); + } + + let content = 
fs::read_to_string(&package_json) + .with_context(|| format!("Failed to read {}", package_json.display()))?; + + let json: Value = serde_json::from_str(&content) + .with_context(|| format!("Failed to parse {}", package_json.display()))?; + + // Get package name + let name = json + .get("name") + .and_then(|n| n.as_str()) + .ok_or_else(|| anyhow::anyhow!("No name in package.json"))? + .to_string(); + + // Check if it's a workspace + let is_workspace = json.get("workspaces").is_some(); + + // Get workspace members if it's a workspace + let mut workspace_members = Vec::new(); + if is_workspace { + if let Some(workspaces) = json.get("workspaces") { + // workspaces can be an array or an object with "packages" field + let workspaces_array = if workspaces.is_array() { + workspaces.as_array() + } else { + workspaces.get("packages").and_then(|p| p.as_array()) + }; + + if let Some(arr) = workspaces_array { + for member in arr.iter() { + if let Some(member_str) = member.as_str() { + workspace_members.push(member_str.to_string()); + } + } + } + } + } + + // Determine package manager type + let manager = self.detect_package_manager(path, is_workspace)?; + + let version = self.get_version(path)?; + + Ok(Some(PackageInfo { + name, + manager, + path: path.to_path_buf(), + version, + version_file: "package.json".to_string(), + version_field: "version".to_string(), + workspace_members, + })) + } + + fn get_version(&self, path: &Path) -> Result { + let package_json = path.join("package.json"); + let content = fs::read_to_string(&package_json) + .with_context(|| format!("Failed to read {}", package_json.display()))?; + + let json: Value = serde_json::from_str(&content) + .with_context(|| format!("Failed to parse {}", package_json.display()))?; + + let version = json + .get("version") + .and_then(|v| v.as_str()) + .ok_or_else(|| anyhow::anyhow!("No version in package.json"))? 
+ .to_string(); + + Ok(version) + } + + fn set_version(&self, path: &Path, version: &str) -> Result<()> { + let package_json = path.join("package.json"); + let content = fs::read_to_string(&package_json) + .with_context(|| format!("Failed to read {}", package_json.display()))?; + + let mut json: Value = serde_json::from_str(&content) + .with_context(|| format!("Failed to parse {}", package_json.display()))?; + + // Update version + if let Some(obj) = json.as_object_mut() { + obj.insert("version".to_string(), json!(version)); + } else { + return Err(anyhow::anyhow!("package.json is not an object")); + } + + // Write back with pretty formatting + let formatted = + serde_json::to_string_pretty(&json).context("Failed to serialize package.json")?; + + fs::write(&package_json, formatted) + .with_context(|| format!("Failed to write {}", package_json.display()))?; + + Ok(()) + } + + fn name(&self) -> &str { + "npm" + } +} + +impl NpmDetector { + /// Detect which package manager is being used (npm, pnpm, or yarn) + fn detect_package_manager(&self, path: &Path, is_workspace: bool) -> Result { + // Check for lock files to determine package manager + if path.join("pnpm-lock.yaml").exists() { + return Ok(PackageManager::Pnpm { + workspace: is_workspace, + }); + } + + if path.join("yarn.lock").exists() { + return Ok(PackageManager::Yarn { + workspace: is_workspace, + }); + } + + // Default to npm + Ok(PackageManager::Npm { + workspace: is_workspace, + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + #[test] + fn test_detect_npm_package() { + let temp_dir = TempDir::new().unwrap(); + let package_json = temp_dir.path().join("package.json"); + + fs::write( + &package_json, + r#" +{ + "name": "test-package", + "version": "1.0.0", + "description": "Test package" +} + "#, + ) + .unwrap(); + + let detector = NpmDetector; + let result = detector.detect(temp_dir.path()).unwrap(); + + assert!(result.is_some()); + let pkg = result.unwrap(); + 
assert_eq!(pkg.name, "test-package"); + assert_eq!(pkg.version, "1.0.0"); + assert_eq!(pkg.version_file, "package.json"); + assert!(!pkg.is_workspace()); + assert_eq!(pkg.manager.name(), "npm"); + } + + #[test] + fn test_detect_npm_workspace() { + let temp_dir = TempDir::new().unwrap(); + let package_json = temp_dir.path().join("package.json"); + + fs::write( + &package_json, + r#" +{ + "name": "workspace-root", + "version": "1.0.0", + "workspaces": ["packages/*", "apps/*"] +} + "#, + ) + .unwrap(); + + let detector = NpmDetector; + let result = detector.detect(temp_dir.path()).unwrap(); + + assert!(result.is_some()); + let pkg = result.unwrap(); + assert_eq!(pkg.name, "workspace-root"); + assert!(pkg.is_workspace()); + assert_eq!(pkg.workspace_members.len(), 2); + assert!(pkg.workspace_members.contains(&"packages/*".to_string())); + } + + #[test] + fn test_detect_pnpm() { + let temp_dir = TempDir::new().unwrap(); + let package_json = temp_dir.path().join("package.json"); + let pnpm_lock = temp_dir.path().join("pnpm-lock.yaml"); + + fs::write( + &package_json, + r#" +{ + "name": "test-package", + "version": "1.0.0" +} + "#, + ) + .unwrap(); + + fs::write(&pnpm_lock, "# pnpm lock file").unwrap(); + + let detector = NpmDetector; + let result = detector.detect(temp_dir.path()).unwrap(); + + assert!(result.is_some()); + let pkg = result.unwrap(); + assert_eq!(pkg.manager.name(), "pnpm"); + } + + #[test] + fn test_detect_yarn() { + let temp_dir = TempDir::new().unwrap(); + let package_json = temp_dir.path().join("package.json"); + let yarn_lock = temp_dir.path().join("yarn.lock"); + + fs::write( + &package_json, + r#" +{ + "name": "test-package", + "version": "1.0.0" +} + "#, + ) + .unwrap(); + + fs::write(&yarn_lock, "# yarn lock file").unwrap(); + + let detector = NpmDetector; + let result = detector.detect(temp_dir.path()).unwrap(); + + assert!(result.is_some()); + let pkg = result.unwrap(); + assert_eq!(pkg.manager.name(), "Yarn"); + } + + #[test] + fn 
test_get_version() { + let temp_dir = TempDir::new().unwrap(); + let package_json = temp_dir.path().join("package.json"); + + fs::write( + &package_json, + r#" +{ + "name": "test-package", + "version": "2.5.3" +} + "#, + ) + .unwrap(); + + let detector = NpmDetector; + let version = detector.get_version(temp_dir.path()).unwrap(); + assert_eq!(version, "2.5.3"); + } + + #[test] + fn test_set_version() { + let temp_dir = TempDir::new().unwrap(); + let package_json = temp_dir.path().join("package.json"); + + fs::write( + &package_json, + r#" +{ + "name": "test-package", + "version": "1.0.0" +} + "#, + ) + .unwrap(); + + let detector = NpmDetector; + detector.set_version(temp_dir.path(), "2.0.0").unwrap(); + + let version = detector.get_version(temp_dir.path()).unwrap(); + assert_eq!(version, "2.0.0"); + } + + #[test] + fn test_no_package_json() { + let temp_dir = TempDir::new().unwrap(); + let detector = NpmDetector; + let result = detector.detect(temp_dir.path()).unwrap(); + assert!(result.is_none()); + } +} diff --git a/src/packages/types.rs b/src/packages/types.rs new file mode 100644 index 0000000..81e1b4c --- /dev/null +++ b/src/packages/types.rs @@ -0,0 +1,145 @@ +// Package types and traits + +use anyhow::Result; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; + +/// Package manager type +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub enum PackageManager { + Cargo { workspace: bool }, + Npm { workspace: bool }, + Pnpm { workspace: bool }, + Yarn { workspace: bool }, + GoMod, + Poetry, + Pipenv, + Maven, + Gradle, + Helm, +} + +impl PackageManager { + /// Get the package manager name as a string + pub fn name(&self) -> &str { + match self { + PackageManager::Cargo { .. } => "Cargo", + PackageManager::Npm { .. } => "npm", + PackageManager::Pnpm { .. } => "pnpm", + PackageManager::Yarn { .. 
} => "Yarn", + PackageManager::GoMod => "Go modules", + PackageManager::Poetry => "Poetry", + PackageManager::Pipenv => "Pipenv", + PackageManager::Maven => "Maven", + PackageManager::Gradle => "Gradle", + PackageManager::Helm => "HELM", + } + } + + /// Check if this is a workspace + pub fn is_workspace(&self) -> bool { + match self { + PackageManager::Cargo { workspace } => *workspace, + PackageManager::Npm { workspace } => *workspace, + PackageManager::Pnpm { workspace } => *workspace, + PackageManager::Yarn { workspace } => *workspace, + _ => false, + } + } + + /// Get the package type string for config + pub fn package_type(&self) -> &str { + match self { + PackageManager::Cargo { .. } => "rust-cargo", + PackageManager::Npm { .. } => "node-npm", + PackageManager::Pnpm { .. } => "node-pnpm", + PackageManager::Yarn { .. } => "node-yarn", + PackageManager::GoMod => "go-mod", + PackageManager::Poetry => "python-poetry", + PackageManager::Pipenv => "python-pipenv", + PackageManager::Maven => "java-maven", + PackageManager::Gradle => "java-gradle", + PackageManager::Helm => "helm", + } + } +} + +/// Information about a detected package +#[derive(Debug, Clone)] +pub struct PackageInfo { + /// Package name + pub name: String, + /// Package manager type + pub manager: PackageManager, + /// Path to package (relative to repository root) + pub path: PathBuf, + /// Current version + pub version: String, + /// Version file name (e.g., "Cargo.toml", "package.json") + pub version_file: String, + /// Version field path (e.g., "package.version", "version") + pub version_field: String, + /// Workspace members (if this is a workspace) + pub workspace_members: Vec, +} + +impl PackageInfo { + /// Get a display name for the package + #[allow(dead_code)] + pub fn display_name(&self) -> String { + format!("{} ({})", self.name, self.manager.name()) + } + + /// Check if this is a workspace + pub fn is_workspace(&self) -> bool { + self.manager.is_workspace() + } +} + +/// Trait for 
package manager detectors +pub trait PackageDetector: Send + Sync { + /// Detect if a package exists at the given path + /// Returns Some(PackageInfo) if detected, None otherwise + fn detect(&self, path: &std::path::Path) -> Result>; + + /// Get the version from a package at the given path + fn get_version(&self, path: &std::path::Path) -> Result; + + /// Set the version for a package at the given path + fn set_version(&self, path: &std::path::Path, version: &str) -> Result<()>; + + /// Get the name of the package manager + fn name(&self) -> &str; +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_package_manager_name() { + assert_eq!(PackageManager::Cargo { workspace: false }.name(), "Cargo"); + assert_eq!(PackageManager::Npm { workspace: false }.name(), "npm"); + assert_eq!(PackageManager::GoMod.name(), "Go modules"); + } + + #[test] + fn test_package_manager_is_workspace() { + assert!(PackageManager::Cargo { workspace: true }.is_workspace()); + assert!(!PackageManager::Cargo { workspace: false }.is_workspace()); + assert!(!PackageManager::GoMod.is_workspace()); + } + + #[test] + fn test_package_manager_package_type() { + assert_eq!( + PackageManager::Cargo { workspace: false }.package_type(), + "rust-cargo" + ); + assert_eq!( + PackageManager::Npm { workspace: false }.package_type(), + "node-npm" + ); + assert_eq!(PackageManager::GoMod.package_type(), "go-mod"); + } +} diff --git a/src/scope/detector.rs b/src/scope/detector.rs new file mode 100644 index 0000000..d12e84e --- /dev/null +++ b/src/scope/detector.rs @@ -0,0 +1,343 @@ +// Scope detector for multi-package repositories + +use super::matcher::{FileMatcher, ScopeMapping}; +use crate::config::repository::RepositoryConfig; +use crate::packages::MultiPackageDetector; +use anyhow::{Context, Result}; +use log::debug; +use std::collections::HashSet; +use std::path::{Path, PathBuf}; +use std::process::Command; + +/// Scope detector for determining affected scopes from changed files +pub struct 
ScopeDetector { + #[cfg_attr(not(test), allow(dead_code))] + config: RepositoryConfig, + repo_path: PathBuf, + file_matcher: FileMatcher, +} + +impl ScopeDetector { + /// Create a new scope detector + pub fn new(config: RepositoryConfig, repo_path: &Path) -> Self { + // Build file matcher from config mappings + let mappings = config + .scopes + .mappings + .iter() + .map(|m| ScopeMapping::new(m.pattern.clone(), m.scope.clone())) + .collect(); + + let file_matcher = FileMatcher::new(mappings); + + Self { + config, + repo_path: repo_path.to_path_buf(), + file_matcher, + } + } + + /// Detect scopes from git staged files (Q16: Staged files) + pub fn detect_from_staged(&self) -> Result> { + let staged_files = self.get_staged_files()?; + debug!("Staged files: {:?}", staged_files); + + if staged_files.is_empty() { + return Ok(Vec::new()); + } + + self.detect_from_files(&staged_files) + } + + /// Detect scopes from a list of file paths + pub fn detect_from_files(&self, files: &[PathBuf]) -> Result> { + let mut scopes = HashSet::new(); + + // Convert to Path references + let file_refs: Vec<&Path> = files.iter().map(|p| p.as_path()).collect(); + + // First, try file pattern matching from config + let matched_scopes = self.file_matcher.find_scopes(&file_refs)?; + for scope in matched_scopes { + scopes.insert(scope); + } + + // If no scopes found from patterns, try package-based detection (Q17: Both) + if scopes.is_empty() { + let package_scopes = self.detect_from_packages(&file_refs)?; + for scope in package_scopes { + scopes.insert(scope); + } + } + + Ok(scopes.into_iter().collect()) + } + + /// Detect scopes based on which packages contain the files + fn detect_from_packages(&self, files: &[&Path]) -> Result> { + let mut scopes = HashSet::new(); + + // Detect packages + let detector = MultiPackageDetector::new(); + let packages = detector.detect_all(&self.repo_path)?; + + for file in files { + // Normalize the file path (handle both absolute and relative) + let file_path = 
if file.is_absolute() { + file.strip_prefix(&self.repo_path).unwrap_or(file) + } else { + file + }; + + // Find which package this file belongs to + for pkg in &packages { + if file_path.starts_with(&pkg.path) { + // File is in this package + // Use package name as scope + scopes.insert(pkg.name.clone()); + break; + } + } + } + + Ok(scopes.into_iter().collect()) + } + + /// Get list of staged files from git + fn get_staged_files(&self) -> Result> { + let output = Command::new("git") + .args(["diff", "--cached", "--name-only"]) + .current_dir(&self.repo_path) + .output() + .context("Failed to run git diff")?; + + if !output.status.success() { + return Err(anyhow::anyhow!("git diff failed")); + } + + let stdout = String::from_utf8(output.stdout)?; + let files: Vec = stdout + .lines() + .filter(|line| !line.is_empty()) + .map(PathBuf::from) + .collect(); + + Ok(files) + } + + /// Suggest scopes based on available packages and config + #[cfg_attr(not(test), allow(dead_code))] + pub fn suggest_scopes(&self) -> Result> { + let mut scopes = HashSet::new(); + + // Add scopes from config mappings + for mapping in &self.config.scopes.mappings { + scopes.insert(mapping.scope.clone()); + } + + // Add package names as potential scopes + let detector = MultiPackageDetector::new(); + let packages = detector.detect_all(&self.repo_path)?; + for pkg in packages { + scopes.insert(pkg.name); + } + + let mut scope_list: Vec = scopes.into_iter().collect(); + scope_list.sort(); + Ok(scope_list) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::config::repository::{ + PackageConfig, RepositoryConfig, RepositoryMetadata, RepositoryType, VersioningConfig, + VersioningStrategy, + }; + use std::fs; + use tempfile::TempDir; + + fn create_test_config() -> RepositoryConfig { + use crate::config::repository::ScopeConfig; + use crate::config::repository::ScopeMapping as ConfigScopeMapping; + + let scopes = ScopeConfig { + auto_detect: true, + require_scope_for_multi_package: true, + 
allow_multiple_scopes: true, + scope_separator: ",".to_string(), + mappings: vec![ + ConfigScopeMapping { + pattern: "cli/**/*".to_string(), + scope: "cli".to_string(), + package: "cli".to_string(), + description: None, + }, + ConfigScopeMapping { + pattern: "server/**/*".to_string(), + scope: "server".to_string(), + package: "server".to_string(), + description: None, + }, + ConfigScopeMapping { + pattern: "docs/**/*".to_string(), + scope: "docs".to_string(), + package: "docs".to_string(), + description: None, + }, + ], + }; + + RepositoryConfig { + repository: RepositoryMetadata { + name: "test".to_string(), + repo_type: RepositoryType::MultiPackage, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Independent, + unified_version: None, + rules: None, + }, + packages: vec![ + PackageConfig { + name: "cli".to_string(), + package_type: "rust-cargo".to_string(), + path: "cli".to_string(), + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: None, + independent: true, + workspace_member: false, + description: None, + }, + PackageConfig { + name: "server".to_string(), + package_type: "node-npm".to_string(), + path: "server".to_string(), + version_file: "package.json".to_string(), + version_field: "version".to_string(), + primary: false, + sync_with: None, + independent: true, + workspace_member: false, + description: None, + }, + ], + dependencies: vec![], + scopes, + commit_rules: Default::default(), + workspace: None, + } + } + + #[test] + fn test_detect_from_files_with_patterns() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + + let detector = ScopeDetector::new(config, temp_dir.path()); + + let files = vec![ + PathBuf::from("cli/src/main.rs"), + PathBuf::from("cli/Cargo.toml"), + ]; + + let scopes = detector.detect_from_files(&files).unwrap(); + assert_eq!(scopes.len(), 1); + assert!(scopes.contains(&"cli".to_string())); + 
} + + #[test] + fn test_detect_from_files_multiple_scopes() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + + let detector = ScopeDetector::new(config, temp_dir.path()); + + let files = vec![ + PathBuf::from("cli/src/main.rs"), + PathBuf::from("server/package.json"), + PathBuf::from("docs/README.md"), + ]; + + let scopes = detector.detect_from_files(&files).unwrap(); + assert_eq!(scopes.len(), 3); + assert!(scopes.contains(&"cli".to_string())); + assert!(scopes.contains(&"server".to_string())); + assert!(scopes.contains(&"docs".to_string())); + } + + #[test] + fn test_detect_from_packages() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + + // Create cli package + let cli_dir = temp_dir.path().join("cli"); + fs::create_dir(&cli_dir).unwrap(); + fs::write( + cli_dir.join("Cargo.toml"), + r#" +[package] +name = "cli" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + // Create server package + let server_dir = temp_dir.path().join("server"); + fs::create_dir(&server_dir).unwrap(); + fs::write( + server_dir.join("package.json"), + r#" +{ + "name": "server", + "version": "1.0.0" +} + "#, + ) + .unwrap(); + + let detector = ScopeDetector::new(config, temp_dir.path()); + + let files = vec![Path::new("cli/src/main.rs"), Path::new("server/index.js")]; + + let scopes = detector.detect_from_packages(&files).unwrap(); + assert_eq!(scopes.len(), 2); + assert!(scopes.contains(&"cli".to_string())); + assert!(scopes.contains(&"server".to_string())); + } + + #[test] + fn test_suggest_scopes() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + + // Create packages + let cli_dir = temp_dir.path().join("cli"); + fs::create_dir(&cli_dir).unwrap(); + fs::write( + cli_dir.join("Cargo.toml"), + r#" +[package] +name = "cli" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + let detector = ScopeDetector::new(config, temp_dir.path()); + let scopes = 
detector.suggest_scopes().unwrap(); + + // Should include scopes from config and detected packages + assert!(scopes.contains(&"cli".to_string())); + assert!(scopes.contains(&"server".to_string())); + assert!(scopes.contains(&"docs".to_string())); + } +} diff --git a/src/scope/matcher.rs b/src/scope/matcher.rs new file mode 100644 index 0000000..7fdab30 --- /dev/null +++ b/src/scope/matcher.rs @@ -0,0 +1,216 @@ +// Scope matcher for detecting file-based scopes + +use anyhow::Result; +use glob::Pattern; +use serde::{Deserialize, Serialize}; +use std::path::Path; + +/// Mapping from file patterns to scopes +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ScopeMapping { + /// Glob pattern to match files + pub pattern: String, + /// Scope to assign when pattern matches + pub scope: String, + /// Optional description + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, +} + +impl ScopeMapping { + /// Create a new scope mapping + pub fn new(pattern: String, scope: String) -> Self { + Self { + pattern, + scope, + description: None, + } + } + + /// Create a new scope mapping with description + #[cfg(test)] + pub fn with_description(pattern: String, scope: String, description: String) -> Self { + Self { + pattern, + scope, + description: Some(description), + } + } + + /// Check if a file path matches this mapping + pub fn matches(&self, path: &Path) -> Result { + let pattern = Pattern::new(&self.pattern)?; + let path_str = path.to_string_lossy(); + Ok(pattern.matches(&path_str)) + } +} + +/// File matcher for determining scopes from file paths +pub struct FileMatcher { + mappings: Vec, +} + +impl FileMatcher { + /// Create a new file matcher with the given mappings + pub fn new(mappings: Vec) -> Self { + Self { mappings } + } + + /// Find scopes for a list of file paths + /// Returns unique scopes that match the given files + pub fn find_scopes(&self, files: &[&Path]) -> Result> { + let mut scopes = Vec::new(); + + for file in files { + 
for mapping in &self.mappings { + if mapping.matches(file)? && !scopes.contains(&mapping.scope) { + scopes.push(mapping.scope.clone()); + } + } + } + + Ok(scopes) + } + + /// Find the first matching scope for a file path + #[cfg(test)] + pub fn find_scope(&self, file: &Path) -> Result> { + for mapping in &self.mappings { + if mapping.matches(file)? { + return Ok(Some(mapping.scope.clone())); + } + } + Ok(None) + } + + /// Get all mappings + #[cfg(test)] + pub fn mappings(&self) -> &[ScopeMapping] { + &self.mappings + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_scope_mapping_matches() { + let mapping = ScopeMapping::new("src/**/*.rs".to_string(), "core".to_string()); + + assert!(mapping.matches(Path::new("src/main.rs")).unwrap()); + assert!(mapping.matches(Path::new("src/lib/mod.rs")).unwrap()); + assert!(!mapping.matches(Path::new("tests/test.rs")).unwrap()); + } + + #[test] + fn test_scope_mapping_with_description() { + let mapping = ScopeMapping::with_description( + "docs/**/*.md".to_string(), + "docs".to_string(), + "Documentation files".to_string(), + ); + + assert_eq!(mapping.scope, "docs"); + assert_eq!(mapping.description, Some("Documentation files".to_string())); + assert!(mapping.matches(Path::new("docs/README.md")).unwrap()); + } + + #[test] + fn test_file_matcher_find_scopes() { + let mappings = vec![ + ScopeMapping::new("src/**/*.rs".to_string(), "core".to_string()), + ScopeMapping::new("tests/**/*.rs".to_string(), "tests".to_string()), + ScopeMapping::new("docs/**/*.md".to_string(), "docs".to_string()), + ]; + + let matcher = FileMatcher::new(mappings); + + let files = vec![ + Path::new("src/main.rs"), + Path::new("src/lib.rs"), + Path::new("docs/README.md"), + ]; + + let scopes = matcher.find_scopes(&files).unwrap(); + assert_eq!(scopes.len(), 2); + assert!(scopes.contains(&"core".to_string())); + assert!(scopes.contains(&"docs".to_string())); + } + + #[test] + fn test_file_matcher_find_scope() { + let mappings = vec![ + 
ScopeMapping::new("src/**/*.rs".to_string(), "core".to_string()), + ScopeMapping::new("tests/**/*.rs".to_string(), "tests".to_string()), + ]; + + let matcher = FileMatcher::new(mappings); + + assert_eq!( + matcher.find_scope(Path::new("src/main.rs")).unwrap(), + Some("core".to_string()) + ); + assert_eq!( + matcher.find_scope(Path::new("tests/test.rs")).unwrap(), + Some("tests".to_string()) + ); + assert_eq!(matcher.find_scope(Path::new("README.md")).unwrap(), None); + } + + #[test] + fn test_file_matcher_unique_scopes() { + let mappings = vec![ScopeMapping::new( + "src/**/*.rs".to_string(), + "core".to_string(), + )]; + + let matcher = FileMatcher::new(mappings); + + let files = vec![ + Path::new("src/main.rs"), + Path::new("src/lib.rs"), + Path::new("src/config.rs"), + ]; + + let scopes = matcher.find_scopes(&files).unwrap(); + assert_eq!(scopes.len(), 1); + assert_eq!(scopes[0], "core"); + } + + #[test] + fn test_file_matcher_package_paths() { + let mappings = vec![ + ScopeMapping::new("cli/**/*".to_string(), "cli".to_string()), + ScopeMapping::new("server/**/*".to_string(), "server".to_string()), + ScopeMapping::new("docs/**/*".to_string(), "docs".to_string()), + ]; + + let matcher = FileMatcher::new(mappings); + + let files = vec![ + Path::new("cli/src/main.rs"), + Path::new("server/package.json"), + ]; + + let scopes = matcher.find_scopes(&files).unwrap(); + assert_eq!(scopes.len(), 2); + assert!(scopes.contains(&"cli".to_string())); + assert!(scopes.contains(&"server".to_string())); + } + + #[test] + fn test_file_matcher_mappings_accessor() { + let mappings = vec![ + ScopeMapping::new("src/**/*.rs".to_string(), "core".to_string()), + ScopeMapping::new("docs/**/*.md".to_string(), "docs".to_string()), + ]; + + let matcher = FileMatcher::new(mappings.clone()); + let exposed = matcher.mappings(); + + assert_eq!(exposed.len(), mappings.len()); + assert_eq!(exposed[0].pattern, mappings[0].pattern); + assert_eq!(exposed[1].scope, mappings[1].scope); + } +} diff 
--git a/src/scope/mod.rs b/src/scope/mod.rs new file mode 100644 index 0000000..939a0d9 --- /dev/null +++ b/src/scope/mod.rs @@ -0,0 +1,4 @@ +// Scope detection for multi-package repositories + +pub mod detector; +pub mod matcher; diff --git a/src/versioning/hybrid.rs b/src/versioning/hybrid.rs new file mode 100644 index 0000000..6a4811c --- /dev/null +++ b/src/versioning/hybrid.rs @@ -0,0 +1,389 @@ +// Hybrid versioning strategy +// Primary package drives version, others can sync or stay independent + +use super::manager::{BumpType, VersionManager, VersionUpdate}; +use crate::config::repository::RepositoryConfig; +use crate::packages::MultiPackageDetector; +use anyhow::{Context, Result}; +use semver::Version; +use std::collections::HashSet; +use std::path::Path; + +/// Hybrid versioning strategy +/// Primary package(s) drive the version, other packages can sync with them or stay independent +pub struct HybridVersioning { + config: RepositoryConfig, + repo_path: std::path::PathBuf, +} + +impl HybridVersioning { + /// Create a new hybrid versioning strategy + pub fn new(config: RepositoryConfig, repo_path: &Path) -> Self { + Self { + config, + repo_path: repo_path.to_path_buf(), + } + } + + /// Get all packages that should be updated when a package is bumped + fn get_synced_packages(&self, package_name: &str) -> Vec { + let mut synced = vec![package_name.to_string()]; + + // Find all packages that sync with this package + for pkg in &self.config.packages { + if let Some(ref sync_with) = pkg.sync_with { + if sync_with == package_name { + synced.push(pkg.name.clone()); + // Recursively find packages that sync with this one + synced.extend(self.get_synced_packages(&pkg.name)); + } + } + } + + synced + } +} + +impl VersionManager for HybridVersioning { + fn calculate_updates( + &self, + affected_packages: &[String], + bump_type: BumpType, + ) -> Result> { + let mut updates = Vec::new(); + let mut processed = HashSet::new(); + + // Detect current packages to get their 
versions + let detector = MultiPackageDetector::new(); + let detected_packages = detector.detect_all(&self.repo_path)?; + + // Determine which packages to update + let mut packages_to_update = HashSet::new(); + + for pkg_name in affected_packages { + // Find the package in config + let cfg_pkg = self + .config + .packages + .iter() + .find(|p| &p.name == pkg_name) + .ok_or_else(|| anyhow::anyhow!("Package '{}' not found in config", pkg_name))?; + + if cfg_pkg.independent { + // Independent packages are updated on their own + packages_to_update.insert(pkg_name.clone()); + } else if cfg_pkg.primary { + // Primary package: update it and all packages that sync with it + for synced_pkg in self.get_synced_packages(pkg_name) { + packages_to_update.insert(synced_pkg); + } + } else if let Some(ref sync_with) = cfg_pkg.sync_with { + // Package syncs with another: update the sync target and all its synced packages + for synced_pkg in self.get_synced_packages(sync_with) { + packages_to_update.insert(synced_pkg); + } + } else { + // Regular package (not primary, not synced, not independent) + packages_to_update.insert(pkg_name.clone()); + } + } + + // Calculate updates for each package to update + for pkg_name in packages_to_update { + if processed.contains(&pkg_name) { + continue; + } + processed.insert(pkg_name.clone()); + + // Find the package in config + let cfg_pkg = self + .config + .packages + .iter() + .find(|p| p.name == pkg_name) + .ok_or_else(|| anyhow::anyhow!("Package '{}' not found in config", pkg_name))?; + + // Find the detected package to get current version + let detected_pkg = detected_packages + .iter() + .find(|p| p.path == std::path::Path::new(&cfg_pkg.path)) + .ok_or_else(|| { + anyhow::anyhow!("Package '{}' not found at {}", pkg_name, cfg_pkg.path) + })?; + + // Parse current version + let current_version = Version::parse(&detected_pkg.version).with_context(|| { + format!( + "Invalid version '{}' for package '{}'", + detected_pkg.version, pkg_name + ) + 
})?; + + // Calculate new version + let new_version = bump_type.apply(¤t_version); + + updates.push(VersionUpdate::new( + pkg_name.clone(), + current_version.to_string(), + new_version.to_string(), + )); + } + + Ok(updates) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::config::repository::{ + PackageConfig, RepositoryConfig, RepositoryMetadata, RepositoryType, VersioningConfig, + VersioningStrategy, + }; + use std::fs; + use tempfile::TempDir; + + fn create_test_config() -> RepositoryConfig { + RepositoryConfig { + repository: RepositoryMetadata { + name: "test".to_string(), + repo_type: RepositoryType::MultiPackage, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Hybrid, + unified_version: None, + rules: None, + }, + packages: vec![ + PackageConfig { + name: "cli".to_string(), + package_type: "rust-cargo".to_string(), + path: "cli".to_string(), + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: true, + sync_with: None, + independent: false, + workspace_member: false, + description: None, + }, + PackageConfig { + name: "server".to_string(), + package_type: "node-npm".to_string(), + path: "server".to_string(), + version_file: "package.json".to_string(), + version_field: "version".to_string(), + primary: false, + sync_with: Some("cli".to_string()), + independent: false, + workspace_member: false, + description: None, + }, + PackageConfig { + name: "docs".to_string(), + package_type: "node-npm".to_string(), + path: "docs".to_string(), + version_file: "package.json".to_string(), + version_field: "version".to_string(), + primary: false, + sync_with: None, + independent: true, + workspace_member: false, + description: None, + }, + ], + dependencies: vec![], + scopes: Default::default(), + commit_rules: Default::default(), + workspace: None, + } + } + + #[test] + fn test_hybrid_versioning_primary_package() { + let temp_dir = TempDir::new().unwrap(); + let config 
= create_test_config(); + + // Create cli (primary) + let cli_dir = temp_dir.path().join("cli"); + fs::create_dir(&cli_dir).unwrap(); + fs::write( + cli_dir.join("Cargo.toml"), + r#" +[package] +name = "cli" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + // Create server (syncs with cli) + let server_dir = temp_dir.path().join("server"); + fs::create_dir(&server_dir).unwrap(); + fs::write( + server_dir.join("package.json"), + r#" +{ + "name": "server", + "version": "1.0.0" +} + "#, + ) + .unwrap(); + + // Create docs (independent) + let docs_dir = temp_dir.path().join("docs"); + fs::create_dir(&docs_dir).unwrap(); + fs::write( + docs_dir.join("package.json"), + r#" +{ + "name": "docs", + "version": "2.0.0" +} + "#, + ) + .unwrap(); + + let strategy = HybridVersioning::new(config, temp_dir.path()); + let updates = strategy + .calculate_updates(&["cli".to_string()], BumpType::Minor) + .unwrap(); + + // Should update cli and server (synced), but NOT docs (independent) + assert_eq!(updates.len(), 2); + + let cli_update = updates.iter().find(|u| u.package_name == "cli").unwrap(); + assert_eq!(cli_update.old_version, "1.0.0"); + assert_eq!(cli_update.new_version, "1.1.0"); + + let server_update = updates.iter().find(|u| u.package_name == "server").unwrap(); + assert_eq!(server_update.old_version, "1.0.0"); + assert_eq!(server_update.new_version, "1.1.0"); + + // docs should NOT be in updates + assert!(updates.iter().all(|u| u.package_name != "docs")); + } + + #[test] + fn test_hybrid_versioning_independent_package() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + + // Create all packages + let cli_dir = temp_dir.path().join("cli"); + fs::create_dir(&cli_dir).unwrap(); + fs::write( + cli_dir.join("Cargo.toml"), + r#" +[package] +name = "cli" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + let server_dir = temp_dir.path().join("server"); + fs::create_dir(&server_dir).unwrap(); + fs::write( + 
server_dir.join("package.json"), + r#" +{ + "name": "server", + "version": "1.0.0" +} + "#, + ) + .unwrap(); + + let docs_dir = temp_dir.path().join("docs"); + fs::create_dir(&docs_dir).unwrap(); + fs::write( + docs_dir.join("package.json"), + r#" +{ + "name": "docs", + "version": "2.0.0" +} + "#, + ) + .unwrap(); + + let strategy = HybridVersioning::new(config, temp_dir.path()); + let updates = strategy + .calculate_updates(&["docs".to_string()], BumpType::Patch) + .unwrap(); + + // Should only update docs (independent) + assert_eq!(updates.len(), 1); + assert_eq!(updates[0].package_name, "docs"); + assert_eq!(updates[0].old_version, "2.0.0"); + assert_eq!(updates[0].new_version, "2.0.1"); + } + + #[test] + fn test_hybrid_versioning_synced_package() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + + // Create all packages + let cli_dir = temp_dir.path().join("cli"); + fs::create_dir(&cli_dir).unwrap(); + fs::write( + cli_dir.join("Cargo.toml"), + r#" +[package] +name = "cli" +version = "1.5.0" +edition = "2021" + "#, + ) + .unwrap(); + + let server_dir = temp_dir.path().join("server"); + fs::create_dir(&server_dir).unwrap(); + fs::write( + server_dir.join("package.json"), + r#" +{ + "name": "server", + "version": "1.5.0" +} + "#, + ) + .unwrap(); + + let docs_dir = temp_dir.path().join("docs"); + fs::create_dir(&docs_dir).unwrap(); + fs::write( + docs_dir.join("package.json"), + r#" +{ + "name": "docs", + "version": "2.0.0" +} + "#, + ) + .unwrap(); + + let strategy = HybridVersioning::new(config, temp_dir.path()); + // When server is affected, it should update cli (its sync target) and server + let updates = strategy + .calculate_updates(&["server".to_string()], BumpType::Major) + .unwrap(); + + // Should update both cli and server + assert_eq!(updates.len(), 2); + + let cli_update = updates.iter().find(|u| u.package_name == "cli").unwrap(); + assert_eq!(cli_update.old_version, "1.5.0"); + assert_eq!(cli_update.new_version, 
"2.0.0"); + + let server_update = updates.iter().find(|u| u.package_name == "server").unwrap(); + assert_eq!(server_update.old_version, "1.5.0"); + assert_eq!(server_update.new_version, "2.0.0"); + } +} diff --git a/src/versioning/independent.rs b/src/versioning/independent.rs new file mode 100644 index 0000000..38d653b --- /dev/null +++ b/src/versioning/independent.rs @@ -0,0 +1,242 @@ +// Independent versioning strategy +// Each package maintains its own version independently + +use super::manager::{BumpType, VersionManager, VersionUpdate}; +use crate::config::repository::RepositoryConfig; +use crate::packages::MultiPackageDetector; +use anyhow::{Context, Result}; +use semver::Version; +use std::path::Path; + +/// Independent versioning strategy +/// Each package has its own version that is bumped independently +pub struct IndependentVersioning { + config: RepositoryConfig, + repo_path: std::path::PathBuf, +} + +impl IndependentVersioning { + /// Create a new independent versioning strategy + pub fn new(config: RepositoryConfig, repo_path: &Path) -> Self { + Self { + config, + repo_path: repo_path.to_path_buf(), + } + } +} + +impl VersionManager for IndependentVersioning { + fn calculate_updates( + &self, + affected_packages: &[String], + bump_type: BumpType, + ) -> Result> { + let mut updates = Vec::new(); + + // Detect current packages to get their versions + let detector = MultiPackageDetector::new(); + let detected_packages = detector.detect_all(&self.repo_path)?; + + // For each affected package, calculate the new version + for pkg_name in affected_packages { + // Find the package in config + let cfg_pkg = self + .config + .packages + .iter() + .find(|p| &p.name == pkg_name) + .ok_or_else(|| anyhow::anyhow!("Package '{}' not found in config", pkg_name))?; + + // Find the detected package to get current version + let detected_pkg = detected_packages + .iter() + .find(|p| p.path == std::path::Path::new(&cfg_pkg.path)) + .ok_or_else(|| { + 
anyhow::anyhow!("Package '{}' not found at {}", pkg_name, cfg_pkg.path) + })?; + + // Parse current version + let current_version = Version::parse(&detected_pkg.version).with_context(|| { + format!( + "Invalid version '{}' for package '{}'", + detected_pkg.version, pkg_name + ) + })?; + + // Calculate new version + let new_version = bump_type.apply(¤t_version); + + updates.push(VersionUpdate::new( + pkg_name.clone(), + current_version.to_string(), + new_version.to_string(), + )); + } + + Ok(updates) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::config::repository::{ + PackageConfig, RepositoryConfig, RepositoryMetadata, RepositoryType, VersioningConfig, + VersioningStrategy, + }; + use std::fs; + use tempfile::TempDir; + + fn create_test_config() -> RepositoryConfig { + RepositoryConfig { + repository: RepositoryMetadata { + name: "test".to_string(), + repo_type: RepositoryType::MultiPackage, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Independent, + unified_version: None, + rules: None, + }, + packages: vec![ + PackageConfig { + name: "pkg1".to_string(), + package_type: "rust-cargo".to_string(), + path: "pkg1".to_string(), + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: None, + independent: true, + workspace_member: false, + description: None, + }, + PackageConfig { + name: "pkg2".to_string(), + package_type: "node-npm".to_string(), + path: "pkg2".to_string(), + version_file: "package.json".to_string(), + version_field: "version".to_string(), + primary: false, + sync_with: None, + independent: true, + workspace_member: false, + description: None, + }, + ], + dependencies: vec![], + scopes: Default::default(), + commit_rules: Default::default(), + workspace: None, + } + } + + #[test] + fn test_independent_versioning_single_package() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + + // Create 
pkg1 + let pkg1_dir = temp_dir.path().join("pkg1"); + fs::create_dir(&pkg1_dir).unwrap(); + fs::write( + pkg1_dir.join("Cargo.toml"), + r#" +[package] +name = "pkg1" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + let strategy = IndependentVersioning::new(config, temp_dir.path()); + let updates = strategy + .calculate_updates(&["pkg1".to_string()], BumpType::Minor) + .unwrap(); + + assert_eq!(updates.len(), 1); + assert_eq!(updates[0].package_name, "pkg1"); + assert_eq!(updates[0].old_version, "1.0.0"); + assert_eq!(updates[0].new_version, "1.1.0"); + } + + #[test] + fn test_independent_versioning_multiple_packages() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + + // Create pkg1 + let pkg1_dir = temp_dir.path().join("pkg1"); + fs::create_dir(&pkg1_dir).unwrap(); + fs::write( + pkg1_dir.join("Cargo.toml"), + r#" +[package] +name = "pkg1" +version = "1.0.0" +edition = "2021" + "#, + ) + .unwrap(); + + // Create pkg2 + let pkg2_dir = temp_dir.path().join("pkg2"); + fs::create_dir(&pkg2_dir).unwrap(); + fs::write( + pkg2_dir.join("package.json"), + r#" +{ + "name": "pkg2", + "version": "2.5.0" +} + "#, + ) + .unwrap(); + + let strategy = IndependentVersioning::new(config, temp_dir.path()); + let updates = strategy + .calculate_updates(&["pkg1".to_string(), "pkg2".to_string()], BumpType::Patch) + .unwrap(); + + assert_eq!(updates.len(), 2); + + let pkg1_update = updates.iter().find(|u| u.package_name == "pkg1").unwrap(); + assert_eq!(pkg1_update.old_version, "1.0.0"); + assert_eq!(pkg1_update.new_version, "1.0.1"); + + let pkg2_update = updates.iter().find(|u| u.package_name == "pkg2").unwrap(); + assert_eq!(pkg2_update.old_version, "2.5.0"); + assert_eq!(pkg2_update.new_version, "2.5.1"); + } + + #[test] + fn test_independent_versioning_major_bump() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + + // Create pkg1 + let pkg1_dir = temp_dir.path().join("pkg1"); + 
fs::create_dir(&pkg1_dir).unwrap(); + fs::write( + pkg1_dir.join("Cargo.toml"), + r#" +[package] +name = "pkg1" +version = "1.5.3" +edition = "2021" + "#, + ) + .unwrap(); + + let strategy = IndependentVersioning::new(config, temp_dir.path()); + let updates = strategy + .calculate_updates(&["pkg1".to_string()], BumpType::Major) + .unwrap(); + + assert_eq!(updates.len(), 1); + assert_eq!(updates[0].old_version, "1.5.3"); + assert_eq!(updates[0].new_version, "2.0.0"); + } +} diff --git a/src/versioning/manager.rs b/src/versioning/manager.rs new file mode 100644 index 0000000..369f88d --- /dev/null +++ b/src/versioning/manager.rs @@ -0,0 +1,101 @@ +// Version manager trait and types + +use anyhow::Result; +use semver::Version; +use serde::{Deserialize, Serialize}; + +/// Type of version bump +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +pub enum BumpType { + Major, + Minor, + Patch, +} + +impl BumpType { + /// Apply the bump to a version + pub fn apply(&self, version: &Version) -> Version { + match self { + BumpType::Major => Version::new(version.major + 1, 0, 0), + BumpType::Minor => Version::new(version.major, version.minor + 1, 0), + BumpType::Patch => Version::new(version.major, version.minor, version.patch + 1), + } + } +} + +/// Information about a version update +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct VersionUpdate { + /// Package name + pub package_name: String, + /// Old version + pub old_version: String, + /// New version + pub new_version: String, +} + +impl VersionUpdate { + /// Create a new version update + pub fn new(package_name: String, old_version: String, new_version: String) -> Self { + Self { + package_name, + old_version, + new_version, + } + } +} + +/// Trait for version management strategies +pub trait VersionManager { + /// Calculate version updates for affected packages + /// + /// # Arguments + /// * `affected_packages` - Names of packages affected by the change + /// * `bump_type` - Type of 
version bump (major, minor, patch) + /// + /// # Returns + /// List of version updates to apply + fn calculate_updates( + &self, + affected_packages: &[String], + bump_type: BumpType, + ) -> Result>; +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_bump_type_major() { + let version = Version::new(1, 2, 3); + let bumped = BumpType::Major.apply(&version); + assert_eq!(bumped, Version::new(2, 0, 0)); + } + + #[test] + fn test_bump_type_minor() { + let version = Version::new(1, 2, 3); + let bumped = BumpType::Minor.apply(&version); + assert_eq!(bumped, Version::new(1, 3, 0)); + } + + #[test] + fn test_bump_type_patch() { + let version = Version::new(1, 2, 3); + let bumped = BumpType::Patch.apply(&version); + assert_eq!(bumped, Version::new(1, 2, 4)); + } + + #[test] + fn test_version_update_new() { + let update = VersionUpdate::new( + "test-package".to_string(), + "1.0.0".to_string(), + "1.1.0".to_string(), + ); + assert_eq!(update.package_name, "test-package"); + assert_eq!(update.old_version, "1.0.0"); + assert_eq!(update.new_version, "1.1.0"); + } +} diff --git a/src/versioning/mod.rs b/src/versioning/mod.rs new file mode 100644 index 0000000..bf95cd8 --- /dev/null +++ b/src/versioning/mod.rs @@ -0,0 +1,6 @@ +// Version management strategies for multi-package repositories + +pub mod hybrid; +pub mod independent; +pub mod manager; +pub mod unified; diff --git a/src/versioning/unified.rs b/src/versioning/unified.rs new file mode 100644 index 0000000..5c19c6a --- /dev/null +++ b/src/versioning/unified.rs @@ -0,0 +1,197 @@ +// Unified versioning strategy +// All packages share the same version number + +use super::manager::{BumpType, VersionManager, VersionUpdate}; +use crate::config::repository::RepositoryConfig; +use anyhow::Result; +use semver::Version; +use std::path::Path; + +/// Unified versioning strategy +/// All packages share the same version and are bumped together +pub struct UnifiedVersioning { + config: RepositoryConfig, + _repo_path: 
std::path::PathBuf, +} + +impl UnifiedVersioning { + /// Create a new unified versioning strategy + pub fn new(config: RepositoryConfig, repo_path: &Path) -> Self { + Self { + config, + _repo_path: repo_path.to_path_buf(), + } + } +} + +impl VersionManager for UnifiedVersioning { + fn calculate_updates( + &self, + _affected_packages: &[String], + bump_type: BumpType, + ) -> Result> { + let mut updates = Vec::new(); + + // Get the unified version from config (Q11: Both - config overrides package) + let current_version_str = + self.config + .versioning + .unified_version + .as_ref() + .ok_or_else(|| { + anyhow::anyhow!("Unified versioning requires unified_version in config") + })?; + + // Parse current version + let current_version = Version::parse(current_version_str)?; + + // Calculate new version + let new_version = bump_type.apply(¤t_version); + + // Create updates for ALL packages + for pkg in &self.config.packages { + updates.push(VersionUpdate::new( + pkg.name.clone(), + current_version.to_string(), + new_version.to_string(), + )); + } + + Ok(updates) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::config::repository::{ + PackageConfig, RepositoryConfig, RepositoryMetadata, RepositoryType, VersioningConfig, + VersioningStrategy, + }; + use tempfile::TempDir; + + fn create_test_config(unified_version: &str) -> RepositoryConfig { + RepositoryConfig { + repository: RepositoryMetadata { + name: "test".to_string(), + repo_type: RepositoryType::Monorepo, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Unified, + unified_version: Some(unified_version.to_string()), + rules: None, + }, + packages: vec![ + PackageConfig { + name: "pkg1".to_string(), + package_type: "rust-cargo".to_string(), + path: "pkg1".to_string(), + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: None, + independent: false, + workspace_member: false, + description: None, 
+ }, + PackageConfig { + name: "pkg2".to_string(), + package_type: "node-npm".to_string(), + path: "pkg2".to_string(), + version_file: "package.json".to_string(), + version_field: "version".to_string(), + primary: false, + sync_with: None, + independent: false, + workspace_member: false, + description: None, + }, + PackageConfig { + name: "pkg3".to_string(), + package_type: "node-npm".to_string(), + path: "pkg3".to_string(), + version_file: "package.json".to_string(), + version_field: "version".to_string(), + primary: false, + sync_with: None, + independent: false, + workspace_member: false, + description: None, + }, + ], + dependencies: vec![], + scopes: Default::default(), + commit_rules: Default::default(), + workspace: None, + } + } + + #[test] + fn test_unified_versioning_minor_bump() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config("1.0.0"); + + let strategy = UnifiedVersioning::new(config, temp_dir.path()); + let updates = strategy + .calculate_updates(&["pkg1".to_string()], BumpType::Minor) + .unwrap(); + + // Should update ALL packages + assert_eq!(updates.len(), 3); + + for update in &updates { + assert_eq!(update.old_version, "1.0.0"); + assert_eq!(update.new_version, "1.1.0"); + } + } + + #[test] + fn test_unified_versioning_major_bump() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config("2.5.3"); + + let strategy = UnifiedVersioning::new(config, temp_dir.path()); + let updates = strategy + .calculate_updates(&["pkg2".to_string()], BumpType::Major) + .unwrap(); + + // Should update ALL packages + assert_eq!(updates.len(), 3); + + for update in &updates { + assert_eq!(update.old_version, "2.5.3"); + assert_eq!(update.new_version, "3.0.0"); + } + } + + #[test] + fn test_unified_versioning_patch_bump() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config("1.2.3"); + + let strategy = UnifiedVersioning::new(config, temp_dir.path()); + let updates = 
strategy.calculate_updates(&[], BumpType::Patch).unwrap(); + + // Should update ALL packages even if no specific packages affected + assert_eq!(updates.len(), 3); + + for update in &updates { + assert_eq!(update.old_version, "1.2.3"); + assert_eq!(update.new_version, "1.2.4"); + } + } + + #[test] + fn test_unified_versioning_no_version_error() { + let temp_dir = TempDir::new().unwrap(); + let mut config = create_test_config("1.0.0"); + config.versioning.unified_version = None; + + let strategy = UnifiedVersioning::new(config, temp_dir.path()); + let result = strategy.calculate_updates(&["pkg1".to_string()], BumpType::Minor); + + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("unified_version")); + } +} diff --git a/src/workflow/mod.rs b/src/workflow/mod.rs new file mode 100644 index 0000000..27e055c --- /dev/null +++ b/src/workflow/mod.rs @@ -0,0 +1,3 @@ +// Workflow orchestrator for multi-package operations + +pub mod orchestrator; diff --git a/src/workflow/orchestrator.rs b/src/workflow/orchestrator.rs new file mode 100644 index 0000000..ee95ed2 --- /dev/null +++ b/src/workflow/orchestrator.rs @@ -0,0 +1,414 @@ +// Workflow orchestrator for coordinating scope detection, versioning, and dependency updates + +use crate::config::repository::RepositoryConfig; +use crate::dependency::updater::{DependencyUpdate, DependencyUpdater}; +use crate::scope::detector::ScopeDetector; +use crate::versioning::hybrid::HybridVersioning; +use crate::versioning::independent::IndependentVersioning; +use crate::versioning::manager::{BumpType, VersionManager, VersionUpdate}; +use crate::versioning::unified::UnifiedVersioning; +use anyhow::{Context, Result}; +use log::{debug, info}; +use serde::Serialize; +use std::collections::HashSet; +use std::path::{Path, PathBuf}; + +/// Result of workflow orchestration +#[derive(Debug, Clone, Serialize)] +pub struct WorkflowResult { + /// Detected scopes from staged files + pub scopes: Vec, + /// Version updates to 
apply + pub version_updates: Vec, + /// Dependency updates to apply + pub dependency_updates: Vec, + /// Files that will be modified + pub modified_files: Vec, +} + +impl WorkflowResult { + /// Check if there are any changes to apply + pub fn has_changes(&self) -> bool { + !self.version_updates.is_empty() || !self.dependency_updates.is_empty() + } +} + +/// Workflow orchestrator for multi-package operations +pub struct WorkflowOrchestrator { + repo_path: PathBuf, + config: RepositoryConfig, +} + +impl WorkflowOrchestrator { + /// Create a new workflow orchestrator + pub fn new(repo_path: &Path, config: RepositoryConfig) -> Self { + Self { + repo_path: repo_path.to_path_buf(), + config, + } + } + + /// Detect scopes from staged files + /// + /// Returns detected scopes based on file patterns and package membership + pub fn detect_scopes(&self) -> Result> { + if !self.config.scopes.auto_detect { + debug!("Auto-detect is disabled in config"); + return Ok(vec![]); + } + + let detector = ScopeDetector::new(self.config.clone(), &self.repo_path); + let scopes = detector.detect_from_staged()?; + + info!("Detected scopes: {:?}", scopes); + Ok(scopes) + } + + /// Suggest scopes for the current repository state + /// Calculate version updates based on commit message and affected packages + /// + /// # Arguments + /// * `commit_message` - The commit message to analyze for bump type + /// * `scopes` - Scopes/packages affected by the commit + pub fn calculate_version_updates( + &self, + commit_message: &str, + scopes: &[String], + ) -> Result> { + // Determine bump type from commit message + let bump_type = self.determine_bump_type(commit_message)?; + + // Calculate updates based on versioning strategy + let updates = match self.config.versioning.strategy { + crate::config::repository::VersioningStrategy::Independent => { + let strategy = IndependentVersioning::new(self.config.clone(), &self.repo_path); + strategy.calculate_updates(scopes, bump_type)? 
+ } + crate::config::repository::VersioningStrategy::Unified => { + let strategy = UnifiedVersioning::new(self.config.clone(), &self.repo_path); + strategy.calculate_updates(scopes, bump_type)? + } + crate::config::repository::VersioningStrategy::Hybrid => { + let strategy = HybridVersioning::new(self.config.clone(), &self.repo_path); + strategy.calculate_updates(scopes, bump_type)? + } + }; + + info!("Calculated {} version update(s)", updates.len()); + Ok(updates) + } + + /// Calculate dependency updates based on version changes + pub fn calculate_dependency_updates( + &self, + version_updates: &[VersionUpdate], + ) -> Result> { + if self.config.dependencies.is_empty() { + debug!("No dependencies configured"); + return Ok(vec![]); + } + + let updater = DependencyUpdater::new(self.config.clone(), &self.repo_path); + let mut all_updates = Vec::new(); + + for version_update in version_updates { + let updates = updater + .calculate_updates(&version_update.package_name, &version_update.new_version)?; + all_updates.extend(updates); + } + + info!("Calculated {} dependency update(s)", all_updates.len()); + Ok(all_updates) + } + + /// Apply version updates to package files + pub fn apply_version_updates(&self, updates: &[VersionUpdate]) -> Result> { + let mut updated_files = Vec::new(); + + for update in updates { + debug!( + "Applying version update: {} {} -> {}", + update.package_name, update.old_version, update.new_version + ); + + // Find the package config to get version_file and version_field + let pkg_config = self + .config + .packages + .iter() + .find(|p| p.name == update.package_name) + .context(format!( + "Package '{}' not found in config", + update.package_name + ))?; + + // Update the version file for this package + let version_file = self + .repo_path + .join(&pkg_config.path) + .join(&pkg_config.version_file); + + // Use the appropriate handler based on file type + self.update_version_file( + &version_file, + &pkg_config.version_field, + 
&update.new_version, + )?; + + updated_files.push(version_file); + } + + Ok(updated_files) + } + + /// Apply dependency updates to files + pub fn apply_dependency_updates(&self, updates: &[DependencyUpdate]) -> Result> { + let updater = DependencyUpdater::new(self.config.clone(), &self.repo_path); + + let updated_file_strings = updater.apply_updates(updates)?; + let updated_files = updated_file_strings + .into_iter() + .map(PathBuf::from) + .collect(); + + Ok(updated_files) + } + + /// Run the full workflow: detect scopes, calculate updates, and optionally apply them + /// + /// # Arguments + /// * `commit_message` - The commit message to analyze + /// * `apply_changes` - Whether to apply the calculated changes + pub fn run_workflow( + &self, + commit_message: &str, + apply_changes: bool, + ) -> Result { + info!("Running workflow orchestrator"); + + // Step 1: Detect scopes + let scopes = self.detect_scopes()?; + + // Step 2: Calculate version updates + let version_updates = if !scopes.is_empty() { + self.calculate_version_updates(commit_message, &scopes)? 
+ } else { + debug!("No scopes detected, skipping version updates"); + vec![] + }; + + // Step 3: Calculate dependency updates + let dependency_updates = self.calculate_dependency_updates(&version_updates)?; + + // Step 4: Apply changes if requested + let mut modified_files = Vec::new(); + if apply_changes { + let version_files = self.apply_version_updates(&version_updates)?; + let dependency_files = self.apply_dependency_updates(&dependency_updates)?; + + modified_files.extend(version_files); + modified_files.extend(dependency_files); + + // Deduplicate files + let unique_files: HashSet = modified_files.into_iter().collect(); + modified_files = unique_files.into_iter().collect(); + + info!("Applied changes to {} file(s)", modified_files.len()); + } + + Ok(WorkflowResult { + scopes, + version_updates, + dependency_updates, + modified_files, + }) + } + + /// Determine bump type from commit message + fn determine_bump_type(&self, commit_message: &str) -> Result { + // Use custom rules if available, otherwise use defaults + let (major_pattern, minor_pattern) = if let Some(rules) = &self.config.versioning.rules { + ( + rules + .major_regex + .as_deref() + .unwrap_or(crate::config::MAJOR_REGEX), + rules + .minor_regex + .as_deref() + .unwrap_or(crate::config::MINOR_REGEX), + ) + } else { + (crate::config::MAJOR_REGEX, crate::config::MINOR_REGEX) + }; + + let major_regex = regex::Regex::new(major_pattern)?; + let minor_regex = regex::Regex::new(minor_pattern)?; + + if major_regex.is_match(commit_message) { + Ok(BumpType::Major) + } else if minor_regex.is_match(commit_message) { + Ok(BumpType::Minor) + } else { + // Default to patch for any other commit + Ok(BumpType::Patch) + } + } + + /// Update a version file with a new version + fn update_version_file(&self, file_path: &Path, field: &str, new_version: &str) -> Result<()> { + use crate::dependency::handlers; + + let file_type = self.detect_file_type(file_path)?; + + match file_type.as_str() { + "yaml" | "yml" => 
handlers::yaml::update_version(file_path, field, new_version), + "json" => handlers::json::update_version(file_path, field, new_version), + "toml" => handlers::toml::update_version(file_path, field, new_version), + _ => Err(anyhow::anyhow!("Unsupported file type: {}", file_type)), + } + } + + /// Detect file type from extension + fn detect_file_type(&self, file_path: &Path) -> Result { + let extension = file_path + .extension() + .and_then(|e| e.to_str()) + .ok_or_else(|| anyhow::anyhow!("No file extension"))?; + + Ok(extension.to_lowercase()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::config::repository::{ + PackageConfig, RepositoryConfig, RepositoryMetadata, RepositoryType, ScopeConfig, + VersioningConfig, VersioningStrategy, + }; + use tempfile::TempDir; + + fn create_test_config() -> RepositoryConfig { + RepositoryConfig { + repository: RepositoryMetadata { + name: "test-repo".to_string(), + repo_type: RepositoryType::MultiPackage, + description: None, + }, + versioning: VersioningConfig { + strategy: VersioningStrategy::Independent, + unified_version: None, + rules: None, + }, + packages: vec![PackageConfig { + name: "test-package".to_string(), + package_type: "rust-cargo".to_string(), + path: ".".to_string(), + version_file: "Cargo.toml".to_string(), + version_field: "package.version".to_string(), + primary: false, + sync_with: None, + independent: true, + workspace_member: false, + description: None, + }], + dependencies: vec![], + scopes: ScopeConfig::default(), + commit_rules: Default::default(), + workspace: None, + } + } + + #[test] + fn test_orchestrator_creation() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + let orchestrator = WorkflowOrchestrator::new(temp_dir.path(), config.clone()); + + assert_eq!(orchestrator.repo_path, temp_dir.path()); + assert_eq!(orchestrator.config.repository.name, "test-repo"); + } + + #[test] + fn test_orchestrator_with_config() { + let temp_dir = 
TempDir::new().unwrap(); + let config = create_test_config(); + let orchestrator = WorkflowOrchestrator::new(temp_dir.path(), config.clone()); + + assert_eq!(orchestrator.config.repository.name, "test-repo"); + } + + #[test] + fn test_determine_bump_type() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + let orchestrator = WorkflowOrchestrator::new(temp_dir.path(), config); + + assert_eq!( + orchestrator + .determine_bump_type("feat: add new feature") + .unwrap(), + BumpType::Minor + ); + assert_eq!( + orchestrator.determine_bump_type("fix: fix bug").unwrap(), + BumpType::Patch + ); + assert_eq!( + orchestrator + .determine_bump_type("feat!: breaking change") + .unwrap(), + BumpType::Major + ); + } + + #[test] + fn test_workflow_result_has_changes() { + let result = WorkflowResult { + scopes: vec!["test".to_string()], + version_updates: vec![], + dependency_updates: vec![], + modified_files: vec![], + }; + assert!(!result.has_changes()); + + let result_with_changes = WorkflowResult { + scopes: vec!["test".to_string()], + version_updates: vec![VersionUpdate::new( + "test".to_string(), + "1.0.0".to_string(), + "1.1.0".to_string(), + )], + dependency_updates: vec![], + modified_files: vec![], + }; + assert!(result_with_changes.has_changes()); + } + + #[test] + fn test_detect_file_type() { + let temp_dir = TempDir::new().unwrap(); + let config = create_test_config(); + let orchestrator = WorkflowOrchestrator::new(temp_dir.path(), config); + + assert_eq!( + orchestrator + .detect_file_type(Path::new("Cargo.toml")) + .unwrap(), + "toml" + ); + assert_eq!( + orchestrator + .detect_file_type(Path::new("package.json")) + .unwrap(), + "json" + ); + assert_eq!( + orchestrator + .detect_file_type(Path::new("values.yaml")) + .unwrap(), + "yaml" + ); + } +} diff --git a/tests/agent_cli_tests.rs b/tests/agent_cli_tests.rs new file mode 100644 index 0000000..0560a1c --- /dev/null +++ b/tests/agent_cli_tests.rs @@ -0,0 +1,417 @@ +mod common; + +use 
serde_json::Value; +use std::fs; +use std::process::Command as StdCommand; +use tempfile::tempdir; + +fn setup_repo() -> tempfile::TempDir { + common::setup_test_env(); + + let dir = tempdir().expect("Failed to create temp directory"); + + StdCommand::new("git") + .args(["init"]) + .current_dir(&dir) + .output() + .expect("Failed to initialize git repository"); + + StdCommand::new("git") + .args(["config", "user.name", "Test User"]) + .current_dir(&dir) + .output() + .expect("Failed to configure git user name"); + + StdCommand::new("git") + .args(["config", "user.email", "test@example.com"]) + .current_dir(&dir) + .output() + .expect("Failed to configure git user email"); + + let file = dir.path().join("tracked.txt"); + fs::write(&file, "initial\n").expect("Failed to write tracked file"); + StdCommand::new("git") + .args(["add", "tracked.txt"]) + .current_dir(&dir) + .output() + .expect("Failed to stage tracked file"); + StdCommand::new("git") + .args(["commit", "-m", "feat: initial"]) + .current_dir(&dir) + .output() + .expect("Failed to create initial commit"); + + dir +} + +fn write_commit_rules_config(dir: &std::path::Path, body: &str) { + fs::create_dir_all(dir.join(".committy")).expect("Failed to create .committy"); + fs::write( + dir.join(".committy/config.toml"), + format!( + r#"packages = [] + +[repository] +name = "agent-repo" +type = "single-package" + +[versioning] +strategy = "independent" + +[scopes] +auto_detect = false +require_scope_for_multi_package = false +allow_multiple_scopes = false + +[commit_rules] +{body} +"# + ), + ) + .expect("Failed to write commit rules config"); +} + +#[test] +fn test_branch_dry_run_json_outputs_plan() { + let temp_dir = setup_repo(); + + let assert = common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("branch") + .arg("--name") + .arg("feat-agent-plan") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = 
String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + + assert_eq!(v["command"], Value::String("branch".into())); + assert_eq!(v["ok"], Value::Bool(true)); + assert_eq!(v["dry_run"], Value::Bool(true)); + assert_eq!(v["branch_name"], Value::String("feat-agent-plan".into())); + assert_eq!(v["branch_type"], Value::String("feat".into())); + assert_eq!(v["ticket"], Value::String(String::new())); + assert_eq!(v["subject"], Value::String("agent-plan".into())); + assert_eq!(v["would_create"], Value::Bool(true)); + assert_eq!(v["would_checkout"], Value::Bool(false)); + + let branches = StdCommand::new("git") + .args(["branch", "--list", "feat-agent-plan"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to list branches"); + assert!( + String::from_utf8_lossy(&branches.stdout).trim().is_empty(), + "dry-run must not create the branch" + ); +} + +#[test] +fn test_branch_structured_dry_run_json_outputs_plan() { + let temp_dir = setup_repo(); + + let assert = common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("branch") + .arg("--type") + .arg("feat") + .arg("--ticket") + .arg("AI42") + .arg("--subject") + .arg("agent flow") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + + assert_eq!(v["command"], Value::String("branch".into())); + assert_eq!(v["ok"], Value::Bool(true)); + assert_eq!(v["dry_run"], Value::Bool(true)); + assert_eq!( + v["branch_name"], + Value::String("feat-AI42-agent_flow".into()) + ); + assert_eq!(v["branch_type"], Value::String("feat".into())); + assert_eq!(v["ticket"], Value::String("AI42".into())); + assert_eq!(v["subject"], Value::String("agent_flow".into())); + assert_eq!(v["would_checkout"], Value::Bool(false)); +} + +#[test] +fn 
test_branch_structured_flags_conflict_with_name() { + let temp_dir = setup_repo(); + + common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("branch") + .arg("--name") + .arg("feat-agent-plan") + .arg("--type") + .arg("feat") + .arg("--subject") + .arg("agent") + .assert() + .failure() + .stderr(predicates::str::contains( + "Use either --name or structured branch flags", + )); +} + +#[test] +fn test_branch_dry_run_json_supports_repo_path_without_chdir() { + let temp_dir = setup_repo(); + let runner_dir = tempdir().expect("Failed to create runner directory"); + + let assert = common::committy_cmd() + .current_dir(&runner_dir) + .arg("--non-interactive") + .arg("branch") + .arg("--repo-path") + .arg(temp_dir.path()) + .arg("--name") + .arg("feat-agent-plan") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["ok"], Value::Bool(true)); + assert_eq!(v["branch_name"], Value::String("feat-agent-plan".into())); +} + +#[test] +fn test_commit_dry_run_json_does_not_create_commit() { + let temp_dir = setup_repo(); + let file = temp_dir.path().join("tracked.txt"); + fs::write(&file, "changed\n").expect("Failed to update tracked file"); + StdCommand::new("git") + .args(["add", "tracked.txt"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage tracked file"); + + let before = StdCommand::new("git") + .args(["rev-list", "--count", "HEAD"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to count commits"); + + let assert = common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("commit") + .arg("--type") + .arg("feat") + .arg("--scope") + .arg("agent") + .arg("--message") + .arg("preview agent commit") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = 
String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["command"], Value::String("commit".into())); + assert_eq!(v["ok"], Value::Bool(true)); + assert_eq!(v["dry_run"], Value::Bool(true)); + assert_eq!( + v["message"], + Value::String("feat(agent): preview agent commit".into()) + ); + + let after = StdCommand::new("git") + .args(["rev-list", "--count", "HEAD"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to count commits"); + + assert_eq!( + before.stdout, after.stdout, + "dry-run must not create commits" + ); +} + +#[test] +fn test_commit_dry_run_json_supports_repo_path_without_chdir() { + let temp_dir = setup_repo(); + let runner_dir = tempdir().expect("Failed to create runner directory"); + let file = temp_dir.path().join("tracked.txt"); + fs::write(&file, "changed\n").expect("Failed to update tracked file"); + StdCommand::new("git") + .args(["add", "tracked.txt"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage tracked file"); + + let assert = common::committy_cmd() + .current_dir(&runner_dir) + .arg("--non-interactive") + .arg("commit") + .arg("--repo-path") + .arg(temp_dir.path()) + .arg("--type") + .arg("feat") + .arg("--message") + .arg("preview agent commit") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["command"], Value::String("commit".into())); + assert_eq!(v["ok"], Value::Bool(true)); +} + +#[test] +fn test_commit_dry_run_json_respects_require_body_rule() { + let temp_dir = setup_repo(); + write_commit_rules_config(temp_dir.path(), "require_body = true"); + + let file = temp_dir.path().join("tracked.txt"); + fs::write(&file, "changed\n").expect("Failed to update tracked file"); + StdCommand::new("git") + .args(["add", "tracked.txt"]) + 
.current_dir(&temp_dir) + .output() + .expect("Failed to stage tracked file"); + + let assert = common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("commit") + .arg("--type") + .arg("feat") + .arg("--message") + .arg("preview without body") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .code(3); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["command"], Value::String("commit".into())); + assert_eq!(v["ok"], Value::Bool(false)); + assert_eq!(v["dry_run"], Value::Bool(true)); + assert_eq!( + v["errors"][0], + Value::String("Commit body is required by repository configuration".into()) + ); +} + +#[test] +fn test_commit_dry_run_json_accepts_custom_commit_type_from_repo_rules() { + let temp_dir = setup_repo(); + write_commit_rules_config( + temp_dir.path(), + r#"allowed_types = ["feat"] + +[[commit_rules.custom_types]] +name = "wip" +description = "Work in progress" +bump = "none""#, + ); + + let file = temp_dir.path().join("tracked.txt"); + fs::write(&file, "changed again\n").expect("Failed to update tracked file"); + StdCommand::new("git") + .args(["add", "tracked.txt"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage tracked file"); + + let assert = common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("commit") + .arg("--type") + .arg("wip") + .arg("--message") + .arg("preview custom type") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["ok"], Value::Bool(true)); + assert_eq!(v["commit_type"], Value::String("wip".into())); + assert_eq!( + v["message"], + Value::String("wip: preview custom type".into()) + ); +} + +#[test] +fn test_tag_publish_requires_confirmation() { + 
let temp_dir = setup_repo(); + + common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("tag") + .arg("--name") + .arg("v1.2.3") + .arg("--publish") + .assert() + .failure() + .stderr(predicates::str::contains( + "Publishing a tag requires --confirm-publish", + )); + + let tags = StdCommand::new("git") + .args(["tag", "--list", "v1.2.3"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to list tags"); + assert!( + String::from_utf8_lossy(&tags.stdout).trim().is_empty(), + "tag should not be created when publish confirmation is missing" + ); +} + +#[test] +fn test_tag_dry_run_json_supports_repo_path_without_chdir() { + let temp_dir = setup_repo(); + let runner_dir = tempdir().expect("Failed to create runner directory"); + + let assert = common::committy_cmd() + .current_dir(&runner_dir) + .arg("--non-interactive") + .arg("tag") + .arg("--repo-path") + .arg(temp_dir.path()) + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["command"], Value::String("tag".into())); + assert_eq!(v["ok"], Value::Bool(true)); +} diff --git a/tests/common/assert.rs b/tests/common/assert.rs new file mode 100644 index 0000000..e69de29 diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 06619e7..1a91898 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -1,3 +1,5 @@ +use assert_cmd::cargo::cargo_bin_cmd; +use assert_cmd::Command; use once_cell::sync::Lazy; use std::env; use std::sync::Once; @@ -25,3 +27,8 @@ pub fn setup_test_env() { env::set_var("HOME", TEST_HOME.path()); }); } + +#[allow(dead_code)] +pub fn committy_cmd() -> Command { + cargo_bin_cmd!("committy") +} diff --git a/tests/git_tests.rs b/tests/git_tests.rs index 783a836..c508db0 100644 --- a/tests/git_tests.rs +++ b/tests/git_tests.rs @@ -1,6 +1,5 @@ mod common; -use 
assert_cmd::Command; use predicates::prelude::*; use std::fs; use std::process::Command as StdCommand; @@ -81,7 +80,7 @@ fn test_commit_message_formatting() { .expect("Failed to stage test file"); // Test commit with scope and breaking change - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(&temp_dir) .env("RUST_LOG", "info") .arg("--non-interactive") @@ -118,7 +117,7 @@ fn test_unstaged_changes() { let test_file = temp_dir.path().join("test.txt"); fs::write(&test_file, "test content").expect("Failed to write test file"); - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -148,7 +147,7 @@ fn test_commit_without_git_config() { .output() .expect("Failed to stage test file"); - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(&temp_dir) .env("RUST_LOG", "off") .env("GIT_COMMITTER_NAME", "") @@ -181,7 +180,7 @@ fn test_commit_with_amend() { .expect("Failed to stage test file"); // Initial commit - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(&temp_dir) .env("RUST_LOG", "info") .arg("--non-interactive") @@ -202,7 +201,7 @@ fn test_commit_with_amend() { .expect("Failed to stage updated file"); // Amend commit with non-interactive mode - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(&temp_dir) .env("RUST_LOG", "info") .arg("--non-interactive") @@ -226,3 +225,224 @@ fn test_commit_with_amend() { assert_eq!(log_output.lines().count(), 1); assert!(log_output.contains("feat: Amended commit")); } + +#[test] +fn test_amend_non_interactive_without_staged_changes() { + let temp_dir = setup_git_repo(); + + let test_file = temp_dir.path().join("test.txt"); + fs::write(&test_file, "test 
content").expect("Failed to write test file"); + let _ = StdCommand::new("git") + .args(["add", "test.txt"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage test file"); + + common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("commit") + .arg("--type") + .arg("feat") + .arg("--message") + .arg("Initial commit") + .assert() + .success(); + + common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("amend") + .arg("--type") + .arg("fix") + .arg("--message") + .arg("Amended without staged changes") + .assert() + .success(); + + let git_log = StdCommand::new("git") + .args(["log", "--format=%s", "-n", "1"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to get git log"); + + let log_message = String::from_utf8_lossy(&git_log.stdout); + assert_eq!(log_message.lines().count(), 1); + assert!(log_message.contains("fix: Amended without staged changes")); +} + +#[test] +fn test_commit_amend_without_staged_changes_matches_amend_command() { + let temp_dir = setup_git_repo(); + + let test_file = temp_dir.path().join("test.txt"); + fs::write(&test_file, "test content").expect("Failed to write test file"); + let _ = StdCommand::new("git") + .args(["add", "test.txt"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage test file"); + + common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("commit") + .arg("--type") + .arg("feat") + .arg("--message") + .arg("Initial commit") + .assert() + .success(); + + common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("commit") + .arg("--amend") + .arg("--type") + .arg("fix") + .arg("--message") + .arg("Amended via commit flag") + .assert() + .success(); + + let git_log = StdCommand::new("git") + .args(["log", "--format=%s", "-n", "1"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to get git log"); + + let log_message = String::from_utf8_lossy(&git_log.stdout); + 
assert_eq!(log_message.lines().count(), 1); + assert!(log_message.contains("fix: Amended via commit flag")); +} + +#[test] +fn test_amend_dry_run_json_does_not_rewrite_commit() { + let temp_dir = setup_git_repo(); + + let test_file = temp_dir.path().join("test.txt"); + fs::write(&test_file, "test content").expect("Failed to write test file"); + let _ = StdCommand::new("git") + .args(["add", "test.txt"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage test file"); + + common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("commit") + .arg("--type") + .arg("feat") + .arg("--message") + .arg("Initial commit") + .assert() + .success(); + + let before = StdCommand::new("git") + .args(["rev-parse", "HEAD"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to get HEAD before amend"); + + let assert = common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("amend") + .arg("--type") + .arg("fix") + .arg("--message") + .arg("Preview amend") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: serde_json::Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["command"], serde_json::json!("amend")); + assert_eq!(v["ok"], serde_json::json!(true)); + assert_eq!(v["dry_run"], serde_json::json!(true)); + assert_eq!(v["message"], serde_json::json!("fix: Preview amend")); + + let after = StdCommand::new("git") + .args(["rev-parse", "HEAD"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to get HEAD after amend preview"); + + assert_eq!(before.stdout, after.stdout); +} + +#[test] +fn test_amend_respects_repo_commit_rules() { + let temp_dir = setup_git_repo(); + + fs::create_dir_all(temp_dir.path().join(".committy")).expect("Failed to create .committy"); + fs::write( + temp_dir.path().join(".committy/config.toml"), + r#"packages = [] + +[repository] +name = 
"amend-repo" +type = "single-package" + +[versioning] +strategy = "independent" + +[scopes] +auto_detect = false +require_scope_for_multi_package = false +allow_multiple_scopes = false + +[commit_rules] +require_body = true +"#, + ) + .expect("Failed to write config"); + + let test_file = temp_dir.path().join("test.txt"); + fs::write(&test_file, "test content").expect("Failed to write test file"); + let _ = StdCommand::new("git") + .args(["add", "test.txt"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage test file"); + + common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("commit") + .arg("--type") + .arg("feat") + .arg("--message") + .arg("Initial commit") + .arg("--long-message") + .arg("Initial body") + .assert() + .success(); + + let assert = common::committy_cmd() + .current_dir(&temp_dir) + .arg("--non-interactive") + .arg("amend") + .arg("--type") + .arg("fix") + .arg("--message") + .arg("Missing body") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .code(3); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: serde_json::Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["ok"], serde_json::json!(false)); + assert_eq!( + v["errors"][0], + serde_json::json!("Commit body is required by repository configuration") + ); +} diff --git a/tests/group_commit_tests.rs b/tests/group_commit_tests.rs index 05b3ed7..d73ee8d 100644 --- a/tests/group_commit_tests.rs +++ b/tests/group_commit_tests.rs @@ -1,7 +1,7 @@ mod common; -use assert_cmd::Command; use serde_json::Value; +use std::fs; use std::process::Command as StdCommand; use tempfile::tempdir; @@ -40,6 +40,28 @@ fn setup_repo() -> tempfile::TempDir { dir } +fn write_commit_rules_config(dir: &std::path::Path, body: &str) { + fs::create_dir_all(dir.join(".committy")).expect("Failed to create .committy"); + fs::write( + dir.join(".committy/config.toml"), + format!( + r#"packages = [] + 
+[repository] +name = "group-repo" +type = "single-package" + +[versioning] +strategy = "independent" + +[commit_rules] +{body} +"# + ), + ) + .expect("Failed to write commit rules config"); +} + #[test] fn test_group_commit_apply_without_auto_stage_only_staged_committed() { let temp_dir = setup_repo(); @@ -62,8 +84,7 @@ fn test_group_commit_apply_without_auto_stage_only_staged_committed() { std::fs::write(&code_file, "pub fn x() {}\n").unwrap(); // Apply without include-unstaged and without auto-stage - let assert = Command::cargo_bin("committy") - .unwrap() + let assert = common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -93,6 +114,33 @@ fn test_group_commit_apply_without_auto_stage_only_staged_committed() { assert!(!ls.status.success(), "unstaged file should not be tracked"); } +#[test] +fn test_group_commit_apply_with_push_requires_confirmation() { + let temp_dir = setup_repo(); + + let docs_file = temp_dir.path().join("docs/PUSH.md"); + std::fs::create_dir_all(docs_file.parent().unwrap()).unwrap(); + std::fs::write(&docs_file, "Push test\n").unwrap(); + let _ = StdCommand::new("git") + .args(["add", "docs/PUSH.md"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage PUSH.md"); + + common::committy_cmd() + .current_dir(&temp_dir) + .env("RUST_LOG", "off") + .arg("--non-interactive") + .arg("group-commit") + .arg("--mode") + .arg("apply") + .arg("--push") + .arg("--output") + .arg("json") + .assert() + .failure(); +} + #[test] fn test_group_commit_apply_with_push_sets_pushed_false_without_remote() { let temp_dir = setup_repo(); @@ -107,8 +155,7 @@ fn test_group_commit_apply_with_push_sets_pushed_false_without_remote() { .output() .expect("Failed to stage PUSH.md"); - let assert = Command::cargo_bin("committy") - .unwrap() + let assert = common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -116,6 +163,7 @@ fn 
test_group_commit_apply_with_push_sets_pushed_false_without_remote() { .arg("--mode") .arg("apply") .arg("--push") + .arg("--confirm-push") .arg("--output") .arg("json") .assert() @@ -124,8 +172,10 @@ fn test_group_commit_apply_with_push_sets_pushed_false_without_remote() { let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); let v: Value = serde_json::from_str(output.trim()).unwrap(); assert_eq!(v["mode"], Value::String("apply".into())); + assert_eq!(v["dry_run"], Value::Bool(false)); // No remote -> push should fail and be false assert_eq!(v["pushed"], Value::Bool(false)); + assert_eq!(v["ok"], Value::Bool(false)); } #[test] @@ -148,8 +198,7 @@ fn test_group_commit_plan_json_offline() { .output() .expect("Failed to stage files"); - let assert = Command::cargo_bin("committy") - .unwrap() + let assert = common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -166,6 +215,7 @@ fn test_group_commit_plan_json_offline() { assert_eq!(v["command"], Value::String("group-commit".into())); assert_eq!(v["mode"], Value::String("plan".into())); + assert_eq!(v["dry_run"], Value::Bool(true)); assert_eq!(v["ok"], Value::Bool(true)); let groups = v["groups"].as_array().expect("groups array"); @@ -203,6 +253,41 @@ fn test_group_commit_plan_json_offline() { ); } +#[test] +fn test_group_commit_plan_supports_repo_path_without_chdir() { + let temp_dir = setup_repo(); + let runner_dir = tempdir().expect("Failed to create runner directory"); + + let docs_file = temp_dir.path().join("docs/README.md"); + std::fs::create_dir_all(docs_file.parent().unwrap()).unwrap(); + std::fs::write(&docs_file, "# Docs\n").unwrap(); + + let _ = StdCommand::new("git") + .args(["add", "--all"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage files"); + + let assert = common::committy_cmd() + .current_dir(&runner_dir) + .env("RUST_LOG", "off") + .arg("--non-interactive") + .arg("group-commit") + .arg("--repo-path") + 
.arg(temp_dir.path()) + .arg("--mode") + .arg("plan") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["command"], Value::String("group-commit".into())); + assert_eq!(v["ok"], Value::Bool(true)); +} + #[test] fn test_group_commit_apply_auto_stage_creates_commits() { let temp_dir = setup_repo(); @@ -217,8 +302,7 @@ fn test_group_commit_apply_auto_stage_creates_commits() { std::fs::write(&code_file, "pub fn hello() {}\n").unwrap(); // Run apply with auto-stage - let assert = Command::cargo_bin("committy") - .unwrap() + let assert = common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -236,6 +320,7 @@ fn test_group_commit_apply_auto_stage_creates_commits() { let v: Value = serde_json::from_str(output.trim()).unwrap(); assert_eq!(v["mode"], Value::String("apply".into())); + assert_eq!(v["dry_run"], Value::Bool(false)); assert_eq!(v["ok"], Value::Bool(true)); let commits = v["commits"].as_array().expect("commits array"); @@ -260,3 +345,96 @@ fn test_group_commit_apply_auto_stage_creates_commits() { assert!(log_s.contains("update docs") || log_s.contains("docs:")); assert!(log_s.contains("misc maintenance") || log_s.contains("chore:")); } + +#[test] +fn test_group_commit_plan_reports_repo_rule_violations() { + let temp_dir = setup_repo(); + write_commit_rules_config(temp_dir.path(), "require_body = true"); + + let docs_file = temp_dir.path().join("docs/RULES.md"); + std::fs::create_dir_all(docs_file.parent().unwrap()).unwrap(); + std::fs::write(&docs_file, "rules\n").unwrap(); + + let _ = StdCommand::new("git") + .args(["add", "docs/RULES.md"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage RULES.md"); + + let assert = common::committy_cmd() + .current_dir(&temp_dir) + .env("RUST_LOG", "off") + .arg("--non-interactive") + 
.arg("group-commit") + .arg("--mode") + .arg("plan") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["ok"], Value::Bool(false)); + assert!(v["errors"].as_array().unwrap().iter().any(|entry| { + entry + .as_str() + .unwrap_or("") + .contains("Commit body is required by repository configuration") + })); + assert!(v["groups"][0]["issues"].as_array().is_some()); +} + +#[test] +fn test_group_commit_apply_refuses_invalid_repo_rule_messages() { + let temp_dir = setup_repo(); + write_commit_rules_config(temp_dir.path(), "require_body = true"); + + let docs_file = temp_dir.path().join("docs/APPLY.md"); + std::fs::create_dir_all(docs_file.parent().unwrap()).unwrap(); + std::fs::write(&docs_file, "apply rules\n").unwrap(); + + let _ = StdCommand::new("git") + .args(["add", "docs/APPLY.md"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to stage APPLY.md"); + + let before = StdCommand::new("git") + .args(["rev-list", "--count", "HEAD"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to count commits"); + + let assert = common::committy_cmd() + .current_dir(&temp_dir) + .env("RUST_LOG", "off") + .arg("--non-interactive") + .arg("group-commit") + .arg("--mode") + .arg("apply") + .arg("--output") + .arg("json") + .assert() + .success(); + + let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let v: Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["ok"], Value::Bool(false)); + assert_eq!(v["commits"][0]["ok"], Value::Bool(false)); + assert!(v["commits"][0]["error"] + .as_str() + .unwrap_or("") + .contains("Commit body is required by repository configuration")); + + let after = StdCommand::new("git") + .args(["rev-list", "--count", "HEAD"]) + .current_dir(&temp_dir) + .output() + .expect("Failed to count commits"); + + assert_eq!( + before.stdout, 
after.stdout, + "invalid group-commit messages must not create commits" + ); +} diff --git a/tests/integration_tests.rs b/tests/integration_tests.rs index f2f7f40..d450019 100644 --- a/tests/integration_tests.rs +++ b/tests/integration_tests.rs @@ -1,6 +1,5 @@ mod common; -use assert_cmd::Command; use predicates::prelude::*; use std::process::Command as StdCommand; use tempfile::tempdir; @@ -56,8 +55,7 @@ fn test_verbosity_quiet_suppresses_info_logs() { .expect("Failed to create commit"); // With -q, only errors should be logged; dry run should produce none - Command::cargo_bin("committy") - .unwrap() + common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -85,8 +83,7 @@ fn test_verbosity_v_shows_debug_logs() { .expect("Failed to create commit"); // With -v and --no-fetch, expect debug about skipping fetch due to flag - Command::cargo_bin("committy") - .unwrap() + common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -114,8 +111,7 @@ fn test_fetch_flag_no_fetch_skips_fetch_path() { .output() .expect("Failed to create commit"); - Command::cargo_bin("committy") - .unwrap() + common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -145,8 +141,7 @@ fn test_fetch_flag_fetch_attempts_fetch_path() { // With --fetch, we should log that we're fetching tags; since repo has no origin, // subsequent message may indicate skipping due to not found, but the "Fetching tags" info should appear - Command::cargo_bin("committy") - .unwrap() + common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -180,7 +175,7 @@ fn cleanup(temp_dir: tempfile::TempDir) { fn test_commit_command_with_valid_input() { let temp_dir = setup(); - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(&temp_dir) .env("RUST_LOG", "info") .arg("--non-interactive") @@ -218,8 
+213,7 @@ fn test_lint_json_exit_code_and_payload() { .output() .expect("Failed to create commit"); - let assert = Command::cargo_bin("committy") - .unwrap() + let assert = common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -233,7 +227,9 @@ fn test_lint_json_exit_code_and_payload() { let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); let v: serde_json::Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["command"], serde_json::json!("lint")); assert_eq!(v["ok"], serde_json::json!(false)); + assert_eq!(v["dry_run"], serde_json::json!(false)); assert_eq!(v["count"], serde_json::json!(1)); cleanup(temp_dir); @@ -250,8 +246,7 @@ fn test_tag_json_dry_run_output_non_interactive() { .output() .expect("Failed to create commit"); - let assert = Command::cargo_bin("committy") - .unwrap() + let assert = common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -266,8 +261,11 @@ fn test_tag_json_dry_run_output_non_interactive() { let output = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); let v: serde_json::Value = serde_json::from_str(output.trim()).unwrap(); + assert_eq!(v["command"], serde_json::json!("tag")); assert_eq!(v["ok"], serde_json::json!(true)); + assert_eq!(v["dry_run"], serde_json::json!(true)); assert_eq!(v["new_tag"], serde_json::json!("v0.1.0")); + assert_eq!(v["published"], serde_json::json!(false)); cleanup(temp_dir); } @@ -294,8 +292,7 @@ major_regex = '(?im)^(breaking change:|feat(?:\s*\([^)]*\))?!:)' "#; std::fs::write(&cfg_path, config_toml).unwrap(); - let assert = Command::cargo_bin("committy") - .unwrap() + let assert = common::committy_cmd() .current_dir(&temp_dir) .env("COMMITTY_CONFIG_DIR", &cfg_dir) .env("RUST_LOG", "off") @@ -329,8 +326,7 @@ fn test_tag_fix_default_is_patch() { .expect("Failed to create commit"); // Default config should treat fix as patch -> v0.0.1 - let assert = 
Command::cargo_bin("committy") - .unwrap() + let assert = common::committy_cmd() .current_dir(&temp_dir) .env("RUST_LOG", "off") .arg("--non-interactive") @@ -354,7 +350,7 @@ fn test_tag_fix_default_is_patch() { fn test_commit_command_with_auto_correction() { let temp_dir = setup(); - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(&temp_dir) .env("RUST_LOG", "info") .arg("--non-interactive") @@ -385,7 +381,7 @@ fn test_commit_command_with_auto_correction() { fn test_commit_command_with_invalid_input() { let temp_dir = setup(); - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(&temp_dir) .env("RUST_LOG", "info") .arg("--non-interactive") @@ -405,7 +401,7 @@ fn test_commit_command_with_invalid_input() { fn test_commit_with_breaking_change() { let temp_dir = setup(); - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(&temp_dir) .env("RUST_LOG", "info") .arg("--non-interactive") diff --git a/tests/lint_message_cli_tests.rs b/tests/lint_message_cli_tests.rs index 1222c9d..7eb5a77 100644 --- a/tests/lint_message_cli_tests.rs +++ b/tests/lint_message_cli_tests.rs @@ -1,10 +1,34 @@ -use assert_cmd::prelude::*; +mod common; + use predicates::prelude::*; -use std::process::Command; +use std::fs; +use tempfile::tempdir; + +fn write_commit_rules_config(dir: &std::path::Path, body: &str) { + fs::create_dir_all(dir.join(".committy")).expect("Failed to create .committy"); + fs::write( + dir.join(".committy/config.toml"), + format!( + r#"packages = [] + +[repository] +name = "lint-repo" +type = "single-package" + +[versioning] +strategy = "independent" + +[commit_rules] +{body} +"# + ), + ) + .expect("Failed to write config"); +} #[test] fn lint_message_valid_text() { - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.args([ "--non-interactive", 
"lint-message", @@ -18,7 +42,7 @@ fn lint_message_valid_text() { #[test] fn lint_message_invalid_text() { - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.args([ "--non-interactive", "lint-message", @@ -35,7 +59,7 @@ fn lint_message_invalid_text() { #[test] fn lint_message_valid_json() { - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.args([ "--non-interactive", "lint-message", @@ -47,13 +71,15 @@ fn lint_message_valid_json() { let output = cmd.assert().success().get_output().stdout.clone(); let s = String::from_utf8(output).unwrap(); let v: serde_json::Value = serde_json::from_str(&s).unwrap(); + assert_eq!(v["command"], "lint-message"); assert_eq!(v["ok"], true); + assert_eq!(v["dry_run"], false); assert_eq!(v["count"], 0); } #[test] fn lint_message_invalid_json() { - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.args([ "--non-interactive", "lint-message", @@ -70,6 +96,41 @@ fn lint_message_invalid_json() { let s = String::from_utf8(output).unwrap(); let v: serde_json::Value = serde_json::from_str(&s).unwrap(); + assert_eq!(v["command"], "lint-message"); assert_eq!(v["ok"], false); assert!(v["count"].as_u64().unwrap() >= 1); } + +#[test] +fn lint_message_uses_repo_commit_rules() { + common::setup_test_env(); + + let dir = tempdir().expect("Failed to create temp dir"); + write_commit_rules_config( + dir.path(), + r#"allowed_types = ["feat"] + +[[commit_rules.custom_types]] +name = "wip" +description = "Work in progress" +bump = "none""#, + ); + + let mut cmd = common::committy_cmd(); + cmd.current_dir(&dir).args([ + "--non-interactive", + "lint-message", + "--repo-path", + ".", + "--message", + "wip: checkpoint", + "--output", + "json", + ]); + + let output = cmd.assert().success().get_output().stdout.clone(); + let s = String::from_utf8(output).unwrap(); + let v: serde_json::Value = 
serde_json::from_str(&s).unwrap(); + assert_eq!(v["ok"], true); + assert_eq!(v["count"], 0); +} diff --git a/tests/machine_readable_cli_tests.rs b/tests/machine_readable_cli_tests.rs new file mode 100644 index 0000000..fede4cb --- /dev/null +++ b/tests/machine_readable_cli_tests.rs @@ -0,0 +1,272 @@ +mod common; + +use serde_json::Value; +use std::fs; +use tempfile::tempdir; + +fn write_root_cargo_package(dir: &std::path::Path, name: &str, version: &str) { + fs::write( + dir.join("Cargo.toml"), + format!( + r#"[package] +name = "{name}" +version = "{version}" +edition = "2021" +"# + ), + ) + .expect("Failed to write Cargo.toml"); +} + +fn write_nested_cargo_package(dir: &std::path::Path, name: &str, version: &str) { + fs::create_dir_all(dir).expect("Failed to create package directory"); + write_root_cargo_package(dir, name, version); +} + +fn write_single_package_config(dir: &std::path::Path, repo_name: &str, package_name: &str) { + fs::create_dir_all(dir.join(".committy")).expect("Failed to create .committy"); + fs::write( + dir.join(".committy/config.toml"), + format!( + r#"[repository] +name = "{repo_name}" +type = "multi-package" + +[versioning] +strategy = "independent" + +[[packages]] +name = "{package_name}" +type = "rust-cargo" +path = "." 
+version_file = "Cargo.toml" +version_field = "version" +"# + ), + ) + .expect("Failed to write .committy/config.toml"); +} + +fn write_sync_config(dir: &std::path::Path) { + fs::create_dir_all(dir.join(".committy")).expect("Failed to create .committy"); + fs::write( + dir.join(".committy/config.toml"), + r#"[repository] +name = "workspace" +type = "multi-package" + +[versioning] +strategy = "independent" + +[[packages]] +name = "pkg-a" +type = "rust-cargo" +path = "pkg-a" +version_file = "Cargo.toml" +version_field = "version" + +[[packages]] +name = "pkg-b" +type = "rust-cargo" +path = "pkg-b" +version_file = "Cargo.toml" +version_field = "version" +sync_with = "pkg-a" +"#, + ) + .expect("Failed to write sync config"); +} + +#[test] +fn test_init_dry_run_json_is_clean_stdout() { + common::setup_test_env(); + + let dir = tempdir().expect("Failed to create temp dir"); + write_root_cargo_package(dir.path(), "init-test", "0.1.0"); + + let assert = common::committy_cmd() + .current_dir(&dir) + .arg("--non-interactive") + .arg("init") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .success(); + + let stdout = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let payload: Value = serde_json::from_str(stdout.trim()).unwrap(); + + assert_eq!(payload["command"], Value::String("init".into())); + assert_eq!(payload["ok"], Value::Bool(true)); + assert_eq!(payload["dry_run"], Value::Bool(true)); + assert_eq!(payload["created"], Value::Bool(false)); + assert_eq!( + payload["config"]["repository"], + Value::String("my-repo".into()) + ); + assert!( + !dir.path().join(".committy/config.toml").exists(), + "dry-run must not create the config file" + ); +} + +#[test] +fn test_config_validate_json_is_clean_stdout() { + common::setup_test_env(); + + let dir = tempdir().expect("Failed to create temp dir"); + write_root_cargo_package(dir.path(), "config-test", "0.1.0"); + write_single_package_config(dir.path(), "config-repo", "config-test"); + + let 
assert = common::committy_cmd() + .current_dir(&dir) + .arg("--non-interactive") + .arg("config") + .arg("validate") + .arg("--repo-path") + .arg(".") + .arg("--output") + .arg("json") + .assert() + .success(); + + let stdout = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let payload: Value = serde_json::from_str(stdout.trim()).unwrap(); + + assert_eq!(payload["command"], Value::String("config".into())); + assert_eq!(payload["mode"], Value::String("validate".into())); + assert_eq!(payload["ok"], Value::Bool(true)); + assert_eq!(payload["config_found"], Value::Bool(true)); + assert_eq!( + payload["repository"]["name"], + Value::String("config-repo".into()) + ); +} + +#[test] +fn test_config_show_json_is_clean_stdout() { + common::setup_test_env(); + + let dir = tempdir().expect("Failed to create temp dir"); + write_root_cargo_package(dir.path(), "config-show", "0.1.0"); + write_single_package_config(dir.path(), "show-repo", "config-show"); + + let assert = common::committy_cmd() + .current_dir(&dir) + .arg("--non-interactive") + .arg("config") + .arg("show") + .arg("--repo-path") + .arg(".") + .arg("--output") + .arg("json") + .assert() + .success(); + + let stdout = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let payload: Value = serde_json::from_str(stdout.trim()).unwrap(); + + assert_eq!(payload["command"], Value::String("config".into())); + assert_eq!(payload["mode"], Value::String("show".into())); + assert_eq!(payload["ok"], Value::Bool(true)); + assert_eq!( + payload["repository_config"]["repository"]["name"], + Value::String("show-repo".into()) + ); + assert!(payload["user_config"].is_object()); +} + +#[test] +fn test_packages_list_json_is_clean_stdout() { + common::setup_test_env(); + + let dir = tempdir().expect("Failed to create temp dir"); + write_root_cargo_package(dir.path(), "packages-list", "0.1.0"); + + let assert = common::committy_cmd() + .current_dir(&dir) + .arg("--non-interactive") + .arg("packages") + 
.arg("list") + .arg("--output") + .arg("json") + .assert() + .success(); + + let stdout = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let payload: Value = serde_json::from_str(stdout.trim()).unwrap(); + + assert_eq!(payload["command"], Value::String("packages".into())); + assert_eq!(payload["mode"], Value::String("list".into())); + assert_eq!(payload["ok"], Value::Bool(true)); + assert_eq!(payload["total_packages"], Value::Number(1.into())); + assert_eq!( + payload["packages"][0]["name"], + Value::String("packages-list".into()) + ); +} + +#[test] +fn test_packages_status_check_json_failure_is_clean_stdout() { + common::setup_test_env(); + + let dir = tempdir().expect("Failed to create temp dir"); + write_nested_cargo_package(&dir.path().join("pkg-a"), "pkg-a", "1.0.0"); + write_nested_cargo_package(&dir.path().join("pkg-b"), "pkg-b", "0.9.0"); + write_sync_config(dir.path()); + + let assert = common::committy_cmd() + .current_dir(&dir) + .arg("--non-interactive") + .arg("packages") + .arg("status") + .arg("--check") + .arg("--output") + .arg("json") + .assert() + .failure(); + + let stdout = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let payload: Value = serde_json::from_str(stdout.trim()).unwrap(); + + assert_eq!(payload["command"], Value::String("packages".into())); + assert_eq!(payload["mode"], Value::String("status".into())); + assert_eq!(payload["ok"], Value::Bool(false)); + assert_eq!(payload["check"], Value::Bool(true)); + assert_eq!(payload["issues"].as_array().unwrap().len(), 1); +} + +#[test] +fn test_packages_sync_dry_run_json_is_clean_stdout() { + common::setup_test_env(); + + let dir = tempdir().expect("Failed to create temp dir"); + write_nested_cargo_package(&dir.path().join("pkg-a"), "pkg-a", "1.0.0"); + write_nested_cargo_package(&dir.path().join("pkg-b"), "pkg-b", "0.9.0"); + write_sync_config(dir.path()); + + let assert = common::committy_cmd() + .current_dir(&dir) + .arg("--non-interactive") + 
.arg("packages") + .arg("sync") + .arg("--dry-run") + .arg("--output") + .arg("json") + .assert() + .success(); + + let stdout = String::from_utf8(assert.get_output().stdout.clone()).unwrap(); + let payload: Value = serde_json::from_str(stdout.trim()).unwrap(); + + assert_eq!(payload["command"], Value::String("packages".into())); + assert_eq!(payload["mode"], Value::String("sync".into())); + assert_eq!(payload["ok"], Value::Bool(true)); + assert_eq!(payload["dry_run"], Value::Bool(true)); + assert_eq!(payload["operations"].as_array().unwrap().len(), 1); + assert_eq!( + payload["operations"][0]["name"], + Value::String("pkg-b".into()) + ); +} diff --git a/tests/main_tests.rs b/tests/main_tests.rs index 83b73c0..56efd1b 100644 --- a/tests/main_tests.rs +++ b/tests/main_tests.rs @@ -1,15 +1,8 @@ use chrono::{DateTime, Duration}; +use committy::clock::{current_time, should_check_update}; use std::env; use tempfile::TempDir; -// Mock the main functionality for testing -fn should_check_update( - last_check: DateTime, - current_time: DateTime, -) -> bool { - current_time - last_check >= Duration::days(1) -} - #[test] fn test_update_check_timing() { let current_time = DateTime::parse_from_rfc3339("2025-01-08T17:39:49+01:00").unwrap(); @@ -40,3 +33,14 @@ fn test_config_integration() { assert!(should_check_update(old_time, current_time)); } + +#[test] +fn test_clock_override_via_env() { + let expected = "2026-03-15T09:30:00+01:00"; + env::set_var("COMMITTY_FIXED_NOW", expected); + + let now = current_time().unwrap(); + + env::remove_var("COMMITTY_FIXED_NOW"); + assert_eq!(now.to_rfc3339(), expected); +} diff --git a/tests/repository_tests.rs b/tests/repository_tests.rs index 99e503d..42c29bc 100644 --- a/tests/repository_tests.rs +++ b/tests/repository_tests.rs @@ -4,6 +4,7 @@ use git2::Repository; use serial_test::serial; use std::env; use std::fs; +use std::path::PathBuf; use tempfile::TempDir; fn setup_test_repo() -> (TempDir, Repository) { @@ -39,6 +40,17 @@ fn 
setup_test_repo() -> (TempDir, Repository) { (temp_dir, repo) } +fn stable_current_dir() -> PathBuf { + match env::current_dir() { + Ok(path) => path, + Err(_) => { + let fallback = env::temp_dir(); + env::set_current_dir(&fallback).unwrap(); + fallback + } + } +} + #[test] #[serial] fn test_repository_discovery_from_subdirectory() -> Result<(), CliError> { @@ -58,7 +70,7 @@ fn test_repository_discovery_from_subdirectory() -> Result<(), CliError> { index.write().unwrap(); // Change to the deep subdirectory - let original_dir = env::current_dir().unwrap(); + let original_dir = stable_current_dir(); env::set_current_dir(temp_dir.path()).unwrap(); env::set_current_dir("src/deep/path").unwrap(); @@ -80,7 +92,7 @@ fn test_repository_not_found() { let temp_dir = TempDir::new().unwrap(); // Change to the temporary directory - let original_dir = env::current_dir().unwrap(); + let original_dir = stable_current_dir(); env::set_current_dir(temp_dir.path()).unwrap(); // Verify we get an appropriate error when there's no git repository @@ -135,7 +147,7 @@ fn test_staged_deleted_file() -> Result<(), CliError> { index.write().unwrap(); // Change to the repository directory to ensure we're in the right context - let original_dir = env::current_dir().unwrap(); + let original_dir = stable_current_dir(); env::set_current_dir(temp_dir.path()).unwrap(); // Verify that has_staged_changes detects the deleted file @@ -177,7 +189,7 @@ fn test_no_staged_changes() -> Result<(), CliError> { .unwrap(); // Change to the repository directory to ensure we're in the right context - let original_dir = env::current_dir().unwrap(); + let original_dir = stable_current_dir(); env::set_current_dir(temp_dir.path()).unwrap(); // Verify no staged changes are detected @@ -203,7 +215,7 @@ fn test_unstaged_changes_only() -> Result<(), CliError> { fs::write(&test_file, "test content").unwrap(); // Change to the repository directory - let original_dir = env::current_dir().unwrap(); + let original_dir = 
stable_current_dir(); env::set_current_dir(temp_dir.path()).unwrap(); // Verify no staged changes are detected @@ -229,7 +241,7 @@ fn test_repository_discovery_without_staged_changes() -> Result<(), CliError> { fs::create_dir_all(&subdir_path).unwrap(); // Change to the deep subdirectory - let original_dir = env::current_dir().unwrap(); + let original_dir = stable_current_dir(); env::set_current_dir(temp_dir.path()).unwrap(); env::set_current_dir("src/deep/path").unwrap(); @@ -263,7 +275,7 @@ fn test_commit_from_subdirectory() -> Result<(), CliError> { index.write().unwrap(); // Change to the deep subdirectory - let original_dir = env::current_dir().unwrap(); + let original_dir = stable_current_dir(); env::set_current_dir(temp_dir.path()).unwrap(); // Create a new repository object from the current directory @@ -285,7 +297,7 @@ fn test_commit_from_subdirectory() -> Result<(), CliError> { // Verify the commit was created with the correct message let head_commit = repo.head()?.peel_to_commit()?; - let head_message = head_commit.message().unwrap_or(""); + let head_message = head_commit.message().unwrap_or("").trim_end_matches('\n'); assert_eq!( head_message, commit_message, "Expected commit message '{commit_message}' but got '{head_message}'" diff --git a/tests/tag_tests.rs b/tests/tag_tests.rs index 405b3c0..e1ec6b4 100644 --- a/tests/tag_tests.rs +++ b/tests/tag_tests.rs @@ -1,4 +1,5 @@ -use assert_cmd::Command; +mod common; + use git2::{Repository, Signature}; use predicates::prelude::*; use std::fs; @@ -37,7 +38,7 @@ fn setup_test_repo() -> tempfile::TempDir { fn test_tag_with_message() { let dir = setup_test_repo(); - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(dir.path()) .arg("tag") .arg("--name") @@ -58,7 +59,7 @@ fn test_tag_with_message() { fn test_tag_without_message() { let dir = setup_test_repo(); - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = 
common::committy_cmd(); cmd.current_dir(dir.path()) .arg("tag") .arg("--name") @@ -119,7 +120,7 @@ fn test_pre_release_continues_from_highest_version() { } // Run the tag command in pre-release mode (should produce v10.0.0-beta.2) - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(dir.path()) .arg("--non-interactive") .arg("tag") @@ -179,7 +180,7 @@ fn test_beta_to_main_promotion() { } // Run tag command on main branch (should promote beta to stable v0.7.2) - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(dir.path()) .arg("--non-interactive") .arg("tag") @@ -205,7 +206,7 @@ fn test_tag_with_staged_changes() { index.add_path(std::path::Path::new("test.txt")).unwrap(); index.write().unwrap(); - let mut cmd = Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(dir.path()) .arg("tag") .arg("--name") diff --git a/tests/tag_version_bump_tests.rs b/tests/tag_version_bump_tests.rs index 6c22b5b..e95d6f9 100644 --- a/tests/tag_version_bump_tests.rs +++ b/tests/tag_version_bump_tests.rs @@ -1,3 +1,5 @@ +mod common; + use git2::{Repository, Signature}; use std::fs; use tempfile::tempdir; @@ -121,19 +123,31 @@ fn test_beta_increments_counter_not_version() { .unwrap(); // Run tag command in prerelease mode - let mut cmd = assert_cmd::Command::cargo_bin("committy").unwrap(); - cmd.current_dir(dir.path()) + let output = common::committy_cmd() + .current_dir(dir.path()) .arg("--non-interactive") .arg("tag") - .arg("--prerelease") - .arg("--dry-run") .arg("--no-fetch") - .arg("--output") - .arg("json"); + .arg("--publish") + .arg("--confirm-publish") + .arg("--prerelease") + .arg("--prerelease-suffix") + .arg("beta") + .output() + .expect("failed to execute committy tag prerelease"); + + if !output.status.success() { + eprintln!("stdout: {}", String::from_utf8_lossy(&output.stdout)); + eprintln!("stderr: {}", 
String::from_utf8_lossy(&output.stderr)); + } - let output = cmd.output().unwrap(); - let stdout = String::from_utf8(output.stdout).unwrap(); - println!("Output: {}", stdout); + assert!( + output.status.success(), + "committy tag prerelease exited with status {:?}", + output.status.code() + ); + + let stdout = String::from_utf8(output.stdout).expect("stdout should be valid UTF-8"); // Should produce v1.2.0-beta.2, not v1.3.0-beta.0 assert!( @@ -195,7 +209,7 @@ fn test_beta_with_breaking_change_increments_counter() { ) .unwrap(); - let mut cmd = assert_cmd::Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(dir.path()) .arg("--non-interactive") .arg("tag") @@ -256,7 +270,7 @@ fn test_first_beta_after_main_applies_bump() { ) .unwrap(); - let mut cmd = assert_cmd::Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(dir.path()) .arg("--non-interactive") .arg("tag") @@ -321,7 +335,7 @@ fn test_multiple_patches_on_beta() { ) .unwrap(); - let mut cmd = assert_cmd::Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(dir.path()) .arg("--non-interactive") .arg("tag") @@ -391,7 +405,7 @@ fn test_beta_catches_up_to_main() { ) .unwrap(); - let mut cmd = assert_cmd::Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(dir.path()) .arg("--non-interactive") .arg("tag") @@ -456,7 +470,7 @@ fn test_chore_on_beta_increments_counter() { ) .unwrap(); - let mut cmd = assert_cmd::Command::cargo_bin("committy").unwrap(); + let mut cmd = common::committy_cmd(); cmd.current_dir(dir.path()) .arg("--non-interactive") .arg("tag") diff --git a/tests/test_linter.rs b/tests/test_linter.rs index 1318e04..d70f312 100644 --- a/tests/test_linter.rs +++ b/tests/test_linter.rs @@ -1,5 +1,6 @@ use committy::linter::CommitLinter; use git2::{Repository, Signature}; +use std::fs; use tempfile::TempDir; mod common; @@ -49,6 +50,33 
@@ fn create_tag(repo: &Repository, commit_id: git2::Oid, tag_name: &str) { .unwrap(); } +fn create_lightweight_tag(repo: &Repository, commit_id: git2::Oid, tag_name: &str) { + let obj = repo.find_object(commit_id, None).unwrap(); + repo.tag_lightweight(tag_name, &obj, false).unwrap(); +} + +fn write_commit_rules_config(dir: &std::path::Path, body: &str) { + fs::create_dir_all(dir.join(".committy")).unwrap(); + fs::write( + dir.join(".committy/config.toml"), + format!( + r#"packages = [] + +[repository] +name = "lint-repo" +type = "single-package" + +[versioning] +strategy = "independent" + +[commit_rules] +{body} +"# + ), + ) + .unwrap(); +} + #[test] fn test_linter_with_tags() { common::setup_test_env(); @@ -80,6 +108,7 @@ fn test_linter_with_multiple_tags() { create_tag(&repo, commit1, "v0.1.0"); // Create more commits and another tag + create_commit(&repo, "invalid message before latest tag"); let commit2 = create_commit(&repo, "feat: another feature"); create_tag(&repo, commit2, "v0.2.0"); @@ -95,6 +124,29 @@ fn test_linter_with_multiple_tags() { assert!(issues[0].message.contains("invalid: wrong type")); } +#[test] +fn test_linter_with_lightweight_latest_tag() { + common::setup_test_env(); + let (temp_dir, repo) = setup_test_repo(); + + let commit1 = create_commit(&repo, "feat: initial commit"); + create_tag(&repo, commit1, "v0.1.0"); + + create_commit(&repo, "invalid message before lightweight tag"); + let commit2 = create_commit(&repo, "feat: release boundary"); + create_lightweight_tag(&repo, commit2, "v0.2.0"); + + create_commit(&repo, "fix: valid after tag"); + + let linter = CommitLinter::new(temp_dir.path().to_str().unwrap()).unwrap(); + let issues = linter.check_commits_since_last_tag().unwrap(); + + assert!( + issues.is_empty(), + "issues after latest lightweight tag should be ignored: {issues:?}" + ); +} + #[test] fn test_linter_with_no_tags() { common::setup_test_env(); @@ -143,3 +195,46 @@ fn test_linter_with_empty_repo() { 
assert!(issues.is_empty()); } + +#[test] +fn test_linter_respects_custom_commit_type_rules() { + common::setup_test_env(); + let (temp_dir, repo) = setup_test_repo(); + write_commit_rules_config( + temp_dir.path(), + r#"allowed_types = ["feat"] + +[[commit_rules.custom_types]] +name = "wip" +description = "Work in progress" +bump = "none""#, + ); + + create_commit(&repo, "wip: checkpoint"); + + let linter = CommitLinter::new(temp_dir.path().to_str().unwrap()).unwrap(); + let issues = linter.check_commits_since_last_tag().unwrap(); + + assert!( + issues.is_empty(), + "custom type should be accepted: {issues:?}" + ); +} + +#[test] +fn test_linter_respects_require_body_rule() { + common::setup_test_env(); + let (temp_dir, repo) = setup_test_repo(); + write_commit_rules_config(temp_dir.path(), "require_body = true"); + + create_commit(&repo, "feat: missing body"); + + let linter = CommitLinter::new(temp_dir.path().to_str().unwrap()).unwrap(); + let issues = linter.check_commits_since_last_tag().unwrap(); + + assert_eq!(issues.len(), 1); + assert_eq!( + issues[0].issue, + "Commit body is required by repository configuration" + ); +} diff --git a/tools/native-git-e2e/Dockerfile b/tools/native-git-e2e/Dockerfile new file mode 100644 index 0000000..fc52a60 --- /dev/null +++ b/tools/native-git-e2e/Dockerfile @@ -0,0 +1,21 @@ +FROM rust:stable-bookworm + +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + ca-certificates \ + git \ + gnupg \ + openssh-client \ + openssh-server \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /workspace + +COPY . 
/workspace + +RUN cargo build + +COPY tools/native-git-e2e/entrypoint.sh /usr/local/bin/committy-native-git-e2e +RUN chmod +x /usr/local/bin/committy-native-git-e2e + +ENTRYPOINT ["/usr/local/bin/committy-native-git-e2e"] diff --git a/tools/native-git-e2e/entrypoint.sh b/tools/native-git-e2e/entrypoint.sh new file mode 100755 index 0000000..6a8c086 --- /dev/null +++ b/tools/native-git-e2e/entrypoint.sh @@ -0,0 +1,209 @@ +#!/usr/bin/env bash +set -euo pipefail + +export HOME=/tmp/committy-e2e-home +export GNUPGHOME="$HOME/.gnupg" +export COMMITTY_NONINTERACTIVE=1 +export CI=1 + +committy_bin="/workspace/target/debug/committy" + +log() { + printf '[native-git-e2e] %s\n' "$1" +} + +run_git() { + local repo_path="$1" + shift + git -C "$repo_path" "$@" +} + +ensure_signing_config() { + mkdir -p "$GNUPGHOME" "$HOME/.ssh" + chmod 700 "$GNUPGHOME" "$HOME/.ssh" + + cat > /tmp/committy-e2e-gpg-batch <<'EOF' +%no-protection +Key-Type: RSA +Key-Length: 3072 +Subkey-Type: RSA +Subkey-Length: 3072 +Name-Real: Committy E2E +Name-Email: committy-e2e@example.com +Expire-Date: 0 +EOF + + gpg --batch --generate-key /tmp/committy-e2e-gpg-batch + + local fingerprint + fingerprint="$(gpg --batch --list-secret-keys --with-colons committy-e2e@example.com | awk -F: '/^fpr:/ { print $10; exit }')" + if [[ -z "$fingerprint" ]]; then + echo "failed to resolve generated signing key fingerprint" >&2 + exit 1 + fi + + printf '%s:6:\n' "$fingerprint" | gpg --import-ownertrust + + git config --global user.name "Committy E2E" + git config --global user.email "committy-e2e@example.com" + git config --global user.signingkey "$fingerprint" + git config --global commit.gpgsign true + git config --global tag.gpgsign true + git config --global gpg.program gpg + git config --global init.defaultBranch main +} + +create_remote_user() { + local user_name="$1" + local key_file="$2" + + useradd --create-home --shell /usr/bin/git-shell "$user_name" + mkdir -p "/home/$user_name/.ssh" "/home/$user_name/repos" + 
chmod 700 "/home/$user_name/.ssh" + cat "${key_file}.pub" > "/home/$user_name/.ssh/authorized_keys" + chmod 600 "/home/$user_name/.ssh/authorized_keys" + chown -R "$user_name:$user_name" "/home/$user_name/.ssh" "/home/$user_name/repos" +} + +ensure_ssh_server() { + ssh-keygen -A + + ssh-keygen -t ed25519 -N '' -f "$HOME/.ssh/alpha_key" >/dev/null + ssh-keygen -t ed25519 -N '' -f "$HOME/.ssh/beta_key" >/dev/null + + create_remote_user "gitalpha" "$HOME/.ssh/alpha_key" + create_remote_user "gitbeta" "$HOME/.ssh/beta_key" + + runuser -u gitalpha -- git init --bare /home/gitalpha/repos/alpha.git >/dev/null + runuser -u gitbeta -- git init --bare /home/gitbeta/repos/beta.git >/dev/null + + cat > "$HOME/.ssh/config" < /tmp/committy-sshd-config <<'EOF' +Port 2222 +ListenAddress 127.0.0.1 +HostKey /etc/ssh/ssh_host_ed25519_key +HostKey /etc/ssh/ssh_host_rsa_key +PasswordAuthentication no +PermitRootLogin no +PubkeyAuthentication yes +UsePAM no +PidFile /run/sshd_committy_e2e.pid +AuthorizedKeysFile .ssh/authorized_keys +Subsystem sftp internal-sftp +EOF + + mkdir -p /run/sshd + /usr/sbin/sshd -f /tmp/committy-sshd-config +} + +assert_signed_commit() { + local repo_path="$1" + run_git "$repo_path" verify-commit HEAD >/dev/null +} + +assert_signed_tag() { + local repo_path="$1" + local tag_name="$2" + run_git "$repo_path" tag -v "$tag_name" >/dev/null +} + +assert_remote_has_tag() { + local repo_path="$1" + local tag_name="$2" + run_git "$repo_path" ls-remote --tags origin | grep -q "refs/tags/${tag_name}$" +} + +exercise_repo() { + local repo_name="$1" + local remote_host="$2" + local worktree="/tmp/${repo_name}-worktree" + local remote_path + local tag_name="v0.1.0" + + case "$repo_name" in + alpha) remote_path="/home/gitalpha/repos/alpha.git" ;; + beta) remote_path="/home/gitbeta/repos/beta.git" ;; + *) + echo "unknown repo name: $repo_name" >&2 + exit 1 + ;; + esac + + rm -rf "$worktree" + mkdir -p "$worktree" + run_git "$worktree" init >/dev/null + run_git "$worktree" 
remote add origin "${remote_host}:${remote_path}" + + printf '%s\n' "hello from ${repo_name}" > "$worktree/README.md" + run_git "$worktree" add README.md + + "$committy_bin" --non-interactive commit \ + --repo-path "$worktree" \ + --type feat \ + --message "add ${repo_name} smoke coverage" + + assert_signed_commit "$worktree" + + "$committy_bin" --non-interactive tag \ + --repo-path "$worktree" \ + --name "$tag_name" \ + --publish \ + --confirm-publish + + assert_signed_tag "$worktree" "$tag_name" + assert_remote_has_tag "$worktree" "$tag_name" + + printf '%s\n' "follow-up for ${repo_name}" >> "$worktree/README.md" + run_git "$worktree" add README.md + + "$committy_bin" --non-interactive commit \ + --repo-path "$worktree" \ + --type fix \ + --message "prepare ${repo_name} fetch validation" + + "$committy_bin" --non-interactive tag \ + --repo-path "$worktree" \ + --fetch \ + --dry-run \ + --output json >/tmp/"${repo_name}"-fetch.json + + grep -q '"command":"tag"' /tmp/"${repo_name}"-fetch.json +} + +main() { + log "configuring signing environment" + ensure_signing_config + + log "starting local ssh server with two distinct identities" + ensure_ssh_server + + log "verifying signed commit/tag flow with first ssh identity" + exercise_repo "alpha" "git-alpha" + + log "verifying signed commit/tag flow with second ssh identity" + exercise_repo "beta" "git-beta" + + log "native git signing and multi-identity ssh verification passed" +} + +main "$@" From 0a1a61b1b0bb578083342562b51798691190b32b Mon Sep 17 00:00:00 2001 From: "Arnaud (Martient) Leherpeur" Date: Wed, 18 Mar 2026 18:44:23 +0100 Subject: [PATCH 2/2] chore: bump version to 1.7.0-beta.0 --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 5c3cd6f..c2eb7dc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "committy" -version = "1.5.10" +version = "1.7.0-beta.0" authors = ["Arnaud (Martient) Leherpeur "] edition = "2021" description = 
"🚀 Generate clear, concise, and structured commit messages effortlessly"