diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..449cc8c
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,19 @@
+# Editor configuration
+root = true
+
+[*]
+charset = utf-8
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+[*.go]
+indent_style = tab
+tab_width = 8
+[Makefile]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
diff --git a/.githooks/pre-commit b/.githooks/pre-commit
new file mode 100755
index 0000000..e80707c
--- /dev/null
+++ b/.githooks/pre-commit
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+echo "Running gofmt check..."
+make fmt-check
+
+echo "Running golangci-lint..."
+if ! command -v golangci-lint >/dev/null 2>&1; then
+  echo "golangci-lint not found. Install from https://golangci-lint.run/ before committing." >&2
+  exit 1
+fi
+make lint
+
+echo "Running go tests..."
+make test
+
+echo "Regenerating docs to ensure sync..."
+BIN="${MARKDOWN_TRANSCLUSION_BIN:-markdown-transclusion}"
+if ! command -v "$BIN" >/dev/null 2>&1; then
+  echo "Required '$BIN' not found. Install it or set MARKDOWN_TRANSCLUSION_BIN." >&2
+  exit 1
+fi
+# Snapshot worktree state so we only abort the commit when docs generation
+# itself changed or created files — not when unrelated unstaged edits exist.
+before="$(git status --porcelain)"
+MARKDOWN_TRANSCLUSION_BIN="$BIN" make docs
+after="$(git status --porcelain)"
+if [[ "$before" != "$after" ]]; then
+  echo "Docs changed during pre-commit. Stage the updates and re-run commit." >&2
+  git --no-pager diff --stat
+  exit 1
+fi
+
+echo "Pre-commit checks passed."
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 0000000..2170806
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,38 @@
+name: Docs Pipeline
+
+'on':
+  push:
+    branches: [main]
+  pull_request:
+permissions:
+  contents: read
+
+jobs:
+  docs:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Set up Go
+        uses: actions/setup-go@v5
+        with:
+          go-version-file: go.mod
+      - name: Set up Node
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+      - name: Install markdown-transclusion CLI
+        run: npm install -g markdown-transclusion@^1
+      # Alternatively:
+      # - name: Generate docs
+      #   run: npx --yes markdown-transclusion --version && make docs
+      #   env:
+      #     MARKDOWN_TRANSCLUSION_BIN: npx --yes markdown-transclusion
+      - name: Generate docs
+        env:
+          MARKDOWN_TRANSCLUSION_BIN: markdown-transclusion
+        run: make docs
+      - name: Ensure docs are up to date
+        run: git diff --stat --exit-code
+      - name: Run docs tests
+        run: make docs-test
diff --git a/.github/workflows/quality.yml b/.github/workflows/quality.yml
new file mode 100644
index 0000000..601e611
--- /dev/null
+++ b/.github/workflows/quality.yml
@@ -0,0 +1,58 @@
+name: Quality Checks
+
+on:
+  push:
+    branches: [main, docs-release-pipeline]
+  pull_request:
+
+permissions:
+  contents: read
+
+jobs:
+  quality:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Set up Go
+        uses: actions/setup-go@v5
+        with:
+          go-version: '1.25'
+
+      # Pin the Node runtime instead of relying on whatever the runner image
+      # happens to ship, keeping this workflow consistent with docs.yml.
+      - name: Set up Node
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+
+      - name: Install markdown-transclusion
+        run: npm install -g markdown-transclusion
+
+      - name: gofmt check
+        run: make fmt-check
+
+      - name: Run golangci-lint
+        uses: golangci/golangci-lint-action@v3
+        with:
+          version: v1.59.1
+          install-mode: binary
+          args: ./...
+ env: + GOTOOLCHAIN: go1.22.7 + + - name: Go tests + run: make test + + - name: Generate docs + run: make docs + + - name: Verify docs + run: make docs-verify + + - name: Ensure docs committed + run: | + git status --porcelain + git diff --stat + git diff --exit-code diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 0000000..1ddd1ed --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,23 @@ +run: + timeout: 5m + tests: true +linters: + enable: + - govet + - staticcheck + - gofmt + - goimports + - revive + - errcheck + - ineffassign + - gosec +issues: + exclude-use-default: false + +linters-settings: + gofmt: + simplify: true + goimports: + local-prefixes: github.com/flyingrobots/hubless + revive: + ignore-generated-header: true diff --git a/@hubless/README.md b/@hubless/README.md new file mode 100644 index 0000000..744d87b --- /dev/null +++ b/@hubless/README.md @@ -0,0 +1,15 @@ +# Hubless Planning Data + +This directory holds the human-authored JSON records plus generated markdown artifacts derived from them. + +- `schema/` – JSON schemas (hand maintained). +- `roadmap/` – milestone & feature data. + - `*.json` – source of truth (hand maintained). + - `templates/` – Markdown templates that reference shared components (edit these when layout changes). + - `generated/` – Output Markdown rendered from templates (do not edit, regenerated via `make docs`). +- `issues/` – stories & tasks following the task lifecycle. + - `tasks/*.json` and `stories/*.json` – source of truth (hand maintained). + - `templates/` – Markdown templates to document task/story tables (edit as needed). + - `generated/` – Output Markdown rendered from templates (`tasks.md`, `archive.md`, etc.). Do not edit by hand. + +Run `make docs` from the repository root to regenerate anything in `generated/` after changing source JSON or templates. 
diff --git a/@hubless/issues/generated/archive.md b/@hubless/issues/generated/archive.md new file mode 100644 index 0000000..9d9fd4e --- /dev/null +++ b/@hubless/issues/generated/archive.md @@ -0,0 +1,17 @@ +# Hubless Archive + +> Generated overview of completed work. Regenerate with `make docs` after updating story or task JSON. + +## Completed Stories + + +| ID | Title | Completed Status | +| --- | --- | --- | +| — | — | — | + +## Completed Tasks + + +| ID | Title | Completed On | Badges | +| --- | --- | --- | --- | +| hubless/m0/task/0005 | Evaluate markdown component library | 2025-09-19 | Tested, Documented, Shipped | \ No newline at end of file diff --git a/@hubless/issues/tasks.md b/@hubless/issues/generated/tasks.md similarity index 62% rename from @hubless/issues/tasks.md rename to @hubless/issues/generated/tasks.md index a52d60c..09143b0 100644 --- a/@hubless/issues/tasks.md +++ b/@hubless/issues/generated/tasks.md @@ -1,15 +1,26 @@ # Hubless Tasks -> Source of truth: individual JSON files in `@hubless/issues/tasks/`. Task IDs follow `{project}/{milestone}/{type}/{number}`.\ -> Regenerate this Markdown whenever tasks change (manual for now). +> Source of truth: individual JSON files in `@hubless/issues/tasks/`. Task IDs follow `{project}/{milestone}/{type}/{number}`.\\ +> Regenerate this Markdown with `make docs` after updating task JSON files. 
-| ID | Title | Status | Owner | Labels | Badges | Updated | -| ------------------------------------------------------- | --------------------------------------- | ------- | ------------ | ------------------------ | ------ | ---------- | -| [hubless/m0/task/0001](tasks/hubless-m0-task-0001.json) | Port progress updater from Python to Go | PLANNED | _unassigned_ | m0-foundations, prog | — | — | -| [hubless/m0/task/0004](tasks/hubless-m0-task-0004.json) | Structure @hubless planning artifacts | STARTED | _unassigned_ | m0-foundations, planning | — | 2025-09-18 | -| [hubless/m1/task/0002](tasks/hubless-m1-task-0002.json) | Introduce Fang-based CLI skeleton | PLANNED | _unassigned_ | m1-cli, cli | — | — | -| [hubless/m1/task/0003](tasks/hubless-m1-task-0003.json) | Implement Git event store adapter | PLANNED | _unassigned_ | m1-cli, event-store | — | — | -| [hubless/m1/task/0005](tasks/hubless-m1-task-0005.json) | Prototype Bubbletea TUI and Fang CLI wireframes | STARTED | _unassigned_ | m1-cli, tui, cli | — | 2025-09-19 | + +| ID | Title | Status | Owner | Labels | Badges | Updated | +| --- | --- | --- | --- | --- | --- | --- | +| [hubless/m0/task/0001](tasks/hubless-m0-task-0001.json) | Port progress updater from Python to Go | PLANNED | _unassigned_ | m0-foundations, prog | — | — | +| [hubless/m0/task/0004](tasks/hubless-m0-task-0004.json) | Structure @hubless planning artifacts | STARTED | _unassigned_ | m0-foundations, planning | — | 2025-09-18 | +| [hubless/m0/task/0005](tasks/hubless-m0-task-0005.json) | Evaluate markdown component library | DONE | _unassigned_ | m0-foundations, docs, automation | Tested, Documented, Shipped | 2025-09-19 | +| [hubless/m1/task/0002](tasks/hubless-m1-task-0002.json) | Introduce Fang-based CLI skeleton | PLANNED | _unassigned_ | m1-cli, cli | — | — | +| [hubless/m1/task/0003](tasks/hubless-m1-task-0003.json) | Implement Git event store adapter | PLANNED | _unassigned_ | m1-cli, event-store | — | — | +| 
[hubless/m1/task/0005](tasks/hubless-m1-task-0005.json) | Prototype Bubbletea TUI and Fang CLI wireframes with mocked data | STARTED | _unassigned_ | m1-cli, tui, cli | — | — | + +## Status Breakdown + + +| Status | Count | +| --- | --- | +| DONE | 1 | +| STARTED | 2 | +| PLANNED | 3 | ## Task Dependency Graph @@ -50,4 +61,4 @@ {"at": "YYYY-MM-DDThh:mm:ssZ", "author": "name", "body": "Short update."} ] } -``` +``` \ No newline at end of file diff --git a/@hubless/issues/stories/hubless-story-0007.json b/@hubless/issues/stories/hubless-story-0007.json new file mode 100644 index 0000000..8962c3e --- /dev/null +++ b/@hubless/issues/stories/hubless-story-0007.json @@ -0,0 +1,22 @@ +{ + "id": "hubless/story/0007", + "title": "As a documentarian I can compose docs from reusable components", + "status": "PLANNED", + "persona": "Documentarian", + "need": "maintain consistent documentation without manual duplication", + "outcome": "Docs are assembled from shared Markdown snippets using automation integrated with Hubless tooling", + "acceptance_criteria": [ + "Component snippets library exists", + "At least one doc (e.g., PRD) generated from components", + "Build command integrates snippet assembly" + ], + "features": [ + "hubless/feature/docs-components" + ], + "dependencies": [ + "hubless/story/0001" + ], + "tasks": [ + "hubless/m0/task/0005" + ] +} diff --git a/@hubless/issues/tasks.archive.md b/@hubless/issues/tasks.archive.md index b55325c..d6bf4bb 100644 --- a/@hubless/issues/tasks.archive.md +++ b/@hubless/issues/tasks.archive.md @@ -4,4 +4,3 @@ Move completed tasks here once they have badges **Tested**, **Documented**, **Sh | ID | Title | Completed | Badges | Notes | |----|-------|-----------|--------|-------| - diff --git a/@hubless/issues/tasks/hubless-m0-task-0005.json b/@hubless/issues/tasks/hubless-m0-task-0005.json new file mode 100644 index 0000000..613252e --- /dev/null +++ b/@hubless/issues/tasks/hubless-m0-task-0005.json @@ -0,0 +1,88 @@ +{ + "id": 
"hubless/m0/task/0005", + "title": "Evaluate markdown component library", + "status": "DONE", + "owner": null, + "description": "Assess the existing markdown-transclusion project and plan integration or Go-based alternative for reusable documentation components.", + "labels": [ + "m0-foundations", + "docs", + "automation" + ], + "links": [ + { + "type": "repo", + "url": "https://github.com/flyingrobots/markdown-transclusion", + "label": "markdown-transclusion" + }, + { + "type": "doc", + "url": "../../docs/PRD.md", + "label": "PRD" + }, + { + "type": "doc", + "url": "../../docs/TechSpec.md", + "label": "Tech spec" + } + ], + "required_inputs": [ + { + "resource": "https://github.com/flyingrobots/markdown-transclusion", + "exclusivity": "read-only", + "notes": "Review existing snippet tooling." + }, + { + "resource": "../../docs/PRD.md", + "exclusivity": "read-only", + "notes": "Identify repeated content candidates." + }, + { + "resource": "../../@hubless/schema", + "exclusivity": "read-only", + "notes": "Ensure schema-driven docs compatible with snippet outputs." + } + ], + "expected_outputs": [ + { + "resource": "../../docs/reference/docs-components-plan.md", + "notes": "Plan outlining snippet integration approach." + }, + { + "resource": "../../@hubless/roadmap/features/hubless-feature-docs-components.json", + "notes": "Feature status updated with findings." + } + ], + "expertise": [ + "Documentation tooling", + "Go", + "Automation pipelines" + ], + "dependencies": [ + "hubless/m0/task/0004" + ], + "badges": [ + "Tested", + "Documented", + "Shipped" + ], + "created_at": "2025-09-18", + "updated_at": "2025-09-19", + "notes": [ + { + "at": "2025-09-19T17:20:00Z", + "author": "codex", + "body": "Integrated markdown-transclusion pipeline, added reusable snippets under docs/components, and documented make docs workflow." 
+ }, + { + "at": "2025-09-19T18:05:00Z", + "author": "codex", + "body": "Reorganized @hubless directories, added progress/dependency/status components, wired them into PRD/TechSpec, and added generator unit tests." + }, + { + "at": "2025-09-19T18:25:00Z", + "author": "codex", + "body": "Parameterised dependency graph output and published archived rollups with CI enforcement." + } + ] +} diff --git a/@hubless/issues/templates/archive.md b/@hubless/issues/templates/archive.md new file mode 100644 index 0000000..2590dee --- /dev/null +++ b/@hubless/issues/templates/archive.md @@ -0,0 +1,11 @@ +# Hubless Archive + +> Generated overview of completed work. Regenerate with `make docs` after updating story or task JSON. + +## Completed Stories + +![[docs/components/issues/archived-stories.md]] + +## Completed Tasks + +![[docs/components/issues/archived-tasks.md]] diff --git a/@hubless/issues/templates/tasks.md b/@hubless/issues/templates/tasks.md new file mode 100644 index 0000000..f463da8 --- /dev/null +++ b/@hubless/issues/templates/tasks.md @@ -0,0 +1,50 @@ +# Hubless Tasks + +> Source of truth: individual JSON files in `@hubless/issues/tasks/`. Task IDs follow `{project}/{milestone}/{type}/{number}`.\\ +> Regenerate this Markdown with `make docs` after updating task JSON files. + +![[docs/components/issues/tasks-table.md]] + +## Status Breakdown + +![[docs/components/issues/status-summary.md]] + +## Task Dependency Graph + +- Keep task dependencies in-sync with the JSON files. +- A future automation will render the DAG and feed the frontier worker pool. + +## How to Update + +1. Add or edit a JSON file under `@hubless/issues/tasks/` (see `../schema/task.schema.json`). +2. Do not edit generated tables. Update the JSON under `@hubless/issues/tasks/` and re-run `make docs`. +3. Once a task is `DONE` and has badges **Tested**, **Documented**, **Shipped**, move the JSON file to `tasks.archive/` and record it in `tasks.archive.md`. 
+## Anatomy of a Task + +```json +{ + "id": "hubless/mX/task/0000", + "title": "One-line summary", + "status": "PLANNED | STARTED | BLOCKED | DONE", + "owner": "github-handle or null", + "description": "Paragraph with context and acceptance criteria.", + "labels": ["epic", "area"], + "links": [ + {"type": "doc", "url": "../../docs/...", "label": "Related spec"} + ], + "required_inputs": [ + {"resource": "../../path-or-url", "exclusivity": "read-only", "notes": "Constraints."} + ], + "expected_outputs": [ + {"resource": "../../artifact", "notes": "Acceptance hints."} + ], + "expertise": ["Go", "Charmbracelet"], + "dependencies": ["hubless/mX/task/0000"], + "badges": ["Tested", "Documented", "Shipped"], + "created_at": "YYYY-MM-DD", + "updated_at": "YYYY-MM-DD or null", + "notes": [ + {"at": "YYYY-MM-DDThh:mm:ssZ", "author": "name", "body": "Short update."} + ] +} +``` diff --git a/@hubless/roadmap/README.md b/@hubless/roadmap/README.md deleted file mode 100644 index 0a5bedb..0000000 --- a/@hubless/roadmap/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# Hubless Roadmap Data - -Structured roadmap data lives alongside JSON schemas so automation can generate schedules, dependency graphs, and dashboards. - -## Milestones -JSON records under `milestones/` follow `@hubless/schema/milestone.schema.json`. 
- -| ID | Title | Status | -|----|-------|--------| -| [hubless/milestone/m0-foundations](milestones/hubless-milestone-m0-foundations.json) | Repository foundations | IN_PROGRESS | -| [hubless/milestone/m0-5-cli-proof](milestones/hubless-milestone-m0-5-cli-proof.json) | CLI proof of concept | PLANNED | -| [hubless/milestone/m1-mvp](milestones/hubless-milestone-m1-mvp.json) | MVP release | PLANNED | -| [hubless/milestone/m1-5-enhancements](milestones/hubless-milestone-m1-5-enhancements.json) | Filters and activity enhancements | PLANNED | -| [hubless/milestone/m2-github-sync](milestones/hubless-milestone-m2-github-sync.json) | GitHub synchronization | PLANNED | -| [hubless/milestone/m3-ide](milestones/hubless-milestone-m3-ide.json) | IDE integrations | PLANNED | - -## Features -Feature records under `features/` follow `@hubless/schema/feature.schema.json`. - -| ID | Title | Status | -|----|-------|--------| -| [hubless/feature/repo-foundations](features/hubless-feature-repo-foundations.json) | Repository foundations and automation | IN_PROGRESS | -| [hubless/feature/event-store-cli](features/hubless-feature-event-store-cli.json) | Event store and CLI foundations | PLANNED | -| [hubless/feature/tui-experience](features/hubless-feature-tui-experience.json) | Magit-grade TUI experience | PLANNED | -| [hubless/feature/git-sync](features/hubless-feature-git-sync.json) | Robust Git synchronization | PLANNED | -| [hubless/feature/github-projection](features/hubless-feature-github-projection.json) | GitHub projection integration | PLANNED | -| [hubless/feature/ide-integration](features/hubless-feature-ide-integration.json) | Editor integrations via LSP | PLANNED | - -## Stories -Stories reside in `../issues/stories/` following `@hubless/schema/story.schema.json`. 
- -| ID | Title | Status | -|----|-------|--------| -| [hubless/story/0001](../issues/stories/hubless-story-0001.json) | As a maintainer I have documented workflows and automation | IN_PROGRESS | -| [hubless/story/0002](../issues/stories/hubless-story-0002.json) | As a developer I can manage issues via CLI commands | PLANNED | -| [hubless/story/0003](../issues/stories/hubless-story-0003.json) | As a developer I can browse and update work in the TUI | PLANNED | -| [hubless/story/0004](../issues/stories/hubless-story-0004.json) | As a contributor I can sync Hubless work via Git remotes | PLANNED | -| [hubless/story/0005](../issues/stories/hubless-story-0005.json) | As a maintainer I can mirror events to GitHub | PLANNED | -| [hubless/story/0006](../issues/stories/hubless-story-0006.json) | As a developer I can interact with Hubless from my IDE | PLANNED | - -Keep these tables in sync with the JSON records. Automation will eventually consume the JSON directly to render dependency graphs and schedule projections. 
diff --git a/@hubless/roadmap/features/hubless-feature-docs-components.json b/@hubless/roadmap/features/hubless-feature-docs-components.json new file mode 100644 index 0000000..9a65dfc --- /dev/null +++ b/@hubless/roadmap/features/hubless-feature-docs-components.json @@ -0,0 +1,23 @@ +{ + "id": "hubless/feature/docs-components", + "title": "Markdown component library integration", + "status": "PLANNED", + "description": "Reuse or recreate markdown-transclusion workflows to assemble documentation from reusable components.", + "value": "Creates reusable docs building blocks so specs stay consistent and automation can regenerate outputs quickly.", + "metrics": [ + "Docs assembled from components in CI", + "At least three docs share common snippets" + ], + "milestones": [ + "hubless/milestone/m0-foundations" + ], + "stories": [ + "hubless/story/0007" + ], + "dependencies": [ + "hubless/feature/repo-foundations" + ], + "tasks": [ + "hubless/m0/task/0005" + ] +} diff --git a/@hubless/roadmap/generated/README.md b/@hubless/roadmap/generated/README.md new file mode 100644 index 0000000..4643346 --- /dev/null +++ b/@hubless/roadmap/generated/README.md @@ -0,0 +1,191 @@ +# Hubless Roadmap Data + +Structured roadmap data lives alongside JSON schemas so automation can generate schedules, dependency graphs, and dashboards. + +> Regenerate this document with `make docs` after updating roadmap JSON. 
+ +## Snapshot + + +| Artifact | Progress | Done | Total | +| --- | --- | --- | --- | +| Milestones | [----------] 0% | 0 | 6 | +| Features | [----------] 0% | 0 | 7 | +| Stories | [----------] 0% | 0 | 7 | +| Tasks | [##--------] 17% | 1 | 6 | + +## Dependencies + + +### Milestones + +| ID | Depends On | +|----|-------------| +| [hubless/milestone/m0-5-cli-proof](milestones/hubless-milestone-m0-5-cli-proof.json) | hubless/milestone/m0-foundations | +| [hubless/milestone/m1-5-enhancements](milestones/hubless-milestone-m1-5-enhancements.json) | hubless/milestone/m1-mvp | +| [hubless/milestone/m1-mvp](milestones/hubless-milestone-m1-mvp.json) | hubless/milestone/m0-5-cli-proof | +| [hubless/milestone/m2-github-sync](milestones/hubless-milestone-m2-github-sync.json) | hubless/milestone/m1-mvp | +| [hubless/milestone/m3-ide](milestones/hubless-milestone-m3-ide.json) | hubless/milestone/m2-github-sync | + +### Features + +| ID | Depends On | +|----|-------------| +| [hubless/feature/docs-components](features/hubless-feature-docs-components.json) | hubless/feature/repo-foundations | +| [hubless/feature/event-store-cli](features/hubless-feature-event-store-cli.json) | hubless/feature/repo-foundations | +| [hubless/feature/git-sync](features/hubless-feature-git-sync.json) | hubless/feature/event-store-cli | +| [hubless/feature/github-projection](features/hubless-feature-github-projection.json) | hubless/feature/git-sync | +| [hubless/feature/ide-integration](features/hubless-feature-ide-integration.json) | hubless/feature/tui-experience, hubless/feature/git-sync | +| [hubless/feature/tui-experience](features/hubless-feature-tui-experience.json) | hubless/feature/event-store-cli | + +### Stories + +| ID | Depends On | +|----|-------------| +| [hubless/story/0002](../issues/stories/hubless-story-0002.json) | hubless/story/0001 | +| [hubless/story/0003](../issues/stories/hubless-story-0003.json) | hubless/story/0002 | +| 
[hubless/story/0004](../issues/stories/hubless-story-0004.json) | hubless/story/0002 | +| [hubless/story/0005](../issues/stories/hubless-story-0005.json) | hubless/story/0004 | +| [hubless/story/0006](../issues/stories/hubless-story-0006.json) | hubless/story/0004, hubless/story/0003 | +| [hubless/story/0007](../issues/stories/hubless-story-0007.json) | hubless/story/0001 | + +### Tasks + +| ID | Depends On | +|----|-------------| +| [hubless/m0/task/0004](tasks/hubless-m0-task-0004.json) | hubless/m0/task/0001 | +| [hubless/m0/task/0005](tasks/hubless-m0-task-0005.json) | hubless/m0/task/0004 | +| [hubless/m1/task/0002](tasks/hubless-m1-task-0002.json) | hubless/m0/task/0001 | +| [hubless/m1/task/0003](tasks/hubless-m1-task-0003.json) | hubless/m1/task/0002 | + +## Dependency Graph + + +```mermaid +graph LR + n0["Milestone\\nCLI proof of concept"] + n1["Milestone\\nRepository foundations"] + n10["Feature\\nEditor integrations via LSP"] + n11["Feature\\nRepository foundations and automation"] + n12["Feature\\nMagit-grade TUI experience"] + n13["Story\\nAs a maintainer I have documented workflows and automation"] + n14["Story\\nAs a developer I can manage issues via CLI commands"] + n15["Story\\nAs a developer I can browse and update work in the TUI"] + n16["Story\\nAs a contributor I can sync Hubless work via Git remotes"] + n17["Story\\nAs a maintainer I can mirror events to GitHub"] + n18["Story\\nAs a developer I can interact with Hubless from my IDE"] + n19["Story\\nAs a documentarian I can compose docs from reusable components"] + n2["Milestone\\nFilters and activity enhancements"] + n20["Task\\nPort progress updater from Python to Go"] + n21["Task\\nStructure @hubless planning artifacts"] + n22["Task\\nEvaluate markdown component library"] + n23["Task\\nIntroduce Fang-based CLI skeleton"] + n24["Task\\nImplement Git event store adapter"] + n25["Task\\nPrototype Bubbletea TUI and Fang CLI wireframes with mocked data"] + n3["Milestone\\nMVP release"] + 
n4["Milestone\\nGitHub synchronization"] + n5["Milestone\\nIDE integrations"] + n6["Feature\\nMarkdown component library integration"] + n7["Feature\\nEvent store and CLI foundations"] + n8["Feature\\nRobust Git synchronization"] + n9["Feature\\nGitHub projection integration"] + classDef feature fill:#6B9F7F,stroke:#2C5F2D,color:#F4F1E8,stroke-width:1px; + classDef milestone fill:#2C5F2D,stroke:#C86E3B,color:#F4F1E8,stroke-width:1px; + classDef story fill:#E9B872,stroke:#C86E3B,color:#0E1111,stroke-width:1px; + classDef task fill:#C86E3B,stroke:#2C5F2D,color:#F4F1E8,stroke-width:1px; + class n0 milestone; + class n1 milestone; + class n10 feature; + class n11 feature; + class n12 feature; + class n13 story; + class n14 story; + class n15 story; + class n16 story; + class n17 story; + class n18 story; + class n19 story; + class n2 milestone; + class n20 task; + class n21 task; + class n22 task; + class n23 task; + class n24 task; + class n25 task; + class n3 milestone; + class n4 milestone; + class n5 milestone; + class n6 feature; + class n7 feature; + class n8 feature; + class n9 feature; + n0 --> n1 + n2 --> n3 + n3 --> n0 + n4 --> n3 + n5 --> n4 + n6 --> n11 + n7 --> n11 + n8 --> n7 + n9 --> n8 + n10 --> n12 + n10 --> n8 + n12 --> n7 + n14 --> n13 + n15 --> n14 + n16 --> n14 + n17 --> n16 + n18 --> n16 + n18 --> n15 + n19 --> n13 + n21 --> n20 + n22 --> n21 + n23 --> n20 + n24 --> n23 +``` + +## Milestones + +JSON records under `milestones/` follow `@hubless/schema/milestone.schema.json`. 
+ + +| ID | Title | Status | +|----|-------|--------| +| [hubless/milestone/m0-5-cli-proof](milestones/hubless-milestone-m0-5-cli-proof.json) | CLI proof of concept | PLANNED | +| [hubless/milestone/m0-foundations](milestones/hubless-milestone-m0-foundations.json) | Repository foundations | IN_PROGRESS | +| [hubless/milestone/m1-5-enhancements](milestones/hubless-milestone-m1-5-enhancements.json) | Filters and activity enhancements | PLANNED | +| [hubless/milestone/m1-mvp](milestones/hubless-milestone-m1-mvp.json) | MVP release | PLANNED | +| [hubless/milestone/m2-github-sync](milestones/hubless-milestone-m2-github-sync.json) | GitHub synchronization | PLANNED | +| [hubless/milestone/m3-ide](milestones/hubless-milestone-m3-ide.json) | IDE integrations | PLANNED | + +## Features + +Feature records under `features/` follow `@hubless/schema/feature.schema.json`. + + +| ID | Title | Status | +|----|-------|--------| +| [hubless/feature/docs-components](features/hubless-feature-docs-components.json) | Markdown component library integration | PLANNED | +| [hubless/feature/event-store-cli](features/hubless-feature-event-store-cli.json) | Event store and CLI foundations | PLANNED | +| [hubless/feature/git-sync](features/hubless-feature-git-sync.json) | Robust Git synchronization | PLANNED | +| [hubless/feature/github-projection](features/hubless-feature-github-projection.json) | GitHub projection integration | PLANNED | +| [hubless/feature/ide-integration](features/hubless-feature-ide-integration.json) | Editor integrations via LSP | PLANNED | +| [hubless/feature/repo-foundations](features/hubless-feature-repo-foundations.json) | Repository foundations and automation | IN_PROGRESS | +| [hubless/feature/tui-experience](features/hubless-feature-tui-experience.json) | Magit-grade TUI experience | PLANNED | + +## Stories + +Stories reside in `../issues/stories/` following `@hubless/schema/story.schema.json`. 
+ + +| ID | Title | Status | +|----|-------|--------| +| [hubless/story/0001](../issues/stories/hubless-story-0001.json) | As a maintainer I have documented workflows and automation | IN_PROGRESS | +| [hubless/story/0002](../issues/stories/hubless-story-0002.json) | As a developer I can manage issues via CLI commands | PLANNED | +| [hubless/story/0003](../issues/stories/hubless-story-0003.json) | As a developer I can browse and update work in the TUI | PLANNED | +| [hubless/story/0004](../issues/stories/hubless-story-0004.json) | As a contributor I can sync Hubless work via Git remotes | PLANNED | +| [hubless/story/0005](../issues/stories/hubless-story-0005.json) | As a maintainer I can mirror events to GitHub | PLANNED | +| [hubless/story/0006](../issues/stories/hubless-story-0006.json) | As a developer I can interact with Hubless from my IDE | PLANNED | +| [hubless/story/0007](../issues/stories/hubless-story-0007.json) | As a documentarian I can compose docs from reusable components | PLANNED | + +Keep these tables in sync with the JSON records. Automation will eventually consume the JSON directly to render dependency graphs and schedule projections. 
\ No newline at end of file diff --git a/@hubless/roadmap/milestones/hubless-milestone-m0-foundations.json b/@hubless/roadmap/milestones/hubless-milestone-m0-foundations.json index b3e1e07..9b19094 100644 --- a/@hubless/roadmap/milestones/hubless-milestone-m0-foundations.json +++ b/@hubless/roadmap/milestones/hubless-milestone-m0-foundations.json @@ -3,7 +3,10 @@ "title": "Repository foundations", "status": "IN_PROGRESS", "description": "Bootstrap documentation, workflows, and automation scaffolding for Hubless.", - "timeframe": {"start": "2025-09-18", "end": null}, + "timeframe": { + "start": "2025-09-18", + "end": null + }, "objectives": [ "Specs and technical designs captured in docs/", "Task DAG schema and automation seeds established" @@ -14,11 +17,13 @@ ], "dependencies": [], "features": [ - "hubless/feature/repo-foundations" + "hubless/feature/repo-foundations", + "hubless/feature/docs-components" ], "tasks": [ "hubless/m0/task/0001", - "hubless/m0/task/0004" + "hubless/m0/task/0004", + "hubless/m0/task/0005" ], "notes": [] } diff --git a/@hubless/roadmap/templates/README.md b/@hubless/roadmap/templates/README.md new file mode 100644 index 0000000..78b83fd --- /dev/null +++ b/@hubless/roadmap/templates/README.md @@ -0,0 +1,37 @@ +# Hubless Roadmap Data + +Structured roadmap data lives alongside JSON schemas so automation can generate schedules, dependency graphs, and dashboards. + +> Regenerate this document with `make docs` after updating roadmap JSON. + +## Snapshot + +![[docs/components/roadmap/progress.md]] + +## Dependencies + +![[docs/components/roadmap/dependencies.md]] + +## Dependency Graph + +![[docs/components/roadmap/dependencies-graph.md]] + +## Milestones + +JSON records under `milestones/` follow `@hubless/schema/milestone.schema.json`. + +![[docs/components/roadmap/milestones-table.md]] + +## Features + +Feature records under `features/` follow `@hubless/schema/feature.schema.json`. 
+ +![[docs/components/roadmap/features-table.md]] + +## Stories + +Stories reside in `../issues/stories/` following `@hubless/schema/story.schema.json`. + +![[docs/components/roadmap/stories-table.md]] + +Keep these tables in sync with the JSON records. Automation will eventually consume the JSON directly to render dependency graphs and schedule projections. diff --git a/AGENTS.md b/AGENTS.md index 1fffc86..5b3a8ad 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,20 +1,21 @@ ## Workflow -- Tasks live in @hubless/issues/tasks.md +- Tasks data lives in `@hubless/issues/tasks/*.json`; the generated rollup sits at `@hubless/issues/generated/tasks.md`. - Try to associate all work with a task id - Maintain task dependencies so we have an accurate DAG; update `@hubless/issues/tasks/` whenever prerequisites change. This DAG will drive the rolling frontier worker pool. - Tasks are defined as JSON files under `@hubless/issues/tasks/` matching the schema in `@hubless/schema/task.schema.json`; IDs follow `{project}/{milestone}/{type}/{number}` (e.g., `hubless/m0/task/0001`). - Follow the Task lifecycle: -1. Task added to @hubless/issues/tasks.md; status = "PLANNED" +1. Task added to `@hubless/issues/tasks/.json`; the generated rollup (`@hubless/issues/generated/tasks.md`) should show it as `PLANNED` after regeneration. 2. Start task? status = "STARTED" 3. Task blocked? status = "BLOCKED" 4. Task finished? status = "DONE" -5. Once status = "DONE", Tasks need the following badges: (i) Tested (ii) Documented (iii) Shipped; only then shall we remove them from the tasks.md file, and add it to the "tasks.archive.md" file +5. Once status = "DONE" with badges (i) Tested (ii) Documented (iii) Shipped, the generator removes the item from the tasks rollup and adds it to the archive automatically on `make docs`. This also refreshes `CHANGELOG.md` and release notes. - NEVER GIT AMEND; just make a new commit. - NEVER REBASE; just git merge. 
Embrace the messy history–the truth shall set you free. - NEVER EVER FORCE PUSH!!! If you feel like you must halt and seek permission from the user. +- Install repo git hooks via `make hooks` so fmt/lint/test/docs run before every commit. ## Code Quality diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..0244c1b --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,25 @@ +# Hubless Changelog + +> This file is generated. Edit `CHANGELOG.template.md` or underlying JSON, then run `make docs`. + +## Latest Release Snapshot + +# Hubless Release Notes + +> Generated via `make docs`. Edit JSON/tasks, then rerun to refresh. + +## Recently Completed Tasks + + +- 2025-09-19 — **Evaluate markdown component library** (hubless/m0/task/0005) — Tested, Documented, Shipped + +## Completed Stories (For Reference) + + +| ID | Title | Completed Status | +| --- | --- | --- | +| — | — | — | + +## Historical Archives + +See `@hubless/issues/generated/archive.md` for the full backlog of completed stories and tasks. diff --git a/CHANGELOG.template.md b/CHANGELOG.template.md new file mode 100644 index 0000000..44dbe70 --- /dev/null +++ b/CHANGELOG.template.md @@ -0,0 +1,11 @@ +# Hubless Changelog + +> This file is generated. Edit `CHANGELOG.template.md` or underlying JSON, then run `make docs`. + +## Latest Release Snapshot + +![[docs/reference/release-notes.md]] + +## Historical Archives + +See `@hubless/issues/generated/archive.md` for the full backlog of completed stories and tasks. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 575ad27..2df17e3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,23 +3,27 @@ Thanks for building Hubless! This document summarizes how we collaborate. For the full agent handbook see `AGENTS.md`. ## Prerequisites + - Go 1.22+ - Git 2.30+ - Familiarity with Charmbracelet (Bubbletea, Lipgloss, Bubbles) is helpful. ## Task Workflow -1. Add or pick up a task in `@hubless/issues/tasks.md` and set status to `PLANNED`. + +1. 
Add or pick up a task in `@hubless/issues/tasks/*.json` (the table in `@hubless/issues/generated/tasks.md` is generated) and set status to `PLANNED`. 2. When you begin work, flip status to `STARTED` and link the task ID in your branch, PR, and commit messages. 3. Blocked tasks move to `BLOCKED` with context on what’s needed. 4. When finished, mark as `DONE` and ensure it carries the badges **Tested**, **Documented**, **Shipped**. 5. After verifying badges, move the entry to `tasks.archive.md`. ## Git Practices + - **Never amend**: make new commits for follow-up fixes. - **Never rebase**: merge instead; we keep history messy but truthful. - **Never force push**: if you think you need to, stop and consult the team. ## Code Quality + - Single Responsibility Principle. - One file per entity (struct, interface, enum, etc.). - Test-double friendly design (interfaces over concretions). @@ -28,19 +32,41 @@ Thanks for building Hubless! This document summarizes how we collaborate. For th - Developer and user experience are top priorities. ## Development Flow + 1. Read `docs/PRD.md` and `docs/TechSpec.md` to understand the current goals. 2. Update specs/design docs first when changing direction; treat them as living documents. 3. Build features through application services so both CLI and TUI can reuse logic. 4. Write tests alongside features. Unit test domain logic, adapters, and Bubbletea models. 5. Run `go fmt`, `go vet`, and any configured linters before opening a PR. +## Documentation Automation + +- Structured data under `@hubless/` now feeds Markdown via reusable snippets in `docs/components/`. +- Install or clone [`markdown-transclusion`](https://github.com/flyingrobots/markdown-transclusion) (Node ≥20). Set `MARKDOWN_TRANSCLUSION_BIN` to the executable (`markdown-transclusion` if installed globally, or `node`) and `MARKDOWN_TRANSCLUSION_ARGS` to the CLI script path when using a local clone (e.g., `/path/to/markdown-transclusion/dist/cli.js`). 
+- Run `make docs` (or `./scripts/render-docs.sh`) after editing JSON records or templates. This regenerates shared snippets and rewrites `@hubless/roadmap/generated/README.md` and `@hubless/issues/generated/tasks.md` from their templates. +- Run `make docs-test` to execute generator unit tests and ensure snippets format as expected. +- Run `make docs-verify` to confirm all generated Markdown is fully transcluded (no `![[…]]` placeholders). +- For custom dependency graph styling, pass `--graph-direction`, `--graph-clusters`, or `--graph-palette` to `cmd/docs-components` (see `README.md`). +- `CHANGELOG.md` is generated from `CHANGELOG.template.md` and `docs/reference/release-notes.*`; edit the template or JSON, not the generated file. +- Palette overrides live in `docs/reference/palettes.json` (validated by `docs/reference/palettes.schema.json`); point `--palette-file` elsewhere if you keep custom palettes in another location. +- Install [`golangci-lint`](https://golangci-lint.run/) locally so `make lint` and the pre-commit hook can execute successfully. Recommended: + +## Tooling & Hooks + +- Run `make fmt`, `make lint`, and `make test` before opening a PR. CI expects them to pass. +- Install local git hooks via `make hooks` (wraps `scripts/install-git-hooks.sh`) so pre-commit checks run automatically. +- `.golangci.yml` houses lint configuration; feel free to propose tweaks but keep the suite running clean. +- `.editorconfig` defines base formatting (tabs for Makefiles, spaces elsewhere). + ## Commit & PR Guidance + - Reference task IDs in commit messages (`TASK-123: implement catalog writer`). - Small, focused commits preferred. - Ensure docs and changelog entries are updated where relevant. - PR description should include testing evidence (manual commands, unit test output) and links to updated docs. ## Communication + - Inline documentation lives in `docs/`. Update related files (PRD, TechSpec, design docs) as features evolve. 
- Capture architectural decisions either in commit messages or lightweight ADRs under `docs/` if needed. diff --git a/Dockerfile.release-test b/Dockerfile.release-test new file mode 100644 index 0000000..3e913f2 --- /dev/null +++ b/Dockerfile.release-test @@ -0,0 +1,26 @@ +FROM golang:1.25 + +ENV PATH="/usr/local/go/bin:/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" + +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + git \ + nodejs \ + npm \ + && rm -rf /var/lib/apt/lists/* + +RUN npm install -g markdown-transclusion + +WORKDIR /app +COPY . /app + +RUN rm -rf .git && \ + git init && \ + git config user.name "Release Tester" && \ + git config user.email "tester@example.com" && \ + git add . && \ + git commit -m "container snapshot" && \ + git remote remove origin || true + +HEALTHCHECK --interval=30s --timeout=5s --start-period=10s CMD go version >/dev/null 2>&1 || exit 1 +CMD ["/bin/bash"] diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..889be1e --- /dev/null +++ b/Makefile @@ -0,0 +1,44 @@ +.PHONY: all clean docs docs-components docs-test docs-verify fmt fmt-check lint test hooks release-docs release release-dry + +docs docs-components release-docs: + ./scripts/render-docs.sh + +docs-test: + go test ./internal/docscomponents + +docs-verify: + ./scripts/verify-docs.sh + +fmt: + find . -name '*.go' -not -path './vendor/*' -not -path './.git/*' -print0 | xargs -0 gofmt -w + +fmt-check: + @output=$$(find . -name '*.go' -not -path './vendor/*' -not -path './.git/*' -print0 | xargs -0 gofmt -l); \ + if [ -n "$$output" ]; then \ + echo "Files need gofmt:"; \ + echo "$$output"; \ + exit 1; \ + fi + +lint: + golangci-lint run ./... + +test: + go test ./cmd/... 
./internal/docscomponents ./internal/release + +hooks: + bash ./scripts/install-git-hooks.sh + +release: + @if [ -z "$(VERSION)" ]; then \ + echo "VERSION env var required (e.g., make release VERSION=0.1.0)" >&2; \ + exit 1; \ + fi + go run ./cmd/release --version $(VERSION) $(if $(NOTES),--notes $(NOTES)) + +release-dry: + @if [ -z "$(VERSION)" ]; then \ + echo "VERSION env var required (e.g., make release-dry VERSION=0.1.0)" >&2; \ + exit 1; \ + fi + go run ./cmd/release --version $(VERSION) --dry-run $(if $(NOTES),--notes $(NOTES)) diff --git a/README.md b/README.md index 9f87746..960e392 100644 --- a/README.md +++ b/README.md @@ -1,53 +1,50 @@ # Hubless -Hubless is a terminal-native, Git-backed work tracker. It treats issues, pull requests, and boards as append-only event streams under `refs/hubless/**`, then presents them through a Charmbracelet-powered TUI and CLI. This repository houses the specs, tooling, and implementation that turn Git repositories into fully auditable planning systems. +> *Imagine GitHub… but in your repo. No hub; just Git.* -## Getting Started +> [!INFO] +> **EARLY DAYS.** I just started this project yesterday. Expect rapid iteration, rough edges, and breaking changes. +> If you want boring stability, wait. If you want to see Git-native project state come alive, jump in now. -> **Status:** Early development. Specs are in place; implementation is in progress. +--- -### Prerequisites -- Go 1.22+ -- Git 2.30+ -- Optional: [gh](https://github.com/cli/cli) for GitHub integration experiments +## Why Hubless? -### Clone -```bash -git clone https://github.com/flyingrobots/hubless.git -cd hubless -``` +> *Art doesn’t need to be explained; its purpose is to create a new reality as powerful and engaging as the one we live in.* -### Build the utilities -The Go module is initialized but the primary CLI is still under construction. 
A helper binary for progress updates exists today: -```bash -go build ./cmd/update-progress -``` +### **Hubless = Freedom** -### Run the progress updater -The legacy Python script has been replaced with the Go implementation (spec documented in `docs/reference/update-progress-algorithm.md`). Point the tool at your `git-mind` checkout once the Go port is finished. +Keep your entire project in your repo. No SaaS lock-in. Offline-first. Fast. Minimalist. Deeply integrated. +Web frontend optional (for PMs or when you’re on the go), but developers live in Git. -```bash -./update-progress --root ../git-mind -``` +### The Vision -## Project Docs -- `docs/PRD.md` – Product requirements and roadmap. -- `docs/TechSpec.md` – Architecture, data model, sync contracts. -- `docs/design/tui.md` – Bubbletea TUI views, interactions, styling. -- `docs/reference/implementation-skeleton.md` – Hexagonal layout and scaffolding. -- `docs/reference/update-progress-algorithm.md` – Transcription of the ledger updater logic. -- `AGENTS.md` – Workflow rules, coding standards, collaboration notes. -- `@hubless/` – Structured planning data (tasks, stories, features, milestones schemas). +- **Git-native issues, boards, and execution.** + Every change is a commit. No website required. +- **Conflict-free by design.** + CRDT event streams, snapshots, and catalogs keep state boringly coherent. -## Development Principles -- Git is the source of truth; no central server required. -- Conflict-free, append-only event streams for issues, boards, and PRs. -- Hexagonal architecture with Go application services, Git adapters, and Charmbracelet UI layers. -- CLI command surface will use Charmbracelet Fang/Cobra to keep styling consistent with the TUI. +- **Auditable forever.** + Undo = append-only. No rewrites, no drift. -## Contributing -See `CONTRIBUTING.md` for task workflow, branching rules, and code quality expectations. 
+- **Sort of like Magit, but for project flow.** + Fast TUI, consistent keystrokes, muscle-memory ergonomics. -## License -This project is licensed under the MIT License – see `LICENSE` for details. +- **Optional Play button.** + Don’t just track issues — press ▶ to execute DAG-style tasks automatically. + +- **Boring stuff just happens.** + **Old way:** + ticket → + website → + bookkeeping → + copy-paste → + PR. + **New way:** + ```bash + git hubless start issue 34 + # branch created, issue assigned, kanban updated, draft PR opened + ... + git hubless submit issue 34 + # PR updated, undrafted, review requested diff --git a/cmd/docs-components/main.go b/cmd/docs-components/main.go new file mode 100644 index 0000000..085650a --- /dev/null +++ b/cmd/docs-components/main.go @@ -0,0 +1,158 @@ +package main + +import ( + "context" + "flag" + "log" + "os" + "strings" + + "github.com/flyingrobots/hubless/internal/docscomponents" +) + +type stringSliceFlag []string + +func (s *stringSliceFlag) String() string { + return strings.Join(*s, ",") +} + +func (s *stringSliceFlag) Set(value string) error { + *s = append(*s, value) + return nil +} + +// main is the CLI entrypoint for generating documentation components and optionally +// rendering documentation templates via the markdown-transclusion tool. +// +// It parses command-line flags to configure repository and output paths, generator +// options (graph direction, clusters, palette and palette file), and transclusion +// settings (binary, base path, and additional args). It constructs a docs +// generator, runs component generation, and—unless -skip-transclusion is set—resolves +// the transclusion binary and arguments (from flags or MARKDOWN_TRANSCLUSION_* env +// vars) and invokes markdown-transclusion to render a set of templates to their +// configured outputs. Any initialization, generation, or rendering error causes the +// program to log a fatal error and exit. 
+func main() { + log.SetFlags(0) + log.SetPrefix("docs-components: ") + + var ( + repoRoot string + componentsDir string + roadmapTemplate string + roadmapOutput string + tasksTemplate string + tasksOutput string + archiveTemplate string + archiveOutput string + releaseTemplate string + releaseOutput string + changelogTemplate string + changelogOutput string + transclusionBin string + transclusionBase string + skipTransclusion bool + graphDirection string + graphClusters bool + graphPalette string + paletteFile string + transclusionArgs stringSliceFlag + ) + + flag.StringVar(&repoRoot, "repo", ".", "Repository root (defaults to current directory)") + flag.StringVar(&componentsDir, "components", "", "Components output directory (defaults to docs/components under repo)") + flag.StringVar(&roadmapTemplate, "roadmap-template", "@hubless/roadmap/templates/README.md", "Template path for roadmap documentation") + flag.StringVar(&roadmapOutput, "roadmap-output", "@hubless/roadmap/generated/README.md", "Output path for generated roadmap documentation") + flag.StringVar(&tasksTemplate, "tasks-template", "@hubless/issues/templates/tasks.md", "Template path for tasks overview") + flag.StringVar(&tasksOutput, "tasks-output", "@hubless/issues/generated/tasks.md", "Output path for generated tasks overview") + flag.StringVar(&archiveTemplate, "archive-template", "@hubless/issues/templates/archive.md", "Template path for archive overview") + flag.StringVar(&archiveOutput, "archive-output", "@hubless/issues/generated/archive.md", "Output path for generated archive overview") + flag.StringVar(&releaseTemplate, "release-template", "docs/reference/release-notes.template.md", "Template path for release notes") + flag.StringVar(&releaseOutput, "release-output", "docs/reference/release-notes.md", "Output path for generated release notes") + flag.StringVar(&changelogTemplate, "changelog-template", "CHANGELOG.template.md", "Template path for root changelog") + 
flag.StringVar(&changelogOutput, "changelog-output", "CHANGELOG.md", "Output path for generated changelog") + flag.StringVar(&transclusionBin, "transclusion-bin", "", "Executable for markdown-transclusion CLI (defaults to MARKDOWN_TRANSCLUSION_BIN env or markdown-transclusion)") + flag.StringVar(&transclusionBase, "transclusion-base", "", "Base path passed to markdown-transclusion (defaults to repo root)") + flag.BoolVar(&skipTransclusion, "skip-transclusion", false, "Skip rendering templates with markdown-transclusion") + flag.StringVar(&graphDirection, "graph-direction", "LR", "Direction for Mermaid dependency graph (LR, RL, TB, BT)") + flag.BoolVar(&graphClusters, "graph-clusters", false, "Group dependency graph nodes by type using Mermaid subgraphs") + flag.StringVar(&graphPalette, "graph-palette", "evergreen", "Mermaid palette for dependency graph (evergreen, infrared, zerothrow)") + flag.StringVar(&paletteFile, "palette-file", "docs/reference/palettes.json", "Optional palette definition file (JSON)") + flag.Var(&transclusionArgs, "transclusion-args", "Additional argument passed to markdown-transclusion (repeatable)") + flag.Parse() + + ctx := context.Background() + + generator, err := docscomponents.NewGenerator(repoRoot, componentsDir, docscomponents.GeneratorOptions{ + GraphDirection: graphDirection, + GraphClusters: graphClusters, + GraphPalette: graphPalette, + PaletteFile: paletteFile, + }) + if err != nil { + log.Fatalf("initialise generator: %v", err) + } + + if err := generator.Generate(ctx); err != nil { + log.Fatalf("generate components: %v", err) + } + + if skipTransclusion { + return + } + + if transclusionBin == "" { + if envValue := os.Getenv("MARKDOWN_TRANSCLUSION_BIN"); envValue != "" { + transclusionBin = envValue + } else { + transclusionBin = "markdown-transclusion" + } + } + + if len(transclusionArgs) == 0 { + if envValue := os.Getenv("MARKDOWN_TRANSCLUSION_ARGS"); envValue != "" { + transclusionArgs = append(transclusionArgs, 
parseArgs(envValue)...) + } + } + + if transclusionBase == "" { + transclusionBase = generator.RepoRoot() + } + + documents := []struct { + template string + output string + }{ + {template: roadmapTemplate, output: roadmapOutput}, + {template: tasksTemplate, output: tasksOutput}, + {template: archiveTemplate, output: archiveOutput}, + {template: releaseTemplate, output: releaseOutput}, + {template: changelogTemplate, output: changelogOutput}, + } + + for _, doc := range documents { + if doc.template == "" || doc.output == "" { + continue + } + + opts := docscomponents.TransclusionOptions{ + Bin: transclusionBin, + Args: []string(transclusionArgs), + BasePath: transclusionBase, + InputPath: doc.template, + OutputPath: doc.output, + } + + if err := docscomponents.RunTransclusion(ctx, opts); err != nil { + log.Fatalf("render %s -> %s: %v", doc.template, doc.output, err) + } + } +} + +// parseArgs splits raw into whitespace-separated fields (using strings.Fields) +// and returns a newly allocated slice containing those fields. An empty or +// all-whitespace input yields an empty slice. +func parseArgs(raw string) []string { + fields := strings.Fields(raw) + return append([]string(nil), fields...) +} diff --git a/cmd/release/main.go b/cmd/release/main.go new file mode 100644 index 0000000..ea9a329 --- /dev/null +++ b/cmd/release/main.go @@ -0,0 +1,62 @@ +package main + +import ( + "context" + "errors" + "flag" + "fmt" + "log" + "os" + + "github.com/flyingrobots/hubless/internal/release" +) + +// main is the entry point for the hubless release CLI. 
+// +// It parses command-line flags to configure a release and invokes the releaser: +// - repo: repository root (default ".") +// - version: version to tag (required) +// - notes: path to release notes markdown (default "docs/reference/release-notes.md") +// - dry-run: show actions without creating a tag +// - skip-checks: skip fmt/lint/test/docs before tagging +// +// If --version is omitted the program prints a short message, shows usage and exits with code 2. +// Any other initialization or run error is logged and the program exits non‑zero. +func main() { + log.SetFlags(0) + log.SetPrefix("hubless-release: ") + + var ( + repoRoot string + version string + notesPath string + dryRun bool + skipChecks bool + ) + + flag.StringVar(&repoRoot, "repo", ".", "Repository root (defaults to current directory)") + flag.StringVar(&version, "version", "", "Version to tag (required)") + flag.StringVar(¬esPath, "notes", "docs/reference/release-notes.md", "Path to release notes markdown") + flag.BoolVar(&dryRun, "dry-run", false, "Show actions without creating a tag") + flag.BoolVar(&skipChecks, "skip-checks", false, "Skip fmt/lint/test/docs before tagging") + flag.Parse() + + releaser, err := release.New(repoRoot) + if err != nil { + log.Fatalf("initialize releaser: %v", err) + } + + if err := releaser.Run(context.Background(), release.Options{ + Version: version, + NotesPath: notesPath, + DryRun: dryRun, + SkipChecks: skipChecks, + }); err != nil { + if errors.Is(err, release.ErrVersionRequired) { + fmt.Fprintln(os.Stderr, "--version is required") + flag.Usage() + os.Exit(2) + } + log.Fatalf("release failed: %v", err) + } +} diff --git a/docs/PRD.md b/docs/PRD.md index 7b554f8..9fb0e17 100644 --- a/docs/PRD.md +++ b/docs/PRD.md @@ -1,14 +1,17 @@ # Hubless Product Requirements Document ## Document Control + - Version: 0.1 (working draft) - Last updated: 2025-09-18 - Authors: Hubless Core Team ## 1. 
Executive Summary + Hubless is a terminal-native work tracker that treats issues, pull requests, and boards as Git-native data. It extends a repository with append-only event streams and presents them through an ergonomic text user interface (TUI). Hubless targets teams that prefer Git workflows, need offline access, and require auditable change history without depending on SaaS task managers. The product must feel as fast and expressive as Magit while eliminating the friction of context switching to web dashboards. ## 2. Product Vision and Goals + - **Vision**: Make collaborative planning feel like a first-class Git primitive. - **Primary Goal**: Provide conflict-free, offline-capable issue and PR tracking that syncs cleanly across Git remotes and optional GitHub projections. - **Secondary Goals**: @@ -17,27 +20,33 @@ Hubless is a terminal-native work tracker that treats issues, pull requests, and - Allow gradual adoption: start with Git-only workflows, add GitHub synchronization when required. ## 3. Target Users and Personas + - **Hands-on Developers**: Live in the terminal, already comfortable with Git plumbing, and want issue tracking that keeps pace with code review workflows. - **Tech Leads / Maintainers**: Need immediate visibility into status across multiple contributors without manual status reporting. - **Tooling Enthusiasts**: Evaluate new workflows, expect scripting hooks, and will extend the tool. ## 4. Problem Statement + Traditional issue trackers fragment context between code and planning artifacts and require constant network connectivity. Teams maintaining long-lived repositories lack an auditable, offline-first issue tracker that integrates with Git operations. Hubless solves this by storing planning data directly in the repository while offering an efficient interface and optional projections into GitHub. ## 5. Scope + ### 5.1 In Scope + - Append-only event streams for issues, boards, and pull requests stored under `refs/hubless/**`. 
- Local-first command-line and TUI workflows to create, view, update, and sync work items. - Snapshotting and catalog indexes that keep large backlogs responsive. - Optional GitHub synchronization that treats GitHub as a projection of Hubless state. ### 5.2 Out of Scope (Initial Releases) + - Full parity with GitHub project boards or enterprise integrations (Jira, Linear, etc.). - Web UI or mobile clients. - Real-time multi-user collaboration beyond Git’s eventual consistency model. - Automated analytics dashboards beyond basic activity feeds. ## 6. Product Principles + 1. **Git is the source of truth**: All work artifacts originate as Git refs and commits. 2. **No merge conflicts**: Event sourcing and CRDT-friendly data structures avoid write contention. 3. **Offline-first**: The product works without network access; sync happens on demand. @@ -45,7 +54,9 @@ Traditional issue trackers fragment context between code and planning artifacts 5. **Extensible vocabulary**: Event types can evolve without breaking compatibility. ## 7. Use Cases and Functional Requirements + ### 7.1 Core Use Cases + - View an overview of open work and drill into issue timelines. - Create and edit issues using editors developers already use (`$EDITOR`). - Update status, assignment, and comments from either CLI commands or the TUI. @@ -54,6 +65,7 @@ Traditional issue trackers fragment context between code and planning artifacts - Optionally synchronize with GitHub issues, comments, and PRs. ### 7.2 Functional Requirements (MVP) + 1. **List issues**: `hubless list` and TUI list view show title, ID, status, priority, assignee. 2. **View issue timeline**: `hubless view ` replays events, including comments and status changes. 3. **Create issue**: `hubless create` opens a template, commits an `issue:created` event. @@ -64,12 +76,14 @@ Traditional issue trackers fragment context between code and planning artifacts 8. 
**GitHub projection** (post-MVP toggle): translate events to GitHub issues/comments and back while preserving event IDs. ### 7.3 Stretch Goals (Phase 2+) + - Promote issue to PR (`hubless pr `) with event linkage. - Export state for external systems (e.g., Jira) via CLI commands. - Provide a feed of recent changes for status reporting. - Expose an LSP endpoint for IDE integrations. ## 8. Non-Functional Requirements + - **Performance**: Listing 10k issues with 100k total events in under 200 ms via catalog indexing. Viewing a single issue under 100 ms using snapshot + tail replay. - **Reliability**: Commands are idempotent and safe to retry. Sync detects and de-duplicates events using stable IDs. - **Security**: Honor repository permissions; reuse existing Git or `gh` authentication flows for GitHub API use. @@ -77,6 +91,7 @@ Traditional issue trackers fragment context between code and planning artifacts - **Portability**: Works on macOS, Linux, and WSL out of the box. ## 9. Release Strategy + | Phase | Objectives | Key Deliverables | |-------|-------------|------------------| | Phase 0.5 – CLI Proof | Validate event model; basic CLI create/list/view | Event schema, append-only refs, CLI surface | @@ -85,19 +100,38 @@ Traditional issue trackers fragment context between code and planning artifacts | Phase 2 – GitHub Sync | Round-trip GitHub integration, PR support | Sync adapter, PR events, mapping metadata | | Phase 3 – IDE Integration | Surface Hubless data in editors | LSP service, integration guides | +### 9.1 Planning Snapshot + +Live metrics sourced from `@hubless/` JSON keep this PRD grounded in the current plan: + +![[docs/components/roadmap/progress.md]] + +![[docs/components/roadmap/dependencies.md]] + +![[docs/components/roadmap/dependencies-graph.md]] + +### 9.2 Recently Completed Work + +The archive rolls up completed items for release notes: + +![[docs/components/issues/archived-tasks.md]] + ## 10. 
Success Metrics + - Team of 3–10 developers can replace GitHub Issues within one sprint. - Offline work for two weeks reconciles without conflicts on sync. - TUI satisfaction scores exceed web-based alternatives in qualitative interviews. - 90% of status updates originate from Hubless commands/TUI rather than GitHub UI. ## 11. Dependencies and Assumptions + - Developers have Git 2.30+ and can install a Go-based CLI. - Repositories may already contain Charmbracelet-based tooling; Hubless must coexist without conflicting key bindings. - GitHub API access tokens available when sync is enabled. - Repository maintainers permit additional refs under `refs/hubless/**`. ## 12. Risks and Mitigations + | Risk | Impact | Mitigation | |------|--------|------------| | GitHub API mismatch with event model | Data loss or drift | Treat GitHub as projection; store canonical event IDs; run diff checks before publishing | @@ -106,12 +140,14 @@ Traditional issue trackers fragment context between code and planning artifacts | Authentication complexity | Failed sync operations | Reuse `gh` CLI auth and document token scopes | ## 13. Open Questions + - Do we require migration tooling for existing GitHub issues when onboarding a repository? - What is the default cadence for snapshots (per N events vs. time-based)? - Should board definitions support custom columns beyond To Do / In Progress / Done in MVP? - How much configuration should live in repo-level manifests versus CLI config files? ## 14. Related Documents -- `docs/TechSpec.md` + +- [docs/TechSpec.md](docs/TechSpec.md) - `docs/design/tui.md` - `docs/reference/implementation-skeleton.md` diff --git a/docs/TechSpec.md b/docs/TechSpec.md index 679cad5..30ed773 100644 --- a/docs/TechSpec.md +++ b/docs/TechSpec.md @@ -1,17 +1,21 @@ # Hubless Technical Specification ## Document Control + - Version: 0.1 (working draft) - Last updated: 2025-09-18 - Owners: Architecture & Platform Engineering ## 1. 
Purpose + This document defines the architecture, data model, and integration contracts for Hubless. It complements the Product Requirements Document (`docs/PRD.md`) by detailing how the system delivers the promised capabilities. The specification focuses on the initial MVP and highlights extension points required for later phases. ## 2. System Overview + Hubless augments a Git repository with an event-sourced work-tracking subsystem and exposes the data through a Go-based CLI/TUI application. ### 2.1 High-Level Components + - **CLI/TUI Application**: Binary invoked as `hubless`. Provides commands for list/view/create/update/sync and a Bubbletea-based TUI (see `docs/design/tui.md`). - **Event Store**: Git refs under `refs/hubless/**`. Issues, boards, feeds, and metadata are modeled as append-only commit chains. - **Snapshot & Catalog Indexes**: Periodic commits that cache computed state to keep read operations under latency targets. @@ -20,6 +24,7 @@ Hubless augments a Git repository with an event-sourced work-tracking subsystem - **GitHub Projection** (Phase 2+): Translates Hubless events into GitHub issues/PRs/comments and vice versa via the GitHub API. ### 2.2 Data Flow Summary + 1. User triggers an action via CLI/TUI. 2. Command composes an event payload and writes a Git commit to the relevant ref. 3. Catalog and optional feed refs are updated as part of the same operation. @@ -27,7 +32,9 @@ Hubless augments a Git repository with an event-sourced work-tracking subsystem 5. GitHub projection (when enabled) processes new events and mirrors them to the GitHub API, recording a mapping in metadata refs. ## 3. Data Model + ### 3.1 Namespace Layout + ``` refs/ hubless/ @@ -42,6 +49,7 @@ refs/ ``` ### 3.2 Event Vocabulary + | Event Type | Description | Typical Payload Keys | |------------|-------------|----------------------| | `issue:created` | Initial creation of an issue | `title`, `body`, `priority`, `labels` | @@ -57,16 +65,21 @@ refs/ Event types are extensible. 
Consumers must tolerate unknown payload fields. ### 3.3 Commit Message Schema + Each event commit uses a two-line format: + ``` ``` + Example: + ``` issue:status_changed {"type":"issue:status_changed","issue":"000123","actor":"james","ts":"2025-09-18T23:15:47Z","payload":{"from":"open","to":"in-progress"},"lamport":2,"event_id":"1c0b8e6e3f3c6..."} ``` + - `type`: canonical event type string. - `issue` or resource identifier. - `actor`: normalized username. @@ -76,10 +89,13 @@ issue:status_changed - `event_id`: optional stable hash (SHA1 of canonicalized payload) used for deduplication across projections. ### 3.4 Snapshot Commits + Snapshots capture materialized issue state. They live under `refs/hubless/snapshots//` and store a tree containing serialized issue data (status, metadata, last event ID). The CLI reads the latest snapshot and replays subsequent events to rebuild current state within the desired latency budget. ### 3.5 Catalog Commit Layout + The catalog ref points to a commit whose tree summarizes all issues: + ``` /issues/ 000123 # blob contains tip OID, last update timestamp, priority tag, status @@ -87,13 +103,17 @@ The catalog ref points to a commit whose tree summarizes all issues: version updated_at ``` + The list command and TUI list view read this tree to avoid enumerating every issue ref. ### 3.6 Activity Feed (Optional) + `refs/hubless/feed` aggregates event IDs since the previous feed head. Each commit lists event OIDs in chronological order. The TUI can tail this ref to render a “what changed recently” panel without scanning every issue. ## 4. 
Command and API Surface + ### 4.1 CLI Commands (MVP) + | Command | Description | Event(s) Produced | |---------|-------------|-------------------| | `hubless list` | Print issue summaries from catalog | none | @@ -108,9 +128,11 @@ The list command and TUI list view read this tree to avoid enumerating every iss Future commands include `hubless pr <branch>` (creates `pr:opened`), `hubless export`, and `hubless lsp`. ### 4.2 TUI Interaction Model + See `docs/design/tui.md` for detailed view flows, key bindings, and Bubbletea composition. The TUI invokes the same application services described in Section 6. ## 5. Application Architecture + Hubless follows a hexagonal architecture with the following layers: - **Domain**: Event definitions, issue aggregates, replay logic. - **Application Services**: Orchestrate commands, perform validation, compute derived data. @@ -128,13 +150,16 @@ hubless/ ``` ## 6. Git Adapter Specification + ### 6.1 Responsibilities + - Write event commits using `git mktree` + `git commit-tree` with empty or minimal trees. - Atomically advance refs with `git update-ref` to avoid race conditions. - Maintain catalog and optional feed refs in the same transaction window. - Expose a `ListIssues`, `LoadEvents`, and `AppendEvent` API to the application layer. ### 6.2 Plumbing Sequence (Append Event) + ``` 1. tree=$(printf "" | git mktree) 2. msg=$'issue:status_changed\n{"type":"issue:status_changed",...}' @@ -142,49 +167,78 @@ hubless/ 4. git update-ref refs/hubless/issues/<id> "$new_oid" "$current_ref_head" 5. Update catalog and feed refs via additional commit-tree + update-ref operations ``` + All plumbing commands run within the repository root passed to the adapter. Failures must roll back to the previous ref head. ### 6.3 Stable Event IDs + Stable IDs prevent duplicate publications during sync. 
The recommended algorithm: + ``` event_id = sha1(type + "\0" + issue + "\0" + ts + "\0" + actor + "\0" + canonical_json(payload)) ``` + The adapter stores `event_id` in the commit payload and uses it to match remote commits and GitHub artifacts. ## 7. Synchronization Model + ### 7.1 Git Remotes + Configure remotes to fetch/push Hubless refs (git refspecs take a single `*` wildcard per side, which already matches nested paths under the prefix): + ``` [remote "origin"] fetch = +refs/heads/*:refs/remotes/origin/* fetch = +refs/hubless/*:refs/hubless/* push = +refs/hubless/*:refs/hubless/* ``` + `hubless sync` executes `git fetch --prune` followed by `git push` with failure handling that retries idempotently. ### 7.2 GitHub Projection (Phase 2) + - **Outbound mapping**: Translate new Hubless events into GitHub API calls (create issue, add comment, update labels/state). Record GitHub issue IDs in `refs/hubless/meta/github-map`. - **Inbound mapping**: Periodically poll GitHub for changes. Convert remote updates into Hubless events, using `event_id` deduplication. - **Authentication**: Reuse `gh` CLI credentials or PAT tokens. Store no secrets in the repository. ## 8. Performance and Scaling + - Catalog reads avoid scanning every ref; list view target <200 ms for 10k issues. - Snapshot frequency tuned to replay at most 100 events per issue in the hot path. - Use compression-friendly payloads; event commits are small (<2 KB typical). - `hubless sync` should short-circuit if no refs changed since last fetch. ## 9. Security and Compliance + - Respect Git repository ACLs; Hubless never bypasses Git permissions. - GitHub integration uses least-privilege tokens (issues + pull_request scopes). - Event payloads should exclude secrets; clients must redact or encrypt sensitive data before commit. - Provide audit logs by virtue of Git history; document procedures for deletion (force-push not recommended). ## 10. Observability and Operations + - CLI emits structured logs (JSON) when `HUBLESS_LOG=json` is set. 
- Provide `hubless doctor` to verify ref health, snapshot freshness, and remote configuration. - Document backup strategy: repository cloning suffices; snapshots ensure quick restore. +### 10.1 Live Snapshot + +Operational rollups derived from the structured planning data keep this spec anchored to the live backlog: + +![[docs/components/roadmap/progress.md]] + +![[docs/components/roadmap/dependencies-graph.md]] + +![[docs/components/issues/status-summary.md]] + +### 10.2 Recently Completed + +Up-to-date archive gleaned from structured data: + +![[docs/components/issues/archived-tasks.md]] + ## 11. Risks and Mitigations + | Risk | Mitigation | |------|------------| | Large repositories hitting ref limits | Namespaces keep refs organized; monitor ref counts; allow pruning of archived issues | @@ -192,8 +246,11 @@ Configure remotes to fetch/push Hubless refs: | Divergent schemas between clients | Encode schema version in `refs/hubless/meta/version`; clients refuse incompatible versions | ## 12. Appendices + ### Appendix A: Mermaid Diagrams + #### Per-Issue Chain with Snapshots + ```mermaid gitGraph commit id: "root" @@ -211,6 +268,7 @@ gitGraph ``` #### Board Event Chain + ```mermaid gitGraph commit id: "root" @@ -224,6 +282,7 @@ gitGraph ``` #### Activity Feed + ```mermaid gitGraph commit id: "root" @@ -235,4 +294,5 @@ gitGraph ``` ### Appendix B: Reference Commands + See Section 6.2 for the commit sequence. Additional helper scripts can live under `scripts/` in the repository. 
diff --git a/docs/components/issues/archived-stories.md b/docs/components/issues/archived-stories.md new file mode 100644 index 0000000..aeb6080 --- /dev/null +++ b/docs/components/issues/archived-stories.md @@ -0,0 +1,5 @@ + + +| ID | Title | Completed Status | +| --- | --- | --- | +| — | No archived stories yet | — | diff --git a/docs/components/issues/archived-tasks.md b/docs/components/issues/archived-tasks.md new file mode 100644 index 0000000..1b433bc --- /dev/null +++ b/docs/components/issues/archived-tasks.md @@ -0,0 +1,5 @@ + + +| ID | Title | Completed On | Badges | +| --- | --- | --- | --- | +| hubless/m0/task/0005 | Evaluate markdown component library | 2025-09-19 | Tested, Documented, Shipped | diff --git a/docs/components/issues/changelog.md b/docs/components/issues/changelog.md new file mode 100644 index 0000000..00be90f --- /dev/null +++ b/docs/components/issues/changelog.md @@ -0,0 +1,3 @@ + + +- 2025-09-19 — **Evaluate markdown component library** (hubless/m0/task/0005) — Tested, Documented, Shipped diff --git a/docs/components/issues/status-summary.md b/docs/components/issues/status-summary.md new file mode 100644 index 0000000..2257ff9 --- /dev/null +++ b/docs/components/issues/status-summary.md @@ -0,0 +1,7 @@ + + +| Status | Count | +| --- | --- | +| DONE | 1 | +| STARTED | 2 | +| PLANNED | 3 | diff --git a/docs/components/issues/tasks-table.md b/docs/components/issues/tasks-table.md new file mode 100644 index 0000000..422c306 --- /dev/null +++ b/docs/components/issues/tasks-table.md @@ -0,0 +1,10 @@ + + +| ID | Title | Status | Owner | Labels | Badges | Updated | +| --- | --- | --- | --- | --- | --- | --- | +| [hubless/m0/task/0001](tasks/hubless-m0-task-0001.json) | Port progress updater from Python to Go | PLANNED | _unassigned_ | m0-foundations, prog | — | — | +| [hubless/m0/task/0004](tasks/hubless-m0-task-0004.json) | Structure @hubless planning artifacts | STARTED | _unassigned_ | m0-foundations, planning | — | 2025-09-18 | +| 
[hubless/m0/task/0005](tasks/hubless-m0-task-0005.json) | Evaluate markdown component library | DONE | _unassigned_ | m0-foundations, docs, automation | Tested, Documented, Shipped | 2025-09-19 | +| [hubless/m1/task/0002](tasks/hubless-m1-task-0002.json) | Introduce Fang-based CLI skeleton | PLANNED | _unassigned_ | m1-cli, cli | — | — | +| [hubless/m1/task/0003](tasks/hubless-m1-task-0003.json) | Implement Git event store adapter | PLANNED | _unassigned_ | m1-cli, event-store | — | — | +| [hubless/m1/task/0005](tasks/hubless-m1-task-0005.json) | Prototype Bubbletea TUI and Fang CLI wireframes with mocked data | STARTED | _unassigned_ | m1-cli, tui, cli | — | — | diff --git a/docs/components/roadmap/dependencies-graph.md b/docs/components/roadmap/dependencies-graph.md new file mode 100644 index 0000000..0293c1a --- /dev/null +++ b/docs/components/roadmap/dependencies-graph.md @@ -0,0 +1,82 @@ + + + n0["Milestone\\nCLI proof of concept"] + n1["Milestone\\nRepository foundations"] + n10["Feature\\nEditor integrations via LSP"] + n11["Feature\\nRepository foundations and automation"] + n12["Feature\\nMagit-grade TUI experience"] + n13["Story\\nAs a maintainer I have documented workflows and automation"] + n14["Story\\nAs a developer I can manage issues via CLI commands"] + n15["Story\\nAs a developer I can browse and update work in the TUI"] + n16["Story\\nAs a contributor I can sync Hubless work via Git remotes"] + n17["Story\\nAs a maintainer I can mirror events to GitHub"] + n18["Story\\nAs a developer I can interact with Hubless from my IDE"] + n19["Story\\nAs a documentarian I can compose docs from reusable components"] + n2["Milestone\\nFilters and activity enhancements"] + n20["Task\\nPort progress updater from Python to Go"] + n21["Task\\nStructure @hubless planning artifacts"] + n22["Task\\nEvaluate markdown component library"] + n23["Task\\nIntroduce Fang-based CLI skeleton"] + n24["Task\\nImplement Git event store adapter"] + n25["Task\\nPrototype Bubbletea 
TUI and Fang CLI wireframes with mocked data"] + n3["Milestone\\nMVP release"] + n4["Milestone\\nGitHub synchronization"] + n5["Milestone\\nIDE integrations"] + n6["Feature\\nMarkdown component library integration"] + n7["Feature\\nEvent store and CLI foundations"] + n8["Feature\\nRobust Git synchronization"] + n9["Feature\\nGitHub projection integration"] + classDef feature fill:#5AA7F3,stroke:#00B3A4,color:#121417,stroke-width:1px; + classDef milestone fill:#121417,stroke:#00B3A4,color:#F5F7FB,stroke-width:1px; + classDef story fill:#C7F36B,stroke:#5AA7F3,color:#121417,stroke-width:1px; + classDef task fill:#B056A1,stroke:#00B3A4,color:#F5F7FB,stroke-width:1px; + class n0 milestone; + class n1 milestone; + class n10 feature; + class n11 feature; + class n12 feature; + class n13 story; + class n14 story; + class n15 story; + class n16 story; + class n17 story; + class n18 story; + class n19 story; + class n2 milestone; + class n20 task; + class n21 task; + class n22 task; + class n23 task; + class n24 task; + class n25 task; + class n3 milestone; + class n4 milestone; + class n5 milestone; + class n6 feature; + class n7 feature; + class n8 feature; + class n9 feature; + n0 --> n1 + n2 --> n3 + n3 --> n0 + n4 --> n3 + n5 --> n4 + n6 --> n11 + n7 --> n11 + n8 --> n7 + n9 --> n8 + n10 --> n12 + n10 --> n8 + n12 --> n7 + n14 --> n13 + n15 --> n14 + n16 --> n14 + n17 --> n16 + n18 --> n16 + n18 --> n15 + n19 --> n13 + n21 --> n20 + n22 --> n21 + n23 --> n20 + n24 --> n23 +``` diff --git a/docs/components/roadmap/dependencies.md b/docs/components/roadmap/dependencies.md new file mode 100644 index 0000000..9cb4787 --- /dev/null +++ b/docs/components/roadmap/dependencies.md @@ -0,0 +1,42 @@ + +### Milestones + +| ID | Depends On | +|----|-------------| +| [hubless/milestone/m0-5-cli-proof](milestones/hubless-milestone-m0-5-cli-proof.json) | hubless/milestone/m0-foundations | +| [hubless/milestone/m1-5-enhancements](milestones/hubless-milestone-m1-5-enhancements.json) | 
hubless/milestone/m1-mvp | +| [hubless/milestone/m1-mvp](milestones/hubless-milestone-m1-mvp.json) | hubless/milestone/m0-5-cli-proof | +| [hubless/milestone/m2-github-sync](milestones/hubless-milestone-m2-github-sync.json) | hubless/milestone/m1-mvp | +| [hubless/milestone/m3-ide](milestones/hubless-milestone-m3-ide.json) | hubless/milestone/m2-github-sync | + +### Features + +| ID | Depends On | +|----|-------------| +| [hubless/feature/docs-components](features/hubless-feature-docs-components.json) | hubless/feature/repo-foundations | +| [hubless/feature/event-store-cli](features/hubless-feature-event-store-cli.json) | hubless/feature/repo-foundations | +| [hubless/feature/git-sync](features/hubless-feature-git-sync.json) | hubless/feature/event-store-cli | +| [hubless/feature/github-projection](features/hubless-feature-github-projection.json) | hubless/feature/git-sync | +| [hubless/feature/ide-integration](features/hubless-feature-ide-integration.json) | hubless/feature/tui-experience, hubless/feature/git-sync | +| [hubless/feature/tui-experience](features/hubless-feature-tui-experience.json) | hubless/feature/event-store-cli | + +### Stories + +| ID | Depends On | +|----|-------------| +| [hubless/story/0002](../issues/stories/hubless-story-0002.json) | hubless/story/0001 | +| [hubless/story/0003](../issues/stories/hubless-story-0003.json) | hubless/story/0002 | +| [hubless/story/0004](../issues/stories/hubless-story-0004.json) | hubless/story/0002 | +| [hubless/story/0005](../issues/stories/hubless-story-0005.json) | hubless/story/0004 | +| [hubless/story/0006](../issues/stories/hubless-story-0006.json) | hubless/story/0004, hubless/story/0003 | +| [hubless/story/0007](../issues/stories/hubless-story-0007.json) | hubless/story/0001 | + +### Tasks + +| ID | Depends On | +|----|-------------| +| [hubless/m0/task/0004](tasks/hubless-m0-task-0004.json) | hubless/m0/task/0001 | +| [hubless/m0/task/0005](tasks/hubless-m0-task-0005.json) | hubless/m0/task/0004 | +| 
[hubless/m1/task/0002](tasks/hubless-m1-task-0002.json) | hubless/m0/task/0001 | +| [hubless/m1/task/0003](tasks/hubless-m1-task-0003.json) | hubless/m1/task/0002 | + diff --git a/docs/components/roadmap/features-table.md b/docs/components/roadmap/features-table.md new file mode 100644 index 0000000..da23877 --- /dev/null +++ b/docs/components/roadmap/features-table.md @@ -0,0 +1,11 @@ + + +| ID | Title | Status | +|----|-------|--------| +| [hubless/feature/docs-components](features/hubless-feature-docs-components.json) | Markdown component library integration | PLANNED | +| [hubless/feature/event-store-cli](features/hubless-feature-event-store-cli.json) | Event store and CLI foundations | PLANNED | +| [hubless/feature/git-sync](features/hubless-feature-git-sync.json) | Robust Git synchronization | PLANNED | +| [hubless/feature/github-projection](features/hubless-feature-github-projection.json) | GitHub projection integration | PLANNED | +| [hubless/feature/ide-integration](features/hubless-feature-ide-integration.json) | Editor integrations via LSP | PLANNED | +| [hubless/feature/repo-foundations](features/hubless-feature-repo-foundations.json) | Repository foundations and automation | IN_PROGRESS | +| [hubless/feature/tui-experience](features/hubless-feature-tui-experience.json) | Magit-grade TUI experience | PLANNED | diff --git a/docs/components/roadmap/milestones-table.md b/docs/components/roadmap/milestones-table.md new file mode 100644 index 0000000..256869a --- /dev/null +++ b/docs/components/roadmap/milestones-table.md @@ -0,0 +1,10 @@ + + +| ID | Title | Status | +|----|-------|--------| +| [hubless/milestone/m0-5-cli-proof](milestones/hubless-milestone-m0-5-cli-proof.json) | CLI proof of concept | PLANNED | +| [hubless/milestone/m0-foundations](milestones/hubless-milestone-m0-foundations.json) | Repository foundations | IN_PROGRESS | +| [hubless/milestone/m1-5-enhancements](milestones/hubless-milestone-m1-5-enhancements.json) | Filters and activity 
enhancements | PLANNED | +| [hubless/milestone/m1-mvp](milestones/hubless-milestone-m1-mvp.json) | MVP release | PLANNED | +| [hubless/milestone/m2-github-sync](milestones/hubless-milestone-m2-github-sync.json) | GitHub synchronization | PLANNED | +| [hubless/milestone/m3-ide](milestones/hubless-milestone-m3-ide.json) | IDE integrations | PLANNED | diff --git a/docs/components/roadmap/progress.md b/docs/components/roadmap/progress.md new file mode 100644 index 0000000..dd8dd4c --- /dev/null +++ b/docs/components/roadmap/progress.md @@ -0,0 +1,8 @@ + + +| Artifact | Progress | Done | Total | +| --- | --- | --- | --- | +| Milestones | [----------] 0% | 0 | 6 | +| Features | [----------] 0% | 0 | 7 | +| Stories | [----------] 0% | 0 | 7 | +| Tasks | [##--------] 17% | 1 | 6 | diff --git a/docs/components/roadmap/stories-table.md b/docs/components/roadmap/stories-table.md new file mode 100644 index 0000000..e48944b --- /dev/null +++ b/docs/components/roadmap/stories-table.md @@ -0,0 +1,11 @@ + + +| ID | Title | Status | +|----|-------|--------| +| [hubless/story/0001](../issues/stories/hubless-story-0001.json) | As a maintainer I have documented workflows and automation | IN_PROGRESS | +| [hubless/story/0002](../issues/stories/hubless-story-0002.json) | As a developer I can manage issues via CLI commands | PLANNED | +| [hubless/story/0003](../issues/stories/hubless-story-0003.json) | As a developer I can browse and update work in the TUI | PLANNED | +| [hubless/story/0004](../issues/stories/hubless-story-0004.json) | As a contributor I can sync Hubless work via Git remotes | PLANNED | +| [hubless/story/0005](../issues/stories/hubless-story-0005.json) | As a maintainer I can mirror events to GitHub | PLANNED | +| [hubless/story/0006](../issues/stories/hubless-story-0006.json) | As a developer I can interact with Hubless from my IDE | PLANNED | +| [hubless/story/0007](../issues/stories/hubless-story-0007.json) | As a documentarian I can compose docs from reusable 
components | PLANNED | diff --git a/docs/reference/archive-structure.md b/docs/reference/archive-structure.md new file mode 100644 index 0000000..28334c8 --- /dev/null +++ b/docs/reference/archive-structure.md @@ -0,0 +1,14 @@ +# Archive Structure Overview + +Hubless keeps authoritative planning data in `@hubless/` and generates documentation from it. The archive-related outputs follow this structure: + +- `@hubless/issues/tasks/*.json` – canonical task records. When a task reaches `DONE` with the required badges, rerunning `make docs` updates all archive outputs. +- `docs/components/issues/archived-tasks.md` – Markdown table listing completed tasks with completion date and badges. Generated by `internal/docscomponents`. +- `docs/components/issues/archived-stories.md` – Placeholder table for completed stories (populates automatically once story JSON carries `DONE` statuses). +- `@hubless/issues/templates/archive.md` – Human-edited template that transcludes the archived snippets and additional guidance. +- `@hubless/issues/generated/archive.md` – Fully rendered archive document. Never edit directly; regenerate with `make docs`. +- `docs/components/issues/changelog.md` – Bullet list of recently completed tasks (reverse chronological) designed for release notes and changelog consumption. +- `docs/reference/release-notes.md` – Generated release notes incorporating the changelog bullets; feeds into `CHANGELOG.md`. +- `CHANGELOG.md` – Root changelog generated from `CHANGELOG.template.md`. Regenerated on each `make docs` run to stay aligned with the release notes. + +The generator uses only canonical JSON inputs (`@hubless/issues/tasks/*.json`, `@hubless/issues/stories/*.json`). Any manual edits should happen either in the JSON source or in the corresponding template (`@hubless/issues/templates/*.md`). 
diff --git a/docs/reference/docs-components-plan.md b/docs/reference/docs-components-plan.md new file mode 100644 index 0000000..1b5bd8b --- /dev/null +++ b/docs/reference/docs-components-plan.md @@ -0,0 +1,83 @@ +# Docs Component Library Plan + +## Integration Summary +- `cmd/docs-components` walks the `@hubless/` JSON (milestones, features, stories, tasks) and emits reusable Markdown fragments under `docs/components/`. +- Templates now live alongside the data (`@hubless/roadmap/templates/`, `@hubless/issues/templates/`) and transclude those fragments with the `markdown-transclusion` CLI. +- Rendered docs land in `@hubless/roadmap/generated/` and `@hubless/issues/generated/`; everything in `generated/` is overwritten by the pipeline, while JSON + templates remain human-edited. +- Shared snippets (progress, dependencies, tables, status summaries) can be embedded in any doc—`docs/PRD.md` and `docs/TechSpec.md` already import them. +- Release notes (`docs/reference/release-notes.md`) are generated from the same archive/changelog snippets for easy copy/paste into changelogs, and the root `CHANGELOG.md` is rebuilt from `CHANGELOG.template.md` on every `make docs` run. + +## Data Flow +1. **Generator** (`go run ./cmd/docs-components`) reads JSON, validates required fields, and writes Markdown snippets to `docs/components/...`. +2. **Transclusion** runs `markdown-transclusion` against each template, resolving `![[...]]` references into complete documents in `@hubless/**/generated/`. +3. Downstream docs (PRD, TechSpec, etc.) embed the same snippets so product and engineering specs share a single data source. + +## Directory Layout +- `@hubless/README.md` – quick guide on what’s generated vs. hand-edited. +- `@hubless/roadmap/` + - `milestones/*.json`, `features/*.json` – source data (edit these). + - `templates/` – Markdown shells with `![[...]]` includes (edit these). + - `generated/` – rendered Markdown (do **not** edit). 
+- `@hubless/issues/` + - `stories/*.json`, `tasks/*.json` – source data. + - `templates/`, `generated/` – same pattern as above. +- `docs/components/` + - `roadmap/milestones-table.md`, `.../stories-table.md` – tabular listings. + - `roadmap/progress.md` – completion bars across milestones/features/stories/tasks. + - `roadmap/dependencies.md` – dependency matrix for milestones → tasks. + - `roadmap/dependencies-graph.md` – Mermaid graph of in-repo dependencies. + - `issues/tasks-table.md` – task rollup. + - `issues/status-summary.md` – task counts by status. + - `issues/archived-stories.md`, `issues/archived-tasks.md` – completed work rollups feeding the archive template. + - `@hubless/issues/templates/archive.md` / `generated/archive.md` – archive overview assembled from the archived snippets. + - `issues/changelog.md` – release-ready bullet list of completed tasks (consumed by `docs/reference/release-notes.*`). +- `docs/reference/release-notes.template.md` / `docs/reference/release-notes.md` – generated release notes ready to drop into changelog entries. +- `docs/reference/palettes.json` (+ `docs/reference/palettes.schema.json`) – optional custom palette definitions merged with built-ins. + - See `docs/reference/archive-structure.md` for a deeper explanation of archive/changelog relationships. + - Downstream docs (PRD §9.2, TechSpec §10.2) embed the archived task rollup directly. + +## Running the Pipeline +1. Install or clone [`markdown-transclusion`](https://github.com/flyingrobots/markdown-transclusion) (Node ≥20). Build it locally if not installed globally. +2. Configure how to invoke the CLI: + ```bash + export MARKDOWN_TRANSCLUSION_BIN=markdown-transclusion # global install + # or + export MARKDOWN_TRANSCLUSION_BIN=node + export MARKDOWN_TRANSCLUSION_ARGS=/path/to/markdown-transclusion/dist/cli.js + ``` +3. 
From the repo root run: + ```bash + make docs # preferred entrypoint + # or run the script directly + ./scripts/render-docs.sh + ``` + Optional variables: + - `MARKDOWN_TRANSCLUSION_BASE` – override the base path passed to the CLI. + - `MARKDOWN_TRANSCLUSION_ARGS` – extra flags forwarded to the CLI. + Useful CLI flags: + - `--graph-direction` (LR/RL/TB/BT) tunes the dependency graph orientation. + - `--graph-clusters` groups nodes by type using Mermaid subgraphs. + - `--graph-palette` selects a Mermaid color palette (`evergreen`, `infrared`, `zerothrow`, or any palette declared in `docs/reference/palettes.json`). + - `--palette-file` points to an alternate palette JSON document (defaults to `docs/reference/palettes.json`). +4. Validate outputs: + ```bash + make docs-test # generator unit tests + make docs-verify # ensure transclusions fully resolved + ``` + +## Verification +- `go test ./internal/docscomponents` exercises the generator (fixtures & formatting guards). +- `go run ./cmd/docs-components --skip-transclusion` regenerates snippets without touching templates (helpful during development). +- `make docs` should leave only expected deltas under `docs/components/` and `@hubless/**/generated/`. +- `.github/workflows/docs.yml` enforces `make docs` + `make docs-test` on every push/PR and fails if generated files drift. + +## Extending the Library +1. Model the new component in Go (add a generator method and unit tests). +2. Emit the Markdown snippet under an appropriate `docs/components//` path. +3. Reference it from the relevant template(s) in `@hubless/**/templates/`. +4. Add the snippet to any docs that should surface the data. + +## Future Enhancements +- Render dependency graphs (Mermaid or Fang-based diagrams) once graph schemas stabilize. +- Expand the Makefile with grouped doc targets (e.g., `make docs/roadmap`, `make docs/tui`). +- Integrate the pipeline into CI so merges fail if generated docs drift from JSON. 
diff --git a/docs/reference/implementation-skeleton.md b/docs/reference/implementation-skeleton.md index 60f1aeb..c6137be 100644 --- a/docs/reference/implementation-skeleton.md +++ b/docs/reference/implementation-skeleton.md @@ -1,15 +1,18 @@ # Hubless Implementation Skeleton ## Document Control + - Version: 0.1 - Last updated: 2025-09-18 - Maintainer: Platform Engineering ## 1. Purpose -This reference collects scaffolding snippets for implementing Hubless using Go. It mirrors the architecture described in `docs/TechSpec.md` and provides minimal, compilable examples to accelerate prototyping. The code is illustrative and omits error handling and testing for brevity. + +This reference collects scaffolding snippets for implementing Hubless using Go. It mirrors the architecture described in `docs/TechSpec.md` and provides minimal examples to accelerate prototyping. The code is illustrative and may omit imports, helpers, error handling, and tests for brevity. ## 2. Project Layout -``` + +```bash hubless/ ├─ cmd/ │ └─ hubless/ @@ -35,6 +38,7 @@ hubless/ ``` ## 3. Module Definition (`go.mod`) + ```go module github.com/flyingrobots/hubless @@ -49,7 +53,9 @@ require ( ``` ## 4. Domain Layer + ### 4.1 Events (`internal/domain/events.go`) + ```go package domain @@ -78,6 +84,7 @@ type Event struct { ``` ### 4.2 Issue Aggregate (`internal/domain/issue.go`) + ```go package domain @@ -128,6 +135,7 @@ func Replay(id IssueID, events []Event) Issue { ``` ## 5. Application Layer (`internal/application/services.go`) + ```go package application @@ -180,6 +188,7 @@ func (s *Service) List(ctx context.Context) ([]IssueSummary, error) { ``` ## 6. Ports (`internal/ports/repository.go`) + ```go package ports @@ -192,6 +201,7 @@ type EventStore interface { ``` ## 7. Git Adapter (`internal/adapters/gitstore/git_store.go`) + ```go package gitstore @@ -215,6 +225,7 @@ func (s *Store) AppendEvent(ctx context.Context, evt domain.Event) (string, erro ``` ## 8. 
TUI Wiring (`internal/ui/tui/model.go`) + ```go package tui @@ -251,6 +262,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { ``` ## 9. Next Steps + - Flesh out unit tests for domain replay and adapters. - Expand the Git adapter with catalog and feed updates. - Integrate TUI commands with mutation operations (`CreateIssue`, `ChangeStatus`, `Comment`). diff --git a/docs/reference/palettes.json b/docs/reference/palettes.json new file mode 100644 index 0000000..4a64864 --- /dev/null +++ b/docs/reference/palettes.json @@ -0,0 +1,9 @@ +{ + "$schema": "./palettes.schema.json", + "quantum": { + "milestone": { "fill": "#121417", "stroke": "#00B3A4", "text": "#F5F7FB" }, + "feature": { "fill": "#5AA7F3", "stroke": "#00B3A4", "text": "#121417" }, + "story": { "fill": "#C7F36B", "stroke": "#5AA7F3", "text": "#121417" }, + "task": { "fill": "#B056A1", "stroke": "#00B3A4", "text": "#F5F7FB" } + } +} diff --git a/docs/reference/palettes.schema.json b/docs/reference/palettes.schema.json new file mode 100644 index 0000000..015f766 --- /dev/null +++ b/docs/reference/palettes.schema.json @@ -0,0 +1,45 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Docs Components Palette Schema", + "type": "object", + "additionalProperties": { + "type": "object", + "properties": { + "milestone": { + "$ref": "#/definitions/color" + }, + "feature": { + "$ref": "#/definitions/color" + }, + "story": { + "$ref": "#/definitions/color" + }, + "task": { + "$ref": "#/definitions/color" + } + }, + "required": ["milestone", "feature", "story", "task"], + "additionalProperties": false + }, + "definitions": { + "color": { + "type": "object", + "properties": { + "fill": { + "type": "string", + "pattern": "^#[0-9A-Fa-f]{6}$" + }, + "stroke": { + "type": "string", + "pattern": "^#[0-9A-Fa-f]{6}$" + }, + "text": { + "type": "string", + "pattern": "^#[0-9A-Fa-f]{6}$" + } + }, + "required": ["fill", "stroke", "text"], + "additionalProperties": false + } + } +} diff --git 
a/docs/reference/release-notes.md b/docs/reference/release-notes.md new file mode 100644 index 0000000..9aa2b4d --- /dev/null +++ b/docs/reference/release-notes.md @@ -0,0 +1,15 @@ +# Hubless Release Notes + +> Generated via `make docs`. Edit JSON/tasks, then rerun to refresh. + +## Recently Completed Tasks + + +- 2025-09-19 — **Evaluate markdown component library** (hubless/m0/task/0005) — Tested, Documented, Shipped + +## Completed Stories (For Reference) + + +| ID | Title | Completed Status | +| --- | --- | --- | +| — | — | — | diff --git a/docs/reference/release-notes.template.md b/docs/reference/release-notes.template.md new file mode 100644 index 0000000..de2fa8a --- /dev/null +++ b/docs/reference/release-notes.template.md @@ -0,0 +1,11 @@ +# Hubless Release Notes + +> Generated via `make docs`. Edit JSON/tasks, then rerun to refresh. + +## Recently Completed Tasks + +![[docs/components/issues/changelog.md]] + +## Completed Stories (For Reference) + +![[docs/components/issues/archived-stories.md]] diff --git a/docs/reference/rfcs/0001-release-automation.md b/docs/reference/rfcs/0001-release-automation.md new file mode 100644 index 0000000..c787011 --- /dev/null +++ b/docs/reference/rfcs/0001-release-automation.md @@ -0,0 +1,124 @@ +# RFC 0001: Release Automation CLI + +## Summary +Provide a Git-first release command that bumps versions, produces annotated (signed) tags from the generated release notes, and optionally pushes/publishes the results. The tool must integrate with existing changelog automation, support dry-runs, and remain safe to use in local or CI environments. + +## Goals +- Keep releases entirely Git-driven (tags and commits are the source of truth). +- Automate version bumping using conventional commits, with manual overrides. +- Default to signed annotated tags, with escape hatches when necessary. +- Allow optional version manifest updates (`VERSION` file) without making them mandatory. 
+- Support preflight checks (fmt/lint/test/docs) with configuration to skip when appropriate. +- Provide clear recovery guidance if any step fails. +- Make publishing to remotes (Git push, GitHub Releases in the future) opt-in. + +## Non-goals +- Automatic publishing to GitHub Releases in v1 (captured as a follow-up adapter). +- Full semantic-release parity. We only need basic semantic bumping and release note templating. +- Replacing existing CI release workflows; the CLI should integrate, not dictate. + +## CLI Design +``` +hubless release \ + [--patch | --minor | --major | --bump auto] \ + [--notes docs/reference/release-notes.md] \ + [--version-file VERSION] \ + [--tag-prefix v] \ + [--sign | --no-sign] \ + [--push []] \ + [--dry-run] [--no-edit] \ + [--skip-verify | --skip-fmt | --skip-lint | --skip-test | --skip-docs] +``` + +- `--bump auto` (default) inspects commits since the previous tag using conventional commit semantics: + - `BREAKING CHANGE` → major + - `feat` → minor + - otherwise → patch + - `--patch|--minor|--major` override the automatic decision. +- `--tag-prefix` defaults to `v` (e.g., `v1.2.3`). +- `--notes` points to the rendered release notes (defaults to `docs/reference/release-notes.md`). If the file is missing and `--allow-generate-notes` is set (future), the CLI can synthesize notes from commit messages. +- `--version-file` updates the specified manifest (e.g., `VERSION`) and commits it as `chore(release): vX.Y.Z` unless `--no-commit-version` is passed. +- `--sign` (default) runs `git tag -s`; `--no-sign` downgrades to `-a`. +- `--push` pushes the tag (and version commit if it exists). Default remote `origin`; allow overrides via `--push upstream`. +- `--dry-run` prints the resolved bump, notes, and tag command without executing changes. +- `--no-edit` skips launching `$EDITOR` for last-mile edits. +- `--skip-*` controls preflight checks. `--skip-verify` skips all; individual flags skip specific steps. + +## Workflow + +1. 
**Preflight** + - Ensure working tree is clean (`git status --porcelain` is empty, covering tracked and untracked files; `git diff --quiet` alone misses untracked). + - Run preflight checks unless skipped: + - `make fmt` + - `make lint` + - `make test` + - `make docs` + - After each check, confirm the worktree is still clean or abort. + +2. **Determine Version** + - Find the latest tag matching the configured prefix plus `\d+\.\d+\.\d+` (e.g., `^v\d+\.\d+\.\d+$` with the default `v` prefix). + - If `--patch|--minor|--major` specified, use that bump. + - Otherwise, compute bump using conventional commit analysis from previous tag to `HEAD`. + - Apply bump to previous version; default to `0.1.0` if no prior tag. + +3. **Assemble Notes** + - Load release notes from `--notes` (default `docs/reference/release-notes.md`). If missing, fail unless future `--generate-notes` is provided. + - Render notes through an optional Go template (future enhancement) or write raw notes to a temp file. + - Unless `--no-edit`, open `$EDITOR` for final tweaks. + +4. **Version Manifest (optional)** + - If `--version-file` provided: + - Update the file to the new version. + - Create a commit `chore(release): vX.Y.Z` (signed commit is optional; default unsigned, with `--sign-commit` to opt-in). + +5. **Create Tag** + - Create annotated tag (`git tag -s` or `-a` depending on flags) using the prepared notes. + - If the tag already exists, abort unless `--force-replace` and `--confirm` specified (dangerous). + +6. **Push (optional)** + - If `--push` provided, push the tag and the manifest commit (if created) to the specified remote (default `origin`). + +7. **Dry-run handling** + - Skip tag/commit creation, but still report intended version, tag, and note location. Optionally skip expensive checks if `--skip-verify` used. + +8. **Rollback Guidance** + - Document recovery steps: `git tag -d <tag>` and reset/checkout for the version manifest commit. + +## Implementation Plan + +- **`internal/release` service** + - Add commit parsing for conventional commits (lightweight parser). + - Add version bump logic and optional manifest update. 
+ - Support signed tags (`git tag -s`) with `--no-sign` fallback. + - Implement push logic via `git push <remote> <tag>` (and `git push <remote> HEAD` for manifest commit if present). + - Provide structured dry-run output for CI logs. + +- **CLI `cmd/release`** + - Map flags to service options. + - Handle `$EDITOR`, environment detection, and graceful error messaging. + - Provide `--help` with detailed flag descriptions. + +- **Makefile integration** + - Targets `make release VERSION=X.Y.Z` and `make release-dry VERSION=X.Y.Z` (with optional `NOTES`, `VERSION_FILE`, etc.). + +- **Tests & Validation** + - Unit tests for version bump inference, manifest updates, tag creation commands (mock os/exec). + - Docker-based integration test (`scripts/test-release-docker.sh`) to exercise the CLI in an isolated repo without remotes. + - Optional GitHub Action to run the docker test on PRs touching release tooling. + +## Documentation & Adoption +- Update `README.md` with release command usage, examples, and hooks. +- Expand `CONTRIBUTING.md` with release checklist (install hooks, ensure GPG configured, run `make release` flow). +- Provide recovery instructions in docs (`docs/reference/archive-structure.md` or new release guide). +- Track future adapters (GitHub Release, manifest syncing) in follow-up RFCs. + +## Open Questions +- Should auto-generated notes include commit sections (feat/fix/etc.) or rely purely on docs/reference output? +- Do we want to support tagging multiple modules (monorepo scenario) in the future? +- Should we add guardrails for tag naming (prefix enforcement, semantic validation) beyond the current regex? + +## Rollout +1. Implement bump inference, signing, manifest handling, and push flags. +2. Wire Docker release test into the quality GitHub Action. +3. Update docs and run the release command on a staged repo to build trust. +4. After team sign-off, mark the feature DONE and update the changelog via the new tooling. 
diff --git a/docs/reference/rfcs/FEEDBACK-0001-release-automation.md b/docs/reference/rfcs/FEEDBACK-0001-release-automation.md new file mode 100644 index 0000000..e0518b8 --- /dev/null +++ b/docs/reference/rfcs/FEEDBACK-0001-release-automation.md @@ -0,0 +1,11 @@ +# Feedback: RFC 0001 – Release Automation +Love the direction. A couple of places to sharpen before we merge: + +1. **Version source of truth** – Let's stick with tags as canonical. Optional VERSION file is fine, but release tooling should update it automatically when present so we don't drift. This also means the CLI needs a flag to skip the file update if a team doesn't use it. +2. **Bump inference** – Yes to conventional commits. I'd suggest `--bump auto` defaulting to conventional-commit scan over `prevTag..HEAD`, with `--patch|--minor|--major` as overrides. Call out that without conventional commits, users need to specify the bump explicitly. +3. **Signed tags** – Default to `git tag -s`. Provide `--no-sign` for edge cases but make sure we surface the GPG requirement up front. Maybe add a helper command to check GPG config or a doc link. +4. **Push semantics** – Agree tag creation should be local by default. A `--push` flag that pushes both the tag and optional version bump commit sounds right. Maybe `--push-origin <remote>` for flexibility. +5. **Skip controls / dry-run** – Current design always runs fmt/lint/test/docs. Keep that as default but add `--skip-verify`, `--skip-docs`, etc., so large repos can iterate faster. Dry run should still validate cleanliness but skip expensive steps when requested. +6. **Failure / rollback story** – Add a section describing how to recover if tag creation fails halfway (e.g., tag exists, GPG failure). A simple `git tag -d <tag>` plus re-run is likely enough, but let's document it. + +Address those notes and I’m +1 on landing the RFC. 
diff --git a/docs/reference/update-progress-algorithm.md b/docs/reference/update-progress-algorithm.md index e2f89ad..8eddca7 100644 --- a/docs/reference/update-progress-algorithm.md +++ b/docs/reference/update-progress-algorithm.md @@ -1,14 +1,17 @@ # Update Progress Algorithm ## Document Control + - Version: 0.1 - Last updated: 2025-09-18 - Source: Transcribed from `update_progress.py` ## 1. Purpose + This document captures the exact behavior of the original Python script that synchronized GitMind’s Features Ledger and README progress indicators. It exists so the algorithm can be reimplemented in other languages (e.g., Go) without referring back to the deleted script. ## 2. Inputs and Outputs + - **Inputs**: - Markdown ledger at `/docs/features/Features_Ledger.md`. - Optional `/README.md` status section. @@ -18,19 +21,13 @@ This document captures the exact behavior of the original Python script that syn - Updated README progress block when the document contains a `## 📊 Status` section. ## 3. Root Resolution + 1. Accept an optional `--root` flag. -2. Look for `GITMIND_ROOT` in the environment. -3. Use the script directory as a base and probe the following paths in order: - - `/git-mind` - - `/../git-mind` - - `` itself -4. For good measure, probe the current working directory variants: - - `/git-mind` - - `/../git-mind` - - `` -5. Resolve each candidate to an absolute path and return the first that contains `docs/features/Features_Ledger.md`. If none is found, exit with an error message. +2. Look for `HUBLESS_ROOT` in the environment. +3. `git rev-parse --show-toplevel` ## 4. Markdown Block Patterns + The script uses compiled regular expressions to locate fenced blocks: - `` - `` @@ -39,7 +36,9 @@ The script uses compiled regular expressions to locate fenced blocks: - README guard block: `` … ``. ## 5. Progress Bar Renderer + Given a percentage in the range `[0, 1]`, the script: + 1. Clamps the value. 2. Computes `filled = round(pct * width)` with `width = 40`. 3. 
Inserts an edge character `▓` if there is a fractional remainder and space for an additional cell. @@ -47,7 +46,9 @@ Given a percentage in the range `[0, 1]`, the script: 5. Appends a textual percentage (e.g., ` 72%`). ## 6. Group Progress Calculation + For each group block in the ledger: + 1. Identify the next Markdown table that starts with a header cell containing `Emoji`. 2. Parse the table header to locate the `Progress`, `KLoC`, and `Milestone` columns (case-insensitive substring match). 3. Iterate rows until a non-table line is hit. @@ -60,9 +61,11 @@ For each group block in the ledger: 8. Compute the weighted percentage or fallback to arithmetic mean if all weights are zero. ### Feature Tagging + The replacement block includes a `features=` footer where `` is the number of table rows processed for that group. ## 7. Milestone Aggregation + 1. Map milestone labels to canonical keys using: - `MVP → mvp` - `Alpha → alpha` @@ -74,7 +77,9 @@ The replacement block includes a `features=` footer where `` is th 5. Calculate overall progress as the weighted sum of milestone percentages using fixed weights `{mvp: 0.3, alpha: 0.3, beta: 0.2, v1: 0.2}`. ## 8. Ledger Mutation Steps + For every ledger run: + 1. Replace each group block with a fenced code block containing the new progress bar, a legend row, and the `features=` footer. 2. Update the overall section (``) with a progress bar and inline legend `MVP 70% | Alpha 55% | …`. 3. Replace each milestone block (`progress-mvp`, etc.) with the gated percentage. @@ -82,6 +87,7 @@ For every ledger run: 5. Write the ledger back to disk only if changes were detected. ## 9. Tasklist Parsing + 1. Locate the first occurrence of `## Tasklist` (case-insensitive). 2. Inspect subsequent lines for unchecked tasks (`- [ ] …`). 3. Detect milestone tags in either `[tag]` or `(tag)` prefixes. Tags may include dotted forms (e.g., `MVP.core`); every segment is checked against the milestone label map. 
@@ -89,29 +95,35 @@ For every ledger run: 5. Return a mapping `{mvp|alpha|beta|v1 → list[str]}` where list entries omit the tag wrapper but preserve the checkbox syntax. ## 10. README Update Logic + 1. Skip if `/README.md` does not exist or lacks the `## 📊 Status` heading. 2. Ensure the status section contains a guard block. If missing, insert the placeholder: - ``` + + ````markdown ```text Feature progress to be updated via hubless/update_progress.py ``` - ``` + ```` + 3. When an overall percentage is available, replace the guard block with the rendered progress bar. 4. Collapse runs of more than two blank lines to keep the document tidy. ## 11. Execution Flow -``` + +```bash parse_args() root = resolve_root(args.root) configure_paths(root) # sets global ROOT, LEDGER, README overall, milestone_progress, tasks = update_ledger() update_readme(overall) ``` + The script exits with code `0` on success and prints errors to stderr before exiting non-zero when path resolution fails or file operations raise exceptions. ## 12. Reimplementation Checklist + - Re-create the regex guards exactly as listed above to ensure idempotent updates. - Preserve task defaulting to MVP when no tag is provided. - Honor the gating rule for milestone percentages before calculating the overall score. 
diff --git a/go.mod b/go.mod index c8d3008..bd9effb 100644 --- a/go.mod +++ b/go.mod @@ -1,3 +1,28 @@ module github.com/flyingrobots/hubless go 1.25.1 + +require ( + github.com/76creates/stickers v1.4.0 + github.com/charmbracelet/bubbletea v0.27.0 +) + +require ( + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/charmbracelet/lipgloss v0.10.0 // indirect + github.com/charmbracelet/x/ansi v0.8.0 // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/reflow v0.3.0 // indirect + github.com/muesli/termenv v0.15.2 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + golang.org/x/sync v0.13.0 // indirect + golang.org/x/sys v0.33.0 // indirect + golang.org/x/text v0.24.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..b4ab6a5 --- /dev/null +++ b/go.sum @@ -0,0 +1,43 @@ +github.com/76creates/stickers v1.4.0 h1:UD1ShH0mndxzvuyO4Ho4Ct3+EB6FnTbCRecwPs/WFSo= +github.com/76creates/stickers v1.4.0/go.mod h1:OnGyCp42wnTwuZv2Ewh4dkvMuaiWMoH4I80yU2IJVmI= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= +github.com/charmbracelet/bubbletea v0.27.0 h1:Mznj+vvYuYagD9Pn2mY7fuelGvP0HAXtZYGgRBCbHvU= +github.com/charmbracelet/bubbletea v0.27.0/go.mod h1:5MdP9XH6MbQkgGhnlxUqCNmBXf9I74KRQ8HIidRxV1Y= +github.com/charmbracelet/lipgloss v0.10.0 
h1:KWeXFSexGcfahHX+54URiZGkBFazf70JNMtwg/AFW3s= +github.com/charmbracelet/lipgloss v0.10.0/go.mod h1:Wig9DSfvANsxqkRsqj6x87irdy123SR4dOXlKa91ciE= +github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE= +github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q= +github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= +github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= +github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= +github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= +github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= +github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo= +github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= 
+github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= +github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s= +github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8= +github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo= +github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8= +github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= +golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= diff --git a/internal/docscomponents/generator.go b/internal/docscomponents/generator.go new file mode 100644 index 0000000..1a92ebe --- /dev/null +++ b/internal/docscomponents/generator.go @@ -0,0 +1,1242 @@ +package docscomponents + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "math" + "os" + "path" + "path/filepath" + "sort" + "strings" +) + +const generatedNotice = "" + +// Generator creates reusable Markdown snippets from structured planning data. 
+type GeneratorOptions struct { + GraphDirection string + GraphClusters bool + GraphPalette string + PaletteFile string +} + +type Generator struct { + repoRoot string + componentsDir string + options GeneratorOptions + palettes map[string]map[string]paletteColor +} + +// NewGenerator creates a Generator configured to write component snippets for the repository at +// repoRoot. It resolves and stores absolute paths for repoRoot and componentsDir (defaulting +// componentsDir to repoRoot/docs/components when empty), ensures the components directory exists, +// loads the built-in palettes and merges palettes from options.PaletteFile if provided, and +// normalizes the supplied GeneratorOptions. Returns an initialized Generator or an error if input +// validation, path resolution, directory creation, or palette loading fails. +func NewGenerator(repoRoot, componentsDir string, options GeneratorOptions) (*Generator, error) { + if repoRoot == "" { + return nil, errors.New("repoRoot is required") + } + + absRepoRoot, err := filepath.Abs(repoRoot) + if err != nil { + return nil, fmt.Errorf("resolve repo root: %w", err) + } + + absComponentsDir := componentsDir + if absComponentsDir == "" { + absComponentsDir = filepath.Join(absRepoRoot, "docs", "components") + } + absComponentsDir, err = filepath.Abs(absComponentsDir) + if err != nil { + return nil, fmt.Errorf("resolve components dir: %w", err) + } + + if err := os.MkdirAll(absComponentsDir, 0o755); err != nil { + return nil, fmt.Errorf("ensure components dir: %w", err) + } + + palettes := defaultPalettes() + if options.PaletteFile != "" { + palettePath := options.PaletteFile + if !filepath.IsAbs(palettePath) { + palettePath = filepath.Join(absRepoRoot, palettePath) + } + if err := mergePaletteFile(palettePath, palettes); err != nil { + return nil, fmt.Errorf("load palette file: %w", err) + } + } + + options = normalizeOptions(options, palettes) + + return &Generator{repoRoot: absRepoRoot, componentsDir: absComponentsDir, 
options: options, palettes: palettes}, nil +} + +// RepoRoot exposes the absolute repository root path used by the generator. +func (g *Generator) RepoRoot() string { + return g.repoRoot +} + +// ComponentsDir exposes the absolute components directory path. +func (g *Generator) ComponentsDir() string { + return g.componentsDir +} + +// Generate produces the default set of Markdown snippets. +func (g *Generator) Generate(ctx context.Context) error { + roadmap, err := g.loadRoadmapArtifacts() + if err != nil { + return err + } + + tasks, err := g.readTaskRecords(filepath.Join(g.repoRoot, "@hubless", "issues", "tasks")) + if err != nil { + return fmt.Errorf("load task records: %w", err) + } + + if err := g.generateRoadmapTables(ctx, roadmap); err != nil { + return err + } + + if err := g.generateProgressOverview(ctx, roadmap, tasks); err != nil { + return err + } + + if err := g.generateDependencySummary(ctx, roadmap, tasks); err != nil { + return err + } + + if err := g.generateDependencyGraph(ctx, roadmap, tasks); err != nil { + return err + } + + if err := g.generateTaskTable(ctx, tasks); err != nil { + return err + } + + if err := g.generateTaskStatusSummary(ctx, tasks); err != nil { + return err + } + + if err := g.generateArchivedStories(ctx, roadmap); err != nil { + return err + } + + if err := g.generateArchivedTasks(ctx, tasks); err != nil { + return err + } + + if err := g.generateChangelog(ctx, tasks); err != nil { + return err + } + + return nil +} + +type artifactRecord struct { + ID string `json:"id"` + Title string `json:"title"` + Status string `json:"status"` + Dependencies []string `json:"dependencies"` +} + +type taskRecord struct { + ID string `json:"id"` + Title string `json:"title"` + Status string `json:"status"` + Owner *string `json:"owner"` + Labels []string `json:"labels"` + Badges []string `json:"badges"` + UpdatedAt *string `json:"updated_at"` + Dependencies []string `json:"dependencies"` +} + +type roadmapArtifacts struct { + Milestones 
[]recordWithPath[artifactRecord] + Features []recordWithPath[artifactRecord] + Stories []recordWithPath[artifactRecord] +} + +func (g *Generator) generateRoadmapTables(ctx context.Context, data roadmapArtifacts) error { + tables := []struct { + name string + records []recordWithPath[artifactRecord] + linkPrefix string + outputFilename string + }{ + { + name: "milestones", + records: data.Milestones, + linkPrefix: "milestones", + outputFilename: "milestones-table.md", + }, + { + name: "features", + records: data.Features, + linkPrefix: "features", + outputFilename: "features-table.md", + }, + { + name: "stories", + records: data.Stories, + linkPrefix: "../issues/stories", + outputFilename: "stories-table.md", + }, + } + + for _, table := range tables { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + builder := &strings.Builder{} + builder.WriteString(generatedNotice) + builder.WriteString("\n") + builder.WriteString("| ID | Title | Status |\n") + builder.WriteString("|----|-------|--------|\n") + + for _, record := range table.records { + link := path.Join(table.linkPrefix, filepath.Base(record.sourcePath)) + fmt.Fprintf(builder, "| [%s](%s) | %s | %s |\n", record.data.ID, link, record.data.Title, record.data.Status) + } + + outputPath := filepath.Join(g.componentsDir, "roadmap", table.outputFilename) + if err := g.writeFile(outputPath, builder.String()); err != nil { + return fmt.Errorf("write %s table: %w", table.name, err) + } + } + + return nil +} + +func (g *Generator) generateProgressOverview(ctx context.Context, data roadmapArtifacts, tasks []recordWithPath[taskRecord]) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + builder := &strings.Builder{} + builder.WriteString(generatedNotice) + builder.WriteString("\n") + builder.WriteString("| Artifact | Progress | Done | Total |\n") + builder.WriteString("| --- | --- | --- | --- |\n") + + entries := []struct { + label string + done int + total int + }{ + { + 
label: "Milestones", + done: countDoneArtifacts(data.Milestones), + total: len(data.Milestones), + }, + { + label: "Features", + done: countDoneArtifacts(data.Features), + total: len(data.Features), + }, + { + label: "Stories", + done: countDoneArtifacts(data.Stories), + total: len(data.Stories), + }, + { + label: "Tasks", + done: countDoneTasks(tasks), + total: len(tasks), + }, + } + + for _, entry := range entries { + progress := renderProgressBar(entry.done, entry.total) + fmt.Fprintf(builder, "| %s | %s | %d | %d |\n", entry.label, progress, entry.done, entry.total) + } + + outputPath := filepath.Join(g.componentsDir, "roadmap", "progress.md") + if err := g.writeFile(outputPath, builder.String()); err != nil { + return fmt.Errorf("write roadmap progress: %w", err) + } + + return nil +} + +func (g *Generator) generateDependencySummary(ctx context.Context, data roadmapArtifacts, tasks []recordWithPath[taskRecord]) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + sections := []struct { + heading string + rows []dependencyRow + }{ + { + heading: "Milestones", + rows: dependencyRowsFromArtifacts(data.Milestones, "milestones"), + }, + { + heading: "Features", + rows: dependencyRowsFromArtifacts(data.Features, "features"), + }, + { + heading: "Stories", + rows: dependencyRowsFromArtifacts(data.Stories, "../issues/stories"), + }, + { + heading: "Tasks", + rows: dependencyRowsFromTasks(tasks, "tasks"), + }, + } + + builder := &strings.Builder{} + builder.WriteString(generatedNotice) + builder.WriteString("\n") + + for _, section := range sections { + builder.WriteString("### " + section.heading + "\n\n") + builder.WriteString("| ID | Depends On |\n") + builder.WriteString("|----|-------------|\n") + + if len(section.rows) == 0 { + builder.WriteString("| — | None recorded |\n\n") + continue + } + + for _, row := range section.rows { + fmt.Fprintf(builder, "| [%s](%s) | %s |\n", row.id, row.link, formatList(row.deps)) + } + 
builder.WriteString("\n") + } + + outputPath := filepath.Join(g.componentsDir, "roadmap", "dependencies.md") + if err := g.writeFile(outputPath, builder.String()); err != nil { + return fmt.Errorf("write dependencies summary: %w", err) + } + + return nil +} + +func (g *Generator) generateDependencyGraph(ctx context.Context, data roadmapArtifacts, tasks []recordWithPath[taskRecord]) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + var all []typedRecord + + for _, record := range data.Milestones { + all = append(all, typedRecord{ + id: record.data.ID, + title: record.data.Title, + typeLabel: "Milestone", + deps: cleanValues(record.data.Dependencies), + }) + } + for _, record := range data.Features { + all = append(all, typedRecord{ + id: record.data.ID, + title: record.data.Title, + typeLabel: "Feature", + deps: cleanValues(record.data.Dependencies), + }) + } + for _, record := range data.Stories { + all = append(all, typedRecord{ + id: record.data.ID, + title: record.data.Title, + typeLabel: "Story", + deps: cleanValues(record.data.Dependencies), + }) + } + for _, record := range tasks { + all = append(all, typedRecord{ + id: record.data.ID, + title: record.data.Title, + typeLabel: "Task", + deps: cleanValues(record.data.Dependencies), + }) + } + + if len(all) == 0 { + return nil + } + + nodeIDs := map[string]string{} + labels := map[string]string{} + + for idx, record := range all { + if record.id == "" { + continue + } + nodeName := fmt.Sprintf("n%d", idx) + nodeIDs[record.id] = nodeName + labels[nodeName] = escapeMermaidLabel(fmt.Sprintf("%s\\n%s", record.typeLabel, record.title)) + } + + edges := make([][2]string, 0) + for _, record := range all { + src, ok := nodeIDs[record.id] + if !ok { + continue + } + for _, dep := range record.deps { + if dst, ok := nodeIDs[dep]; ok { + edges = append(edges, [2]string{src, dst}) + } + } + } + + direction := strings.ToUpper(strings.TrimSpace(g.options.GraphDirection)) + builder := 
&strings.Builder{} + builder.WriteString(generatedNotice) + builder.WriteString("\n") + builder.WriteString("```mermaid\n") + builder.WriteString(fmt.Sprintf("graph %s\n", direction)) + + nodeNames := make([]string, 0, len(labels)) + for node := range labels { + nodeNames = append(nodeNames, node) + } + sort.Strings(nodeNames) + + for _, node := range nodeNames { + label := labels[node] + fmt.Fprintf(builder, " %s[\"%s\"]\n", node, label) + } + + palette := g.palettes[g.options.GraphPalette] + if palette == nil { + palette = g.palettes["evergreen"] + } + classNames := make([]string, 0, len(palette)) + for className := range palette { + classNames = append(classNames, className) + } + sort.Strings(classNames) + for _, className := range classNames { + color := palette[className] + fmt.Fprintf(builder, " classDef %s fill:%s,stroke:%s,color:%s,stroke-width:1px;\n", className, color.Fill, color.Stroke, color.Text) + } + + if g.options.GraphClusters { + for _, cluster := range orderedTypes { + nodes := collectNodesByType(cluster, all, nodeIDs) + if len(nodes) == 0 { + continue + } + fmt.Fprintf(builder, " subgraph %s\n", cluster) + fmt.Fprintf(builder, " direction %s\n", direction) + for _, node := range nodes { + fmt.Fprintf(builder, " %s\n", node) + } + builder.WriteString(" end\n") + } + } + + classAssignments := make(map[string]string) + for _, record := range all { + if node, ok := nodeIDs[record.id]; ok { + className := strings.ToLower(record.typeLabel) + if _, ok := palette[className]; ok { + classAssignments[node] = className + } + } + } + + assignmentNodes := make([]string, 0, len(classAssignments)) + for node := range classAssignments { + assignmentNodes = append(assignmentNodes, node) + } + sort.Strings(assignmentNodes) + for _, node := range assignmentNodes { + fmt.Fprintf(builder, " class %s %s;\n", node, classAssignments[node]) + } + + for _, edge := range edges { + fmt.Fprintf(builder, " %s --> %s\n", edge[0], edge[1]) + } + + builder.WriteString("```\n") 
+ + outputPath := filepath.Join(g.componentsDir, "roadmap", "dependencies-graph.md") + if err := g.writeFile(outputPath, builder.String()); err != nil { + return fmt.Errorf("write dependency graph: %w", err) + } + + return nil +} + +func (g *Generator) generateTaskTable(ctx context.Context, records []recordWithPath[taskRecord]) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + builder := &strings.Builder{} + builder.WriteString(generatedNotice) + builder.WriteString("\n") + builder.WriteString("| ID | Title | Status | Owner | Labels | Badges | Updated |\n") + builder.WriteString("| --- | --- | --- | --- | --- | --- | --- |\n") + + for _, record := range records { + owner := "_unassigned_" + if record.data.Owner != nil && strings.TrimSpace(*record.data.Owner) != "" { + owner = *record.data.Owner + } + + labels := formatList(record.data.Labels) + badges := formatList(record.data.Badges) + + updated := "—" + if record.data.UpdatedAt != nil && strings.TrimSpace(*record.data.UpdatedAt) != "" { + updated = *record.data.UpdatedAt + } + + link := path.Join("tasks", filepath.Base(record.sourcePath)) + fmt.Fprintf( + builder, + "| [%s](%s) | %s | %s | %s | %s | %s | %s |\n", + record.data.ID, + link, + record.data.Title, + record.data.Status, + owner, + labels, + badges, + updated, + ) + } + + outputPath := filepath.Join(g.componentsDir, "issues", "tasks-table.md") + if err := g.writeFile(outputPath, builder.String()); err != nil { + return fmt.Errorf("write tasks table: %w", err) + } + + return nil +} + +func (g *Generator) generateTaskStatusSummary(ctx context.Context, records []recordWithPath[taskRecord]) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + counts := map[string]int{} + for _, record := range records { + status := strings.TrimSpace(record.data.Status) + if status == "" { + status = "UNKNOWN" + } + counts[status]++ + } + + statuses := make([]string, 0, len(counts)) + for status := range counts { + 
statuses = append(statuses, status) + } + + sort.Slice(statuses, func(i, j int) bool { + ri := statusRank(statuses[i]) + rj := statusRank(statuses[j]) + if ri != rj { + return ri < rj + } + return strings.ToLower(statuses[i]) < strings.ToLower(statuses[j]) + }) + + builder := &strings.Builder{} + builder.WriteString(generatedNotice) + builder.WriteString("\n") + builder.WriteString("| Status | Count |\n") + builder.WriteString("| --- | --- |\n") + + for _, status := range statuses { + fmt.Fprintf(builder, "| %s | %d |\n", status, counts[status]) + } + + outputPath := filepath.Join(g.componentsDir, "issues", "status-summary.md") + if err := g.writeFile(outputPath, builder.String()); err != nil { + return fmt.Errorf("write task status summary: %w", err) + } + + return nil +} + +func (g *Generator) generateArchivedStories(ctx context.Context, data roadmapArtifacts) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + rows := make([]recordWithPath[artifactRecord], 0) + for _, record := range data.Stories { + if isDoneStatus(record.data.Status) { + rows = append(rows, record) + } + } + + builder := &strings.Builder{} + builder.WriteString(generatedNotice) + builder.WriteString("\n") + builder.WriteString("| ID | Title | Completed Status |\n") + builder.WriteString("| --- | --- | --- |\n") + + if len(rows) == 0 { + builder.WriteString("| — | — | — |\n") + } else { + for _, record := range rows { + fmt.Fprintf(builder, "| %s | %s | %s |\n", record.data.ID, record.data.Title, record.data.Status) + } + } + + outputPath := filepath.Join(g.componentsDir, "issues", "archived-stories.md") + if err := g.writeFile(outputPath, builder.String()); err != nil { + return fmt.Errorf("write archived stories: %w", err) + } + + return nil +} + +func (g *Generator) generateArchivedTasks(ctx context.Context, records []recordWithPath[taskRecord]) error { + select { + case <-ctx.Done(): + return ctx.Err() + default: + } + + rows := make([]recordWithPath[taskRecord], 
0) + for _, record := range records { + if isDoneStatus(record.data.Status) { + rows = append(rows, record) + } + } + + builder := &strings.Builder{} + builder.WriteString(generatedNotice) + builder.WriteString("\n") + builder.WriteString("| ID | Title | Completed On | Badges |\n") + builder.WriteString("| --- | --- | --- | --- |\n") + + if len(rows) == 0 { + builder.WriteString("| — | — | — | — |\n") + } else { + for _, record := range rows { + completed := "—" + if record.data.UpdatedAt != nil && strings.TrimSpace(*record.data.UpdatedAt) != "" { + completed = *record.data.UpdatedAt + } + fmt.Fprintf( + builder, + "| %s | %s | %s | %s |\n", + record.data.ID, + record.data.Title, + completed, + formatList(record.data.Badges), + ) + } + } + + outputPath := filepath.Join(g.componentsDir, "issues", "archived-tasks.md") + if err := g.writeFile(outputPath, builder.String()); err != nil { + return fmt.Errorf("write archived tasks: %w", err) + } + + return nil +} + +type dependencyRow struct { + id string + link string + deps []string +} + +type typedRecord struct { + id string + title string + typeLabel string + deps []string +} + +type recordWithPath[T any] struct { + sourcePath string + data T +} + +func (g *Generator) loadRoadmapArtifacts() (roadmapArtifacts, error) { + var result roadmapArtifacts + + milestones, err := g.readArtifactRecords(filepath.Join(g.repoRoot, "@hubless", "roadmap", "milestones")) + if err != nil { + return result, fmt.Errorf("load milestones: %w", err) + } + + features, err := g.readArtifactRecords(filepath.Join(g.repoRoot, "@hubless", "roadmap", "features")) + if err != nil { + return result, fmt.Errorf("load features: %w", err) + } + + stories, err := g.readArtifactRecords(filepath.Join(g.repoRoot, "@hubless", "issues", "stories")) + if err != nil { + return result, fmt.Errorf("load stories: %w", err) + } + + result.Milestones = milestones + result.Features = features + result.Stories = stories + + return result, nil +} + +func (g 
*Generator) readArtifactRecords(dir string) ([]recordWithPath[artifactRecord], error) { + if _, err := os.Stat(dir); err != nil { + return nil, err + } + + files, err := filepath.Glob(filepath.Join(dir, "*.json")) + if err != nil { + return nil, fmt.Errorf("glob %s: %w", dir, err) + } + + if len(files) == 0 { + return nil, fmt.Errorf("no JSON files found in %s", dir) + } + + sort.Strings(files) + + results := make([]recordWithPath[artifactRecord], 0, len(files)) + for _, file := range files { + data, err := os.ReadFile(file) + if err != nil { + return nil, fmt.Errorf("read %s: %w", file, err) + } + + var record artifactRecord + if err := json.Unmarshal(data, &record); err != nil { + return nil, fmt.Errorf("decode %s: %w", file, err) + } + + if record.ID == "" || record.Title == "" || record.Status == "" { + return nil, fmt.Errorf("record %s missing required fields", file) + } + + results = append(results, recordWithPath[artifactRecord]{sourcePath: file, data: record}) + } + + return results, nil +} + +func (g *Generator) readTaskRecords(dir string) ([]recordWithPath[taskRecord], error) { + if _, err := os.Stat(dir); err != nil { + return nil, err + } + + files, err := filepath.Glob(filepath.Join(dir, "*.json")) + if err != nil { + return nil, fmt.Errorf("glob %s: %w", dir, err) + } + + if len(files) == 0 { + return nil, fmt.Errorf("no JSON files found in %s", dir) + } + + sort.Strings(files) + + results := make([]recordWithPath[taskRecord], 0, len(files)) + for _, file := range files { + data, err := os.ReadFile(file) + if err != nil { + return nil, fmt.Errorf("read %s: %w", file, err) + } + + var record taskRecord + if err := json.Unmarshal(data, &record); err != nil { + return nil, fmt.Errorf("decode %s: %w", file, err) + } + + if record.ID == "" || record.Title == "" || record.Status == "" { + return nil, fmt.Errorf("task %s missing required fields", file) + } + + results = append(results, recordWithPath[taskRecord]{sourcePath: file, data: record}) + } + + 
return results, nil +} + +// dependencyRowsFromArtifacts builds dependency rows for artifacts. +// It examines each artifact record, cleans its Dependencies with cleanValues, +// and for records that have one or more dependencies appends a dependencyRow +// containing the artifact ID, a link formed by joining linkPrefix with the +// basename of the record's sourcePath, and the cleaned dependency list. +// Records without dependencies are omitted from the returned slice. +func dependencyRowsFromArtifacts(records []recordWithPath[artifactRecord], linkPrefix string) []dependencyRow { + rows := make([]dependencyRow, 0, len(records)) + for _, record := range records { + deps := cleanValues(record.data.Dependencies) + if len(deps) == 0 { + continue + } + rows = append(rows, dependencyRow{ + id: record.data.ID, + link: path.Join(linkPrefix, filepath.Base(record.sourcePath)), + deps: deps, + }) + } + + return rows +} + +// dependencyRowsFromTasks builds dependencyRow entries for each task record that has +// one or more dependencies. The returned rows contain the task ID, a link formed by +// joining linkPrefix with the task record's source file base name, and the cleaned +// list of dependency IDs. Records without dependencies are omitted. +func dependencyRowsFromTasks(records []recordWithPath[taskRecord], linkPrefix string) []dependencyRow { + rows := make([]dependencyRow, 0) + for _, record := range records { + deps := cleanValues(record.data.Dependencies) + if len(deps) == 0 { + continue + } + rows = append(rows, dependencyRow{ + id: record.data.ID, + link: path.Join(linkPrefix, filepath.Base(record.sourcePath)), + deps: deps, + }) + } + + return rows +} + +// countDoneArtifacts returns the number of artifact records whose Status is considered completed. +// It uses isDoneStatus to determine completion (e.g., DONE, COMPLETED, COMPLETE, SHIPPED, case-insensitive). 
+func countDoneArtifacts(records []recordWithPath[artifactRecord]) int {
+	count := 0
+	for _, record := range records {
+		if isDoneStatus(record.data.Status) {
+			count++
+		}
+	}
+	return count
+}
+
+// countDoneTasks returns the number of task records whose Status is considered
+// completed. It uses isDoneStatus to determine completion
+// (e.g., DONE, COMPLETED, COMPLETE, SHIPPED).
+func countDoneTasks(records []recordWithPath[taskRecord]) int {
+	count := 0
+	for _, record := range records {
+		if isDoneStatus(record.data.Status) {
+			count++
+		}
+	}
+	return count
+}
+
+// paletteColor holds the fill, stroke, and text colors applied to one class of
+// graph node (milestone, feature, story, or task).
+type paletteColor struct {
+	Fill   string
+	Stroke string
+	Text   string
+}
+
+// defaultPalettes returns the built-in color palettes used for graph node styling.
+//
+// The return value is a map from palette name to a map of class names
+// ("milestone", "feature", "story", "task") to paletteColor. The function
+// returns a deep copy of the internal base definitions so callers can modify
+// the result without affecting the originals.
+func defaultPalettes() map[string]map[string]paletteColor {
+	base := map[string]map[string]paletteColor{
+		"evergreen": {
+			"milestone": {Fill: "#2C5F2D", Stroke: "#C86E3B", Text: "#F4F1E8"},
+			"feature":   {Fill: "#6B9F7F", Stroke: "#2C5F2D", Text: "#F4F1E8"},
+			"story":     {Fill: "#E9B872", Stroke: "#C86E3B", Text: "#0E1111"},
+			"task":      {Fill: "#C86E3B", Stroke: "#2C5F2D", Text: "#F4F1E8"},
+		},
+		"infrared": {
+			"milestone": {Fill: "#1A1C23", Stroke: "#FF2A6D", Text: "#B9C1D6"},
+			"feature":   {Fill: "#6C63FF", Stroke: "#27F0C7", Text: "#FFFFFF"},
+			"story":     {Fill: "#0A0B0F", Stroke: "#FF2A6D", Text: "#FFFFFF"},
+			"task":      {Fill: "#27F0C7", Stroke: "#6C63FF", Text: "#0A0B0F"},
+		},
+		"zerothrow": {
+			"milestone": {Fill: "#141820", Stroke: "#10B9D8", Text: "#D8DEE9"},
+			"feature":   {Fill: "#0D0F12", Stroke: "#64D6E8", Text: "#D8DEE9"},
+			"story":     {Fill: "#10B9D8", Stroke: "#A3E635", Text: "#0D0F12"},
+			"task":      {Fill: "#A3E635", Stroke: "#10B9D8", Text: "#0D0F12"},
+		},
+	}
+
+	// Deep copy so callers mutating the result never corrupt the base table.
+	cloned := make(map[string]map[string]paletteColor, len(base))
+	for name, classes := range base {
+		copyClasses := make(map[string]paletteColor, len(classes))
+		for className, color := range classes {
+			copyClasses[className] = color
+		}
+		cloned[name] = copyClasses
+	}
+	return cloned
+}
+
+// rawPaletteColor mirrors the on-disk JSON shape of one palette class entry.
+type rawPaletteColor struct {
+	Fill   string `json:"fill"`
+	Stroke string `json:"stroke"`
+	Text   string `json:"text"`
+}
+
+// mergePaletteFile reads a JSON palette file at path and merges its palette
+// definitions into the provided palettes map.
+//
+// The file may contain multiple palette objects keyed by name; a top-level
+// "$schema" key is ignored. Palette and class names are normalized to lower
+// case and trimmed. Each palette class must be one of the known classes
+// (milestone, feature, story, task) and must include non-empty `fill`,
+// `stroke`, and `text` color values. Missing file is not an error (returns
+// nil). On success the palettes map is mutated to include or update the
+// palettes from the file; on failure a descriptive error is returned.
+func mergePaletteFile(path string, palettes map[string]map[string]paletteColor) error {
+	data, err := os.ReadFile(path)
+	if err != nil {
+		if errors.Is(err, os.ErrNotExist) {
+			return nil
+		}
+		return err
+	}
+
+	var fileData map[string]json.RawMessage
+	if err := json.Unmarshal(data, &fileData); err != nil {
+		return fmt.Errorf("parse palette file: %w", err)
+	}
+
+	for paletteName, payload := range fileData {
+		if paletteName == "$schema" {
+			continue
+		}
+		name := strings.ToLower(strings.TrimSpace(paletteName))
+		if name == "" {
+			return fmt.Errorf("palette name cannot be empty")
+		}
+		existing := palettes[name]
+		if existing == nil {
+			existing = make(map[string]paletteColor)
+			palettes[name] = existing
+		}
+		var classes map[string]rawPaletteColor
+		if err := json.Unmarshal(payload, &classes); err != nil {
+			return fmt.Errorf("parse palette %q: %w", paletteName, err)
+		}
+		for className, color := range classes {
+			key := strings.ToLower(strings.TrimSpace(className))
+			if !isKnownPaletteClass(key) {
+				return fmt.Errorf("unknown palette class %q in palette %q", className, paletteName)
+			}
+			if strings.TrimSpace(color.Fill) == "" || strings.TrimSpace(color.Stroke) == "" || strings.TrimSpace(color.Text) == "" {
+				return fmt.Errorf("palette %q class %q must include fill, stroke, and text colors", paletteName, className)
+			}
+			existing[key] = paletteColor{
+				Fill:   color.Fill,
+				Stroke: color.Stroke,
+				Text:   color.Text,
+			}
+		}
+	}
+
+	return nil
+}
+
+// renderProgressBar returns a fixed-width textual progress bar (10 units) and a
+// percentage for the given done/total pair.
+//
+// If total is <= 0 the function returns a zeroed bar "[----------] 0%". The
+// progress ratio is clamped to [0,1]; the number of filled units is computed by
+// rounding ratio*10 and the percentage is ratio*100 rounded to the nearest
+// integer.
+func renderProgressBar(done, total int) string {
+	const width = 10
+	if total <= 0 {
+		return "[----------] 0%"
+	}
+
+	ratio := float64(done) / float64(total)
+	if ratio < 0 {
+		ratio = 0
+	}
+	if ratio > 1 {
+		ratio = 1
+	}
+
+	filled := int(math.Round(ratio * width))
+	if filled > width {
+		filled = width
+	}
+
+	percent := int(math.Round(ratio * 100))
+	return fmt.Sprintf("[%s%s] %d%%", strings.Repeat("#", filled), strings.Repeat("-", width-filled), percent)
+}
+
+// isDoneStatus reports whether a status string represents a completed state.
+// It returns true for the case-insensitive values "DONE", "COMPLETED", "COMPLETE", or "SHIPPED"; otherwise false.
+func isDoneStatus(status string) bool {
+	switch strings.ToUpper(strings.TrimSpace(status)) {
+	case "DONE", "COMPLETED", "COMPLETE", "SHIPPED":
+		return true
+	default:
+		return false
+	}
+}
+
+// statusRank maps a status string to a sort rank for ordering status rows:
+// completed statuses (DONE/COMPLETED/COMPLETE/SHIPPED) -> 0, IN_PROGRESS -> 1,
+// STARTED -> 2, BLOCKED -> 3, PLANNED -> 4,
+// any other or empty status -> 5
+func statusRank(status string) int {
+	switch strings.ToUpper(strings.TrimSpace(status)) {
+	case "DONE", "COMPLETED", "COMPLETE", "SHIPPED":
+		return 0
+	case "IN_PROGRESS":
+		return 1
+	case "STARTED":
+		return 2
+	case "BLOCKED":
+		return 3
+	case "PLANNED":
+		return 4
+	default:
+		return 5
+	}
+}
+
+// cleanValues returns a new slice containing the non-empty strings from values
+// after trimming surrounding whitespace. The original order is preserved.
+// If values is nil or has length zero, cleanValues returns nil.
+func cleanValues(values []string) []string {
+	if len(values) == 0 {
+		return nil
+	}
+
+	cleaned := make([]string, 0, len(values))
+	for _, value := range values {
+		trimmed := strings.TrimSpace(value)
+		if trimmed == "" {
+			continue
+		}
+		cleaned = append(cleaned, trimmed)
+	}
+
+	return cleaned
+}
+
+// generateChangelog writes the changelog snippet listing completed tasks,
+// newest first by parsed updated_at timestamp with ties broken by ID.
+func (g *Generator) generateChangelog(ctx context.Context, records []recordWithPath[taskRecord]) error {
+	select {
+	case <-ctx.Done():
+		return ctx.Err()
+	default:
+	}
+
+	done := make([]recordWithPath[taskRecord], 0)
+	for _, record := range records {
+		if isDoneStatus(record.data.Status) {
+			done = append(done, record)
+		}
+	}
+
+	builder := &strings.Builder{}
+	builder.WriteString(generatedNotice)
+	builder.WriteString("\n")
+
+	if len(done) == 0 {
+		builder.WriteString("_No completed tasks yet._\n")
+	} else {
+		// Single sort on parsed timestamps (newest first), ID ascending on
+		// ties. A previous lexical pre-sort on the raw UpdatedAt string was
+		// redundant dead work: this comparator fully determines the order.
+		sort.Slice(done, func(i, j int) bool {
+			ti := parseTimestamp(derefString(done[i].data.UpdatedAt))
+			tj := parseTimestamp(derefString(done[j].data.UpdatedAt))
+			if !ti.Equal(tj) {
+				return ti.After(tj)
+			}
+			return done[i].data.ID < done[j].data.ID
+		})
+
+		for _, record := range done {
+			date := derefString(record.data.UpdatedAt)
+			if strings.TrimSpace(date) == "" {
+				date = "—"
+			}
+			badges := formatList(record.data.Badges)
+			if badges == "—" {
+				// formatList returns an em dash for empty lists; suppress it
+				// rather than rendering a dangling separator.
+				badges = ""
+			} else {
+				badges = " — " + badges
+			}
+			fmt.Fprintf(
+				builder,
+				"- %s — **%s** (%s)%s\n",
+				date,
+				record.data.Title,
+				record.data.ID,
+				badges,
+			)
+		}
+	}
+
+	outputPath := filepath.Join(g.componentsDir, "issues", "changelog.md")
+	if err := g.writeFile(outputPath, builder.String()); err != nil {
+		return fmt.Errorf("write changelog snippet: %w", err)
+	}
+
+	return nil
+}
+
+// parseTimestamp parses s against a set of common date/time layouts and
+// returns the zero time.Time when s is empty or cannot be parsed.
+func parseTimestamp(s string) time.Time {
+	s = strings.TrimSpace(s)
+	if s == "" {
+		return time.Time{}
+	}
+	// Try common formats (include non-padded and zero-padded forms)
+	layouts := []string{
+		time.RFC3339,
+		"2006-1-2 15:04:05",
+		"2006-1-2 15:04",
+		"2006-1-2",
+		"2006-01-02 15:04:05",
+		"2006-01-02 15:04",
+		"2006-01-02",
+	}
+	for _, layout := range layouts {
+		if t, err := time.Parse(layout, s); err == nil {
+			return t
+		}
+	}
+	// Fallback: normalize YYYY-M-D by parsing numeric month/day and zero-padding
+	parts := strings.Split(s, "-")
+	if len(parts) == 3 {
+		y := strings.TrimSpace(parts[0])
+		mStr := strings.TrimSpace(parts[1])
+		dStr := strings.TrimSpace(parts[2])
+		if mi, err1 := strconv.Atoi(mStr); err1 == nil {
+			if di, err2 := strconv.Atoi(dStr); err2 == nil {
+				if t, err := time.Parse("2006-01-02", fmt.Sprintf("%s-%02d-%02d", y, mi, di)); err == nil {
+					return t
+				}
+			}
+		}
+	}
+	return time.Time{}
+}
+
+// writeFile ensures destPath's parent directory exists (0755) and writes
+// contents to destPath with 0644 permissions.
+func (g *Generator) writeFile(destPath, contents string) error {
+	if err := os.MkdirAll(filepath.Dir(destPath), 0o755); err != nil {
+		return fmt.Errorf("ensure directory for %s: %w", destPath, err)
+	}
+
+	return os.WriteFile(destPath, []byte(contents), 0o644)
+}
+
+// formatList trims and filters empty strings from values and returns them joined by ", ".
+// If the resulting list is empty it returns an em dash "—".
+func formatList(values []string) string {
+	cleaned := cleanValues(values)
+	if len(cleaned) == 0 {
+		return "—"
+	}
+
+	return strings.Join(cleaned, ", ")
+}
+
+// escapeMermaidLabel returns label with characters escaped for use in Mermaid diagrams.
+// It escapes backslashes, double quotes, and square brackets so the label can be safely
+// embedded in Mermaid node definitions.
+func escapeMermaidLabel(label string) string {
+	replacer := strings.NewReplacer(
+		"\\", "\\\\",
+		"\"", "\\\"",
+		"[", "\\[",
+		"]", "\\]",
+	)
+	return replacer.Replace(label)
+}
+
+// collectNodesByType returns the sorted node identifiers for records whose type
+// matches typeLabel. Each matching record's node is looked up in nodeIDs by
+// the record's id; records without a corresponding entry in nodeIDs are skipped.
+func collectNodesByType(typeLabel string, records []typedRecord, nodeIDs map[string]string) []string {
+	nodes := make([]string, 0)
+	for _, record := range records {
+		if record.typeLabel != typeLabel {
+			continue
+		}
+		if node, ok := nodeIDs[record.id]; ok {
+			nodes = append(nodes, node)
+		}
+	}
+
+	sort.Strings(nodes)
+	return nodes
+}
+
+// normalizeOptions normalizes and validates graph-related fields in a GeneratorOptions value.
+// It uppercases and validates GraphDirection (defaults to "LR" if invalid) and lowercases
+// GraphPalette (defaults to "evergreen" or to "evergreen" if the named palette is not present
+// in the provided palettes map), returning the adjusted options.
+func normalizeOptions(options GeneratorOptions, palettes map[string]map[string]paletteColor) GeneratorOptions {
+	direction := strings.ToUpper(strings.TrimSpace(options.GraphDirection))
+	if !isValidDirection(direction) {
+		direction = "LR"
+	}
+	options.GraphDirection = direction
+
+	palette := strings.ToLower(strings.TrimSpace(options.GraphPalette))
+	if palette == "" {
+		palette = "evergreen"
+	}
+	// Fall back to the default palette when the requested name is unknown.
+	if _, ok := palettes[palette]; !ok {
+		palette = "evergreen"
+	}
+	options.GraphPalette = palette
+
+	return options
+}
+
+// isValidDirection reports whether the given graph direction is one of the
+// supported Mermaid directions: "LR", "RL", "TB", or "BT".
+func isValidDirection(direction string) bool {
+	switch direction {
+	case "LR", "RL", "TB", "BT":
+		return true
+	default:
+		return false
+	}
+}
+
+// orderedTypes fixes the display order of artifact types.
+var orderedTypes = []string{"Milestone", "Feature", "Story", "Task"}
+
+// derefString returns the trimmed string pointed to by value, or the empty
+// string if the pointer is nil.
+func derefString(value *string) string {
+	if value == nil {
+		return ""
+	}
+	return strings.TrimSpace(*value)
+}
+
+// isKnownPaletteClass reports whether the given class name is a recognized palette
+// class. Valid names are "milestone", "feature", "story", and "task".
+func isKnownPaletteClass(class string) bool { + switch class { + case "milestone", "feature", "story", "task": + return true + default: + return false + } +} diff --git a/internal/docscomponents/generator_test.go b/internal/docscomponents/generator_test.go new file mode 100644 index 0000000..942e7f5 --- /dev/null +++ b/internal/docscomponents/generator_test.go @@ -0,0 +1,264 @@ +package docscomponents_test + +import ( + "context" + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/flyingrobots/hubless/internal/docscomponents" +) + +func TestGeneratorGenerate(t *testing.T) { + t.Parallel() + + repoRoot := t.TempDir() + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "roadmap", "milestones", "sample-milestone.json"), map[string]any{ + "id": "sample/milestone/0001", + "title": "Sample Milestone", + "status": "DONE", + "dependencies": []any{}, + "features": []any{}, + "tasks": []any{}, + "notes": []any{}, + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "roadmap", "features", "sample-feature.json"), map[string]any{ + "id": "sample/feature/0001", + "title": "Sample Feature", + "status": "PLANNED", + "dependencies": []any{ + "sample/milestone/0001", + }, + "stories": []any{}, + "tasks": []any{}, + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "issues", "stories", "sample-story.json"), map[string]any{ + "id": "sample/story/0001", + "title": "Sample Story", + "status": "DONE", + "dependencies": []any{ + "sample/feature/0001", + }, + "tasks": []any{}, + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "issues", "tasks", "sample-task-1.json"), map[string]any{ + "id": "sample/task/0001", + "title": "Sample Task Done", + "status": "DONE", + "owner": "dev", + "labels": []any{"docs"}, + "badges": []any{"Tested"}, + "updated_at": "2025-09-19", + "dependencies": []any{"sample/story/0001"}, + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "issues", "tasks", "sample-task-2.json"), map[string]any{ + "id": 
"sample/task/0002", + "title": "Sample Task Planned", + "status": "PLANNED", + "labels": []any{"docs"}, + "badges": []any{}, + "updated_at": nil, + "dependencies": []any{}, + }) + + componentsDir := filepath.Join(repoRoot, "docs", "components") + gen, err := docscomponents.NewGenerator(repoRoot, componentsDir, docscomponents.GeneratorOptions{}) + if err != nil { + t.Fatalf("NewGenerator: %v", err) + } + + if err := gen.Generate(context.Background()); err != nil { + t.Fatalf("Generate: %v", err) + } + + progress := readFile(t, filepath.Join(componentsDir, "roadmap", "progress.md")) + if !strings.Contains(progress, "[##########] 100%") { + t.Fatalf("expected progress bar to show 100%% completion, got:\n%s", progress) + } + + dependencies := readFile(t, filepath.Join(componentsDir, "roadmap", "dependencies.md")) + if !strings.Contains(dependencies, "sample/task/0001") { + t.Fatalf("expected task dependency row in dependencies summary, got:\n%s", dependencies) + } + + archivedStories := readFile(t, filepath.Join(componentsDir, "issues", "archived-stories.md")) + if !strings.Contains(archivedStories, "sample/story/0001") { + t.Fatalf("expected archived stories snippet to include completed story, got:\n%s", archivedStories) + } + + archivedTasks := readFile(t, filepath.Join(componentsDir, "issues", "archived-tasks.md")) + if !strings.Contains(archivedTasks, "Sample Task Done") { + t.Fatalf("expected archived tasks snippet to include completed task, got:\n%s", archivedTasks) + } + + changelog := readFile(t, filepath.Join(componentsDir, "issues", "changelog.md")) + if !strings.Contains(changelog, "- 2025-09-19") { + t.Fatalf("expected changelog snippet to include dated bullet, got:\n%s", changelog) + } + + graph := readFile(t, filepath.Join(componentsDir, "roadmap", "dependencies-graph.md")) + if !strings.Contains(graph, "graph LR") || !strings.Contains(graph, "Sample Task Done") { + t.Fatalf("expected mermaid dependency graph to include task node label, got:\n%s", graph) 
+ } + if !strings.Contains(graph, "classDef milestone") { + t.Fatalf("expected mermaid graph to include class definitions, got:\n%s", graph) + } +} + +func TestGeneratorGenerateCustomGraphOptions(t *testing.T) { + t.Parallel() + + repoRoot := t.TempDir() + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "roadmap", "milestones", "m.json"), map[string]any{ + "id": "custom/milestone", + "title": "Custom Milestone", + "status": "DONE", + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "roadmap", "features", "f.json"), map[string]any{ + "id": "custom/feature", + "title": "Custom Feature", + "status": "DONE", + "dependencies": []any{"custom/milestone"}, + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "issues", "stories", "s.json"), map[string]any{ + "id": "custom/story", + "title": "Custom Story", + "status": "DONE", + "dependencies": []any{"custom/feature"}, + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "issues", "tasks", "t.json"), map[string]any{ + "id": "custom/task", + "title": "Custom Task", + "status": "DONE", + "updated_at": "2025-09-19", + "dependencies": []any{"custom/story"}, + }) + + componentsDir := filepath.Join(repoRoot, "docs", "components") + gen, err := docscomponents.NewGenerator(repoRoot, componentsDir, docscomponents.GeneratorOptions{ + GraphDirection: "tb", + GraphClusters: true, + GraphPalette: "infrared", + }) + if err != nil { + t.Fatalf("NewGenerator: %v", err) + } + + if err := gen.Generate(context.Background()); err != nil { + t.Fatalf("Generate: %v", err) + } + +graph := readFile(t, filepath.Join(componentsDir, "roadmap", "dependencies-graph.md")) +if !strings.Contains(graph, "graph TB") { + t.Fatalf("expected dependency graph to honour direction TB, got:\n%s", graph) +} +if !strings.Contains(strings.ToLower(graph), "subgraph feature") { + t.Fatalf("expected dependency graph to include clusters, got:\n%s", graph) +} +if !strings.Contains(strings.ToLower(strings.ReplaceAll(graph, " ", "")), + 
"classdefmilestonefill:#1a1c23") { + t.Fatalf("expected infrared palette colors in graph, got:\n%s", graph) +} +} + +func TestGeneratorPaletteFile(t *testing.T) { + t.Parallel() + + repoRoot := t.TempDir() + + palettePath := filepath.Join(repoRoot, "palettes.json") + writeJSON(t, palettePath, map[string]any{ + "custom": map[string]any{ + "milestone": map[string]string{"fill": "#123456", "stroke": "#654321", "text": "#FFFFFF"}, + "feature": map[string]string{"fill": "#abcdef", "stroke": "#fedcba", "text": "#000000"}, + "story": map[string]string{"fill": "#111111", "stroke": "#222222", "text": "#EEEEEE"}, + "task": map[string]string{"fill": "#333333", "stroke": "#444444", "text": "#DDDDDD"}, + }, + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "roadmap", "milestones", "m.json"), map[string]any{ + "id": "custom/milestone", + "title": "Custom Milestone", + "status": "DONE", + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "roadmap", "features", "f.json"), map[string]any{ + "id": "custom/feature", + "title": "Custom Feature", + "status": "DONE", + "dependencies": []any{"custom/milestone"}, + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "issues", "stories", "s.json"), map[string]any{ + "id": "custom/story", + "title": "Custom Story", + "status": "DONE", + "dependencies": []any{"custom/feature"}, + }) + + writeJSON(t, filepath.Join(repoRoot, "@hubless", "issues", "tasks", "t.json"), map[string]any{ + "id": "custom/task", + "title": "Custom Task", + "status": "DONE", + "dependencies": []any{"custom/story"}, + }) + + componentsDir := filepath.Join(repoRoot, "docs", "components") + gen, err := docscomponents.NewGenerator(repoRoot, componentsDir, docscomponents.GeneratorOptions{ + GraphPalette: "custom", + PaletteFile: "palettes.json", + }) + if err != nil { + t.Fatalf("NewGenerator: %v", err) + } + + if err := gen.Generate(context.Background()); err != nil { + t.Fatalf("Generate: %v", err) + } + + graph := readFile(t, 
filepath.Join(componentsDir, "roadmap", "dependencies-graph.md")) + if !(strings.Contains(graph, "classDef milestone fill:#123456") && + strings.Contains(graph, "classDef milestone stroke:#654321") && + strings.Contains(graph, "classDef milestone color:#FFFFFF")) { + t.Fatalf("expected custom palette colors in graph, got:\n%s", graph) + } +} + +func writeJSON(t *testing.T, path string, payload any) { + t.Helper() + + if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil { + t.Fatalf("mkdir %s: %v", path, err) + } + + data, err := json.MarshalIndent(payload, "", " ") + if err != nil { + t.Fatalf("marshal %s: %v", path, err) + } + + if err := os.WriteFile(path, data, 0o644); err != nil { + t.Fatalf("write %s: %v", path, err) + } +} + +func readFile(t *testing.T, path string) string { + t.Helper() + + data, err := os.ReadFile(path) + if err != nil { + t.Fatalf("read %s: %v", path, err) + } + + return string(data) +} diff --git a/internal/docscomponents/transclusion.go b/internal/docscomponents/transclusion.go new file mode 100644 index 0000000..3fb273c --- /dev/null +++ b/internal/docscomponents/transclusion.go @@ -0,0 +1,94 @@ +package docscomponents + +import ( + "context" + "errors" + "fmt" + "os" + "os/exec" + "path/filepath" + "strings" +) + +// TransclusionOptions instructs the CLI invocation that renders templates. +type TransclusionOptions struct { + Bin string + Args []string + BasePath string + InputPath string + OutputPath string +} + +// RunTransclusion runs the markdown-transclusion CLI to render a document template +// using opts. It validates that opts.InputPath and opts.OutputPath are set, defaults +// opts.Bin to "markdown-transclusion" when empty, and uses opts.BasePath or the +// current working directory as the base. Paths are resolved to absolute values +// relative to the base, the output directory is created if necessary, and the CLI is +// executed with the provided context. 
On failure the returned error includes the +// CLI's combined output. +func RunTransclusion(ctx context.Context, opts TransclusionOptions) error { + if opts.InputPath == "" { + return errors.New("input path is required") + } + if opts.OutputPath == "" { + return errors.New("output path is required") + } + + bin := strings.TrimSpace(opts.Bin) + if bin == "" { + bin = "markdown-transclusion" + } + + basePath := opts.BasePath + if basePath == "" { + detected, err := os.Getwd() + if err != nil { + return fmt.Errorf("determine working directory: %w", err) + } + basePath = detected + } + + absBasePath, err := filepath.Abs(basePath) + if err != nil { + return fmt.Errorf("resolve base path: %w", err) + } + + absInput, err := makeAbsoluteWithBase(opts.InputPath, absBasePath) + if err != nil { + return fmt.Errorf("resolve input path: %w", err) + } + + absOutput, err := makeAbsoluteWithBase(opts.OutputPath, absBasePath) + if err != nil { + return fmt.Errorf("resolve output path: %w", err) + } + + if err := os.MkdirAll(filepath.Dir(absOutput), 0o755); err != nil { + return fmt.Errorf("ensure output directory: %w", err) + } + + args := append([]string{}, opts.Args...) + args = append(args, absInput) + args = append(args, "--output", absOutput, "--base-path", absBasePath) + + cmd := exec.CommandContext(ctx, bin, args...) + cmd.Dir = absBasePath + output, err := cmd.CombinedOutput() + if err != nil { + return fmt.Errorf("run %s: %w\n%s", bin, err, strings.TrimSpace(string(output))) + } + + return nil +} + +// absolute path fails. 
+func makeAbsoluteWithBase(pathValue, base string) (string, error) { + if filepath.IsAbs(pathValue) { + return pathValue, nil + } + if strings.TrimSpace(pathValue) == "" { + return "", errors.New("path cannot be empty") + } + candidate := filepath.Join(base, pathValue) + return filepath.Abs(candidate) +} diff --git a/internal/mock/data.go b/internal/mock/data.go new file mode 100644 index 0000000..4782fd7 --- /dev/null +++ b/internal/mock/data.go @@ -0,0 +1,186 @@ +package mock + +import ( + "time" +) + +// Issue represents a simplified issue summary/detail payload used by mocked CLI/TUI flows. +type Issue struct { + ID string + Title string + Status string + Priority string + Assignee string + LastUpdated time.Time + Body string + Comments []Comment + Events []TimelineEvent +} + +// Comment represents a lightweight discussion entry. +type Comment struct { + Author string + Body string + CreatedAt time.Time +} + +// TimelineEvent captures key events rendered in the detail timeline. +type TimelineEvent struct { + Label string + Actor string + Timestamp time.Time + Note string +} + +// BoardColumn records kanban column membership with simple counts. +type BoardColumn struct { + Name string + Limit int + Issues []BoardCard + Highlight string +} + +// BoardCard is a compact representation of a card within the kanban view. +type BoardCard struct { + ID string + Title string + Assignee string + Priority string +} + +// StatusSection summarises one status buffer section. +type StatusSection struct { + Title string + Items []string + Hint string + Counter int +} + +// MockCatalog returns a slice of sample Issue values used by list/detail wireframes. +// The provided now time is used to compute LastUpdated, Comment.CreatedAt, and TimelineEvent.Timestamp +// so callers can control the generated timestamps. 
+func MockCatalog(now time.Time) []Issue {
+	// All timestamps are fixed offsets from now, so the data is deterministic
+	// relative to the caller's clock.
+	return []Issue{
+		// Milestone-1 task with an active discussion and timeline.
+		{
+			ID:          "hubless/m1/task/0005",
+			Title:       "Prototype Bubbletea TUI and Fang CLI wireframes",
+			Status:      "in-progress",
+			Priority:    "high",
+			Assignee:    "james",
+			LastUpdated: now.Add(-2 * time.Hour),
+			Body:        "- Validate layout decisions\n- Capture screenshots of primary buffers\n- Iterate on keyboard flow feedback",
+			Comments: []Comment{
+				{Author: "james", Body: "Kicking off mocked UI pass.", CreatedAt: now.Add(-90 * time.Minute)},
+				{Author: "codex", Body: "Wireframes landed in docs/images.", CreatedAt: now.Add(-30 * time.Minute)},
+			},
+			Events: []TimelineEvent{
+				{Label: "status:started", Actor: "james", Timestamp: now.Add(-3 * time.Hour), Note: "Moved from backlog"},
+				{Label: "comment", Actor: "james", Timestamp: now.Add(-90 * time.Minute), Note: "Kicking off mocked UI pass."},
+				{Label: "comment", Actor: "codex", Timestamp: now.Add(-30 * time.Minute), Note: "Wireframes landed in docs/images."},
+			},
+		},
+		// Planned task exercising the unassigned rendering path.
+		{
+			ID:          "hubless/m1/task/0002",
+			Title:       "Introduce Fang-based CLI skeleton",
+			Status:      "planned",
+			Priority:    "medium",
+			Assignee:    "_unassigned_",
+			LastUpdated: now.Add(-6 * time.Hour),
+			Body:        "Skeleton CLI with Fang command tree and dependency injection for services.",
+			Comments: []Comment{
+				{Author: "codex", Body: "Waiting on wireframe validation.", CreatedAt: now.Add(-5 * time.Hour)},
+			},
+			Events: []TimelineEvent{
+				{Label: "planned", Actor: "codex", Timestamp: now.Add(-7 * time.Hour), Note: "Pulled into milestone."},
+			},
+		},
+		// Older milestone-0 task: exercises >24h LastUpdated formatting.
+		{
+			ID:          "hubless/m0/task/0004",
+			Title:       "Structure @hubless planning artifacts",
+			Status:      "in-progress",
+			Priority:    "medium",
+			Assignee:    "james",
+			LastUpdated: now.Add(-26 * time.Hour),
+			Body:        "Maintain DAG of tasks and stories for upcoming milestones.",
+			Comments: []Comment{
+				{Author: "james", Body: "Docs refreshed with new sections.", CreatedAt: now.Add(-23 * time.Hour)},
+			},
+			Events: []TimelineEvent{
+				{Label: "status:started", Actor: "james", Timestamp: now.Add(-27 * time.Hour), Note: "Board grooming"},
+			},
+		},
+	}
+}
+
+// MockStatusSections returns sample status sections used by the home buffer view.
+//
+// It produces four static sections — "Focus", "Inbox", "Boards", and "Saved Filters" —
+// each with example items, a short hint for keyboard interaction, and a counter.
+//
+// The `now` parameter is accepted for API consistency with other mock factories but is
+// not used to compute the returned data.
+func MockStatusSections(now time.Time) []StatusSection {
+	return []StatusSection{
+		{
+			Title:   "Focus",
+			Items:   []string{"hubless/m1/task/0005 · Mocked UI flows", "hubless/m0/task/0004 · Planning artifacts"},
+			Hint:    "g i to drill into issues",
+			Counter: 2,
+		},
+		{
+			Title:   "Inbox",
+			Items:   []string{"codex commented on hubless/m1/task/0005", "Merge request awaiting review"},
+			Hint:    "enter to expand timeline",
+			Counter: 4,
+		},
+		{
+			Title:   "Boards",
+			Items:   []string{"Open: 8", "In Progress: 5 (1 over WIP)", "Review: 2", "Done: 12"},
+			Hint:    "g b to inspect columns",
+			Counter: 5,
+		},
+		{
+			Title:   "Saved Filters",
+			Items:   []string{"1. @me", "2. High priority", "3. Needs review"},
+			Hint:    "number keys to apply",
+			Counter: 3,
+		},
+	}
+}
+
+// MockBoard returns a deterministic set of sample Kanban columns and cards used by mock UI flows.
+//
+// The returned slice contains three columns ("Open", "In Progress", "Review") populated with
+// BoardCard entries (ID, Title, Assignee, Priority). Intended for development and testing of
+// Kanban/board views—not for production data.
+func MockBoard() []BoardColumn {
+	return []BoardColumn{
+		{
+			Name:      "Open",
+			Limit:     8,
+			Highlight: "",
+			Issues: []BoardCard{
+				{ID: "task/0010", Title: "Capture sync diagnostics", Assignee: "alex", Priority: "medium"},
+				{ID: "task/0011", Title: "Draft CLI usage guide", Assignee: "james", Priority: "high"},
+			},
+		},
+		{
+			Name:      "In Progress",
+			Limit:     5,
+			Highlight: "over", // intentionally over the WIP limit to exercise highlight rendering
+			Issues: []BoardCard{
+				{ID: "task/0005", Title: "Prototype Bubbletea wireframes", Assignee: "james", Priority: "high"},
+				{ID: "task/0004", Title: "Planning DAG maintenance", Assignee: "james", Priority: "medium"},
+				{ID: "task/0009", Title: "Sync command telemetry", Assignee: "sam", Priority: "low"},
+			},
+		},
+		{
+			Name:      "Review",
+			Limit:     3,
+			Highlight: "",
+			Issues: []BoardCard{
+				{ID: "task/0007", Title: "Git adapter tests", Assignee: "mia", Priority: "high"},
+			},
+		},
+	}
+}
diff --git a/internal/release/releaser.go b/internal/release/releaser.go
new file mode 100644
index 0000000..3374635
--- /dev/null
+++ b/internal/release/releaser.go
@@ -0,0 +1,187 @@
+package release
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"strings"
+)
+
+var (
+	// ErrVersionRequired is returned by Run when Options.Version is blank.
+	ErrVersionRequired = errors.New("version is required")
+)
+
+// Options configures a single release run.
+type Options struct {
+	Version    string // release version; a leading "v" is added if missing
+	NotesPath  string // notes file; relative paths resolve against the repo root
+	DryRun     bool   // print the plan without creating a tag
+	SkipChecks bool   // skip the make fmt/lint/test/docs pre-flight checks
+}
+
+// Releaser runs release steps (checks, tagging) inside a repository checkout.
+type Releaser struct {
+	repoRoot string // absolute path to the repository root
+}
+
+// New returns a Releaser for the repository located at repoRoot.
+// repoRoot must be a non-empty path; it may be relative and will be resolved
+// to an absolute path. Returns an error if repoRoot is empty or the path
+// cannot be resolved.
+func New(repoRoot string) (*Releaser, error) {
+	if repoRoot == "" {
+		return nil, errors.New("repo root is required")
+	}
+
+	absRoot, err := filepath.Abs(repoRoot)
+	if err != nil {
+		return nil, fmt.Errorf("resolve repo root: %w", err)
+	}
+
+	return &Releaser{repoRoot: absRoot}, nil
+}
+
+// Run validates Options, reads the release notes, optionally runs pre-flight
+// checks, and either prints a dry-run plan or creates an annotated git tag
+// whose message is the trimmed notes content.
+func (r *Releaser) Run(ctx context.Context, opts Options) error {
+	if strings.TrimSpace(opts.Version) == "" {
+		return ErrVersionRequired
+	}
+	version := normalizeVersion(opts.Version)
+
+	notesPath := opts.NotesPath
+	if notesPath == "" {
+		notesPath = filepath.Join(r.repoRoot, "docs", "reference", "release-notes.md")
+	} else if !filepath.IsAbs(notesPath) {
+		notesPath = filepath.Join(r.repoRoot, notesPath)
+	}
+
+	notes, err := os.ReadFile(notesPath)
+	if err != nil {
+		return fmt.Errorf("read release notes: %w", err)
+	}
+	trimmedNotes := strings.TrimSpace(string(notes))
+	if trimmedNotes == "" {
+		return errors.New("release notes file is empty")
+	}
+
+	if !opts.SkipChecks {
+		if err := r.runChecks(ctx); err != nil {
+			return err
+		}
+	}
+
+	if err := r.ensureClean(ctx); err != nil {
+		return err
+	}
+
+	if opts.DryRun {
+		fmt.Printf("[dry-run] Ready to tag %s using notes from %s\n", version, notesPath)
+		fmt.Printf("[dry-run] Tag message preview:\n%s\n", trimmedNotes)
+		fmt.Println("[dry-run] Next steps:")
+		// BUGFIX: the original printed "git tag -a %s -F \n" with a dangling
+		// -F and no file operand; include the notes path.
+		fmt.Printf("  git tag -a %s -F %s\n", version, notesPath)
+		fmt.Printf("  git push origin %s\n", version)
+		fmt.Println("  Optionally: gh release create", version, "-F", notesPath)
+		return nil
+	}
+
+	if err := r.ensureTagDoesNotExist(ctx, version); err != nil {
+		return err
+	}
+
+	tempFile, err := os.CreateTemp("", "release-notes-*.md")
+	if err != nil {
+		return fmt.Errorf("create temp file: %w", err)
+	}
+	defer func() {
+		_ = os.Remove(tempFile.Name())
+	}()
+
+	if _, err := tempFile.WriteString(trimmedNotes + "\n"); err != nil {
+		return fmt.Errorf("write temp release notes: %w", err)
+	}
+	if err := tempFile.Close(); err != nil {
+		return fmt.Errorf("close temp release notes: %w", err)
+	}
+
+	if err := r.runCommand(ctx, "git", "tag", "-a", version, "-F", tempFile.Name()); err != nil {
+		return fmt.Errorf("create tag: %w", err)
+	}
+
+	fmt.Printf("Created annotated tag %s.\n", version)
+	fmt.Println("Next steps:")
+	fmt.Printf("  git push origin %s\n", version)
+	fmt.Println("  Optionally: gh release create", version, "-F", notesPath)
+	return nil
+}
+
+// runChecks runs the pre-flight make targets and verifies after each one that
+// the working tree is still clean (so e.g. "make fmt" cannot silently change
+// files that would be missing from the tag).
+func (r *Releaser) runChecks(ctx context.Context) error {
+	commands := [][]string{
+		{"make", "fmt"},
+		{"make", "lint"},
+		{"make", "test"},
+		{"make", "docs"},
+	}
+
+	for _, args := range commands {
+		if err := r.runCommand(ctx, args[0], args[1:]...); err != nil {
+			return fmt.Errorf("run %s: %w", strings.Join(args, " "), err)
+		}
+		if err := r.ensureClean(ctx); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+// ensureClean fails if tracked files have uncommitted changes.
+func (r *Releaser) ensureClean(ctx context.Context) error {
+	output, err := r.capture(ctx, "git", "status", "--porcelain=v1", "--untracked-files=no")
+	if err != nil {
+		return fmt.Errorf("git status: %w", err)
+	}
+	if strings.TrimSpace(output) != "" {
+		return fmt.Errorf("working tree has uncommitted changes:\n%s", output)
+	}
+	return nil
+}
+
+// ensureTagDoesNotExist refreshes tags from the remote and fails if the
+// requested version tag already exists.
+func (r *Releaser) ensureTagDoesNotExist(ctx context.Context, version string) error {
+	if err := r.runCommand(ctx, "git", "fetch", "--tags", "--prune", "--quiet"); err != nil {
+		return fmt.Errorf("fetch tags: %w", err)
+	}
+	output, err := r.capture(ctx, "git", "tag", "--list", version)
+	if err != nil {
+		return fmt.Errorf("check existing tags: %w", err)
+	}
+	if strings.TrimSpace(output) != "" {
+		return fmt.Errorf("tag %s already exists", version)
+	}
+	return nil
+}
+
+// runCommand executes name with args in the repo root, streaming output to
+// the caller's stdout/stderr.
+func (r *Releaser) runCommand(ctx context.Context, name string, args ...string) error {
+	cmd := exec.CommandContext(ctx, name, args...)
+	cmd.Dir = r.repoRoot
+	cmd.Stdout = os.Stdout
+	cmd.Stderr = os.Stderr
+	return cmd.Run()
+}
+
+// capture executes name with args in the repo root and returns the combined
+// stdout+stderr output (returned even when err is non-nil).
+func (r *Releaser) capture(ctx context.Context, name string, args ...string) (string, error) {
+	cmd := exec.CommandContext(ctx, name, args...)
+	cmd.Dir = r.repoRoot
+	output, err := cmd.CombinedOutput()
+	return string(output), err
+}
+
+// normalizeVersion trims whitespace from v and ensures it begins with a "v".
+// If the trimmed version is empty it is returned unchanged; otherwise, a
+// leading "v" is added when missing (e.g. "1.2.3" -> "v1.2.3").
+// NOTE(review): TrimLeft strips *all* leading v/V characters, so "Vv1.0"
+// becomes "v1.0" — confirm this is the intended handling of repeated prefixes.
+func normalizeVersion(version string) string {
+	version = strings.TrimSpace(version)
+	if version == "" {
+		return version
+	}
+	version = strings.TrimLeft(version, "vV")
+	return "v" + version
+}
diff --git a/internal/ui/tui/mock/app.go b/internal/ui/tui/mock/app.go
new file mode 100644
index 0000000..94dcf2b
--- /dev/null
+++ b/internal/ui/tui/mock/app.go
@@ -0,0 +1,156 @@
+package mock
+
+import (
+	"time"
+
+	flexbox "github.com/76creates/stickers/flexbox"
+	tea "github.com/charmbracelet/bubbletea"
+)
+
+// Screen identifies which buffer is currently active in the mock TUI.
+type Screen int
+
+const (
+	ScreenStatus Screen = iota
+	ScreenIssues
+	ScreenDetail
+	ScreenKanban
+)
+
+// AppModel hosts the mocked Bubbletea state used to exercise layouts.
+type AppModel struct {
+	screen Screen
+	ready  bool
+
+	width  int
+	height int
+
+	sections []mockStatusSection
+	issues   []mockIssue
+	board    []mockBoardColumn
+
+	sectionIndex int
+	issueIndex   int
+
+	profile layoutProfile
+
+	keyPrefix rune // NOTE(review): not read by the visible mock flows — confirm intended use
+
+	styles Styles
+}
+
+// NewModel creates an AppModel initialized for a mocked TUI.
+//
+// The returned model is set to the Status screen, sized to the provided
+// width and height, and populated with the given sections, issues, and board
+// columns. The layout profile is selected based on width, default styles are
+// applied, and selection indices (sectionIndex, issueIndex) start at 0.
+//
+// This constructor is intended for creating self-contained mock UI state used
+// in layout and rendering tests.
+func NewModel(width, height int, sections []mockStatusSection, issues []mockIssue, board []mockBoardColumn) AppModel {
+	prof := profileForWidth(width)
+	return AppModel{
+		screen:       ScreenStatus,
+		width:        width,
+		height:       height,
+		sections:     sections,
+		issues:       issues,
+		board:        board,
+		profile:      prof,
+		styles:       newStyles(),
+		sectionIndex: 0,
+		issueIndex:   0,
+	}
+}
+
+// Init schedules a short tick that flips the model into the ready state,
+// simulating asynchronous startup work.
+func (m AppModel) Init() tea.Cmd {
+	return tea.Tick(150*time.Millisecond, func(time.Time) tea.Msg {
+		return readyMsg{}
+	})
+}
+
+// Update handles window resizes (recomputing the layout profile), the
+// readiness tick, and key presses; all other messages are ignored.
+func (m AppModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
+	switch msg := msg.(type) {
+	case tea.WindowSizeMsg:
+		m.width = msg.Width
+		m.height = msg.Height
+		m.profile = profileForWidth(msg.Width)
+		return m, nil
+	case readyMsg:
+		m.ready = true
+		return m, nil
+	case tea.KeyMsg:
+		return m.handleKey(msg)
+	}
+	return m, nil
+}
+
+// View renders a three-row flexbox: a statusline header, a placeholder body,
+// and a footer hint.
+func (m AppModel) View() string {
+	box := flexbox.New(m.width, m.height)
+	headerRow := box.NewRow()
+	headerRow.AddCells(flexbox.NewCell(1, 1).SetContent(m.styles.Statusline.Render("Hubless Mock · " + m.profile.Name())))
+	bodyRow := box.NewRow()
+	bodyRow.AddCells(flexbox.NewCell(1, 6).SetContent("mocked body"))
+	footerRow := box.NewRow()
+	footerRow.AddCells(flexbox.NewCell(1, 1).SetContent(m.styles.Footer.Render("Press q to quit")))
+	box.SetRows([]*flexbox.Row{headerRow, bodyRow, footerRow})
+	return box.Render()
+}
+
+// handleKey maps key presses to actions; only quit keys are wired up so far.
+func (m AppModel) handleKey(msg tea.KeyMsg) (tea.Model, tea.Cmd) {
+	switch msg.String() {
+	case "q", "ctrl+c":
+		return m, tea.Quit
+	}
+	return m, nil
+}
+
+// readyMsg signals that the simulated startup tick has fired.
+type readyMsg struct{}
+
+// Supporting mock-friendly copies of data structures to avoid coupling to domain package.
+type mockStatusSection struct {
+	Title   string
+	Items   []string
+	Hint    string
+	Counter int
+}
+
+type mockIssue struct {
+	ID          string
+	Title       string
+	Status      string
+	Priority    string
+	Assignee    string
+	LastUpdated time.Time
+	Body        string
+	Comments    []mockComment
+	Events      []mockEvent
+}
+
+type mockComment struct {
+	Author    string
+	Body      string
+	CreatedAt time.Time
+}
+
+type mockEvent struct {
+	Label     string
+	Actor     string
+	Timestamp time.Time
+	Note      string
+}
+
+type mockBoardColumn struct {
+	Name      string
+	Limit     int
+	Issues    []mockBoardCard
+	Highlight string
+}
+
+type mockBoardCard struct {
+	ID       string
+	Title    string
+	Assignee string
+	Priority string
+}
diff --git a/internal/ui/tui/mock/profile.go b/internal/ui/tui/mock/profile.go
new file mode 100644
index 0000000..b15469d
--- /dev/null
+++ b/internal/ui/tui/mock/profile.go
@@ -0,0 +1,31 @@
+package mock
+
+// layoutProfile encapsulates responsive settings derived from Stickers breakpoints.
+type layoutProfile struct {
+	id   string
+	name string
+}
+
+// Name returns the human-readable label for the profile.
+func (p layoutProfile) Name() string {
+	if p.name == "" {
+		return p.id
+	}
+	return p.name
+}
+
+// profileForWidth returns the responsive layoutProfile for the given width.
+// It maps widths < 100 to the "sm" (small) profile, widths >= 100 and < 140 to
+// the "md" (medium) profile, and widths >= 140 to the "lg" (large) profile.
+// Widths are measured in terminal columns.
+func profileForWidth(width int) layoutProfile {
+	switch {
+	case width < 100:
+		return layoutProfile{id: "sm", name: "small"}
+	case width < 140:
+		return layoutProfile{id: "md", name: "medium"}
+	default:
+		return layoutProfile{id: "lg", name: "large"}
+	}
+}
diff --git a/internal/ui/tui/mock/styles.go b/internal/ui/tui/mock/styles.go
new file mode 100644
index 0000000..d38276d
--- /dev/null
+++ b/internal/ui/tui/mock/styles.go
@@ -0,0 +1,21 @@
+package mock
+
+import "github.com/charmbracelet/lipgloss"
+
+// Styles mirrors the styling helpers used by the real TUI so mock rendering stays consistent and lintable.
+type Styles struct {
+	Statusline lipgloss.Style
+	Footer     lipgloss.Style
+}
+
+// newStyles constructs and returns a Styles value with the mock TUI's default styling.
+// (Unexported to match the call site in app.go; the exported spelling did not compile.)
+//
+// The returned Styles sets:
+//   - Statusline: foreground color #00B3A4 and bold text.
+//   - Footer: foreground color #6B9F7F.
+func newStyles() Styles {
+	return Styles{
+		Statusline: lipgloss.NewStyle().Foreground(lipgloss.Color("#00B3A4")).Bold(true),
+		Footer:     lipgloss.NewStyle().Foreground(lipgloss.Color("#6B9F7F")),
+	}
+}
diff --git a/scripts/install-git-hooks.sh b/scripts/install-git-hooks.sh
new file mode 100755
index 0000000..4eb7d19
--- /dev/null
+++ b/scripts/install-git-hooks.sh
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+HOOK_DIR="$ROOT_DIR/.githooks"
+
+if [ ! -d "$HOOK_DIR" ]; then
+  echo "No .githooks directory found" >&2
+  exit 1
+fi
+
+if ! git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
+  echo "Not inside a git repository" >&2
+  exit 1
+fi
+git config --local core.hooksPath "$HOOK_DIR"
+echo "Git hooks path set to $HOOK_DIR"
diff --git a/scripts/render-docs.sh b/scripts/render-docs.sh
new file mode 100755
index 0000000..19a8d3b
--- /dev/null
+++ b/scripts/render-docs.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+BIN="${MARKDOWN_TRANSCLUSION_BIN:-markdown-transclusion}"
+BASE_OVERRIDE="${MARKDOWN_TRANSCLUSION_BASE:-}"
+ARGS_ENV="${MARKDOWN_TRANSCLUSION_ARGS:-}"
+
+cd "$ROOT_DIR"
+
+cmd=(go run ./cmd/docs-components --repo "$ROOT_DIR" --transclusion-bin "$BIN")
+
+if [[ -n "$BASE_OVERRIDE" ]]; then
+  cmd+=(--transclusion-base "$BASE_OVERRIDE")
+fi
+
+if [[ -n "$ARGS_ENV" ]]; then
+  # Split on whitespace without glob expansion; the previous unquoted
+  # array assignment would also expand glob characters in the args.
+  read -r -a extra_args <<<"$ARGS_ENV"
+  for arg in "${extra_args[@]}"; do
+    cmd+=(--transclusion-args "$arg")
+  done
+fi
+
+exec "${cmd[@]}"
diff --git a/scripts/test-release-docker.sh b/scripts/test-release-docker.sh
new file mode 100755
index 0000000..2ca37b6
--- /dev/null
+++ b/scripts/test-release-docker.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+IMAGE_NAME="hubless-release-test"
+VERSION="0.0.1"
+
+cd "$ROOT_DIR"
+
+docker build --pull -f Dockerfile.release-test -t "$IMAGE_NAME" .
+
+docker run --rm "$IMAGE_NAME" /bin/bash -lc "\
+  export PATH=/usr/local/go/bin:/go/bin:\$PATH && \
+  set -x && \
+  cd /app && \
+  git remote -v && \
+  go run ./cmd/release --version $VERSION --dry-run --skip-checks && \
+  go run ./cmd/release --version $VERSION --skip-checks && \
+  git tag --list && \
+  test -f docs/reference/release-notes.md \
+"
diff --git a/scripts/verify-docs.sh b/scripts/verify-docs.sh
new file mode 100644
index 0000000..21dbbb8
--- /dev/null
+++ b/scripts/verify-docs.sh
@@ -0,0 +1,55 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+TARGETS=(
+  "@hubless/issues/generated"
+  "@hubless/roadmap/generated"
+  "docs/reference/release-notes.md"
+  "CHANGELOG.md"
+)
+
+missing=()
+failures=()
+
+# contains_placeholder returns 0 if the specified file contains an unresolved
+# placeholder of the form `![[...]]`; otherwise returns non-zero.
+contains_placeholder() {
+  local file="$1"
+  # grep -E instead of rg: works on any POSIX-ish host without ripgrep.
+  grep -qE '!\[\[[^]]+\]\]' -- "$file"
+}
+
+for target in "${TARGETS[@]}"; do
+  path="$ROOT_DIR/$target"
+  if [ ! -e "$path" ]; then
+    missing+=("$target")
+    continue
+  fi
+  if [ -d "$path" ]; then
+    while IFS= read -r -d '' file; do
+      if contains_placeholder "$file"; then
+        # Quote the pattern so characters in ROOT_DIR are treated literally.
+        failures+=("${file#"$ROOT_DIR"/}")
+      fi
+    done < <(find "$path" -type f -name '*.md' -print0)
+  else
+    if contains_placeholder "$path"; then
+      failures+=("${target}")
+    fi
+  fi
+done
+
+if ((${#missing[@]} > 0)); then
+  printf 'verify-docs: missing generated targets:\n'
+  printf '  %s\n' "${missing[@]}"
+  exit 1
+fi
+
+if ((${#failures[@]} > 0)); then
+  printf 'verify-docs: unresolved placeholders found in:\n'
+  printf '  %s\n' "${failures[@]}"
+  exit 1
+fi
+
+echo "Docs verification passed."