Compare commits
2 Commits
master
...
754f71e673
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
754f71e673 | ||
|
|
35c12f6ddc |
98
.beads/.gitignore
vendored
98
.beads/.gitignore
vendored
@@ -1,73 +1,39 @@
|
||||
# Dolt database (managed by Dolt, not git)
|
||||
dolt/
|
||||
embeddeddolt/
|
||||
|
||||
# Runtime files
|
||||
bd.sock
|
||||
bd.sock.startlock
|
||||
sync-state.json
|
||||
last-touched
|
||||
.exclusive-lock
|
||||
|
||||
# Daemon runtime (lock, log, pid)
|
||||
daemon.*
|
||||
|
||||
# Interactions log (runtime, not versioned)
|
||||
interactions.jsonl
|
||||
|
||||
# Push state (runtime, per-machine)
|
||||
push-state.json
|
||||
|
||||
# Lock files (various runtime locks)
|
||||
*.lock
|
||||
|
||||
# Credential key (encryption key for federation peer auth — never commit)
|
||||
.beads-credential-key
|
||||
|
||||
# Local version tracking (prevents upgrade notification spam after git ops)
|
||||
.local_version
|
||||
|
||||
# Worktree redirect file (contains relative path to main repo's .beads/)
|
||||
# Must not be committed as paths would be wrong in other clones
|
||||
redirect
|
||||
|
||||
# Sync state (local-only, per-machine)
|
||||
# These files are machine-specific and should not be shared across clones
|
||||
.sync.lock
|
||||
export-state/
|
||||
export-state.json
|
||||
|
||||
# Ephemeral store (SQLite - wisps/molecules, intentionally not versioned)
|
||||
ephemeral.sqlite3
|
||||
ephemeral.sqlite3-journal
|
||||
ephemeral.sqlite3-wal
|
||||
ephemeral.sqlite3-shm
|
||||
|
||||
# Dolt server management (auto-started by bd)
|
||||
dolt-server.pid
|
||||
dolt-server.log
|
||||
dolt-server.lock
|
||||
dolt-server.port
|
||||
dolt-server.activity
|
||||
|
||||
# Corrupt backup directories (created by bd doctor --fix recovery)
|
||||
*.corrupt.backup/
|
||||
|
||||
# Backup data (auto-exported JSONL, local-only)
|
||||
backup/
|
||||
|
||||
# Per-project environment file (Dolt connection config, GH#2520)
|
||||
.env
|
||||
|
||||
# Legacy files (from pre-Dolt versions)
|
||||
# SQLite databases
|
||||
*.db
|
||||
*.db?*
|
||||
*.db-journal
|
||||
*.db-wal
|
||||
*.db-shm
|
||||
|
||||
# Daemon runtime files
|
||||
daemon.lock
|
||||
daemon.log
|
||||
daemon.pid
|
||||
bd.sock
|
||||
sync-state.json
|
||||
last-touched
|
||||
|
||||
# Local version tracking (prevents upgrade notification spam after git ops)
|
||||
.local_version
|
||||
|
||||
# Legacy database files
|
||||
db.sqlite
|
||||
bd.db
|
||||
# NOTE: Do NOT add negation patterns here.
|
||||
# They would override fork protection in .git/info/exclude.
|
||||
# Config files (metadata.json, config.yaml) are tracked by git by default
|
||||
# since no pattern above ignores them.
|
||||
|
||||
# Worktree redirect file (contains relative path to main repo's .beads/)
|
||||
# Must not be committed as paths would be wrong in other clones
|
||||
redirect
|
||||
|
||||
# Merge artifacts (temporary files from 3-way merge)
|
||||
beads.base.jsonl
|
||||
beads.base.meta.json
|
||||
beads.left.jsonl
|
||||
beads.left.meta.json
|
||||
beads.right.jsonl
|
||||
beads.right.meta.json
|
||||
|
||||
# NOTE: Do NOT add negation patterns (e.g., !issues.jsonl) here.
|
||||
# They would override fork protection in .git/info/exclude, allowing
|
||||
# contributors to accidentally commit upstream issue databases.
|
||||
# The JSONL files (issues.jsonl, interactions.jsonl) and config files
|
||||
# are tracked by git by default since no pattern above ignores them.
|
||||
|
||||
0
.beads/.sync.lock
Normal file
0
.beads/.sync.lock
Normal file
@@ -23,17 +23,17 @@ bd list
|
||||
bd show <issue-id>
|
||||
|
||||
# Update issue status
|
||||
bd update <issue-id> --claim
|
||||
bd update <issue-id> --status in_progress
|
||||
bd update <issue-id> --status done
|
||||
|
||||
# Sync with Dolt remote
|
||||
bd dolt push
|
||||
# Sync with git remote
|
||||
bd sync
|
||||
```
|
||||
|
||||
### Working with Issues
|
||||
|
||||
Issues in Beads are:
|
||||
- **Git-native**: Stored in Dolt database with version control and branching
|
||||
- **Git-native**: Stored in `.beads/issues.jsonl` and synced like code
|
||||
- **AI-friendly**: CLI-first design works perfectly with AI coding agents
|
||||
- **Branch-aware**: Issues can follow your branch workflow
|
||||
- **Always in sync**: Auto-syncs with your commits
|
||||
@@ -53,7 +53,7 @@ Issues in Beads are:
|
||||
🔧 **Git Integration**
|
||||
- Automatic sync with git commits
|
||||
- Branch-aware issue tracking
|
||||
- Dolt-native three-way merge resolution
|
||||
- Intelligent JSONL merge resolution
|
||||
|
||||
## Get Started with Beads
|
||||
|
||||
|
||||
@@ -8,42 +8,50 @@
|
||||
# Example: issue-prefix: "myproject" creates issues like "myproject-1", "myproject-2", etc.
|
||||
# issue-prefix: ""
|
||||
|
||||
# Use no-db mode: JSONL-only, no Dolt database
|
||||
# Use no-db mode: load from JSONL, no SQLite, write back after each command
|
||||
# When true, bd will use .beads/issues.jsonl as the source of truth
|
||||
# instead of SQLite database
|
||||
# no-db: false
|
||||
|
||||
# Disable daemon for RPC communication (forces direct database access)
|
||||
# no-daemon: false
|
||||
|
||||
# Disable auto-flush of database to JSONL after mutations
|
||||
# no-auto-flush: false
|
||||
|
||||
# Disable auto-import from JSONL when it's newer than database
|
||||
# no-auto-import: false
|
||||
|
||||
# Enable JSON output by default
|
||||
# json: false
|
||||
|
||||
# Feedback title formatting for mutating commands (create/update/close/dep/edit)
|
||||
# 0 = hide titles, N > 0 = truncate to N characters
|
||||
# output:
|
||||
# title-length: 255
|
||||
|
||||
# Default actor for audit trails (overridden by BEADS_ACTOR or --actor)
|
||||
# Default actor for audit trails (overridden by BD_ACTOR or --actor)
|
||||
# actor: ""
|
||||
|
||||
# Export events (audit trail) to .beads/events.jsonl on each flush/sync
|
||||
# When enabled, new events are appended incrementally using a high-water mark.
|
||||
# Use 'bd export --events' to trigger manually regardless of this setting.
|
||||
# events-export: false
|
||||
# Path to database (overridden by BEADS_DB or --db)
|
||||
# db: ""
|
||||
|
||||
# Auto-start daemon if not running (can also use BEADS_AUTO_START_DAEMON)
|
||||
# auto-start-daemon: true
|
||||
|
||||
# Debounce interval for auto-flush (can also use BEADS_FLUSH_DEBOUNCE)
|
||||
# flush-debounce: "5s"
|
||||
|
||||
# Git branch for beads commits (bd sync will commit to this branch)
|
||||
# IMPORTANT: Set this for team projects so all clones use the same sync branch.
|
||||
# This setting persists across clones (unlike database config which is gitignored).
|
||||
# Can also use BEADS_SYNC_BRANCH env var for local override.
|
||||
# If not set, bd sync will require you to run 'bd config set sync.branch <branch>'.
|
||||
# sync-branch: "beads-sync"
|
||||
|
||||
# Multi-repo configuration (experimental - bd-307)
|
||||
# Allows hydrating from multiple repositories and routing writes to the correct database
|
||||
# Allows hydrating from multiple repositories and routing writes to the correct JSONL
|
||||
# repos:
|
||||
# primary: "." # Primary repo (where this database lives)
|
||||
# additional: # Additional repos to hydrate from (read-only)
|
||||
# - ~/beads-planning # Personal planning repo
|
||||
# - ~/work-planning # Work planning repo
|
||||
|
||||
# JSONL backup (periodic export for off-machine recovery)
|
||||
# Auto-enabled when a git remote exists. Override explicitly:
|
||||
# backup:
|
||||
# enabled: false # Disable auto-backup entirely
|
||||
# interval: 15m # Minimum time between auto-exports
|
||||
# git-push: false # Disable git push (export locally only)
|
||||
# git-repo: "" # Separate git repo for backups (default: project repo)
|
||||
|
||||
# Integration settings (access with 'bd config get/set')
|
||||
# These are stored in the database, not in this file:
|
||||
# - jira.url
|
||||
@@ -52,5 +60,3 @@
|
||||
# - linear.api-key
|
||||
# - github.org
|
||||
# - github.repo
|
||||
|
||||
sync.remote: "git+ssh://gitea@code.m3ta.dev/m3tam3re/nixpkgs.git"
|
||||
@@ -1,24 +0,0 @@
|
||||
#!/usr/bin/env sh
|
||||
# --- BEGIN BEADS INTEGRATION v1.0.2 ---
|
||||
# This section is managed by beads. Do not remove these markers.
|
||||
if command -v bd >/dev/null 2>&1; then
|
||||
export BD_GIT_HOOK=1
|
||||
_bd_timeout=${BEADS_HOOK_TIMEOUT:-300}
|
||||
if command -v timeout >/dev/null 2>&1; then
|
||||
timeout "$_bd_timeout" bd hooks run post-checkout "$@"
|
||||
_bd_exit=$?
|
||||
if [ $_bd_exit -eq 124 ]; then
|
||||
echo >&2 "beads: hook 'post-checkout' timed out after ${_bd_timeout}s — continuing without beads"
|
||||
_bd_exit=0
|
||||
fi
|
||||
else
|
||||
bd hooks run post-checkout "$@"
|
||||
_bd_exit=$?
|
||||
fi
|
||||
if [ $_bd_exit -eq 3 ]; then
|
||||
echo >&2 "beads: database not initialized — skipping hook 'post-checkout'"
|
||||
_bd_exit=0
|
||||
fi
|
||||
if [ $_bd_exit -ne 0 ]; then exit $_bd_exit; fi
|
||||
fi
|
||||
# --- END BEADS INTEGRATION v1.0.2 ---
|
||||
@@ -1,24 +0,0 @@
|
||||
#!/usr/bin/env sh
|
||||
# --- BEGIN BEADS INTEGRATION v1.0.2 ---
|
||||
# This section is managed by beads. Do not remove these markers.
|
||||
if command -v bd >/dev/null 2>&1; then
|
||||
export BD_GIT_HOOK=1
|
||||
_bd_timeout=${BEADS_HOOK_TIMEOUT:-300}
|
||||
if command -v timeout >/dev/null 2>&1; then
|
||||
timeout "$_bd_timeout" bd hooks run post-merge "$@"
|
||||
_bd_exit=$?
|
||||
if [ $_bd_exit -eq 124 ]; then
|
||||
echo >&2 "beads: hook 'post-merge' timed out after ${_bd_timeout}s — continuing without beads"
|
||||
_bd_exit=0
|
||||
fi
|
||||
else
|
||||
bd hooks run post-merge "$@"
|
||||
_bd_exit=$?
|
||||
fi
|
||||
if [ $_bd_exit -eq 3 ]; then
|
||||
echo >&2 "beads: database not initialized — skipping hook 'post-merge'"
|
||||
_bd_exit=0
|
||||
fi
|
||||
if [ $_bd_exit -ne 0 ]; then exit $_bd_exit; fi
|
||||
fi
|
||||
# --- END BEADS INTEGRATION v1.0.2 ---
|
||||
@@ -1,24 +0,0 @@
|
||||
#!/usr/bin/env sh
|
||||
# --- BEGIN BEADS INTEGRATION v1.0.2 ---
|
||||
# This section is managed by beads. Do not remove these markers.
|
||||
if command -v bd >/dev/null 2>&1; then
|
||||
export BD_GIT_HOOK=1
|
||||
_bd_timeout=${BEADS_HOOK_TIMEOUT:-300}
|
||||
if command -v timeout >/dev/null 2>&1; then
|
||||
timeout "$_bd_timeout" bd hooks run pre-commit "$@"
|
||||
_bd_exit=$?
|
||||
if [ $_bd_exit -eq 124 ]; then
|
||||
echo >&2 "beads: hook 'pre-commit' timed out after ${_bd_timeout}s — continuing without beads"
|
||||
_bd_exit=0
|
||||
fi
|
||||
else
|
||||
bd hooks run pre-commit "$@"
|
||||
_bd_exit=$?
|
||||
fi
|
||||
if [ $_bd_exit -eq 3 ]; then
|
||||
echo >&2 "beads: database not initialized — skipping hook 'pre-commit'"
|
||||
_bd_exit=0
|
||||
fi
|
||||
if [ $_bd_exit -ne 0 ]; then exit $_bd_exit; fi
|
||||
fi
|
||||
# --- END BEADS INTEGRATION v1.0.2 ---
|
||||
@@ -1,24 +0,0 @@
|
||||
#!/usr/bin/env sh
|
||||
# --- BEGIN BEADS INTEGRATION v1.0.2 ---
|
||||
# This section is managed by beads. Do not remove these markers.
|
||||
if command -v bd >/dev/null 2>&1; then
|
||||
export BD_GIT_HOOK=1
|
||||
_bd_timeout=${BEADS_HOOK_TIMEOUT:-300}
|
||||
if command -v timeout >/dev/null 2>&1; then
|
||||
timeout "$_bd_timeout" bd hooks run pre-push "$@"
|
||||
_bd_exit=$?
|
||||
if [ $_bd_exit -eq 124 ]; then
|
||||
echo >&2 "beads: hook 'pre-push' timed out after ${_bd_timeout}s — continuing without beads"
|
||||
_bd_exit=0
|
||||
fi
|
||||
else
|
||||
bd hooks run pre-push "$@"
|
||||
_bd_exit=$?
|
||||
fi
|
||||
if [ $_bd_exit -eq 3 ]; then
|
||||
echo >&2 "beads: database not initialized — skipping hook 'pre-push'"
|
||||
_bd_exit=0
|
||||
fi
|
||||
if [ $_bd_exit -ne 0 ]; then exit $_bd_exit; fi
|
||||
fi
|
||||
# --- END BEADS INTEGRATION v1.0.2 ---
|
||||
@@ -1,24 +0,0 @@
|
||||
#!/usr/bin/env sh
|
||||
# --- BEGIN BEADS INTEGRATION v1.0.2 ---
|
||||
# This section is managed by beads. Do not remove these markers.
|
||||
if command -v bd >/dev/null 2>&1; then
|
||||
export BD_GIT_HOOK=1
|
||||
_bd_timeout=${BEADS_HOOK_TIMEOUT:-300}
|
||||
if command -v timeout >/dev/null 2>&1; then
|
||||
timeout "$_bd_timeout" bd hooks run prepare-commit-msg "$@"
|
||||
_bd_exit=$?
|
||||
if [ $_bd_exit -eq 124 ]; then
|
||||
echo >&2 "beads: hook 'prepare-commit-msg' timed out after ${_bd_timeout}s — continuing without beads"
|
||||
_bd_exit=0
|
||||
fi
|
||||
else
|
||||
bd hooks run prepare-commit-msg "$@"
|
||||
_bd_exit=$?
|
||||
fi
|
||||
if [ $_bd_exit -eq 3 ]; then
|
||||
echo >&2 "beads: database not initialized — skipping hook 'prepare-commit-msg'"
|
||||
_bd_exit=0
|
||||
fi
|
||||
if [ $_bd_exit -ne 0 ]; then exit $_bd_exit; fi
|
||||
fi
|
||||
# --- END BEADS INTEGRATION v1.0.2 ---
|
||||
0
.beads/interactions.jsonl
Normal file
0
.beads/interactions.jsonl
Normal file
@@ -1 +1,20 @@
|
||||
{"id":"nixpkgs-ng1","title":"Configure agent git identity in nixpkgs repo","description":"Git commits are using p@m3ta.dev instead of m3ta-chiron@agentmail.to. The GIT_AUTHOR_NAME and GIT_AUTHOR_EMAIL environment variables are not set in this environment. Need to configure the agent git identity for this repository following the pattern in AGENTS.md","status":"open","priority":2,"issue_type":"task","owner":"p@m3ta.dev","created_at":"2026-04-27T18:16:17Z","created_by":"m3tm3re","updated_at":"2026-04-27T18:16:17Z","dependency_count":0,"dependent_count":0,"comment_count":0}
|
||||
{"id":"nixpkgs-1xm","title":"Package Basecamp MCP Server","status":"closed","priority":2,"issue_type":"task","created_at":"2026-01-12T05:01:20.465656454+01:00","created_by":"m3tam3re","updated_at":"2026-01-12T05:01:26.623404603+01:00","closed_at":"2026-01-12T05:01:26.623404603+01:00","close_reason":"Packaged successfully with env file support"}
|
||||
{"id":"nixpkgs-3k8","title":"Export project config as environment variable","description":"Export project configuration as home.sessionVariables (similar to zellij-ps pattern). Use JSON format for the env var since projects now have structured data (path + args).","status":"closed","priority":2,"issue_type":"task","created_at":"2026-01-05T12:45:50.803017318+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:48:22.968626155+01:00","closed_at":"2026-01-05T12:48:22.968626155+01:00","close_reason":"Added home.sessionVariables.ROFI_PROJECT_OPENER_CONFIG with JSON config","dependencies":[{"issue_id":"nixpkgs-3k8","depends_on_id":"nixpkgs-hrh","type":"parent-child","created_at":"2026-01-05T12:46:03.16885012+01:00","created_by":"m3tam3re"}]}
|
||||
{"id":"nixpkgs-3w3","title":"Add authentication command and environment file generation to basecamp-mcp-server","description":"The basecamp-mcp-server package at pkgs/basecamp-mcp-server/default.nix wraps the FastMCP server for Basecamp 3 integration. Currently, there's no convenient way for users to:\n\n1. Set up OAuth 2.0 authentication interactively\n2. Generate a sample environment file with required credentials\n3. Guide users through the Basecamp OAuth flow\n\n## Current State\n\nThe package wraps but users must:\n- Manually clone the upstream repository\n- Run to generate template\n- Edit with OAuth credentials from https://launchpad.37signals.com/integrations\n- Run to complete OAuth flow\n- Handle token storage manually\n\n## Required Environment Variables\n\n### OAuth Configuration (Primary)\n- - OAuth client ID from Basecamp\n- - OAuth client secret\n- - Found in Basecamp URL: https://3.basecamp.com/ID/...\n- - Format: \"App Name (email@domain.com)\"\n- - http://localhost:8000/auth/callback\n- - For Flask session security\n\n### Basic Auth (Legacy)\n- - Email for direct API access\n- - Password for direct API access\n\n## Proposed Solution\n\nAdd a Nix package wrapper command () that:\n\n1. **Interactive Setup Wizard**\n - Guides users through OAuth app creation at launchpad.37signals.com\n - Prompts for credentials (with secure input for secrets)\n - Validates inputs before proceeding\n\n2. **Environment File Generation**\n - Creates or project-local \n - Includes all required variables with clear documentation\n - Sets secure permissions (600)\n - Provides example values\n\n3. **OAuth Flow Handler**\n - Starts local Flask server on port 8000\n - Opens browser to initiate OAuth flow\n - Handles callback and token exchange\n - Stores tokens securely in \n - Shows success/failure status\n\n4. 
**Documentation**\n - Inline help for all steps\n - Links to Basecamp integration setup\n - Account ID discovery instructions\n\n## References\n\n- Upstream repo: https://github.com/georgeantonopoulos/Basecamp-MCP-Server\n- Key files: , , , \n- OAuth endpoints: launchpad.37signals.com/authorization/new\n\n## Subtasks\n\n- [ ] Create auth command wrapper (basecamp-mcp-auth)\n- [ ] Implement interactive OAuth wizard\n- [ ] Generate sample environment file template\n- [ ] Add token storage handling\n- [ ] Update package documentation\n- [ ] Add Home Manager module support (optional)","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-12T18:25:50.929926505+01:00","created_by":"m3tam3re","updated_at":"2026-01-12T19:12:57.941267399+01:00","closed_at":"2026-01-12T19:12:57.941267399+01:00","close_reason":"Closed"}
|
||||
{"id":"nixpkgs-5ml","title":"Update rofi-project-opener script to pass args to opencode","description":"Modify pkgs/rofi-project-opener script to read the new config format and launch opencode with: 'opencode \u003cdirectory\u003e \u003carguments\u003e' instead of just 'opencode' in the directory.","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-05T12:45:49.748958951+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:49:40.800083615+01:00","closed_at":"2026-01-05T12:49:40.800083615+01:00","close_reason":"Script updated to read JSON config and pass args to opencode","dependencies":[{"issue_id":"nixpkgs-5ml","depends_on_id":"nixpkgs-w3u","type":"parent-child","created_at":"2026-01-05T12:46:02.338350208+01:00","created_by":"m3tam3re"},{"issue_id":"nixpkgs-5ml","depends_on_id":"nixpkgs-sys","type":"blocks","created_at":"2026-01-05T12:46:04.966269033+01:00","created_by":"m3tam3re"}]}
|
||||
{"id":"nixpkgs-61l","title":"Update documentation to reflect latest changes","status":"closed","priority":3,"issue_type":"task","created_at":"2026-01-10T18:48:19.599467206+01:00","created_by":"m3tam3re","updated_at":"2026-01-10T19:12:26.294895563+01:00","closed_at":"2026-01-10T19:12:26.294895563+01:00","close_reason":"Documentation updated: Added stt-ptt language support docs, rofi-project-opener module docs, and updated zellij-ps docs"}
|
||||
{"id":"nixpkgs-69z","title":"n8n overlay -\u003e pkgs","description":"Create a full package definition in pkgs/n8n/default.nix that mirrors nixpkgs-unstable's n8n package but with version 2.4.1 and updated hashes from overlays/mods/n8n.nix. Register the new package in pkgs/default.nix. The overlay overrides: src (n8n-io/n8n@2.4.1) and pnpmDeps hash. Reference nixpkgs-unstable for the base package structure, then apply the version/hash modifications.","status":"closed","priority":2,"issue_type":"chore","owner":"p@m3ta.dev","created_at":"2026-01-13T19:42:35.643928163+01:00","created_by":"m3tm3re","updated_at":"2026-01-13T20:25:53.712987689+01:00","closed_at":"2026-01-13T20:25:53.712987689+01:00","close_reason":"Packages created successfully: pkgs/n8n/default.nix, pkgs/beads/default.nix, pkgs/opencode/default.nix. All packages build successfully."}
|
||||
{"id":"nixpkgs-7ez","title":"Add authentication command for basecamp-mcp-server","description":"Add a command for the basecamp-mcp-server that will guide the user through the authentication flow and create a sample environment file with the necessary values. This involves researching the original basecamp-mcp-server repository for authentication details and implementing a user-friendly authentication workflow.","status":"tombstone","priority":0,"issue_type":"feature","created_at":"2026-01-12T18:20:55.665717166+01:00","created_by":"m3tam3re","updated_at":"2026-01-13T19:46:32.112562429+01:00","dependencies":[{"issue_id":"nixpkgs-7ez","depends_on_id":"nixpkgs-3w3","type":"blocks","created_at":"2026-01-12T18:48:31.806330931+01:00","created_by":"m3tam3re"}],"deleted_at":"2026-01-13T19:46:32.112562429+01:00","deleted_by":"daemon","delete_reason":"delete","original_type":"feature"}
|
||||
{"id":"nixpkgs-8jw","title":"fix: self-hosted nixos runner missing node in PATH for Gitea Actions","description":"The nix-update workflow fails on self-hosted nixos runner because node is not available in PATH. Error: Cannot find: node in PATH. Root cause: actions/checkout@v4 requires Node.js to execute post-checkout steps. The self-hosted nixos runner does not have node installed or not in PATH. Possible solutions: 1) Add Node.js to the nixos runner environment, 2) Use container-based runner instead of bare nixos, 3) Use a different checkout action that does not require node, 4) Configure PATH to include node installation. Impact: Automated package updates are completely blocked.","status":"closed","priority":1,"issue_type":"bug","owner":"p@m3ta.dev","created_at":"2026-01-14T20:50:59.153145341+01:00","created_by":"m3tm3re","updated_at":"2026-01-18T18:36:50.926416564+01:00","closed_at":"2026-01-17T09:49:26.658187402+01:00"}
|
||||
{"id":"nixpkgs-8ng","title":"fix: Add nix-update arguments for opencode subpackage","description":"The Gitea workflow does not work correctly on the opencode package because opencode uses a subpackage. To fix this, nix-update needs to be run with additional arguments to adjust the output hash of the subpackage.\n\n**Solution:**\nRun nix-update with the following arguments:\n$ nix-instantiate --eval --json --strict /nix/store/ybiynv89drnshjdlb089r3i687c5k878-nix-update-1.14.0/lib/python3.13/site-packages/nix_update/eval.nix --argstr importPath /home/m3tam3re/p/NIX/nixpkgs --argstr attribute '[\"opencode\"]' --arg isFlake true --arg sanitizePositions true\nfetch https://github.com/anomalyco/opencode/releases.atom\nNot updating version, already 1.1.25\nUpdating subpackage node_modules\n$ nix-instantiate --eval --json --strict /nix/store/ybiynv89drnshjdlb089r3i687c5k878-nix-update-1.14.0/lib/python3.13/site-packages/nix_update/eval.nix --argstr importPath /home/m3tam3re/p/NIX/nixpkgs --argstr attribute '[\"opencode\", \"node_modules\"]' --arg isFlake true --arg sanitizePositions true\n$ nix-build --expr 'let src = (let flake = builtins.getFlake \"/home/m3tam3re/p/NIX/nixpkgs\"; in flake.packages.${builtins.currentSystem}.\"opencode\".\"node_modules\" or flake.\"opencode\".\"node_modules\").src; in (src.overrideAttrs or (f: src // f src)) (_: { outputHash = \"\"; outputHashAlgo = \"sha256\"; })' --extra-experimental-features 'flakes nix-command'\n$ nix-build --expr 'let src = (let flake = builtins.getFlake \"/home/m3tam3re/p/NIX/nixpkgs\"; in flake.packages.${builtins.currentSystem}.\"opencode\".\"node_modules\" or flake.\"opencode\".\"node_modules\"); in (src.overrideAttrs or (f: src // f src)) (_: { outputHash = \"\"; outputHashAlgo = \"sha256\"; })' --extra-experimental-features 'flakes nix-command'\nPackage maintainers:\n - Thierry Delafontaine (@delafthi)\n$ git -C /home/m3tam3re/p/NIX/nixpkgs diff -- /home/m3tam3re/p/NIX/nixpkgs/pkgs/opencode\n\nThe workflow file 
needs to be updated to pass these arguments when updating the opencode package.","status":"closed","priority":2,"issue_type":"bug","owner":"p@m3ta.dev","created_at":"2026-01-17T09:47:55.750805329+01:00","created_by":"m3tm3re","updated_at":"2026-01-18T10:38:06.536530593+01:00","closed_at":"2026-01-18T10:38:06.536530593+01:00","close_reason":"Closed"}
|
||||
{"id":"nixpkgs-98j","title":"beads overlay -\u003e pkgs","description":"Create a full package definition in pkgs/beads/default.nix that mirrors nixpkgs-unstable's beads package but with version 0.47.1 and updated hashes from overlays/mods/beads.nix. Register the new package in pkgs/default.nix. The overlay overrides: src (steveyegge/beads@v0.47.1), vendorHash, and disables tests (doCheck = false). Reference nixpkgs-unstable for the base package structure, then apply the version/hash modifications.","status":"closed","priority":2,"issue_type":"chore","owner":"p@m3ta.dev","created_at":"2026-01-13T19:43:35.645275221+01:00","created_by":"m3tm3re","updated_at":"2026-01-13T20:25:53.715613545+01:00","closed_at":"2026-01-13T20:25:53.715613545+01:00","close_reason":"Packages created successfully: pkgs/n8n/default.nix, pkgs/beads/default.nix, pkgs/opencode/default.nix. All packages build successfully."}
|
||||
{"id":"nixpkgs-bqc","title":"opencode overlay -\u003e pkgs","description":"Create a full package definition in pkgs/opencode/default.nix that mirrors nixpkgs-unstable's opencode package but with version 1.1.18 and updated hashes from overlays/mods/opencode.nix. Register the new package in pkgs/default.nix. The overlay overrides: src (anomalyco/opencode@v1.1.18) and node_modules hash. Reference nixpkgs-unstable for the base package structure, then apply the version/hash modifications.","status":"closed","priority":2,"issue_type":"chore","owner":"p@m3ta.dev","created_at":"2026-01-13T19:43:36.450930004+01:00","created_by":"m3tm3re","updated_at":"2026-01-13T20:25:53.717928297+01:00","closed_at":"2026-01-13T20:25:53.717928297+01:00","close_reason":"Packages created successfully: pkgs/n8n/default.nix, pkgs/beads/default.nix, pkgs/opencode/default.nix. All packages build successfully."}
|
||||
{"id":"nixpkgs-e2u","title":"Change projectDirs from list to attrset with path and args","description":"Change projectDirs option type from 'types.listOf types.str' to an attrset like:\n\nprojectDirs = {\n nixpkgs = { path = \"~/p/NIX/nixpkgs\"; args = \"--agent Planner-Sisyphus\"; };\n myproject = { path = \"~/dev/myproject\"; }; # args optional\n};\n\nMust maintain backward compatibility consideration.","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-05T12:45:48.6992807+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:48:12.178120719+01:00","closed_at":"2026-01-05T12:48:12.178120719+01:00","close_reason":"Changed projectDirs from listOf str to attrsOf submodule with path+args","dependencies":[{"issue_id":"nixpkgs-e2u","depends_on_id":"nixpkgs-w3u","type":"parent-child","created_at":"2026-01-05T12:46:00.515400521+01:00","created_by":"m3tam3re"}]}
|
||||
{"id":"nixpkgs-fka","title":"Ensure docs are staged and committed","status":"closed","priority":3,"issue_type":"task","created_at":"2026-01-10T18:48:18.05472995+01:00","created_by":"m3tam3re","updated_at":"2026-01-10T19:12:26.306880401+01:00","closed_at":"2026-01-10T19:12:26.306880401+01:00","close_reason":"Documentation staged and ready for commit"}
|
||||
{"id":"nixpkgs-hrh","title":"projectDirs on rofi-project-switcher should be exported to users home-manager environment","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-05T11:46:43.640224459+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:49:43.584087541+01:00","closed_at":"2026-01-05T12:49:43.584087541+01:00","close_reason":"Feature complete: config exported as ROFI_PROJECT_OPENER_CONFIG env var","dependencies":[{"issue_id":"nixpkgs-hrh","depends_on_id":"nixpkgs-w3u","type":"blocks","created_at":"2026-01-05T12:46:05.867959608+01:00","created_by":"m3tam3re"}]}
|
||||
{"id":"nixpkgs-p79","title":"feat: Add auto-language detection support to stt-ptt package","description":"## Current State\n- **Package**: `/home/m3tam3re/p/NIX/nixpkgs/pkgs/stt-ptt/default.nix`\n- **Module**: `/home/m3tam3re/p/NIX/nixpkgs/modules/home-manager/cli/stt-ptt.nix`\n- **Current whisper-cli call** (line 55): `\"$WHISPER\" -m \"$STT_MODEL\" -f \"$AUDIO\" -np -nt 2\u003e/dev/null`\n- **Missing**: Language configuration option and auto-detection\n\n## Required Changes\n\n### 1. Update Package (`pkgs/stt-ptt/default.nix`)\nAdd support for language configuration:\n- Add `STT_LANGUAGE` environment variable (default: \"auto\" for auto-detection)\n- Modify whisper-cli call to use `-l \"$STT_LANGUAGE\"` flag\n- Support the special value \"auto\" for automatic language detection\n- Update help text to document the new environment variable\n\n### 2. Update Home Manager Module (`modules/home-manager/cli/stt-ptt.nix`)\nAdd new `language` option:\n- Add `language` option with type `types.enum [\"auto\" \"en\" \"es\" \"fr\" \"de\" \"it\" \"pt\" \"ru\" \"zh\" \"ja\" \"ko\" ...]`\n- Default should be \"auto\" (auto-detect)\n- Set `STT_LANGUAGE` environment variable in `home.sessionVariables`\n- Add documentation describing available languages and auto-detection behavior\n- Consider using `types.nullOr(types.str)` with \"auto\" as default for more flexibility\n\n### 3. 
Technical Details\nFrom whisper.cpp CLI documentation:\n- `-l LANG, --language LANG [en] spoken language ('auto' for auto-detect)`\n- Auto-detection analyzes audio to determine spoken language automatically\n- Specifying a language can improve accuracy if you know the language\n- Language codes follow ISO 639-1 standard (2-letter codes)\n\n## Implementation Plan\n\n### Package Changes:\n```nix\n# Add to environment variables section:\nSTT_LANGUAGE=\"${STT_LANGUAGE:-auto}\"\n\n# Modify whisper call:\n\"$WHISPER\" -m \"$STT_MODEL\" -f \"$AUDIO\" -l \"$STT_LANGUAGE\" -np -nt 2\u003e/dev/null\n\n# Update help text:\necho \" STT_LANGUAGE - Language code or 'auto' for auto-detection (default: auto)\"\n```\n\n### Module Changes:\n```nix\nlanguage = mkOption {\n type = types.enum [\"auto\" \"en\" \"es\" \"fr\" \"de\" \"it\" \"pt\" \"ru\" \"zh\" \"ja\" \"ko\"];\n default = \"auto\";\n description = ''\n Language for speech recognition. Use \"auto\" for automatic language detection,\n or specify a language code (e.g., \"en\", \"es\", \"fr\") for better accuracy.\n Auto-detection analyzes the audio to determine the spoken language.\n '';\n};\n\nhome.sessionVariables = {\n STT_LANGUAGE = cfg.language;\n # ... existing variables\n};\n```\n\n## Benefits\n- **Multilingual support**: Users can speak in any language\n- **Convenience**: Auto-detection eliminates need to specify language\n- **Accuracy**: Explicit language selection improves transcription accuracy\n- **Backward compatible**: Default behavior (auto) matches current functionality\n\n## Testing Considerations\n1. Test auto-detection with multiple languages\n2. Test explicit language selection\n3. Test fallback behavior when auto-detection fails\n4. 
Test with different Whisper models\n\n## Related\n- whisper.cpp supports 100+ languages\n- Language codes follow ISO 639-1 standard\n- Auto-detection adds slight processing overhead","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-10T08:40:51.478869019+01:00","created_by":"m3tam3re","updated_at":"2026-01-10T09:35:49.421572334+01:00","closed_at":"2026-01-10T09:35:49.421572334+01:00","close_reason":"Implemented auto-language detection support: Added STT_LANGUAGE environment variable to package, modified whisper-cli call to use -l flag, updated help text, and added language option to HM module with enum support for 23 common languages plus 'auto'."}
|
||||
{"id":"nixpkgs-r3u","title":"Add a Gitea action for nix-update to automatically update packages","description":"Create a Gitea action that runs nix-update to automatically update packages in this repository. My Gitea instance runs on NixOS (hostname: m3-atlas). Check the Gitea configuration in /home/m3tam3re/p/NIX/nixos-config for reference on how to set up actions. The action should target the packages directory and create PRs with updates when nix-update finds newer versions.","status":"closed","priority":2,"issue_type":"task","owner":"p@m3ta.dev","created_at":"2026-01-13T19:50:22.953433727+01:00","created_by":"m3tm3re","updated_at":"2026-01-18T18:36:50.936219311+01:00","closed_at":"2026-01-17T09:49:05.573319795+01:00","dependencies":[{"issue_id":"nixpkgs-r3u","depends_on_id":"nixpkgs-69z","type":"discovered-from","created_at":"2026-01-13T19:52:40.98774707+01:00","created_by":"m3tm3re"},{"issue_id":"nixpkgs-r3u","depends_on_id":"nixpkgs-98j","type":"discovered-from","created_at":"2026-01-13T19:52:41.501620297+01:00","created_by":"m3tm3re"},{"issue_id":"nixpkgs-r3u","depends_on_id":"nixpkgs-bqc","type":"discovered-from","created_at":"2026-01-13T19:52:42.010721971+01:00","created_by":"m3tm3re"}]}
|
||||
{"id":"nixpkgs-sys","title":"Update config file format to include args per project","description":"Update the config file at ~/.config/rofi-project-opener/config to support per-project args. Change from PROJECT_DIRS to a format that encodes both path and args, e.g. JSON or structured text.","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-05T12:45:51.793810575+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:48:21.75755315+01:00","closed_at":"2026-01-05T12:48:21.75755315+01:00","close_reason":"Config file now writes JSON (projects.json) with per-project path and args","dependencies":[{"issue_id":"nixpkgs-sys","depends_on_id":"nixpkgs-w3u","type":"parent-child","created_at":"2026-01-05T12:46:01.400263722+01:00","created_by":"m3tam3re"},{"issue_id":"nixpkgs-sys","depends_on_id":"nixpkgs-e2u","type":"blocks","created_at":"2026-01-05T12:46:04.055948778+01:00","created_by":"m3tam3re"}]}
|
||||
{"id":"nixpkgs-tsh","title":"Prevent verification of ignored packages in nix-update workflow","status":"closed","priority":2,"issue_type":"task","owner":"p@m3ta.dev","created_at":"2026-01-18T10:36:45.393068138+01:00","created_by":"m3tm3re","updated_at":"2026-01-18T10:37:31.686158451+01:00","closed_at":"2026-01-18T10:37:31.686158451+01:00","close_reason":"Fixed by changing return 0 to return 1 for ignored packages"}
|
||||
{"id":"nixpkgs-w3u","title":"projectDirs should support args, for example --agent \u003cagentname\u003e","description":"This means project directories need to be an attribute set and we also need to change the launch command to \"opencode \u003cdirectory name\u003e \u003carguments\u003e\".\n","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-05T11:56:13.844735432+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:49:42.34144046+01:00","closed_at":"2026-01-05T12:49:42.34144046+01:00","close_reason":"Feature complete: projectDirs now supports args per directory"}
|
||||
{"id":"nixpkgs-xi7","title":"There is a bug in the basecamp-mcp server","description":"OSError: [Errno 30] Read-only file system when trying to write log file.\n\nTraceback:\nFile: /nix/store/708ksr7z3484bj8faysd7djwpa8xvw12-basecamp-mcp-server-0.0.1-unstable/lib/basecamp-mcp-server/basecamp_fastmcp.py, line 35\nError: logging.FileHandler(LOG_FILE_PATH) - attempting to write to read-only Nix store location: /nix/store/708ksr7z3484bj8faysd7djwpa8xvw12-basecamp-mcp-server-0.0.1-unstable/lib/basecamp-mcp-server/basecamp_fastmcp.log\n\nThe package needs to use a writable directory (e.g., XDG_DATA_HOME or /tmp) for log files instead of the Nix store.","status":"closed","priority":2,"issue_type":"bug","created_at":"2026-01-12T18:16:37.597147907+01:00","created_by":"m3tam3re","updated_at":"2026-01-12T19:12:57.91076765+01:00","closed_at":"2026-01-12T19:12:57.91076765+01:00","close_reason":"Closed"}
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
{
|
||||
"database": "dolt",
|
||||
"backend": "dolt",
|
||||
"dolt_mode": "embedded",
|
||||
"dolt_database": "nixpkgs",
|
||||
"project_id": "b57a167a-6526-4211-a6c1-51686e431912"
|
||||
"database": "beads.db",
|
||||
"jsonl_export": "issues.jsonl"
|
||||
}
|
||||
18
.beads/sync_base.jsonl
Normal file
18
.beads/sync_base.jsonl
Normal file
@@ -0,0 +1,18 @@
|
||||
{"id":"nixpkgs-1xm","title":"Package Basecamp MCP Server","status":"closed","priority":2,"issue_type":"task","created_at":"2026-01-12T05:01:20.465656454+01:00","created_by":"m3tam3re","updated_at":"2026-01-12T05:01:26.623404603+01:00","closed_at":"2026-01-12T05:01:26.623404603+01:00","close_reason":"Packaged successfully with env file support"}
|
||||
{"id":"nixpkgs-3k8","title":"Export project config as environment variable","description":"Export project configuration as home.sessionVariables (similar to zellij-ps pattern). Use JSON format for the env var since projects now have structured data (path + args).","status":"closed","priority":2,"issue_type":"task","created_at":"2026-01-05T12:45:50.803017318+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:48:22.968626155+01:00","closed_at":"2026-01-05T12:48:22.968626155+01:00","close_reason":"Added home.sessionVariables.ROFI_PROJECT_OPENER_CONFIG with JSON config","dependencies":[{"issue_id":"nixpkgs-3k8","depends_on_id":"nixpkgs-hrh","type":"parent-child","created_at":"2026-01-05T12:46:03.16885012+01:00","created_by":"m3tam3re"}]}
|
||||
{"id":"nixpkgs-3w3","title":"Add authentication command and environment file generation to basecamp-mcp-server","description":"The basecamp-mcp-server package at pkgs/basecamp-mcp-server/default.nix wraps the FastMCP server for Basecamp 3 integration. Currently, there's no convenient way for users to:\n\n1. Set up OAuth 2.0 authentication interactively\n2. Generate a sample environment file with required credentials\n3. Guide users through the Basecamp OAuth flow\n\n## Current State\n\nThe package wraps but users must:\n- Manually clone the upstream repository\n- Run to generate template\n- Edit with OAuth credentials from https://launchpad.37signals.com/integrations\n- Run to complete OAuth flow\n- Handle token storage manually\n\n## Required Environment Variables\n\n### OAuth Configuration (Primary)\n- - OAuth client ID from Basecamp\n- - OAuth client secret\n- - Found in Basecamp URL: https://3.basecamp.com/ID/...\n- - Format: \"App Name (email@domain.com)\"\n- - http://localhost:8000/auth/callback\n- - For Flask session security\n\n### Basic Auth (Legacy)\n- - Email for direct API access\n- - Password for direct API access\n\n## Proposed Solution\n\nAdd a Nix package wrapper command () that:\n\n1. **Interactive Setup Wizard**\n - Guides users through OAuth app creation at launchpad.37signals.com\n - Prompts for credentials (with secure input for secrets)\n - Validates inputs before proceeding\n\n2. **Environment File Generation**\n - Creates or project-local \n - Includes all required variables with clear documentation\n - Sets secure permissions (600)\n - Provides example values\n\n3. **OAuth Flow Handler**\n - Starts local Flask server on port 8000\n - Opens browser to initiate OAuth flow\n - Handles callback and token exchange\n - Stores tokens securely in \n - Shows success/failure status\n\n4. 
**Documentation**\n - Inline help for all steps\n - Links to Basecamp integration setup\n - Account ID discovery instructions\n\n## References\n\n- Upstream repo: https://github.com/georgeantonopoulos/Basecamp-MCP-Server\n- Key files: , , , \n- OAuth endpoints: launchpad.37signals.com/authorization/new\n\n## Subtasks\n\n- [ ] Create auth command wrapper (basecamp-mcp-auth)\n- [ ] Implement interactive OAuth wizard\n- [ ] Generate sample environment file template\n- [ ] Add token storage handling\n- [ ] Update package documentation\n- [ ] Add Home Manager module support (optional)","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-12T18:25:50.929926505+01:00","created_by":"m3tam3re","updated_at":"2026-01-12T19:12:57.941267399+01:00","closed_at":"2026-01-12T19:12:57.941267399+01:00","close_reason":"Closed"}
|
||||
{"id":"nixpkgs-5ml","title":"Update rofi-project-opener script to pass args to opencode","description":"Modify pkgs/rofi-project-opener script to read the new config format and launch opencode with: 'opencode <directory> <arguments>' instead of just 'opencode' in the directory.","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-05T12:45:49.748958951+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:49:40.800083615+01:00","closed_at":"2026-01-05T12:49:40.800083615+01:00","close_reason":"Script updated to read JSON config and pass args to opencode","dependencies":[{"issue_id":"nixpkgs-5ml","depends_on_id":"nixpkgs-w3u","type":"parent-child","created_at":"2026-01-05T12:46:02.338350208+01:00","created_by":"m3tam3re"},{"issue_id":"nixpkgs-5ml","depends_on_id":"nixpkgs-sys","type":"blocks","created_at":"2026-01-05T12:46:04.966269033+01:00","created_by":"m3tam3re"}]}
|
||||
{"id":"nixpkgs-61l","title":"Update documentation to reflect latest changes","status":"closed","priority":3,"issue_type":"task","created_at":"2026-01-10T18:48:19.599467206+01:00","created_by":"m3tam3re","updated_at":"2026-01-10T19:12:26.294895563+01:00","closed_at":"2026-01-10T19:12:26.294895563+01:00","close_reason":"Documentation updated: Added stt-ptt language support docs, rofi-project-opener module docs, and updated zellij-ps docs"}
|
||||
{"id":"nixpkgs-69z","title":"n8n overlay -> pkgs","description":"Create a full package definition in pkgs/n8n/default.nix that mirrors nixpkgs-unstable's n8n package but with version 2.4.1 and updated hashes from overlays/mods/n8n.nix. Register the new package in pkgs/default.nix. The overlay overrides: src (n8n-io/n8n@2.4.1) and pnpmDeps hash. Reference nixpkgs-unstable for the base package structure, then apply the version/hash modifications.","status":"closed","priority":2,"issue_type":"chore","owner":"p@m3ta.dev","created_at":"2026-01-13T19:42:35.643928163+01:00","created_by":"m3tm3re","updated_at":"2026-01-13T20:25:53.712987689+01:00","closed_at":"2026-01-13T20:25:53.712987689+01:00","close_reason":"Packages created successfully: pkgs/n8n/default.nix, pkgs/beads/default.nix, pkgs/opencode/default.nix. All packages build successfully."}
|
||||
{"id":"nixpkgs-7ez","title":"Add authentication command for basecamp-mcp-server","description":"Add a command for the basecamp-mcp-server that will guide the user through the authentication flow and create a sample environment file with the necessary values. This involves researching the original basecamp-mcp-server repository for authentication details and implementing a user-friendly authentication workflow.","status":"tombstone","priority":0,"issue_type":"feature","created_at":"2026-01-12T18:20:55.665717166+01:00","created_by":"m3tam3re","updated_at":"2026-01-13T19:46:32.112562429+01:00","dependencies":[{"issue_id":"nixpkgs-7ez","depends_on_id":"nixpkgs-3w3","type":"blocks","created_at":"2026-01-12T18:48:31.806330931+01:00","created_by":"m3tam3re"}],"deleted_at":"2026-01-13T19:46:32.112562429+01:00","deleted_by":"daemon","delete_reason":"delete","original_type":"feature"}
|
||||
{"id":"nixpkgs-8jw","title":"fix: self-hosted nixos runner missing node in PATH for Gitea Actions","description":"The nix-update workflow fails on self-hosted nixos runner because node is not available in PATH. Error: Cannot find: node in PATH. Root cause: actions/checkout@v4 requires Node.js to execute post-checkout steps. The self-hosted nixos runner does not have node installed or not in PATH. Possible solutions: 1) Add Node.js to the nixos runner environment, 2) Use container-based runner instead of bare nixos, 3) Use a different checkout action that does not require node, 4) Configure PATH to include node installation. Impact: Automated package updates are completely blocked.","status":"open","priority":1,"issue_type":"bug","owner":"p@m3ta.dev","created_at":"2026-01-14T20:50:59.153145341+01:00","created_by":"m3tm3re","updated_at":"2026-01-14T20:51:34.044954071+01:00"}
|
||||
{"id":"nixpkgs-98j","title":"beads overlay -> pkgs","description":"Create a full package definition in pkgs/beads/default.nix that mirrors nixpkgs-unstable's beads package but with version 0.47.1 and updated hashes from overlays/mods/beads.nix. Register the new package in pkgs/default.nix. The overlay overrides: src (steveyegge/beads@v0.47.1), vendorHash, and disables tests (doCheck = false). Reference nixpkgs-unstable for the base package structure, then apply the version/hash modifications.","status":"closed","priority":2,"issue_type":"chore","owner":"p@m3ta.dev","created_at":"2026-01-13T19:43:35.645275221+01:00","created_by":"m3tm3re","updated_at":"2026-01-13T20:25:53.715613545+01:00","closed_at":"2026-01-13T20:25:53.715613545+01:00","close_reason":"Packages created successfully: pkgs/n8n/default.nix, pkgs/beads/default.nix, pkgs/opencode/default.nix. All packages build successfully."}
|
||||
{"id":"nixpkgs-bqc","title":"opencode overlay -> pkgs","description":"Create a full package definition in pkgs/opencode/default.nix that mirrors nixpkgs-unstable's opencode package but with version 1.1.18 and updated hashes from overlays/mods/opencode.nix. Register the new package in pkgs/default.nix. The overlay overrides: src (anomalyco/opencode@v1.1.18) and node_modules hash. Reference nixpkgs-unstable for the base package structure, then apply the version/hash modifications.","status":"closed","priority":2,"issue_type":"chore","owner":"p@m3ta.dev","created_at":"2026-01-13T19:43:36.450930004+01:00","created_by":"m3tm3re","updated_at":"2026-01-13T20:25:53.717928297+01:00","closed_at":"2026-01-13T20:25:53.717928297+01:00","close_reason":"Packages created successfully: pkgs/n8n/default.nix, pkgs/beads/default.nix, pkgs/opencode/default.nix. All packages build successfully."}
|
||||
{"id":"nixpkgs-e2u","title":"Change projectDirs from list to attrset with path and args","description":"Change projectDirs option type from 'types.listOf types.str' to an attrset like:\n\nprojectDirs = {\n nixpkgs = { path = \"~/p/NIX/nixpkgs\"; args = \"--agent Planner-Sisyphus\"; };\n myproject = { path = \"~/dev/myproject\"; }; # args optional\n};\n\nMust maintain backward compatibility consideration.","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-05T12:45:48.6992807+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:48:12.178120719+01:00","closed_at":"2026-01-05T12:48:12.178120719+01:00","close_reason":"Changed projectDirs from listOf str to attrsOf submodule with path+args","dependencies":[{"issue_id":"nixpkgs-e2u","depends_on_id":"nixpkgs-w3u","type":"parent-child","created_at":"2026-01-05T12:46:00.515400521+01:00","created_by":"m3tam3re"}]}
|
||||
{"id":"nixpkgs-fka","title":"Ensure docs are staged and committed","status":"closed","priority":3,"issue_type":"task","created_at":"2026-01-10T18:48:18.05472995+01:00","created_by":"m3tam3re","updated_at":"2026-01-10T19:12:26.306880401+01:00","closed_at":"2026-01-10T19:12:26.306880401+01:00","close_reason":"Documentation staged and ready for commit"}
|
||||
{"id":"nixpkgs-hrh","title":"projectDirs on rofi-project-switcher should be exported to users home-manager environment","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-05T11:46:43.640224459+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:49:43.584087541+01:00","closed_at":"2026-01-05T12:49:43.584087541+01:00","close_reason":"Feature complete: config exported as ROFI_PROJECT_OPENER_CONFIG env var","dependencies":[{"issue_id":"nixpkgs-hrh","depends_on_id":"nixpkgs-w3u","type":"blocks","created_at":"2026-01-05T12:46:05.867959608+01:00","created_by":"m3tam3re"}]}
|
||||
{"id":"nixpkgs-p79","title":"feat: Add auto-language detection support to stt-ptt package","description":"## Current State\n- **Package**: `/home/m3tam3re/p/NIX/nixpkgs/pkgs/stt-ptt/default.nix`\n- **Module**: `/home/m3tam3re/p/NIX/nixpkgs/modules/home-manager/cli/stt-ptt.nix`\n- **Current whisper-cli call** (line 55): `\"$WHISPER\" -m \"$STT_MODEL\" -f \"$AUDIO\" -np -nt 2>/dev/null`\n- **Missing**: Language configuration option and auto-detection\n\n## Required Changes\n\n### 1. Update Package (`pkgs/stt-ptt/default.nix`)\nAdd support for language configuration:\n- Add `STT_LANGUAGE` environment variable (default: \"auto\" for auto-detection)\n- Modify whisper-cli call to use `-l \"$STT_LANGUAGE\"` flag\n- Support the special value \"auto\" for automatic language detection\n- Update help text to document the new environment variable\n\n### 2. Update Home Manager Module (`modules/home-manager/cli/stt-ptt.nix`)\nAdd new `language` option:\n- Add `language` option with type `types.enum [\"auto\" \"en\" \"es\" \"fr\" \"de\" \"it\" \"pt\" \"ru\" \"zh\" \"ja\" \"ko\" ...]`\n- Default should be \"auto\" (auto-detect)\n- Set `STT_LANGUAGE` environment variable in `home.sessionVariables`\n- Add documentation describing available languages and auto-detection behavior\n- Consider using `types.nullOr(types.str)` with \"auto\" as default for more flexibility\n\n### 3. 
Technical Details\nFrom whisper.cpp CLI documentation:\n- `-l LANG, --language LANG [en] spoken language ('auto' for auto-detect)`\n- Auto-detection analyzes audio to determine spoken language automatically\n- Specifying a language can improve accuracy if you know the language\n- Language codes follow ISO 639-1 standard (2-letter codes)\n\n## Implementation Plan\n\n### Package Changes:\n```nix\n# Add to environment variables section:\nSTT_LANGUAGE=\"${STT_LANGUAGE:-auto}\"\n\n# Modify whisper call:\n\"$WHISPER\" -m \"$STT_MODEL\" -f \"$AUDIO\" -l \"$STT_LANGUAGE\" -np -nt 2>/dev/null\n\n# Update help text:\necho \" STT_LANGUAGE - Language code or 'auto' for auto-detection (default: auto)\"\n```\n\n### Module Changes:\n```nix\nlanguage = mkOption {\n type = types.enum [\"auto\" \"en\" \"es\" \"fr\" \"de\" \"it\" \"pt\" \"ru\" \"zh\" \"ja\" \"ko\"];\n default = \"auto\";\n description = ''\n Language for speech recognition. Use \"auto\" for automatic language detection,\n or specify a language code (e.g., \"en\", \"es\", \"fr\") for better accuracy.\n Auto-detection analyzes the audio to determine the spoken language.\n '';\n};\n\nhome.sessionVariables = {\n STT_LANGUAGE = cfg.language;\n # ... existing variables\n};\n```\n\n## Benefits\n- **Multilingual support**: Users can speak in any language\n- **Convenience**: Auto-detection eliminates need to specify language\n- **Accuracy**: Explicit language selection improves transcription accuracy\n- **Backward compatible**: Default behavior (auto) matches current functionality\n\n## Testing Considerations\n1. Test auto-detection with multiple languages\n2. Test explicit language selection\n3. Test fallback behavior when auto-detection fails\n4. 
Test with different Whisper models\n\n## Related\n- whisper.cpp supports 100+ languages\n- Language codes follow ISO 639-1 standard\n- Auto-detection adds slight processing overhead","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-10T08:40:51.478869019+01:00","created_by":"m3tam3re","updated_at":"2026-01-10T09:35:49.421572334+01:00","closed_at":"2026-01-10T09:35:49.421572334+01:00","close_reason":"Implemented auto-language detection support: Added STT_LANGUAGE environment variable to package, modified whisper-cli call to use -l flag, updated help text, and added language option to HM module with enum support for 23 common languages plus 'auto'."}
|
||||
{"id":"nixpkgs-r3u","title":"Add a Gitea action for nix-update to automatically update packages","description":"Create a Gitea action that runs nix-update to automatically update packages in this repository. My Gitea instance runs on NixOS (hostname: m3-atlas). Check the Gitea configuration in /home/m3tam3re/p/NIX/nixos-config for reference on how to set up actions. The action should target the packages directory and create PRs with updates when nix-update finds newer versions.","status":"open","priority":2,"issue_type":"task","owner":"p@m3ta.dev","created_at":"2026-01-13T19:50:22.953433727+01:00","created_by":"m3tm3re","updated_at":"2026-01-13T19:52:40.071361668+01:00","dependencies":[{"issue_id":"nixpkgs-r3u","depends_on_id":"nixpkgs-69z","type":"discovered-from","created_at":"2026-01-13T19:52:40.98774707+01:00","created_by":"m3tm3re"},{"issue_id":"nixpkgs-r3u","depends_on_id":"nixpkgs-98j","type":"discovered-from","created_at":"2026-01-13T19:52:41.501620297+01:00","created_by":"m3tm3re"},{"issue_id":"nixpkgs-r3u","depends_on_id":"nixpkgs-bqc","type":"discovered-from","created_at":"2026-01-13T19:52:42.010721971+01:00","created_by":"m3tm3re"}]}
|
||||
{"id":"nixpkgs-sys","title":"Update config file format to include args per project","description":"Update the config file at ~/.config/rofi-project-opener/config to support per-project args. Change from PROJECT_DIRS to a format that encodes both path and args, e.g. JSON or structured text.","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-05T12:45:51.793810575+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:48:21.75755315+01:00","closed_at":"2026-01-05T12:48:21.75755315+01:00","close_reason":"Config file now writes JSON (projects.json) with per-project path and args","dependencies":[{"issue_id":"nixpkgs-sys","depends_on_id":"nixpkgs-w3u","type":"parent-child","created_at":"2026-01-05T12:46:01.400263722+01:00","created_by":"m3tam3re"},{"issue_id":"nixpkgs-sys","depends_on_id":"nixpkgs-e2u","type":"blocks","created_at":"2026-01-05T12:46:04.055948778+01:00","created_by":"m3tam3re"}]}
|
||||
{"id":"nixpkgs-w3u","title":"projectDirs should support args, for example --agent <agentname>","description":"This means project directories need to be an attribute set and we also need to change the launch command to \"opencode <directory name> <arguments>\".\n","status":"closed","priority":2,"issue_type":"feature","created_at":"2026-01-05T11:56:13.844735432+01:00","created_by":"m3tam3re","updated_at":"2026-01-05T12:49:42.34144046+01:00","closed_at":"2026-01-05T12:49:42.34144046+01:00","close_reason":"Feature complete: projectDirs now supports args per directory"}
|
||||
{"id":"nixpkgs-xi7","title":"There is a bug in the basecamp-mcp server","description":"OSError: [Errno 30] Read-only file system when trying to write log file.\n\nTraceback:\nFile: /nix/store/708ksr7z3484bj8faysd7djwpa8xvw12-basecamp-mcp-server-0.0.1-unstable/lib/basecamp-mcp-server/basecamp_fastmcp.py, line 35\nError: logging.FileHandler(LOG_FILE_PATH) - attempting to write to read-only Nix store location: /nix/store/708ksr7z3484bj8faysd7djwpa8xvw12-basecamp-mcp-server-0.0.1-unstable/lib/basecamp-mcp-server/basecamp_fastmcp.log\n\nThe package needs to use a writable directory (e.g., XDG_DATA_HOME or /tmp) for log files instead of the Nix store.","status":"closed","priority":2,"issue_type":"bug","created_at":"2026-01-12T18:16:37.597147907+01:00","created_by":"m3tam3re","updated_at":"2026-01-12T19:12:57.91076765+01:00","closed_at":"2026-01-12T19:12:57.91076765+01:00","close_reason":"Closed"}
|
||||
@@ -52,93 +52,6 @@ jobs:
|
||||
"https://m3tam3re@code.m3ta.dev/m3tam3re/nixpkgs.git" \
|
||||
"$REPO_DIR"
|
||||
|
||||
- name: Update All Flake Inputs
|
||||
id: update-flake-inputs
|
||||
run: |
|
||||
cd "$REPO_DIR"
|
||||
|
||||
echo "::group::Discovering version-pinned flake inputs"
|
||||
|
||||
# Get GitHub inputs with version refs (e.g., v1.2.9)
|
||||
VERSIONED_INPUTS=$(nix flake metadata --json | jq -r '
|
||||
.locks.nodes | to_entries[] |
|
||||
select(.value.original.type == "github") |
|
||||
select(.value.original.ref != null) |
|
||||
select(.value.original.ref | test("^v?[0-9]+\\.[0-9]+")) |
|
||||
"\(.key) \(.value.original.owner) \(.value.original.repo) \(.value.original.ref)"
|
||||
')
|
||||
|
||||
echo "Discovered version-pinned inputs:"
|
||||
echo "$VERSIONED_INPUTS"
|
||||
echo "::endgroup::"
|
||||
|
||||
UPDATED_INPUTS=""
|
||||
FAILED_INPUTS=""
|
||||
|
||||
# Update each version-pinned input
|
||||
while read -r INPUT_NAME OWNER REPO CURRENT_REF; do
|
||||
[ -z "$INPUT_NAME" ] && continue
|
||||
|
||||
echo "::group::Checking $INPUT_NAME ($OWNER/$REPO)"
|
||||
|
||||
# Get latest stable release (exclude prereleases)
|
||||
# The /releases/latest endpoint already returns the latest non-prerelease, non-draft release
|
||||
LATEST=$(curl -sf "https://api.github.com/repos/$OWNER/$REPO/releases/latest" | \
|
||||
jq -r 'if .prerelease == false then .tag_name else empty end')
|
||||
|
||||
if [ -z "$LATEST" ]; then
|
||||
echo "⚠️ No stable release found for $INPUT_NAME (repo may only have prereleases)"
|
||||
FAILED_INPUTS="$FAILED_INPUTS $INPUT_NAME(no-stable-release)"
|
||||
echo "::endgroup::"
|
||||
continue
|
||||
fi
|
||||
|
||||
echo "Current: $CURRENT_REF | Latest: $LATEST"
|
||||
|
||||
if [ "$LATEST" != "$CURRENT_REF" ]; then
|
||||
echo "Updating $INPUT_NAME from $CURRENT_REF to $LATEST"
|
||||
|
||||
# Update flake.nix
|
||||
sed -i "s|github:$OWNER/$REPO/[^\"']*|github:$OWNER/$REPO/$LATEST|g" flake.nix
|
||||
|
||||
# Update flake.lock for this input
|
||||
if nix flake update "$INPUT_NAME" 2>&1 | tee /tmp/input-update.log; then
|
||||
UPDATED_INPUTS="$UPDATED_INPUTS $INPUT_NAME($LATEST)"
|
||||
echo "✅ Updated $INPUT_NAME to $LATEST"
|
||||
else
|
||||
echo "❌ Failed to update $INPUT_NAME"
|
||||
FAILED_INPUTS="$FAILED_INPUTS $INPUT_NAME(update-failed)"
|
||||
git checkout flake.nix flake.lock 2>/dev/null || true
|
||||
fi
|
||||
else
|
||||
echo "✓ $INPUT_NAME is already up to date"
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
done <<< "$VERSIONED_INPUTS"
|
||||
|
||||
echo "::group::Updating non-version-pinned inputs"
|
||||
# Update all non-version-pinned inputs (branches, no-ref)
|
||||
nix flake update
|
||||
echo "::endgroup::"
|
||||
|
||||
# Check if we have any changes
|
||||
if [ -n "$(git status --porcelain flake.nix flake.lock)" ]; then
|
||||
echo "::group::Committing flake input updates"
|
||||
nix fmt flake.nix
|
||||
git add flake.nix flake.lock
|
||||
|
||||
COMMIT_MSG="chore: update flake inputs"
|
||||
[ -n "$UPDATED_INPUTS" ] && COMMIT_MSG="$COMMIT_MSG - $(echo $UPDATED_INPUTS | tr ' ' ', ')"
|
||||
|
||||
git commit -m "$COMMIT_MSG"
|
||||
echo "flake_inputs_updated=true" >> $GITHUB_OUTPUT
|
||||
echo "updated_inputs=${UPDATED_INPUTS# }" >> $GITHUB_OUTPUT
|
||||
[ -n "$FAILED_INPUTS" ] && echo "failed_inputs=${FAILED_INPUTS# }" >> $GITHUB_OUTPUT
|
||||
echo "::endgroup::"
|
||||
else
|
||||
echo "flake_inputs_updated=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Check Prerequisites
|
||||
id: check
|
||||
run: |
|
||||
@@ -175,108 +88,19 @@ jobs:
|
||||
nix eval .#${pkg}.passthru.updateScript --json >/dev/null 2>&1
|
||||
}
|
||||
|
||||
# Check if updateScript is a custom script (path-based) vs nix-update-script
|
||||
is_custom_update_script() {
|
||||
local pkg=$1
|
||||
local result
|
||||
# Custom scripts (./update.sh) become store paths ending in .sh
|
||||
# nix-update-script produces a list with nix-update binary path
|
||||
result=$(nix eval --impure --raw --expr "
|
||||
let
|
||||
flake = builtins.getFlake (toString ./.);
|
||||
pkg = flake.packages.\${builtins.currentSystem}.${pkg};
|
||||
script = pkg.passthru.updateScript or null;
|
||||
in
|
||||
if script == null then \"none\"
|
||||
else if builtins.isPath script then \"custom\"
|
||||
else if builtins.isString script then
|
||||
(if builtins.match \".*\\.sh$\" script != null then \"custom\" else \"other\")
|
||||
else if builtins.isList script then
|
||||
let first = builtins.head script;
|
||||
in if builtins.isString first && builtins.match \".*/nix-update$\" first != null
|
||||
then \"nix-update-script\"
|
||||
else \"custom\"
|
||||
else if builtins.isAttrs script && script ? command then \"custom\"
|
||||
else \"other\"
|
||||
" 2>/dev/null || echo "other")
|
||||
[[ "$result" == "custom" ]]
|
||||
}
|
||||
|
||||
# Run a custom update script directly
|
||||
# Scripts must use nix-shell shebang for their own dependencies
|
||||
run_custom_update_script() {
|
||||
local pkg=$1
|
||||
local before_hash=$(git rev-parse HEAD)
|
||||
|
||||
echo " 🔧 Detected custom update script for $pkg"
|
||||
|
||||
# Resolve the store path of the update script
|
||||
local script_path
|
||||
script_path=$(nix eval --impure --raw --expr "
|
||||
let
|
||||
flake = builtins.getFlake (toString ./.);
|
||||
pkg = flake.packages.\${builtins.currentSystem}.${pkg};
|
||||
script = pkg.passthru.updateScript;
|
||||
cmd = if builtins.isAttrs script then script.command
|
||||
else if builtins.isList script then builtins.head script
|
||||
else script;
|
||||
in toString cmd
|
||||
" 2>/dev/null)
|
||||
|
||||
if [ -z "$script_path" ]; then
|
||||
echo "❌ Could not resolve update script path for $pkg"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Set environment variables that nix-update would normally provide
|
||||
export UPDATE_NIX_NAME=$(nix eval --raw .#${pkg}.name 2>/dev/null || echo "$pkg")
|
||||
export UPDATE_NIX_PNAME=$(nix eval --raw .#${pkg}.pname 2>/dev/null || echo "$pkg")
|
||||
export UPDATE_NIX_OLD_VERSION=$(nix eval --raw .#${pkg}.version 2>/dev/null || echo "unknown")
|
||||
export UPDATE_NIX_ATTR_PATH="$pkg"
|
||||
|
||||
echo " Running: $script_path"
|
||||
if bash "$script_path" 2>&1 | tee /tmp/update-${pkg}.log; then
|
||||
if [ "$(check_commit "$before_hash")" = "true" ]; then
|
||||
echo "✅ Updated $pkg (via custom script)"
|
||||
return 0
|
||||
fi
|
||||
# Script succeeded but no commit — may already be up to date
|
||||
if grep -q "already at latest\|nothing to do" /tmp/update-${pkg}.log; then
|
||||
echo "✓ $pkg already up to date"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Clean up on failure
|
||||
git checkout -- . 2>/dev/null || true
|
||||
git clean -fd 2>/dev/null || true
|
||||
|
||||
if ! grep -q "already at latest\|nothing to do\|No new version found" /tmp/update-${pkg}.log; then
|
||||
echo "⚠️ Custom update script failed for $pkg"
|
||||
fi
|
||||
return 1
|
||||
}
|
||||
|
||||
run_update() {
|
||||
local pkg=$1
|
||||
local before_hash=$(git rev-parse HEAD)
|
||||
|
||||
echo "::group::Updating $pkg"
|
||||
|
||||
# Check if this package has a custom update script
|
||||
if is_custom_update_script "$pkg"; then
|
||||
if run_custom_update_script "$pkg"; then
|
||||
echo "::endgroup::"
|
||||
return 0
|
||||
else
|
||||
echo "::endgroup::"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Standard nix-update for packages with nix-update-script
|
||||
local args=("--flake" "--commit" "--use-github-releases")
|
||||
|
||||
# Handle subpackages (opencode has node_modules)
|
||||
if [ "$pkg" = "opencode" ]; then
|
||||
args+=("--subpackage" "node_modules")
|
||||
fi
|
||||
|
||||
args+=("$pkg")
|
||||
|
||||
if nix-update "${args[@]}" 2>&1 | tee /tmp/update-${pkg}.log; then
|
||||
@@ -286,11 +110,6 @@ jobs:
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
|
||||
# Clean up any uncommitted changes from failed update
|
||||
git checkout -- . 2>/dev/null || true
|
||||
git clean -fd 2>/dev/null || true
|
||||
|
||||
echo "::endgroup::"
|
||||
|
||||
if ! grep -q "already up to date\|No new version found" /tmp/update-${pkg}.log; then
|
||||
@@ -340,16 +159,34 @@ jobs:
|
||||
echo "📦 Found $(echo $UPDATABLE_PACKAGES | wc -w) updatable packages"
|
||||
echo ""
|
||||
|
||||
# Parallel updates with 4 concurrent jobs
|
||||
MAX_JOBS=4
|
||||
JOB_COUNT=0
|
||||
SUCCESS_LIST=()
|
||||
|
||||
for pkg in $UPDATABLE_PACKAGES; do
|
||||
if run_update "$pkg"; then
|
||||
UPDATES_FOUND=true
|
||||
if [ -n "$UPDATED_PACKAGES" ]; then
|
||||
UPDATED_PACKAGES="$UPDATED_PACKAGES, $pkg"
|
||||
else
|
||||
UPDATED_PACKAGES="$pkg"
|
||||
fi
|
||||
(run_update "$pkg" && echo "$pkg" >> /tmp/success.txt || true) &
|
||||
|
||||
JOB_COUNT=$((JOB_COUNT + 1))
|
||||
|
||||
# Wait if we hit max concurrent jobs
|
||||
if [ $JOB_COUNT -ge $MAX_JOBS ]; then
|
||||
wait
|
||||
JOB_COUNT=0
|
||||
fi
|
||||
done
|
||||
|
||||
# Wait for remaining jobs
|
||||
wait
|
||||
|
||||
# Parse results
|
||||
if [ -f /tmp/success.txt ]; then
|
||||
SUCCESS_LIST=$(cat /tmp/success.txt | tr '\n' ' ')
|
||||
UPDATED_PACKAGES=$(echo "$SUCCESS_LIST" | sed 's/ /, /g' | sed 's/, $//')
|
||||
UPDATES_FOUND=true
|
||||
fi
|
||||
|
||||
rm -f /tmp/success.txt
|
||||
fi
|
||||
|
||||
COMMIT_COUNT=$(git rev-list --count origin/master..HEAD)
|
||||
@@ -364,7 +201,7 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Verify Builds
|
||||
if: steps.update.outputs.has_updates == 'true' || steps.update-flake-inputs.outputs.flake_inputs_updated == 'true'
|
||||
if: steps.update.outputs.has_updates == 'true'
|
||||
run: |
|
||||
cd "$REPO_DIR"
|
||||
|
||||
@@ -416,20 +253,11 @@ jobs:
|
||||
echo "✅ All packages built successfully: ${SUCCESSFUL_PACKAGES[*]}"
|
||||
|
||||
- name: Push Changes
|
||||
if: steps.update.outputs.has_updates == 'true' || steps.update-flake-inputs.outputs.flake_inputs_updated == 'true'
|
||||
if: steps.update.outputs.has_updates == 'true'
|
||||
run: |
|
||||
cd "$REPO_DIR"
|
||||
PACKAGES="${{ steps.update.outputs.updated_packages }}"
|
||||
|
||||
if [ "${{ steps.update-flake-inputs.outputs.flake_inputs_updated }}" = "true" ]; then
|
||||
UPDATED_INPUTS="${{ steps.update-flake-inputs.outputs.updated_inputs }}"
|
||||
if [ -n "$PACKAGES" ]; then
|
||||
PACKAGES="$PACKAGES, flake inputs ($UPDATED_INPUTS)"
|
||||
else
|
||||
PACKAGES="flake inputs ($UPDATED_INPUTS)"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "::group::Git Operations"
|
||||
echo "Current commit: $(git rev-parse HEAD)"
|
||||
echo "Pending commits: $(git rev-list --count origin/master..HEAD)"
|
||||
@@ -439,10 +267,10 @@ jobs:
|
||||
if git pull --rebase origin master; then
|
||||
echo "✅ Rebase successful"
|
||||
else
|
||||
echo "⚠️ Rebase failed, resetting and retrying..."
|
||||
git rebase --abort 2>/dev/null || true
|
||||
echo "⚠️ Rebase failed, attempting force push..."
|
||||
git reset --hard origin/master
|
||||
echo "❌ Could not rebase, updates lost. Will retry next run."
|
||||
git push --force-with-lease origin master
|
||||
echo "✓ Force push completed"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
@@ -454,6 +282,16 @@ jobs:
|
||||
echo "✅ Successfully pushed updates for: $PACKAGES"
|
||||
echo "::endgroup::"
|
||||
|
||||
- name: Upload Build Logs
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: build-logs-${{ github.run_number }}
|
||||
path: |
|
||||
/tmp/update-*.log
|
||||
/tmp/build-*.log
|
||||
retention-days: 7
|
||||
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
run: |
|
||||
@@ -465,7 +303,7 @@ jobs:
|
||||
rm -rf "$REPO_DIR"
|
||||
|
||||
# Remove all log files
|
||||
rm -f /tmp/update-*.log /tmp/build-*.log /tmp/opencode-build.log /tmp/update-log.txt /tmp/success-packages.txt
|
||||
rm -f /tmp/update-*.log /tmp/build-*.log /tmp/update-log.txt /tmp/success-packages.txt
|
||||
|
||||
# Clear sensitive environment variables
|
||||
unset GIT_AUTHOR_EMAIL GIT_COMMITTER_EMAIL
|
||||
@@ -473,45 +311,17 @@ jobs:
|
||||
- name: Summary
|
||||
if: always()
|
||||
run: |
|
||||
HAS_UPDATES="false"
|
||||
|
||||
if [ "${{ steps.update.outputs.has_updates }}" = "true" ]; then
|
||||
HAS_UPDATES="true"
|
||||
echo "# ✅ Update Summary" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "## Updated Packages" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "\`${{ steps.update.outputs.updated_packages }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
if [ "${{ steps.update-flake-inputs.outputs.flake_inputs_updated }}" = "true" ]; then
|
||||
HAS_UPDATES="true"
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "## Updated Flake Inputs" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
UPDATED_INPUTS="${{ steps.update-flake-inputs.outputs.updated_inputs }}"
|
||||
if [ -n "$UPDATED_INPUTS" ]; then
|
||||
echo "$UPDATED_INPUTS" | tr ' ' '\n' | while read -r input; do
|
||||
[ -n "$input" ] && echo "- **$input**" >> $GITHUB_STEP_SUMMARY
|
||||
done
|
||||
fi
|
||||
FAILED_INPUTS="${{ steps.update-flake-inputs.outputs.failed_inputs }}"
|
||||
if [ -n "$FAILED_INPUTS" ]; then
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "### Failed Inputs" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "$FAILED_INPUTS" | tr ' ' '\n' | while read -r input; do
|
||||
[ -n "$input" ] && echo "- $input" >> $GITHUB_STEP_SUMMARY
|
||||
done
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$HAS_UPDATES" = "true" ]; then
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "## Status" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- ✅ All updates validated with \`nix flake check\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- ✅ All builds successful" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- ✅ All packages validated with \`nix flake check\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- ✅ All packages built successfully" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- ✅ Changes pushed to master" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "## Workflow Performance" >> $GITHUB_STEP_SUMMARY
|
||||
@@ -522,5 +332,5 @@ jobs:
|
||||
else
|
||||
echo "# ℹ️ No Updates Required" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "No updates found this run. All packages and flake inputs are up to date." >> $GITHUB_STEP_SUMMARY
|
||||
echo "No package updates found this run. All packages are up to date." >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
19
.gitignore
vendored
19
.gitignore
vendored
@@ -31,22 +31,3 @@ test-result/
|
||||
# Local configuration (if you want to keep local overrides)
|
||||
local.nix
|
||||
flake.lock.bak
|
||||
.todos/
|
||||
|
||||
# AI agent state
|
||||
.sidecar/
|
||||
.sidecar-*
|
||||
.sisyphus/
|
||||
.sidecar-agent
|
||||
.sidecar-task
|
||||
.sidecar-pr
|
||||
.sidecar-start.sh
|
||||
.sidecar-base
|
||||
.td-root
|
||||
.cache
|
||||
.pi*
|
||||
|
||||
# Beads / Dolt files (added by bd init)
|
||||
.dolt/
|
||||
*.db
|
||||
.beads-credential-key
|
||||
|
||||
215
AGENTS.md
215
AGENTS.md
@@ -1,84 +1,165 @@
|
||||
# Agent Instructions
|
||||
# m3ta-nixpkgs Knowledge Base
|
||||
|
||||
This project uses **bd** (beads) for issue tracking. Run `bd prime` for full workflow context.
|
||||
**Generated:** 2026-01-13
|
||||
**Commit:** 366af12
|
||||
**Branch:** master
|
||||
|
||||
## Quick Reference
|
||||
## OVERVIEW
|
||||
|
||||
```bash
|
||||
bd ready # Find available work
|
||||
bd show <id> # View issue details
|
||||
bd update <id> --claim # Claim work atomically
|
||||
bd close <id> # Complete work
|
||||
bd dolt push # Push beads data to remote
|
||||
Personal Nix flake: custom packages, overlays, NixOS/Home Manager modules, dev shells. Flakes-only (no channels).
|
||||
|
||||
## STRUCTURE
|
||||
|
||||
```
|
||||
.
|
||||
├── flake.nix # Entry: packages, overlays, modules, shells, lib
|
||||
├── pkgs/ # Custom packages (one dir each, callPackage registry)
|
||||
├── modules/
|
||||
│ ├── nixos/ # System modules (ports.nix)
|
||||
│ └── home-manager/ # User modules by category (cli/, coding/, ports.nix)
|
||||
├── lib/ # Shared utilities (ports.nix)
|
||||
├── shells/ # Dev environments (default, python, devops)
|
||||
├── overlays/mods/ # Package modifications (n8n version bump)
|
||||
├── templates/ # Boilerplate for new packages/modules
|
||||
├── examples/ # Usage examples
|
||||
└── .gitea/workflows/ # CI/CD workflows (nix-update automation)
|
||||
```
|
||||
|
||||
## Non-Interactive Shell Commands
|
||||
## WHERE TO LOOK
|
||||
|
||||
**ALWAYS use non-interactive flags** with file operations to avoid hanging on confirmation prompts.
|
||||
| Task | Location | Notes |
|
||||
| -------------------- | ---------------------------------- | ------------------------------------- |
|
||||
| Add package | `pkgs/<name>/default.nix` | Register in `pkgs/default.nix` |
|
||||
| Add NixOS module | `modules/nixos/<name>.nix` | Import in `modules/nixos/default.nix` |
|
||||
| Add HM module | `modules/home-manager/<category>/` | Category: cli, coding, or root |
|
||||
| Override nixpkgs pkg | `overlays/mods/<name>.nix` | Import in `overlays/mods/default.nix` |
|
||||
| Add dev shell | `shells/<name>.nix` | Register in `shells/default.nix` |
|
||||
| Use port management | `config.m3ta.ports.get "service"` | Host-specific via `hostOverrides` |
|
||||
| CI/CD workflows | `.gitea/workflows/<name>.yml` | Automated package updates (nix-update) |
|
||||
|
||||
Shell commands like `cp`, `mv`, and `rm` may be aliased to include `-i` (interactive) mode on some systems, causing the agent to hang indefinitely waiting for y/n input.
|
||||
## CONVENTIONS
|
||||
|
||||
**Use these forms instead:**
|
||||
```bash
|
||||
# Force overwrite without prompting
|
||||
cp -f source dest # NOT: cp source dest
|
||||
mv -f source dest # NOT: mv source dest
|
||||
rm -f file # NOT: rm file
|
||||
**Formatter**: `nix fmt` before commit (alejandra)
|
||||
|
||||
# For recursive operations
|
||||
rm -rf directory # NOT: rm -r directory
|
||||
cp -rf source dest # NOT: cp -r source dest
|
||||
**Naming**:
|
||||
|
||||
- Packages: `lowercase-hyphen` (e.g., `hyprpaper-random`)
|
||||
- Variables: `camelCase` (e.g., `portHelpers`)
|
||||
- Module options: `m3ta.*` namespace
|
||||
|
||||
**Imports**: Multi-line, trailing commas:
|
||||
|
||||
```nix
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
fetchFromGitHub,
|
||||
}:
|
||||
```
|
||||
|
||||
**Other commands that may prompt:**
|
||||
- `scp` - use `-o BatchMode=yes` for non-interactive
|
||||
- `ssh` - use `-o BatchMode=yes` to fail instead of prompting
|
||||
- `apt-get` - use `-y` flag
|
||||
- `brew` - use `HOMEBREW_NO_AUTO_UPDATE=1` env var
|
||||
**Modules**: Standard pattern:
|
||||
|
||||
<!-- BEGIN BEADS INTEGRATION v:1 profile:minimal hash:ca08a54f -->
|
||||
## Beads Issue Tracker
|
||||
|
||||
This project uses **bd (beads)** for issue tracking. Run `bd prime` to see full workflow context and commands.
|
||||
|
||||
### Quick Reference
|
||||
|
||||
```bash
|
||||
bd ready # Find available work
|
||||
bd show <id> # View issue details
|
||||
bd update <id> --claim # Claim work
|
||||
bd close <id> # Complete work
|
||||
```nix
|
||||
{ config, lib, pkgs, ... }:
|
||||
with lib; let
|
||||
cfg = config.m3ta.myModule;
|
||||
in {
|
||||
options.m3ta.myModule = {
|
||||
enable = mkEnableOption "description";
|
||||
};
|
||||
config = mkIf cfg.enable { ... };
|
||||
}
|
||||
```
|
||||
|
||||
### Rules
|
||||
**Meta**: Always include all fields:
|
||||
|
||||
- Use `bd` for ALL task tracking — do NOT use TodoWrite, TaskCreate, or markdown TODO lists
|
||||
- Run `bd prime` for detailed command reference and session close protocol
|
||||
- Use `bd remember` for persistent knowledge — do NOT use MEMORY.md files
|
||||
|
||||
## Session Completion
|
||||
|
||||
**When ending a work session**, you MUST complete ALL steps below. Work is NOT complete until `git push` succeeds.
|
||||
|
||||
**MANDATORY WORKFLOW:**
|
||||
|
||||
1. **File issues for remaining work** - Create issues for anything that needs follow-up
|
||||
2. **Run quality gates** (if code changed) - Tests, linters, builds
|
||||
3. **Update issue status** - Close finished work, update in-progress items
|
||||
4. **PUSH TO REMOTE** - This is MANDATORY:
|
||||
```bash
|
||||
git pull --rebase
|
||||
bd dolt push
|
||||
git push
|
||||
git status # MUST show "up to date with origin"
|
||||
```nix
|
||||
meta = with lib; {
|
||||
description = "...";
|
||||
homepage = "...";
|
||||
license = licenses.mit;
|
||||
platforms = platforms.linux;
|
||||
mainProgram = "...";
|
||||
};
|
||||
```
|
||||
5. **Clean up** - Clear stashes, prune remote branches
|
||||
6. **Verify** - All changes committed AND pushed
|
||||
7. **Hand off** - Provide context for next session
|
||||
|
||||
**CRITICAL RULES:**
|
||||
- Work is NOT complete until `git push` succeeds
|
||||
- NEVER stop before pushing - that leaves work stranded locally
|
||||
- NEVER say "ready to push when you are" - YOU must push
|
||||
- If push fails, resolve and retry until it succeeds
|
||||
<!-- END BEADS INTEGRATION -->
|
||||
## PACKAGE PATTERNS
|
||||
|
||||
**Rust**: `rustPlatform.buildRustPackage rec { cargoLock.lockFile = src + "/Cargo.lock"; }`
|
||||
|
||||
**Shell**: `writeShellScriptBin "name" ''script''` or `mkDerivation` with custom `installPhase`
|
||||
|
||||
**AppImage**: `appimageTools.wrapType2 { ... }`
|
||||
|
||||
**Custom fetcher**: `fetchFromGitea { domain = "code.m3ta.dev"; owner = "m3tam3re"; ... }`
|
||||
|
||||
## MODULE PATTERNS
|
||||
|
||||
**Simple**: `options.cli.name = { enable = mkEnableOption "..."; }; config = mkIf cfg.enable { ... };`
|
||||
|
||||
**Multiple**: `config = mkMerge [ (mkIf cfg.x.enable { ... }) (mkIf cfg.y.enable { ... }) ];`
|
||||
|
||||
**Shared lib**: `portsLib = import ../../lib/ports.nix { inherit lib; }; portHelpers = portsLib.mkPortHelpers { ... };`
|
||||
|
||||
## PORT MANAGEMENT
|
||||
|
||||
Central port management: `config.m3ta.ports.get "service"` with host-specific via `hostOverrides`
|
||||
|
||||
Generated: `/etc/m3ta/ports.json` (NixOS), `~/.config/m3ta/ports.json` (HM)
|
||||
|
||||
## COMMANDS
|
||||
|
||||
```bash
|
||||
nix flake check # Validate flake
|
||||
nix fmt # Format (alejandra)
|
||||
nix build .#<pkg> # Build package
|
||||
nix flake show # List outputs
|
||||
nix develop # Enter dev shell
|
||||
nix develop .#python # Python shell
|
||||
nix develop .#devops # DevOps shell
|
||||
|
||||
# In dev shell only:
|
||||
statix check . # Lint
|
||||
deadnix . # Find dead code
|
||||
```
|
||||
|
||||
## ANTI-PATTERNS
|
||||
|
||||
| Don't | Do Instead |
|
||||
| ------------------------- | ------------------------------------------------------------------- |
|
||||
| `lib.fakeHash` in commits | Get real hash: `nix build`, copy from error |
|
||||
| Flat module files | Organize by category (`cli/`, `coding/`) |
|
||||
| Hardcode ports | Use `m3ta.ports` module |
|
||||
| Skip meta fields | Include all: description, homepage, license, platforms, mainProgram |
|
||||
| `with pkgs;` in modules | Explicit `pkgs.package` or `with pkgs; [ ... ]` in lists only |
|
||||
|
||||
## COMMIT FORMAT
|
||||
|
||||
```
|
||||
type: brief description
|
||||
```
|
||||
|
||||
Types: `feat`, `fix`, `docs`, `style`, `refactor`, `chore`
|
||||
|
||||
## NOTES
|
||||
|
||||
- **Hash fetching**: Use `lib.fakeHash` initially, build to get real hash
|
||||
- **HM modules**: Category subdirs (`cli/`, `coding/`) have own `default.nix` aggregators
|
||||
- **Ports module**: Different for NixOS vs HM (HM adds `generateEnvVars` option)
|
||||
- **Overlays**: `modifications` overlay uses `{prev}:` pattern, not `{final, prev}:`
|
||||
- **Dev shell tools**: `statix`, `deadnix` only available inside `nix develop`
|
||||
- **Automated package updates**: Packages are automatically updated weekly via Gitea Actions using `nix-update`. Review PRs from the automation before merging. For urgent updates, manually run the workflow or update manually.
|
||||
|
||||
## Issue Tracking
|
||||
|
||||
This project uses **bd (beads)** for issue tracking.
|
||||
Run `bd prime` for workflow context, or install hooks (`bd hooks install`) for auto-injection.
|
||||
|
||||
**Quick reference:**
|
||||
|
||||
- `bd ready` - Find unblocked work
|
||||
- `bd create "Title" --type task --priority 2` - Create issue
|
||||
- `bd close <id>` - Complete work
|
||||
- `bd sync` - Sync with git (run at session end)
|
||||
|
||||
For full workflow details: `bd prime`
|
||||
|
||||
108
CHANGELOG.md
108
CHANGELOG.md
@@ -1,108 +0,0 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/).
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
### Changed
|
||||
- Remove duplicate opencode-rules.nix (backward-compat alias preserved)
|
||||
- Tool-agnostic naming in coding-rules lib internals
|
||||
- Remove redundant overlay entries for non-existent flake inputs
|
||||
- Remove redundant 'additions' overlay (identical to 'default')
|
||||
|
||||
### Removed
|
||||
- Dead overlay entries for non-existent flake inputs
|
||||
- Legacy `mkOpencodeRules` alias and `lib.opencode-rules` backward-compat entry (use `mkCodingRules` / `lib.coding-rules`)
|
||||
|
||||
## [0.4.0] - 2026-04-15
|
||||
|
||||
### Added
|
||||
- Pi-agent wrapper with systemd sandbox and per-host-user policy
|
||||
- Containerized Pi agent
|
||||
- `lib.agents.nix` with loadCanonical, renderers (OpenCode, Claude Code, Pi), and shellHook
|
||||
- `lib.coding-rules` helper for per-project rule injection (renamed from opencode-rules)
|
||||
- Home Manager modules for coding agents: `claude-code`, `opencode`, `pi`
|
||||
- Agents rework with canonical TOML format and harness-agnostic renderers
|
||||
- `vibetyper` and `eigent` packages
|
||||
- `openspec` package
|
||||
- `basecamp-cli` package
|
||||
- `openshell` package (0.0.14 through 0.0.23)
|
||||
- `openwork` package
|
||||
- Opencode config moved into m3ta-nixpkgs
|
||||
- Opencode dev shell with mkCodingRules demo
|
||||
|
||||
### Changed
|
||||
- OpenCode flake input updated through v1.1.65 to v1.3.6
|
||||
- Switched from local opencode package to upstream flake input
|
||||
- Removed opencode-desktop (awaiting upstream fix), later re-enabled
|
||||
- Nix eval warnings resolved
|
||||
- Flake inputs updated throughout
|
||||
|
||||
### Fixed
|
||||
- Pi settings sync
|
||||
- Remove openwork sidecars in preFixup to prevent .opencode-wrapped conflict
|
||||
- Remove sidecar binaries from openwork $out/bin to fix buildEnv conflict
|
||||
- Vibetyper .desktop entry
|
||||
- Opencode module formatting
|
||||
- Formatting opencode module
|
||||
|
||||
## [0.3.0] - 2026-02-20
|
||||
|
||||
### Added
|
||||
- `notesmd-cli` package with flake checks
|
||||
- `sidecar` and `td` packages
|
||||
- `opencode-desktop` package with Wayland support
|
||||
- `mem0` package (1.0.2 through 1.0.9)
|
||||
- `kestracli` / `kestractl` package (1.0.0 to 1.2.2)
|
||||
|
||||
### Changed
|
||||
- Nix-update CI workflow optimized with caching and parallel processing
|
||||
- Restructured n8n version handling for nix-update compatibility
|
||||
- Switched formatter from nixpkgs-fmt to alejandra
|
||||
- Replace local opencode with upstream flake input v1.1.27
|
||||
|
||||
### Fixed
|
||||
- n8n build error
|
||||
- n8n pnpm hash
|
||||
- n8n update script
|
||||
- Gitea runner opencode.url flake input
|
||||
- nix-update workflow: YAML syntax, jobs indentation, PR body formatting
|
||||
- Arithmetic increment failing with set -e in nix-update workflow
|
||||
- Removed magic-nix-cache-action causing platform mapping error
|
||||
- Opencode bun version requirement patched to match upstream lockfile
|
||||
- Deprecated opencode update logic removed
|
||||
- nix fmt without arg in workflow
|
||||
- Extra Lua config renamed initLua
|
||||
- Stt-ptt use pkill for better process management
|
||||
|
||||
## [0.2.0] - 2026-01-13
|
||||
|
||||
### Added
|
||||
- Gitea Actions workflow for automated package updates with nix-update
|
||||
- `n8n`, `beads`, and `opencode` packages
|
||||
- `stt-ptt` package with auto-language detection
|
||||
- `rofi-project-opener` for rofi-based project launching
|
||||
- Hierarchical AGENTS.md knowledge base
|
||||
- Dev shell structure with python and devops shells
|
||||
- Port management modules (NixOS + Home Manager)
|
||||
- Port helper library (`lib/ports.nix`)
|
||||
|
||||
### Changed
|
||||
- Beads updated through v0.49.1
|
||||
- N8n updated through v2.8.1
|
||||
- Opencode updated through v1.1.18
|
||||
- Documentation expanded with comprehensive patterns and HM module docs
|
||||
|
||||
### Fixed
|
||||
- Python env version fix for marimo
|
||||
|
||||
## [0.1.0] - 2025-10-04
|
||||
|
||||
### Added
|
||||
- Initial flake setup with packages, overlays, modules, and shells
|
||||
- NixOS and Home Manager module infrastructure
|
||||
- `lib/` shared utilities
|
||||
- `overlays/mods/` for package modifications
|
||||
- `templates/` for new packages/modules
|
||||
- `examples/` for usage documentation
|
||||
25
README.md
25
README.md
@@ -38,16 +38,21 @@ nix run git+https://code.m3ta.dev/m3tam3re/nixpkgs#zellij-ps
|
||||
|
||||
## Available Packages
|
||||
|
||||
See [📦 Packages](./docs/packages/) for the full index with descriptions.
|
||||
|
||||
Quick reference — build any package directly:
|
||||
|
||||
```bash
|
||||
nix build git+https://code.m3ta.dev/m3tam3re/nixpkgs#<package-name>
|
||||
nix run git+https://code.m3ta.dev/m3tam3re/nixpkgs#<package-name>
|
||||
```
|
||||
|
||||
Notable packages: `sidecar`, `td`, `code2prompt`, `mem0`, `n8n`, `zellij-ps`.
|
||||
| Package | Description |
|
||||
| ------------------ | ------------------------------------- |
|
||||
| `beads` | Lightweight memory system for AI coding agents with graph-based issue tracking |
|
||||
| `code2prompt` | Convert code to prompts |
|
||||
| `hyprpaper-random` | Random wallpaper setter for Hyprpaper |
|
||||
| `launch-webapp` | Launch web applications |
|
||||
| `mem0` | AI memory assistant with vector storage |
|
||||
| `msty-studio` | Msty Studio application |
|
||||
| `n8n` | Free and source-available fair-code licensed workflow automation tool |
|
||||
| `opencode` | AI coding agent built for the terminal |
|
||||
| `pomodoro-timer` | Pomodoro timer utility |
|
||||
| `rofi-project-opener` | Rofi-based project launcher |
|
||||
| `stt-ptt` | Push to Talk Speech to Text |
|
||||
| `tuxedo-backlight` | Backlight control for Tuxedo laptops |
|
||||
| `zellij-ps` | Project switcher for Zellij |
|
||||
|
||||
## Automated Package Updates
|
||||
|
||||
|
||||
@@ -20,16 +20,27 @@ Step-by-step guides for common tasks:
|
||||
|
||||
- [Getting Started](./guides/getting-started.md) - Initial setup and basic usage
|
||||
- [Adding Packages](./guides/adding-packages.md) - How to add new packages
|
||||
- [Adding Modules](./guides/adding-modules.md) - How to add new NixOS or Home Manager modules
|
||||
- [Port Management](./guides/port-management.md) - Managing service ports across hosts
|
||||
- [Using Modules](./guides/using-modules.md) - Using NixOS and Home Manager modules
|
||||
- [Development Workflow](./guides/development-workflow.md) - Development and testing workflow
|
||||
|
||||
### 📦 Packages
|
||||
|
||||
- [Packages Index](./packages/) - All packages with descriptions
|
||||
- [Adding Packages](../guides/adding-packages.md) - How to add new packages
|
||||
- [Templates](../templates.md) - Boilerplate templates
|
||||
Documentation for all custom packages:
|
||||
|
||||
- [beads](./packages/beads.md) - Lightweight memory system for AI coding agents with graph-based issue tracking
|
||||
- [code2prompt](./packages/code2prompt.md) - Convert code to prompts
|
||||
- [hyprpaper-random](./packages/hyprpaper-random.md) - Random wallpaper setter for Hyprpaper
|
||||
- [launch-webapp](./packages/launch-webapp.md) - Launch web applications
|
||||
- [mem0](./packages/mem0.md) - AI memory assistant with vector storage
|
||||
- [msty-studio](./packages/msty-studio.md) - Msty Studio application
|
||||
- [n8n](./packages/n8n.md) - Free and source-available fair-code licensed workflow automation tool
|
||||
- [opencode](./packages/opencode.md) - AI coding agent built for terminal
|
||||
- [pomodoro-timer](./packages/pomodoro-timer.md) - Pomodoro timer utility
|
||||
- [rofi-project-opener](./packages/rofi-project-opener.md) - Rofi-based project launcher with custom args
|
||||
- [stt-ptt](./packages/stt-ptt.md) - Push to Talk Speech to Text using Whisper
|
||||
- [tuxedo-backlight](./packages/tuxedo-backlight.md) - Backlight control for Tuxedo laptops
|
||||
- [zellij-ps](./packages/zellij-ps.md) - Project switcher for Zellij
|
||||
|
||||
### ⚙️ Modules
|
||||
|
||||
@@ -55,7 +66,6 @@ Technical references and APIs:
|
||||
|
||||
- [Functions](./reference/functions.md) - Library functions documentation
|
||||
- [Patterns](./reference/patterns.md) - Code patterns and anti-patterns
|
||||
- [Templates](../templates.md) - Boilerplate for packages and modules
|
||||
|
||||
## Repository Structure
|
||||
|
||||
|
||||
@@ -1,261 +0,0 @@
|
||||
# Adding Modules Guide
|
||||
|
||||
How to add new NixOS and Home Manager modules to m3ta-nixpkgs.
|
||||
|
||||
## Overview
|
||||
|
||||
Modules extend your system or user configuration with reusable, declarative options. m3ta-nixpkgs uses the standard NixOS module system with a `m3ta.*` namespace.
|
||||
|
||||
## Quick Start
|
||||
|
||||
Use a template for quick setup:
|
||||
|
||||
```bash
|
||||
# NixOS module
|
||||
nix flake init -t .#nixos-module my-module
|
||||
|
||||
# Home Manager module
|
||||
nix flake init -t .#home-manager-module my-module
|
||||
```
|
||||
|
||||
This copies the template into `templates/` — move it to the appropriate location and customize.
|
||||
|
||||
## Adding a NixOS Module
|
||||
|
||||
### 1. Create the Module File
|
||||
|
||||
Create `modules/nixos/<my-module>.nix`:
|
||||
|
||||
```nix
|
||||
{config, lib, pkgs, ...}:
|
||||
with lib; let
|
||||
cfg = config.m3ta.myModule;
|
||||
in {
|
||||
options.m3ta.myModule = {
|
||||
enable = mkEnableOption "my module description";
|
||||
# Add custom options here
|
||||
someOption = mkOption {
|
||||
type = types.str;
|
||||
default = "default-value";
|
||||
description = "Description of this option";
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
# System configuration goes here
|
||||
environment.systemPackages = [pkgs.some-package];
|
||||
|
||||
# Or systemd services
|
||||
systemd.services.my-service = {
|
||||
enable = true;
|
||||
description = "My service";
|
||||
wantedBy = ["multi-user.target"];
|
||||
serviceConfig = {
|
||||
ExecStart = "${pkgs.some-package}/bin/some-daemon";
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### 2. Register in the Aggregator
|
||||
|
||||
Add to `modules/nixos/default.nix`:
|
||||
|
||||
```nix
|
||||
{
|
||||
imports = [
|
||||
./ports.nix
|
||||
./mem0.nix
|
||||
./<my-module>.nix # ← add your module
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Export from flake.nix
|
||||
|
||||
Add to the `nixosModules` output in `flake.nix` (optional, for direct import):
|
||||
|
||||
```nix
|
||||
nixosModules = {
|
||||
default = ./modules/nixos;
|
||||
ports = ./modules/nixos/ports.nix;
|
||||
mem0 = ./modules/nixos/mem0.nix;
|
||||
my-module = ./modules/nixos/<my-module>.nix; # ← add this
|
||||
};
|
||||
```
|
||||
|
||||
## Adding a Home Manager Module
|
||||
|
||||
Home Manager modules are organized by category under `modules/home-manager/`.
|
||||
|
||||
### Categories
|
||||
|
||||
| Category | Purpose | Location |
|
||||
|----------|---------|----------|
|
||||
| `cli/` | Command-line tools and utilities | `modules/home-manager/cli/` |
|
||||
| `coding/` | Development tools, editors, agents | `modules/home-manager/coding/` |
|
||||
| Root | Cross-cutting concerns (e.g., ports) | `modules/home-manager/` |
|
||||
|
||||
### 1. Choose a Category
|
||||
|
||||
- **CLI tools** (zsh plugins, tmux config, etc.) → `cli/`
|
||||
- **Development tools** (editor config, linters, etc.) → `coding/`
|
||||
- **System-wide settings** (ports, environment) → root level
|
||||
|
||||
### 2. Create the Module File
|
||||
|
||||
Create `modules/home-manager/<category>/<my-module>.nix`:
|
||||
|
||||
```nix
|
||||
{config, lib, pkgs, ...}:
|
||||
with lib; let
|
||||
cfg = config.m3ta.myModule;
|
||||
in {
|
||||
options.m3ta.myModule = {
|
||||
enable = mkEnableOption "my user module description";
|
||||
someOption = mkOption {
|
||||
type = types.str;
|
||||
default = "value";
|
||||
description = "An option for this module";
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
home.packages = [pkgs.some-package];
|
||||
|
||||
# Or Home Manager-specific options
|
||||
programs.zsh.enable = true;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Register in the Category Aggregator
|
||||
|
||||
For `cli/` modules, add to `modules/home-manager/cli/default.nix`:
|
||||
|
||||
```nix
|
||||
{
|
||||
imports = [
|
||||
./rofi-project-opener.nix
|
||||
./stt-ptt.nix
|
||||
./zellij-ps.nix
|
||||
./<my-module>.nix # ← add your module
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
For `coding/` modules, add to `modules/home-manager/coding/default.nix`:
|
||||
|
||||
```nix
|
||||
{
|
||||
imports = [
|
||||
./editors.nix
|
||||
./opencode.nix
|
||||
./agents
|
||||
./<my-module>.nix # ← add your module
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Export from flake.nix
|
||||
|
||||
Add to `homeManagerModules` in `flake.nix`:
|
||||
|
||||
```nix
|
||||
homeManagerModules = {
|
||||
default = import ./modules/home-manager;
|
||||
my-module = import ./modules/home-manager/<category>/<my-module>.nix; # ← add this
|
||||
};
|
||||
```
|
||||
|
||||
## Module Patterns
|
||||
|
||||
### Standard Enable Option
|
||||
|
||||
Always start with `mkEnableOption`:
|
||||
|
||||
```nix
|
||||
options.m3ta.myModule = {
|
||||
enable = mkEnableOption "my module";
|
||||
};
|
||||
```
|
||||
|
||||
### Conditional Configuration
|
||||
|
||||
Use `mkIf` for conditional config:
|
||||
|
||||
```nix
|
||||
config = mkIf cfg.enable {
|
||||
# Only applied when enabled
|
||||
};
|
||||
```
|
||||
|
||||
### Multiple Conditions
|
||||
|
||||
Use `mkMerge` when combining multiple conditional blocks:
|
||||
|
||||
```nix
|
||||
config = mkMerge [
|
||||
(mkIf cfg.feature1.enable { ... })
|
||||
(mkIf cfg.feature2.enable { ... })
|
||||
];
|
||||
```
|
||||
|
||||
### Nested Namespaces
|
||||
|
||||
For logically grouped options, use nested namespaces:
|
||||
|
||||
```nix
|
||||
options.m3ta.coding = {
|
||||
myTool = {
|
||||
enable = mkEnableOption "my coding tool";
|
||||
# ...
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
Usage: `m3ta.coding.myTool.enable = true;`
|
||||
|
||||
### Shared Library Functions
|
||||
|
||||
For shared utilities (port helpers, etc.), import from `lib/`:
|
||||
|
||||
```nix
|
||||
let
|
||||
portsLib = import ../../lib/ports.nix {inherit lib;};
|
||||
portHelpers = portsLib.mkPortHelpers { /* ... */ };
|
||||
in {
|
||||
# use portHelpers
|
||||
}
|
||||
```
|
||||
|
||||
## Documentation
|
||||
|
||||
Add documentation for your module:
|
||||
|
||||
1. Create `docs/modules/nixos/<my-module>.md` (NixOS) or `docs/modules/home-manager/<category>/<my-module>.md` (HM)
|
||||
2. Follow the existing format in `docs/modules/`
|
||||
3. Add it to the appropriate overview page's "Available Modules" list
|
||||
4. Link it from `docs/guides/using-modules.md`
|
||||
|
||||
## Testing
|
||||
|
||||
```bash
|
||||
# Validate the module loads correctly
|
||||
nix flake check
|
||||
|
||||
# Test with a minimal configuration (NixOS)
|
||||
nixos-rebuild dry-build -I nixpkgs=. --option experimental-features flakes
|
||||
|
||||
# Format before commit
|
||||
nix fmt
|
||||
```
|
||||
|
||||
## Related
|
||||
|
||||
- [Using Modules](./using-modules.md) - How to use existing modules
|
||||
- [Port Management](./port-management.md) - Centralized port management
|
||||
- [Development Workflow](./development-workflow.md) - Local development
|
||||
- [Adding Packages](./adding-packages.md) - Adding packages (not modules)
|
||||
- [Architecture](../ARCHITECTURE.md) - Repository structure
|
||||
@@ -32,13 +32,7 @@ modules/home-manager/
|
||||
│ └── zellij-ps.nix
|
||||
└── coding/ # Development tools
|
||||
├── default.nix # Aggregates coding modules
|
||||
├── editors.nix
|
||||
├── opencode.nix # OpenCode non-agent config
|
||||
└── agents/ # Per-tool agent deployment
|
||||
├── default.nix
|
||||
├── opencode.nix
|
||||
├── claude-code.nix
|
||||
└── pi.nix
|
||||
└── editors.nix
|
||||
```
|
||||
|
||||
## Importing Modules
|
||||
@@ -203,61 +197,6 @@ m3ta.coding.editors = {
|
||||
|
||||
**Documentation**: [Editors Module](../modules/home-manager/coding/editors.md)
|
||||
|
||||
### `coding.opencode`
|
||||
|
||||
OpenCode AI coding assistant (non-agent config: theme, formatter, plugins).
|
||||
|
||||
```nix
|
||||
coding.opencode = {
|
||||
enable = true;
|
||||
ohMyOpencodeSettings = {
|
||||
agents.sisyphus.model = "anthropic/claude-opus-4-5";
|
||||
};
|
||||
extraSettings = {
|
||||
provider.anthropic.name = "Anthropic";
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
### `coding.agents.opencode`
|
||||
|
||||
OpenCode agent deployment from canonical TOML definitions.
|
||||
|
||||
```nix
|
||||
coding.agents.opencode = {
|
||||
enable = true;
|
||||
agentsInput = inputs.agents;
|
||||
modelOverrides = {
|
||||
chiron = "anthropic/claude-sonnet-4";
|
||||
};
|
||||
externalSkills = [
|
||||
{ src = inputs.skills-anthropic; }
|
||||
];
|
||||
};
|
||||
```
|
||||
|
||||
### `coding.agents.claude-code`
|
||||
|
||||
Claude Code agent deployment from canonical TOML definitions.
|
||||
|
||||
```nix
|
||||
coding.agents.claude-code = {
|
||||
enable = true;
|
||||
agentsInput = inputs.agents;
|
||||
};
|
||||
```
|
||||
|
||||
### `coding.agents.pi`
|
||||
|
||||
Pi agent deployment from canonical TOML definitions.
|
||||
|
||||
```nix
|
||||
coding.agents.pi = {
|
||||
enable = true;
|
||||
agentsInput = inputs.agents;
|
||||
};
|
||||
```
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Module Configuration
|
||||
@@ -662,7 +601,5 @@ nix eval .#nixosConfigurations.hostname.config.m3ta --apply builtins.attrNames
|
||||
|
||||
- [Port Management](./port-management.md) - Detailed port management guide
|
||||
- [Adding Packages](./adding-packages.md) - How to add new packages
|
||||
- [Adding Modules](./adding-modules.md) - How to add new NixOS or Home Manager modules
|
||||
- [Templates](../templates.md) - Boilerplate for new packages and modules
|
||||
- [Architecture](../ARCHITECTURE.md) - Understanding module structure
|
||||
- [Contributing](../CONTRIBUTING.md) - Code style and guidelines
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
# Packages
|
||||
|
||||
Documentation for packages in m3ta-nixpkgs. Each package directory may contain a `README.md` with detailed documentation.
|
||||
|
||||
## Index
|
||||
|
||||
Packages are organized in `pkgs/<name>/`. Add a `README.md` inside a package directory to document it here.
|
||||
|
||||
### Local Packages
|
||||
|
||||
These packages are built from source in `pkgs/<name>/`:
|
||||
|
||||
| Package | Description | Type | Location |
|
||||
|---------|-------------|------|----------|
|
||||
| `sidecar` | Companion tool for CLI agents with diffs, file trees, and task management | Go | `pkgs/sidecar/` |
|
||||
| `td` | Minimalist CLI for tracking tasks across AI coding sessions | Go | `pkgs/td/` |
|
||||
| `code2prompt` | Convert code to prompts | Go | `pkgs/code2prompt/` |
|
||||
| `eigent` | Eigenvalue tool | Python | `pkgs/eigent/` |
|
||||
| `hyprpaper-random` | Random wallpaper setter for Hyprpaper | Shell | `pkgs/hyprpaper-random/` |
|
||||
| `kestractl` | CLI for Kestra workflow orchestration | Go | `pkgs/kestractl/` |
|
||||
| `launch-webapp` | Launch web applications | Shell | `pkgs/launch-webapp/` |
|
||||
| `mem0` | AI memory assistant with vector storage | Python | `pkgs/mem0/` |
|
||||
| `msty-studio` | Msty Studio application | Python | `pkgs/msty-studio/` |
|
||||
| `n8n` | Workflow automation tool | Node.js | `pkgs/n8n/` |
|
||||
| `openshell` | AI shell assistant | Go | `pkgs/openshell/` |
|
||||
| `pomodoro-timer` | Pomodoro timer utility | Shell | `pkgs/pomodoro-timer/` |
|
||||
| `rofi-project-opener` | Rofi-based project launcher | Shell | `pkgs/rofi-project-opener/` |
|
||||
| `stt-ptt` | Push to Talk Speech to Text | Python | `pkgs/stt-ptt/` |
|
||||
| `tuxedo-backlight` | Backlight control for Tuxedo laptops | C | `pkgs/tuxedo-backlight/` |
|
||||
| `vibetyper` | Typing practice tool | Python | `pkgs/vibetyper/` |
|
||||
| `zellij-ps` | Project switcher for Zellij | Rust | `pkgs/zellij-ps/` |
|
||||
|
||||
### Pass-Through Packages
|
||||
|
||||
These packages are imported directly from flake inputs with minor modifications:
|
||||
|
||||
| Package | Source | Modification | Location |
|
||||
|---------|--------|-------------|----------|
|
||||
| `opencode-desktop` | `inputs.opencode` | Tauri desktop wrapper + Wayland fix | `pkgs/opencode-desktop/` |
|
||||
|
||||
## Adding Package Documentation
|
||||
|
||||
To document a package in detail, add a `README.md` inside the package directory (e.g., `pkgs/sidecar/README.md`). This guide indexes all packages and provides a quick overview.
|
||||
|
||||
## Automated Updates
|
||||
|
||||
Packages are automatically updated weekly by the Gitea Actions `nix-update` workflow. See the main README for details.
|
||||
|
||||
## Related
|
||||
|
||||
- [Adding Packages](../guides/adding-packages.md) - How to add new packages
|
||||
- [Architecture](../ARCHITECTURE.md) - Repository structure
|
||||
- [Quick Start](../QUICKSTART.md) - Getting started
|
||||
@@ -1,23 +1,220 @@
|
||||
# beads (Removed)
|
||||
# beads
|
||||
|
||||
> **Note**: The `beads` package has been removed from this repository.
|
||||
Lightweight memory system for AI coding agents with graph-based issue tracking.
|
||||
|
||||
## Why was it removed?
|
||||
## Description
|
||||
|
||||
The beads package was removed as it is no longer actively used.
|
||||
beads is a command-line tool designed to provide persistent memory and issue tracking for AI coding agents. It features a graph-based system for managing issues, dependencies, and discovered work across development sessions.
|
||||
|
||||
## What was beads?
|
||||
## Features
|
||||
|
||||
Beads was a lightweight memory system for AI coding agents with graph-based issue tracking. It provided:
|
||||
- Persistent memory across AI sessions
|
||||
- Graph-based issue tracking with dependencies
|
||||
- Discovered work tracking
|
||||
- Git integration
|
||||
- 🧠 **Persistent Memory**: Store and retrieve context across AI sessions
|
||||
- 📊 **Graph-Based Issue Tracking**: Manage issues with dependency relationships
|
||||
- 🔄 **Discovered Work**: Track work discovered during development
|
||||
- 🎯 **Multi-Session Continuity**: Resume work from previous sessions
|
||||
- 📝 **Git Integration**: Seamless integration with git workflows
|
||||
- 🐚 **Shell Completions**: Bash, Fish, and Zsh completions included
|
||||
|
||||
If you need beads, you can still build it from source:
|
||||
## Installation
|
||||
|
||||
### Via Overlay
|
||||
|
||||
```nix
|
||||
{pkgs, ...}: {
|
||||
environment.systemPackages = with pkgs; [
|
||||
beads
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
### Direct Reference
|
||||
|
||||
```nix
|
||||
{pkgs, ...}: {
|
||||
environment.systemPackages = with pkgs; [
|
||||
inputs.m3ta-nixpkgs.packages.${pkgs.system}.beads
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
### Run Directly
|
||||
|
||||
```bash
|
||||
git clone https://github.com/steveyegge/beads
|
||||
cd beads
|
||||
go build ./cmd/bd
|
||||
nix run git+https://code.m3ta.dev/m3tam3re/nixpkgs#beads
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Commands
|
||||
|
||||
```bash
|
||||
# Show available issues ready to work on
|
||||
bd ready
|
||||
|
||||
# Create a new issue
|
||||
bd create "Fix authentication bug" --type bug --priority 2
|
||||
|
||||
# Show issue details
|
||||
bd show beads-123
|
||||
|
||||
# Update issue status
|
||||
bd update beads-123 --status in_progress
|
||||
|
||||
# Close completed issues
|
||||
bd close beads-123
|
||||
|
||||
# Sync with git remote
|
||||
bd sync
|
||||
```
|
||||
|
||||
### Issue Types
|
||||
|
||||
- `task`: General tasks
|
||||
- `bug`: Bug fixes
|
||||
- `feature`: New features
|
||||
- `epic`: Large-scale initiatives
|
||||
|
||||
### Priority Levels
|
||||
|
||||
- `0` (P0): Critical
|
||||
- `1` (P1): High
|
||||
- `2` (P2): Medium
|
||||
- `3` (P3): Low
|
||||
- `4` (P4): Backlog
|
||||
|
||||
### Dependency Management
|
||||
|
||||
```bash
|
||||
# Add dependency (beads-123 depends on beads-456)
|
||||
bd dep add beads-123 beads-456
|
||||
|
||||
# Show blocked issues
|
||||
bd blocked
|
||||
|
||||
# Show what blocks an issue
|
||||
bd show beads-123 --blocked-by
|
||||
```
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Finding Work
|
||||
|
||||
```bash
|
||||
# Show ready tasks (no blockers)
|
||||
bd ready
|
||||
|
||||
# Show all open issues
|
||||
bd list --status open
|
||||
|
||||
# Show in-progress work
|
||||
bd list --status in_progress
|
||||
```
|
||||
|
||||
### Assignment
|
||||
|
||||
```bash
|
||||
# Assign issue to yourself
|
||||
bd update beads-123 --assignee username
|
||||
|
||||
# Create assigned issue
|
||||
bd create "Review PR" --assignee reviewer
|
||||
```
|
||||
|
||||
### Bulk Operations
|
||||
|
||||
```bash
|
||||
# Close multiple issues at once
|
||||
bd close beads-123 beads-456 beads-789
|
||||
|
||||
# Close with reason
|
||||
bd close beads-123 --reason "Completed in v1.2.0"
|
||||
```
|
||||
|
||||
### Hooks
|
||||
|
||||
```bash
|
||||
# Install git hooks for automatic sync
|
||||
bd hooks install
|
||||
|
||||
# Remove hooks
|
||||
bd hooks uninstall
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
- `BEADS_DATA_DIR`: Custom directory for beads data (default: `.beads/`)
|
||||
- `BEADS_CONFIG`: Custom configuration file path
|
||||
- `BEADS_EDITOR`: Default editor for editing issues
|
||||
|
||||
### Git Integration
|
||||
|
||||
beads integrates with git for version-controlled issue tracking:
|
||||
|
||||
- Automatic sync before commits (via hooks)
|
||||
- Issue references in commit messages
|
||||
- Branch name tracking
|
||||
- Git-aware issue states
|
||||
|
||||
## Workflow Integration
|
||||
|
||||
### Typical Development Workflow
|
||||
|
||||
1. **Start session**: `bd prime` or check `bd ready`
|
||||
2. **Claim work**: `bd update beads-123 --status in_progress`
|
||||
3. **Work on task**: Implement changes
|
||||
4. **Discover new work**: `bd create "Discovered subtask"` as needed
|
||||
5. **Complete task**: `bd close beads-123`
|
||||
6. **Sync**: `bd sync` (automatic via hooks)
|
||||
|
||||
### Team Collaboration
|
||||
|
||||
```bash
|
||||
# Create issue and assign
|
||||
bd create "Implement feature X" --assignee dev1
|
||||
|
||||
# Review assigned work
|
||||
bd list --assignee yourname
|
||||
|
||||
# Close with review notes
|
||||
bd close beads-123 --reason "Reviewed and approved"
|
||||
```
|
||||
|
||||
## Shell Completions
|
||||
|
||||
beads provides shell completions for bash, fish, and zsh:
|
||||
|
||||
```bash
|
||||
# Bash completions (add to ~/.bashrc to load them in every shell)
|
||||
source <(bd completion bash)
|
||||
|
||||
# Fish completions
|
||||
bd completion fish | source
|
||||
|
||||
# Zsh completions
|
||||
bd completion zsh > ~/.zfunc/_bd
|
||||
```
|
||||
|
||||
## Build Information
|
||||
|
||||
- **Version**: 0.47.1
|
||||
- **Language**: Go
|
||||
- **License**: MIT
|
||||
- **Source**: [GitHub](https://github.com/steveyegge/beads)
|
||||
|
||||
## Platform Support
|
||||
|
||||
- Linux
|
||||
- macOS
|
||||
|
||||
## Notes
|
||||
|
||||
- Tests are disabled in the Nix package due to git worktree operations that fail in the sandbox
|
||||
- Security tests on Darwin are skipped due to `/etc/passwd` unavailability in sandbox
|
||||
- Shell completions are installed for platforms that can execute the build target
|
||||
|
||||
## Related
|
||||
|
||||
- [Adding Packages](../guides/adding-packages.md) - How to add new packages
|
||||
- [Quick Start](../QUICKSTART.md) - Getting started guide
|
||||
|
||||
@@ -1,115 +0,0 @@
|
||||
# kestractl
|
||||
|
||||
CLI for the Kestra workflow orchestration platform.
|
||||
|
||||
## Description
|
||||
|
||||
kestractl is the official command-line interface for [Kestra](https://kestra.io), an open-source workflow orchestration platform. It allows you to interact with Kestra instances to manage flows, trigger executions, inspect namespaces, and automate orchestration tasks from the terminal.
|
||||
|
||||
## Features
|
||||
|
||||
- 🔄 **Flow Management**: Deploy, inspect, and delete flows
|
||||
- ▶️ **Execution Control**: Trigger and monitor workflow executions
|
||||
- 📁 **Namespace Operations**: Manage Kestra namespaces and their resources
|
||||
- 📂 **Namespace Files**: Upload and manage files in namespace storage
|
||||
- 🌐 **Multi-Environment**: Switch between dev, staging, and production contexts
|
||||
- ⚡ **Pre-built Binary**: No compilation required — fetched directly from GitHub releases
|
||||
|
||||
## Installation
|
||||
|
||||
### Via Overlay
|
||||
|
||||
```nix
|
||||
{pkgs, ...}: {
|
||||
environment.systemPackages = with pkgs; [
|
||||
kestractl
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
### Direct Reference
|
||||
|
||||
```nix
|
||||
{pkgs, ...}: {
|
||||
environment.systemPackages = with pkgs; [
|
||||
inputs.m3ta-nixpkgs.packages.${pkgs.system}.kestractl
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
### Run Directly
|
||||
|
||||
```bash
|
||||
nix run git+https://code.m3ta.dev/m3tam3re/nixpkgs#kestractl
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Commands
|
||||
|
||||
```bash
|
||||
# Check version
|
||||
kestractl version
|
||||
|
||||
# Show help
|
||||
kestractl --help
|
||||
|
||||
# Connect to a Kestra instance
|
||||
kestractl context set local --api-url http://localhost:8080
|
||||
|
||||
# List flows in a namespace
|
||||
kestractl flow list --namespace my.namespace
|
||||
|
||||
# Trigger a flow execution
|
||||
kestractl execution create --namespace my.namespace --flow-id my-flow
|
||||
|
||||
# Monitor executions
|
||||
kestractl execution list --namespace my.namespace
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
kestractl uses a context system to manage connections to Kestra instances:
|
||||
|
||||
```bash
|
||||
# Create a context for a local instance
|
||||
kestractl context set local --api-url http://localhost:8080
|
||||
|
||||
# Create a context for a remote instance with auth
|
||||
kestractl context set prod --api-url https://kestra.example.com --token <your-token>
|
||||
|
||||
# Switch active context
|
||||
kestractl context use prod
|
||||
```
|
||||
|
||||
## Build Information
|
||||
|
||||
- **Version**: 1.0.0
|
||||
- **Language**: Go (pre-built binary)
|
||||
- **License**: Apache 2.0
|
||||
- **Source**: [GitHub](https://github.com/kestra-io/kestractl)
|
||||
|
||||
## Platform Support
|
||||
|
||||
- `x86_64-linux`
|
||||
- `aarch64-linux`
|
||||
|
||||
## Package Structure
|
||||
|
||||
This package uses a `sources.json` + `update.sh` pattern for multi-platform binary fetching:
|
||||
|
||||
```
|
||||
pkgs/kestractl/
|
||||
├── default.nix — reads version + hashes from sources.json
|
||||
├── sources.json — per-platform URLs and SRI hashes
|
||||
└── update.sh — fetches latest GitHub release, updates sources.json
|
||||
```
|
||||
|
||||
Updates are handled by `update.sh` (called by the Gitea Actions nix-update workflow), which fetches the latest release from GitHub, downloads each platform's tarball, computes SRI hashes, and rewrites `sources.json`.
|
||||
|
||||
## Related
|
||||
|
||||
- [Kestra Documentation](https://kestra.io/docs)
|
||||
- [kestractl GitHub](https://github.com/kestra-io/kestractl)
|
||||
- [Adding Packages](../guides/adding-packages.md) - How to add new packages
|
||||
- [Quick Start](../QUICKSTART.md) - Getting started guide
|
||||
@@ -1,37 +1,346 @@
|
||||
# opencode (Deprecated)
|
||||
# opencode
|
||||
|
||||
> **Note**: The `opencode` package has been removed from this repository.
|
||||
AI coding agent built for the terminal that can build anything. Combines a TypeScript/JavaScript core with a Go-based TUI for an interactive AI coding experience.
|
||||
|
||||
## Why was it removed?
|
||||
## Description
|
||||
|
||||
OpenCode (CLI version) has been removed because there is now a well-maintained upstream repository for AI coding tools:
|
||||
OpenCode is a terminal-based AI coding agent designed for power users. It provides a comprehensive development environment with AI assistance, code generation, refactoring, and project management capabilities. The tool features a sophisticated TUI (Terminal User Interface) built with Go, while the core functionality is implemented in TypeScript/JavaScript.
|
||||
|
||||
**[numtide/llm-agents.nix](https://github.com/numtide/llm-agents.nix)**
|
||||
## Features
|
||||
|
||||
This repository provides Nix packages for various AI coding agents, including OpenCode and others, with active maintenance and updates.
|
||||
- 🤖 **AI-Powered Coding**: Generate, refactor, and optimize code with AI assistance
|
||||
- 🖥️ **Modern TUI**: Beautiful terminal interface built with Go
|
||||
- 🔍 **Code Understanding**: Parse and understand existing codebases
|
||||
- 🌳 **Tree-Sitter Integration**: Accurate syntax highlighting and code structure analysis
|
||||
- 📁 **Project Management**: Navigate and manage projects efficiently
|
||||
- 🧠 **Multi-LLM Support**: Works with various language models (OpenAI, Anthropic, etc.)
|
||||
- 📝 **Code Generation**: Create new files and features from natural language
|
||||
- 🔄 **Refactoring**: Intelligent code refactoring with AI
|
||||
- 🐛 **Bug Detection**: Find and fix bugs automatically
|
||||
- 📚 **Context Awareness**: Maintains context across editing sessions
|
||||
- 🎯 **Task Orchestration**: Break down and execute complex tasks
|
||||
- 💾 **Local Development**: Runs entirely on your machine
|
||||
|
||||
## What should I use instead?
|
||||
## Installation
|
||||
|
||||
Use the [llm-agents.nix](https://github.com/numtide/llm-agents.nix) flake directly:
|
||||
### Via Overlay
|
||||
|
||||
```nix
|
||||
{
|
||||
inputs = {
|
||||
llm-agents.url = "github:numtide/llm-agents.nix";
|
||||
};
|
||||
|
||||
outputs = { inputs, ... }: {
|
||||
# Access packages via inputs.llm-agents.packages.${system}
|
||||
};
|
||||
{pkgs, ...}: {
|
||||
environment.systemPackages = with pkgs; [
|
||||
opencode
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
Or run directly:
|
||||
### Direct Reference
|
||||
|
||||
```bash
|
||||
nix run github:numtide/llm-agents.nix#opencode
|
||||
```nix
|
||||
{pkgs, ...}: {
|
||||
environment.systemPackages = with pkgs; [
|
||||
inputs.m3ta-nixpkgs.packages.${pkgs.system}.opencode
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
## What about opencode-desktop?
|
||||
### Run Directly
|
||||
|
||||
The `opencode-desktop` package remains available in this repository as it includes a Wayland support workaround for [upstream issue #11755](https://github.com/opencode-ai/opencode/issues/11755). Once this issue is resolved upstream, `opencode-desktop` may also be removed in favor of the llm-agents.nix repository.
|
||||
```bash
|
||||
nix run git+https://code.m3ta.dev/m3tam3re/nixpkgs#opencode
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Basic Usage
|
||||
|
||||
```bash
|
||||
# Start OpenCode in current directory
|
||||
opencode
|
||||
|
||||
# Open specific project
|
||||
opencode /path/to/project
|
||||
|
||||
# Start with specific task description
|
||||
opencode "Fix the login bug"
|
||||
|
||||
# Show help
|
||||
opencode --help
|
||||
|
||||
# Show version
|
||||
opencode --version
|
||||
```
|
||||
|
||||
### Interactive Commands
|
||||
|
||||
OpenCode provides an interactive TUI with various commands:
|
||||
|
||||
- **Navigation**: Arrow keys to move, Enter to select
|
||||
- **Search**: `/` to search files, `?` for help
|
||||
- **Edit**: `e` to edit selected file
|
||||
- **Command Palette**: `Ctrl+p` to access commands
|
||||
- **AI Chat**: `Ctrl+c` to open AI chat
|
||||
- **Exit**: `q` or `Ctrl+d` to quit
|
||||
|
||||
### AI Chat Mode
|
||||
|
||||
```bash
|
||||
# Start OpenCode
|
||||
opencode
|
||||
|
||||
# Enter AI chat mode (Ctrl+c)
|
||||
# Ask questions, request code changes, etc.
|
||||
# Examples:
|
||||
# - "Generate a REST API endpoint for user management"
|
||||
# - "Refactor this function to use async/await"
|
||||
# - "Find and fix potential memory leaks"
|
||||
# - "Add unit tests for this module"
|
||||
```
|
||||
|
||||
### Task Management
|
||||
|
||||
```bash
|
||||
# Ask OpenCode to work on a specific task
|
||||
opencode "Implement authentication with JWT tokens"
|
||||
|
||||
# The agent will:
|
||||
# 1. Understand the codebase
|
||||
# 2. Plan the implementation
|
||||
# 3. Generate necessary code
|
||||
# 4. Make the changes
|
||||
# 5. Verify the implementation
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
- `OPENCODE_API_KEY`: API key for LLM provider
|
||||
- `OPENCODE_MODEL`: Default model to use (e.g., gpt-4, claude-3-opus)
|
||||
- `OPENCODE_PROVIDER`: LLM provider (openai, anthropic, etc.)
|
||||
- `OPENCODE_MAX_TOKENS`: Maximum tokens for responses
|
||||
- `OPENCODE_TEMPERATURE`: Sampling temperature (0-1)
|
||||
- `OPENCODE_CONFIG`: Path to configuration file
|
||||
|
||||
### Configuration File
|
||||
|
||||
Create `~/.opencode/config.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"model": "gpt-4",
|
||||
"provider": "openai",
|
||||
"maxTokens": 4096,
|
||||
"temperature": 0.7,
|
||||
"systemPrompt": "You are a helpful coding assistant"
|
||||
}
|
||||
```
|
||||
|
||||
### Project-Specific Config
|
||||
|
||||
Create `.opencode.json` in your project:
|
||||
|
||||
```json
|
||||
{
|
||||
"include": ["src/**/*.ts", "tests/**/*.ts"],
|
||||
"exclude": ["node_modules", "dist", "*.test.ts"],
|
||||
"systemPrompt": "You are a TypeScript expert"
|
||||
}
|
||||
```
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Code Refactoring
|
||||
|
||||
```bash
|
||||
# Ask for refactoring suggestions
|
||||
opencode "Review and refactor src/utils.ts for better performance"
|
||||
|
||||
# Apply refactoring automatically
|
||||
opencode "Optimize the database queries in src/db/*.ts"
|
||||
```
|
||||
|
||||
### Bug Fixing
|
||||
|
||||
```bash
|
||||
# Describe the bug
|
||||
opencode "Fix the race condition in the payment processing module"
|
||||
|
||||
# OpenCode will:
|
||||
# 1. Analyze the code
|
||||
# 2. Identify the issue
|
||||
# 3. Propose and implement fixes
|
||||
# 4. Verify the solution
|
||||
```
|
||||
|
||||
### Feature Implementation
|
||||
|
||||
```bash
|
||||
# Request new features
|
||||
opencode "Add support for OAuth2 authentication"
|
||||
|
||||
# Be specific about requirements
|
||||
opencode "Create a REST API with these endpoints: GET /users, POST /users, PUT /users/:id, DELETE /users/:id"
|
||||
```
|
||||
|
||||
### Code Review
|
||||
|
||||
```bash
|
||||
# Get code review
|
||||
opencode "Review src/api/*.ts for security vulnerabilities"
|
||||
|
||||
# Check for best practices
|
||||
opencode "Review the entire codebase and suggest improvements following SOLID principles"
|
||||
```
|
||||
|
||||
### Documentation Generation
|
||||
|
||||
```bash
|
||||
# Generate documentation
|
||||
opencode "Add JSDoc comments to all functions in src/utils.ts"
|
||||
|
||||
# Create README
|
||||
opencode "Generate a comprehensive README.md for this project"
|
||||
```
|
||||
|
||||
## Integration with Editors
|
||||
|
||||
### VS Code
|
||||
|
||||
OpenCode can work alongside your editor:
|
||||
|
||||
```bash
|
||||
# Keep VS Code running for editing
|
||||
code .
|
||||
|
||||
# Use OpenCode for AI assistance in another terminal
|
||||
opencode
|
||||
|
||||
# Switch between them as needed
|
||||
```
|
||||
|
||||
### Vim/Neovim
|
||||
|
||||
```bash
|
||||
# Use Vim/Neovim as your editor
|
||||
vim src/main.ts
|
||||
|
||||
# Use OpenCode for complex tasks
|
||||
opencode "Refactor the authentication module"
|
||||
```
|
||||
|
||||
## Use Cases
|
||||
|
||||
### Learning New Codebases
|
||||
|
||||
```bash
|
||||
# OpenCode will:
|
||||
# 1. Analyze the code structure
|
||||
# 2. Explain how components work
|
||||
# 3. Answer questions about the code
|
||||
opencode "Explain how this project handles user authentication"
|
||||
```
|
||||
|
||||
### Porting Code
|
||||
|
||||
```bash
|
||||
# Port from one language to another
|
||||
opencode "Port this Python function to TypeScript"
|
||||
```
|
||||
|
||||
### Writing Tests
|
||||
|
||||
```bash
|
||||
# Generate unit tests
|
||||
opencode "Add comprehensive unit tests for src/utils.ts with 100% coverage"
|
||||
```
|
||||
|
||||
### Debugging
|
||||
|
||||
```bash
|
||||
# Get help with debugging
|
||||
opencode "I'm getting a null pointer exception in src/api/users.ts. Help me debug it"
|
||||
```
|
||||
|
||||
## Keyboard Shortcuts
|
||||
|
||||
### Global
|
||||
|
||||
- `Ctrl+p` - Open command palette
|
||||
- `Ctrl+c` - Open AI chat
|
||||
- `Ctrl+s` - Save current file
|
||||
- `Ctrl+q` - Quit
|
||||
- `?` - Show help
|
||||
|
||||
### Navigation
|
||||
|
||||
- `j` / `k` - Down / Up
|
||||
- `h` / `l` - Left / Right
|
||||
- `gg` - Go to top
|
||||
- `G` - Go to bottom
|
||||
- `/` - Search
|
||||
- `n` - Next search result
|
||||
- `N` - Previous search result
|
||||
|
||||
### File Operations
|
||||
|
||||
- `e` - Edit file
|
||||
- `o` - Open in external editor
|
||||
- `d` - Delete file (with confirmation)
|
||||
- `y` - Yank (copy)
|
||||
- `p` - Paste
|
||||
|
||||
## Build Information
|
||||
|
||||
- **Version**: 1.1.18
|
||||
- **Language**: TypeScript/JavaScript (core), Go (TUI)
|
||||
- **Runtime**: Bun
|
||||
- **License**: MIT
|
||||
- **Source**: [GitHub](https://github.com/anomalyco/opencode)
|
||||
|
||||
## Dependencies
|
||||
|
||||
- `bun` - JavaScript runtime and package manager
|
||||
- `fzf` - Fuzzy finder for file selection
|
||||
- `ripgrep` - Fast text search
|
||||
- `models-dev` - Model definitions and schemas
|
||||
|
||||
## Platform Support
|
||||
|
||||
- Linux (aarch64, x86_64)
|
||||
- macOS (aarch64, x86_64)
|
||||
|
||||
## Notes
|
||||
|
||||
- Includes a patch to relax Bun version check (changed to warning instead of error)
|
||||
- Shell completions are installed for supported platforms (excludes x86_64-darwin)
|
||||
- Tree-sitter WASM files are patched to use absolute store paths
|
||||
- JSON schema is generated and installed to `$out/share/opencode/schema.json`
|
||||
|
||||
## Tips and Best Practices
|
||||
|
||||
### Getting Started
|
||||
|
||||
1. **Start Small**: Begin with simple tasks to get familiar with the interface
|
||||
2. **Provide Context**: Give clear, detailed descriptions of what you want
|
||||
3. **Iterate**: Work with OpenCode iteratively, refining requests as needed
|
||||
4. **Review Changes**: Always review AI-generated code before committing
|
||||
|
||||
### Effective Prompts
|
||||
|
||||
- Be specific about requirements
|
||||
- Provide examples of expected behavior
|
||||
- Mention constraints or preferences
|
||||
- Break complex tasks into smaller steps
|
||||
|
||||
### Project Structure
|
||||
|
||||
- Keep your project well-organized
|
||||
- Use consistent naming conventions
|
||||
- Add clear comments to complex logic
|
||||
- Maintain a clean git history
|
||||
|
||||
## Related
|
||||
|
||||
- [Adding Packages](../guides/adding-packages.md) - How to add new packages
|
||||
- [Quick Start](../QUICKSTART.md) - Getting started guide
|
||||
- [OpenCode Documentation](https://github.com/anomalyco/opencode) - Official repository and documentation
|
||||
|
||||
@@ -1,134 +0,0 @@
|
||||
# sidecar
|
||||
|
||||
A companion tool for CLI coding agents, providing diffs, file trees, conversation history, and task management with td integration.
|
||||
|
||||
## Description
|
||||
|
||||
sidecar is a terminal UI tool designed to enhance the experience of using AI coding agents in the terminal. It provides a side panel interface for viewing diffs, file trees, conversation history, and integrates with `td` for task management across coding sessions.
|
||||
|
||||
## Features
|
||||
|
||||
- 🔀 **Diff Viewer**: Visual diff display for code changes
|
||||
- 📁 **File Tree**: Navigate and understand project structure
|
||||
- 💬 **Conversation History**: Review and search past AI interactions
|
||||
- ✅ **Task Management**: Integrated with `td` for tracking tasks
|
||||
- 🖥️ **Terminal UI**: Clean interface using tmux panes
|
||||
- 🤖 **AI Agent Integration**: Designed to work with opencode and similar CLI agents
|
||||
|
||||
## Installation
|
||||
|
||||
### Via Overlay
|
||||
|
||||
```nix
|
||||
{pkgs, ...}: {
|
||||
environment.systemPackages = with pkgs; [
|
||||
sidecar
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
### Direct Reference
|
||||
|
||||
```nix
|
||||
{pkgs, ...}: {
|
||||
environment.systemPackages = with pkgs; [
|
||||
inputs.m3ta-nixpkgs.packages.${pkgs.system}.sidecar
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
### Run Directly
|
||||
|
||||
```bash
|
||||
nix run git+https://code.m3ta.dev/m3tam3re/nixpkgs#sidecar
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Starting sidecar
|
||||
|
||||
```bash
|
||||
# Start sidecar alongside your AI coding agent
|
||||
sidecar
|
||||
|
||||
# Start with a specific agent
|
||||
sidecar --agent opencode
|
||||
```
|
||||
|
||||
### Basic Commands
|
||||
|
||||
```bash
|
||||
# Show help
|
||||
sidecar --help
|
||||
|
||||
# Check version
|
||||
sidecar --version
|
||||
```
|
||||
|
||||
## Dependencies
|
||||
|
||||
sidecar is packaged with the following runtime dependencies:
|
||||
|
||||
- **opencode**: AI coding agent
|
||||
- **td**: Task tracking CLI
|
||||
- **tmux**: Terminal multiplexer for UI layout
|
||||
|
||||
These are automatically included in the PATH when running sidecar.
|
||||
|
||||
## Workflow Integration
|
||||
|
||||
### Typical Session
|
||||
|
||||
1. Start `sidecar` in your project directory
|
||||
2. The tool opens a tmux session with panes for:
|
||||
- Your AI coding agent (opencode)
|
||||
- Task list (via td)
|
||||
- Diff viewer
|
||||
- File tree navigator
|
||||
3. Work with your AI agent as usual
|
||||
4. View diffs and changes in real-time
|
||||
5. Track tasks using the integrated td panel
|
||||
|
||||
### With opencode
|
||||
|
||||
```bash
|
||||
# sidecar automatically integrates with opencode
|
||||
cd your-project
|
||||
sidecar
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
- `SIDECAR_CONFIG`: Custom configuration file path
|
||||
- `SIDECAR_AGENT`: Default AI agent to use (default: opencode)
|
||||
|
||||
### Customization
|
||||
|
||||
Configuration is managed through sidecar's own config system. See the upstream documentation for details.
|
||||
|
||||
## Build Information
|
||||
|
||||
- **Version**: 0.71.1
|
||||
- **Language**: Go
|
||||
- **License**: MIT
|
||||
- **Source**: [GitHub](https://github.com/marcus/sidecar)
|
||||
|
||||
## Platform Support
|
||||
|
||||
- Linux
|
||||
- macOS
|
||||
|
||||
## Notes
|
||||
|
||||
- Tests are disabled in the Nix package build
|
||||
- The package wraps the binary with required dependencies (opencode, td, tmux) in PATH
|
||||
- Version check is enabled for the Nix package
|
||||
|
||||
## Related
|
||||
|
||||
- [td](./td.md) - Task tracking CLI used by sidecar
|
||||
- [opencode](./opencode.md) - AI coding agent integration
|
||||
- [Adding Packages](../guides/adding-packages.md) - How to add new packages
|
||||
- [Quick Start](../QUICKSTART.md) - Getting started guide
|
||||
@@ -1,130 +0,0 @@
|
||||
# td
|
||||
|
||||
Minimalist CLI for tracking tasks across AI coding sessions.
|
||||
|
||||
## Description
|
||||
|
||||
td (task daemon) is a lightweight command-line tool designed for tracking tasks during AI-assisted coding sessions. It provides a simple, fast way to manage todos and maintain context across conversations with AI coding agents.
|
||||
|
||||
## Features
|
||||
|
||||
- ✅ **Minimal Task Tracking**: Simple, focused task management
|
||||
- 🤖 **AI Session Aware**: Designed to work with AI coding workflows
|
||||
- 📊 **Usage Tracking**: Track session usage and context
|
||||
- 🔄 **Session Continuity**: Resume tasks from previous sessions
|
||||
- 📝 **Git Integration**: Works alongside git workflows
|
||||
- ⚡ **Fast**: Lightweight Go binary with minimal overhead
|
||||
|
||||
## Installation
|
||||
|
||||
### Via Overlay
|
||||
|
||||
```nix
|
||||
{pkgs, ...}: {
|
||||
environment.systemPackages = with pkgs; [
|
||||
td
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
### Direct Reference
|
||||
|
||||
```nix
|
||||
{pkgs, ...}: {
|
||||
environment.systemPackages = with pkgs; [
|
||||
inputs.m3ta-nixpkgs.packages.${pkgs.system}.td
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
### Run Directly
|
||||
|
||||
```bash
|
||||
nix run git+https://code.m3ta.dev/m3tam3re/nixpkgs#td
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Starting a Session
|
||||
|
||||
```bash
|
||||
# Start a new session and view current tasks
|
||||
td usage --new-session
|
||||
|
||||
# Quick view of current tasks (no session tracking)
|
||||
td usage -q
|
||||
```
|
||||
|
||||
### Basic Commands
|
||||
|
||||
```bash
|
||||
# Show version
|
||||
td version
|
||||
|
||||
# View help
|
||||
td --help
|
||||
```
|
||||
|
||||
### Task Management
|
||||
|
||||
td integrates with AI coding workflows to track tasks across sessions. Use it at the start of conversations to establish context:
|
||||
|
||||
```bash
|
||||
# At conversation start (or after /clear)
|
||||
td usage --new-session
|
||||
|
||||
# For subsequent reads within the same session
|
||||
td usage -q
|
||||
```
|
||||
|
||||
## Integration with AI Agents
|
||||
|
||||
td is designed to be used by AI coding agents as part of their workflow:
|
||||
|
||||
1. **Session Start**: Agent reads current tasks with `td usage --new-session`
|
||||
2. **Work Progress**: Tasks are tracked and updated during the session
|
||||
3. **Session End**: State is preserved for the next session
|
||||
|
||||
### Example Integration
|
||||
|
||||
In an AI agent's system prompt or configuration:
|
||||
|
||||
```
|
||||
You must run td usage --new-session at conversation start (or after /clear) to see current work.
|
||||
Use td usage -q for subsequent reads.
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
- `TD_DATA_DIR`: Custom directory for td data storage
|
||||
|
||||
### Data Storage
|
||||
|
||||
Task data is stored locally in the project or user directory. See upstream documentation for exact storage location.
|
||||
|
||||
## Build Information
|
||||
|
||||
- **Version**: 0.34.0
|
||||
- **Language**: Go
|
||||
- **License**: MIT
|
||||
- **Source**: [GitHub](https://github.com/marcus/td)
|
||||
|
||||
## Platform Support
|
||||
|
||||
- Linux
|
||||
- macOS
|
||||
|
||||
## Notes
|
||||
|
||||
- Tests are disabled in the Nix package build due to git worktree operations
|
||||
- Version check is enabled for the Nix package (`td version`)
|
||||
- Minimal dependencies - pure Go binary
|
||||
|
||||
## Related
|
||||
|
||||
- [sidecar](./sidecar.md) - Uses td for integrated task management
|
||||
- [opencode](./opencode.md) - AI coding agent that integrates with td
|
||||
- [Adding Packages](../guides/adding-packages.md) - How to add new packages
|
||||
- [Quick Start](../QUICKSTART.md) - Getting started guide
|
||||
@@ -1,637 +0,0 @@
|
||||
# m3ta-nixpkgs: Cleanup & Improvements Plan
|
||||
|
||||
> **For Hermes:** Use subagent-driven-development skill to implement this plan task-by-task.
|
||||
|
||||
**Goal:** Address 10 issues identified in codebase review — reduce duplication, improve naming consistency, extract inline scripts, add testing, and update documentation.
|
||||
|
||||
**Architecture:** Incremental improvements across lib/, modules/, overlays/, docs/, and CI. Each change is self-contained and can be merged independently. No breaking changes to public API (backward-compat aliases preserved where needed).
|
||||
|
||||
**Repo:** `gitea@code.m3ta.dev:m3tam3re/nixpkgs.git` (master branch)
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: Deduplication & Naming (Low Risk)
|
||||
|
||||
### Task 1: Remove duplicate opencode-rules.nix file
|
||||
|
||||
**Objective:** Eliminate the duplicate file import. The `coding-rules.nix` is the canonical source; `opencode-rules.nix` is an identical copy. Make the alias a one-liner in `lib/default.nix`.
|
||||
|
||||
**Files:**
|
||||
- Delete: `lib/opencode-rules.nix`
|
||||
- Modify: `lib/default.nix`
|
||||
|
||||
**Step 1: Update lib/default.nix to alias directly**
|
||||
|
||||
```nix
|
||||
{lib}: {
|
||||
ports = import ./ports.nix {inherit lib;};
|
||||
|
||||
coding-rules = import ./coding-rules.nix {inherit lib;};
|
||||
|
||||
# Backward-compat alias: opencode-rules → coding-rules
|
||||
opencode-rules = import ./coding-rules.nix {inherit lib;};
|
||||
opencode = import ./coding-rules.nix {inherit lib;};
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: Delete the duplicate file**
|
||||
|
||||
```bash
|
||||
git rm lib/opencode-rules.nix
|
||||
```
|
||||
|
||||
**Step 3: Verify nothing breaks**
|
||||
|
||||
```bash
|
||||
nix flake check
|
||||
```
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git commit -m "refactor: remove duplicate opencode-rules.nix, use alias in default.nix"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 2: Tool-agnostic naming in coding-rules.nix internals
|
||||
|
||||
**Objective:** Rename internal variables and output artifacts in `coding-rules.nix` from opencode-specific names to generic names, while keeping the backward-compat alias `mkOpencodeRules`.
|
||||
|
||||
**Files:**
|
||||
- Modify: `lib/coding-rules.nix`
|
||||
|
||||
**Step 1: Rename internal symbols**
|
||||
|
||||
In `lib/coding-rules.nix`, rename:
|
||||
- `rulesDir` stays `.opencode-rules` (this is a filesystem path used by existing projects, changing it would break)
|
||||
- `opencodeConfig` → `rulesConfig`
|
||||
- `opencode.json` output → `coding-rules.json` (add a comment noting it was renamed)
|
||||
- Add `rulesDir` option to function signature with default `.opencode-rules`
|
||||
|
||||
Updated function:
|
||||
|
||||
```nix
|
||||
{lib}: let
|
||||
mkCodingRules = {
|
||||
agents,
|
||||
languages ? [],
|
||||
concerns ? [
|
||||
"coding-style"
|
||||
"naming"
|
||||
"documentation"
|
||||
"testing"
|
||||
"git-workflow"
|
||||
"project-structure"
|
||||
],
|
||||
frameworks ? [],
|
||||
extraInstructions ? [],
|
||||
rulesDir ? ".opencode-rules",
|
||||
}: let
|
||||
instructions =
|
||||
(map (c: "${rulesDir}/concerns/${c}.md") concerns)
|
||||
++ (map (l: "${rulesDir}/languages/${l}.md") languages)
|
||||
++ (map (f: "${rulesDir}/frameworks/${f}.md") frameworks)
|
||||
++ extraInstructions;
|
||||
|
||||
rulesConfig = {
|
||||
"$schema" = "https://opencode.ai/config.json";
|
||||
inherit instructions;
|
||||
};
|
||||
in {
|
||||
inherit instructions;
|
||||
|
||||
shellHook = ''
|
||||
# Create/update symlink to AGENTS rules directory
|
||||
ln -sfn ${agents}/rules ${rulesDir}
|
||||
|
||||
# Generate coding-rules configuration file
|
||||
cat > coding-rules.json <<'RULES_EOF'
|
||||
${builtins.toJSON rulesConfig}
|
||||
RULES_EOF
|
||||
'';
|
||||
};
|
||||
|
||||
# Backward-compat alias
|
||||
mkOpencodeRules = mkCodingRules;
|
||||
in {
|
||||
inherit mkCodingRules mkOpencodeRules;
|
||||
};
|
||||
```
|
||||
|
||||
**Step 2: Update shellHook comment in AGENTS.md**
|
||||
|
||||
In `AGENTS.md`, update the coding-rules section to mention the new `rulesDir` parameter and the `coding-rules.json` output file.
|
||||
|
||||
**Step 3: Verify**
|
||||
|
||||
```bash
|
||||
nix flake check
|
||||
```
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git commit -m "refactor: tool-agnostic naming in coding-rules.nix internals"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 3: Remove redundant overlays entry in flake.nix
|
||||
|
||||
**Objective:** The `default` and `additions` overlays in `flake.nix` produce identical output. Remove `additions` if not referenced elsewhere, or document why both exist.
|
||||
|
||||
**Files:**
|
||||
- Modify: `flake.nix`
|
||||
- Check: all consumer repos for references to `overlays.additions`
|
||||
|
||||
**Step 1: Search for consumers of overlays.additions**
|
||||
|
||||
```bash
|
||||
# Check nixos-config and other repos
|
||||
grep -r "overlays.additions" /data/.hermes/repos/nixos-config/
|
||||
grep -r "additions" /data/.hermes/repos/nixos-config/ --include="*.nix" | grep overlay
|
||||
```
|
||||
|
||||
**Step 2: If no consumers found, remove additions**
|
||||
|
||||
In `flake.nix`, simplify overlays to:
|
||||
|
||||
```nix
|
||||
overlays = {
|
||||
default = final: prev:
|
||||
import ./pkgs {
|
||||
pkgs = final;
|
||||
inputs = inputs;
|
||||
};
|
||||
|
||||
modifications = final: prev: import ./overlays/mods {inherit prev;};
|
||||
};
|
||||
```
|
||||
|
||||
**Step 3: Verify**
|
||||
|
||||
```bash
|
||||
nix flake check
|
||||
```
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git commit -m "refactor: remove redundant 'additions' overlay (identical to 'default')"
|
||||
```
|
||||
|
||||
**Note:** If `additions` IS used elsewhere, add a comment explaining the convention and skip this task.
|
||||
|
||||
---
|
||||
|
||||
## Phase 2: Extract Inline Scripts (Medium Risk)
|
||||
|
||||
### Task 4: Extract pi-agent runner script to standalone file
|
||||
|
||||
**Objective:** Move the ~200-line inline bash script in `modules/nixos/pi-agent.nix` (the `runner` variable) to a separate file `modules/nixos/pi-agent-runner.sh` that gets imported via `builtins.readFile` + `pkgs.writeShellApplication`.
|
||||
|
||||
**Files:**
|
||||
- Create: `modules/nixos/pi-agent-runner.sh`
|
||||
- Modify: `modules/nixos/pi-agent.nix`
|
||||
|
||||
**Step 1: Create the runner script file**
|
||||
|
||||
Extract the body of the `runner` script (everything inside the `pkgs.writeShellScriptBin cfg.wrapper.runnerName '' ... ''`) into `modules/nixos/pi-agent-runner.sh`.
|
||||
|
||||
The script uses Nix-style variable interpolation (`${...}`). We need to keep Nix template variables as `${...}` and escape runtime bash variables as `''${...}` (inside Nix indented strings) so Nix does not interpolate them. Since the script already uses Nix `escapeShellArg` calls, the cleanest approach is:
|
||||
|
||||
Create `modules/nixos/pi-agent-runner.sh` as a template that `pkgs.substituteAll` or `builtins.readFile` + string replacement can process. However, given the heavy Nix interpolation, the pragmatic approach is to use `pkgs.writeShellApplication` with the script body inline but extracted to a `let` binding:
|
||||
|
||||
```nix
|
||||
# In pi-agent.nix, replace the inline runner with:
|
||||
let
|
||||
runnerScript = builtins.readFile ./pi-agent-runner.sh;
|
||||
# ... or keep as let binding but move the body to a separate derivation
|
||||
```
|
||||
|
||||
**Important caveat:** The script has ~30 Nix variable interpolations (`${cfg.user}`, `${escapeShellArg ...}`, etc.). Full extraction to a .sh file would require either:
|
||||
- (a) `substituteAll` with `--replace` for each variable — unwieldy at 30+ substitutions
|
||||
- (b) Converting to env vars passed at runtime — cleaner but changes security posture
|
||||
- (c) Keeping the Nix interpolation but extracting to a `let` block in a separate `.nix` file
|
||||
|
||||
**Recommended approach: Option (c)** — Create `modules/nixos/pi-agent-runner.nix` as a function that takes `cfg` and returns the script:
|
||||
|
||||
```nix
|
||||
# modules/nixos/pi-agent-runner.nix
|
||||
{cfg, pkgs, lib, ...}:
|
||||
with lib; let
|
||||
# ... all the helper variables from pi-agent.nix ...
|
||||
in
|
||||
pkgs.writeShellScriptBin cfg.wrapper.runnerName ''
|
||||
# ... the script body ...
|
||||
'';
|
||||
```
|
||||
|
||||
Then in `pi-agent.nix`:
|
||||
```nix
|
||||
runner = import ./pi-agent-runner.nix {inherit cfg pkgs lib;};
|
||||
```
|
||||
|
||||
**Step 2: Similarly extract the wrapper script**
|
||||
|
||||
Create `modules/nixos/pi-agent-wrapper.nix` for the `wrapper` variable.
|
||||
|
||||
**Step 3: Verify**
|
||||
|
||||
```bash
|
||||
nix flake check
|
||||
# Also test in a nixos-rebuild if possible
|
||||
```
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add modules/nixos/pi-agent-runner.nix modules/nixos/pi-agent-wrapper.nix
|
||||
git commit -m "refactor: extract pi-agent runner and wrapper to separate files"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: Testing (Higher Value)
|
||||
|
||||
### Task 5: Add basic lib function tests
|
||||
|
||||
**Objective:** Add `nix eval`-based tests for `lib/agents.nix` parseRule logic and `lib/coding-rules.nix` instruction generation.
|
||||
|
||||
**Files:**
|
||||
- Create: `tests/lib/agents-test.nix`
|
||||
- Create: `tests/lib/coding-rules-test.nix`
|
||||
- Modify: `flake.nix` (add checks)
|
||||
|
||||
**Step 1: Create test infrastructure**
|
||||
|
||||
```nix
|
||||
# tests/lib/default.nix
|
||||
{
|
||||
agents = import ./agents-test.nix;
|
||||
coding-rules = import ./coding-rules-test.nix;
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: Write agents.nix parseRule test**
|
||||
|
||||
```nix
|
||||
# tests/lib/agents-test.nix
|
||||
let
|
||||
lib = import <nixpkgs/lib>;
|
||||
agentsLib = (import ../../lib {inherit lib;}).agents;
|
||||
|
||||
# Test parseRule helper
|
||||
test1 = let
|
||||
result = builtins.tryEval (
|
||||
let
|
||||
# We can't directly test parseRule since it's internal.
|
||||
# Instead, test the renderer with minimal input.
|
||||
canonical = {
|
||||
test-agent = {
|
||||
description = "Test agent";
|
||||
mode = "primary";
|
||||
systemPrompt = "You are a test.";
|
||||
permissions = {
|
||||
bash = { intent = "allow"; };
|
||||
edit = { intent = "ask"; rules = ["rm -rf *:deny"]; };
|
||||
};
|
||||
};
|
||||
};
|
||||
pkgs = import <nixpkgs> { system = "x86_64-linux"; };
|
||||
rendered = agentsLib.renderForOpencode {
|
||||
inherit pkgs canonical;
|
||||
};
|
||||
in
|
||||
# Verify the derivation builds
|
||||
builtins.pathExists "${rendered}/test-agent.md"
|
||||
);
|
||||
in assert result.value == true; true;
|
||||
|
||||
in {
|
||||
parseRule-basic = test1;
|
||||
}
|
||||
```
|
||||
|
||||
**Step 3: Write coding-rules test**
|
||||
|
||||
```nix
|
||||
# tests/lib/coding-rules-test.nix
|
||||
let
|
||||
lib = import <nixpkgs/lib>;
|
||||
codingRulesLib = (import ../../lib {inherit lib;}).coding-rules;
|
||||
|
||||
rules = codingRulesLib.mkCodingRules {
|
||||
agents = "/tmp/fake-agents";
|
||||
languages = ["python"];
|
||||
concerns = ["naming"];
|
||||
rulesDir = ".coding-rules";
|
||||
};
|
||||
|
||||
# Verify instructions are generated correctly
|
||||
test1 = assert rules.instructions == [
|
||||
".coding-rules/concerns/naming.md"
|
||||
".coding-rules/languages/python.md"
|
||||
]; true;
|
||||
|
||||
# Verify backward-compat alias exists
|
||||
test2 = assert codingRulesLib.mkOpencodeRules == codingRulesLib.mkCodingRules; true;
|
||||
|
||||
in {
|
||||
instructions-correct = test1;
|
||||
backward-compat = test2;
|
||||
}
|
||||
```
|
||||
|
||||
**Step 4: Add to flake.nix checks**
|
||||
|
||||
In `flake.nix`, extend the `checks` attribute:
|
||||
|
||||
```nix
|
||||
checks = forAllSystems (system: let
|
||||
pkgs = pkgsFor system;
|
||||
packages = import ./pkgs {inherit pkgs inputs;};
|
||||
in
|
||||
builtins.mapAttrs (name: pkg: pkgs.lib.hydraJob pkg) packages
|
||||
// {
|
||||
formatting = pkgs.runCommand "check-formatting" {} ''
|
||||
${pkgs.alejandra}/bin/alejandra --check ${./.}
|
||||
touch $out
|
||||
'';
|
||||
lib-tests = pkgs.runCommand "lib-tests" {} ''
|
||||
${pkgs.nix}/bin/nix-instantiate --eval ${./tests/lib/default.nix}
|
||||
touch $out
|
||||
'';
|
||||
});
|
||||
```
|
||||
|
||||
**Step 5: Verify**
|
||||
|
||||
```bash
|
||||
nix flake check
|
||||
```
|
||||
|
||||
**Step 6: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/
|
||||
git commit -m "test: add basic lib function tests for agents and coding-rules"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 6: Add NixOS VM test for pi-agent module
|
||||
|
||||
**Objective:** Add a basic NixOS VM test that verifies the pi-agent module can be evaluated and the wrapper/runner scripts exist.
|
||||
|
||||
**Files:**
|
||||
- Create: `tests/nixos/pi-agent-test.nix`
|
||||
- Modify: `flake.nix` (add to checks)
|
||||
|
||||
**Step 1: Write the VM test**
|
||||
|
||||
```nix
|
||||
# tests/nixos/pi-agent-test.nix
|
||||
{pkgs, ...}: {
|
||||
name = "pi-agent";
|
||||
|
||||
nodes.machine = {config, ...}: {
|
||||
imports = [
|
||||
    (pkgs.path + "/nixos/modules/module-list.nix")
|
||||
];
|
||||
|
||||
# Minimal pi-agent config
|
||||
m3ta.pi-agent = {
|
||||
enable = true;
|
||||
package = pkgs.writeScriptBin "pi-agent" ''
|
||||
#!/bin/sh
|
||||
echo "pi-agent mock"
|
||||
'';
|
||||
createUser = true;
|
||||
hostUsers = {
|
||||
testuser = {
|
||||
projectRoots = ["/tmp/test-project"];
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
users.users.testuser = {
|
||||
isNormalUser = true;
|
||||
};
|
||||
};
|
||||
|
||||
testScript = ''
|
||||
machine.start()
|
||||
machine.wait_for_unit("multi-user.target")
|
||||
|
||||
# Verify user was created
|
||||
machine.succeed("id pi-agent")
|
||||
|
||||
# Verify wrapper exists
|
||||
machine.succeed("which pi")
|
||||
|
||||
# Verify state directory
|
||||
machine.succeed("test -d /var/lib/pi-agent")
|
||||
machine.succeed("test -d /var/lib/pi-agent/.pi")
|
||||
'';
|
||||
}
|
||||
```
|
||||
|
||||
**Step 2: Add to flake.nix checks**
|
||||
|
||||
```nix
|
||||
# In the checks attrset:
|
||||
pi-agent-vm-test = pkgs.nixosTest (import ./tests/nixos/pi-agent-test.nix {inherit pkgs;});
|
||||
```
|
||||
|
||||
**Step 3: Verify**
|
||||
|
||||
```bash
|
||||
nix build .#checks.x86_64-linux.pi-agent-vm-test
|
||||
```
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add tests/nixos/
|
||||
git commit -m "test: add NixOS VM test for pi-agent module"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: Documentation (Low Risk, High Value)
|
||||
|
||||
### Task 7: Update AGENTS.md to reflect current state
|
||||
|
||||
**Objective:** Remove outdated migration sections, update function signatures, and align with current code.
|
||||
|
||||
**Files:**
|
||||
- Modify: `AGENTS.md`
|
||||
|
||||
**Step 1: Update the AGENTS REWORK migration section**
|
||||
|
||||
The section starting with `## MIGRATION: Agent System (OpenCode → Canonical TOML)` describes a completed migration. Convert it to a brief "Architecture" section that describes the current state, not the migration path.
|
||||
|
||||
**Step 2: Update lib.agents function table**
|
||||
|
||||
Verify that the function signatures and descriptions in the AGENTS.md table match the actual functions in `lib/agents.nix`. Specifically:
|
||||
- `loadCanonical` takes `{agentsInput}` — confirm docs match
|
||||
- `renderForPi` now has `primaryAgent` parameter — confirm documented
|
||||
- `shellHookForTool` exists — confirm documented
|
||||
|
||||
**Step 3: Update coding-rules documentation**
|
||||
|
||||
Replace references to `mkOpencodeRules` with `mkCodingRules` as primary, `mkOpencodeRules` as backward-compat alias. Document the new `rulesDir` parameter.
|
||||
|
||||
**Step 4: Update overlay documentation**
|
||||
|
||||
Remove or annotate the `additions` overlay depending on Task 3 outcome.
|
||||
|
||||
**Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git commit -m "docs: update AGENTS.md to reflect current codebase state"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 8: Add CHANGELOG.md
|
||||
|
||||
**Objective:** Create a changelog that captures recent work (from git log) so consumers can track changes.
|
||||
|
||||
**Files:**
|
||||
- Create: `CHANGELOG.md`
|
||||
|
||||
**Step 1: Generate changelog from git history**
|
||||
|
||||
```bash
|
||||
cd /data/.hermes/repos/nixpkgs-review
|
||||
git log --oneline --no-merges master | head -30
|
||||
```
|
||||
|
||||
**Step 2: Write CHANGELOG.md**
|
||||
|
||||
Structure as Keep a Changelog format:
|
||||
|
||||
```markdown
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/).
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
## [0.4.0] - 2026-04-15
|
||||
|
||||
### Added
|
||||
- Pi agent wrapper with per-host-user policy enforcement (`m3ta.pi-agent` NixOS module)
|
||||
- `coding.agents.pi` Home Manager module with settings, MCP, and skills support
|
||||
- `coding.agents.claude-code` Home Manager module with MCP integration
|
||||
- Automated package updates via Gitea Actions (`nix-update` workflow)
|
||||
- `lib.agents.renderForPi` with primaryAgent selection and pi-subagents format
|
||||
- `pkgs/td` - Task management CLI for AI coding sessions
|
||||
|
||||
### Changed
|
||||
- Renamed `lib.opencode-rules` → `lib.coding-rules` (backward-compat alias preserved)
|
||||
- Agent system migrated to harness-agnostic canonical format
|
||||
- Pi settings sync now merges host and Nix-managed values via deep_merge
|
||||
|
||||
### Fixed
|
||||
- Pi settings sync race condition on first run
|
||||
```
|
||||
|
||||
**Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git commit -m "docs: add CHANGELOG.md"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 5: Minor Cleanups (Low Risk)
|
||||
|
||||
### Task 9: Clean up pkgs/default.nix unused `system` binding
|
||||
|
||||
**Objective:** The `system = pkgs.stdenv.hostPlatform.system;` binding in `pkgs/default.nix` is only used for the two input-pass-throughs. If those are the only consumers, it's fine, but add a clarifying comment.
|
||||
|
||||
**Files:**
|
||||
- Modify: `pkgs/default.nix`
|
||||
|
||||
**Step 1: Add clarifying comment**
|
||||
|
||||
```nix
|
||||
{
|
||||
pkgs,
|
||||
inputs,
|
||||
...
|
||||
}: let
|
||||
# Only used for flake input pass-throughs below
|
||||
system = pkgs.stdenv.hostPlatform.system;
|
||||
in {
|
||||
...
|
||||
```
|
||||
|
||||
**Step 2: Commit**
|
||||
|
||||
```bash
|
||||
git commit -m "docs: clarify system binding in pkgs/default.nix"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 10: Remove commented-out overlay entries in overlays/default.nix
|
||||
|
||||
**Objective:** Clean up the large block of commented-out code in `overlays/default.nix` (nodejs_24, paperless-ngx, anytype-heart, hyprpanel, etc.). These belong in git history, not in active code.
|
||||
|
||||
**Files:**
|
||||
- Modify: `overlays/default.nix`
|
||||
|
||||
**Step 1: Remove commented-out blocks**
|
||||
|
||||
Remove:
|
||||
- The `rose-pine-hyprcursor` addition from `additions` (if it's unused — check with grep)
|
||||
- The commented-out `nodejs_24`, `paperless-ngx`, `anytype-heart`, `trezord`, `mesa`, `hyprpanel` blocks from `modifications`
|
||||
- The commented-out overlay inputs (`temp-packages`, `stable-packages`, `pinned-packages`, `locked-packages`, `master-packages`) if they reference inputs not in `flake.nix`
|
||||
|
||||
Actually, `nixpkgs-stable`, `nixpkgs-9e9486b`, `nixpkgs-9472de4`, `nixpkgs-locked`, `nixpkgs-master` are NOT in the current `flake.nix` inputs. These overlays will fail if referenced. They should either be removed or the inputs should be added.
|
||||
|
||||
**Action:**
|
||||
- Keep `master-packages` IF `nixpkgs-master` is in flake.nix inputs (it IS — good)
|
||||
- Remove `temp-packages`, `pinned-packages`, `locked-packages` (inputs don't exist)
|
||||
- Keep `stable-packages` IF `nixpkgs-stable` exists in inputs (check — it does NOT currently exist)
|
||||
- Keep `additions` with `rose-pine-hyprcursor` IF `rose-pine-hyprcursor` input exists (check)
|
||||
|
||||
**Step 2: Verify**
|
||||
|
||||
```bash
|
||||
nix flake check
|
||||
```
|
||||
|
||||
**Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git commit -m "chore: remove dead overlay entries for non-existent flake inputs"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Execution Order & Priority
|
||||
|
||||
| Task | Risk | Effort | Impact | Dependencies |
|
||||
|------|------|--------|--------|-------------|
|
||||
| T1: Remove opencode-rules.nix | Low | 5min | Clean | None |
|
||||
| T2: Tool-agnostic naming | Low | 15min | Consistency | None |
|
||||
| T3: Remove redundant overlay | Low | 10min | Clean | Check consumers |
|
||||
| T9: Clarify system binding | Low | 2min | Docs | None |
|
||||
| T10: Remove dead overlays | Low | 10min | Clean | None |
|
||||
| T7: Update AGENTS.md | Low | 20min | Docs | After T1, T2 |
|
||||
| T8: Add CHANGELOG.md | Low | 15min | Docs | None |
|
||||
| T4: Extract pi-agent scripts | Medium | 45min | Maintainability | None |
|
||||
| T5: Lib function tests | Medium | 30min | Quality | None |
|
||||
| T6: NixOS VM test | Medium | 45min | Quality | None |
|
||||
|
||||
**Recommended order:** T1 → T9 → T10 → T3 → T2 → T7 → T8 → T5 → T4 → T6
|
||||
|
||||
**Branching strategy:** Create a feature branch `chore/cleanup-review` from master, implement all tasks, open PR for review before merging.
|
||||
@@ -153,6 +153,33 @@ allServices = portHelpers.listServices;
|
||||
# Returns: ["nginx" "grafana" "prometheus" "homepage"]
|
||||
```
|
||||
|
||||
### `getDefaultPort`
|
||||
|
||||
Simple helper to get a port without host override.
|
||||
|
||||
#### Signature
|
||||
|
||||
```nix
|
||||
getDefaultPort :: portsConfig -> string -> int-or-null
|
||||
```
|
||||
|
||||
#### Arguments
|
||||
|
||||
1. `portsConfig` - Same structure as `mkPortHelpers`
|
||||
2. `service` - The service name (string)
|
||||
|
||||
#### Returns
|
||||
|
||||
Port number (int) or `null` if service not found.
|
||||
|
||||
#### Usage
|
||||
|
||||
```nix
|
||||
services.my-service = {
|
||||
port = m3taLib.ports.getDefaultPort myPorts "my-service";
|
||||
};
|
||||
```
|
||||
|
||||
## Using Library Functions
|
||||
|
||||
### Importing
|
||||
@@ -235,7 +262,7 @@ in {
|
||||
| `getPort` | Get port with optional host override | `int or null` |
|
||||
| `getHostPorts` | Get all ports for host | `attrs` |
|
||||
| `listServices` | List all service names | `[string]` |
|
||||
|
||||
| `getDefaultPort` | Get default port only | `int or null` |
|
||||
|
||||
## Related
|
||||
|
||||
|
||||
@@ -1,162 +0,0 @@
|
||||
# Templates
|
||||
|
||||
Boilerplate templates for quickly adding new packages or modules to m3ta-nixpkgs.
|
||||
|
||||
## Available Templates
|
||||
|
||||
| Template | Command | Creates |
|
||||
|---------|---------|---------|
|
||||
| Package | `nix flake init -t .#package` | `templates/package/` |
|
||||
| NixOS Module | `nix flake init -t .#nixos-module` | `templates/nixos-module/` |
|
||||
| Home Manager Module | `nix flake init -t .#home-manager-module` | `templates/home-manager-module/` |
|
||||
|
||||
## Using Templates
|
||||
|
||||
### 1. List Available Templates
|
||||
|
||||
```bash
|
||||
nix flake show
|
||||
```
|
||||
|
||||
### 2. Initialize from a Template
|
||||
|
||||
```bash
|
||||
# Package
|
||||
nix flake init -t .#package
|
||||
|
||||
# NixOS Module
|
||||
nix flake init -t .#nixos-module
|
||||
|
||||
# Home Manager Module
|
||||
nix flake init -t .#home-manager-module
|
||||
```
|
||||
|
||||
Note: `nix flake init` copies the template contents into the current directory. Use a subdirectory name:
|
||||
|
||||
```bash
|
||||
mkdir new-package && cd new-package
|
||||
nix flake init -t ..#package
|
||||
```
|
||||
|
||||
## Package Template
|
||||
|
||||
Creates a complete package structure:
|
||||
|
||||
```
|
||||
templates/package/
|
||||
├── default.nix # Package definition with comments
|
||||
```
|
||||
|
||||
### Fields to Fill In
|
||||
|
||||
| Field | Location | Notes |
|
||||
|-------|----------|-------|
|
||||
| `pname` | `default.nix` | Package name (kebab-case) |
|
||||
| `version` | `default.nix` | Semantic version |
|
||||
| `src` | `default.nix` | Fetcher (GitHub, URL, Git, etc.) |
|
||||
| `hash` | `default.nix` | Use `lib.fakeHash`, build to get real hash |
|
||||
| `meta.description` | `default.nix` | Short one-line description |
|
||||
| `meta.homepage` | `default.nix` | Project URL |
|
||||
| `meta.license` | `default.nix` | Use `lib.licenses.*` |
|
||||
| `meta.platforms` | `default.nix` | Usually `platforms.linux` |
|
||||
| `meta.mainProgram` | `default.nix` | Main binary name |
|
||||
|
||||
### Common Build Systems
|
||||
|
||||
```nix
|
||||
# Rust (recommended)
|
||||
rustPlatform.buildRustPackage rec { ... }
|
||||
|
||||
# Python
|
||||
python3.pkgs.buildPythonPackage rec { ... }
|
||||
|
||||
# Node.js
|
||||
buildNpmPackage rec { ... }
|
||||
|
||||
# Shell script
|
||||
writeShellScriptBin "name" ''echo hello''
|
||||
|
||||
# Go
|
||||
buildGoModule rec { ... }
|
||||
|
||||
# Generic C/Make
|
||||
stdenv.mkDerivation { ... }
|
||||
```
|
||||
|
||||
See [Adding Packages](./guides/adding-packages.md) for detailed instructions.
|
||||
|
||||
## NixOS Module Template
|
||||
|
||||
Creates a complete NixOS module:
|
||||
|
||||
```
|
||||
templates/nixos-module/
|
||||
├── default.nix # Module with options
|
||||
└── README.md # Module documentation
|
||||
```
|
||||
|
||||
### Fields to Fill In
|
||||
|
||||
| Field | Location | Notes |
|
||||
|-------|----------|-------|
|
||||
| Module name | `default.nix` | File name matches `m3ta.<name>` |
|
||||
| Options | `default.nix` | Add under `options.m3ta.<name>` |
|
||||
| Config | `default.nix` | Add under `config.m3ta.<name>` |
|
||||
| Description | `README.md` | What the module does |
|
||||
|
||||
### After Creating
|
||||
|
||||
1. Add to `modules/nixos/default.nix` imports
|
||||
2. Optionally export from `flake.nix` `nixosModules`
|
||||
3. Add documentation to `docs/modules/nixos/`
|
||||
4. Run `nix flake check`
|
||||
|
||||
## Home Manager Module Template
|
||||
|
||||
Creates a complete Home Manager module:
|
||||
|
||||
```
|
||||
templates/home-manager-module/
|
||||
├── default.nix # Module with options
|
||||
└── README.md # Module documentation
|
||||
```
|
||||
|
||||
### Fields to Fill In
|
||||
|
||||
| Field | Location | Notes |
|
||||
|-------|----------|-------|
|
||||
| Category | Directory | Choose `cli/` or `coding/` |
|
||||
| Options | `default.nix` | Add under `options.m3ta.<name>` |
|
||||
| Config | `default.nix` | Add under `config.m3ta.<name>` |
|
||||
| Description | `README.md` | What the module does |
|
||||
|
||||
### After Creating
|
||||
|
||||
1. Add to the appropriate category aggregator (`cli/default.nix` or `coding/default.nix`)
|
||||
2. Optionally export from `flake.nix` `homeManagerModules`
|
||||
3. Add documentation to `docs/modules/home-manager/`
|
||||
4. Run `nix flake check`
|
||||
|
||||
## Template Variables
|
||||
|
||||
Templates use Nix attribute references. After copying, search for these placeholders:
|
||||
|
||||
| Placeholder | Replace With |
|
||||
|-------------|--------------|
|
||||
| `package-name` | Your package name (kebab-case) |
|
||||
| `owner-name` / `repo-name` | GitHub owner and repo |
|
||||
| `0.1.0` | Initial version |
|
||||
| `lib.fakeHash` | Real hash after first build |
|
||||
| `lib.licenses.mit` | Appropriate license |
|
||||
| `A short description` | One-line description |
|
||||
|
||||
## Automated Updates
|
||||
|
||||
Packages created from templates are automatically updated weekly by the Gitea Actions workflow. See the main README for details on the `nix-update` automation.
|
||||
|
||||
## Related
|
||||
|
||||
- [Adding Packages](./guides/adding-packages.md) - Detailed package guide
|
||||
- [Adding Modules](./guides/adding-modules.md) - Detailed module guide
|
||||
- [Development Workflow](./guides/development-workflow.md) - Local development
|
||||
- [Architecture](./ARCHITECTURE.md) - Repository structure
|
||||
85
flake.lock
generated
85
flake.lock
generated
@@ -1,49 +1,12 @@
|
||||
{
|
||||
"nodes": {
|
||||
"agents": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1777399938,
|
||||
"narHash": "sha256-xXPqUQezDdDtF8MbpZnwD1HkybOYwF92evx8rJ6OXCU=",
|
||||
"ref": "refs/heads/master",
|
||||
"rev": "9a91f1ee0cf011a7eaf1f16a9e17610b0457e055",
|
||||
"revCount": 85,
|
||||
"type": "git",
|
||||
"url": "https://code.m3ta.dev/m3tam3re/AGENTS"
|
||||
},
|
||||
"original": {
|
||||
"type": "git",
|
||||
"url": "https://code.m3ta.dev/m3tam3re/AGENTS"
|
||||
}
|
||||
},
|
||||
"basecamp": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1774505501,
|
||||
"narHash": "sha256-7UiRrDptj7yuEFwToOfdunUMz/i3jRLR7CmMoYQjq6k=",
|
||||
"owner": "basecamp",
|
||||
"repo": "basecamp-cli",
|
||||
"rev": "f087e6ef84002503d0dbc75ea1c8c928a8928d9e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "basecamp",
|
||||
"ref": "v0.7.2",
|
||||
"repo": "basecamp-cli",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1777268161,
|
||||
"narHash": "sha256-bxrdOn8SCOv8tN4JbTF/TXq7kjo9ag4M+C8yzzIRYbE=",
|
||||
"lastModified": 1768127708,
|
||||
"narHash": "sha256-1Sm77VfZh3mU0F5OqKABNLWxOuDeHIlcFjsXeeiPazs=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "1c3fe55ad329cbcb28471bb30f05c9827f724c76",
|
||||
"rev": "ffbc9f8cbaacfb331b6017d5a5abb21a492c9a38",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
@@ -53,49 +16,9 @@
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-master": {
|
||||
"locked": {
|
||||
"lastModified": 1777684196,
|
||||
"narHash": "sha256-irZjT++CZFBGHsuHNqPTa6AE1wVVuxdoR7pcdp6hq0A=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "1cad9ada6da0658a588196ddcb2836004caa2293",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "master",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"openspec": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1777679960,
|
||||
"narHash": "sha256-yLc5BzIecR3L9lPILImNqRgOqqXCZH902CSBLn/5UJI=",
|
||||
"owner": "Fission-AI",
|
||||
"repo": "OpenSpec",
|
||||
"rev": "0ca74762dc03ee25f8651eaa7c33866170112031",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "Fission-AI",
|
||||
"repo": "OpenSpec",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"agents": "agents",
|
||||
"basecamp": "basecamp",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"nixpkgs-master": "nixpkgs-master",
|
||||
"openspec": "openspec"
|
||||
"nixpkgs": "nixpkgs"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
91
flake.nix
91
flake.nix
@@ -3,24 +3,9 @@
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
nixpkgs-master.url = "github:NixOS/nixpkgs/master";
|
||||
|
||||
basecamp = {
|
||||
url = "github:basecamp/basecamp-cli/v0.7.2";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
|
||||
# openspec - spec-driven development for AI coding assistants
|
||||
openspec = {
|
||||
url = "github:Fission-AI/OpenSpec";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
|
||||
# Agent definitions and coding rules
|
||||
agents = {
|
||||
url = "git+https://code.m3ta.dev/m3tam3re/AGENTS";
|
||||
flake = false;
|
||||
};
|
||||
# Optional: Add stable channel if needed
|
||||
# nixpkgs-stable.url = "github:NixOS/nixpkgs/nixos-24.05";
|
||||
};
|
||||
|
||||
outputs = {
|
||||
@@ -29,7 +14,12 @@
|
||||
...
|
||||
} @ inputs: let
|
||||
# Supported systems
|
||||
systems = ["x86_64-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin"];
|
||||
systems = [
|
||||
"x86_64-linux"
|
||||
"aarch64-linux"
|
||||
"x86_64-darwin"
|
||||
"aarch64-darwin"
|
||||
];
|
||||
|
||||
# Helper function to generate an attrset for each of the systems
|
||||
forAllSystems = nixpkgs.lib.genAttrs systems;
|
||||
@@ -42,21 +32,25 @@
|
||||
};
|
||||
in {
|
||||
# Custom packages - accessible via 'nix build .#package-name'
|
||||
packages = forAllSystems (system: let
|
||||
packages = forAllSystems (
|
||||
system: let
|
||||
pkgs = pkgsFor system;
|
||||
in
|
||||
import ./pkgs {inherit pkgs inputs;});
|
||||
import ./pkgs {inherit pkgs;}
|
||||
);
|
||||
|
||||
# Overlays - can be imported in your system configuration
|
||||
overlays = {
|
||||
# Default overlay: adds all custom packages
|
||||
default = final: prev:
|
||||
import ./pkgs {
|
||||
pkgs = final;
|
||||
inputs = inputs;
|
||||
};
|
||||
import ./pkgs {pkgs = final;};
|
||||
|
||||
modifications = final: prev: import ./overlays/mods {inherit prev;};
|
||||
# Individual overlays for more granular control
|
||||
additions = final: prev:
|
||||
import ./pkgs {pkgs = final;};
|
||||
|
||||
modifications = final: prev:
|
||||
import ./overlays/mods {inherit prev;};
|
||||
};
|
||||
|
||||
# NixOS modules - for system-level configuration
|
||||
@@ -71,49 +65,32 @@
|
||||
homeManagerModules = {
|
||||
default = import ./modules/home-manager;
|
||||
ports = import ./modules/home-manager/ports.nix;
|
||||
opencode = import ./modules/home-manager/coding/opencode.nix;
|
||||
agents = import ./modules/home-manager/coding/agents;
|
||||
zellij-ps = import ./modules/home-manager/zellij-ps.nix;
|
||||
};
|
||||
|
||||
# Library functions - helper utilities for your configuration
|
||||
lib = forAllSystems (system: import ./lib {lib = nixpkgs.lib;});
|
||||
lib = forAllSystems (
|
||||
system: let
|
||||
pkgs = pkgsFor system;
|
||||
in
|
||||
import ./lib {lib = pkgs.lib;}
|
||||
);
|
||||
|
||||
# Development shells for various programming environments
|
||||
# Usage: nix develop .#<shell-name>
|
||||
# Available shells: default, python, devops, coding
|
||||
devShells = forAllSystems (system: let
|
||||
# Available shells: default, python, devops
|
||||
devShells = forAllSystems (
|
||||
system: let
|
||||
pkgs = pkgsFor system;
|
||||
in
|
||||
import ./shells {
|
||||
inherit pkgs inputs;
|
||||
agents = inputs.agents;
|
||||
});
|
||||
import ./shells {inherit pkgs;}
|
||||
);
|
||||
|
||||
# Formatter for 'nix fmt'
|
||||
formatter = forAllSystems (system: (pkgsFor system).alejandra);
|
||||
|
||||
# Checks for 'nix flake check' - verifies all packages build
|
||||
checks = forAllSystems (system: let
|
||||
pkgs = pkgsFor system;
|
||||
packages = import ./pkgs {inherit pkgs inputs;};
|
||||
in
|
||||
builtins.mapAttrs (name: pkg: pkgs.lib.hydraJob pkg) packages
|
||||
// {
|
||||
formatting = pkgs.runCommand "check-formatting" {} ''
|
||||
${pkgs.alejandra}/bin/alejandra --check ${./.}
|
||||
touch $out
|
||||
'';
|
||||
# Lib unit tests
|
||||
lib-agents = import ./tests/lib/agents-test.nix {
|
||||
inherit pkgs;
|
||||
lib = pkgs.lib;
|
||||
};
|
||||
lib-coding-rules = import ./tests/lib/coding-rules-test.nix {
|
||||
inherit pkgs;
|
||||
lib = pkgs.lib;
|
||||
};
|
||||
});
|
||||
formatter = forAllSystems (
|
||||
system:
|
||||
(pkgsFor system).alejandra
|
||||
);
|
||||
|
||||
# Templates for creating new packages/modules
|
||||
templates = {
|
||||
|
||||
433
lib/agents.nix
433
lib/agents.nix
@@ -1,433 +0,0 @@
|
||||
# Harness-agnostic agent management utilities
|
||||
#
|
||||
# This module provides functions to load canonical agent definitions and
|
||||
# render them for different AI coding tools (OpenCode, Claude Code, Pi).
|
||||
#
|
||||
# Usage in your configuration:
|
||||
#
|
||||
# let
|
||||
# m3taLib = inputs.m3ta-nixpkgs.lib.${system};
|
||||
# canonical = m3taLib.agents.loadCanonical { agentsInput = inputs.agents; };
|
||||
#
|
||||
# # Render for a specific tool
|
||||
# rendered = m3taLib.agents.renderForOpencode {
|
||||
# inherit pkgs canonical;
|
||||
# modelOverrides = { chiron = "anthropic/claude-sonnet-4"; };
|
||||
# };
|
||||
# in { ... }
|
||||
{lib}: let
|
||||
# ── Shared helpers ─────────────────────────────────────────────
|
||||
# Split a rule string on the LAST colon to get { pattern, action }.
|
||||
# e.g. "rm -rf *:ask" → pattern="rm -rf *", action="ask"
|
||||
# e.g. "/run/agenix/**:deny" → pattern="/run/agenix/**", action="deny"
|
||||
parseRule = ruleStr: let
|
||||
parts = lib.strings.splitString ":" ruleStr;
|
||||
action = lib.last parts;
|
||||
pattern = lib.concatStringsSep ":" (lib.init parts);
|
||||
in {inherit pattern action;};
|
||||
|
||||
# ── Shared renderer primitives ──────────────────────────────────
|
||||
# Render agent files from canonical definitions into a directory.
|
||||
# Each agent gets a "<name>.md" file containing mkContent name agent.
|
||||
#
|
||||
# Args:
|
||||
# pkgs — Nixpkgs package set with linkFarm
|
||||
# canonical — Attribute set of agent definitions (keyed by slug)
|
||||
# mkContent — Function: name: agent → string (file content)
|
||||
# name — Derivation name (e.g. "opencode-agents")
|
||||
#
|
||||
# Returns:
|
||||
# A store path containing all agent *.md files.
|
||||
renderAgentFiles = pkgs: canonical: mkContent: name:
|
||||
pkgs.linkFarm name (
|
||||
lib.mapAttrsToList (n: a: {
|
||||
name = "${n}.md";
|
||||
path = pkgs.writeText "${n}.md" (mkContent n a);
|
||||
})
|
||||
canonical
|
||||
);
|
||||
|
||||
agentsLib = {
|
||||
# ── loadCanonical ─────────────────────────────────────────────
|
||||
#
|
||||
# Load canonical agent definitions from the AGENTS flake input.
|
||||
# Returns the canonical attrset from lib.loadAgents (keyed by slug).
|
||||
|
||||
loadCanonical = {agentsInput}: agentsInput.lib.loadAgents;
|
||||
|
||||
# ── OpenCode renderer ─────────────────────────────────────────
|
||||
#
|
||||
# Produces a directory of agent *.md files suitable for
|
||||
# ~/.config/opencode/agents/ (system-level)
|
||||
# .opencode/agents/ (project-level)
|
||||
#
|
||||
# Each file has YAML frontmatter (description, mode, optional model,
|
||||
# optional permission) followed by the agent's systemPrompt content.
|
||||
# The filename (without .md) becomes the agent name in OpenCode.
|
||||
|
||||
renderForOpencode = {
|
||||
pkgs,
|
||||
canonical,
|
||||
modelOverrides ? {},
|
||||
}: let
|
||||
# Render one permission section to YAML lines.
|
||||
# intent-only → single line: " <tool>: <intent>"
|
||||
# intent+rules → nested block
|
||||
renderPermSection = tool: section:
|
||||
if !(section ? rules) || section.rules == []
|
||||
then [" ${tool}: ${section.intent}"]
|
||||
else let
|
||||
parsedRules = map parseRule section.rules;
|
||||
wildcardLine = " \"*\": ${section.intent}";
|
||||
ruleLines = map (r: " \"${r.pattern}\": ${r.action}") parsedRules;
|
||||
in
|
||||
[" ${tool}:"] ++ [wildcardLine] ++ ruleLines;
|
||||
|
||||
renderPermBlock = permissions:
|
||||
if permissions == {} || permissions == null
|
||||
then []
|
||||
else
|
||||
["permission:"]
|
||||
++ lib.concatLists (
|
||||
lib.mapAttrsToList renderPermSection permissions
|
||||
);
|
||||
|
||||
mkFrontmatter = name: agent: let
|
||||
descLine = "description: \"${agent.description}.\"";
|
||||
modeLine = "mode: ${agent.mode}";
|
||||
modelLine =
|
||||
lib.optionalString
|
||||
(modelOverrides ? ${name})
|
||||
"model: ${modelOverrides.${name}}\n";
|
||||
permBlock = renderPermBlock (agent.permissions or {});
|
||||
permLines =
|
||||
if permBlock == []
|
||||
then ""
|
||||
else lib.concatStringsSep "\n" permBlock + "\n";
|
||||
in "---\n${descLine}\n${modeLine}\n${modelLine}${permLines}---\n";
|
||||
|
||||
mkAgentContent = name: agent:
|
||||
(mkFrontmatter name agent) + agent.systemPrompt;
|
||||
in
|
||||
renderAgentFiles pkgs canonical mkAgentContent "opencode-agents";
|
||||
|
||||
# ── Claude Code renderer ──────────────────────────────────────
|
||||
#
|
||||
# Produces a directory containing:
|
||||
# .claude/agents/<name>.md — one per agent with YAML frontmatter
|
||||
# .claude/settings.json — permission rules in Claude Code DSL
|
||||
#
|
||||
# Claude Code requires:
|
||||
# - name field: [a-z0-9-]+ (kebab-case)
|
||||
# - description field: required
|
||||
# - All agents are subagents (no primary/subagent distinction)
|
||||
|
||||
renderForClaudeCode = {
|
||||
pkgs,
|
||||
canonical,
|
||||
modelOverrides ? {},
|
||||
}: let
|
||||
# Claude Code permission DSL format: "Tool(pattern)" or just "Tool"
|
||||
# Canonical bash rules → "Bash(pattern)" entries
|
||||
# Canonical edit rules → "Edit(pattern)" entries
|
||||
renderPermAllow = permissions: let
|
||||
bashRules =
|
||||
if !(permissions ? bash)
|
||||
then []
|
||||
else if permissions.bash.intent == "allow"
|
||||
then ["Bash"]
|
||||
else
|
||||
map
|
||||
(r: let parsed = parseRule r; in "Bash(${parsed.pattern})")
|
||||
(lib.filter (r: (parseRule r).action == "allow") (permissions.bash.rules or []));
|
||||
editRules =
|
||||
if !(permissions ? edit)
|
||||
then []
|
||||
else if permissions.edit.intent == "allow"
|
||||
then ["Edit"]
|
||||
else
|
||||
map
|
||||
(r: let parsed = parseRule r; in "Edit(${parsed.pattern})")
|
||||
(lib.filter (r: (parseRule r).action == "allow") (permissions.edit.rules or []));
|
||||
webRules =
|
||||
lib.optional (permissions.webfetch.intent or "" == "allow") "WebFetch";
|
||||
in
|
||||
bashRules ++ editRules ++ webRules;
|
||||
|
||||
renderPermDeny = permissions: let
|
||||
bashRules =
|
||||
if !(permissions ? bash)
|
||||
then []
|
||||
else
|
||||
map
|
||||
(r: let parsed = parseRule r; in "Bash(${parsed.pattern})")
|
||||
(lib.filter (r: (parseRule r).action == "deny") (permissions.bash.rules or []));
|
||||
editRules =
|
||||
if !(permissions ? edit)
|
||||
then []
|
||||
else
|
||||
map
|
||||
(r: let parsed = parseRule r; in "Edit(${parsed.pattern})")
|
||||
(lib.filter (r: (parseRule r).action == "deny") (permissions.edit.rules or []));
|
||||
in
|
||||
bashRules ++ editRules;
|
||||
|
||||
# Build YAML frontmatter for one Claude Code agent .md file.
|
||||
mkClaudeFrontmatter = name: agent: let
|
||||
descLine = "description: \"${agent.description}\"";
|
||||
modelLine =
|
||||
lib.optionalString
|
||||
(modelOverrides ? ${name})
|
||||
"model: ${modelOverrides.${name}}\n";
|
||||
skillsLine =
|
||||
if (agent ? skills) && agent.skills != []
|
||||
then "skills:\n" + lib.concatStringsSep "\n" (map (s: " - ${s}") agent.skills) + "\n"
|
||||
else "";
|
||||
in "---\n${descLine}\n${modelLine}${skillsLine}---\n";
|
||||
|
||||
mkClaudeAgentContent = name: agent:
|
||||
(mkClaudeFrontmatter name agent) + agent.systemPrompt;
|
||||
|
||||
agentFiles = renderAgentFiles pkgs canonical mkClaudeAgentContent "claude-code-agent-files";
|
||||
|
||||
# Build settings.json with permission rules aggregated from all agents.
|
||||
allAllows = lib.flatten (lib.mapAttrsToList (_: agent: renderPermAllow (agent.permissions or {})) canonical);
|
||||
allDenies = lib.flatten (lib.mapAttrsToList (_: agent: renderPermDeny (agent.permissions or {})) canonical);
|
||||
|
||||
settingsJson = builtins.toJSON {
|
||||
permissions = {
|
||||
allow = lib.unique (lib.sort (a: b: a < b) allAllows);
|
||||
deny = lib.unique (lib.sort (a: b: a < b) allDenies);
|
||||
};
|
||||
};
|
||||
|
||||
settingsFile = pkgs.writeText "claude-settings.json" settingsJson;
|
||||
in
|
||||
pkgs.runCommand "claude-code-agents" {} ''
|
||||
mkdir -p $out/.claude/agents
|
||||
cp -r ${agentFiles}/* $out/.claude/agents/
|
||||
cp ${settingsFile} $out/.claude/settings.json
|
||||
'';
|
||||
|
||||
# ── Pi renderer ───────────────────────────────────────────────
|
||||
#
|
||||
# This renderer produces:
|
||||
# AGENTS.md — concatenated agent descriptions + specialist listing
|
||||
# SYSTEM.md — primary agent's system prompt (replaces Pi default)
|
||||
# agents/{name}.md — one per agent for pi-subagents (YAML frontmatter + prompt)
|
||||
#
|
||||
# The agents/ files use pi-subagents frontmatter format:
|
||||
# name, description, tools, extensions, model, thinking, skill,
|
||||
# output, defaultReads, defaultProgress, interactive, maxSubagentDepth
|
||||
|
||||
renderForPi = {
|
||||
pkgs,
|
||||
canonical,
|
||||
modelOverrides ? {},
|
||||
primaryAgent ? null,
|
||||
codingRules ? null,
|
||||
}: let
|
||||
# Import coding-rules lib for concatRulesMd when codingRules is provided
|
||||
codingRulesLib = import ./coding-rules.nix {inherit lib;};
|
||||
# Find the primary agent (there should be exactly one).
|
||||
primaryAgents = lib.filterAttrs (_: a: a.mode == "primary") canonical;
|
||||
primaryNames = lib.attrNames primaryAgents;
|
||||
primaryName =
|
||||
if primaryAgent != null
|
||||
then primaryAgent
|
||||
else if primaryNames == []
|
||||
then throw "lib.agents.renderForPi: no primary agent found"
|
||||
else builtins.head primaryNames;
|
||||
primary = builtins.getAttr primaryName primaryAgents;
|
||||
|
||||
# Subagents for the specialist listing.
|
||||
subagents = lib.filterAttrs (_: a: a.mode != "primary") canonical;
|
||||
|
||||
# ── Permission → Pi tool mapping ──────────────────────────────
|
||||
#
|
||||
# Pi built-in tools: read, bash, edit, write, grep, find, ls,
|
||||
# mcp, subagent, web_search, fetch_content, etc.
|
||||
# Canonical tools: bash, edit, webfetch, websearch, question, external_directory
|
||||
#
|
||||
# We map canonical permissions to Pi's tool list.
|
||||
# intent=allow → include tool; intent=deny → exclude; intent=ask → include (Pi has no ask granularity)
|
||||
# When specific allow rules exist, the tool is always included (Pi can't restrict by pattern).
|
||||
|
||||
piToolsForAgent = agent: let
|
||||
perms = agent.permissions or {};
|
||||
tools = [];
|
||||
# Always available: read (no permission concept in Pi)
|
||||
addIf = tool: section:
|
||||
if section.intent == "allow" || section.intent == "ask"
|
||||
then [tool]
|
||||
else [];
|
||||
# bash → bash
|
||||
withBash = tools ++ (addIf "bash" (perms.bash or {intent = "ask";}));
|
||||
# edit → edit
|
||||
withEdit = withBash ++ (addIf "edit" (perms.edit or {intent = "deny";}));
|
||||
# webfetch → fetch_content
|
||||
withFetch = withEdit ++ (addIf "fetch_content" (perms.webfetch or {intent = "deny";}));
|
||||
# websearch → web_search
|
||||
withSearch = withFetch ++ (addIf "web_search" (perms.websearch or {intent = "deny";}));
|
||||
in
|
||||
lib.unique (withSearch ++ ["read" "grep" "find" "ls"]);
|
||||
|
||||
# ── Build YAML frontmatter for pi-subagents .md files ──────────
|
||||
mkPiFrontmatter = name: agent: let
|
||||
tools = piToolsForAgent agent;
|
||||
descLine = "description: \"${agent.description}\"";
|
||||
toolsLine = "tools: ${lib.concatStringsSep ", " tools}";
|
||||
model =
|
||||
if modelOverrides ? ${name}
|
||||
then "model: ${modelOverrides.${name}}"
|
||||
else "";
|
||||
skillsLine =
|
||||
if (agent ? skills) && agent.skills != []
|
||||
then "skill: ${lib.concatStringsSep ", " agent.skills}"
|
||||
else "";
|
||||
in
|
||||
"---\n"
|
||||
+ "name: ${name}\n"
|
||||
+ "${descLine}\n"
|
||||
+ "${toolsLine}\n"
|
||||
+ (lib.optionalString (model != "") "${model}\n")
|
||||
+ (lib.optionalString (skillsLine != "") "${skillsLine}\n")
|
||||
+ "---\n";
|
||||
|
||||
mkPiAgentContent = name: agent:
|
||||
(mkPiFrontmatter name agent) + agent.systemPrompt;
|
||||
|
||||
piAgentFiles = renderAgentFiles pkgs canonical mkPiAgentContent "pi-agent-files";
|
||||
|
||||
# ── Build AGENTS.md content ───────────────────────────────────
|
||||
primaryDn = primary.display_name or primaryName;
|
||||
specialistEntries = let
|
||||
mkEntry = name: agent: let
|
||||
dn = agent.display_name or name;
|
||||
in
|
||||
"- **" + dn + "**: " + agent.description;
|
||||
in
|
||||
lib.mapAttrsToList mkEntry subagents;
|
||||
# ── Coding rules section (optional) ────────────────────────
|
||||
# When codingRules is provided, append selected rules to AGENTS.md.
|
||||
# codingRules attrset: { agents, languages, concerns, frameworks }
|
||||
codingRulesSection =
|
||||
if codingRules != null
|
||||
then let
|
||||
section = codingRulesLib.mkRulesMdSection codingRules;
|
||||
in
|
||||
if section != ""
|
||||
then "\n" + section
|
||||
else ""
|
||||
else "";
|
||||
|
||||
agentsMd =
|
||||
"# Agent Instructions\n"
|
||||
+ "\n"
|
||||
+ "## "
|
||||
+ primaryDn
|
||||
+ "\n"
|
||||
+ "\n"
|
||||
+ primary.description
|
||||
+ "\n"
|
||||
+ "\n"
|
||||
+ (
|
||||
if subagents == {}
|
||||
then ""
|
||||
else "## Available Specialists\n\n" + lib.concatStringsSep "\n" specialistEntries + "\n"
|
||||
)
|
||||
+ codingRulesSection;
|
||||
|
||||
agentsMdFile = pkgs.writeText "AGENTS.md" agentsMd;
|
||||
systemMdFile = pkgs.writeText "SYSTEM.md" primary.systemPrompt;
|
||||
in
|
||||
pkgs.runCommand "pi-agents" {} ''
|
||||
mkdir -p $out/agents
|
||||
cp ${agentsMdFile} $out/AGENTS.md
|
||||
cp ${systemMdFile} $out/SYSTEM.md
|
||||
cp -r ${piAgentFiles}/* $out/agents/
|
||||
'';
|
||||
|
||||
# ── renderForTool dispatcher ──────────────────────────────────
|
||||
#
|
||||
# Dispatches to the correct renderer by tool name.
|
||||
# tool: "opencode" | "claude-code" | "pi"
|
||||
|
||||
renderForTool = {
|
||||
pkgs,
|
||||
agentsInput,
|
||||
tool,
|
||||
modelOverrides ? {},
|
||||
codingRules ? null,
|
||||
}: let
|
||||
canonical = agentsInput.lib.loadAgents;
|
||||
in
|
||||
if tool == "opencode"
|
||||
then
|
||||
agentsLib.renderForOpencode {
|
||||
inherit pkgs canonical modelOverrides;
|
||||
}
|
||||
else if tool == "claude-code"
|
||||
then
|
||||
agentsLib.renderForClaudeCode {
|
||||
inherit pkgs canonical modelOverrides;
|
||||
}
|
||||
else if tool == "pi"
|
||||
then
|
||||
agentsLib.renderForPi {
|
||||
inherit pkgs canonical modelOverrides codingRules;
|
||||
}
|
||||
else throw "lib.agents.renderForTool: unknown tool '${tool}'. Must be opencode, claude-code, or pi.";
|
||||
|
||||
# ── shellHookForTool ──────────────────────────────────────────
|
||||
#
|
||||
# Generates a shellHook string for use in devShells that symlinks
|
||||
# rendered agent files into the project directory.
|
||||
#
|
||||
# Usage:
|
||||
# devShells.default = pkgs.mkShell {
|
||||
# shellHook = m3taLib.agents.shellHookForTool {
|
||||
# inherit pkgs;
|
||||
# agentsInput = inputs.agents;
|
||||
# tool = "opencode";
|
||||
# modelOverrides = { chiron = "anthropic/claude-sonnet-4"; };
|
||||
# };
|
||||
# };
|
||||
|
||||
shellHookForTool = {
|
||||
pkgs,
|
||||
agentsInput,
|
||||
tool,
|
||||
modelOverrides ? {},
|
||||
codingRules ? null,
|
||||
}: let
|
||||
rendered = agentsLib.renderForTool {
|
||||
inherit pkgs agentsInput tool modelOverrides codingRules;
|
||||
};
|
||||
in
|
||||
if tool == "opencode"
|
||||
then ''
|
||||
# Agent files for OpenCode
|
||||
mkdir -p .opencode/agents
|
||||
ln -sfn ${rendered}/* .opencode/agents/
|
||||
''
|
||||
else if tool == "claude-code"
|
||||
then ''
|
||||
# Agent files for Claude Code
|
||||
mkdir -p .claude/agents
|
||||
ln -sfn ${rendered}/.claude/agents/* .claude/agents/
|
||||
ln -sfn ${rendered}/.claude/settings.json .claude/settings.json
|
||||
''
|
||||
else if tool == "pi"
|
||||
then ''
|
||||
# Agent files for Pi
|
||||
ln -sfn ${rendered}/AGENTS.md AGENTS.md
|
||||
mkdir -p .pi
|
||||
ln -sfn ${rendered}/SYSTEM.md .pi/SYSTEM.md
|
||||
mkdir -p .pi/agents
|
||||
ln -sfn ${rendered}/agents/* .pi/agents/
|
||||
''
|
||||
else throw "lib.agents.shellHookForTool: unknown tool '${tool}'";
|
||||
};
|
||||
in
|
||||
agentsLib
|
||||
@@ -1,233 +0,0 @@
|
||||
# Coding rules management utilities
|
||||
#
|
||||
# This module provides functions to configure Opencode agent rules across
|
||||
# multiple projects. Rules are defined in the AGENTS repository and can be
|
||||
# selectively included based on language, framework, and concerns.
|
||||
#
|
||||
# Usage in your configuration:
|
||||
#
|
||||
# # In your flake or configuration:
|
||||
# let
|
||||
# m3taLib = inputs.m3ta-nixpkgs.lib.${system};
|
||||
#
|
||||
# rules = m3taLib.coding-rules.mkCodingRules {
|
||||
# agents = inputs.agents;
|
||||
# languages = [ "python" "typescript" ];
|
||||
# concerns = [ "coding-style" "naming" "documentation" ];
|
||||
# frameworks = [ "react" "fastapi" ];
|
||||
# };
|
||||
# in {
|
||||
# # Use in your devShell:
|
||||
# devShells.default = pkgs.mkShell {
|
||||
# shellHook = rules.shellHook;
|
||||
# inherit (rules) instructions;
|
||||
# };
|
||||
# }
|
||||
#
|
||||
# The shellHook creates:
|
||||
# - A `.opencode-rules/` symlink pointing to the AGENTS repository rules directory
|
||||
# - A `coding-rules.json` file with a $schema reference and instructions list
|
||||
# - (Optional) Appends coding rules to `AGENTS.md` for Pi agent discovery
|
||||
#
|
||||
# The instructions list contains paths relative to the project root, all prefixed
|
||||
# with `.opencode-rules/`, making them portable across different project locations.
|
||||
{lib}: let
|
||||
# Create Opencode rules configuration from AGENTS repository
|
||||
#
|
||||
# Args:
|
||||
# agents: Path to the AGENTS repository (non-flake input)
|
||||
# languages: Optional list of language-specific rules to include
|
||||
# (e.g., [ "python" "typescript" "rust" ])
|
||||
# concerns: Optional list of concern rules to include
|
||||
# Default: [ "coding-style" "naming" "documentation" "testing" "git-workflow" "project-structure" ]
|
||||
# frameworks: Optional list of framework-specific rules to include
|
||||
# (e.g., [ "react" "fastapi" "django" ])
|
||||
# extraInstructions: Optional list of additional instruction paths
|
||||
# (for custom rules outside standard locations)
|
||||
# forPi: Whether to also append rules to AGENTS.md for Pi agent (default: true)
|
||||
# Pi discovers AGENTS.md files by walking parent dirs + cwd and concatenates them.
|
||||
# When enabled, a delimited block is appended to (or created in) AGENTS.md.
|
||||
#
|
||||
# Returns:
|
||||
# An attribute set containing:
|
||||
# - shellHook: Bash code to create symlink and coding-rules.json
|
||||
# - instructions: List of rule file paths (relative to project root)
|
||||
#
|
||||
# Example:
|
||||
# mkCodingRules {
|
||||
# agents = inputs.agents;
|
||||
# languages = [ "python" ];
|
||||
# frameworks = [ "fastapi" ];
|
||||
# }
|
||||
# # Returns:
|
||||
# # {
|
||||
# # shellHook = "...";
|
||||
# # instructions = [
|
||||
# # ".opencode-rules/concerns/coding-style.md"
|
||||
# # ".opencode-rules/concerns/naming.md"
|
||||
# # ".opencode-rules/concerns/documentation.md"
|
||||
# # ".opencode-rules/concerns/testing.md"
|
||||
# # ".opencode-rules/concerns/git-workflow.md"
|
||||
# # ".opencode-rules/concerns/project-structure.md"
|
||||
# # ".opencode-rules/languages/python.md"
|
||||
# # ".opencode-rules/frameworks/fastapi.md"
|
||||
# # ];
|
||||
# # }
|
||||
mkCodingRules = {
|
||||
agents,
|
||||
languages ? [],
|
||||
concerns ? [
|
||||
"coding-style"
|
||||
"naming"
|
||||
"documentation"
|
||||
"testing"
|
||||
"git-workflow"
|
||||
"project-structure"
|
||||
],
|
||||
frameworks ? [],
|
||||
extraInstructions ? [],
|
||||
rulesDir ? ".opencode-rules",
|
||||
forPi ? false,
|
||||
}: let
|
||||
# Build instructions list by mapping concerns, languages, frameworks to their file paths
|
||||
# All paths are relative to project root via the rulesDir symlink
|
||||
instructions =
|
||||
(map (c: "${rulesDir}/concerns/${c}.md") concerns)
|
||||
++ (map (l: "${rulesDir}/languages/${l}.md") languages)
|
||||
++ (map (f: "${rulesDir}/frameworks/${f}.md") frameworks)
|
||||
++ extraInstructions;
|
||||
|
||||
# Generate JSON configuration for coding rules
|
||||
rulesConfig = {
|
||||
"$schema" = "https://opencode.ai/config.json";
|
||||
inherit instructions;
|
||||
};
|
||||
|
||||
# Pi rules content (concatenated markdown) — only computed when forPi is true
|
||||
piRulesSection =
|
||||
if forPi
|
||||
then mkRulesMdSection {inherit agents languages concerns frameworks;}
|
||||
else "";
|
||||
|
||||
# Bash snippet to append rules to AGENTS.md for Pi discovery.
|
||||
# Uses HTML comment markers for idempotent updates:
|
||||
# - Removes any existing CODING-RULES block
|
||||
# - Appends the new block
|
||||
# - Creates AGENTS.md if it doesn't exist
|
||||
# Note: Uses plain if-then-else instead of lib.optionalString to avoid
|
||||
# forcing the `lib` argument (which may come from import <nixpkgs/lib>)
|
||||
# when forPi is false.
|
||||
piShellHook =
|
||||
if forPi && piRulesSection != ""
|
||||
then ''
|
||||
# Pi agent: append coding rules to AGENTS.md
|
||||
if [ -f AGENTS.md ]; then
|
||||
# Remove existing coding-rules block (if any)
|
||||
sed -i '/<!-- CODING-RULES:START -->/,/<!-- CODING-RULES:END -->/d' AGENTS.md
|
||||
# Append new coding-rules block
|
||||
cat >> AGENTS.md <<'PIRULES_EOF'
|
||||
${piRulesSection}
|
||||
PIRULES_EOF
|
||||
else
|
||||
# Create AGENTS.md with just the coding rules
|
||||
cat > AGENTS.md <<'PIRULES_EOF'
|
||||
${piRulesSection}
|
||||
PIRULES_EOF
|
||||
fi
|
||||
''
|
||||
else "";
|
||||
in {
|
||||
inherit instructions;
|
||||
|
||||
# Shell hook to set up rules in the project
|
||||
# Creates a symlink to the AGENTS rules directory and generates coding-rules.json
|
||||
# Optionally appends rules to AGENTS.md for Pi agent discovery
|
||||
shellHook = ''
|
||||
# Create/update symlink to AGENTS rules directory
|
||||
ln -sfn ${agents}/rules ${rulesDir}
|
||||
|
||||
# Generate coding-rules.json configuration file
|
||||
cat > coding-rules.json <<'RULES_EOF'
|
||||
${builtins.toJSON rulesConfig}
|
||||
RULES_EOF
|
||||
|
||||
${piShellHook}
|
||||
'';
|
||||
};
|
||||
# Concatenate selected rule files from the AGENTS repository into a single
|
||||
# markdown string. Used by Pi (append to AGENTS.md) and could be used by
|
||||
# other tools that don't support an instructions list.
|
||||
#
|
||||
# Args:
|
||||
# agents: Path to the AGENTS repository (non-flake input)
|
||||
# languages: Optional list of language-specific rules to include
|
||||
# concerns: Optional list of concern rules to include
|
||||
# Default: [ "coding-style" "naming" "documentation" "testing" "git-workflow" "project-structure" ]
|
||||
# frameworks: Optional list of framework-specific rules to include
|
||||
#
|
||||
# Returns: A single concatenated markdown string with all selected rules.
|
||||
#
|
||||
# Example:
|
||||
# concatRulesMd {
|
||||
# agents = inputs.agents;
|
||||
# languages = [ "python" ];
|
||||
# concerns = [ "coding-style" ];
|
||||
# }
|
||||
# # Returns: "\n# Coding Style\n\n...python rules...\n"
|
||||
concatRulesMd = {
|
||||
agents,
|
||||
languages ? [],
|
||||
concerns ? [
|
||||
"coding-style"
|
||||
"naming"
|
||||
"documentation"
|
||||
"testing"
|
||||
"git-workflow"
|
||||
"project-structure"
|
||||
],
|
||||
frameworks ? [],
|
||||
}: let
|
||||
rulePaths =
|
||||
(map (c: {
|
||||
kind = "concerns";
|
||||
name = c;
|
||||
})
|
||||
concerns)
|
||||
++ (map (l: {
|
||||
kind = "languages";
|
||||
name = l;
|
||||
})
|
||||
languages)
|
||||
++ (map (f: {
|
||||
kind = "frameworks";
|
||||
name = f;
|
||||
})
|
||||
frameworks);
|
||||
|
||||
readRule = rule: builtins.readFile "${agents}/rules/${rule.kind}/${rule.name}.md";
|
||||
ruleContents = map readRule rulePaths;
|
||||
in
|
||||
lib.concatStringsSep "\n\n" ruleContents;
|
||||
|
||||
# Build a coding rules section suitable for appending to AGENTS.md.
|
||||
# Wraps concatRulesMd output with a header and HTML comment markers
|
||||
# for idempotent updates in project-level shellHooks.
|
||||
#
|
||||
# Args: Same as concatRulesMd
|
||||
#
|
||||
# Returns: A markdown string with start/end markers and a header.
|
||||
mkRulesMdSection = args: let
|
||||
content = concatRulesMd args;
|
||||
in
|
||||
if builtins.stringLength content == 0
|
||||
then ""
|
||||
else ''
|
||||
<!-- CODING-RULES:START -->
|
||||
# Coding Rules
|
||||
|
||||
${content}
|
||||
<!-- CODING-RULES:END -->
|
||||
'';
|
||||
in {
|
||||
inherit mkCodingRules concatRulesMd mkRulesMdSection;
|
||||
}
|
||||
@@ -7,9 +7,6 @@
|
||||
# Port management utilities
|
||||
ports = import ./ports.nix {inherit lib;};
|
||||
|
||||
# Coding rules injection utilities
|
||||
coding-rules = import ./coding-rules.nix {inherit lib;};
|
||||
|
||||
# Agent configuration management utilities
|
||||
agents = import ./agents.nix {inherit lib;};
|
||||
# Add more helper modules here as needed
|
||||
# example = import ./example.nix { inherit lib; };
|
||||
}
|
||||
|
||||
@@ -95,4 +95,19 @@
|
||||
# List of service names (strings)
|
||||
listServices = lib.attrNames ports;
|
||||
};
|
||||
|
||||
# Simple helper to get a port without host override
|
||||
# Useful when you don't need host-specific ports
|
||||
#
|
||||
# Args:
|
||||
# portsConfig: Same structure as mkPortHelpers
|
||||
# service: The service name (string)
|
||||
#
|
||||
# Returns:
|
||||
# Port number (int) or null if service not found
|
||||
#
|
||||
# Example:
|
||||
# getDefaultPort myPorts "nginx" # Returns default port only
|
||||
getDefaultPort = portsConfig: service:
|
||||
portsConfig.ports.${service} or null;
|
||||
}
|
||||
|
||||
@@ -13,13 +13,7 @@ home-manager/
|
||||
│ └── zellij-ps.nix
|
||||
└── coding/ # Development tools
|
||||
├── default.nix # Category aggregator
|
||||
├── editors.nix # Neovim + Zed configs
|
||||
├── opencode.nix # OpenCode non-agent config (theme, plugins, formatter)
|
||||
└── agents/ # Per-tool agent deployment (canonical TOML → rendered)
|
||||
├── default.nix
|
||||
├── opencode.nix # File-based agents + skills + context
|
||||
├── claude-code.nix # Claude Code agents + settings.json
|
||||
└── pi.nix # Pi AGENTS.md + SYSTEM.md
|
||||
└── editors.nix # Neovim + Zed configs
|
||||
```
|
||||
|
||||
## Where to Look
|
||||
@@ -30,16 +24,11 @@ home-manager/
|
||||
| Add coding module | `coding/<name>.nix`, import in `coding/default.nix` |
|
||||
| Add new category | Create `<category>/default.nix`, import in root `default.nix` |
|
||||
| Module with host ports | Import `../../lib/ports.nix`, use `mkPortHelpers` |
|
||||
| Add agent renderer | `coding/agents/<tool>.nix`, import in `coding/agents/default.nix` |
|
||||
|
||||
## Option Namespaces
|
||||
|
||||
- `cli.*` - CLI tools (e.g., `cli.zellij-ps.enable`)
|
||||
- `coding.editors.*` - Editor configs (e.g., `coding.editors.neovim.enable`)
|
||||
- `coding.opencode.*` - OpenCode non-agent config (theme, plugins, formatter)
|
||||
- `coding.agents.opencode.*` - OpenCode agent deployment (file-based agents)
|
||||
- `coding.agents.claude-code.*` - Claude Code agent deployment
|
||||
- `coding.agents.pi.*` - Pi agent deployment
|
||||
- `m3ta.ports.*` - Port management (shared with NixOS)
|
||||
|
||||
## Patterns
|
||||
@@ -83,153 +72,3 @@ config = mkMerge [
|
||||
| `generateEnvVars` | Available | Not available |
|
||||
| Output file | `~/.config/m3ta/ports.json` | `/etc/m3ta/ports.json` |
|
||||
| Package access | `pkgs.*` via overlay | `pkgs.*` via overlay |
|
||||
|
||||
## Agent Modules
|
||||
|
||||
Agent definitions are stored as canonical `agent.toml` + `system-prompt.md` in the
|
||||
[AGENTS repo](https://code.m3ta.dev/m3tam3re/AGENTS). Renderers in `lib/agents.nix`
|
||||
transform these into tool-specific configs. Each tool has its own HM sub-module
|
||||
under `coding/agents/`.
|
||||
|
||||
### OpenCode (`coding.agents.opencode`)
|
||||
|
||||
Renders file-based agents to `~/.config/opencode/agents/*.md`:
|
||||
|
||||
```nix
|
||||
coding.agents.opencode = {
|
||||
enable = true;
|
||||
agentsInput = inputs.agents;
|
||||
modelOverrides = {
|
||||
chiron = "anthropic/claude-sonnet-4";
|
||||
};
|
||||
externalSkills = [
|
||||
{ src = inputs.skills-anthropic; }
|
||||
];
|
||||
};
|
||||
```
|
||||
|
||||
**Options:** `enable`, `agentsInput`, `modelOverrides`, `externalSkills`
|
||||
|
||||
### Claude Code (`coding.agents.claude-code`)
|
||||
|
||||
Renders agents to `~/.claude/agents/*.md` + `~/.claude/settings.json`:
|
||||
|
||||
```nix
|
||||
coding.agents.claude-code = {
|
||||
enable = true;
|
||||
agentsInput = inputs.agents;
|
||||
modelOverrides = {};
|
||||
};
|
||||
```
|
||||
|
||||
**Options:** `enable`, `agentsInput`, `modelOverrides`
|
||||
|
||||
### Pi (`coding.agents.pi`)
|
||||
|
||||
Renders `AGENTS.md` + `SYSTEM.md` to `~/.pi/agent/`:
|
||||
|
||||
```nix
|
||||
coding.agents.pi = {
|
||||
enable = true;
|
||||
agentsInput = inputs.agents;
|
||||
};
|
||||
```
|
||||
|
||||
**Options:** `enable`, `agentsInput`
|
||||
|
||||
### Project-level usage
|
||||
|
||||
For per-project agent setup via `flake.nix` + `direnv`:
|
||||
|
||||
```nix
|
||||
m3taLib.agents.shellHookForTool {
|
||||
inherit pkgs;
|
||||
agentsInput = inputs.agents;
|
||||
tool = "opencode";
|
||||
modelOverrides = { chiron = "anthropic/claude-sonnet-4"; };
|
||||
};
|
||||
```
|
||||
|
||||
## Migration Guide (OpenCode agents)
|
||||
|
||||
The agent system was migrated from embedded `agents.json` to file-based canonical
|
||||
`agent.toml` definitions. Here is how to migrate your home-manager config.
|
||||
|
||||
### What changed
|
||||
|
||||
| Before | After |
|
||||
|--------|-------|
|
||||
| `coding.opencode.agentsInput` | `coding.agents.opencode.agentsInput` |
|
||||
| `coding.opencode.externalSkills` | `coding.agents.opencode.externalSkills` |
|
||||
| Agents embedded in `config.json` | File-based `~/.config/opencode/agents/*.md` |
|
||||
| Model hardcoded in `agents.json` | Per-machine `modelOverrides` |
|
||||
| `mkOpencodeRules` | `mkCodingRules` |
|
||||
|
||||
### Migration steps
|
||||
|
||||
**1. Update home-manager config:**
|
||||
|
||||
Move `agentsInput` and `externalSkills` from `coding.opencode` to `coding.agents.opencode`.
|
||||
Add `modelOverrides` with the models previously hardcoded in agents.json:
|
||||
|
||||
```nix
|
||||
# BEFORE (legacy):
|
||||
coding.opencode = {
|
||||
enable = true;
|
||||
agentsInput = inputs.agents;
|
||||
externalSkills = [{ src = inputs.skills-anthropic; }];
|
||||
ohMyOpencodeSettings = { ... };
|
||||
};
|
||||
|
||||
# AFTER (new):
|
||||
coding.opencode = {
|
||||
enable = true;
|
||||
ohMyOpencodeSettings = { ... };
|
||||
};
|
||||
|
||||
coding.agents.opencode = {
|
||||
enable = true;
|
||||
agentsInput = inputs.agents;
|
||||
externalSkills = [{ src = inputs.skills-anthropic; }];
|
||||
modelOverrides = {
|
||||
chiron = "zai-coding-plan/glm-5";
|
||||
"chiron-forge" = "zai-coding-plan/glm-5";
|
||||
};
|
||||
};
|
||||
```
|
||||
|
||||
**2. Run `home-manager switch`:**
|
||||
|
||||
```bash
|
||||
home-manager switch --flake .
|
||||
```
|
||||
|
||||
**3. Verify agents are deployed:**
|
||||
|
||||
```bash
|
||||
ls ~/.config/opencode/agents/
|
||||
# Should show: chiron.md chiron-forge.md hermes.md athena.md apollo.md calliope.md
|
||||
```
|
||||
|
||||
**4. Remove legacy files from AGENTS repo** (after confirming everything works):
|
||||
|
||||
```bash
|
||||
cd /home/m3tam3re/p/AI/AGENTS
|
||||
rm agents/agents.json
|
||||
rm prompts/chiron.txt prompts/chiron-forge.txt prompts/hermes.txt \
|
||||
prompts/athena.txt prompts/apollo.txt prompts/calliope.txt
|
||||
rmdir prompts/ # if empty
|
||||
# Also remove lib.agentsJson from flake.nix
|
||||
```
|
||||
|
||||
**5. Final cleanup:** After legacy files are removed from AGENTS repo,
|
||||
remove `lib.agentsJson` from the AGENTS `flake.nix` (it's only needed for
|
||||
backward compatibility during the transition).
|
||||
|
||||
### Key advantage of the new system
|
||||
|
||||
Prompt changes no longer require `home-manager switch`. Since agents are
|
||||
deployed as file-based `~/.config/opencode/agents/*.md` (symlinks to Nix store),
|
||||
you only need to edit the `system-prompt.md` in the AGENTS repo, commit, update
|
||||
the flake lock, and run `home-manager switch`. Or for local development, edit
|
||||
the file directly and restart the tool.
|
||||
|
||||
@@ -1,94 +0,0 @@
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}: {
|
||||
imports = [
|
||||
./shared/default.nix
|
||||
];
|
||||
|
||||
options.coding.agents.claude-code = let
|
||||
shared = import ./shared/shared-options.nix {inherit lib;};
|
||||
mcpCfg = config.programs.mcp or null;
|
||||
in
|
||||
with lib; {
|
||||
enable = mkEnableOption "Claude Code agent management via canonical agent.toml definitions";
|
||||
|
||||
agentsInput = shared.mkAgentsInputOption ''
|
||||
The `agents` flake input (your personal AGENTS repo).
|
||||
When set, agents are rendered from canonical agent.toml files
|
||||
and symlinked to ~/.claude/agents/.
|
||||
'';
|
||||
|
||||
modelOverrides = shared.mkModelOverridesOption;
|
||||
|
||||
externalSkills = shared.externalSkillsOption;
|
||||
|
||||
mcpServers = mkOption {
|
||||
type = types.attrsOf types.anything;
|
||||
default =
|
||||
if mcpCfg != null
|
||||
then mcpCfg.servers
|
||||
else {};
|
||||
defaultText = literalExpression "config.programs.mcp.servers";
|
||||
description = ''
|
||||
MCP server configurations for Claude Code.
|
||||
Merged into ~/.claude/settings.json alongside permissions.
|
||||
Automatically inherits from config.programs.mcp.servers.
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
config = with lib; let
|
||||
shared = import ./shared/shared-options.nix {inherit lib;};
|
||||
cfg = config.coding.agents.claude-code;
|
||||
agentsLib = (import ../../../../lib {inherit lib;}).agents;
|
||||
in
|
||||
mkIf cfg.enable (let
|
||||
# Rendered agents + permissions (only if agentsInput is set)
|
||||
rendered = mkIf (cfg.agentsInput != null) (
|
||||
agentsLib.renderForClaudeCode {
|
||||
inherit pkgs;
|
||||
canonical = cfg.agentsInput.lib.loadAgents;
|
||||
modelOverrides = cfg.modelOverrides;
|
||||
}
|
||||
);
|
||||
|
||||
# Merge MCP servers into the rendered settings.json.
|
||||
# The renderer produces { permissions: { allow, deny } }.
|
||||
# We add mcpServers on top.
|
||||
settingsJson =
|
||||
if cfg.agentsInput != null
|
||||
then let
|
||||
renderedSettings = builtins.fromJSON (builtins.readFile "${rendered}/.claude/settings.json");
|
||||
withMcp =
|
||||
if cfg.mcpServers != {}
|
||||
then renderedSettings // {mcpServers = cfg.mcpServers;}
|
||||
else renderedSettings;
|
||||
in
|
||||
pkgs.writeText "claude-settings.json" (builtins.toJSON withMcp)
|
||||
else if cfg.mcpServers != {}
|
||||
then pkgs.writeText "claude-settings.json" (builtins.toJSON {mcpServers = cfg.mcpServers;})
|
||||
else null;
|
||||
in {
|
||||
# Rendered agent files symlinked to ~/.claude/agents/
|
||||
home.file.".claude/agents" = mkIf (cfg.agentsInput != null) {
|
||||
source = "${rendered}/.claude/agents";
|
||||
};
|
||||
|
||||
# Skills (merged from personal AGENTS repo + optional external skills)
|
||||
home.file.".claude/skills" = mkIf (cfg.agentsInput != null) {
|
||||
source = cfg.agentsInput.lib.mkOpencodeSkills {
|
||||
inherit pkgs;
|
||||
customSkills = "${cfg.agentsInput}/skills";
|
||||
externalSkills = shared.mapExternalSkills cfg.externalSkills;
|
||||
};
|
||||
};
|
||||
|
||||
# Rendered settings.json with permissions + MCP servers
|
||||
home.file.".claude/settings.json" = mkIf (settingsJson != null) {
|
||||
source = "${settingsJson}";
|
||||
};
|
||||
});
|
||||
}
|
||||
@@ -1,39 +0,0 @@
|
||||
# Per-tool agent sub-modules
|
||||
# Each module handles rendering canonical agent.toml definitions
|
||||
# for a specific AI coding tool.
|
||||
#
|
||||
# Also provides the shared coding.agents.skills submodule that writes
|
||||
# ~/.agents/skills — the central skills directory used by Pi, OpenCode, etc.
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}: let
|
||||
shared = import ./shared/shared-options.nix {inherit lib;};
|
||||
cfg = config.coding.agents.skills;
|
||||
mkIf = lib.mkIf;
|
||||
in {
|
||||
imports = [
|
||||
./opencode.nix
|
||||
./claude-code.nix
|
||||
./pi.nix
|
||||
];
|
||||
|
||||
options.coding.agents.skills = {
|
||||
agentsInput = shared.mkAgentsInputOption ''
|
||||
The `agents` flake input (your personal AGENTS repo).
|
||||
When set, skills are symlinked to ~/.agents/skills.
|
||||
'';
|
||||
|
||||
externalSkills = shared.externalSkillsOption;
|
||||
};
|
||||
|
||||
config = mkIf (cfg.agentsInput != null) {
|
||||
home.file.".agents/skills".source = cfg.agentsInput.lib.mkOpencodeSkills {
|
||||
inherit pkgs;
|
||||
customSkills = "${cfg.agentsInput}/skills";
|
||||
externalSkills = shared.mapExternalSkills cfg.externalSkills;
|
||||
};
|
||||
};
|
||||
}
|
||||
@@ -1,54 +0,0 @@
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}: {
|
||||
imports = [
|
||||
./shared/default.nix
|
||||
];
|
||||
|
||||
options.coding.agents.opencode = let
|
||||
shared = import ./shared/shared-options.nix {inherit lib;};
|
||||
in
|
||||
with lib; {
|
||||
enable = mkEnableOption "OpenCode agent management via canonical agent.toml definitions";
|
||||
|
||||
agentsInput = shared.mkAgentsInputOption ''
|
||||
The `agents` flake input (your personal AGENTS repo).
|
||||
When set, agents are rendered from canonical agent.toml files
|
||||
and symlinked to ~/.config/opencode/agents/.
|
||||
'';
|
||||
|
||||
modelOverrides = shared.mkModelOverridesOption;
|
||||
};
|
||||
|
||||
config = with lib; let
|
||||
shared = import ./shared/shared-options.nix {inherit lib;};
|
||||
cfg = config.coding.agents.opencode;
|
||||
in
|
||||
mkIf cfg.enable {
|
||||
# Rendered agent files symlinked to ~/.config/opencode/agents/
|
||||
xdg.configFile."opencode/agents" = let
|
||||
agentsLib = (import ../../../../lib {inherit lib;}).agents;
|
||||
in
|
||||
mkIf (cfg.agentsInput != null) {
|
||||
source = agentsLib.renderForOpencode {
|
||||
inherit pkgs;
|
||||
canonical = cfg.agentsInput.lib.loadAgents;
|
||||
modelOverrides = cfg.modelOverrides;
|
||||
};
|
||||
};
|
||||
|
||||
# Static config dirs from AGENTS repo
|
||||
xdg.configFile."opencode/context" = mkIf (cfg.agentsInput != null) {
|
||||
source = "${cfg.agentsInput}/context";
|
||||
};
|
||||
xdg.configFile."opencode/commands" = mkIf (cfg.agentsInput != null) {
|
||||
source = "${cfg.agentsInput}/commands";
|
||||
};
|
||||
xdg.configFile."opencode/prompts" = mkIf (cfg.agentsInput != null) {
|
||||
source = "${cfg.agentsInput}/prompts";
|
||||
};
|
||||
};
|
||||
}
|
||||
@@ -1,357 +0,0 @@
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}: {
|
||||
imports = [
|
||||
./shared/default.nix
|
||||
];
|
||||
|
||||
options.coding.agents.pi = let
|
||||
shared = import ./shared/shared-options.nix {inherit lib;};
|
||||
mcpCfg = config.programs.mcp or null;
|
||||
in
|
||||
with lib; {
|
||||
enable = mkEnableOption "Pi agent management via canonical agent.toml definitions";
|
||||
|
||||
mcpServers = mkOption {
|
||||
type = types.attrsOf types.anything;
|
||||
default =
|
||||
if mcpCfg != null
|
||||
then mcpCfg.servers
|
||||
else {};
|
||||
defaultText = literalExpression "config.programs.mcp.servers";
|
||||
description = ''
|
||||
MCP server configurations for Pi (pi-mcp-adapter).
|
||||
Written to ~/.pi/agent/mcp.json.
|
||||
Automatically inherits from config.programs.mcp.servers.
|
||||
'';
|
||||
};
|
||||
|
||||
agentsInput = shared.mkAgentsInputOption ''
|
||||
The `agents` flake input (your personal AGENTS repo).
|
||||
When set, the primary agent's system prompt is rendered as SYSTEM.md,
|
||||
all agents are listed in AGENTS.md, and subagent .md files are deployed.
|
||||
'';
|
||||
|
||||
modelOverrides = shared.mkModelOverridesOption;
|
||||
|
||||
primaryAgent = mkOption {
|
||||
type = types.nullOr types.str;
|
||||
default = null;
|
||||
description = ''
|
||||
Override which canonical agent is used as primary for SYSTEM.md.
|
||||
When null, the first agent with mode="primary" is used.
|
||||
'';
|
||||
};
|
||||
|
||||
codingRules = mkOption {
|
||||
type = types.nullOr (types.submodule {
|
||||
options = {
|
||||
languages = mkOption {
|
||||
type = types.listOf types.str;
|
||||
default = [];
|
||||
description = ''
|
||||
Language-specific coding rules to include
|
||||
(e.g. [ "python" "typescript" "nix" ]).
|
||||
Rule files are read from the AGENTS repo's rules/languages/ directory.
|
||||
'';
|
||||
};
|
||||
|
||||
concerns = mkOption {
|
||||
type = types.listOf types.str;
|
||||
default = [
|
||||
"coding-style"
|
||||
"naming"
|
||||
"documentation"
|
||||
"testing"
|
||||
"git-workflow"
|
||||
"project-structure"
|
||||
];
|
||||
description = ''
|
||||
Concern rules to include from the AGENTS repo's rules/concerns/ directory.
|
||||
'';
|
||||
};
|
||||
|
||||
frameworks = mkOption {
|
||||
type = types.listOf types.str;
|
||||
default = [];
|
||||
description = ''
|
||||
Framework-specific coding rules to include
|
||||
(e.g. [ "react" "fastapi" ]).
|
||||
Rule files are read from the AGENTS repo's rules/frameworks/ directory.
|
||||
'';
|
||||
};
|
||||
};
|
||||
});
|
||||
default = null;
|
||||
description = ''
|
||||
Coding rules to inject into ~/.pi/agent/AGENTS.md.
|
||||
Rules are read from the AGENTS repository and appended as markdown sections.
|
||||
Requires agentsInput to be set.
|
||||
'';
|
||||
example = literalExpression ''
|
||||
{
|
||||
languages = [ "python" "typescript" ];
|
||||
concerns = [ "coding-style" "testing" ];
|
||||
frameworks = [ "fastapi" ];
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
settings = mkOption {
|
||||
type = types.submodule {
|
||||
freeformType = types.attrsOf types.anything;
|
||||
options = {
|
||||
packages = mkOption {
|
||||
type = types.listOf types.str;
|
||||
default = [];
|
||||
description = ''
|
||||
Pi packages to install (npm:, git:, or local paths).
|
||||
These are written to ~/.pi/agent/settings.json.
|
||||
'';
|
||||
};
|
||||
|
||||
defaultProvider = mkOption {
|
||||
type = types.nullOr types.str;
|
||||
default = null;
|
||||
description = "Default LLM provider (e.g. 'anthropic', 'openai', 'zai').";
|
||||
};
|
||||
|
||||
defaultModel = mkOption {
|
||||
type = types.nullOr types.str;
|
||||
default = null;
|
||||
description = "Default model ID.";
|
||||
};
|
||||
|
||||
defaultThinkingLevel = mkOption {
|
||||
type = types.nullOr (types.enum ["off" "minimal" "low" "medium" "high" "xhigh"]);
|
||||
default = null;
|
||||
description = "Default extended thinking level.";
|
||||
};
|
||||
|
||||
theme = mkOption {
|
||||
type = types.nullOr types.str;
|
||||
default = null;
|
||||
description = "Pi theme name.";
|
||||
};
|
||||
|
||||
hideThinkingBlock = mkOption {
|
||||
type = types.nullOr types.bool;
|
||||
default = null;
|
||||
description = "Hide thinking blocks in output.";
|
||||
};
|
||||
|
||||
quietStartup = mkOption {
|
||||
type = types.nullOr types.bool;
|
||||
default = null;
|
||||
description = "Hide startup header.";
|
||||
};
|
||||
|
||||
compaction = mkOption {
|
||||
type = types.nullOr (types.submodule {
|
||||
options = {
|
||||
enabled = mkOption {
|
||||
type = types.nullOr types.bool;
|
||||
default = null;
|
||||
};
|
||||
reserveTokens = mkOption {
|
||||
type = types.nullOr types.int;
|
||||
default = null;
|
||||
};
|
||||
keepRecentTokens = mkOption {
|
||||
type = types.nullOr types.int;
|
||||
default = null;
|
||||
};
|
||||
};
|
||||
});
|
||||
default = null;
|
||||
description = "Auto-compaction settings.";
|
||||
};
|
||||
|
||||
enabledModels = mkOption {
|
||||
type = types.nullOr (types.listOf types.str);
|
||||
default = null;
|
||||
description = "Model patterns for Ctrl+P cycling.";
|
||||
};
|
||||
|
||||
sessionDir = mkOption {
|
||||
type = types.nullOr types.str;
|
||||
default = null;
|
||||
description = "Directory where session files are stored.";
|
||||
};
|
||||
|
||||
extensions = mkOption {
|
||||
type = types.listOf types.str;
|
||||
default = [];
|
||||
description = "Local extension file paths or directories.";
|
||||
};
|
||||
|
||||
skills = mkOption {
|
||||
type = types.listOf types.str;
|
||||
default = [];
|
||||
description = "Local skill file paths or directories.";
|
||||
};
|
||||
};
|
||||
};
|
||||
default = {};
|
||||
description = ''
|
||||
Pi settings written to ~/.pi/agent/settings.json.
|
||||
Only non-null values are included in the generated JSON.
|
||||
See pi docs/settings.md for all options.
|
||||
'';
|
||||
};
|
||||
|
||||
# ── Pi Guardrails ─────────────────────────────────────────────
|
||||
guardrails = mkOption {
|
||||
type = types.nullOr (types.submodule {
|
||||
options = {
|
||||
enable =
|
||||
mkEnableOption
|
||||
("Generate ~/.pi/agent/extensions/guardrails.json for pi-guardrails. "
|
||||
+ "Adds @aliou/pi-guardrails to packages automatically.");
|
||||
|
||||
config = mkOption {
|
||||
type = types.attrsOf types.anything;
|
||||
default = {};
|
||||
description = ''
|
||||
Guardrails configuration written to ~/.pi/agent/extensions/guardrails.json.
|
||||
See https://github.com/aliou/pi-guardrails for config schema.
|
||||
|
||||
IMPORTANT: Path access checks are lexical (not symlink-safe).
|
||||
Local project .pi/extensions/guardrails.json can override same rule IDs
|
||||
(memory > local > global > defaults). For immutable global policies,
|
||||
consider a wrapper or upstream patch.
|
||||
'';
|
||||
};
|
||||
};
|
||||
});
|
||||
default = null;
|
||||
description = ''
|
||||
Pi Guardrails security configuration.
|
||||
Generates ~/.pi/agent/extensions/guardrails.json when enabled.
|
||||
The @aliou/pi-guardrails package is added to settings.packages automatically.
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
config = with lib; let
|
||||
shared = import ./shared/shared-options.nix {inherit lib;};
|
||||
cfg = config.coding.agents.pi;
|
||||
in
|
||||
mkIf cfg.enable (let
|
||||
# Build settings.json by filtering out null values recursively
|
||||
filterNulls = attrs:
|
||||
lib.filterAttrs (_: v: v != null) (
|
||||
builtins.mapAttrs (_: v:
|
||||
if builtins.isAttrs v
|
||||
then let
|
||||
filtered = filterNulls v;
|
||||
in
|
||||
if filtered == {}
|
||||
then null
|
||||
else filtered
|
||||
else v)
|
||||
attrs
|
||||
);
|
||||
|
||||
# Base settings (already filtered)
|
||||
piSettings = filterNulls cfg.settings;
|
||||
|
||||
# Guardrails package to inject when guardrails is enabled
|
||||
guardrailsPackage = "npm:@aliou/pi-guardrails@0.11.1";
|
||||
|
||||
# Guardrails config (only when guardrails is enabled)
|
||||
guardrailsJson =
|
||||
if (cfg.guardrails != null && cfg.guardrails.enable)
|
||||
then builtins.toJSON cfg.guardrails.config
|
||||
else null;
|
||||
|
||||
# Merge guardrails package into settings.packages when guardrails is enabled
|
||||
piSettingsWithGuardrails = let
|
||||
baseSettings = cfg.settings;
|
||||
basePackages = baseSettings.packages or [];
|
||||
hasGuardrailsPackage =
|
||||
lib.any
|
||||
(p:
|
||||
lib.hasPrefix "npm:@aliou/pi-guardrails" p
|
||||
|| (lib.hasPrefix "git:" p && lib.hasSuffix "/pi-guardrails" p))
|
||||
basePackages;
|
||||
packagesWithGuardrails =
|
||||
if (cfg.guardrails != null && cfg.guardrails.enable && !hasGuardrailsPackage)
|
||||
then basePackages ++ [guardrailsPackage]
|
||||
else basePackages;
|
||||
in
|
||||
if packagesWithGuardrails != basePackages
|
||||
then filterNulls (baseSettings // {packages = packagesWithGuardrails;})
|
||||
else piSettings;
|
||||
|
||||
# Coding rules config for renderForPi (only when both agentsInput and codingRules are set)
|
||||
piCodingRules =
|
||||
if cfg.agentsInput != null && cfg.codingRules != null
|
||||
then cfg.codingRules // {agents = cfg.agentsInput;}
|
||||
else null;
|
||||
|
||||
# Rendered agents (only computed when agentsInput is set)
|
||||
rendered =
|
||||
if cfg.agentsInput != null
|
||||
then
|
||||
(import ../../../../lib {inherit lib;}).agents.renderForPi {
|
||||
inherit pkgs;
|
||||
canonical = cfg.agentsInput.lib.loadAgents;
|
||||
modelOverrides = cfg.modelOverrides;
|
||||
primaryAgent = cfg.primaryAgent;
|
||||
codingRules = piCodingRules;
|
||||
}
|
||||
else null;
|
||||
|
||||
# Dynamic home.file entries for agent .md files
|
||||
agentFiles =
|
||||
if cfg.agentsInput != null
|
||||
then let
|
||||
agentNames = builtins.attrNames cfg.agentsInput.lib.loadAgents;
|
||||
in
|
||||
builtins.listToAttrs (
|
||||
map (name: {
|
||||
name = ".pi/agent/agents/${name}.md";
|
||||
value = {source = "${rendered}/agents/${name}.md";};
|
||||
})
|
||||
agentNames
|
||||
)
|
||||
else {};
|
||||
in {
|
||||
home.file = mkMerge [
|
||||
# ── MCP servers from programs.mcp → ~/.pi/agent/mcp.json ───────
|
||||
(mkIf (cfg.mcpServers != {}) {
|
||||
".pi/agent/mcp.json".text = builtins.toJSON {mcpServers = cfg.mcpServers;};
|
||||
".pi/agent/mcp.json".force = true;
|
||||
})
|
||||
|
||||
# ── ~/.pi/agent/settings.json ──────────────────────────────────
|
||||
{
|
||||
".pi/agent/settings.json".text = builtins.toJSON piSettingsWithGuardrails;
|
||||
".pi/agent/settings.json".force = true;
|
||||
}
|
||||
|
||||
# ── pi-guardrails config ─────────────────────────────────────
|
||||
(mkIf (guardrailsJson != null) {
|
||||
".pi/agent/extensions/guardrails.json".text = guardrailsJson;
|
||||
".pi/agent/extensions/guardrails.json".force = true;
|
||||
})
|
||||
|
||||
# ── AGENTS.md — agent descriptions and specialist listing ──────
|
||||
(mkIf (cfg.agentsInput != null) {
|
||||
".pi/agent/AGENTS.md".source = "${rendered}/AGENTS.md";
|
||||
})
|
||||
|
||||
# ── SYSTEM.md — primary agent's system prompt ──────────────────
|
||||
(mkIf (cfg.agentsInput != null) {
|
||||
".pi/agent/SYSTEM.md".source = "${rendered}/SYSTEM.md";
|
||||
})
|
||||
|
||||
# ── Agents — pi-subagents .md files ────────────────────────────
|
||||
agentFiles
|
||||
];
|
||||
});
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
# Shared agent module exports
|
||||
# Imports all shared modules for the coding.agents namespace.
|
||||
{
|
||||
imports = [
|
||||
./git-identity.nix
|
||||
];
|
||||
}
|
||||
@@ -1,64 +0,0 @@
|
||||
# Git identity module for agent commits.
|
||||
# Sets GIT_AUTHOR_*, GIT_COMMITTER_*, and GIT_SSH_COMMAND environment variables.
|
||||
{
|
||||
pkgs,
|
||||
lib,
|
||||
config,
|
||||
...
|
||||
}: let
|
||||
cfg = config.coding.agents.gitIdentity;
|
||||
in {
|
||||
options.coding.agents.gitIdentity = {
|
||||
enable = lib.mkEnableOption ''
|
||||
Agent Git identity for commits. When enabled, sets GIT_AUTHOR_* and
|
||||
GIT_COMMITTER_* environment variables for consistent bot identity.
|
||||
'';
|
||||
|
||||
name = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = "m3ta-chiron";
|
||||
description = "Git user name for agent commits.";
|
||||
example = "m3ta-chiron";
|
||||
};
|
||||
|
||||
email = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = "m3ta-chiron@agentmail.to";
|
||||
description = "Git email for agent commits.";
|
||||
example = "m3ta-chiron@agentmail.to";
|
||||
};
|
||||
|
||||
signingKey = lib.mkOption {
|
||||
type = lib.types.nullOr lib.types.path;
|
||||
default = null;
|
||||
description = ''
|
||||
Optional GPG signing key for verified commits.
|
||||
Set to null to disable signing.
|
||||
'';
|
||||
example = "/home/user/.gnupg/sign_key.gpg";
|
||||
};
|
||||
|
||||
sshKey = lib.mkOption {
|
||||
type = lib.types.path;
|
||||
description = ''
|
||||
Path to SSH private key for git push authentication.
|
||||
Use agenix-managed paths like /run/agenix/m3ta-chiron-ssh-key
|
||||
for secure secret management.
|
||||
'';
|
||||
example = "/run/agenix/m3ta-chiron-ssh-key";
|
||||
};
|
||||
};
|
||||
|
||||
config = lib.mkIf cfg.enable {
|
||||
home.sessionVariables = {
|
||||
# Git author/committer identity
|
||||
GIT_AUTHOR_NAME = cfg.name;
|
||||
GIT_AUTHOR_EMAIL = cfg.email;
|
||||
GIT_COMMITTER_NAME = cfg.name;
|
||||
GIT_COMMITTER_EMAIL = cfg.email;
|
||||
|
||||
# SSH command for git push
|
||||
GIT_SSH_COMMAND = "ssh -i ${cfg.sshKey} -o IdentitiesOnly=yes -o StrictHostKeyChecking=accept-new";
|
||||
};
|
||||
};
|
||||
}
|
||||
@@ -1,77 +0,0 @@
|
||||
# Shared option definitions for agent modules.
|
||||
# Prevents copy-pasting the externalSkills submodule across opencode/claude-code/pi.
|
||||
{lib}: let
|
||||
inherit (lib) mkOption mkEnableOption types literalExpression;
|
||||
in {
|
||||
# Common agentsInput option used by all agent modules.
|
||||
mkAgentsInputOption = description:
|
||||
mkOption {
|
||||
type = types.nullOr types.anything;
|
||||
default = null;
|
||||
inherit description;
|
||||
};
|
||||
|
||||
# Common modelOverrides option.
|
||||
mkModelOverridesOption = mkOption {
|
||||
type = types.attrsOf types.str;
|
||||
default = {};
|
||||
description = ''
|
||||
Per-agent model overrides. Maps agent slug to model string.
|
||||
Example: { chiron = "anthropic/claude-sonnet-4"; }
|
||||
'';
|
||||
example = literalExpression ''
|
||||
{
|
||||
chiron = "anthropic/claude-sonnet-4";
|
||||
"chiron-forge" = "anthropic/claude-sonnet-4";
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
# External skills submodule — used by opencode, claude-code, and pi modules.
|
||||
externalSkillsOption = mkOption {
|
||||
type = types.listOf (types.submodule {
|
||||
options = {
|
||||
src = mkOption {
|
||||
type = types.anything;
|
||||
description = "Flake input pointing to a skills repository root.";
|
||||
};
|
||||
skillsDir = mkOption {
|
||||
type = types.str;
|
||||
default = "skills";
|
||||
description = ''
|
||||
Subdirectory inside src that contains skill folders.
|
||||
'';
|
||||
};
|
||||
selectSkills = mkOption {
|
||||
type = types.nullOr (types.listOf types.str);
|
||||
default = null;
|
||||
description = ''
|
||||
List of skill names to cherry-pick from this source.
|
||||
null means include every skill found in skillsDir.
|
||||
'';
|
||||
};
|
||||
};
|
||||
});
|
||||
default = [];
|
||||
description = ''
|
||||
External skill sources passed to mkOpencodeSkills.
|
||||
Each entry maps directly to an element of the externalSkills
|
||||
list accepted by the AGENTS flake's lib.mkOpencodeSkills.
|
||||
'';
|
||||
example = literalExpression ''
|
||||
[
|
||||
{ src = inputs.skills-anthropic; selectSkills = [ "claude-api" ]; }
|
||||
{ src = inputs.basecamp; }
|
||||
]
|
||||
'';
|
||||
};
|
||||
|
||||
# Helper to map externalSkills from module config to mkOpencodeSkills format.
|
||||
mapExternalSkills = cfgEntries:
|
||||
map (
|
||||
entry:
|
||||
{inherit (entry) src skillsDir;}
|
||||
// lib.optionalAttrs (entry.selectSkills != null) {inherit (entry) selectSkills;}
|
||||
)
|
||||
cfgEntries;
|
||||
}
|
||||
@@ -2,7 +2,5 @@
|
||||
{
|
||||
imports = [
|
||||
./editors.nix
|
||||
./opencode.nix
|
||||
./agents
|
||||
];
|
||||
}
|
||||
|
||||
@@ -1,16 +1,36 @@
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
options,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
with lib; let
|
||||
cfg = config.coding.editors;
|
||||
# home-manager 26.05+ renamed extraLuaConfig → initLua.
|
||||
# On stable 25.11 initLua does not exist; fall back to extraLuaConfig.
|
||||
hasInitLua = options.programs.neovim ? initLua;
|
||||
lazyVimConfig = ''
|
||||
in {
|
||||
options.coding.editors = {
|
||||
neovim = {
|
||||
enable = mkEnableOption "neovim with LazyVim configuration";
|
||||
};
|
||||
|
||||
zed = {
|
||||
enable = mkEnableOption "zed editor with custom configuration";
|
||||
};
|
||||
};
|
||||
|
||||
config = mkMerge [
|
||||
# Neovim configuration
|
||||
(mkIf cfg.neovim.enable {
|
||||
programs.neovim = {
|
||||
enable = true;
|
||||
defaultEditor = true;
|
||||
viAlias = true;
|
||||
vimAlias = true;
|
||||
vimdiffAlias = true;
|
||||
withNodeJs = true;
|
||||
withPython3 = true;
|
||||
|
||||
# This is your init.lua content
|
||||
extraLuaConfig = ''
|
||||
-- Bootstrap lazy.nvim
|
||||
local lazypath = vim.fn.stdpath("data") .. "/lazy/lazy.nvim"
|
||||
if not vim.loop.fs_stat(lazypath) then
|
||||
@@ -24,6 +44,7 @@ with lib; let
|
||||
})
|
||||
end
|
||||
vim.opt.rtp:prepend(lazypath)
|
||||
|
||||
-- Bootstrap LazyVim via lazy.nvim
|
||||
-- Docs: https://github.com/folke/lazy.nvim and https://www.lazyvim.org/
|
||||
require("lazy").setup({
|
||||
@@ -51,36 +72,9 @@ with lib; let
|
||||
vim.o.termguicolors = true
|
||||
vim.cmd.colorscheme("dracula")
|
||||
'';
|
||||
in {
|
||||
options.coding.editors = {
|
||||
neovim = {
|
||||
enable = mkEnableOption "neovim with LazyVim configuration";
|
||||
};
|
||||
zed = {
|
||||
enable = mkEnableOption "zed editor with custom configuration";
|
||||
};
|
||||
};
|
||||
config = mkMerge [
|
||||
# Neovim configuration
|
||||
(mkIf cfg.neovim.enable (mkMerge [
|
||||
{
|
||||
programs.neovim = {
|
||||
enable = true;
|
||||
defaultEditor = true;
|
||||
viAlias = true;
|
||||
vimAlias = true;
|
||||
vimdiffAlias = true;
|
||||
withNodeJs = true;
|
||||
withPython3 = true;
|
||||
};
|
||||
}
|
||||
# Use initLua on HM 26.05+ (unstable), extraLuaConfig on HM ≤ 25.11 (stable)
|
||||
(
|
||||
if hasInitLua
|
||||
then {programs.neovim.initLua = lazyVimConfig;}
|
||||
else {programs.neovim.extraLuaConfig = lazyVimConfig;}
|
||||
)
|
||||
]))
|
||||
})
|
||||
|
||||
# Zed editor configuration
|
||||
(mkIf cfg.zed.enable {
|
||||
programs.zed-editor = {
|
||||
@@ -91,11 +85,13 @@ in {
|
||||
ui_font_size = 16;
|
||||
buffer_font_size = 16;
|
||||
buffer_font_family = "FiraCode Nerd Font";
|
||||
|
||||
# Editor Behavior
|
||||
vim_mode = true;
|
||||
auto_update = false;
|
||||
format_on_save = "on";
|
||||
load_direnv = "shell_hook";
|
||||
|
||||
# AI Features
|
||||
features = {
|
||||
edit_prediction_provider = "zed";
|
||||
@@ -104,12 +100,14 @@ in {
|
||||
mode = "subtle";
|
||||
};
|
||||
show_edit_predictions = true;
|
||||
|
||||
agent = {
|
||||
default_model = {
|
||||
provider = "zed.dev";
|
||||
model = "claude-sonnet-4";
|
||||
};
|
||||
};
|
||||
|
||||
assistant = {
|
||||
version = "2";
|
||||
default_model = {
|
||||
@@ -117,6 +115,7 @@ in {
|
||||
model = "claude-4";
|
||||
};
|
||||
};
|
||||
|
||||
# Language Models
|
||||
language_models = {
|
||||
anthropic = {
|
||||
@@ -129,6 +128,7 @@ in {
|
||||
api_url = "http://localhost:11434";
|
||||
};
|
||||
};
|
||||
|
||||
# Languages Configuration
|
||||
languages = {
|
||||
Nix = {
|
||||
@@ -153,6 +153,7 @@ in {
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
# LSP Configuration
|
||||
lsp = {
|
||||
rust-analyzer = {
|
||||
@@ -168,6 +169,7 @@ in {
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
# Context Servers
|
||||
context_servers = {
|
||||
some-context-server = {
|
||||
@@ -180,6 +182,7 @@ in {
|
||||
env = {};
|
||||
};
|
||||
};
|
||||
|
||||
# Privacy
|
||||
telemetry = {
|
||||
metrics = false;
|
||||
@@ -187,6 +190,7 @@ in {
|
||||
};
|
||||
};
|
||||
})
|
||||
|
||||
# Common packages (always installed if either editor is enabled)
|
||||
(mkIf (cfg.neovim.enable || cfg.zed.enable) {
|
||||
home.packages = with pkgs; [zig];
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
with lib; let
|
||||
cfg = config.coding.opencode;
|
||||
in {
|
||||
options.coding.opencode = {
|
||||
enable = mkEnableOption "opencode AI coding assistant";
|
||||
|
||||
ohMyOpencodeSettings = mkOption {
|
||||
type = types.attrs;
|
||||
default = {};
|
||||
description = ''
|
||||
Attributes merged (via recursiveUpdate) on top of the default
|
||||
oh-my-opencode.json. Use this to set provider-specific model
|
||||
assignments per machine.
|
||||
'';
|
||||
example = literalExpression ''
|
||||
{
|
||||
agents.sisyphus.model = "anthropic/claude-opus-4-5";
|
||||
categories.ultrabrain.model = "anthropic/claude-opus-4-5";
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
extraSettings = mkOption {
|
||||
type = types.attrs;
|
||||
default = {};
|
||||
description = ''
|
||||
Extra opencode settings merged (via mkMerge) into
|
||||
programs.opencode.settings. Use this to add provider
|
||||
configuration that is specific to a machine or organisation.
|
||||
'';
|
||||
example = literalExpression ''
|
||||
{
|
||||
provider.anthropic = {
|
||||
name = "Anthropic";
|
||||
models."claude-opus-4-5" = { limit.context = 200000; };
|
||||
};
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
extraPlugins = mkOption {
|
||||
type = types.listOf types.str;
|
||||
default = [];
|
||||
description = ''
|
||||
Additional opencode plugins to add to the plugin list.
|
||||
Each entry is a path or package name passed to opencode's plugin array.
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
programs.opencode = {
|
||||
enable = true;
|
||||
enableMcpIntegration = true;
|
||||
settings = mkMerge [
|
||||
{
|
||||
theme = "opencode";
|
||||
plugin = ["oh-my-openagent"] ++ cfg.extraPlugins;
|
||||
formatter = {
|
||||
alejandra = {
|
||||
command = ["alejandra" "-q" "-"];
|
||||
extensions = [".nix"];
|
||||
};
|
||||
};
|
||||
}
|
||||
cfg.extraSettings
|
||||
];
|
||||
};
|
||||
|
||||
home.file.".config/opencode/oh-my-opencode.json".text = builtins.toJSON (
|
||||
recursiveUpdate
|
||||
{
|
||||
"$schema" = "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json";
|
||||
google_auth = false;
|
||||
disabled_mcps = ["context7" "websearch"];
|
||||
}
|
||||
cfg.ohMyOpencodeSettings
|
||||
);
|
||||
};
|
||||
}
|
||||
70
overlays/default.nix
Normal file
70
overlays/default.nix
Normal file
@@ -0,0 +1,70 @@
|
||||
{inputs, ...}: {
|
||||
# This one brings our custom packages from the 'pkgs' directory
|
||||
additions = final: prev:
|
||||
(import ../pkgs {pkgs = final;})
|
||||
# // (inputs.hyprpanel.overlay final prev)
|
||||
// {rose-pine-hyprcursor = inputs.rose-pine-hyprcursor.packages.${prev.system}.default;};
|
||||
|
||||
# This one contains whatever you want to overlay
|
||||
# You can change versions, add patches, set compilation flags, anything really.
|
||||
# https://nixos.wiki/wiki/Overlays
|
||||
modifications = final: prev:
|
||||
# Import all package modifications from mods directory
|
||||
(import ./mods/default.nix {inherit prev;})
|
||||
// {
|
||||
# Direct configuration overrides
|
||||
brave = prev.brave.override {
|
||||
commandLineArgs = "--password-store=gnome-libsecret";
|
||||
};
|
||||
|
||||
# nodejs_24 = inputs.nixpkgs-stable.legacyPackages.${prev.system}.nodejs_24;
|
||||
# paperless-ngx = inputs.nixpkgs-45570c2.legacyPackages.${prev.system}.paperless-ngx;
|
||||
# anytype-heart = inputs.nixpkgs-9e58ed7.legacyPackages.${prev.system}.anytype-heart;
|
||||
# trezord = inputs.nixpkgs-2744d98.legacyPackages.${prev.system}.trezord;
|
||||
# mesa = inputs.nixpkgs-master.legacyPackages.${prev.system}.mesa;
|
||||
# hyprpanel = inputs.hyprpanel.packages.${prev.system}.default.overrideAttrs (prev: {
|
||||
# version = "latest"; # or whatever version you want
|
||||
# src = final.fetchFromGitHub {
|
||||
# owner = "Jas-SinghFSU";
|
||||
# repo = "HyprPanel";
|
||||
# rev = "master"; # or a specific commit hash
|
||||
# hash = "sha256-l623fIVhVCU/ylbBmohAtQNbK0YrWlEny0sC/vBJ+dU=";
|
||||
# };
|
||||
# });
|
||||
};
|
||||
|
||||
temp-packages = final: _prev: {
|
||||
temp = import inputs.nixpkgs-9e9486b {
|
||||
system = final.system;
|
||||
config.allowUnfree = true;
|
||||
};
|
||||
};
|
||||
|
||||
stable-packages = final: _prev: {
|
||||
stable = import inputs.nixpkgs-stable {
|
||||
system = final.system;
|
||||
config.allowUnfree = true;
|
||||
};
|
||||
};
|
||||
|
||||
pinned-packages = final: _prev: {
|
||||
pinned = import inputs.nixpkgs-9472de4 {
|
||||
system = final.system;
|
||||
config.allowUnfree = true;
|
||||
};
|
||||
};
|
||||
|
||||
locked-packages = final: _prev: {
|
||||
locked = import inputs.nixpkgs-locked {
|
||||
system = final.system;
|
||||
config.allowUnfree = true;
|
||||
};
|
||||
};
|
||||
|
||||
master-packages = final: _prev: {
|
||||
master = import inputs.nixpkgs-master {
|
||||
system = final.system;
|
||||
config.allowUnfree = true;
|
||||
};
|
||||
};
|
||||
}
|
||||
16
overlays/mods/beads.nix
Normal file
16
overlays/mods/beads.nix
Normal file
@@ -0,0 +1,16 @@
|
||||
{prev}:
|
||||
prev.beads.overrideAttrs (oldAttrs: rec {
|
||||
version = "0.47.1";
|
||||
|
||||
src = prev.fetchFromGitHub {
|
||||
owner = "steveyegge";
|
||||
repo = "beads";
|
||||
tag = "v${version}";
|
||||
hash = "sha256-DwIR/r1TJnpVd/CT1E2OTkAjU7k9/KHbcVwg5zziFVg=";
|
||||
};
|
||||
|
||||
vendorHash = "sha256-pY5m5ODRgqghyELRwwxOr+xlW41gtJWLXaW53GlLaFw=";
|
||||
|
||||
# Tests require git worktree operations that fail in Nix sandbox
|
||||
doCheck = false;
|
||||
})
|
||||
@@ -2,6 +2,10 @@
|
||||
# Package modifications
|
||||
# This overlay contains package overrides and modifications
|
||||
|
||||
# n8n = import ./n8n.nix {inherit prev;};
|
||||
# opencode = import ./opencode.nix {inherit prev;};
|
||||
# beads = import ./beads.nix {inherit prev;};
|
||||
|
||||
# Add more modifications here as needed
|
||||
# example-package = prev.example-package.override { ... };
|
||||
}
|
||||
|
||||
18
overlays/mods/n8n.nix
Normal file
18
overlays/mods/n8n.nix
Normal file
@@ -0,0 +1,18 @@
|
||||
{prev}:
|
||||
prev.n8n.overrideAttrs (oldAttrs: rec {
|
||||
version = "2.4.1";
|
||||
|
||||
src = prev.fetchFromGitHub {
|
||||
owner = "n8n-io";
|
||||
repo = "n8n";
|
||||
rev = "n8n@${version}";
|
||||
hash = "sha256-EQP9ZI8kt30SUYE1+/UUpxQXpavzKqDu8qE24zsNifg=";
|
||||
};
|
||||
|
||||
pnpmDeps = prev.pnpm_10.fetchDeps {
|
||||
pname = oldAttrs.pname;
|
||||
inherit version src;
|
||||
fetcherVersion = 1;
|
||||
hash = "sha256-Q30IuFEQD3896Hg0HCLd38YE2i8fJn74JY0o95LKJis=";
|
||||
};
|
||||
})
|
||||
16
overlays/mods/opencode.nix
Normal file
16
overlays/mods/opencode.nix
Normal file
@@ -0,0 +1,16 @@
|
||||
{prev}:
|
||||
prev.opencode.overrideAttrs (oldAttrs: rec {
|
||||
version = "1.1.18";
|
||||
|
||||
src = prev.fetchFromGitHub {
|
||||
owner = "anomalyco";
|
||||
repo = "opencode";
|
||||
tag = "v${version}";
|
||||
hash = "sha256-3A4s0FpjZuGB0HGMQVBXfWq+0yHmeIvnEQTSX3amV4I=";
|
||||
};
|
||||
|
||||
node_modules = oldAttrs.node_modules.overrideAttrs (old: {
|
||||
inherit version src;
|
||||
outputHash = "sha256-zSco4ORQQOqV3vMPuP+M/q/hBa+MJGnTKIlxgngMA3g=";
|
||||
});
|
||||
})
|
||||
64
pkgs/beads/default.nix
Normal file
64
pkgs/beads/default.nix
Normal file
@@ -0,0 +1,64 @@
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
buildGoModule,
|
||||
fetchFromGitHub,
|
||||
gitMinimal,
|
||||
installShellFiles,
|
||||
nix-update-script,
|
||||
versionCheckHook,
|
||||
writableTmpDirAsHomeHook,
|
||||
}:
|
||||
buildGoModule (finalAttrs: {
|
||||
pname = "beads";
|
||||
version = "0.47.1";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "steveyegge";
|
||||
repo = "beads";
|
||||
tag = "v${finalAttrs.version}";
|
||||
hash = "sha256-DwIR/r1TJnpVd/CT1E2OTkAjU7k9/KHbcVwg5zziFVg=";
|
||||
};
|
||||
|
||||
vendorHash = "sha256-pY5m5ODRgqghyELRwwxOr+xlW41gtJWLXaW53GlLaFw=";
|
||||
|
||||
subPackages = ["cmd/bd"];
|
||||
|
||||
ldflags = ["-s" "-w"];
|
||||
|
||||
nativeBuildInputs = [installShellFiles];
|
||||
|
||||
nativeCheckInputs = [gitMinimal writableTmpDirAsHomeHook];
|
||||
|
||||
# Skip security tests on Darwin - they check for /etc/passwd which isn't available in sandbox
|
||||
checkFlags =
|
||||
lib.optionals stdenv.hostPlatform.isDarwin
|
||||
["-skip=TestCleanupMergeArtifacts_CommandInjectionPrevention"];
|
||||
|
||||
preCheck = ''
|
||||
export PATH="$out/bin:$PATH"
|
||||
'';
|
||||
|
||||
postInstall = lib.optionalString (stdenv.buildPlatform.canExecute stdenv.hostPlatform) ''
|
||||
installShellCompletion --cmd bd \
|
||||
--bash <($out/bin/bd completion bash) \
|
||||
--fish <($out/bin/bd completion fish) \
|
||||
--zsh <($out/bin/bd completion zsh)
|
||||
'';
|
||||
|
||||
nativeInstallCheckInputs = [versionCheckHook writableTmpDirAsHomeHook];
|
||||
versionCheckProgramArg = "version";
|
||||
doInstallCheck = true;
|
||||
|
||||
doCheck = false;
|
||||
|
||||
passthru.updateScript = nix-update-script {};
|
||||
|
||||
meta = {
|
||||
description = "Lightweight memory system for AI coding agents with graph-based issue tracking";
|
||||
homepage = "https://github.com/steveyegge/beads";
|
||||
license = lib.licenses.mit;
|
||||
maintainers = with lib.maintainers; [kedry];
|
||||
mainProgram = "bd";
|
||||
};
|
||||
})
|
||||
40
pkgs/code2prompt/default.nix
Normal file
40
pkgs/code2prompt/default.nix
Normal file
@@ -0,0 +1,40 @@
|
||||
{
|
||||
lib,
|
||||
fetchFromGitHub,
|
||||
nix-update-script,
|
||||
rustPlatform,
|
||||
pkg-config,
|
||||
perl,
|
||||
openssl,
|
||||
}:
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "code2prompt";
|
||||
version = "4.2.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "mufeedvh";
|
||||
repo = "code2prompt";
|
||||
rev = "v${version}";
|
||||
hash = "sha256-Gh8SsSTZW7QlyyC3SWJ5pOK2x85/GT7+LPJn2Jeczpc=";
|
||||
};
|
||||
|
||||
cargoLock = {
|
||||
lockFile = src + "/Cargo.lock";
|
||||
};
|
||||
|
||||
buildAndTestSubdir = "crates/code2prompt";
|
||||
|
||||
nativeBuildInputs = [pkg-config perl];
|
||||
|
||||
buildInputs = [openssl];
|
||||
|
||||
passthru.updateScript = nix-update-script {};
|
||||
|
||||
meta = with lib; {
|
||||
description = "A CLI tool that converts your codebase into a single LLM prompt with a source tree, prompt templating, and token counting";
|
||||
homepage = "https://github.com/mufeedvh/code2prompt";
|
||||
license = licenses.mit;
|
||||
platforms = platforms.linux;
|
||||
mainProgram = "code2prompt";
|
||||
};
|
||||
}
|
||||
@@ -1,41 +1,17 @@
|
||||
# m3ta-nixpkgs package registry
|
||||
#
|
||||
# Flake inputs used:
|
||||
# inputs.basecamp → basecamp (pass-through)
|
||||
# inputs.openspec → openspec (pass-through)
|
||||
# inputs.opencode → opencode-desktop (build inputs + patches)
|
||||
# inputs.agents → not used directly here (used by lib/)
|
||||
{
|
||||
pkgs,
|
||||
inputs,
|
||||
...
|
||||
}: let
|
||||
system = pkgs.stdenv.hostPlatform.system;
|
||||
in {
|
||||
# ── Local packages ────────────────────────────────────────────────
|
||||
# Standard packages built from source in ./<name>/default.nix.
|
||||
# No flake inputs required.
|
||||
|
||||
sidecar = pkgs.callPackage ./sidecar {};
|
||||
td = pkgs.callPackage ./td {};
|
||||
eigent = pkgs.callPackage ./eigent {};
|
||||
{pkgs, ...}: {
|
||||
# Custom packages registry
|
||||
# Each package is defined in its own directory under pkgs/
|
||||
beads = pkgs.callPackage ./beads {};
|
||||
code2prompt = pkgs.callPackage ./code2prompt {};
|
||||
hyprpaper-random = pkgs.callPackage ./hyprpaper-random {};
|
||||
launch-webapp = pkgs.callPackage ./launch-webapp {};
|
||||
mem0 = pkgs.callPackage ./mem0 {};
|
||||
msty-studio = pkgs.callPackage ./msty-studio {};
|
||||
n8n = pkgs.callPackage ./n8n {};
|
||||
opencode = pkgs.callPackage ./opencode {};
|
||||
pomodoro-timer = pkgs.callPackage ./pomodoro-timer {};
|
||||
rofi-project-opener = pkgs.callPackage ./rofi-project-opener {};
|
||||
stt-ptt = pkgs.callPackage ./stt-ptt {};
|
||||
tuxedo-backlight = pkgs.callPackage ./tuxedo-backlight {};
|
||||
kestractl = pkgs.callPackage ./kestractl {};
|
||||
openshell = pkgs.callPackage ./openshell {};
|
||||
zellij-ps = pkgs.callPackage ./zellij-ps {};
|
||||
vibetyper = pkgs.callPackage ./vibetyper {};
|
||||
|
||||
# ── Pass-through packages ──────────────────────────────────────────
|
||||
# Imported directly from flake inputs. No local modifications.
|
||||
|
||||
basecamp = inputs.basecamp.packages.${system}.default;
|
||||
openspec = inputs.openspec.packages.${system}.default;
|
||||
}
|
||||
|
||||
@@ -1,74 +0,0 @@
|
||||
{
|
||||
appimageTools,
|
||||
fetchurl,
|
||||
lib,
|
||||
nodejs,
|
||||
uv,
|
||||
python3,
|
||||
nix-update-script,
|
||||
}: let
|
||||
pname = "eigent";
|
||||
version = "0.0.90";
|
||||
src = fetchurl {
|
||||
url = "https://github.com/eigent-ai/eigent/releases/download/v${version}/Eigent-${version}.AppImage";
|
||||
hash = "sha256-mwCBx+D6mgGqQa8bDuUpo3h49EwFVkwasJwaYc6aXFE=";
|
||||
};
|
||||
appimageContents = appimageTools.extractType2 {inherit pname version src;};
|
||||
in
|
||||
appimageTools.wrapType2 {
|
||||
inherit pname version src;
|
||||
|
||||
extraPkgs = _: [
|
||||
nodejs
|
||||
uv
|
||||
python3
|
||||
];
|
||||
|
||||
# Runs before bubblewrap launches — sets up writable state for the sandbox.
|
||||
extraPreBwrapCmds = ''
|
||||
# eigent writes to multiple dirs under resources/ at runtime:
|
||||
# prebuilt/ → pyvenv.cfg, .terminal_venv_fixed sentinel
|
||||
# backend/ → creates runtime/ dir for temporary state
|
||||
# Nix store is read-only → EROFS. Copy to writable location on first
|
||||
# launch (or when the package version changes).
|
||||
DATA_DIR="$HOME/.local/share/${pname}"
|
||||
mkdir -p "$DATA_DIR"
|
||||
for subdir in prebuilt backend; do
|
||||
SRC="${appimageContents}/resources/$subdir"
|
||||
DST="$DATA_DIR/$subdir"
|
||||
if [ ! -f "$DST/.nix-src" ] || [ "$(cat "$DST/.nix-src")" != "${appimageContents}" ]; then
|
||||
rm -rf "$DST"
|
||||
cp -r "$SRC" "$DST"
|
||||
chmod -R u+w "$DST"
|
||||
echo "${appimageContents}" > "$DST/.nix-src"
|
||||
fi
|
||||
done
|
||||
'';
|
||||
|
||||
# Bind-mount writable copies over the read-only store paths so the app
|
||||
# sees its files at the expected locations but can write to them.
|
||||
extraBwrapArgs = [
|
||||
"--bind $HOME/.local/share/${pname}/prebuilt ${appimageContents}/resources/prebuilt"
|
||||
"--bind $HOME/.local/share/${pname}/backend ${appimageContents}/resources/backend"
|
||||
];
|
||||
|
||||
extraInstallCommands = ''
|
||||
install -m 444 -D ${appimageContents}/eigent.desktop -t $out/share/applications
|
||||
substituteInPlace $out/share/applications/eigent.desktop \
|
||||
--replace-fail 'Exec=AppRun --no-sandbox %U' 'Exec=${pname} %U'
|
||||
install -m 444 -D ${appimageContents}/eigent.png \
|
||||
$out/share/icons/hicolor/256x256/apps/eigent.png
|
||||
'';
|
||||
|
||||
passthru = {
|
||||
updateScript = nix-update-script {};
|
||||
};
|
||||
|
||||
meta = {
|
||||
description = "Open source AI cowork desktop app — local alternative to Claude Cowork";
|
||||
homepage = "https://github.com/eigent-ai/eigent";
|
||||
license = lib.licenses.asl20;
|
||||
platforms = lib.platforms.linux;
|
||||
mainProgram = "eigent";
|
||||
};
|
||||
}
|
||||
@@ -1,37 +0,0 @@
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
fetchurl,
|
||||
autoPatchelfHook,
|
||||
}: let
|
||||
sources = lib.importJSON ./sources.json;
|
||||
source = sources.sources.${stdenv.hostPlatform.system};
|
||||
in
|
||||
stdenv.mkDerivation {
|
||||
pname = "kestractl";
|
||||
version = sources.version;
|
||||
|
||||
src = fetchurl {
|
||||
inherit (source) url hash;
|
||||
};
|
||||
|
||||
nativeBuildInputs = [autoPatchelfHook];
|
||||
|
||||
unpackPhase = ''
|
||||
tar -xzf $src
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
install -Dm755 kestractl $out/bin/kestractl
|
||||
'';
|
||||
|
||||
passthru.updateScript = ./update.sh;
|
||||
|
||||
meta = with lib; {
|
||||
description = "CLI for the Kestra workflow orchestration platform";
|
||||
homepage = "https://github.com/kestra-io/kestractl";
|
||||
license = licenses.asl20;
|
||||
platforms = attrNames sources.sources;
|
||||
mainProgram = "kestractl";
|
||||
};
|
||||
}
|
||||
@@ -1,13 +0,0 @@
|
||||
{
|
||||
"version": "1.3.0",
|
||||
"sources": {
|
||||
"aarch64-linux": {
|
||||
"url": "https://github.com/kestra-io/kestractl/releases/download/1.3.0/kestractl_1.3.0_linux_arm64.tar.gz",
|
||||
"hash": "sha256-/18F6CZnnLbet4BmI1oQ5pZWkJwIshCq30qd+cm0GGA="
|
||||
},
|
||||
"x86_64-linux": {
|
||||
"url": "https://github.com/kestra-io/kestractl/releases/download/1.3.0/kestractl_1.3.0_linux_amd64.tar.gz",
|
||||
"hash": "sha256-xmsBiqNKvob8xHDyU253o6c25YIubHanNdLqzWaOvSA="
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell --pure -i bash -p bash curl jq nix cacert git
|
||||
set -euo pipefail
|
||||
|
||||
# Update kestractl sources.json with the latest release from GitHub.
|
||||
# Usage: ./update.sh (or via nix-update --update-script)
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
SOURCES_FILE="$SCRIPT_DIR/sources.json"
|
||||
|
||||
# Map Nix system -> GitHub release asset name fragment
|
||||
declare -A SYSTEMS=(
|
||||
["x86_64-linux"]="linux_amd64"
|
||||
["aarch64-linux"]="linux_arm64"
|
||||
)
|
||||
|
||||
echo "Fetching latest kestractl release..."
|
||||
LATEST=$(curl -fsSL "https://api.github.com/repos/kestra-io/kestractl/releases/latest")
|
||||
VERSION=$(echo "$LATEST" | jq -r '.tag_name')
|
||||
echo "Latest version: $VERSION"
|
||||
|
||||
CURRENT_VERSION=$(jq -r '.version' "$SOURCES_FILE")
|
||||
if [[ "$VERSION" == "$CURRENT_VERSION" ]]; then
|
||||
echo "Already at latest version $VERSION, nothing to do."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
NEW_SOURCES="{}"
|
||||
|
||||
for NIX_SYSTEM in "${!SYSTEMS[@]}"; do
|
||||
ASSET_FRAG="${SYSTEMS[$NIX_SYSTEM]}"
|
||||
URL="https://github.com/kestra-io/kestractl/releases/download/${VERSION}/kestractl_${VERSION}_${ASSET_FRAG}.tar.gz"
|
||||
|
||||
echo "Fetching hash for $NIX_SYSTEM ($URL)..."
|
||||
HASH=$(nix-prefetch-url --type sha256 "$URL" 2>/dev/null)
|
||||
SRI=$(nix hash to-sri --type sha256 "$HASH")
|
||||
|
||||
NEW_SOURCES=$(echo "$NEW_SOURCES" | jq \
|
||||
--arg sys "$NIX_SYSTEM" \
|
||||
--arg url "$URL" \
|
||||
--arg hash "$SRI" \
|
||||
'. + {($sys): {url: $url, hash: $hash}}')
|
||||
done
|
||||
|
||||
jq -n \
|
||||
--arg version "$VERSION" \
|
||||
--argjson sources "$NEW_SOURCES" \
|
||||
'{"version": $version, "sources": $sources}' \
|
||||
> "$SOURCES_FILE"
|
||||
|
||||
echo "Updated $SOURCES_FILE to $VERSION"
|
||||
|
||||
# Commit when running in CI or via nix-update
|
||||
if [[ -d "$SCRIPT_DIR/../../.git" ]] || git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
|
||||
NIXPKGS_ROOT=$(git -C "$SCRIPT_DIR" rev-parse --show-toplevel 2>/dev/null || true)
|
||||
if [[ -n "$NIXPKGS_ROOT" && -n "$(git -C "$NIXPKGS_ROOT" status --porcelain "$SOURCES_FILE")" ]]; then
|
||||
git -C "$NIXPKGS_ROOT" add "$SOURCES_FILE"
|
||||
git -C "$NIXPKGS_ROOT" commit -m "kestractl: ${CURRENT_VERSION} -> ${VERSION}"
|
||||
echo "Committed update to git"
|
||||
fi
|
||||
fi
|
||||
@@ -6,14 +6,14 @@
|
||||
}:
|
||||
python3.pkgs.buildPythonPackage rec {
|
||||
pname = "mem0ai";
|
||||
version = "2.0.1";
|
||||
version = "1.0.2";
|
||||
pyproject = true;
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "mem0ai";
|
||||
repo = "mem0";
|
||||
rev = "v${version}";
|
||||
hash = "sha256-lNSE0Yit+FmM8opC4XYtfVef7JfGd3wMKbLj67Kp4Qw=";
|
||||
hash = "sha256-wvIPmqYlpto+ggifdSOjveEmSneKeZcoltItusYSu4Q=";
|
||||
};
|
||||
|
||||
# Relax Python dependency version constraints
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
fetchurl,
|
||||
lib,
|
||||
nodejs,
|
||||
nodePackages,
|
||||
uv,
|
||||
python3,
|
||||
makeWrapper,
|
||||
@@ -21,6 +22,7 @@ in
|
||||
|
||||
extraPkgs = pkgs: [
|
||||
nodejs
|
||||
nodePackages.npm
|
||||
uv
|
||||
python3
|
||||
];
|
||||
@@ -32,7 +34,7 @@ in
|
||||
install -m 444 -D ${appimageContents}/MstyStudio.png \
|
||||
$out/share/icons/hicolor/256x256/apps/MstyStudio.png
|
||||
wrapProgram $out/bin/${pname} \
|
||||
--prefix PATH : ${nodejs}/bin:${uv}/bin:${python3}/bin
|
||||
--prefix PATH : ${nodejs}/bin:${nodePackages.npm}/bin:${uv}/bin:${python3}/bin
|
||||
'';
|
||||
meta = {
|
||||
description = "Msty Studio enables advanced, privacy‑preserving AI workflows entirely on your local machine.";
|
||||
|
||||
@@ -11,35 +11,27 @@
|
||||
node-gyp,
|
||||
cctools,
|
||||
xcbuild,
|
||||
dart-sass,
|
||||
libkrb5,
|
||||
libmongocrypt,
|
||||
libpq,
|
||||
makeWrapper,
|
||||
}: let
|
||||
python = python3.withPackages (
|
||||
ps:
|
||||
with ps; [
|
||||
websockets
|
||||
]
|
||||
);
|
||||
in
|
||||
}:
|
||||
stdenv.mkDerivation (finalAttrs: {
|
||||
pname = "n8n";
|
||||
version = "2.18.5";
|
||||
version = "n8n@2.3.6";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "n8n-io";
|
||||
repo = "n8n";
|
||||
tag = "n8n@${finalAttrs.version}";
|
||||
hash = "sha256-ws0DXGQFR+z3nVyd4Yn9pIM7yh+H6GnuCRSLxgvtPxo=";
|
||||
tag = finalAttrs.version;
|
||||
hash = "sha256-9TGX99elCmB0Y/ttfQrC2HNxyQQcdGIazo8BWqhN634=";
|
||||
};
|
||||
|
||||
pnpmDeps = fetchPnpmDeps {
|
||||
inherit (finalAttrs) pname version src;
|
||||
pnpm = pnpm_10;
|
||||
fetcherVersion = 3;
|
||||
hash = "sha256-Ajgne0neNm6HgMK6z3jnEkUJJxVOTgzjpSaMaJgIndQ=";
|
||||
hash = "sha256-dSofdsoTERdq28ZGyz+Nza1Y5fnyPcuRIk38WeLqNVE=";
|
||||
};
|
||||
|
||||
nativeBuildInputs =
|
||||
@@ -50,28 +42,9 @@ in
|
||||
node-gyp # required to build sqlite3 bindings
|
||||
makeWrapper
|
||||
]
|
||||
++ lib.optional stdenv.hostPlatform.isDarwin [
|
||||
cctools
|
||||
xcbuild
|
||||
];
|
||||
++ lib.optional stdenv.hostPlatform.isDarwin [cctools xcbuild];
|
||||
|
||||
buildInputs = [
|
||||
nodejs
|
||||
libkrb5
|
||||
libmongocrypt
|
||||
libpq
|
||||
];
|
||||
|
||||
preBuild = ''
|
||||
# Force sass-embedded to use our dart-sass instead of bundled binaries.
|
||||
# The bundled Dart binary can't run in the Nix sandbox (no /lib64/ld-linux-x86-64.so.2).
|
||||
for dep in node_modules/.pnpm/sass-embedded@*; do
|
||||
substituteInPlace "$dep/node_modules/sass-embedded/dist/lib/src/compiler-path.js" \
|
||||
--replace-fail \
|
||||
'compilerCommand = (() => {' \
|
||||
'compilerCommand = (() => { return ["${lib.getExe dart-sass}"];'
|
||||
done
|
||||
'';
|
||||
buildInputs = [nodejs libkrb5 libmongocrypt libpq];
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
@@ -80,12 +53,6 @@ in
|
||||
node-gyp rebuild
|
||||
popd
|
||||
|
||||
# isolated-vm is a native addon required by n8n-nodes-base (Merge node SQL sandbox)
|
||||
# since n8n 2.11.x; must be compiled before pnpm build runs generate-metadata
|
||||
pushd node_modules/isolated-vm
|
||||
node-gyp rebuild
|
||||
popd
|
||||
|
||||
# TODO: use deploy after resolved https://github.com/pnpm/pnpm/issues/5315
|
||||
pnpm build --filter=n8n
|
||||
|
||||
@@ -113,22 +80,11 @@ in
|
||||
runHook preInstall
|
||||
|
||||
mkdir -p $out/{bin,lib/n8n}
|
||||
cp -r {packages,node_modules} $out/lib/n8n
|
||||
mv {packages,node_modules} $out/lib/n8n
|
||||
|
||||
makeWrapper $out/lib/n8n/packages/cli/bin/n8n $out/bin/n8n \
|
||||
--set N8N_RELEASE_TYPE "stable"
|
||||
|
||||
# JavaScript runner
|
||||
makeWrapper ${nodejs}/bin/node $out/bin/n8n-task-runner \
|
||||
--add-flags "$out/lib/n8n/packages/@n8n/task-runner/dist/start.js"
|
||||
|
||||
# Python runner
|
||||
mkdir -p $out/lib/n8n-task-runner-python
|
||||
cp -r packages/@n8n/task-runner-python/* $out/lib/n8n-task-runner-python/
|
||||
makeWrapper ${python}/bin/python $out/bin/n8n-task-runner-python \
|
||||
--add-flags "$out/lib/n8n-task-runner-python/src/main.py" \
|
||||
--prefix PYTHONPATH : "$out/lib/n8n-task-runner-python"
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
@@ -150,12 +106,7 @@ in
|
||||
'';
|
||||
homepage = "https://n8n.io";
|
||||
changelog = "https://github.com/n8n-io/n8n/releases/tag/n8n@${finalAttrs.version}";
|
||||
maintainers = with lib.maintainers; [
|
||||
gepbird
|
||||
AdrienLemaire
|
||||
sweenu
|
||||
wrbbz
|
||||
];
|
||||
maintainers = with lib.maintainers; [gepbird AdrienLemaire sweenu];
|
||||
license = lib.licenses.sustainableUse;
|
||||
mainProgram = "n8n";
|
||||
platforms = lib.platforms.unix;
|
||||
|
||||
@@ -1,29 +1,6 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i bash -p bash curl jq nix-update cacert git nix
|
||||
#!nix-shell --pure -i bash -p bash curl jq nix-update cacert git
|
||||
set -euo pipefail
|
||||
|
||||
# n8n releases are published with two tags per version:
|
||||
# - "n8n@X.Y.Z" - the versioned tag
|
||||
# - "stable" - always points to the latest stable version
|
||||
#
|
||||
# We query the "stable" tag and extract the version from its target commitish (e.g., "release/2.18.5").
|
||||
# This ensures we always get the actual latest stable version, not the most recently created tag.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Get the directory where this script lives (should be pkgs/n8n/)
|
||||
script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
|
||||
# Get the nixpkgs root (parent of pkgs/)
|
||||
nixpkgs_root="$(cd "$script_dir/../.." && pwd)"
|
||||
|
||||
cd "$nixpkgs_root"
|
||||
|
||||
# Query the "stable" tag and extract version from target_commitish (e.g., "release/2.18.5")
|
||||
new_version=$(curl -s "https://api.github.com/repos/n8n-io/n8n/releases/tags/stable" | jq --raw-output '.target_commitish | ltrimstr("release/")')
|
||||
|
||||
echo "Latest stable version: n8n@${new_version}"
|
||||
echo "Running from: $(pwd)"
|
||||
|
||||
# Use --flake --system to properly evaluate the flake-based package
|
||||
nix-update --flake --system x86_64-linux n8n --version "$new_version"
|
||||
new_version="$(curl -s "https://api.github.com/repos/n8n-io/n8n/releases/latest" | jq --raw-output '.tag_name | ltrimstr("n8n@")')"
|
||||
nix-update n8n --version "$new_version"
|
||||
|
||||
222
pkgs/opencode/default.nix
Normal file
222
pkgs/opencode/default.nix
Normal file
@@ -0,0 +1,222 @@
|
||||
{
|
||||
lib,
|
||||
stdenvNoCC,
|
||||
bun,
|
||||
fetchFromGitHub,
|
||||
fzf,
|
||||
makeBinaryWrapper,
|
||||
models-dev,
|
||||
nix-update-script,
|
||||
ripgrep,
|
||||
testers,
|
||||
installShellFiles,
|
||||
writableTmpDirAsHomeHook,
|
||||
}: let
|
||||
pname = "opencode";
|
||||
version = "1.1.25";
|
||||
src = fetchFromGitHub {
|
||||
owner = "anomalyco";
|
||||
repo = "opencode";
|
||||
tag = "v${version}";
|
||||
hash = "sha256-aF+4LL0x9wU2Ktrv/nJE2VXgUeXFrwJ16pa1sGNhpi4=";
|
||||
};
|
||||
|
||||
node_modules = stdenvNoCC.mkDerivation {
|
||||
pname = "${pname}-node_modules";
|
||||
inherit version src;
|
||||
|
||||
impureEnvVars =
|
||||
lib.fetchers.proxyImpureEnvVars
|
||||
++ ["GIT_PROXY_COMMAND" "SOCKS_SERVER"];
|
||||
|
||||
nativeBuildInputs = [bun writableTmpDirAsHomeHook];
|
||||
|
||||
dontConfigure = true;
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
export BUN_INSTALL_CACHE_DIR=$(mktemp -d)
|
||||
|
||||
bun install \
|
||||
--cpu="*" \
|
||||
--filter=./packages/opencode \
|
||||
--force \
|
||||
--frozen-lockfile \
|
||||
--ignore-scripts \
|
||||
--no-progress \
|
||||
--os="*" \
|
||||
--production
|
||||
|
||||
bun run ./nix/scripts/canonicalize-node-modules.ts
|
||||
bun run ./nix/scripts/normalize-bun-binaries.ts
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
mkdir -p $out
|
||||
find . -type d -name node_modules -exec cp -R --parents {} $out \;
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
# NOTE: Required else we get errors that our fixed-output derivation references store paths
|
||||
dontFixup = true;
|
||||
|
||||
outputHash = "sha256-qheQCN71VM3M35+j9XhaCdxQNo5ze8mV8sDFaX0WVWM=";
|
||||
outputHashAlgo = "sha256";
|
||||
outputHashMode = "recursive";
|
||||
};
|
||||
in
|
||||
stdenvNoCC.mkDerivation (finalAttrs: {
|
||||
inherit pname version src node_modules;
|
||||
|
||||
nativeBuildInputs = [
|
||||
bun
|
||||
installShellFiles
|
||||
makeBinaryWrapper
|
||||
models-dev
|
||||
writableTmpDirAsHomeHook
|
||||
];
|
||||
|
||||
patches = [
|
||||
# NOTE: Relax Bun version check to be a warning instead of an error
|
||||
./relax-bun-version-check.patch
|
||||
];
|
||||
|
||||
dontConfigure = true;
|
||||
|
||||
env.MODELS_DEV_API_JSON = "${models-dev}/dist/_api.json";
|
||||
env.OPENCODE_VERSION = finalAttrs.version;
|
||||
env.OPENCODE_CHANNEL = "stable";
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
# Copy all node_modules including the .bun directory with actual packages
|
||||
cp -r ${finalAttrs.node_modules}/node_modules .
|
||||
cp -r ${finalAttrs.node_modules}/packages .
|
||||
|
||||
(
|
||||
cd packages/opencode
|
||||
|
||||
# Fix symlinks to workspace packages
|
||||
chmod -R u+w ./node_modules
|
||||
mkdir -p ./node_modules/@opencode-ai
|
||||
rm -f ./node_modules/@opencode-ai/{script,sdk,plugin}
|
||||
ln -s $(pwd)/../../packages/script ./node_modules/@opencode-ai/script
|
||||
ln -s $(pwd)/../../packages/sdk/js ./node_modules/@opencode-ai/sdk
|
||||
ln -s $(pwd)/../../packages/plugin ./node_modules/@opencode-ai/plugin
|
||||
|
||||
# Use upstream bundle.ts for Nix-compatible bundling
|
||||
cp ../../nix/bundle.ts ./bundle.ts
|
||||
chmod +x ./bundle.ts
|
||||
bun run ./bundle.ts
|
||||
)
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
cd packages/opencode
|
||||
if [ ! -d dist ]; then
|
||||
echo "ERROR: dist directory missing after bundle step"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mkdir -p $out/lib/opencode
|
||||
cp -r dist $out/lib/opencode/
|
||||
chmod -R u+w $out/lib/opencode/dist
|
||||
|
||||
# Select bundled worker assets deterministically (sorted find output)
|
||||
worker_file=$(find "$out/lib/opencode/dist" -type f \( -path '*/tui/worker.*' -o -name 'worker.*' \) | sort | head -n1)
|
||||
parser_worker_file=$(find "$out/lib/opencode/dist" -type f -name 'parser.worker.*' | sort | head -n1)
|
||||
if [ -z "$worker_file" ]; then
|
||||
echo "ERROR: bundled worker not found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
main_wasm=$(printf '%s\n' "$out"/lib/opencode/dist/tree-sitter-*.wasm | sort | head -n1)
|
||||
wasm_list=$(find "$out/lib/opencode/dist" -maxdepth 1 -name 'tree-sitter-*.wasm' -print)
|
||||
for patch_file in "$worker_file" "$parser_worker_file"; do
|
||||
[ -z "$patch_file" ] && continue
|
||||
[ ! -f "$patch_file" ] && continue
|
||||
if [ -n "$wasm_list" ] && grep -q 'tree-sitter' "$patch_file"; then
|
||||
# Rewrite wasm references to absolute store paths to avoid runtime resolve failures.
|
||||
bun --bun ../../nix/scripts/patch-wasm.ts "$patch_file" "$main_wasm" $wasm_list
|
||||
fi
|
||||
done
|
||||
|
||||
mkdir -p $out/lib/opencode/node_modules
|
||||
cp -r ../../node_modules/.bun $out/lib/opencode/node_modules/
|
||||
mkdir -p $out/lib/opencode/node_modules/@opentui
|
||||
|
||||
# Generate and install JSON schema
|
||||
mkdir -p $out/share/opencode
|
||||
HOME=$TMPDIR bun --bun script/schema.ts $out/share/opencode/schema.json
|
||||
|
||||
mkdir -p $out/bin
|
||||
makeWrapper ${lib.getExe bun} $out/bin/opencode \
|
||||
--add-flags "run" \
|
||||
--add-flags "$out/lib/opencode/dist/src/index.js" \
|
||||
--prefix PATH : ${lib.makeBinPath [fzf ripgrep]} \
|
||||
--argv0 opencode
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
postInstall = ''
|
||||
# Add symlinks for platform-specific native modules
|
||||
pkgs=(
|
||||
$out/lib/opencode/node_modules/.bun/@opentui+core-*
|
||||
$out/lib/opencode/node_modules/.bun/@opentui+solid-*
|
||||
$out/lib/opencode/node_modules/.bun/@opentui+core@*
|
||||
$out/lib/opencode/node_modules/.bun/@opentui+solid@*
|
||||
)
|
||||
for pkg in "''${pkgs[@]}"; do
|
||||
if [ -d "$pkg" ]; then
|
||||
pkgName=$(basename "$pkg" | sed 's/@opentui+\([^@]*\)@.*/\1/')
|
||||
ln -sf ../.bun/$(basename "$pkg")/node_modules/@opentui/$pkgName \
|
||||
$out/lib/opencode/node_modules/@opentui/$pkgName
|
||||
fi
|
||||
done
|
||||
|
||||
${lib.optionalString
|
||||
((stdenvNoCC.buildPlatform.canExecute stdenvNoCC.hostPlatform)
|
||||
&& (stdenvNoCC.hostPlatform.system != "x86_64-darwin")) ''
|
||||
installShellCompletion --cmd opencode \
|
||||
--bash <($out/bin/opencode completion)
|
||||
''}
|
||||
'';
|
||||
|
||||
passthru = {
|
||||
jsonschema = "${placeholder "out"}/share/opencode/schema.json";
|
||||
tests.version = testers.testVersion {
|
||||
package = finalAttrs.finalPackage;
|
||||
command = "HOME=$(mktemp -d) opencode --version";
|
||||
inherit (finalAttrs) version;
|
||||
};
|
||||
updateScript =
|
||||
nix-update-script {extraArgs = ["--subpackage" "node_modules"];};
|
||||
};
|
||||
|
||||
meta = {
|
||||
description = "AI coding agent built for the terminal";
|
||||
longDescription = ''
|
||||
OpenCode is a terminal-based agent that can build anything.
|
||||
It combines a TypeScript/JavaScript core with a Go-based TUI
|
||||
to provide an interactive AI coding experience.
|
||||
'';
|
||||
homepage = "https://github.com/anomalyco/opencode";
|
||||
license = lib.licenses.mit;
|
||||
maintainers = with lib.maintainers; [delafthi];
|
||||
sourceProvenance = with lib.sourceTypes; [fromSource];
|
||||
platforms = ["aarch64-linux" "x86_64-linux" "aarch64-darwin" "x86_64-darwin"];
|
||||
mainProgram = "opencode";
|
||||
};
|
||||
})
|
||||
28
pkgs/opencode/relax-bun-version-check.patch
Normal file
28
pkgs/opencode/relax-bun-version-check.patch
Normal file
@@ -0,0 +1,28 @@
|
||||
From 0e07ea8225f5667e39c6aa59eea726266f0afab0 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= <joerg@thalheim.io>
|
||||
Date: Thu, 13 Nov 2025 10:16:31 +0100
|
||||
Subject: [PATCH] Change Bun version check from error to warning
|
||||
MIME-Version: 1.0
|
||||
Content-Type: text/plain; charset=UTF-8
|
||||
Content-Transfer-Encoding: 8bit
|
||||
|
||||
Signed-off-by: Jörg Thalheim <joerg@thalheim.io>
|
||||
---
|
||||
packages/script/src/index.ts | 2 +-
|
||||
1 file changed, 1 insertion(+), 1 deletion(-)
|
||||
|
||||
diff --git a/packages/script/src/index.ts b/packages/script/src/index.ts
|
||||
index 141d2b75..de06d0dc 100644
|
||||
--- a/packages/script/src/index.ts
|
||||
+++ b/packages/script/src/index.ts
|
||||
@@ -10,7 +10,7 @@ if (!expectedBunVersion) {
|
||||
}
|
||||
|
||||
if (process.versions.bun !== expectedBunVersion) {
|
||||
- throw new Error(`This script requires bun@${expectedBunVersion}, but you are using bun@${process.versions.bun}`)
|
||||
+ console.warn(`Warning: This script expects bun@${expectedBunVersion}, but you are using bun@${process.versions.bun}`)
|
||||
}
|
||||
|
||||
const CHANNEL = process.env["OPENCODE_CHANNEL"] ?? (await $`git branch --show-current`.text().then((x) => x.trim()))
|
||||
--
|
||||
2.51.0
|
||||
@@ -1,35 +0,0 @@
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
fetchurl,
|
||||
}: let
|
||||
sources = lib.importJSON ./sources.json;
|
||||
source = sources.sources.${stdenv.hostPlatform.system};
|
||||
in
|
||||
stdenv.mkDerivation {
|
||||
pname = "openshell";
|
||||
version = lib.removePrefix "v" sources.version;
|
||||
|
||||
src = fetchurl {
|
||||
inherit (source) url hash;
|
||||
};
|
||||
|
||||
unpackPhase = ''
|
||||
tar -xzf $src
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
install -Dm755 openshell $out/bin/openshell
|
||||
'';
|
||||
|
||||
passthru.updateScript = ./update.sh;
|
||||
|
||||
meta = with lib; {
|
||||
description = "Safe, private runtime for autonomous AI agents";
|
||||
homepage = "https://github.com/NVIDIA/OpenShell";
|
||||
license = licenses.asl20;
|
||||
platforms = attrNames sources.sources;
|
||||
mainProgram = "openshell";
|
||||
maintainers = [];
|
||||
};
|
||||
}
|
||||
@@ -1,17 +0,0 @@
|
||||
{
|
||||
"version": "v0.0.23",
|
||||
"sources": {
|
||||
"aarch64-linux": {
|
||||
"url": "https://github.com/NVIDIA/OpenShell/releases/download/v0.0.23/openshell-aarch64-unknown-linux-musl.tar.gz",
|
||||
"hash": "sha256-x+TMlj8sc68rbkxwW80NrmyC0xaeC81TJMNEtUNhOLg="
|
||||
},
|
||||
"x86_64-linux": {
|
||||
"url": "https://github.com/NVIDIA/OpenShell/releases/download/v0.0.23/openshell-x86_64-unknown-linux-musl.tar.gz",
|
||||
"hash": "sha256-WLmYWn7mCC6VzUFEFN/O49hui81U0zPI6f3E5Hc9SjI="
|
||||
},
|
||||
"aarch64-darwin": {
|
||||
"url": "https://github.com/NVIDIA/OpenShell/releases/download/v0.0.23/openshell-aarch64-apple-darwin.tar.gz",
|
||||
"hash": "sha256-Bm8YP+7+CRKjfjNevirKRWHFBrdy3h5XV7gegvvcWXc="
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell --pure -i bash -p bash curl jq nix cacert git
|
||||
set -euo pipefail
|
||||
|
||||
# Update openshell sources.json with the latest release from GitHub.
|
||||
# Usage: ./update.sh (or via nix-update --update-script)
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
SOURCES_FILE="$SCRIPT_DIR/sources.json"
|
||||
|
||||
# Map Nix system -> GitHub release asset name
|
||||
declare -A SYSTEMS=(
|
||||
["x86_64-linux"]="openshell-x86_64-unknown-linux-musl.tar.gz"
|
||||
["aarch64-linux"]="openshell-aarch64-unknown-linux-musl.tar.gz"
|
||||
["aarch64-darwin"]="openshell-aarch64-apple-darwin.tar.gz"
|
||||
)
|
||||
|
||||
echo "Fetching latest openshell release..."
|
||||
LATEST=$(curl -fsSL "https://api.github.com/repos/NVIDIA/OpenShell/releases/latest")
|
||||
VERSION=$(echo "$LATEST" | jq -r '.tag_name')
|
||||
echo "Latest version: $VERSION"
|
||||
|
||||
CURRENT_VERSION=$(jq -r '.version' "$SOURCES_FILE")
|
||||
if [[ "$VERSION" == "$CURRENT_VERSION" ]]; then
|
||||
echo "Already at latest version $VERSION, nothing to do."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
NEW_SOURCES="{}"
|
||||
|
||||
for NIX_SYSTEM in "${!SYSTEMS[@]}"; do
|
||||
ASSET_NAME="${SYSTEMS[$NIX_SYSTEM]}"
|
||||
URL="https://github.com/NVIDIA/OpenShell/releases/download/${VERSION}/${ASSET_NAME}"
|
||||
|
||||
echo "Fetching hash for $NIX_SYSTEM ($URL)..."
|
||||
HASH=$(nix-prefetch-url --type sha256 "$URL" 2>/dev/null)
|
||||
SRI=$(nix hash to-sri --type sha256 "$HASH")
|
||||
|
||||
NEW_SOURCES=$(echo "$NEW_SOURCES" | jq \
|
||||
--arg sys "$NIX_SYSTEM" \
|
||||
--arg url "$URL" \
|
||||
--arg hash "$SRI" \
|
||||
'. + {($sys): {url: $url, hash: $hash}}')
|
||||
done
|
||||
|
||||
jq -n \
|
||||
--arg version "$VERSION" \
|
||||
--argjson sources "$NEW_SOURCES" \
|
||||
'{"version": $version, "sources": $sources}' \
|
||||
> "$SOURCES_FILE"
|
||||
|
||||
echo "Updated $SOURCES_FILE to $VERSION"
|
||||
|
||||
# Commit when running in CI or via nix-update
|
||||
if [[ -d "$SCRIPT_DIR/../../.git" ]] || git rev-parse --is-inside-work-tree >/dev/null 2>&1; then
|
||||
NIXPKGS_ROOT=$(git -C "$SCRIPT_DIR" rev-parse --show-toplevel 2>/dev/null || true)
|
||||
if [[ -n "$NIXPKGS_ROOT" && -n "$(git -C "$NIXPKGS_ROOT" status --porcelain "$SOURCES_FILE")" ]]; then
|
||||
CLEAN_VERSION="${VERSION#v}"
|
||||
git -C "$NIXPKGS_ROOT" add "$SOURCES_FILE"
|
||||
git -C "$NIXPKGS_ROOT" commit -m "openshell: ${CURRENT_VERSION#v} -> ${CLEAN_VERSION}"
|
||||
echo "Committed update to git"
|
||||
fi
|
||||
fi
|
||||
@@ -1,68 +0,0 @@
|
||||
{
|
||||
lib,
|
||||
buildGoModule,
|
||||
fetchFromGitHub,
|
||||
gitMinimal,
|
||||
makeWrapper, # Add this
|
||||
nix-update-script,
|
||||
opencode,
|
||||
td,
|
||||
tmux,
|
||||
versionCheckHook,
|
||||
writableTmpDirAsHomeHook,
|
||||
}:
|
||||
buildGoModule (finalAttrs: {
|
||||
pname = "sidecar";
|
||||
version = "0.84.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "marcus";
|
||||
repo = "sidecar";
|
||||
tag = "v${finalAttrs.version}";
|
||||
hash = "sha256-80ldZlaZ99ti8dvw+Awev7ucz03iOVD2yzz/+IFHDvA=";
|
||||
};
|
||||
|
||||
vendorHash = "sha256-IDD+hQZODNPj+Gy9CX5GFdMcsvt75aFLpabXZehAjaw=";
|
||||
|
||||
subPackages = ["cmd/sidecar"];
|
||||
|
||||
ldflags = [
|
||||
"-s"
|
||||
"-w"
|
||||
"-X main.Version=v${finalAttrs.version}" # Can combine these
|
||||
];
|
||||
|
||||
nativeBuildInputs = [makeWrapper]; # Add this
|
||||
|
||||
nativeCheckInputs = [
|
||||
gitMinimal
|
||||
writableTmpDirAsHomeHook
|
||||
];
|
||||
|
||||
nativeInstallCheckInputs = [
|
||||
versionCheckHook
|
||||
writableTmpDirAsHomeHook
|
||||
];
|
||||
|
||||
versionCheckProgramArg = "--version";
|
||||
doInstallCheck = true;
|
||||
doCheck = false;
|
||||
|
||||
postInstall = ''
|
||||
wrapProgram $out/bin/sidecar \
|
||||
--prefix PATH : ${lib.makeBinPath [opencode td tmux]}
|
||||
'';
|
||||
|
||||
passthru = {
|
||||
updateScript = nix-update-script {};
|
||||
};
|
||||
|
||||
meta = {
|
||||
description = "Use sidecar next to CLI agents for diffs, file trees, conversation history, and task management with td";
|
||||
homepage = "https://github.com/marcus/sidecar";
|
||||
changelog = "https://github.com/marcus/sidecar/releases/tag/v${finalAttrs.version}";
|
||||
license = lib.licenses.mit;
|
||||
mainProgram = "sidecar";
|
||||
platforms = lib.platforms.unix;
|
||||
};
|
||||
})
|
||||
@@ -6,7 +6,6 @@
|
||||
wtype,
|
||||
libnotify,
|
||||
pipewire,
|
||||
procps,
|
||||
busybox,
|
||||
}: let
|
||||
script = writeShellScriptBin "stt-ptt" ''
|
||||
@@ -16,6 +15,7 @@
|
||||
CACHE_DIR="''${XDG_CACHE_HOME:-$HOME/.cache}/stt-ptt"
|
||||
MODEL_DIR="''${XDG_DATA_HOME:-$HOME/.local/share}/stt-ptt/models"
|
||||
AUDIO="$CACHE_DIR/stt.wav"
|
||||
PID_FILE="$CACHE_DIR/stt.pid"
|
||||
|
||||
# Configurable via environment
|
||||
STT_MODEL="''${STT_MODEL:-$MODEL_DIR/ggml-large-v3-turbo.bin}"
|
||||
@@ -26,31 +26,26 @@
|
||||
PW_RECORD="${pipewire}/bin/pw-record"
|
||||
WHISPER="${whisper-cpp}/bin/whisper-cli"
|
||||
WTYPE="${wtype}/bin/wtype"
|
||||
PKILL="${procps}/bin/pkill"
|
||||
MKDIR="${busybox}/bin/mkdir"
|
||||
RM="${busybox}/bin/rm"
|
||||
CAT="${busybox}/bin/cat"
|
||||
KILL="${busybox}/bin/kill"
|
||||
TR="${busybox}/bin/tr"
|
||||
SED="${busybox}/bin/sed"
|
||||
SLEEP="${busybox}/bin/sleep"
|
||||
|
||||
# Ensure cache directory exists
|
||||
"$MKDIR" -p "$CACHE_DIR"
|
||||
|
||||
# Kill any existing pw-record for this audio file (prevents orphan nodes)
|
||||
kill_existing() {
|
||||
"$PKILL" -f "pw-record.*$AUDIO" 2>/dev/null
|
||||
"$SLEEP" 0.1
|
||||
}
|
||||
|
||||
case "''${1:-}" in
|
||||
start)
|
||||
kill_existing
|
||||
"$RM" -f "$AUDIO"
|
||||
"$RM" -f "$AUDIO" "$PID_FILE"
|
||||
"$NOTIFY" -t "$STT_NOTIFY_TIMEOUT" -a "stt-ptt" "Recording..."
|
||||
"$PW_RECORD" --rate=16000 --channels=1 "$AUDIO" &
|
||||
echo $! > "$PID_FILE"
|
||||
;;
|
||||
stop)
|
||||
kill_existing
|
||||
[[ -f "$PID_FILE" ]] && "$KILL" "$("$CAT" "$PID_FILE")" 2>/dev/null
|
||||
"$RM" -f "$PID_FILE"
|
||||
|
||||
if [[ -f "$AUDIO" ]]; then
|
||||
if [[ ! -f "$STT_MODEL" ]]; then
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
{
|
||||
lib,
|
||||
buildGoModule,
|
||||
fetchFromGitHub,
|
||||
gitMinimal,
|
||||
nix-update-script,
|
||||
versionCheckHook,
|
||||
writableTmpDirAsHomeHook,
|
||||
}:
|
||||
buildGoModule (finalAttrs: {
|
||||
pname = "td";
|
||||
version = "0.44.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "marcus";
|
||||
repo = "td";
|
||||
tag = "v${finalAttrs.version}";
|
||||
hash = "sha256-k1OCK6LE99fHLuxv8HZUW8cSn2Wmk74J7kb6Mi5ZpVw=";
|
||||
};
|
||||
|
||||
vendorHash = "sha256-hFFG+vLXcL2NNdLQvQZ1hzu++pp5AkbFOPQS10wtsec=";
|
||||
|
||||
ldflags = [
|
||||
"-s"
|
||||
"-w"
|
||||
"-X"
|
||||
"main.Version=v${finalAttrs.version}"
|
||||
];
|
||||
|
||||
nativeCheckInputs = [gitMinimal writableTmpDirAsHomeHook];
|
||||
|
||||
nativeInstallCheckInputs = [versionCheckHook writableTmpDirAsHomeHook];
|
||||
versionCheckProgramArg = "version";
|
||||
doInstallCheck = true;
|
||||
|
||||
doCheck = false;
|
||||
|
||||
passthru.updateScript = nix-update-script {};
|
||||
|
||||
meta = with lib; {
|
||||
description = "Minimalist CLI for tracking tasks across AI coding sessions";
|
||||
homepage = "https://github.com/marcus/td";
|
||||
license = licenses.mit;
|
||||
mainProgram = "td";
|
||||
platforms = platforms.unix;
|
||||
};
|
||||
})
|
||||
@@ -1,34 +0,0 @@
|
||||
{
|
||||
appimageTools,
|
||||
fetchurl,
|
||||
lib,
|
||||
}: let
|
||||
pname = "vibetyper";
|
||||
version = "1.2.3";
|
||||
src = fetchurl {
|
||||
url = "https://cdn.vibetyper.com/releases/linux/VibeTyper.AppImage";
|
||||
sha256 = "sha256-6uGXw2nxb0sGkcMDTWBlL3PuwBfVodhgqfgZT1Ncs40=";
|
||||
};
|
||||
appimageContents = appimageTools.extractType2 {inherit pname version src;};
|
||||
in
|
||||
appimageTools.wrapType2 {
|
||||
inherit pname version src;
|
||||
|
||||
extraPkgs = pkgs: [pkgs.fuse2];
|
||||
|
||||
extraInstallCommands = ''
|
||||
install -m 444 -D ${appimageContents}/vibe-typer.desktop -t $out/share/applications
|
||||
substituteInPlace $out/share/applications/vibe-typer.desktop \
|
||||
--replace 'Exec=AppRun --no-sandbox' 'Exec=${pname}'
|
||||
install -m 444 -D ${appimageContents}/vibe-typer.png \
|
||||
$out/share/icons/hicolor/512x512/apps/vibe-typer.png
|
||||
'';
|
||||
|
||||
meta = {
|
||||
description = "VibeTyper - AI-powered typing assistant";
|
||||
homepage = "https://vibetyper.com";
|
||||
license = lib.licenses.unfree;
|
||||
platforms = lib.platforms.linux;
|
||||
mainProgram = "vibetyper";
|
||||
};
|
||||
}
|
||||
@@ -1,109 +0,0 @@
|
||||
# AI coding agent development environment with coding rules
|
||||
# Sets up coding rules for OpenCode and Pi, plus useful companion tools.
|
||||
# Usage: nix develop .#coding
|
||||
#
|
||||
# To enable coding rules, add the agents input to your flake:
|
||||
# agents = {
|
||||
# url = "git+https://code.m3ta.dev/m3tam3re/AGENTS";
|
||||
# flake = false;
|
||||
# };
|
||||
{
|
||||
pkgs,
|
||||
lib ? pkgs.lib,
|
||||
inputs ? null,
|
||||
agents ? null,
|
||||
}: let
|
||||
# Import the coding-rules library
|
||||
m3taLib = import ../lib {lib = pkgs.lib;};
|
||||
|
||||
# Import custom packages
|
||||
customPackages = import ../pkgs {inherit pkgs inputs;};
|
||||
|
||||
# Create rules configuration only if agents input is provided
|
||||
rulesConfig = lib.optionalAttrs (agents != null) {
|
||||
rules = m3taLib.coding-rules.mkCodingRules {
|
||||
inherit agents;
|
||||
|
||||
# Languages relevant to this repository
|
||||
languages = ["nix" "python" "shell"];
|
||||
|
||||
# Frameworks used in this repo
|
||||
frameworks = ["n8n"];
|
||||
|
||||
# Standard concerns for development
|
||||
concerns = [
|
||||
"coding-style"
|
||||
"naming"
|
||||
"documentation"
|
||||
"testing"
|
||||
"git-workflow"
|
||||
"project-structure"
|
||||
];
|
||||
|
||||
# Also append rules to AGENTS.md for Pi agent discovery
|
||||
forPi = true;
|
||||
};
|
||||
};
|
||||
in
|
||||
pkgs.mkShell {
|
||||
name = "coding";
|
||||
|
||||
# Development tools
|
||||
buildInputs = with pkgs; [
|
||||
# Task management for AI coding sessions
|
||||
customPackages.td
|
||||
|
||||
# Companion tool for CLI agents (diffs, file trees, task management)
|
||||
customPackages.sidecar
|
||||
|
||||
# Code analysis tools
|
||||
|
||||
# Nix development tools (for this repo)
|
||||
nil
|
||||
alejandra
|
||||
statix
|
||||
deadnix
|
||||
];
|
||||
|
||||
shellHook = ''
|
||||
echo "🤖 AI Coding Environment"
|
||||
echo ""
|
||||
|
||||
${
|
||||
if (agents != null)
|
||||
then ''
|
||||
# Set up coding rules for OpenCode + Pi
|
||||
${rulesConfig.rules.shellHook}
|
||||
|
||||
echo "✅ Coding rules configured (OpenCode + Pi)"
|
||||
echo " Languages: nix, python, shell"
|
||||
echo " Frameworks: n8n"
|
||||
echo " Concerns: coding-style, naming, documentation, testing, git-workflow, project-structure"
|
||||
''
|
||||
else ''
|
||||
echo "⚠️ Coding rules not configured"
|
||||
echo ""
|
||||
echo "To enable, add the agents input to your flake.nix:"
|
||||
echo ""
|
||||
echo " agents = {"
|
||||
echo " url = \"git+https://code.m3ta.dev/m3tam3re/AGENTS\";"
|
||||
echo " flake = false;"
|
||||
echo " };"
|
||||
''
|
||||
}
|
||||
|
||||
echo ""
|
||||
echo "Available tools:"
|
||||
echo " opencode - AI coding agent"
|
||||
echo " td usage --new-session - View current tasks"
|
||||
echo " sidecar - Companion tool (diffs, file trees, tasks)"
|
||||
echo " code2prompt - Convert code to prompts"
|
||||
echo ""
|
||||
echo "Nix development tools:"
|
||||
echo " nix flake check - Check flake validity"
|
||||
echo " nix fmt . - Format Nix files"
|
||||
echo " statix check . - Lint Nix files"
|
||||
echo " deadnix . - Find dead code"
|
||||
echo ""
|
||||
'';
|
||||
}
|
||||
@@ -1,11 +1,7 @@
|
||||
# Development shells for various programming environments
|
||||
# Each shell can be accessed via: nix develop .#<shell-name>
|
||||
# Or used in home-manager/system configs
|
||||
{
|
||||
pkgs,
|
||||
inputs,
|
||||
agents ? null,
|
||||
}: {
|
||||
{pkgs}: {
|
||||
# Default shell for working on this repository
|
||||
default = pkgs.mkShell {
|
||||
name = "m3ta-nixpkgs-dev";
|
||||
@@ -31,7 +27,6 @@
|
||||
};
|
||||
|
||||
# Import all individual shell environments
|
||||
python = import ./python.nix {inherit pkgs inputs;};
|
||||
devops = import ./devops.nix {inherit pkgs inputs;};
|
||||
coding = import ./coding.nix {inherit pkgs inputs agents;};
|
||||
python = import ./python.nix {inherit pkgs;};
|
||||
devops = import ./devops.nix {inherit pkgs;};
|
||||
}
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
# DevOps development environment
|
||||
# Usage: nix develop .#devops
|
||||
{
|
||||
pkgs,
|
||||
inputs ? null,
|
||||
}:
|
||||
{pkgs}:
|
||||
pkgs.mkShell {
|
||||
name = "devops-dev";
|
||||
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
# Modern Python development environment with marimo and uv — Nushell version
|
||||
# Usage: nix develop .#python (drops into Nushell)
|
||||
{
|
||||
pkgs,
|
||||
inputs ? null,
|
||||
}: let
|
||||
{pkgs}: let
|
||||
# Use the latest Python available in nixpkgs
|
||||
python = pkgs.python313;
|
||||
in
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
# Smoke tests for lib/agents.nix
|
||||
# Verifies the library imports correctly and exports expected functions.
|
||||
# Actual renderer derivations are verified by flake check building packages.
|
||||
{
|
||||
lib,
|
||||
pkgs,
|
||||
}: let
|
||||
agentsLib = (import ../../lib {inherit lib;}).agents;
|
||||
in
|
||||
pkgs.runCommand "lib-agents-tests" {} ''
|
||||
echo "Running lib agents smoke tests..."
|
||||
|
||||
# Verify all expected functions exist
|
||||
${lib.optionalString (agentsLib ? loadCanonical) ''echo "1. pass: loadCanonical exists"''}
|
||||
${lib.optionalString (!(agentsLib ? loadCanonical)) ''echo "1. FAIL: loadCanonical missing" && exit 1''}
|
||||
|
||||
${lib.optionalString (agentsLib ? renderForTool) ''echo "2. pass: renderForTool exists"''}
|
||||
${lib.optionalString (!(agentsLib ? renderForTool)) ''echo "2. FAIL: renderForTool missing" && exit 1''}
|
||||
|
||||
${lib.optionalString (agentsLib ? renderForOpencode) ''echo "3. pass: renderForOpencode exists"''}
|
||||
${lib.optionalString (!(agentsLib ? renderForOpencode)) ''echo "3. FAIL: renderForOpencode missing" && exit 1''}
|
||||
|
||||
${lib.optionalString (agentsLib ? renderForPi) ''echo "4. pass: renderForPi exists"''}
|
||||
${lib.optionalString (!(agentsLib ? renderForPi)) ''echo "4. FAIL: renderForPi missing" && exit 1''}
|
||||
|
||||
${lib.optionalString (agentsLib ? shellHookForTool) ''echo "5. pass: shellHookForTool exists"''}
|
||||
${lib.optionalString (!(agentsLib ? shellHookForTool)) ''echo "5. FAIL: shellHookForTool missing" && exit 1''}
|
||||
|
||||
echo "All smoke tests passed"
|
||||
touch $out
|
||||
''
|
||||
@@ -1,89 +0,0 @@
|
||||
{
|
||||
lib,
|
||||
pkgs,
|
||||
}: let
|
||||
codingRulesLib = (import ../../lib {inherit lib;}).coding-rules;
|
||||
|
||||
# Test 1: instructions are generated correctly with custom rulesDir
|
||||
testInstructions = let
|
||||
rules = codingRulesLib.mkCodingRules {
|
||||
agents = "/tmp/fake-agents";
|
||||
languages = ["python"];
|
||||
concerns = ["naming"];
|
||||
rulesDir = ".coding-rules";
|
||||
};
|
||||
in
|
||||
assert rules.instructions
|
||||
== [
|
||||
".coding-rules/concerns/naming.md"
|
||||
".coding-rules/languages/python.md"
|
||||
]; "pass: instructions";
|
||||
|
||||
# Test 2: default rulesDir is .opencode-rules
|
||||
testDefaultRulesDir = let
|
||||
rules = codingRulesLib.mkCodingRules {
|
||||
agents = "/tmp/fake-agents";
|
||||
};
|
||||
hasCorrectPrefix = builtins.all (s: builtins.substring 0 15 s == ".opencode-rules") rules.instructions;
|
||||
in
|
||||
assert hasCorrectPrefix == true; "pass: default rulesDir";
|
||||
|
||||
# Test 3: shellHook contains both the symlink command and the config generation
|
||||
testShellHook = let
|
||||
rules = codingRulesLib.mkCodingRules {
|
||||
agents = "/tmp/fake-agents";
|
||||
};
|
||||
hook = rules.shellHook;
|
||||
hasSymlink = builtins.match ".*ln -sfn.*" hook != null;
|
||||
hasConfigGen = builtins.match ".*coding-rules.json.*" hook != null;
|
||||
in
|
||||
assert hasSymlink;
|
||||
assert hasConfigGen; "pass: shellHook";
|
||||
|
||||
# Test 4: forPi=false does not include AGENTS.md logic in shellHook
|
||||
testForPiDisabled = let
|
||||
rules = codingRulesLib.mkCodingRules {
|
||||
agents = "/tmp/fake-agents";
|
||||
forPi = false;
|
||||
};
|
||||
hook = rules.shellHook;
|
||||
hasPiBlock = builtins.match ".*CODING-RULES:START.*" hook != null;
|
||||
in
|
||||
assert hasPiBlock == false; "pass: forPi disabled";
|
||||
|
||||
# Test 5: mkRulesMdSection produces empty string for empty concerns
|
||||
testEmptyRulesMdSection = let
|
||||
section = codingRulesLib.mkRulesMdSection {
|
||||
agents = "/tmp/fake-agents";
|
||||
concerns = [];
|
||||
languages = [];
|
||||
frameworks = [];
|
||||
};
|
||||
in
|
||||
assert section == ""; "pass: empty mkRulesMdSection";
|
||||
|
||||
# Test 6: mkRulesMdSection wraps content with markers
|
||||
testRulesMdSection = let
|
||||
# Use a simple file path that won't be read (concatRulesMd returns empty
|
||||
# when files don't exist, so we just verify the function is callable)
|
||||
section = codingRulesLib.mkRulesMdSection {
|
||||
agents = "/tmp/fake-agents";
|
||||
concerns = [];
|
||||
languages = [];
|
||||
frameworks = [];
|
||||
};
|
||||
# After fix: mkRulesMdSection returns "" for empty rules, not a string with markers
|
||||
in
|
||||
assert section == ""; "pass: mkRulesMdSection empty case";
|
||||
in
|
||||
pkgs.runCommand "lib-coding-rules-tests" {} ''
|
||||
echo "Running lib coding-rules tests..."
|
||||
echo "1. ${testInstructions}"
|
||||
echo "2. ${testDefaultRulesDir}"
|
||||
echo "3. ${testShellHook}"
|
||||
echo "4. ${testForPiDisabled}"
|
||||
echo "5. ${testEmptyRulesMdSection}"
|
||||
echo "6. ${testRulesMdSection}"
|
||||
echo "All tests passed"
|
||||
touch $out
|
||||
''
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
coding-rules = import ./coding-rules-test.nix;
|
||||
agents = import ./agents-test.nix;
|
||||
}
|
||||
Reference in New Issue
Block a user