Mirror of https://github.com/basnijholt/compose-farm.git (synced 2026-02-03 14:13:26 +00:00)

# Compare commits

## 65 Commits

```
81e1a482f4  435b014251  58585ac73c  5a848ec416  b4595cb117
5f1c31b780  9974f87976  8b16484ce2  d75f9cca64  7ccb0734a2
61a845fad8  e7efae0153  b4ebe15dd1  9f55dcdd6e  0694bbe56d
3045948d0a  1fa17b4e07  cd25a1914c  a71200b199  967d68b14a
b7614aeab7  d931784935  4755065229  e86bbf7681  be136eb916
78a223878f  f5be23d626  3bdc483c2a  3a3591a0f7  7f8ea49d7f
1e67bde96c  d8353dbb7e  2e6146a94b  87849a8161  c8bf792a9a
d37295fbee  266f541d35  aabdd550ba  8ff60a1e3e  2497bd727a
e37d9d87ba  80a1906d90  282de12336  2c5308aea3  5057202938
5e1b9987dd  d9c26f7f2c  adfcd4bb31  95f7d9c3cf  4c1674cfd8
f65ca8420e  85aff2c271  61ca24bb8e  ed36588358  80c8079a8c
763bedf9f6  641f7e91a8  4e8e925d59  d84858dcfb  3121ee04eb
a795132a04  a6e491575a  78bf90afd9  76b60bdd96  98bfb1bf6d
```

**.gitattributes** (vendored, new file, +2)

```diff
@@ -0,0 +1,2 @@
+*.gif filter=lfs diff=lfs merge=lfs -text
+*.webm filter=lfs diff=lfs merge=lfs -text
```

**.github/workflows/ci.yml** (vendored, 24 lines changed)

```diff
@@ -27,8 +27,8 @@ jobs:
       - name: Install dependencies
         run: uv sync --all-extras --dev
 
-      - name: Run tests
-        run: uv run pytest
+      - name: Run tests (excluding browser tests)
+        run: uv run pytest -m "not browser"
 
       - name: Upload coverage reports to Codecov
         if: matrix.os == 'ubuntu-latest' && matrix.python-version == '3.13'
@@ -36,6 +36,26 @@ jobs:
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
 
+  browser-tests:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v6
+
+      - name: Install uv
+        uses: astral-sh/setup-uv@v7
+
+      - name: Set up Python
+        run: uv python install 3.13
+
+      - name: Install dependencies
+        run: uv sync --all-extras --dev
+
+      - name: Install Playwright browsers
+        run: uv run playwright install chromium --with-deps
+
+      - name: Run browser tests
+        run: uv run pytest -m browser -v --no-cov
+
   lint:
     runs-on: ubuntu-latest
     steps:
```

**.github/workflows/docs.yml** (vendored, new file, +58)

```yaml
name: Docs

on:
  push:
    branches: [main]
    paths:
      - "docs/**"
      - "zensical.toml"
      - ".github/workflows/docs.yml"
  workflow_dispatch:

permissions:
  contents: read
  pages: write
  id-token: write

concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          lfs: true

      - name: Install uv
        uses: astral-sh/setup-uv@v4

      - name: Set up Python
        run: uv python install 3.12

      - name: Install Zensical
        run: uv tool install zensical

      - name: Build docs
        run: zensical build

      - name: Setup Pages
        uses: actions/configure-pages@v5

      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: "./site"

  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
```

**.prompts/docs-review.md** (new file, +94)

Review all documentation in this repository for accuracy, completeness, and consistency. Cross-reference documentation against the actual codebase to identify issues.

## Scope

Review all documentation files:
- docs/*.md (primary documentation)
- README.md (repository landing page)
- CLAUDE.md (development guidelines)
- examples/README.md (example configurations)

## Review Checklist

### 1. Command Documentation

For each documented command, verify against the CLI source code:

- Command exists in codebase
- All options are documented with correct names, types, and defaults
- Short options (-x) match long options (--xxx)
- Examples would work as written
- Check for undocumented commands or options

Run `--help` for each command to verify.

### 2. Configuration Documentation

Verify against Pydantic models in the config module:

- All config keys are documented
- Types match Pydantic field types
- Required vs optional fields are correct
- Default values are accurate
- Config file search order matches code
- Example YAML is valid and uses current schema

### 3. Architecture Documentation

Verify against actual directory structure:

- File paths match actual source code locations
- All modules listed actually exist
- No modules are missing from the list
- Component descriptions match code functionality
- CLI module list includes all command files

### 4. State and Data Files

Verify against state and path modules:

- State file name and location are correct
- State file format matches actual structure
- Log file name and location are correct
- What triggers state/log updates is accurate

### 5. Installation Documentation

Verify against pyproject.toml:

- Python version requirement matches requires-python
- Package name is correct
- Optional dependencies are documented
- CLI entry points are mentioned
- Installation methods work as documented

### 6. Feature Claims

For each claimed feature, verify it exists and works as described.

### 7. Cross-Reference Consistency

Check for conflicts between documentation files:

- README vs docs/index.md (should be consistent)
- CLAUDE.md vs actual code structure
- Command tables match across files
- Config examples are consistent

## Output Format

Provide findings in these categories:

1. **Critical Issues**: Incorrect information that would cause user problems
2. **Inaccuracies**: Technical errors, wrong defaults, incorrect paths
3. **Missing Documentation**: Features/commands that exist but aren't documented
4. **Outdated Content**: Information that was once true but no longer is
5. **Inconsistencies**: Conflicts between different documentation files
6. **Minor Issues**: Typos, formatting, unclear wording
7. **Verified Accurate**: Sections confirmed to be correct

For each issue, include:
- File path and line number (if applicable)
- What the documentation says
- What the code actually does
- Suggested fix

**CLAUDE.md** (69 lines changed)

````diff
@@ -9,15 +9,17 @@
 ## Architecture
 
 ```
-compose_farm/
+src/compose_farm/
 ├── cli/              # CLI subpackage
 │   ├── __init__.py   # Imports modules to trigger command registration
 │   ├── app.py        # Shared Typer app instance, version callback
 │   ├── common.py     # Shared helpers, options, progress bar utilities
-│   ├── config.py     # Config subcommand (init, show, path, validate, edit)
+│   ├── config.py     # Config subcommand (init, show, path, validate, edit, symlink)
 │   ├── lifecycle.py  # up, down, pull, restart, update, apply commands
 │   ├── management.py # refresh, check, init-network, traefik-file commands
-│   └── monitoring.py # logs, ps, stats commands
+│   ├── monitoring.py # logs, ps, stats commands
+│   ├── ssh.py        # SSH key management (setup, status, keygen)
+│   └── web.py        # Web UI server command
 ├── config.py         # Pydantic models, YAML loading
 ├── compose.py        # Compose file parsing (.env, ports, volumes, networks)
 ├── console.py        # Shared Rich console instances
@@ -25,13 +27,22 @@ compose_farm/
 ├── operations.py     # Business logic (up, migrate, discover, preflight checks)
 ├── state.py          # Deployment state tracking (which service on which host)
 ├── logs.py           # Image digest snapshots (dockerfarm-log.toml)
-└── traefik.py        # Traefik file-provider config generation from labels
+├── paths.py          # Path utilities, config file discovery
+├── ssh_keys.py       # SSH key path constants and utilities
+├── traefik.py        # Traefik file-provider config generation from labels
+└── web/              # Web UI (FastAPI + HTMX)
 ```
 
+## Web UI Icons
+
+Icons use [Lucide](https://lucide.dev/). Add new icons as macros in `web/templates/partials/icons.html` by copying SVG paths from their site. The `action_btn`, `stat_card`, and `collapse` macros in `components.html` accept an optional `icon` parameter.
+
+## HTMX Patterns
+
+- **Multi-element refresh**: Use custom events, not `hx-swap-oob`. Elements have `hx-trigger="cf:refresh from:body"` and JS calls `document.body.dispatchEvent(new CustomEvent('cf:refresh'))`. Simpler to debug/test.
+- **SPA navigation**: Sidebar uses `hx-boost="true"` to AJAX-ify links.
+- **Attribute inheritance**: Set `hx-target`/`hx-swap` on parent elements.
+
 ## Key Design Decisions
 
 1. **Hybrid SSH approach**: asyncssh for parallel streaming with prefixes; native `ssh -t` for raw mode (progress bars)
@@ -43,6 +54,27 @@ Icons use [Lucide](https://lucide.dev/). Add new icons as macros in `web/templat
 7. **State tracking**: Tracks where services are deployed for auto-migration
 8. **Pre-flight checks**: Verifies NFS mounts and Docker networks exist before starting/migrating
 
+## Code Style
+
+- **Imports at top level**: Never add imports inside functions unless they are explicitly marked with `# noqa: PLC0415` and a comment explaining it speeds up CLI startup. Heavy modules like `pydantic`, `yaml`, and `rich.table` are lazily imported to keep `cf --help` fast.
+
+## Testing
+
+Run tests with `uv run pytest`. Browser tests require Chromium (system-installed or via `playwright install chromium`):
+
+```bash
+# Unit tests only (skip browser tests, can parallelize)
+uv run pytest -m "not browser" -n auto
+
+# Browser tests only (run sequentially, no coverage)
+uv run pytest -m browser --no-cov
+
+# All tests
+uv run pytest --no-cov
+```
+
+Browser tests are marked with `@pytest.mark.browser`. They use Playwright to test HTMX behavior, JavaScript functionality (sidebar filter, command palette, terminals), and content stability during navigation. Run sequentially (no `-n`) to avoid resource contention.
+
 ## Communication Notes
 
 - Clarify ambiguous wording (e.g., homophones like "right"/"write", "their"/"there").
@@ -53,6 +85,29 @@ Icons use [Lucide](https://lucide.dev/). Add new icons as macros in `web/templat
 - **NEVER merge anything into main.** Always commit directly or use fast-forward/rebase.
 - Never force push.
 
+## Pull Requests
+
+- Never include unchecked checklists (e.g., `- [ ] ...`) in PR descriptions. Either omit the checklist or use checked items.
+- **NEVER run `gh pr merge`**. PRs are merged via the GitHub UI, not the CLI.
+
+## Releases
+
+Use `gh release create` to create releases. The tag is created automatically.
+
+```bash
+# Check current version
+git tag --sort=-v:refname | head -1
+
+# Create release (minor version bump: v0.21.1 -> v0.22.0)
+gh release create v0.22.0 --title "v0.22.0" --notes "release notes here"
+```
+
+Versioning:
+- **Patch** (v0.21.0 → v0.21.1): Bug fixes
+- **Minor** (v0.21.1 → v0.22.0): New features, non-breaking changes
+
+Write release notes manually describing what changed. Group by features and bug fixes.
+
 ## Commands Quick Reference
 
 CLI available as `cf` or `compose-farm`.
@@ -63,7 +118,7 @@ CLI available as `cf` or `compose-farm`.
 | `down` | Stop services (`docker compose down`). Use `--orphaned` to stop services removed from config |
 | `pull` | Pull latest images |
 | `restart` | `down` + `up -d` |
-| `update` | `pull` + `down` + `up -d` |
+| `update` | `pull` + `build` + `down` + `up -d` |
 | `apply` | Make reality match config: migrate services + stop orphans. Use `--dry-run` to preview |
 | `logs` | Show service logs |
 | `ps` | Show status of all services |
@@ -72,4 +127,6 @@ CLI available as `cf` or `compose-farm`.
 | `check` | Validate config, traefik labels, mounts, networks; show host compatibility |
 | `init-network` | Create Docker network on hosts with consistent subnet/gateway |
 | `traefik-file` | Generate Traefik file-provider config from compose labels |
-| `config` | Manage config files (init, show, path, validate, edit) |
+| `config` | Manage config files (init, show, path, validate, edit, symlink) |
+| `ssh` | Manage SSH keys (setup, status, keygen) |
+| `web` | Start web UI server |
````
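
The lazy-import rule in the new Code Style section looks roughly like this in practice. A minimal sketch, assuming Rich is the deferred dependency; `show_services` is a hypothetical helper, not actual project code:

```python
def show_services(services: dict[str, str]) -> None:
    """Render a service table; heavy imports are deferred to first use."""
    # Imported here, not at module top, so `cf --help` stays fast.
    from rich.console import Console  # noqa: PLC0415 - speeds up CLI startup
    from rich.table import Table  # noqa: PLC0415 - speeds up CLI startup

    table = Table("Service", "Host")
    for name, host in services.items():
        table.add_row(name, host)
    Console().print(table)


show_services({"plex": "nuc", "sonarr": "hp"})
```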

**README.md** (128 lines changed)

````diff
@@ -23,6 +23,9 @@ A minimal CLI tool to run Docker Compose commands across multiple hosts via SSH.
 - [Best practices](#best-practices)
 - [What Compose Farm doesn't do](#what-compose-farm-doesnt-do)
 - [Installation](#installation)
+- [SSH Authentication](#ssh-authentication)
+  - [SSH Agent (default)](#ssh-agent-default)
+  - [Dedicated SSH Key (recommended for Docker/Web UI)](#dedicated-ssh-key-recommended-for-dockerweb-ui)
 - [Configuration](#configuration)
 - [Multi-Host Services](#multi-host-services)
 - [Config Command](#config-command)
@@ -137,8 +140,11 @@ If you need containers on different hosts to communicate seamlessly, you need Do
 ## Installation
 
 ```bash
+# One-liner (installs uv if needed)
+curl -fsSL https://raw.githubusercontent.com/basnijholt/compose-farm/main/bootstrap.sh | sh
+
+# Or if you already have uv/pip
 uv tool install compose-farm
 # or
 pip install compose-farm
 ```
@@ -159,6 +165,62 @@ docker run --rm \
 
 </details>
 
+## SSH Authentication
+
+Compose Farm uses SSH to run commands on remote hosts. There are two authentication methods:
+
+### SSH Agent (default)
+
+Works out of the box if you have an SSH agent running with your keys loaded:
+
+```bash
+# Verify your agent has keys
+ssh-add -l
+
+# Run compose-farm commands
+cf up --all
+```
+
+### Dedicated SSH Key (recommended for Docker/Web UI)
+
+When running compose-farm in Docker, the SSH agent connection can be lost (e.g., after container restart). The `cf ssh` command sets up a dedicated key that persists:
+
+```bash
+# Generate key and copy to all configured hosts
+cf ssh setup
+
+# Check status
+cf ssh status
+```
+
+This creates `~/.ssh/compose-farm/id_ed25519` (ED25519, no passphrase) and copies the public key to each host's `authorized_keys`. Compose Farm tries the SSH agent first, then falls back to this key.
+
+<details><summary>🐳 Docker volume options for SSH keys</summary>
+
+When running in Docker, mount a volume to persist the SSH keys. Choose ONE option and use it for both `cf` and `web` services:
+
+**Option 1: Host path (default)** - keys at `~/.ssh/compose-farm/id_ed25519`
+```yaml
+volumes:
+  - ~/.ssh/compose-farm:/root/.ssh
+```
+
+**Option 2: Named volume** - managed by Docker
+```yaml
+volumes:
+  - cf-ssh:/root/.ssh
+```
+
+Run setup once after starting the container (while the SSH agent still works):
+
+```bash
+docker compose exec web cf ssh setup
+```
+
+The keys will persist across restarts.
+
+</details>
+
 ## Configuration
 
 Create `~/.config/compose-farm/compose-farm.yaml` (or `./compose-farm.yaml` in your working directory):
@@ -249,7 +311,7 @@ The CLI is available as both `compose-farm` and the shorter `cf` alias.
 | `cf check` | Validate config, mounts, networks |
 | `cf init-network` | Create Docker network on hosts |
 | `cf traefik-file` | Generate Traefik file-provider config |
-| `cf config <cmd>` | Manage config files (init, show, path, validate, edit) |
+| `cf config <cmd>` | Manage config files (init, show, path, validate, edit, symlink) |
 
 All commands support `--all` to operate on all services.
 
@@ -344,10 +406,11 @@ Full `--help` output for each command. See the [Usage](#usage) table above for a
 │ check          Validate configuration, traefik labels, mounts, and networks. │
 │ init-network   Create Docker network on hosts with consistent settings.      │
 │ config         Manage compose-farm configuration files.                      │
+│ ssh            Manage SSH keys for passwordless authentication.              │
 ╰──────────────────────────────────────────────────────────────────────────────╯
 ╭─ Monitoring ─────────────────────────────────────────────────────────────────╮
 │ logs    Show service logs.                                                   │
-│ ps      Show status of all services.                                         │
+│ ps      Show status of services.                                             │
 │ stats   Show overview statistics for hosts and services.                     │
 ╰──────────────────────────────────────────────────────────────────────────────╯
 ╭─ Server ─────────────────────────────────────────────────────────────────────╮
@@ -557,12 +620,14 @@ Full `--help` output for each command. See the [Usage](#usage) table above for a
 
 This is the "reconcile" command that ensures running services match your
 config file. It will:
-1. Stop orphaned services (in state but removed from config) 2. Migrate
-services on wrong host (host in state ≠ host in config) 3. Start missing
-services (in config but not in state)
-Use --dry-run to preview changes before applying. Use --no-orphans to only
-migrate/start without stopping orphaned services. Use --full to also run 'up'
-on all services (picks up compose/env changes).
+
+1. Stop orphaned services (in state but removed from config)
+2. Migrate services on wrong host (host in state ≠ host in config)
+3. Start missing services (in config but not in state)
+
+Use --dry-run to preview changes before applying.
+Use --no-orphans to only migrate/start without stopping orphaned services.
+Use --full to also run 'up' on all services (picks up compose/env changes).
 
 ╭─ Options ────────────────────────────────────────────────────────────────────╮
 │ --dry-run  -n    Show what would change without executing                    │
@@ -636,9 +701,10 @@ Full `--help` output for each command. See the [Usage](#usage) table above for a
 
 Update local state from running services.
 
-Discovers which services are running on which hosts, updates the state file,
-and captures image digests. This is a read operation - it updates your local
-state to match reality, not the other way around.
+Discovers which services are running on which hosts, updates the state
+file, and captures image digests. This is a read operation - it updates
+your local state to match reality, not the other way around.
+
 Use 'cf apply' to make reality match your config (stop orphans, migrate).
 
 ╭─ Options ────────────────────────────────────────────────────────────────────╮
@@ -674,8 +740,10 @@ Full `--help` output for each command. See the [Usage](#usage) table above for a
 
 Validate configuration, traefik labels, mounts, and networks.
 
-Without arguments: validates all services against configured hosts. With
-service arguments: validates specific services and shows host compatibility.
+Without arguments: validates all services against configured hosts.
+With service arguments: validates specific services and shows host
+compatibility.
 
 Use --local to skip SSH-based checks for faster validation.
 
 ╭─ Arguments ──────────────────────────────────────────────────────────────────╮
@@ -714,8 +782,8 @@ Full `--help` output for each command. See the [Usage](#usage) table above for a
 Create Docker network on hosts with consistent settings.
 
 Creates an external Docker network that services can use for cross-host
-communication. Uses the same subnet/gateway on all hosts to ensure consistent
-networking.
+communication. Uses the same subnet/gateway on all hosts to ensure
+consistent networking.
 
 ╭─ Arguments ──────────────────────────────────────────────────────────────────╮
 │ hosts    [HOSTS]...    Hosts to create network on (default: all)             │
@@ -773,6 +841,21 @@ Full `--help` output for each command. See the [Usage](#usage) table above for a
 
 </details>
 
+
+<details>
+<summary>See the output of <code>cf ssh --help</code></summary>
+
+<!-- CODE:BASH:START -->
+<!-- echo '```yaml' -->
+<!-- export NO_COLOR=1 -->
+<!-- export TERM=dumb -->
+<!-- export TERMINAL_WIDTH=90 -->
+<!-- cf ssh --help -->
+<!-- echo '```' -->
+<!-- CODE:END -->
+
+</details>
+
 **Monitoring**
 
 <details>
@@ -829,11 +912,20 @@ Full `--help` output for each command. See the [Usage](#usage) table above for a
 <!-- ⚠️ This content is auto-generated by `markdown-code-runner`. -->
 ```yaml
 
-Usage: cf ps [OPTIONS]
+Usage: cf ps [OPTIONS] [SERVICES]...
 
-Show status of all services.
+Show status of services.
+
+Without arguments: shows all services (same as --all).
+With service names: shows only those services.
+With --host: shows services on that host.
 
+╭─ Arguments ──────────────────────────────────────────────────────────────────╮
+│ services    [SERVICES]...    Services to operate on                          │
+╰──────────────────────────────────────────────────────────────────────────────╯
 ╭─ Options ────────────────────────────────────────────────────────────────────╮
 │ --all     -a          Run on all services                                    │
+│ --host    -H    TEXT  Filter to services on this host                        │
 │ --config  -c    PATH  Path to config file                                    │
 │ --help    -h          Show this message and exit.                            │
 ╰──────────────────────────────────────────────────────────────────────────────╯
````
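
The README above says Compose Farm tries the SSH agent first and falls back to the dedicated key. A minimal asyncssh sketch of that fallback order (a hypothetical helper under stated assumptions: the key path matches `cf ssh setup`, but this is not the project's actual executor code):

```python
import os

import asyncssh

KEY_PATH = os.path.expanduser("~/.ssh/compose-farm/id_ed25519")  # created by `cf ssh setup`


async def connect(host: str, user: str) -> asyncssh.SSHClientConnection:
    try:
        # Default behavior: asyncssh uses the running SSH agent (SSH_AUTH_SOCK).
        return await asyncssh.connect(host, username=user)
    except (asyncssh.PermissionDenied, OSError):
        # Agent gone or its keys rejected: fall back to the dedicated key.
        return await asyncssh.connect(host, username=user, client_keys=[KEY_PATH])
```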

**bootstrap.sh** (new executable file, +29)

```sh
#!/bin/sh
# Compose Farm bootstrap script
# Usage: curl -fsSL https://raw.githubusercontent.com/basnijholt/compose-farm/main/bootstrap.sh | sh
#
# This script installs uv (if needed) and then installs compose-farm as a uv tool.

set -e

if ! command -v uv >/dev/null 2>&1; then
    echo "uv is not installed. Installing..."
    curl -LsSf https://astral.sh/uv/install.sh | sh
    echo "uv installation complete!"
    echo ""

    if [ -x ~/.local/bin/uv ]; then
        ~/.local/bin/uv tool install compose-farm
    else
        echo "Please restart your shell and run this script again"
        echo ""
        exit 0
    fi
else
    uv tool install compose-farm
fi

echo ""
echo "compose-farm is installed!"
echo "Run 'cf --help' to get started."
echo "If 'cf' is not found, restart your shell or run: source ~/.bashrc"
```

**Compose file** (name not shown)

```diff
@@ -5,6 +5,12 @@ services:
       - ${SSH_AUTH_SOCK}:/ssh-agent:ro
       # Compose directory (contains compose files AND compose-farm.yaml config)
       - ${CF_COMPOSE_DIR:-/opt/stacks}:${CF_COMPOSE_DIR:-/opt/stacks}
+      # SSH keys for passwordless auth (generated by `cf ssh setup`)
+      # Choose ONE option below (use the same option for both cf and web services):
+      # Option 1: Host path (default) - keys at ~/.ssh/compose-farm/id_ed25519
+      - ${CF_SSH_DIR:-~/.ssh/compose-farm}:/root/.ssh
+      # Option 2: Named volume - managed by Docker, shared between services
+      # - cf-ssh:/root/.ssh
     environment:
       - SSH_AUTH_SOCK=/ssh-agent
       # Config file path (state stored alongside it)
@@ -12,13 +18,21 @@ services:
 
   web:
     image: ghcr.io/basnijholt/compose-farm:latest
+    restart: unless-stopped
     command: web --host 0.0.0.0 --port 9000
     volumes:
       - ${SSH_AUTH_SOCK}:/ssh-agent:ro
       - ${CF_COMPOSE_DIR:-/opt/stacks}:${CF_COMPOSE_DIR:-/opt/stacks}
+      # SSH keys - use the SAME option as cf service above
+      # Option 1: Host path (default)
+      - ${CF_SSH_DIR:-~/.ssh/compose-farm}:/root/.ssh
+      # Option 2: Named volume
+      # - cf-ssh:/root/.ssh
     environment:
       - SSH_AUTH_SOCK=/ssh-agent
       - CF_CONFIG=${CF_COMPOSE_DIR:-/opt/stacks}/compose-farm.yaml
+      # Used to detect self-updates and run via SSH to survive container restart
+      - CF_WEB_SERVICE=compose-farm
     labels:
       - traefik.enable=true
       - traefik.http.routers.compose-farm.rule=Host(`compose-farm.${DOMAIN}`)
@@ -32,3 +46,7 @@ services:
 networks:
   mynetwork:
     external: true
+
+volumes:
+  cf-ssh:
+    # Only used if Option 2 is selected above
```

**docs/CNAME** (new file, +1)

```
compose-farm.nijho.lt
```

**docs/architecture.md** (new file, +346)

---
icon: lucide/layers
---

# Architecture

This document explains how Compose Farm works under the hood.

## Design Philosophy

Compose Farm follows three core principles:

1. **KISS** - Keep it simple. It's a thin wrapper around `docker compose` over SSH.
2. **YAGNI** - No orchestration, no service discovery, no health checks until needed.
3. **Zero changes** - Your existing compose files work unchanged.

## High-Level Architecture

```
┌─────────────────────────────────────────────────────────────────┐
│                        Compose Farm CLI                         │
│                                                                 │
│  ┌──────────┐  ┌──────────┐  ┌──────────┐  ┌──────────────────┐ │
│  │  Config  │  │  State   │  │Operations│  │     Executor     │ │
│  │  Parser  │  │ Tracker  │  │  Logic   │  │   (SSH/Local)    │ │
│  └────┬─────┘  └────┬─────┘  └────┬─────┘  └────────┬─────────┘ │
└───────┼─────────────┼─────────────┼─────────────────┼───────────┘
        │             │             │                 │
        ▼             ▼             ▼                 ▼
┌───────────────────────────────────────────────────────────────┐
│                          SSH / Local                          │
└───────────────────────────────────────────────────────────────┘
              │                               │
              ▼                               ▼
      ┌───────────────┐               ┌───────────────┐
      │   Host: nuc   │               │   Host: hp    │
      │               │               │               │
      │ docker compose│               │ docker compose│
      │    up -d      │               │    up -d      │
      └───────────────┘               └───────────────┘
```

## Core Components

### Configuration (`src/compose_farm/config.py`)

Pydantic models for YAML configuration:

- **Config** - Root configuration with compose_dir, hosts, services
- **HostConfig** - Host address and SSH user
- **ServiceConfig** - Service-to-host mappings

Key features:
- Validation with Pydantic
- Multi-host service expansion (`all` → list of hosts)
- YAML loading with sensible defaults
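
As a rough illustration, the `all` expansion normalizes a service's host spec to a concrete list. A sketch with hypothetical names; the real logic lives in the Pydantic models in `config.py`:

```python
def expand_hosts(spec: str | list[str], all_hosts: list[str]) -> list[str]:
    """Normalize a service's host spec to a concrete host list (sketch)."""
    if spec == "all":
        return list(all_hosts)  # `all` -> every configured host
    if isinstance(spec, str):
        return [spec]           # single host name
    return spec                 # explicit list of hosts


assert expand_hosts("all", ["nuc", "hp"]) == ["nuc", "hp"]
assert expand_hosts("nuc", ["nuc", "hp"]) == ["nuc"]
assert expand_hosts(["nas-1", "nas-2"], ["nuc", "nas-1", "nas-2"]) == ["nas-1", "nas-2"]
```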

### State Tracking (`src/compose_farm/state.py`)

Tracks deployment state in `compose-farm-state.yaml` (stored alongside the config file):

```yaml
deployed:
  plex: nuc
  sonarr: nuc
```

Used for:
- Detecting migrations (service moved to different host)
- Identifying orphans (services removed from config)
- `cf ps` status display

### Operations (`src/compose_farm/operations.py`)

Business logic for service operations:

- **up** - Start service, handle migration if needed
- **down** - Stop service
- **preflight checks** - Verify mounts, networks exist before operations
- **discover** - Find running services on hosts
- **migrate** - Down on old host, up on new host

### Executor (`src/compose_farm/executor.py`)

SSH and local command execution:

- **Hybrid SSH approach**: asyncssh for parallel streaming, native `ssh -t` for raw mode
- **Parallel by default**: Multiple services via `asyncio.gather`
- **Streaming output**: Real-time stdout/stderr with `[service]` prefix
- **Local detection**: Skips SSH when target matches local machine IP

### CLI (`src/compose_farm/cli/`)

Typer-based CLI with subcommand modules:

```
cli/
├── app.py         # Shared Typer app, version callback
├── common.py      # Shared helpers, options, progress utilities
├── config.py      # config subcommand (init, show, path, validate, edit, symlink)
├── lifecycle.py   # up, down, pull, restart, update, apply
├── management.py  # refresh, check, init-network, traefik-file
├── monitoring.py  # logs, ps, stats
├── ssh.py         # SSH key management (setup, status, keygen)
└── web.py         # Web UI server command
```

## Command Flow

### cf up plex

```
1. Load configuration
   └─► Parse compose-farm.yaml
   └─► Validate service exists

2. Check state
   └─► Load state.yaml
   └─► Is plex already running?
   └─► Is it on a different host? (migration needed)

3. Pre-flight checks
   └─► SSH to target host
   └─► Check compose file exists
   └─► Check required mounts exist
   └─► Check required networks exist

4. Execute migration (if needed)
   └─► SSH to old host
   └─► Run: docker compose down

5. Start service
   └─► SSH to target host
   └─► cd /opt/compose/plex
   └─► Run: docker compose up -d

6. Update state
   └─► Write new state to state.yaml

7. Generate Traefik config (if configured)
   └─► Regenerate traefik file-provider
```

### cf apply

```
1. Load configuration and state

2. Compute diff
   ├─► Orphans: in state, not in config
   ├─► Migrations: in both, different host
   └─► Missing: in config, not in state

3. Stop orphans
   └─► For each orphan: cf down

4. Migrate services
   └─► For each migration: down old, up new

5. Start missing
   └─► For each missing: cf up

6. Update state
```
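
Step 2 is plain set arithmetic over the two mappings. A minimal sketch for single-host services (hypothetical function, not the actual `operations.py` code):

```python
def compute_diff(
    desired: dict[str, str],  # service -> host from compose-farm.yaml
    state: dict[str, str],    # service -> host it is currently deployed on
) -> tuple[list[str], list[str], list[str]]:
    """Return (orphans, migrations, missing) for the reconcile step."""
    orphans = [svc for svc in state if svc not in desired]
    migrations = [svc for svc in desired if svc in state and state[svc] != desired[svc]]
    missing = [svc for svc in desired if svc not in state]
    return orphans, migrations, missing


# plex moved nuc -> hp, dozzle was removed from config, sonarr is new:
desired = {"plex": "hp", "sonarr": "nuc"}
state = {"plex": "nuc", "dozzle": "nuc"}
assert compute_diff(desired, state) == (["dozzle"], ["plex"], ["sonarr"])
```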

## SSH Execution

### Parallel Streaming (asyncssh)

For most operations, Compose Farm uses asyncssh:

```python
async def run_command(host, command):
    async with asyncssh.connect(host) as conn:
        result = await conn.run(command)
        return result.stdout, result.stderr
```

Multiple services run concurrently via `asyncio.gather`.

### Raw Mode (native ssh)

For commands needing a PTY (progress bars, interactive sessions):

```bash
ssh -t user@host "docker compose pull"
```

### Local Detection

When the target host's IP matches the local machine:

```python
if is_local(host_address):
    # Run locally, no SSH
    subprocess.run(command)
else:
    # SSH to remote
    ssh.run(command)
```

## State Management

### State File

Location: `compose-farm-state.yaml` (stored alongside the config file)

```yaml
deployed:
  plex: nuc
  sonarr: nuc
```

Image digests are stored separately in `dockerfarm-log.toml` (also in the config directory).

### State Transitions

```
Config Change      State Change    Action
─────────────────────────────────────────────────────
Add service        Missing         cf up
Remove service     Orphaned        cf down
Change host        Migration       down old, up new
No change          No change       none (or refresh)
```

### cf refresh

Syncs state with reality by querying Docker on each host:

```bash
docker ps --format '{{.Names}}'
```

Updates state.yaml to match what's actually running.

## Compose File Discovery

For each service, Compose Farm looks for compose files in:

```
{compose_dir}/{service}/
├── compose.yaml          # preferred
├── compose.yml
├── docker-compose.yml
└── docker-compose.yaml
```

First match wins.
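
A sketch of that first-match-wins lookup (hypothetical helper; the real discovery lives in the compose/paths modules):

```python
from pathlib import Path

CANDIDATES = ("compose.yaml", "compose.yml", "docker-compose.yml", "docker-compose.yaml")


def find_compose_file(compose_dir: Path, service: str) -> Path | None:
    """Return the first existing compose file for a service, or None."""
    for name in CANDIDATES:  # tuple order encodes the preference above
        candidate = compose_dir / service / name
        if candidate.is_file():
            return candidate
    return None
```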

## Traefik Integration

### Label Extraction

Compose Farm parses Traefik labels from compose files:

```yaml
services:
  plex:
    labels:
      - traefik.enable=true
      - traefik.http.routers.plex.rule=Host(`plex.example.com`)
      - traefik.http.services.plex.loadbalancer.server.port=32400
```

### File Provider Generation

Converts labels to Traefik file-provider YAML:

```yaml
http:
  routers:
    plex:
      rule: Host(`plex.example.com`)
      service: plex
  services:
    plex:
      loadBalancer:
        servers:
          - url: http://192.168.1.10:32400
```
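
The core move behind this conversion is splitting dotted label keys into a nested mapping. A rough sketch (not the actual `traefik.py` code):

```python
def labels_to_tree(labels: list[str]) -> dict:
    """Nest `a.b.c=v` label strings into {'a': {'b': {'c': 'v'}}}."""
    tree: dict = {}
    for label in labels:
        key, _, value = label.partition("=")
        *parts, leaf = key.split(".")
        node = tree
        for part in parts:
            node = node.setdefault(part, {})
        node[leaf] = value
    return tree


tree = labels_to_tree(["traefik.http.routers.plex.rule=Host(`plex.example.com`)"])
assert tree["traefik"]["http"]["routers"]["plex"]["rule"] == "Host(`plex.example.com`)"
```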

### Variable Resolution

Supports `${VAR}` and `${VAR:-default}` from:
1. Service's `.env` file
2. Current environment
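
A minimal sketch of that lookup order (hypothetical helper, assuming the `.env` values are already parsed into a dict):

```python
import os
import re

_VAR = re.compile(r"\$\{(?P<name>[A-Za-z_][A-Za-z0-9_]*)(?::-(?P<default>[^}]*))?\}")


def resolve(text: str, env_file: dict[str, str]) -> str:
    """Expand ${VAR} / ${VAR:-default}: .env first, then the environment."""
    def sub(match: re.Match) -> str:
        name = match.group("name")
        value = env_file.get(name)
        if value is None:
            value = os.environ.get(name)
        if value is None:
            value = match.group("default") or ""
        return value

    return _VAR.sub(sub, text)


# (Assumes HOST is not set in the calling environment.)
assert resolve("${HOST:-0.0.0.0}:${PORT}", {"PORT": "32400"}) == "0.0.0.0:32400"
```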

## Error Handling

### Pre-flight Failures

Before any operation, Compose Farm checks:
- SSH connectivity
- Compose file existence
- Required mounts
- Required networks

If a check fails, the operation aborts with a clear error.

### Partial Failures

When operating on multiple services:
- Each service is independent
- Failures are logged, but other services continue
- Exit code reflects overall success/failure
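
That independence maps naturally onto `asyncio.gather(..., return_exceptions=True)`. A sketch of the pattern (illustrative only; `deploy` is a stand-in for the real per-service coroutine):

```python
import asyncio


async def deploy(service: str) -> None:
    """Stand-in for a per-service operation that may fail."""
    if service == "bad":
        raise RuntimeError(f"{service}: compose file missing")


async def run_all(services: list[str]) -> int:
    results = await asyncio.gather(
        *(deploy(svc) for svc in services),
        return_exceptions=True,  # one failure does not cancel the others
    )
    failures = [r for r in results if isinstance(r, Exception)]
    for exc in failures:
        print(f"error: {exc}")   # failures are logged...
    return 1 if failures else 0  # ...and the exit code reflects the overall result


print(asyncio.run(run_all(["plex", "bad", "sonarr"])))  # prints the error, then 1
```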

## Performance Considerations

### Parallel Execution

Services are started/stopped in parallel:

```python
await asyncio.gather(*[
    up_service(service) for service in services
])
```

### SSH Multiplexing

For repeated connections to the same host, SSH reuses connections.

### Caching

- Config is parsed once per command
- State is loaded once, written once
- Host discovery results are cached for the duration of a command

## Web UI Architecture

```
┌─────────────────────────────────────────────────────────────┐
│                           Web UI                            │
│                                                             │
│  ┌─────────────┐  ┌─────────────┐  ┌─────────────────────┐  │
│  │   FastAPI   │  │    Jinja    │  │        HTMX         │  │
│  │   Backend   │  │  Templates  │  │   Dynamic Updates   │  │
│  └─────────────┘  └─────────────┘  └─────────────────────┘  │
│                                                             │
│  Pattern: Custom events, not hx-swap-oob                    │
│  Elements trigger on: cf:refresh from:body                  │
└─────────────────────────────────────────────────────────────┘
```

Icons use [Lucide](https://lucide.dev/). Add new icons as macros in `web/templates/partials/icons.html`.

**docs/assets/**: 12 new Git LFS pointer files (each is a standard `version https://git-lfs.github.com/spec/v1` pointer)

| File | LFS oid (sha256) | Size (bytes) |
|------|------------------|--------------|
| docs/assets/apply.gif | bb1372a59a4ed1ac74d3864d7a84dd5311fce4cb6c6a00bf3a574bc2f98d5595 | 895927 |
| docs/assets/apply.webm | f339a85f3d930db5a020c9f77e106edc5f44ea7dee6f68557106721493c24ef8 | 205907 |
| docs/assets/install.gif | 388aa49a1269145698f9763452aaf6b9c6232ea9229abe1dae304df558e29695 | 403442 |
| docs/assets/install.webm | 9b8bf4dcb8ee67270d4a88124b4dd4abe0dab518e73812ee73f7c66d77f146e2 | 228025 |
| docs/assets/logs.gif | 16b9a28137dfae25488e2094de85766a039457f5dca20c2d84ac72e3967c10b9 | 164237 |
| docs/assets/logs.webm | e0fbe697a1f8256ce3b9a6a64c7019d42769134df9b5b964e5abe98a29e918fd | 68242 |
| docs/assets/migration.gif | 629b8c80b98eb996b75439745676fd99a83f391ca25f778a71bd59173f814c2f | 1194931 |
| docs/assets/migration.webm | 33fd46f2d8538cc43be4cb553b3af9d8b412f282ee354b6373e2793fe41c799b | 405057 |
| docs/assets/quickstart.gif | ccd96e33faba5f297999917d89834b29d58bd2a8929eea8d62875e3d8830bd5c | 3198466 |
| docs/assets/quickstart.webm | 979a1a21303bbf284b3510981066ef05c41c1035b34392fecc7bee472116e6db | 967564 |
| docs/assets/update.gif | 2067f4967a93b7ee3a8db7750c435f41b1fccd2919f3443da4b848c20cc54f23 | 124559 |
| docs/assets/update.webm | 5471bd94e6d1b9d415547fa44de6021fdad2e1cc5b8b295680e217104aa749d6 | 98149 |

**docs/best-practices.md** (new file, +381)

---
icon: lucide/lightbulb
---

# Best Practices

Tips, limitations, and recommendations for using Compose Farm effectively.

## Limitations

### No Cross-Host Networking

Compose Farm moves containers between hosts but **does not provide cross-host networking**. Docker's internal DNS and networks don't span hosts.

**What breaks when you move a service:**

| Feature | Works? | Why |
|---------|--------|-----|
| `http://redis:6379` | No | Docker DNS doesn't cross hosts |
| Docker network names | No | Networks are per-host |
| `DATABASE_URL=postgres://db:5432` | No | Container name won't resolve |
| Host IP addresses | Yes | Use `192.168.1.10:5432` |

### What Compose Farm Doesn't Do

- No overlay networking (use Swarm/Kubernetes)
- No service discovery across hosts
- No automatic dependency tracking between compose files
- No health checks or restart policies beyond Docker's
- No secrets management beyond Docker's

## Service Organization

### Keep Dependencies Together

If services talk to each other, keep them in the same compose file on the same host:

```yaml
# /opt/compose/myapp/docker-compose.yml
services:
  app:
    image: myapp
    depends_on:
      - db
      - redis

  db:
    image: postgres

  redis:
    image: redis
```

```yaml
# compose-farm.yaml
services:
  myapp: nuc  # All three containers stay together
```

### Separate Standalone Services

Services that don't talk to other containers can run anywhere:

```yaml
services:
  # These can run on any host
  plex: nuc
  jellyfin: hp
  homeassistant: nas

  # These should stay together
  myapp: nuc  # includes app + db + redis
```

### Cross-Host Communication

If services MUST communicate across hosts, publish ports:

```yaml
# Instead of
DATABASE_URL=postgres://db:5432

# Use
DATABASE_URL=postgres://192.168.1.10:5432
```

```yaml
# And publish the port
services:
  db:
    ports:
      - "5432:5432"
```

## Multi-Host Services

### When to Use `all`

Use `all` for services that need local access to each host:

```yaml
services:
  # Need Docker socket
  dozzle: all           # Log viewer
  portainer-agent: all  # Portainer agents
  autokuma: all         # Auto-creates monitors

  # Need host metrics
  node-exporter: all    # Prometheus metrics
  promtail: all         # Log shipping
```

### Host-Specific Lists

For services on specific hosts only:

```yaml
services:
  # Only on compute nodes
  gitlab-runner: [nuc, hp]

  # Only on storage nodes
  minio: [nas-1, nas-2]
```

## Migration Safety

### Pre-flight Checks

Before migrating, Compose Farm verifies:
- The compose file is accessible on the new host
- Required mounts exist on the new host
- Required networks exist on the new host

### Data Considerations

**Compose Farm doesn't move data.** Ensure:

1. **Shared storage**: Data volumes on NFS/shared storage
2. **External databases**: Data in an external DB, not the container
3. **Backup first**: Always back up before migration

### Safe Migration Pattern

```bash
# 1. Preview changes
cf apply --dry-run

# 2. Verify the target host can run the service
cf check myservice

# 3. Apply changes
cf apply
```

## State Management

### When to Refresh

Run `cf refresh` after:
- Manual `docker compose` commands
- Container restarts
- Host reboots
- Any changes made outside Compose Farm

```bash
cf refresh --dry-run  # Preview
cf refresh            # Sync
```

### State Conflicts

If state doesn't match reality:

```bash
# See what's actually running
cf refresh --dry-run

# Sync state
cf refresh

# Then apply config
cf apply
```

## Shared Storage

### NFS Best Practices

```bash
# Mount options for Docker compatibility
nas:/compose /opt/compose nfs rw,hard,intr,rsize=8192,wsize=8192 0 0
```

### Directory Ownership

Ensure consistent UID/GID across hosts:

```yaml
services:
  myapp:
    environment:
      - PUID=1000
      - PGID=1000
```

### Config vs Data

Keep config and data separate:

```
/opt/compose/          # Shared: compose files + config
├── plex/
│   ├── docker-compose.yml
│   └── config/        # Small config files OK

/mnt/data/             # Shared: large media files
├── movies/
├── tv/
└── music/

/opt/appdata/          # Local: per-host app data
├── plex/
└── sonarr/
```

## Performance

### Parallel Operations

Compose Farm runs operations in parallel. For large deployments:

```bash
# Good: parallel by default
cf up --all

# Avoid: sequential updates when possible
for svc in plex sonarr radarr; do
  cf update $svc
done
```

### SSH Connection Reuse

SSH connections are reused within a command. For many operations:

```bash
# One command, one connection per host
cf update --all

# Multiple commands, multiple connections (slower)
cf update plex && cf update sonarr && cf update radarr
```

## Traefik Setup

### Service Placement

Put Traefik on a reliable host:

```yaml
services:
  traefik: nuc  # Primary host with good uptime
```

### Same-Host Services

Services on the same host as Traefik use the Docker provider:

```yaml
traefik_service: traefik

services:
  traefik: nuc
  portainer: nuc  # Docker provider handles this
  plex: hp        # File provider handles this
```

### Middleware in a Separate File

Define middlewares outside Compose Farm's generated file:

```yaml
# /opt/traefik/dynamic.d/middlewares.yml
http:
  middlewares:
    redirect-https:
      redirectScheme:
        scheme: https
```

## Backup Strategy

### What to Back Up

| Item | Location | Method |
|------|----------|--------|
| Compose Farm config | `~/.config/compose-farm/` | Git or copy |
| Compose files | `/opt/compose/` | Git |
| State file | `~/.config/compose-farm/compose-farm-state.yaml` | Optional (can refresh) |
| App data | `/opt/appdata/` | Backup solution |

### Disaster Recovery

```bash
# Restore config
cp backup/compose-farm.yaml ~/.config/compose-farm/

# Refresh state from running containers
cf refresh

# Or start fresh
cf apply
```

## Troubleshooting

### Common Issues

**Service won't start:**
```bash
cf check myservice  # Verify mounts/networks
cf logs myservice   # Check container logs
```

**Migration fails:**
```bash
cf check myservice       # Verify the new host is ready
cf init-network newhost  # Create the network if missing
```

**State out of sync:**
```bash
cf refresh --dry-run  # See differences
cf refresh            # Sync state
```

**SSH issues:**
```bash
cf ssh status  # Check key status
cf ssh setup   # Re-run key setup
```

### Debug Mode

For more verbose output:

```bash
# See the exact commands being run
cf --verbose up myservice
```

## Security Considerations

### SSH Keys

- Use a dedicated SSH key for Compose Farm
- Limit the key to specific hosts if possible
- Don't store keys in Docker images

### Network Exposure

- Published ports are accessible from the network
- Use firewalls for sensitive services
- Consider a VPN for cross-host communication

### Secrets

- Don't commit `.env` files with secrets
- Use Docker secrets or external secret management
- Avoid secrets in compose file labels

## Comparison: When to Use Alternatives

| Scenario | Solution |
|----------|----------|
| 2-10 hosts, static services | **Compose Farm** |
| Cross-host container networking | Docker Swarm |
| Auto-scaling, self-healing | Kubernetes |
| Infrastructure as code | Ansible + Compose Farm |
| High availability requirements | Kubernetes or Swarm |

**docs/commands.md** (new file, +650; the capture below is truncated)

---
icon: lucide/terminal
---

# Commands Reference

The Compose Farm CLI is available as both `compose-farm` and the shorter alias `cf`.

## Command Overview

| Category | Command | Description |
|----------|---------|-------------|
| **Lifecycle** | `apply` | Make reality match config |
| | `up` | Start services |
| | `down` | Stop services |
| | `restart` | Restart services (down + up) |
| | `update` | Update services (pull + build + down + up) |
| | `pull` | Pull latest images |
| **Monitoring** | `ps` | Show service status |
| | `logs` | Show service logs |
| | `stats` | Show overview statistics |
| **Configuration** | `check` | Validate config and mounts |
| | `refresh` | Sync state from reality |
| | `init-network` | Create Docker network |
| | `traefik-file` | Generate Traefik config |
| | `config` | Manage config files |
| | `ssh` | Manage SSH keys |
| **Server** | `web` | Start web UI |

## Global Options

```bash
cf --version, -v  # Show version
cf --help, -h     # Show help
```

---

## Lifecycle Commands

### cf apply

Make reality match your configuration. The primary reconciliation command.

<video autoplay loop muted playsinline>
  <source src="assets/apply.webm" type="video/webm">
</video>

```bash
cf apply [OPTIONS]
```

**Options:**

| Option | Description |
|--------|-------------|
| `--dry-run, -n` | Preview changes without executing |
| `--no-orphans` | Skip stopping orphaned services |
| `--full, -f` | Also refresh running services |
| `--config, -c PATH` | Path to config file |

**What it does:**

1. Stops orphaned services (in state but removed from config)
2. Migrates services on the wrong host
3. Starts missing services (in config but not running)

**Examples:**

```bash
# Preview what would change
cf apply --dry-run

# Apply all changes
cf apply

# Only start/migrate, don't stop orphans
cf apply --no-orphans

# Also refresh all running services
cf apply --full
```

---

### cf up

Start services. Auto-migrates if the host assignment changed.

```bash
cf up [OPTIONS] [SERVICES]...
```

**Options:**

| Option | Description |
|--------|-------------|
| `--all, -a` | Start all services |
| `--host, -H TEXT` | Filter to services on this host |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
# Start specific services
cf up plex sonarr

# Start all services
cf up --all

# Start all services on a specific host
cf up --all --host nuc
```

**Auto-migration:**

If you change a service's host in config and run `cf up`:

1. Verifies mounts/networks exist on the new host
2. Runs `down` on the old host
3. Runs `up -d` on the new host
4. Updates state

---

### cf down

Stop services.

```bash
cf down [OPTIONS] [SERVICES]...
```

**Options:**

| Option | Description |
|--------|-------------|
| `--all, -a` | Stop all services |
| `--orphaned` | Stop orphaned services only |
| `--host, -H TEXT` | Filter to services on this host |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
# Stop specific services
cf down plex

# Stop all services
cf down --all

# Stop services removed from config
cf down --orphaned

# Stop all services on a host
cf down --all --host nuc
```

---

### cf restart

Restart services (down + up).

```bash
cf restart [OPTIONS] [SERVICES]...
```

**Options:**

| Option | Description |
|--------|-------------|
| `--all, -a` | Restart all services |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
cf restart plex
cf restart --all
```

---

### cf update

Update services (pull + build + down + up).

<video autoplay loop muted playsinline>
  <source src="assets/update.webm" type="video/webm">
</video>

```bash
cf update [OPTIONS] [SERVICES]...
```

**Options:**

| Option | Description |
|--------|-------------|
| `--all, -a` | Update all services |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
# Update a specific service
cf update plex

# Update all services
cf update --all
```

---

### cf pull

Pull latest images.

```bash
cf pull [OPTIONS] [SERVICES]...
```

**Options:**

| Option | Description |
|--------|-------------|
| `--all, -a` | Pull for all services |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
cf pull plex
cf pull --all
```

---

## Monitoring Commands

### cf ps

Show status of services.

```bash
cf ps [OPTIONS] [SERVICES]...
```

**Options:**

| Option | Description |
|--------|-------------|
| `--all, -a` | Show all services (default) |
| `--host, -H TEXT` | Filter to services on this host |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
# Show all services
cf ps

# Show specific services
cf ps plex sonarr

# Filter by host
cf ps --host nuc
```

---

### cf logs

Show service logs.

<video autoplay loop muted playsinline>
  <source src="assets/logs.webm" type="video/webm">
</video>

```bash
cf logs [OPTIONS] [SERVICES]...
```

**Options:**

| Option | Description |
|--------|-------------|
| `--all, -a` | Show logs for all services |
| `--host, -H TEXT` | Filter to services on this host |
| `--follow, -f` | Follow logs (live stream) |
| `--tail, -n INTEGER` | Number of lines (default: 20 for `--all`, 100 otherwise) |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
# Show the last 100 lines
cf logs plex

# Follow logs
cf logs -f plex

# Show the last 50 lines of multiple services
cf logs -n 50 plex sonarr

# Show the last 20 lines of all services
cf logs --all
```

---

### cf stats

Show overview statistics.

```bash
cf stats [OPTIONS]
```

**Options:**

| Option | Description |
|--------|-------------|
| `--live, -l` | Query Docker for live container counts |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
# Config/state overview
cf stats

# Include live container counts
cf stats --live
```

---

## Configuration Commands

### cf check

Validate configuration, mounts, and networks.

```bash
cf check [OPTIONS] [SERVICES]...
```

**Options:**

| Option | Description |
|--------|-------------|
| `--local` | Skip SSH-based checks (faster) |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
# Full validation with SSH
cf check

# Fast local-only validation
cf check --local

# Check a specific service and show host compatibility
cf check jellyfin
```

---

### cf refresh

Update local state from running services.

```bash
cf refresh [OPTIONS]
```

**Options:**

| Option | Description |
|--------|-------------|
| `--dry-run, -n` | Show what would change |
| `--log-path, -l PATH` | Path to Dockerfarm TOML log |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
# Sync state with reality
cf refresh

# Preview changes
cf refresh --dry-run
```

---

### cf init-network

Create Docker network on hosts with consistent settings.

```bash
cf init-network [OPTIONS] [HOSTS]...
```

**Options:**

| Option | Description |
|--------|-------------|
| `--network, -n TEXT` | Network name (default: mynetwork) |
| `--subnet, -s TEXT` | Network subnet (default: 172.20.0.0/16) |
| `--gateway, -g TEXT` | Network gateway (default: 172.20.0.1) |
| `--config, -c PATH` | Path to config file |

**Examples:**

```bash
# Create on all hosts
cf init-network

# Create on specific hosts
cf init-network nuc hp

# Custom network settings
cf init-network -n production -s 10.0.0.0/16 -g 10.0.0.1
```

---

### cf traefik-file

Generate Traefik file-provider config from compose labels.

```bash
cf traefik-file [OPTIONS] [SERVICES]...
```

**Options:**

| Option | Description |
|--------|-------------|
| `--all, -a` | Generate for all services |
|
||||
| `--output, -o PATH` | Output file (stdout if omitted) |
|
||||
| `--config, -c PATH` | Path to config file |
|
||||
|
||||
**Examples:**
|
||||
|
||||
```bash
|
||||
# Preview to stdout
|
||||
cf traefik-file --all
|
||||
|
||||
# Write to file
|
||||
cf traefik-file --all -o /opt/traefik/dynamic.d/cf.yml
|
||||
|
||||
# Specific services
|
||||
cf traefik-file plex jellyfin -o /opt/traefik/cf.yml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### cf config
|
||||
|
||||
Manage configuration files.
|
||||
|
||||
```bash
|
||||
cf config COMMAND
|
||||
```
|
||||
|
||||
**Subcommands:**
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `init` | Create new config with examples |
|
||||
| `show` | Display config with highlighting |
|
||||
| `path` | Print config file path |
|
||||
| `validate` | Validate syntax and schema |
|
||||
| `edit` | Open in $EDITOR |
|
||||
| `symlink` | Create symlink from default location |
|
||||
|
||||
**Options by subcommand:**
|
||||
|
||||
| Subcommand | Options |
|
||||
|------------|---------|
|
||||
| `init` | `--path/-p PATH`, `--force/-f` |
|
||||
| `show` | `--path/-p PATH`, `--raw/-r` |
|
||||
| `edit` | `--path/-p PATH` |
|
||||
| `path` | `--path/-p PATH` |
|
||||
| `validate` | `--path/-p PATH` |
|
||||
| `symlink` | `--force/-f` |
|
||||
|
||||
**Examples:**
|
||||
|
||||
```bash
|
||||
# Create config at default location
|
||||
cf config init
|
||||
|
||||
# Create config at custom path
|
||||
cf config init --path /opt/compose-farm/config.yaml
|
||||
|
||||
# Show config with syntax highlighting
|
||||
cf config show
|
||||
|
||||
# Show raw config (for copy-paste)
|
||||
cf config show --raw
|
||||
|
||||
# Validate config
|
||||
cf config validate
|
||||
|
||||
# Edit config in $EDITOR
|
||||
cf config edit
|
||||
|
||||
# Print config path
|
||||
cf config path
|
||||
|
||||
# Create symlink to local config
|
||||
cf config symlink
|
||||
|
||||
# Create symlink to specific file
|
||||
cf config symlink /opt/compose-farm/config.yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### cf ssh
|
||||
|
||||
Manage SSH keys for passwordless authentication.
|
||||
|
||||
```bash
|
||||
cf ssh COMMAND
|
||||
```
|
||||
|
||||
**Subcommands:**
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `setup` | Generate key and copy to all hosts |
|
||||
| `status` | Show SSH key status and host connectivity |
|
||||
| `keygen` | Generate key without distributing |
|
||||
|
||||
**Options for `cf ssh setup` and `cf ssh keygen`:**
|
||||
|
||||
| Option | Description |
|
||||
|--------|-------------|
|
||||
| `--force, -f` | Regenerate key even if it exists |
|
||||
|
||||
**Examples:**
|
||||
|
||||
```bash
|
||||
# Set up SSH keys (generates and distributes)
|
||||
cf ssh setup
|
||||
|
||||
# Check status and connectivity
|
||||
cf ssh status
|
||||
|
||||
# Generate key only (don't distribute)
|
||||
cf ssh keygen
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Server Commands
|
||||
|
||||
### cf web
|
||||
|
||||
Start the web UI server.
|
||||
|
||||
```bash
|
||||
cf web [OPTIONS]
|
||||
```
|
||||
|
||||
**Options:**
|
||||
|
||||
| Option | Description |
|
||||
|--------|-------------|
|
||||
| `--host, -H TEXT` | Host to bind to (default: 0.0.0.0) |
|
||||
| `--port, -p INTEGER` | Port to listen on (default: 8000) |
|
||||
| `--reload, -r` | Enable auto-reload for development |
|
||||
|
||||
**Note:** Requires web dependencies: `pip install compose-farm[web]`
|
||||
|
||||
**Examples:**
|
||||
|
||||
```bash
|
||||
# Start on default port
|
||||
cf web
|
||||
|
||||
# Start on custom port
|
||||
cf web --port 3000
|
||||
|
||||
# Development mode with auto-reload
|
||||
cf web --reload
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Daily Operations
|
||||
|
||||
```bash
|
||||
# Morning: check status
|
||||
cf ps
|
||||
cf stats --live
|
||||
|
||||
# Update a specific service
|
||||
cf update plex
|
||||
|
||||
# View logs
|
||||
cf logs -f plex
|
||||
```
|
||||
|
||||
### Maintenance
|
||||
|
||||
```bash
|
||||
# Update all services
|
||||
cf update --all
|
||||
|
||||
# Refresh state after manual changes
|
||||
cf refresh
|
||||
```
|
||||
|
||||
### Migration
|
||||
|
||||
```bash
|
||||
# Preview what would change
|
||||
cf apply --dry-run
|
||||
|
||||
# Move a service: edit config, then
|
||||
cf up plex # auto-migrates
|
||||
|
||||
# Or reconcile everything
|
||||
cf apply
|
||||
```
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
```bash
|
||||
# Validate config
|
||||
cf check --local
|
||||
cf check
|
||||
|
||||
# Check specific service
|
||||
cf check jellyfin
|
||||
|
||||
# Sync state
|
||||
cf refresh --dry-run
|
||||
cf refresh
|
||||
```
|
||||

402
docs/configuration.md
Normal file
@@ -0,0 +1,402 @@
---
icon: lucide/settings
---

# Configuration Reference

Compose Farm uses a YAML configuration file to define hosts and service assignments.

## Config File Location

Compose Farm looks for configuration in this order:

1. `-c` / `--config` flag (if provided)
2. `CF_CONFIG` environment variable
3. `./compose-farm.yaml` (current directory)
4. `$XDG_CONFIG_HOME/compose-farm/compose-farm.yaml` (defaults to `~/.config`)

Use `-c` / `--config` to specify a custom path:

```bash
cf ps -c /path/to/config.yaml
```

Or set the environment variable:

```bash
export CF_CONFIG=/path/to/config.yaml
```

## Full Example

```yaml
# Required: directory containing compose files
compose_dir: /opt/compose

# Optional: auto-regenerate Traefik config
traefik_file: /opt/traefik/dynamic.d/compose-farm.yml
traefik_service: traefik

# Define Docker hosts
hosts:
  nuc:
    address: 192.168.1.10
    user: docker
  hp:
    address: 192.168.1.11
    user: admin
  local: localhost

# Map services to hosts
services:
  # Single-host services
  plex: nuc
  sonarr: nuc
  radarr: hp
  jellyfin: local

  # Multi-host services
  dozzle: all              # Run on ALL hosts
  node-exporter: [nuc, hp] # Run on specific hosts
```

## Settings Reference

### compose_dir (required)

Directory containing your compose service folders. Must be the same path on all hosts.

```yaml
compose_dir: /opt/compose
```

**Directory structure:**

```
/opt/compose/
├── plex/
│   ├── docker-compose.yml   # or compose.yaml
│   └── .env                 # optional environment file
├── sonarr/
│   └── docker-compose.yml
└── ...
```

Supported compose file names (checked in order):
- `compose.yaml`
- `compose.yml`
- `docker-compose.yml`
- `docker-compose.yaml`

### traefik_file

Path to auto-generated Traefik file-provider config. When set, Compose Farm regenerates this file after `up`, `down`, `restart`, and `update` commands.

```yaml
traefik_file: /opt/traefik/dynamic.d/compose-farm.yml
```

### traefik_service

Service name running Traefik. Services on the same host are skipped in file-provider config (Traefik's docker provider handles them).

```yaml
traefik_service: traefik
```

## Hosts Configuration

### Basic Host

```yaml
hosts:
  myserver:
    address: 192.168.1.10
```

### With SSH User

```yaml
hosts:
  myserver:
    address: 192.168.1.10
    user: docker
```

If `user` is omitted, the current user is used.

### With Custom SSH Port

```yaml
hosts:
  myserver:
    address: 192.168.1.10
    user: docker
    port: 2222 # SSH port (default: 22)
```

### Localhost

For services running on the same machine where you invoke Compose Farm:

```yaml
hosts:
  local: localhost
```

No SSH is used for localhost services.

### Multiple Hosts

```yaml
hosts:
  nuc:
    address: 192.168.1.10
    user: docker
  hp:
    address: 192.168.1.11
    user: admin
  truenas:
    address: 192.168.1.100
  local: localhost
```

## Services Configuration

### Single-Host Service

```yaml
services:
  plex: nuc
  sonarr: nuc
  radarr: hp
```

### Multi-Host Service

For services that need to run on every host (e.g., log shippers, monitoring agents):

```yaml
services:
  # Run on ALL configured hosts
  dozzle: all
  promtail: all

  # Run on specific hosts
  node-exporter: [nuc, hp, truenas]
```

**Common multi-host services** (see the sketch below):
- **Dozzle** - Docker log viewer (needs local socket)
- **Promtail/Alloy** - Log shipping (needs local socket)
- **node-exporter** - Host metrics (needs /proc, /sys)
- **AutoKuma** - Uptime Kuma monitors (needs local socket)
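
These agents bind the local Docker socket or host filesystem, which is why they run per host rather than once. A minimal sketch of such a compose file (the image tag and published port here are illustrative, not something Compose Farm prescribes):

```yaml
# /opt/compose/dozzle/docker-compose.yml (sketch)
services:
  dozzle:
    image: amir20/dozzle:latest
    volumes:
      # The local socket is the reason this service must run on every host
      - /var/run/docker.sock:/var/run/docker.sock:ro
    ports:
      - "8888:8080"
    restart: unless-stopped
```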

### Service Names

Service names must match directory names in `compose_dir`:

```yaml
compose_dir: /opt/compose
services:
  plex: nuc  # expects /opt/compose/plex/docker-compose.yml
  my-app: hp # expects /opt/compose/my-app/docker-compose.yml
```

## State File

Compose Farm tracks deployment state in `compose-farm-state.yaml`, stored alongside the config file.

For example, if your config is at `~/.config/compose-farm/compose-farm.yaml`, the state file will be at `~/.config/compose-farm/compose-farm-state.yaml`.

```yaml
deployed:
  plex: nuc
  sonarr: nuc
```

This file records which services are deployed and on which host.

**Don't edit manually.** Use `cf refresh` to sync state with reality.

## Environment Variables

### In Compose Files

Your compose files can use `.env` files as usual:

```
/opt/compose/plex/
├── docker-compose.yml
└── .env
```

Compose Farm runs `docker compose` which handles `.env` automatically.
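
As a sketch of what that looks like in practice (the variable names here are illustrative):

```yaml
# /opt/compose/plex/.env contains, for example:
#   PUID=1000
#   TZ=Europe/Amsterdam

# /opt/compose/plex/docker-compose.yml
services:
  plex:
    image: lscr.io/linuxserver/plex
    environment:
      - PUID=${PUID} # substituted from .env by docker compose
      - TZ=${TZ}
```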

### In Traefik Labels

When generating Traefik config, Compose Farm resolves `${VAR}` and `${VAR:-default}` from:

1. The service's `.env` file
2. Current environment

## Config Commands

### Initialize Config

```bash
cf config init
```

Creates a new config file with documented examples.

### Validate Config

```bash
cf config validate
```

Checks syntax and schema.

### Show Config

```bash
cf config show
```

Displays current config with syntax highlighting.

### Edit Config

```bash
cf config edit
```

Opens config in `$EDITOR`.

### Show Config Path

```bash
cf config path
```

Prints the config file location (useful for scripting).

### Create Symlink

```bash
cf config symlink                         # Link to ./compose-farm.yaml
cf config symlink /path/to/my-config.yaml # Link to specific file
```

Creates a symlink from the default location (`~/.config/compose-farm/compose-farm.yaml`) to your config file. Use `--force` to overwrite an existing symlink.

## Validation

### Local Validation

Fast validation without SSH:

```bash
cf check --local
```

Checks:
- Config syntax
- Service-to-host mappings
- Compose file existence

### Full Validation

```bash
cf check
```

Additional SSH-based checks:
- Host connectivity
- Mount point existence
- Docker network existence
- Traefik label validation

### Service-Specific Check

```bash
cf check jellyfin
```

Shows which hosts can run the service (have required mounts/networks).

## Example Configurations

### Minimal

```yaml
compose_dir: /opt/compose

hosts:
  server: 192.168.1.10

services:
  myapp: server
```

### Home Lab

```yaml
compose_dir: /opt/compose

hosts:
  nuc:
    address: 192.168.1.10
    user: docker
  nas:
    address: 192.168.1.100
    user: admin

services:
  # Media
  plex: nuc
  sonarr: nuc
  radarr: nuc

  # Infrastructure
  traefik: nuc
  portainer: nuc

  # Monitoring (on all hosts)
  dozzle: all
```

### Production

```yaml
compose_dir: /opt/compose
network: production
traefik_file: /opt/traefik/dynamic.d/cf.yml
traefik_service: traefik

hosts:
  web-1:
    address: 10.0.1.10
    user: deploy
  web-2:
    address: 10.0.1.11
    user: deploy
  db:
    address: 10.0.1.20
    user: deploy

services:
  # Load balanced
  api: [web-1, web-2]

  # Single instance
  postgres: db
  redis: db

  # Infrastructure
  traefik: web-1

  # Monitoring
  promtail: all
```

26
docs/demos/README.md
Normal file
@@ -0,0 +1,26 @@
# Terminal Demos

[VHS](https://github.com/charmbracelet/vhs) tape files for recording terminal demos.

## Demos

| File | Shows |
|------|-------|
| `install.tape` | Installing with `uv tool install` |
| `quickstart.tape` | `cf ps`, `cf up`, `cf logs` |
| `logs.tape` | Viewing logs |
| `update.tape` | `cf update` |
| `migration.tape` | Service migration |
| `apply.tape` | `cf apply` |

## Recording

```bash
# Record all demos (outputs to docs/assets/)
./docs/demos/record.sh

# Single demo
cd /opt/stacks && vhs /path/to/docs/demos/quickstart.tape
```

Output files (GIF + WebM) are tracked with Git LFS.

39
docs/demos/apply.tape
Normal file
@@ -0,0 +1,39 @@
# Apply Demo
# Shows cf apply previewing and reconciling state

Output docs/assets/apply.gif
Output docs/assets/apply.webm

Set Shell "bash"
Set FontSize 14
Set Width 900
Set Height 600
Set Theme "Catppuccin Mocha"
Set TypingSpeed 50ms

Type "# Preview what would change"
Enter
Sleep 500ms

Type "cf apply --dry-run"
Enter
Wait

Type "# Check current status"
Enter
Sleep 500ms

Type "cf stats"
Enter
Wait+Screen /Summary/
Sleep 2s

Type "# Apply the changes"
Enter
Sleep 500ms

Type "cf apply"
Enter
# Wait for shell prompt (command complete)
Wait
Sleep 4s

42
docs/demos/install.tape
Normal file
@@ -0,0 +1,42 @@
# Installation Demo
# Shows installing compose-farm with uv

Output docs/assets/install.gif
Output docs/assets/install.webm

Set Shell "bash"
Set FontSize 14
Set Width 900
Set Height 600
Set Theme "Catppuccin Mocha"
Set TypingSpeed 50ms
Env FORCE_COLOR "1"

Hide
Type "export PATH=$HOME/.local/bin:$PATH && uv tool uninstall compose-farm 2>/dev/null; clear"
Enter
Show
Type "# Install with uv (recommended)"
Enter
Sleep 500ms

Type "uv tool install compose-farm"
Enter
Wait+Screen /Installed|already installed/

Type "# Verify installation"
Enter
Sleep 500ms

Type "cf --version"
Enter
Wait+Screen /compose-farm/
Sleep 1s

Type "cf --help | less"
Enter
Sleep 2s
PageDown
Sleep 2s
Type "q"
Sleep 2s

21
docs/demos/logs.tape
Normal file
@@ -0,0 +1,21 @@
# Logs Demo
# Shows viewing service logs

Output docs/assets/logs.gif
Output docs/assets/logs.webm

Set Shell "bash"
Set FontSize 14
Set Width 900
Set Height 550
Set Theme "Catppuccin Mocha"
Set TypingSpeed 50ms

Type "# View recent logs"
Enter
Sleep 500ms

Type "cf logs immich --tail 20"
Enter
Wait+Screen /immich/
Sleep 2s

71
docs/demos/migration.tape
Normal file
@@ -0,0 +1,71 @@
# Migration Demo
# Shows automatic service migration when host changes

Output docs/assets/migration.gif
Output docs/assets/migration.webm

Set Shell "bash"
Set FontSize 14
Set Width 1000
Set Height 600
Set Theme "Catppuccin Mocha"
Set TypingSpeed 50ms

Type "# Current status: audiobookshelf on 'nas'"
Enter
Sleep 500ms

Type "cf ps audiobookshelf"
Enter
Wait+Screen /PORTS/

Type "# Edit config to move it to 'anton'"
Enter
Sleep 1s

Type "nvim /opt/stacks/compose-farm.yaml"
Enter
Wait+Screen /services:/

# Search for audiobookshelf
Type "/audiobookshelf"
Enter
Sleep 1s

# Move to the host value (nas) and change it
Type "f:"
Sleep 500ms
Type "w"
Sleep 500ms
Type "ciw"
Sleep 500ms
Type "anton"
Escape
Sleep 1s

# Save and quit
Type ":wq"
Enter
Sleep 1s

Type "# Run up - automatically migrates!"
Enter
Sleep 500ms

Type "cf up audiobookshelf"
Enter
# Wait for migration phases: first the stop on old host
Wait+Screen /Migrating|down/
# Then wait for start on new host
Wait+Screen /Starting|up/
# Finally wait for completion
Wait

Type "# Verify: audiobookshelf now on 'anton'"
Enter
Sleep 500ms

Type "cf ps audiobookshelf"
Enter
Wait+Screen /PORTS/
Sleep 3s

91
docs/demos/quickstart.tape
Normal file
@@ -0,0 +1,91 @@
# Quick Start Demo
# Shows basic cf commands

Output docs/assets/quickstart.gif
Output docs/assets/quickstart.webm

Set Shell "bash"
Set FontSize 14
Set Width 900
Set Height 600
Set Theme "Catppuccin Mocha"
Set FontFamily "FiraCode Nerd Font"
Set TypingSpeed 50ms
Env BAT_PAGING "always"

Type "# Config is just: service host"
Enter
Sleep 500ms

Type "# First, define your hosts..."
Enter
Sleep 500ms

Type "bat -r 1:11 compose-farm.yaml"
Enter
Sleep 3s
Type "q"
Sleep 500ms

Type "# Then map each service to a host"
Enter
Sleep 500ms

Type "bat -r 13:30 compose-farm.yaml"
Enter
Sleep 3s
Type "q"
Sleep 500ms

Type "# Check service status"
Enter
Sleep 500ms

Type "cf ps immich"
Enter
Wait+Screen /PORTS/

Type "# Start a service"
Enter
Sleep 500ms

Type "cf up immich"
Enter
Wait

Type "# View logs"
Enter
Sleep 500ms

Type "cf logs immich --tail 5"
Enter
Wait+Screen /immich/
Sleep 2s

Type "# The magic: move between hosts (nas -> anton)"
Enter
Sleep 500ms

Type "# Change host in config (using sed)"
Enter
Sleep 500ms

Type "sed -i 's/audiobookshelf: nas/audiobookshelf: anton/' compose-farm.yaml"
Enter
Sleep 500ms

Type "# Apply changes - auto-migrates!"
Enter
Sleep 500ms

Type "cf apply"
Enter
Sleep 15s

Type "# Verify: now on anton"
Enter
Sleep 500ms

Type "cf ps audiobookshelf"
Enter
Sleep 5s

88
docs/demos/record.sh
Executable file
@@ -0,0 +1,88 @@
#!/usr/bin/env bash
# Record all VHS demos
# Run this on a Docker host with compose-farm configured

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
DOCS_DIR="$(dirname "$SCRIPT_DIR")"
REPO_DIR="$(dirname "$DOCS_DIR")"
OUTPUT_DIR="$DOCS_DIR/assets"

# Colors
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[0;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color

# Check for VHS
if ! command -v vhs &> /dev/null; then
    echo "VHS not found. Install with:"
    echo "  brew install vhs"
    echo "  # or"
    echo "  go install github.com/charmbracelet/vhs@latest"
    exit 1
fi

# Ensure output directory exists
mkdir -p "$OUTPUT_DIR"

# Temp output dir (VHS runs from /opt/stacks, so relative paths go here)
TEMP_OUTPUT="/opt/stacks/docs/assets"
mkdir -p "$TEMP_OUTPUT"

# Change to /opt/stacks so cf commands use installed version (not editable install)
cd /opt/stacks

# Ensure compose-farm.yaml has no uncommitted changes (safety check)
if ! git diff --quiet compose-farm.yaml; then
    echo -e "${RED}Error: compose-farm.yaml has uncommitted changes${NC}"
    echo "Commit or stash your changes before recording demos"
    exit 1
fi

echo -e "${BLUE}Recording VHS demos...${NC}"
echo "Output directory: $OUTPUT_DIR"
echo ""

# Function to record a tape
record_tape() {
    local tape=$1
    local name=$(basename "$tape" .tape)
    echo -e "${GREEN}Recording:${NC} $name"
    if vhs "$tape"; then
        echo -e "${GREEN}  ✓ Done${NC}"
    else
        echo -e "${RED}  ✗ Failed${NC}"
        return 1
    fi
}

# Record demos in logical order
echo -e "${YELLOW}=== Phase 1: Basic demos ===${NC}"
record_tape "$SCRIPT_DIR/install.tape"
record_tape "$SCRIPT_DIR/quickstart.tape"
record_tape "$SCRIPT_DIR/logs.tape"

echo -e "${YELLOW}=== Phase 2: Update demo ===${NC}"
record_tape "$SCRIPT_DIR/update.tape"

echo -e "${YELLOW}=== Phase 3: Migration demo ===${NC}"
record_tape "$SCRIPT_DIR/migration.tape"
git -C /opt/stacks checkout compose-farm.yaml # Reset after migration

echo -e "${YELLOW}=== Phase 4: Apply demo ===${NC}"
record_tape "$SCRIPT_DIR/apply.tape"

# Move GIFs and WebMs from temp location to repo
echo ""
echo -e "${BLUE}Moving recordings to repo...${NC}"
mv "$TEMP_OUTPUT"/*.gif "$OUTPUT_DIR/" 2>/dev/null || true
mv "$TEMP_OUTPUT"/*.webm "$OUTPUT_DIR/" 2>/dev/null || true
rmdir "$TEMP_OUTPUT" 2>/dev/null || true
rmdir "$(dirname "$TEMP_OUTPUT")" 2>/dev/null || true

echo ""
echo -e "${GREEN}Done!${NC} Recordings saved to $OUTPUT_DIR/"
ls -la "$OUTPUT_DIR"/*.gif "$OUTPUT_DIR"/*.webm 2>/dev/null || echo "No recordings found (check for errors above)"

32
docs/demos/update.tape
Normal file
@@ -0,0 +1,32 @@
# Update Demo
# Shows updating services (pull + down + up)

Output docs/assets/update.gif
Output docs/assets/update.webm

Set Shell "bash"
Set FontSize 14
Set Width 900
Set Height 500
Set Theme "Catppuccin Mocha"
Set TypingSpeed 50ms

Type "# Update a single service"
Enter
Sleep 500ms

Type "cf update grocy"
Enter
# Wait for command to complete (chain waits for longer timeout)
Wait+Screen /pull/
Wait+Screen /grocy/
Wait@60s

Type "# Check current status"
Enter
Sleep 500ms

Type "cf ps grocy"
Enter
Wait+Screen /PORTS/
Sleep 1s

287
docs/getting-started.md
Normal file
@@ -0,0 +1,287 @@
---
icon: lucide/rocket
---

# Getting Started

This guide walks you through installing Compose Farm and setting up your first multi-host deployment.

## Prerequisites

Before you begin, ensure you have:

- **[uv](https://docs.astral.sh/uv/)** (recommended) or Python 3.11+
- **SSH key-based authentication** to your Docker hosts
- **Docker and Docker Compose** installed on all target hosts
- **Shared storage** for compose files (NFS, Syncthing, etc.)

## Installation

<video autoplay loop muted playsinline>
  <source src="assets/install.webm" type="video/webm">
</video>

### One-liner (recommended)

```bash
curl -fsSL https://raw.githubusercontent.com/basnijholt/compose-farm/main/bootstrap.sh | sh
```

This installs [uv](https://docs.astral.sh/uv/) if needed, then installs compose-farm.

### Using uv

If you already have [uv](https://docs.astral.sh/uv/) installed:

```bash
uv tool install compose-farm
```

### Using pip

If you already have Python 3.11+ installed:

```bash
pip install compose-farm
```

### Using Docker

```bash
docker run --rm \
  -v $SSH_AUTH_SOCK:/ssh-agent -e SSH_AUTH_SOCK=/ssh-agent \
  -v ./compose-farm.yaml:/root/.config/compose-farm/compose-farm.yaml:ro \
  ghcr.io/basnijholt/compose-farm up --all
```

### Verify Installation

```bash
cf --version
cf --help
```

## SSH Setup

Compose Farm uses SSH to run commands on remote hosts. You need passwordless SSH access.

### Option 1: SSH Agent (default)

If you already have SSH keys loaded in your agent:

```bash
# Verify keys are loaded
ssh-add -l

# Test connection
ssh user@192.168.1.10 "docker --version"
```

### Option 2: Dedicated Key (recommended for Docker)

For persistent access when running in Docker:

```bash
# Generate and distribute key to all hosts
cf ssh setup

# Check status
cf ssh status
```

This creates `~/.ssh/compose-farm/id_ed25519` and copies the public key to each host.

## Shared Storage Setup

Compose files must be accessible at the **same path** on all hosts. Common approaches:

### NFS Mount

```bash
# On each Docker host
sudo mount nas:/volume1/compose /opt/compose

# Or add to /etc/fstab
nas:/volume1/compose /opt/compose nfs defaults 0 0
```

### Directory Structure

```
/opt/compose/              # compose_dir in config
├── plex/
│   └── docker-compose.yml
├── sonarr/
│   └── docker-compose.yml
├── radarr/
│   └── docker-compose.yml
└── jellyfin/
    └── docker-compose.yml
```

## Configuration

### Create Config File

Create `~/.config/compose-farm/compose-farm.yaml`:

```yaml
# Where compose files are located (same path on all hosts)
compose_dir: /opt/compose

# Define your Docker hosts
hosts:
  nuc:
    address: 192.168.1.10
    user: docker # SSH user
  hp:
    address: 192.168.1.11
    # user defaults to current user
  local: localhost # Run locally without SSH

# Map services to hosts
services:
  plex: nuc
  sonarr: nuc
  radarr: hp
  jellyfin: local
```

### Validate Configuration

```bash
cf check --local
```

This validates syntax without SSH connections. For full validation:

```bash
cf check
```

## First Commands

### Check Status

```bash
cf ps
```

Shows all configured services and their status.

### Start All Services

```bash
cf up --all
```

Starts all services on their assigned hosts.

### Start Specific Services

```bash
cf up plex sonarr
```

### Apply Configuration

The most powerful command: it reconciles reality with your config.

```bash
cf apply --dry-run # Preview changes
cf apply           # Execute changes
```

This will (see the sketch below):
1. Start services that are in the config but not running
2. Migrate services that are on the wrong host
3. Stop services removed from config
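
For instance, a single config edit can trigger all three actions at once (the service names are illustrative):

```yaml
services:
  plex: hp      # was "plex: nuc" -> cf apply migrates it to hp
  prowlarr: nuc # newly added -> cf apply starts it
  # sonarr no longer listed -> cf apply stops it
```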

## Docker Network Setup

If your services use an external Docker network:

```bash
# Create network on all hosts
cf init-network

# Or specific hosts
cf init-network nuc hp
```

Default network: `mynetwork` with subnet `172.20.0.0/16`
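
Compose files then declare that network as external; a minimal sketch (the service is illustrative, and the network name must match the one created by `cf init-network`):

```yaml
services:
  myapp:
    image: nginx:alpine
    networks:
      - mynetwork

networks:
  mynetwork:
    external: true # created once per host by `cf init-network`
```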

## Example Workflow

### 1. Add a New Service

Create the compose file:

```bash
# On any host (shared storage)
mkdir -p /opt/compose/prowlarr
cat > /opt/compose/prowlarr/docker-compose.yml << 'EOF'
services:
  prowlarr:
    image: lscr.io/linuxserver/prowlarr:latest
    container_name: prowlarr
    environment:
      - PUID=1000
      - PGID=1000
    volumes:
      - /opt/config/prowlarr:/config
    ports:
      - "9696:9696"
    restart: unless-stopped
EOF
```

Add to config:

```yaml
services:
  # ... existing services
  prowlarr: nuc
```

Start the service:

```bash
cf up prowlarr
```

### 2. Move a Service to Another Host

Edit `compose-farm.yaml`:

```yaml
services:
  plex: hp # Changed from nuc
```

Apply the change:

```bash
cf up plex
# Automatically: down on nuc, up on hp
```

Or use apply to reconcile everything:

```bash
cf apply
```

### 3. Update All Services

```bash
cf update --all
# Runs: pull + down + up for each service
```

## Next Steps

- [Configuration Reference](configuration.md) - All config options
- [Commands Reference](commands.md) - Full CLI documentation
- [Traefik Integration](traefik.md) - Multi-host routing
- [Best Practices](best-practices.md) - Tips and limitations

129
docs/index.md
Normal file
@@ -0,0 +1,129 @@
---
icon: lucide/server
---

# Compose Farm

A minimal CLI tool to run Docker Compose commands across multiple hosts via SSH.

## What is Compose Farm?

Compose Farm lets you manage Docker Compose services across multiple machines from a single command line. Think [Dockge](https://dockge.kuma.pet/) but with a CLI and web interface, designed for multi-host deployments.

Define which services run where in one YAML file, then use `cf apply` to make reality match your configuration.

## Quick Demo

<video autoplay loop muted playsinline>
  <source src="assets/quickstart.webm" type="video/webm">
</video>

## Why Compose Farm?

| Problem | Compose Farm Solution |
|---------|----------------------|
| 100+ containers on one machine | Distribute across multiple hosts |
| Kubernetes too complex | Just SSH + docker compose |
| Swarm in maintenance mode | Zero infrastructure changes |
| Manual SSH for each host | Single command for all |

**It's a convenience wrapper, not a new paradigm.** Your existing `docker-compose.yml` files work unchanged.

## Quick Start

```yaml
# compose-farm.yaml
compose_dir: /opt/compose

hosts:
  server-1:
    address: 192.168.1.10
  server-2:
    address: 192.168.1.11

services:
  plex: server-1
  jellyfin: server-2
  sonarr: server-1
```

```bash
cf apply # Services start, migrate, or stop as needed
```

### Installation

```bash
uv tool install compose-farm
# or
pip install compose-farm
```

### Configuration

Create `~/.config/compose-farm/compose-farm.yaml`:

```yaml
compose_dir: /opt/compose

hosts:
  nuc:
    address: 192.168.1.10
    user: docker
  hp:
    address: 192.168.1.11

services:
  plex: nuc
  sonarr: nuc
  radarr: hp
```

### Usage

```bash
# Make reality match config
cf apply

# Start specific services
cf up plex sonarr

# Check status
cf ps

# View logs
cf logs -f plex
```

## Key Features

- **Declarative configuration**: One YAML defines where everything runs
- **Auto-migration**: Change a host assignment, run `cf up`, service moves automatically

<video autoplay loop muted playsinline>
  <source src="assets/migration.webm" type="video/webm">
</video>

- **Parallel execution**: Multiple services start/stop concurrently
- **State tracking**: Knows which services are running where
- **Traefik integration**: Generate file-provider config for cross-host routing
- **Zero changes**: Your compose files work as-is

## Requirements

- [uv](https://docs.astral.sh/uv/) (recommended) or Python 3.11+
- SSH key-based authentication to your Docker hosts
- Docker and Docker Compose on all target hosts
- Shared storage (compose files at same path on all hosts)

## Documentation

- [Getting Started](getting-started.md) - Installation and first steps
- [Configuration](configuration.md) - All configuration options
- [Commands](commands.md) - CLI reference
- [Architecture](architecture.md) - How it works under the hood
- [Traefik Integration](traefik.md) - Multi-host routing setup
- [Best Practices](best-practices.md) - Tips and limitations

## License

MIT

385
docs/traefik.md
Normal file
@@ -0,0 +1,385 @@
---
icon: lucide/globe
---

# Traefik Integration

Compose Farm can generate Traefik file-provider configuration for routing traffic across multiple hosts.

## The Problem

When you run Traefik on one host but services on others, Traefik's docker provider can't see remote containers. The file provider bridges this gap.

```
                           Internet
                              │
                              ▼
┌─────────────────────────────────────────────────────────────┐
│ Host: nuc                                                   │
│                                                             │
│  ┌─────────┐                                                │
│  │ Traefik │◄─── Docker provider sees local containers      │
│  │         │                                                │
│  │         │◄─── File provider sees remote services         │
│  └────┬────┘     (from compose-farm.yml)                    │
│       │                                                     │
└───────┼─────────────────────────────────────────────────────┘
        │
        ├────────────────────┐
        │                    │
        ▼                    ▼
┌───────────────┐    ┌───────────────┐
│ Host: hp      │    │ Host: nas     │
│               │    │               │
│ plex:32400    │    │ jellyfin:8096 │
└───────────────┘    └───────────────┘
```

## How It Works

1. Your compose files have standard Traefik labels
2. Compose Farm reads labels and generates file-provider config
3. Traefik watches the generated file
4. Traffic routes to remote services via host IP + published port

## Setup

### Step 1: Configure Traefik File Provider

Add directory watching to your Traefik config:

```yaml
# traefik.yml or docker-compose.yml command
providers:
  file:
    directory: /opt/traefik/dynamic.d
    watch: true
```

Or via command line:

```yaml
services:
  traefik:
    command:
      - --providers.file.directory=/dynamic.d
      - --providers.file.watch=true
    volumes:
      - /opt/traefik/dynamic.d:/dynamic.d:ro
```

### Step 2: Add Traefik Labels to Services

Your compose files use standard Traefik labels:

```yaml
# /opt/compose/plex/docker-compose.yml
services:
  plex:
    image: lscr.io/linuxserver/plex
    ports:
      - "32400:32400" # IMPORTANT: Must publish port!
    labels:
      - traefik.enable=true
      - traefik.http.routers.plex.rule=Host(`plex.example.com`)
      - traefik.http.routers.plex.entrypoints=websecure
      - traefik.http.routers.plex.tls.certresolver=letsencrypt
      - traefik.http.services.plex.loadbalancer.server.port=32400
```

**Important:** Services must publish ports for cross-host routing. Traefik connects via `host_ip:published_port`.

### Step 3: Generate File Provider Config

```bash
cf traefik-file --all -o /opt/traefik/dynamic.d/compose-farm.yml
```

This generates:

```yaml
# /opt/traefik/dynamic.d/compose-farm.yml
http:
  routers:
    plex:
      rule: Host(`plex.example.com`)
      entryPoints:
        - websecure
      tls:
        certResolver: letsencrypt
      service: plex
  services:
    plex:
      loadBalancer:
        servers:
          - url: http://192.168.1.11:32400
```

## Auto-Regeneration

Configure automatic regeneration in `compose-farm.yaml`:

```yaml
compose_dir: /opt/compose
traefik_file: /opt/traefik/dynamic.d/compose-farm.yml
traefik_service: traefik

hosts:
  nuc:
    address: 192.168.1.10
  hp:
    address: 192.168.1.11

services:
  traefik: nuc # Traefik runs here
  plex: hp     # Routed via file-provider
  sonarr: hp
```

With `traefik_file` set, these commands auto-regenerate the config:
- `cf up`
- `cf down`
- `cf restart`
- `cf update`
- `cf apply`

### traefik_service Option

When set, services on the **same host as Traefik** are skipped in file-provider output. Traefik's docker provider handles them directly.

```yaml
traefik_service: traefik # traefik runs on nuc
services:
  traefik: nuc   # NOT in file-provider (docker provider)
  portainer: nuc # NOT in file-provider (docker provider)
  plex: hp       # IN file-provider (cross-host)
```

## Label Syntax

### Routers

```yaml
labels:
  # Basic router
  - traefik.http.routers.myapp.rule=Host(`app.example.com`)
  - traefik.http.routers.myapp.entrypoints=websecure

  # With TLS
  - traefik.http.routers.myapp.tls=true
  - traefik.http.routers.myapp.tls.certresolver=letsencrypt

  # With middleware
  - traefik.http.routers.myapp.middlewares=auth@file
```

### Services

```yaml
labels:
  # Load balancer port
  - traefik.http.services.myapp.loadbalancer.server.port=8080

  # Health check
  - traefik.http.services.myapp.loadbalancer.healthcheck.path=/health
```

### Middlewares

Middlewares should be defined in a separate file (not generated by Compose Farm):

```yaml
# /opt/traefik/dynamic.d/middlewares.yml
http:
  middlewares:
    auth:
      basicAuth:
        users:
          - "user:$apr1$..."
```

Reference in labels:

```yaml
labels:
  - traefik.http.routers.myapp.middlewares=auth@file
```

## Variable Substitution

Labels can use environment variables:

```yaml
labels:
  - traefik.http.routers.myapp.rule=Host(`${DOMAIN}`)
```

Compose Farm resolves variables from:
1. Service's `.env` file
2. Current environment

```bash
# /opt/compose/myapp/.env
DOMAIN=app.example.com
```

## Port Resolution

Compose Farm determines the target URL from published ports:

```yaml
ports:
  - "8080:80"              # Uses 8080
  - "192.168.1.11:8080:80" # Uses 8080 on specific IP
```

If no suitable port is found, a warning is shown.
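
Combined with the host's address from `compose-farm.yaml`, the first mapping above would produce a file-provider entry roughly like this (a sketch; `myapp` and the address are illustrative):

```yaml
http:
  services:
    myapp:
      loadBalancer:
        servers:
          - url: http://192.168.1.11:8080 # host address + published port
```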

## Complete Example

### compose-farm.yaml

```yaml
compose_dir: /opt/compose
traefik_file: /opt/traefik/dynamic.d/compose-farm.yml
traefik_service: traefik

hosts:
  nuc:
    address: 192.168.1.10
  hp:
    address: 192.168.1.11
  nas:
    address: 192.168.1.100

services:
  traefik: nuc
  plex: hp
  jellyfin: nas
  sonarr: nuc
  radarr: nuc
```

### /opt/compose/plex/docker-compose.yml

```yaml
services:
  plex:
    image: lscr.io/linuxserver/plex
    container_name: plex
    ports:
      - "32400:32400"
    labels:
      - traefik.enable=true
      - traefik.http.routers.plex.rule=Host(`plex.example.com`)
      - traefik.http.routers.plex.entrypoints=websecure
      - traefik.http.routers.plex.tls.certresolver=letsencrypt
      - traefik.http.services.plex.loadbalancer.server.port=32400
    # ... other config
```

### Generated compose-farm.yml

```yaml
http:
  routers:
    plex:
      rule: Host(`plex.example.com`)
      entryPoints:
        - websecure
      tls:
        certResolver: letsencrypt
      service: plex
    jellyfin:
      rule: Host(`jellyfin.example.com`)
      entryPoints:
        - websecure
      tls:
        certResolver: letsencrypt
      service: jellyfin

  services:
    plex:
      loadBalancer:
        servers:
          - url: http://192.168.1.11:32400
    jellyfin:
      loadBalancer:
        servers:
          - url: http://192.168.1.100:8096
```

Note: `sonarr` and `radarr` are NOT in the file because they're on the same host as Traefik (`nuc`).

## Combining with Existing Config

If you have existing Traefik dynamic config:

```bash
# Move existing config to directory
mkdir -p /opt/traefik/dynamic.d
mv /opt/traefik/dynamic.yml /opt/traefik/dynamic.d/manual.yml

# Generate Compose Farm config
cf traefik-file --all -o /opt/traefik/dynamic.d/compose-farm.yml

# Update Traefik to watch directory
# --providers.file.directory=/dynamic.d
```

Traefik merges all YAML files in the directory.

## Troubleshooting

### Service Not Accessible

1. **Check port is published:**
   ```yaml
   ports:
     - "8080:80" # Must be published, not just exposed
   ```

2. **Check label syntax:**
   ```bash
   cf check myservice
   ```

3. **Verify generated config:**
   ```bash
   cf traefik-file myservice
   ```

4. **Check Traefik logs:**
   ```bash
   docker logs traefik
   ```

### Config Not Regenerating

1. **Verify traefik_file is set:**
   ```bash
   cf config show | grep traefik
   ```

2. **Check file permissions:**
   ```bash
   ls -la /opt/traefik/dynamic.d/
   ```

3. **Manually regenerate:**
   ```bash
   cf traefik-file --all -o /opt/traefik/dynamic.d/compose-farm.yml
   ```

### Variable Not Resolved

1. **Check .env file exists:**
   ```bash
   cat /opt/compose/myservice/.env
   ```

2. **Test variable resolution:**
   ```bash
   cd /opt/compose/myservice
   docker compose config
   ```
@@ -145,6 +145,9 @@ addopts = [
|
||||
"--no-cov-on-fail",
|
||||
"-v",
|
||||
]
|
||||
markers = [
|
||||
"browser: marks tests as browser tests (deselect with '-m \"not browser\"')",
|
||||
]
|
||||
|
||||
[tool.coverage.run]
|
||||
omit = []
|
||||
@@ -174,4 +177,8 @@ dev = [
|
||||
"websockets>=12.0",
|
||||
# For FastAPI TestClient
|
||||
"httpx>=0.28.0",
|
||||
# For browser tests (use system chromium via nix-shell -p chromium)
|
||||
"pytest-playwright>=0.7.0",
|
||||
# For parallel test execution
|
||||
"pytest-xdist>=3.0.0",
|
||||
]
|
||||
|
||||
@@ -8,6 +8,7 @@ from compose_farm.cli import (
|
||||
lifecycle, # noqa: F401
|
||||
management, # noqa: F401
|
||||
monitoring, # noqa: F401
|
||||
ssh, # noqa: F401
|
||||
web, # noqa: F401
|
||||
)
|
||||
|
||||
|
||||
@@ -18,7 +18,15 @@ from rich.progress import (
|
||||
TimeElapsedColumn,
|
||||
)
|
||||
|
||||
from compose_farm.console import console, err_console
|
||||
from compose_farm.console import (
|
||||
MSG_HOST_NOT_FOUND,
|
||||
MSG_SERVICE_NOT_FOUND,
|
||||
console,
|
||||
print_error,
|
||||
print_hint,
|
||||
print_success,
|
||||
print_warning,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable, Coroutine, Generator
|
||||
@@ -27,6 +35,7 @@ if TYPE_CHECKING:
|
||||
from compose_farm.executor import CommandResult
|
||||
|
||||
_T = TypeVar("_T")
|
||||
_R = TypeVar("_R")
|
||||
|
||||
|
||||
# --- Shared CLI Options ---
|
||||
@@ -56,6 +65,13 @@ _MISSING_PATH_PREVIEW_LIMIT = 2
|
||||
_STATS_PREVIEW_LIMIT = 3 # Max number of pending migrations to show by name
|
||||
|
||||
|
||||
def format_host(host: str | list[str]) -> str:
|
||||
"""Format a host value for display."""
|
||||
if isinstance(host, list):
|
||||
return ", ".join(host)
|
||||
return host
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def progress_bar(
|
||||
label: str, total: int, *, initial_description: str = "[dim]connecting...[/]"
|
||||
@@ -81,6 +97,37 @@ def progress_bar(
|
||||
yield progress, task_id
|
||||
|
||||
|
||||
def run_parallel_with_progress(
|
||||
label: str,
|
||||
items: list[_T],
|
||||
async_fn: Callable[[_T], Coroutine[None, None, _R]],
|
||||
) -> list[_R]:
|
||||
"""Run async tasks in parallel with a progress bar.
|
||||
|
||||
Args:
|
||||
label: Progress bar label (e.g., "Discovering", "Querying hosts")
|
||||
items: List of items to process
|
||||
async_fn: Async function to call for each item, returns tuple where
|
||||
first element is used for progress description
|
||||
|
||||
Returns:
|
||||
List of results from async_fn in completion order.
|
||||
|
||||
"""
|
||||
|
||||
async def gather() -> list[_R]:
|
||||
with progress_bar(label, len(items)) as (progress, task_id):
|
||||
tasks = [asyncio.create_task(async_fn(item)) for item in items]
|
||||
results: list[_R] = []
|
||||
for coro in asyncio.as_completed(tasks):
|
||||
result = await coro
|
||||
results.append(result)
|
||||
progress.update(task_id, advance=1, description=f"[cyan]{result[0]}[/]") # type: ignore[index]
|
||||
return results
|
||||
|
||||
return asyncio.run(gather())
|
||||
|
||||
|
||||
def load_config_or_exit(config_path: Path | None) -> Config:
|
||||
"""Load config or exit with a friendly error message."""
|
||||
# Lazy import: pydantic adds ~50ms to startup, only load when actually needed
|
||||
@@ -89,7 +136,7 @@ def load_config_or_exit(config_path: Path | None) -> Config:
|
||||
try:
|
||||
return load_config(config_path)
|
||||
except FileNotFoundError as e:
|
||||
err_console.print(f"[red]✗[/] {e}")
|
||||
print_error(str(e))
|
||||
raise typer.Exit(1) from e
|
||||
|
||||
|
||||
@@ -97,29 +144,54 @@ def get_services(
|
||||
services: list[str],
|
||||
all_services: bool,
|
||||
config_path: Path | None,
|
||||
*,
|
||||
host: str | None = None,
|
||||
default_all: bool = False,
|
||||
) -> tuple[list[str], Config]:
|
||||
"""Resolve service list and load config.
|
||||
|
||||
Handles three mutually exclusive selection methods:
|
||||
- Explicit service names
|
||||
- --all flag
|
||||
- --host filter
|
||||
|
||||
Args:
|
||||
services: Explicit service names
|
||||
all_services: Whether --all was specified
|
||||
config_path: Path to config file
|
||||
host: Filter to services on this host
|
||||
default_all: If True, default to all services when nothing specified (for ps)
|
||||
|
||||
Supports "." as shorthand for the current directory name.
|
||||
|
||||
"""
|
||||
validate_service_selection(services, all_services, host)
|
||||
config = load_config_or_exit(config_path)
|
||||
|
||||
if host is not None:
|
||||
validate_hosts(config, host)
|
||||
svc_list = [s for s in config.services if host in config.get_hosts(s)]
|
||||
if not svc_list:
|
||||
print_warning(f"No services configured for host [magenta]{host}[/]")
|
||||
raise typer.Exit(0)
|
||||
return svc_list, config
|
||||
|
||||
if all_services:
|
||||
return list(config.services.keys()), config
|
||||
|
||||
if not services:
|
||||
err_console.print("[red]✗[/] Specify services or use --all")
|
||||
if default_all:
|
||||
return list(config.services.keys()), config
|
||||
print_error("Specify services or use [bold]--all[/] / [bold]--host[/]")
|
||||
raise typer.Exit(1)
|
||||
|
||||
# Resolve "." to current directory name
|
||||
resolved = [Path.cwd().name if svc == "." else svc for svc in services]
|
||||
|
||||
# Validate all services exist in config
|
||||
unknown = [svc for svc in resolved if svc not in config.services]
|
||||
if unknown:
|
||||
for svc in unknown:
|
||||
err_console.print(f"[red]✗[/] Unknown service: [cyan]{svc}[/]")
|
||||
err_console.print("[dim]Hint: Add the service to compose-farm.yaml or use --all[/]")
|
||||
raise typer.Exit(1)
|
||||
validate_services(
|
||||
config, resolved, hint="Add the service to compose-farm.yaml or use [bold]--all[/]"
|
||||
)
|
||||
|
||||
return resolved, config
|
||||
|
||||
@@ -143,21 +215,19 @@ def report_results(results: list[CommandResult]) -> None:
         console.print()  # Blank line before summary
         if failed:
             for r in failed:
-                err_console.print(
-                    f"[red]✗[/] [cyan]{r.service}[/] failed with exit code {r.exit_code}"
-                )
+                print_error(f"[cyan]{r.service}[/] failed with exit code {r.exit_code}")
             console.print()
             console.print(
                 f"[green]✓[/] {len(succeeded)}/{len(results)} services succeeded, "
                 f"[red]✗[/] {len(failed)} failed"
             )
         else:
-            console.print(f"[green]✓[/] All {len(results)} services succeeded")
+            print_success(f"All {len(results)} services succeeded")

     elif failed:
         # Single service failed
         r = failed[0]
-        err_console.print(f"[red]✗[/] [cyan]{r.service}[/] failed with exit code {r.exit_code}")
+        print_error(f"[cyan]{r.service}[/] failed with exit code {r.exit_code}")

     if failed:
         raise typer.Exit(1)
@@ -197,47 +267,58 @@ def maybe_regenerate_traefik(
         cfg.traefik_file.parent.mkdir(parents=True, exist_ok=True)
         cfg.traefik_file.write_text(new_content)
         console.print()  # Ensure we're on a new line after streaming output
-        console.print(f"[green]✓[/] Traefik config updated: {cfg.traefik_file}")
+        print_success(f"Traefik config updated: {cfg.traefik_file}")

         for warning in warnings:
-            err_console.print(f"[yellow]![/] {warning}")
+            print_warning(warning)
     except (FileNotFoundError, ValueError) as exc:
-        err_console.print(f"[yellow]![/] Failed to update traefik config: {exc}")
+        print_warning(f"Failed to update traefik config: {exc}")


+def validate_services(cfg: Config, services: list[str], *, hint: str | None = None) -> None:
+    """Validate that all services exist in config. Exits with error if any not found."""
+    invalid = [s for s in services if s not in cfg.services]
+    if invalid:
+        for svc in invalid:
+            print_error(MSG_SERVICE_NOT_FOUND.format(name=svc))
+        if hint:
+            print_hint(hint)
+        raise typer.Exit(1)
+
+
+def validate_hosts(cfg: Config, hosts: str | list[str]) -> None:
+    """Validate that host(s) exist in config. Exits with error if any not found."""
+    host_list = [hosts] if isinstance(hosts, str) else hosts
+    invalid = [h for h in host_list if h not in cfg.hosts]
+    if invalid:
+        for h in invalid:
+            print_error(MSG_HOST_NOT_FOUND.format(name=h))
+        raise typer.Exit(1)
+
+
 def validate_host_for_service(cfg: Config, service: str, host: str) -> None:
     """Validate that a host is valid for a service."""
-    if host not in cfg.hosts:
-        err_console.print(f"[red]✗[/] Host '{host}' not found in config")
-        raise typer.Exit(1)
+    validate_hosts(cfg, host)
     allowed_hosts = cfg.get_hosts(service)
     if host not in allowed_hosts:
-        err_console.print(
-            f"[red]✗[/] Service '{service}' is not configured for host '{host}' "
+        print_error(
+            f"Service [cyan]{service}[/] is not configured for host [magenta]{host}[/] "
            f"(configured: {', '.join(allowed_hosts)})"
        )
        raise typer.Exit(1)


-def run_host_operation(
-    cfg: Config,
-    svc_list: list[str],
-    host: str,
-    command: str,
-    action_verb: str,
-    state_callback: Callable[[Config, str, str], None],
-) -> None:
-    """Run an operation on a specific host for multiple services."""
-    from compose_farm.executor import run_compose_on_host  # noqa: PLC0415
-
-    results: list[CommandResult] = []
-    for service in svc_list:
-        validate_host_for_service(cfg, service, host)
-        console.print(f"[cyan]\\[{service}][/] {action_verb} on [magenta]{host}[/]...")
-        result = run_async(run_compose_on_host(cfg, service, host, command, raw=True))
-        print()  # Newline after raw output
-        results.append(result)
-        if result.success:
-            state_callback(cfg, service, host)
-    maybe_regenerate_traefik(cfg, results)
-    report_results(results)
+def validate_service_selection(
+    services: list[str] | None,
+    all_services: bool,
+    host: str | None,
+) -> None:
+    """Validate that only one service selection method is used.
+
+    The three selection methods (explicit services, --all, --host) are mutually
+    exclusive. This ensures consistent behavior across all commands.
+    """
+    methods = sum([bool(services), all_services, host is not None])
+    if methods > 1:
+        print_error("Use only one of: service names, [bold]--all[/], or [bold]--host[/]")
+        raise typer.Exit(1)

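The mutual-exclusion check leans on `bool` being a subclass of `int`, so truthiness can simply be summed. The counting trick in isolation, with invented argument values:

    def count_methods(services: list[str] | None, all_services: bool, host: str | None) -> int:
        # Each active selection method contributes 1; True/False sum as 1/0.
        return sum([bool(services), all_services, host is not None])

    assert count_methods(["plex"], False, None) == 1  # explicit names only
    assert count_methods([], True, "nas") == 2        # --all plus --host: rejected
    assert count_methods(None, False, None) == 0      # nothing: handled later per command
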
@@ -14,7 +14,7 @@ from typing import Annotated
 import typer

 from compose_farm.cli.app import app
-from compose_farm.console import console, err_console
+from compose_farm.console import MSG_CONFIG_NOT_FOUND, console, print_error, print_success
 from compose_farm.paths import config_search_paths, default_config_path, find_config_path

 config_app = typer.Typer(
@@ -40,24 +40,12 @@ _RawOption = Annotated[


 def _get_editor() -> str:
-    """Get the user's preferred editor.
-
-    Checks $EDITOR, then $VISUAL, then falls back to platform defaults.
-    """
-    for env_var in ("EDITOR", "VISUAL"):
-        editor = os.environ.get(env_var)
-        if editor:
-            return editor
-
+    """Get the user's preferred editor ($EDITOR > $VISUAL > platform default)."""
+    if editor := os.environ.get("EDITOR") or os.environ.get("VISUAL"):
+        return editor
     if platform.system() == "Windows":
         return "notepad"

-    # Try common editors on Unix-like systems
-    for editor in ("nano", "vim", "vi"):
-        if shutil.which(editor):
-            return editor
-
-    return "vi"
+    return next((e for e in ("nano", "vim", "vi") if shutil.which(e)), "vi")

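The rewritten lookup compresses two fallback chains into single expressions: a walrus assignment over `or`-chained environment variables, then `next()` with a default. The same idiom as a standalone sketch (names here are illustrative, not from the repo):

    import os
    import shutil

    def pick_tool() -> str:
        # First non-empty environment variable wins.
        if tool := os.environ.get("EDITOR") or os.environ.get("VISUAL"):
            return tool
        # next() returns the first candidate found on PATH, else the default "vi".
        return next((t for t in ("nano", "vim", "vi") if shutil.which(t)), "vi")
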
 def _generate_template() -> str:
@@ -66,8 +54,8 @@ def _generate_template() -> str:
         template_file = resources.files("compose_farm") / "example-config.yaml"
         return template_file.read_text(encoding="utf-8")
     except FileNotFoundError as e:
-        err_console.print("[red]Example config template is missing from the package.[/red]")
-        err_console.print("Reinstall compose-farm or report this issue.")
+        print_error("Example config template is missing from the package")
+        console.print("Reinstall compose-farm or report this issue.")
         raise typer.Exit(1) from e


@@ -80,6 +68,19 @@ def _get_config_file(path: Path | None) -> Path | None:
     return config_path.resolve() if config_path else None


+def _report_missing_config(explicit_path: Path | None = None) -> None:
+    """Report that a config file was not found."""
+    console.print("[yellow]Config file not found.[/yellow]")
+    if explicit_path:
+        console.print(f"\nProvided path does not exist: [cyan]{explicit_path}[/cyan]")
+    else:
+        console.print("\nSearched locations:")
+        for p in config_search_paths():
+            status = "[green]exists[/green]" if p.exists() else "[dim]not found[/dim]"
+            console.print(f"  - {p} ({status})")
+    console.print("\nRun [bold cyan]cf config init[/bold cyan] to create one.")
+
+
 @config_app.command("init")
 def config_init(
     path: _PathOption = None,
@@ -107,7 +108,7 @@ def config_init(
     template_content = _generate_template()
     target_path.write_text(template_content, encoding="utf-8")

-    console.print(f"[green]✓[/] Config file created at: {target_path}")
+    print_success(f"Config file created at: {target_path}")
     console.print("\n[dim]Edit the file to customize your settings:[/dim]")
     console.print("  [cyan]cf config edit[/cyan]")

@@ -123,17 +124,11 @@ def config_edit(
     config_file = _get_config_file(path)

     if config_file is None:
-        console.print("[yellow]No config file found.[/yellow]")
-        console.print("\nRun [bold cyan]cf config init[/bold cyan] to create one.")
-        console.print("\nSearched locations:")
-        for p in config_search_paths():
-            console.print(f"  - {p}")
+        _report_missing_config()
         raise typer.Exit(1)

     if not config_file.exists():
-        console.print("[yellow]Config file not found.[/yellow]")
-        console.print(f"\nProvided path does not exist: [cyan]{config_file}[/cyan]")
-        console.print("\nRun [bold cyan]cf config init[/bold cyan] to create one.")
+        _report_missing_config(config_file)
         raise typer.Exit(1)

     editor = _get_editor()
@@ -142,21 +137,21 @@ def config_edit(
     try:
         editor_cmd = shlex.split(editor, posix=os.name != "nt")
     except ValueError as e:
-        err_console.print("[red]Invalid editor command. Check $EDITOR/$VISUAL.[/red]")
+        print_error("Invalid editor command. Check [bold]$EDITOR[/]/[bold]$VISUAL[/]")
         raise typer.Exit(1) from e

     if not editor_cmd:
-        err_console.print("[red]Editor command is empty.[/red]")
+        print_error("Editor command is empty")
         raise typer.Exit(1)

     try:
         subprocess.run([*editor_cmd, str(config_file)], check=True)
     except FileNotFoundError:
-        err_console.print(f"[red]Editor '{editor_cmd[0]}' not found.[/red]")
-        err_console.print("Set $EDITOR environment variable to your preferred editor.")
+        print_error(f"Editor [cyan]{editor_cmd[0]}[/] not found")
+        console.print("Set [bold]$EDITOR[/] environment variable to your preferred editor.")
         raise typer.Exit(1) from None
     except subprocess.CalledProcessError as e:
-        err_console.print(f"[red]Editor exited with error code {e.returncode}[/red]")
+        print_error(f"Editor exited with error code {e.returncode}")
         raise typer.Exit(e.returncode) from None


@@ -169,18 +164,11 @@ def config_show(
     config_file = _get_config_file(path)

     if config_file is None:
-        console.print("[yellow]No config file found.[/yellow]")
-        console.print("\nSearched locations:")
-        for p in config_search_paths():
-            status = "[green]exists[/green]" if p.exists() else "[dim]not found[/dim]"
-            console.print(f"  - {p} ({status})")
-        console.print("\nRun [bold cyan]cf config init[/bold cyan] to create one.")
+        _report_missing_config()
         raise typer.Exit(0)

     if not config_file.exists():
-        console.print("[yellow]Config file not found.[/yellow]")
-        console.print(f"\nProvided path does not exist: [cyan]{config_file}[/cyan]")
-        console.print("\nRun [bold cyan]cf config init[/bold cyan] to create one.")
+        _report_missing_config(config_file)
         raise typer.Exit(1)

     content = config_file.read_text(encoding="utf-8")
@@ -207,11 +195,7 @@ def config_path(
     config_file = _get_config_file(path)

     if config_file is None:
-        console.print("[yellow]No config file found.[/yellow]")
-        console.print("\nSearched locations:")
-        for p in config_search_paths():
-            status = "[green]exists[/green]" if p.exists() else "[dim]not found[/dim]"
-            console.print(f"  - {p} ({status})")
+        _report_missing_config()
         raise typer.Exit(1)

     # Just print the path for easy piping
@@ -226,7 +210,7 @@ def config_validate(
     config_file = _get_config_file(path)

     if config_file is None:
-        err_console.print("[red]✗[/] No config file found")
+        print_error(MSG_CONFIG_NOT_FOUND)
         raise typer.Exit(1)

     # Lazy import: pydantic adds ~50ms to startup, only load when actually needed
@@ -235,13 +219,13 @@ def config_validate(
     try:
         cfg = load_config(config_file)
     except FileNotFoundError as e:
-        err_console.print(f"[red]✗[/] {e}")
+        print_error(str(e))
         raise typer.Exit(1) from e
     except Exception as e:
-        err_console.print(f"[red]✗[/] Invalid config: {e}")
+        print_error(f"Invalid config: {e}")
         raise typer.Exit(1) from e

-    console.print(f"[green]✓[/] Valid config: {config_file}")
+    print_success(f"Valid config: {config_file}")
     console.print(f"  Hosts: {len(cfg.hosts)}")
     console.print(f"  Services: {len(cfg.services)}")

@@ -268,11 +252,11 @@ def config_symlink(
     target_path = (target or Path("compose-farm.yaml")).expanduser().resolve()

     if not target_path.exists():
-        err_console.print(f"[red]✗[/] Target config file not found: {target_path}")
+        print_error(f"Target config file not found: {target_path}")
         raise typer.Exit(1)

     if not target_path.is_file():
-        err_console.print(f"[red]✗[/] Target is not a file: {target_path}")
+        print_error(f"Target is not a file: {target_path}")
         raise typer.Exit(1)

     symlink_path = default_config_path()
@@ -282,7 +266,7 @@ def config_symlink(
     if symlink_path.is_symlink():
         current_target = symlink_path.resolve() if symlink_path.exists() else None
         if current_target == target_path:
-            console.print(f"[green]✓[/] Symlink already points to: {target_path}")
+            print_success(f"Symlink already points to: {target_path}")
             return
         # Update existing symlink
         if not force:
@@ -294,8 +278,8 @@ def config_symlink(
             symlink_path.unlink()
     else:
         # Regular file exists
-        err_console.print(f"[red]✗[/] A regular file exists at: {symlink_path}")
-        err_console.print("  Back it up or remove it first, then retry.")
+        print_error(f"A regular file exists at: {symlink_path}")
+        console.print("  Back it up or remove it first, then retry.")
         raise typer.Exit(1)

     # Create parent directories
@@ -304,7 +288,7 @@ def config_symlink(
     # Create symlink with absolute path
     symlink_path.symlink_to(target_path)

-    console.print("[green]✓[/] Created symlink:")
+    print_success("Created symlink:")
     console.print(f"  {symlink_path}")
     console.print(f"  -> {target_path}")

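`config_edit` splits `$EDITOR` with `shlex.split(..., posix=os.name != "nt")` so multi-word editor commands survive as separate arguments. A small demonstration (the editor string is an example, not a repo default):

    import os
    import shlex

    editor = "code --wait"
    cmd = shlex.split(editor, posix=os.name != "nt")
    # -> ["code", "--wait"]; appending the file path keeps arguments distinct:
    full = [*cmd, "/tmp/compose-farm.yaml"]
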
@@ -2,37 +2,32 @@

 from __future__ import annotations

-from typing import TYPE_CHECKING, Annotated
+from typing import Annotated

 import typer

-if TYPE_CHECKING:
-    from compose_farm.config import Config
-
 from compose_farm.cli.app import app
 from compose_farm.cli.common import (
     AllOption,
     ConfigOption,
     HostOption,
     ServicesArg,
+    format_host,
     get_services,
     load_config_or_exit,
     maybe_regenerate_traefik,
     report_results,
     run_async,
-    run_host_operation,
 )
-from compose_farm.console import console, err_console
+from compose_farm.console import MSG_DRY_RUN, console, print_error, print_success
 from compose_farm.executor import run_on_services, run_sequential_on_services
 from compose_farm.operations import stop_orphaned_services, up_services
 from compose_farm.state import (
-    add_service_to_host,
     get_orphaned_services,
     get_service_host,
     get_services_needing_migration,
     get_services_not_in_state,
     remove_service,
-    remove_service_from_host,
 )


@@ -44,14 +39,7 @@ def up(
     config: ConfigOption = None,
 ) -> None:
     """Start services (docker compose up -d). Auto-migrates if host changed."""
-    svc_list, cfg = get_services(services or [], all_services, config)
-
-    # Per-host operation: run on specific host only
-    if host:
-        run_host_operation(cfg, svc_list, host, "up -d", "Starting", add_service_to_host)
-        return
-
-    # Normal operation: use up_services with migration logic
+    svc_list, cfg = get_services(services or [], all_services, config, host=host)
     results = run_async(up_services(cfg, svc_list, raw=True))
     maybe_regenerate_traefik(cfg, results)
     report_results(results)
@@ -71,17 +59,19 @@ def down(
     config: ConfigOption = None,
 ) -> None:
     """Stop services (docker compose down)."""
-    # Handle --orphaned flag
+    # Handle --orphaned flag (mutually exclusive with other selection methods)
     if orphaned:
         if services or all_services or host:
-            err_console.print("[red]✗[/] Cannot use --orphaned with services, --all, or --host")
+            print_error(
+                "Cannot combine [bold]--orphaned[/] with services, [bold]--all[/], or [bold]--host[/]"
+            )
             raise typer.Exit(1)

         cfg = load_config_or_exit(config)
         orphaned_services = get_orphaned_services(cfg)

         if not orphaned_services:
-            console.print("[green]✓[/] No orphaned services to stop")
+            print_success("No orphaned services to stop")
             return

         console.print(
@@ -92,14 +82,7 @@ def down(
         report_results(results)
         return

-    svc_list, cfg = get_services(services or [], all_services, config)
-
-    # Per-host operation: run on specific host only
-    if host:
-        run_host_operation(cfg, svc_list, host, "down", "Stopping", remove_service_from_host)
-        return
-
-    # Normal operation
+    svc_list, cfg = get_services(services or [], all_services, config, host=host)
     raw = len(svc_list) == 1
     results = run_async(run_on_services(cfg, svc_list, "down", raw=raw))

@@ -162,47 +145,8 @@ def update(
     report_results(results)


-def _format_host(host: str | list[str]) -> str:
-    """Format a host value for display."""
-    if isinstance(host, list):
-        return ", ".join(host)
-    return host
-
-
-def _report_pending_migrations(cfg: Config, migrations: list[str]) -> None:
-    """Report services that need migration."""
-    console.print(f"[cyan]Services to migrate ({len(migrations)}):[/]")
-    for svc in migrations:
-        current = get_service_host(cfg, svc)
-        target = cfg.get_hosts(svc)[0]
-        console.print(f"  [cyan]{svc}[/]: [magenta]{current}[/] → [magenta]{target}[/]")
-
-
-def _report_pending_orphans(orphaned: dict[str, str | list[str]]) -> None:
-    """Report orphaned services that will be stopped."""
-    console.print(f"[yellow]Orphaned services to stop ({len(orphaned)}):[/]")
-    for svc, hosts in orphaned.items():
-        console.print(f"  [cyan]{svc}[/] on [magenta]{_format_host(hosts)}[/]")
-
-
-def _report_pending_starts(cfg: Config, missing: list[str]) -> None:
-    """Report services that will be started."""
-    console.print(f"[green]Services to start ({len(missing)}):[/]")
-    for svc in missing:
-        target = _format_host(cfg.get_hosts(svc))
-        console.print(f"  [cyan]{svc}[/] on [magenta]{target}[/]")
-
-
-def _report_pending_refresh(cfg: Config, to_refresh: list[str]) -> None:
-    """Report services that will be refreshed."""
-    console.print(f"[blue]Services to refresh ({len(to_refresh)}):[/]")
-    for svc in to_refresh:
-        target = _format_host(cfg.get_hosts(svc))
-        console.print(f"  [cyan]{svc}[/] on [magenta]{target}[/]")
-
-
 @app.command(rich_help_panel="Lifecycle")
-def apply(
+def apply(  # noqa: PLR0912 (multi-phase reconciliation needs these branches)
     dry_run: Annotated[
         bool,
         typer.Option("--dry-run", "-n", help="Show what would change without executing"),
@@ -245,21 +189,31 @@ def apply(
     has_refresh = bool(to_refresh)

     if not has_orphans and not has_migrations and not has_missing and not has_refresh:
-        console.print("[green]✓[/] Nothing to apply - reality matches config")
+        print_success("Nothing to apply - reality matches config")
         return

     # Report what will be done
     if has_orphans:
-        _report_pending_orphans(orphaned)
+        console.print(f"[yellow]Orphaned services to stop ({len(orphaned)}):[/]")
+        for svc, hosts in orphaned.items():
+            console.print(f"  [cyan]{svc}[/] on [magenta]{format_host(hosts)}[/]")
     if has_migrations:
-        _report_pending_migrations(cfg, migrations)
+        console.print(f"[cyan]Services to migrate ({len(migrations)}):[/]")
+        for svc in migrations:
+            current = get_service_host(cfg, svc)
+            target = cfg.get_hosts(svc)[0]
+            console.print(f"  [cyan]{svc}[/]: [magenta]{current}[/] → [magenta]{target}[/]")
     if has_missing:
-        _report_pending_starts(cfg, missing)
+        console.print(f"[green]Services to start ({len(missing)}):[/]")
+        for svc in missing:
+            console.print(f"  [cyan]{svc}[/] on [magenta]{format_host(cfg.get_hosts(svc))}[/]")
     if has_refresh:
-        _report_pending_refresh(cfg, to_refresh)
+        console.print(f"[blue]Services to refresh ({len(to_refresh)}):[/]")
+        for svc in to_refresh:
+            console.print(f"  [cyan]{svc}[/] on [magenta]{format_host(cfg.get_hosts(svc))}[/]")

     if dry_run:
-        console.print("\n[dim](dry-run: no changes made)[/]")
+        console.print(f"\n{MSG_DRY_RUN}")
        return

     # Execute changes

|
||||
from typing import TYPE_CHECKING, Annotated
|
||||
|
||||
import typer
|
||||
from rich.progress import Progress, TaskID # noqa: TC002
|
||||
|
||||
from compose_farm.cli.app import app
|
||||
from compose_farm.cli.common import (
|
||||
@@ -17,16 +16,25 @@ from compose_farm.cli.common import (
|
||||
ConfigOption,
|
||||
LogPathOption,
|
||||
ServicesArg,
|
||||
format_host,
|
||||
get_services,
|
||||
load_config_or_exit,
|
||||
progress_bar,
|
||||
run_async,
|
||||
run_parallel_with_progress,
|
||||
validate_hosts,
|
||||
validate_services,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from compose_farm.config import Config
|
||||
|
||||
from compose_farm.console import console, err_console
|
||||
from compose_farm.console import (
|
||||
MSG_DRY_RUN,
|
||||
console,
|
||||
print_error,
|
||||
print_success,
|
||||
print_warning,
|
||||
)
|
||||
from compose_farm.executor import (
|
||||
CommandResult,
|
||||
is_local,
|
||||
@@ -54,21 +62,12 @@ from compose_farm.traefik import generate_traefik_config, render_traefik_config
|
||||
|
||||
def _discover_services(cfg: Config) -> dict[str, str | list[str]]:
|
||||
"""Discover running services with a progress bar."""
|
||||
|
||||
async def gather_with_progress(
|
||||
progress: Progress, task_id: TaskID
|
||||
) -> dict[str, str | list[str]]:
|
||||
tasks = [asyncio.create_task(discover_service_host(cfg, s)) for s in cfg.services]
|
||||
discovered: dict[str, str | list[str]] = {}
|
||||
for coro in asyncio.as_completed(tasks):
|
||||
service, host = await coro
|
||||
if host is not None:
|
||||
discovered[service] = host
|
||||
progress.update(task_id, advance=1, description=f"[cyan]{service}[/]")
|
||||
return discovered
|
||||
|
||||
with progress_bar("Discovering", len(cfg.services)) as (progress, task_id):
|
||||
return asyncio.run(gather_with_progress(progress, task_id))
|
||||
results = run_parallel_with_progress(
|
||||
"Discovering",
|
||||
list(cfg.services),
|
||||
lambda s: discover_service_host(cfg, s),
|
||||
)
|
||||
return {svc: host for svc, host in results if host is not None}
|
||||
|
||||
|
||||
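Several helpers in this diff now funnel into `run_parallel_with_progress`, whose implementation is not shown in this excerpt. Judging from the call sites (a label, a list of items, an async callable), it plausibly looks something like the sketch below — an assumption based on the removed `gather_with_progress` bodies, not the repo's actual code, and the real version drives a rich progress bar rather than `print`:

    import asyncio
    from collections.abc import Awaitable, Callable
    from typing import TypeVar

    T = TypeVar("T")
    R = TypeVar("R")

    def run_parallel_with_progress_sketch(
        label: str,
        items: list[T],
        fn: Callable[[T], Awaitable[R]],
    ) -> list[R]:
        """Run fn over items concurrently, reporting completions as they happen."""
        async def gather() -> list[R]:
            tasks = [asyncio.create_task(fn(item)) for item in items]
            results: list[R] = []
            for done in asyncio.as_completed(tasks):
                results.append(await done)  # completion order, not submission order
                print(f"{label}: {len(results)}/{len(items)} done")  # progress stand-in
            return results
        return asyncio.run(gather())
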
 def _snapshot_services(
@@ -77,36 +76,22 @@ def _snapshot_services(
     log_path: Path | None,
 ) -> Path:
     """Capture image digests with a progress bar."""
-
-    async def collect_service(service: str, now: datetime) -> list[SnapshotEntry]:
-        try:
-            return await collect_service_entries(cfg, service, now=now)
-        except RuntimeError:
-            return []
-
-    async def gather_with_progress(
-        progress: Progress, task_id: TaskID, now: datetime, svc_list: list[str]
-    ) -> list[SnapshotEntry]:
-        # Map tasks to service names so we can update description
-        task_to_service = {asyncio.create_task(collect_service(s, now)): s for s in svc_list}
-        all_entries: list[SnapshotEntry] = []
-        for coro in asyncio.as_completed(list(task_to_service.keys())):
-            entries = await coro
-            all_entries.extend(entries)
-            # Find which service just completed (by checking done tasks)
-            for t, svc in task_to_service.items():
-                if t.done() and not hasattr(t, "_reported"):
-                    t._reported = True  # type: ignore[attr-defined]
-                    progress.update(task_id, advance=1, description=f"[cyan]{svc}[/]")
-                    break
-        return all_entries
-
     effective_log_path = log_path or DEFAULT_LOG_PATH
     now_dt = datetime.now(UTC)
     now_iso = isoformat(now_dt)

-    with progress_bar("Capturing", len(services)) as (progress, task_id):
-        snapshot_entries = asyncio.run(gather_with_progress(progress, task_id, now_dt, services))
+    async def collect_service(service: str) -> tuple[str, list[SnapshotEntry]]:
+        try:
+            return service, await collect_service_entries(cfg, service, now=now_dt)
+        except RuntimeError:
+            return service, []
+
+    results = run_parallel_with_progress(
+        "Capturing",
+        services,
+        collect_service,
+    )
+    snapshot_entries = [entry for _, entries in results for entry in entries]

     if not snapshot_entries:
         msg = "No image digests were captured"
@@ -119,13 +104,6 @@ def _snapshot_services(
     return effective_log_path


-def _format_host(host: str | list[str]) -> str:
-    """Format a host value for display."""
-    if isinstance(host, list):
-        return ", ".join(host)
-    return host
-
-
 def _report_sync_changes(
     added: list[str],
     removed: list[str],
@@ -137,14 +115,14 @@ def _report_sync_changes(
     if added:
         console.print(f"\nNew services found ({len(added)}):")
         for service in sorted(added):
-            host_str = _format_host(discovered[service])
+            host_str = format_host(discovered[service])
             console.print(f"  [green]+[/] [cyan]{service}[/] on [magenta]{host_str}[/]")

     if changed:
         console.print(f"\nServices on different hosts ({len(changed)}):")
         for service, old_host, new_host in sorted(changed):
-            old_str = _format_host(old_host)
-            new_str = _format_host(new_host)
+            old_str = format_host(old_host)
+            new_str = format_host(new_host)
             console.print(
                 f"  [yellow]~[/] [cyan]{service}[/]: [magenta]{old_str}[/] → [magenta]{new_str}[/]"
             )
@@ -152,7 +130,7 @@ def _report_sync_changes(
     if removed:
         console.print(f"\nServices no longer running ({len(removed)}):")
         for service in sorted(removed):
-            host_str = _format_host(current_state[service])
+            host_str = format_host(current_state[service])
             console.print(f"  [red]-[/] [cyan]{service}[/] (was on [magenta]{host_str}[/])")


@@ -171,21 +149,21 @@ def _check_ssh_connectivity(cfg: Config) -> list[str]:

     async def check_host(host_name: str) -> tuple[str, bool]:
         host = cfg.hosts[host_name]
-        result = await run_command(host, "echo ok", host_name, stream=False)
-        return host_name, result.success
+        try:
+            result = await asyncio.wait_for(
+                run_command(host, "echo ok", host_name, stream=False),
+                timeout=5.0,
+            )
+            return host_name, result.success
+        except TimeoutError:
+            return host_name, False

-    async def gather_with_progress(progress: Progress, task_id: TaskID) -> list[str]:
-        tasks = [asyncio.create_task(check_host(h)) for h in remote_hosts]
-        unreachable: list[str] = []
-        for coro in asyncio.as_completed(tasks):
-            host_name, success = await coro
-            if not success:
-                unreachable.append(host_name)
-            progress.update(task_id, advance=1, description=f"[cyan]{host_name}[/]")
-        return unreachable
-
-    with progress_bar("Checking SSH connectivity", len(remote_hosts)) as (progress, task_id):
-        return asyncio.run(gather_with_progress(progress, task_id))
+    results = run_parallel_with_progress(
+        "Checking SSH connectivity",
+        remote_hosts,
+        check_host,
+    )
+    return [host for host, success in results if not success]

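The connectivity probe now wraps each SSH check in `asyncio.wait_for` so one stalled host cannot hang the whole scan. The pattern in isolation (the sleep stands in for the real `run_command` call):

    import asyncio

    async def probe(host: str) -> tuple[str, bool]:
        async def slow_echo() -> bool:
            await asyncio.sleep(10)  # stands in for run_command(host, "echo ok", ...)
            return True
        try:
            ok = await asyncio.wait_for(slow_echo(), timeout=5.0)
            return host, ok
        except TimeoutError:  # wait_for raises TimeoutError on expiry (3.11+)
            return host, False

    print(asyncio.run(probe("nas")))  # -> ("nas", False) after ~5 seconds
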
 def _check_service_requirements(
@@ -222,27 +200,21 @@ def _check_service_requirements(

         return service, mount_errors, network_errors, device_errors

-    async def gather_with_progress(
-        progress: Progress, task_id: TaskID
-    ) -> tuple[list[tuple[str, str, str]], list[tuple[str, str, str]], list[tuple[str, str, str]]]:
-        tasks = [asyncio.create_task(check_service(s)) for s in services]
-        all_mount_errors: list[tuple[str, str, str]] = []
-        all_network_errors: list[tuple[str, str, str]] = []
-        all_device_errors: list[tuple[str, str, str]] = []
+    results = run_parallel_with_progress(
+        "Checking requirements",
+        services,
+        check_service,
+    )

-        for coro in asyncio.as_completed(tasks):
-            service, mount_errs, net_errs, dev_errs = await coro
-            all_mount_errors.extend(mount_errs)
-            all_network_errors.extend(net_errs)
-            all_device_errors.extend(dev_errs)
-            progress.update(task_id, advance=1, description=f"[cyan]{service}[/]")
+    all_mount_errors: list[tuple[str, str, str]] = []
+    all_network_errors: list[tuple[str, str, str]] = []
+    all_device_errors: list[tuple[str, str, str]] = []
+    for _, mount_errs, net_errs, dev_errs in results:
+        all_mount_errors.extend(mount_errs)
+        all_network_errors.extend(net_errs)
+        all_device_errors.extend(dev_errs)

-        return all_mount_errors, all_network_errors, all_device_errors
-
-    with progress_bar(
-        "Checking requirements", len(services), initial_description="[dim]checking...[/]"
-    ) as (progress, task_id):
-        return asyncio.run(gather_with_progress(progress, task_id))
+    return all_mount_errors, all_network_errors, all_device_errors


 def _report_config_status(cfg: Config) -> bool:
@@ -263,7 +235,7 @@ def _report_config_status(cfg: Config) -> bool:
             console.print(f"  [red]-[/] [cyan]{name}[/]")

     if not unmanaged and not missing_from_disk:
-        console.print("[green]✓[/] Config matches disk")
+        print_success("Config matches disk")

     return bool(missing_from_disk)

@@ -275,11 +247,10 @@ def _report_orphaned_services(cfg: Config) -> bool:
     if orphaned:
         console.print("\n[yellow]Orphaned services[/] (in state but not in config):")
         console.print(
-            "[dim]Run 'cf apply' to stop them, or 'cf down --orphaned' for just orphans.[/]"
+            "[dim]Run [bold]cf apply[/bold] to stop them, or [bold]cf down --orphaned[/bold] for just orphans.[/]"
         )
         for name, hosts in sorted(orphaned.items()):
-            host_str = ", ".join(hosts) if isinstance(hosts, list) else hosts
-            console.print(f"  [yellow]![/] [cyan]{name}[/] on [magenta]{host_str}[/]")
+            console.print(f"  [yellow]![/] [cyan]{name}[/] on [magenta]{format_host(hosts)}[/]")
         return True

     return False
@@ -295,54 +266,24 @@ def _report_traefik_status(cfg: Config, services: list[str]) -> None:
     if warnings:
         console.print(f"\n[yellow]Traefik issues[/] ({len(warnings)}):")
         for warning in warnings:
-            console.print(f"  [yellow]![/] {warning}")
+            print_warning(warning)
     else:
-        console.print("[green]✓[/] Traefik labels valid")
+        print_success("Traefik labels valid")


-def _report_mount_errors(mount_errors: list[tuple[str, str, str]]) -> None:
-    """Report mount errors grouped by service."""
+def _report_requirement_errors(errors: list[tuple[str, str, str]], category: str) -> None:
+    """Report requirement errors (mounts, networks, devices) grouped by service."""
     by_service: dict[str, list[tuple[str, str]]] = {}
-    for svc, host, path in mount_errors:
-        by_service.setdefault(svc, []).append((host, path))
+    for svc, host, item in errors:
+        by_service.setdefault(svc, []).append((host, item))

-    console.print(f"[red]Missing mounts[/] ({len(mount_errors)}):")
+    console.print(f"[red]Missing {category}[/] ({len(errors)}):")
     for svc, items in sorted(by_service.items()):
         host = items[0][0]
-        paths = [p for _, p in items]
+        missing = [i for _, i in items]
         console.print(f"  [cyan]{svc}[/] on [magenta]{host}[/]:")
-        for path in paths:
-            console.print(f"    [red]✗[/] {path}")
-
-
-def _report_network_errors(network_errors: list[tuple[str, str, str]]) -> None:
-    """Report network errors grouped by service."""
-    by_service: dict[str, list[tuple[str, str]]] = {}
-    for svc, host, net in network_errors:
-        by_service.setdefault(svc, []).append((host, net))
-
-    console.print(f"[red]Missing networks[/] ({len(network_errors)}):")
-    for svc, items in sorted(by_service.items()):
-        host = items[0][0]
-        networks = [n for _, n in items]
-        console.print(f"  [cyan]{svc}[/] on [magenta]{host}[/]:")
-        for net in networks:
-            console.print(f"    [red]✗[/] {net}")
-
-
-def _report_device_errors(device_errors: list[tuple[str, str, str]]) -> None:
-    """Report device errors grouped by service."""
-    by_service: dict[str, list[tuple[str, str]]] = {}
-    for svc, host, dev in device_errors:
-        by_service.setdefault(svc, []).append((host, dev))
-
-    console.print(f"[red]Missing devices[/] ({len(device_errors)}):")
-    for svc, items in sorted(by_service.items()):
-        host = items[0][0]
-        devices = [d for _, d in items]
-        console.print(f"  [cyan]{svc}[/] on [magenta]{host}[/]:")
-        for dev in devices:
-            console.print(f"    [red]✗[/] {dev}")
+        for item in missing:
+            console.print(f"    [red]✗[/] {item}")

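The consolidated `_report_requirement_errors` groups `(service, host, item)` triples by service with `dict.setdefault`. The grouping step on its own, over sample data:

    errors = [
        ("plex", "nas", "/mnt/media"),
        ("plex", "nas", "/mnt/config"),
        ("grafana", "vm1", "monitoring-net"),
    ]
    by_service: dict[str, list[tuple[str, str]]] = {}
    for svc, host, item in errors:
        # setdefault inserts an empty list the first time a service appears.
        by_service.setdefault(svc, []).append((host, item))
    # {"plex": [("nas", "/mnt/media"), ("nas", "/mnt/config")],
    #  "grafana": [("vm1", "monitoring-net")]}
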
 def _report_ssh_status(unreachable_hosts: list[str]) -> bool:
@@ -350,9 +291,9 @@ def _report_ssh_status(unreachable_hosts: list[str]) -> bool:
     if unreachable_hosts:
         console.print(f"[red]Unreachable hosts[/] ({len(unreachable_hosts)}):")
         for host in sorted(unreachable_hosts):
-            console.print(f"  [red]✗[/] [magenta]{host}[/]")
+            print_error(f"[magenta]{host}[/]")
         return True
-    console.print("[green]✓[/] All hosts reachable")
+    print_success("All hosts reachable")
     return False


@@ -394,16 +335,16 @@ def _run_remote_checks(cfg: Config, svc_list: list[str], *, show_host_compat: bo
     mount_errors, network_errors, device_errors = _check_service_requirements(cfg, svc_list)

     if mount_errors:
-        _report_mount_errors(mount_errors)
+        _report_requirement_errors(mount_errors, "mounts")
         has_errors = True
     if network_errors:
-        _report_network_errors(network_errors)
+        _report_requirement_errors(network_errors, "networks")
         has_errors = True
     if device_errors:
-        _report_device_errors(device_errors)
+        _report_requirement_errors(device_errors, "devices")
         has_errors = True
     if not mount_errors and not network_errors and not device_errors:
-        console.print("[green]✓[/] All mounts, networks, and devices exist")
+        print_success("All mounts, networks, and devices exist")

     if show_host_compat:
         for service in svc_list:
@@ -440,7 +381,7 @@ def traefik_file(
     try:
         dynamic, warnings = generate_traefik_config(cfg, svc_list)
     except (FileNotFoundError, ValueError) as exc:
-        err_console.print(f"[red]✗[/] {exc}")
+        print_error(str(exc))
         raise typer.Exit(1) from exc

     rendered = render_traefik_config(dynamic)
@@ -448,12 +389,12 @@ def traefik_file(
     if output:
         output.parent.mkdir(parents=True, exist_ok=True)
         output.write_text(rendered)
-        console.print(f"[green]✓[/] Traefik config written to {output}")
+        print_success(f"Traefik config written to {output}")
     else:
         console.print(rendered)

     for warning in warnings:
-        err_console.print(f"[yellow]![/] {warning}")
+        print_warning(warning)


 @app.command(rich_help_panel="Configuration")
@@ -492,24 +433,24 @@ def refresh(
     if state_changed:
         _report_sync_changes(added, removed, changed, discovered, current_state)
     else:
-        console.print("[green]✓[/] State is already in sync.")
+        print_success("State is already in sync.")

     if dry_run:
-        console.print("\n[dim](dry-run: no changes made)[/]")
+        console.print(f"\n{MSG_DRY_RUN}")
         return

     # Update state file
     if state_changed:
         save_state(cfg, discovered)
-        console.print(f"\n[green]✓[/] State updated: {len(discovered)} services tracked.")
+        print_success(f"State updated: {len(discovered)} services tracked.")

     # Capture image digests for running services
     if discovered:
         try:
             path = _snapshot_services(cfg, list(discovered.keys()), log_path)
-            console.print(f"[green]✓[/] Digests written to {path}")
+            print_success(f"Digests written to {path}")
         except RuntimeError as exc:
-            err_console.print(f"[yellow]![/] {exc}")
+            print_warning(str(exc))


 @app.command(rich_help_panel="Configuration")
@@ -533,11 +474,7 @@ def check(
     # Determine which services to check and whether to show host compatibility
     if services:
         svc_list = list(services)
-        invalid = [s for s in svc_list if s not in cfg.services]
-        if invalid:
-            for svc in invalid:
-                err_console.print(f"[red]✗[/] Service '{svc}' not found in config")
-            raise typer.Exit(1)
+        validate_services(cfg, svc_list)
         show_host_compat = True
     else:
         svc_list = list(cfg.services.keys())
@@ -587,11 +524,7 @@ def init_network(
     cfg = load_config_or_exit(config)

     target_hosts = list(hosts) if hosts else list(cfg.hosts.keys())
-    invalid = [h for h in target_hosts if h not in cfg.hosts]
-    if invalid:
-        for h in invalid:
-            err_console.print(f"[red]✗[/] Host '{h}' not found in config")
-        raise typer.Exit(1)
+    validate_hosts(cfg, target_hosts)

     async def create_network_on_host(host_name: str) -> CommandResult:
         host = cfg.hosts[host_name]
@@ -616,9 +549,8 @@ def init_network(
         if result.success:
             console.print(f"[cyan]\\[{host_name}][/] [green]✓[/] Created network '{network}'")
         else:
-            err_console.print(
-                f"[cyan]\\[{host_name}][/] [red]✗[/] Failed to create network: "
-                f"{result.stderr.strip()}"
+            print_error(
+                f"[cyan]\\[{host_name}][/] Failed to create network: {result.stderr.strip()}"
             )

         return result

|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import contextlib
|
||||
from typing import TYPE_CHECKING, Annotated
|
||||
|
||||
import typer
|
||||
from rich.progress import Progress, TaskID # noqa: TC002
|
||||
from rich.table import Table
|
||||
|
||||
from compose_farm.cli.app import app
|
||||
@@ -19,47 +17,18 @@ from compose_farm.cli.common import (
|
||||
ServicesArg,
|
||||
get_services,
|
||||
load_config_or_exit,
|
||||
progress_bar,
|
||||
report_results,
|
||||
run_async,
|
||||
run_parallel_with_progress,
|
||||
)
|
||||
from compose_farm.console import console, err_console
|
||||
from compose_farm.console import console
|
||||
from compose_farm.executor import run_command, run_on_services
|
||||
from compose_farm.state import get_services_needing_migration, load_state
|
||||
from compose_farm.state import get_services_needing_migration, group_services_by_host, load_state
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Mapping
|
||||
|
||||
from compose_farm.config import Config
|
||||
|
||||
|
||||
def _group_services_by_host(
|
||||
services: dict[str, str | list[str]],
|
||||
hosts: Mapping[str, object],
|
||||
all_hosts: list[str] | None = None,
|
||||
) -> dict[str, list[str]]:
|
||||
"""Group services by their assigned host(s).
|
||||
|
||||
For multi-host services (list or "all"), the service appears in multiple host lists.
|
||||
"""
|
||||
by_host: dict[str, list[str]] = {h: [] for h in hosts}
|
||||
for service, host_value in services.items():
|
||||
if isinstance(host_value, list):
|
||||
# Explicit list of hosts
|
||||
for host_name in host_value:
|
||||
if host_name in by_host:
|
||||
by_host[host_name].append(service)
|
||||
elif host_value == "all" and all_hosts:
|
||||
# "all" keyword - add to all hosts
|
||||
for host_name in all_hosts:
|
||||
if host_name in by_host:
|
||||
by_host[host_name].append(service)
|
||||
elif host_value in by_host:
|
||||
# Single host
|
||||
by_host[host_value].append(service)
|
||||
return by_host
|
||||
|
||||
|
||||
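The removed `_group_services_by_host` now lives in `compose_farm.state` as `group_services_by_host`. Its fan-out behavior is easiest to see with a tiny example; this reuses the removed implementation verbatim on invented hosts and services:

    def group(services, hosts, all_hosts=None):
        by_host = {h: [] for h in hosts}
        for service, host_value in services.items():
            if isinstance(host_value, list):
                for h in host_value:
                    if h in by_host:
                        by_host[h].append(service)
            elif host_value == "all" and all_hosts:
                for h in all_hosts:
                    if h in by_host:
                        by_host[h].append(service)
            elif host_value in by_host:
                by_host[host_value].append(service)
        return by_host

    print(group({"plex": "nas", "uptime": "all", "db": ["vm1", "vm2"]},
                {"nas": 0, "vm1": 0, "vm2": 0},
                ["nas", "vm1", "vm2"]))
    # {'nas': ['plex', 'uptime'], 'vm1': ['uptime', 'db'], 'vm2': ['uptime', 'db']}
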
 def _get_container_counts(cfg: Config) -> dict[str, int]:
     """Get container counts from all hosts with a progress bar."""

@@ -72,18 +41,12 @@ def _get_container_counts(cfg: Config) -> dict[str, int]:
         count = int(result.stdout.strip())
         return host_name, count

-    async def gather_with_progress(progress: Progress, task_id: TaskID) -> dict[str, int]:
-        hosts = list(cfg.hosts.keys())
-        tasks = [asyncio.create_task(get_count(h)) for h in hosts]
-        results: dict[str, int] = {}
-        for coro in asyncio.as_completed(tasks):
-            host_name, count = await coro
-            results[host_name] = count
-            progress.update(task_id, advance=1, description=f"[cyan]{host_name}[/]")
-        return results
-
-    with progress_bar("Querying hosts", len(cfg.hosts)) as (progress, task_id):
-        return asyncio.run(gather_with_progress(progress, task_id))
+    results = run_parallel_with_progress(
+        "Querying hosts",
+        list(cfg.hosts.keys()),
+        get_count,
+    )
+    return dict(results)


 def _build_host_table(
@@ -163,24 +126,7 @@ def logs(
     config: ConfigOption = None,
 ) -> None:
     """Show service logs."""
-    if all_services and host is not None:
-        err_console.print("[red]✗[/] Cannot use --all and --host together")
-        raise typer.Exit(1)
-
-    cfg = load_config_or_exit(config)
-
-    # Determine service list based on options
-    if host is not None:
-        if host not in cfg.hosts:
-            err_console.print(f"[red]✗[/] Host '{host}' not found in config")
-            raise typer.Exit(1)
-        # Include services where host is in the list of configured hosts
-        svc_list = [s for s in cfg.services if host in cfg.get_hosts(s)]
-        if not svc_list:
-            err_console.print(f"[yellow]![/] No services configured for host '{host}'")
-            return
-    else:
-        svc_list, cfg = get_services(services or [], all_services, config)
+    svc_list, cfg = get_services(services or [], all_services, config, host=host)

     # Default to fewer lines when showing multiple services
     many_services = all_services or host is not None or len(svc_list) > 1
@@ -194,11 +140,19 @@

 @app.command(rich_help_panel="Monitoring")
 def ps(
+    services: ServicesArg = None,
+    all_services: AllOption = False,
+    host: HostOption = None,
     config: ConfigOption = None,
 ) -> None:
-    """Show status of all services."""
-    cfg = load_config_or_exit(config)
-    results = run_async(run_on_services(cfg, list(cfg.services.keys()), "ps"))
+    """Show status of services.
+
+    Without arguments: shows all services (same as --all).
+    With service names: shows only those services.
+    With --host: shows services on that host.
+    """
+    svc_list, cfg = get_services(services or [], all_services, config, host=host, default_all=True)
+    results = run_async(run_on_services(cfg, svc_list, "ps"))
     report_results(results)


@@ -220,8 +174,8 @@ def stats(
     pending = get_services_needing_migration(cfg)

     all_hosts = list(cfg.hosts.keys())
-    services_by_host = _group_services_by_host(cfg.services, cfg.hosts, all_hosts)
-    running_by_host = _group_services_by_host(state, cfg.hosts, all_hosts)
+    services_by_host = group_services_by_host(cfg.services, cfg.hosts, all_hosts)
+    running_by_host = group_services_by_host(state, cfg.hosts, all_hosts)

     container_counts: dict[str, int] = {}
     if live:

src/compose_farm/cli/ssh.py (new file, 282 lines)
@@ -0,0 +1,282 @@
+"""SSH key management commands for compose-farm."""
+
+from __future__ import annotations
+
+import asyncio
+import subprocess
+from typing import TYPE_CHECKING, Annotated
+
+import typer
+
+from compose_farm.cli.app import app
+from compose_farm.cli.common import ConfigOption, load_config_or_exit, run_parallel_with_progress
+from compose_farm.console import console, err_console
+from compose_farm.executor import run_command
+
+if TYPE_CHECKING:
+    from compose_farm.config import Host
+
+from compose_farm.ssh_keys import (
+    SSH_KEY_PATH,
+    SSH_PUBKEY_PATH,
+    get_pubkey_content,
+    get_ssh_env,
+    key_exists,
+)
+
+_DEFAULT_SSH_PORT = 22
+_PUBKEY_DISPLAY_THRESHOLD = 60
+
+ssh_app = typer.Typer(
+    name="ssh",
+    help="Manage SSH keys for passwordless authentication.",
+    no_args_is_help=True,
+)
+
+_ForceOption = Annotated[
+    bool,
+    typer.Option("--force", "-f", help="Regenerate key even if it exists."),
+]
+
+
+def _generate_key(*, force: bool = False) -> bool:
+    """Generate an ED25519 SSH key with no passphrase.
+
+    Returns True if key was generated, False if skipped.
+    """
+    if key_exists() and not force:
+        console.print(f"[yellow]![/] SSH key already exists: {SSH_KEY_PATH}")
+        console.print("[dim]Use --force to regenerate[/]")
+        return False
+
+    # Create .ssh directory if it doesn't exist
+    SSH_KEY_PATH.parent.mkdir(parents=True, exist_ok=True, mode=0o700)
+
+    # Remove existing key if forcing regeneration
+    if force:
+        SSH_KEY_PATH.unlink(missing_ok=True)
+        SSH_PUBKEY_PATH.unlink(missing_ok=True)
+
+    console.print(f"[dim]Generating SSH key at {SSH_KEY_PATH}...[/]")
+
+    try:
+        subprocess.run(
+            [  # noqa: S607
+                "ssh-keygen",
+                "-t",
+                "ed25519",
+                "-N",
+                "",  # No passphrase
+                "-f",
+                str(SSH_KEY_PATH),
+                "-C",
+                "compose-farm",
+            ],
+            check=True,
+            capture_output=True,
+        )
+    except subprocess.CalledProcessError as e:
+        err_console.print(f"[red]Failed to generate SSH key:[/] {e.stderr.decode()}")
+        return False
+    except FileNotFoundError:
+        err_console.print("[red]ssh-keygen not found. Is OpenSSH installed?[/]")
+        return False
+
+    # Set correct permissions
+    SSH_KEY_PATH.chmod(0o600)
+    SSH_PUBKEY_PATH.chmod(0o644)
+
+    console.print(f"[green]Generated SSH key:[/] {SSH_KEY_PATH}")
+    return True

+def _copy_key_to_host(host_name: str, address: str, user: str, port: int) -> bool:
+    """Copy public key to a host's authorized_keys.
+
+    Uses ssh-copy-id which handles agent vs password fallback automatically.
+    Returns True on success, False on failure.
+    """
+    target = f"{user}@{address}"
+    console.print(f"[dim]Copying key to {host_name} ({target})...[/]")
+
+    cmd = ["ssh-copy-id"]
+
+    # Disable strict host key checking (consistent with executor.py)
+    cmd.extend(["-o", "StrictHostKeyChecking=no"])
+    cmd.extend(["-o", "UserKnownHostsFile=/dev/null"])
+
+    if port != _DEFAULT_SSH_PORT:
+        cmd.extend(["-p", str(port)])
+
+    cmd.extend(["-i", str(SSH_PUBKEY_PATH), target])
+
+    try:
+        # Don't capture output so user can see password prompt
+        result = subprocess.run(cmd, check=False, env=get_ssh_env())
+        if result.returncode == 0:
+            console.print(f"[green]Key copied to {host_name}[/]")
+            return True
+        err_console.print(f"[red]Failed to copy key to {host_name}[/]")
+        return False
+    except FileNotFoundError:
+        err_console.print("[red]ssh-copy-id not found. Is OpenSSH installed?[/]")
+        return False
+
+
+@ssh_app.command("keygen")
+def ssh_keygen(
+    force: _ForceOption = False,
+) -> None:
+    """Generate SSH key (does not distribute to hosts).
+
+    Creates an ED25519 key at ~/.ssh/compose-farm/id_ed25519 with no passphrase.
+    Use 'cf ssh setup' to also distribute the key to all configured hosts.
+    """
+    success = _generate_key(force=force)
+    if not success and not key_exists():
+        raise typer.Exit(1)
+
+
+@ssh_app.command("setup")
+def ssh_setup(
+    config: ConfigOption = None,
+    force: _ForceOption = False,
+) -> None:
+    """Generate SSH key and distribute to all configured hosts.
+
+    Creates an ED25519 key at ~/.ssh/compose-farm/id_ed25519 (no passphrase)
+    and copies the public key to authorized_keys on each host.
+
+    For each host, tries SSH agent first. If agent is unavailable,
+    prompts for password.
+    """
+    cfg = load_config_or_exit(config)
+
+    # Skip localhost hosts
+    remote_hosts = {
+        name: host
+        for name, host in cfg.hosts.items()
+        if host.address.lower() not in ("localhost", "127.0.0.1")
+    }
+
+    if not remote_hosts:
+        console.print("[yellow]No remote hosts configured.[/]")
+        raise typer.Exit(0)
+
+    # Generate key if needed
+    if not key_exists() or force:
+        if not _generate_key(force=force):
+            raise typer.Exit(1)
+    else:
+        console.print(f"[dim]Using existing key: {SSH_KEY_PATH}[/]")
+
+    console.print()
+    console.print(f"[bold]Distributing key to {len(remote_hosts)} host(s)...[/]")
+    console.print()
+
+    # Copy key to each host
+    succeeded = 0
+    failed = 0
+
+    for host_name, host in remote_hosts.items():
+        if _copy_key_to_host(host_name, host.address, host.user, host.port):
+            succeeded += 1
+        else:
+            failed += 1
+
+    console.print()
+    if failed == 0:
+        console.print(
+            f"[green]Setup complete.[/] {succeeded}/{len(remote_hosts)} hosts configured."
+        )
+    else:
+        console.print(
+            f"[yellow]Setup partially complete.[/] {succeeded}/{len(remote_hosts)} hosts configured, "
+            f"[red]{failed} failed[/]."
+        )
+        raise typer.Exit(1)
+
+
+@ssh_app.command("status")
+def ssh_status(
+    config: ConfigOption = None,
+) -> None:
+    """Show SSH key status and host connectivity."""
+    from rich.table import Table  # noqa: PLC0415
+
+    cfg = load_config_or_exit(config)
+
+    # Key status
+    console.print("[bold]SSH Key Status[/]")
+    console.print()
+
+    if key_exists():
+        console.print(f"  [green]Key exists:[/] {SSH_KEY_PATH}")
+        pubkey = get_pubkey_content()
+        if pubkey:
+            # Show truncated public key
+            if len(pubkey) > _PUBKEY_DISPLAY_THRESHOLD:
+                console.print(f"  [dim]Public key:[/] {pubkey[:30]}...{pubkey[-20:]}")
+            else:
+                console.print(f"  [dim]Public key:[/] {pubkey}")
+    else:
+        console.print(f"  [yellow]No key found:[/] {SSH_KEY_PATH}")
+        console.print("  [dim]Run 'cf ssh setup' to generate and distribute a key[/]")
+
+    console.print()
+    console.print("[bold]Host Connectivity[/]")
+    console.print()
+
+    # Skip localhost hosts
+    remote_hosts = {
+        name: host
+        for name, host in cfg.hosts.items()
+        if host.address.lower() not in ("localhost", "127.0.0.1")
+    }
+
+    if not remote_hosts:
+        console.print("  [dim]No remote hosts configured[/]")
+        return
+
+    async def check_host(item: tuple[str, Host]) -> tuple[str, str, str]:
+        """Check connectivity to a single host."""
+        host_name, host = item
+        target = f"{host.user}@{host.address}"
+        if host.port != _DEFAULT_SSH_PORT:
+            target += f":{host.port}"
+
+        try:
+            result = await asyncio.wait_for(
+                run_command(host, "echo ok", host_name, stream=False),
+                timeout=5.0,
+            )
+            status = "[green]OK[/]" if result.success else "[red]Auth failed[/]"
+        except TimeoutError:
+            status = "[red]Timeout (5s)[/]"
+        except Exception as e:
+            status = f"[red]Error: {e}[/]"
+
+        return host_name, target, status
+
+    # Check connectivity in parallel with progress bar
+    results = run_parallel_with_progress(
+        "Checking hosts",
+        list(remote_hosts.items()),
+        check_host,
+    )
+
+    # Build table from results
+    table = Table(show_header=True, header_style="bold")
+    table.add_column("Host")
+    table.add_column("Address")
+    table.add_column("Status")
+
+    # Sort by host name for consistent order
+    for host_name, target, status in sorted(results, key=lambda r: r[0]):
+        table.add_row(host_name, target, status)
+
+    console.print(table)
+
+
+# Register ssh subcommand on the shared app
+app.add_typer(ssh_app, name="ssh", rich_help_panel="Configuration")

@@ -7,14 +7,14 @@ from __future__ import annotations

 import os
 import re
+import stat
 from dataclasses import dataclass
+from pathlib import Path
 from typing import TYPE_CHECKING, Any

 import yaml

 if TYPE_CHECKING:
-    from pathlib import Path
-
     from .config import Config

 # Port parsing constants
@@ -141,23 +141,42 @@ def _resolve_host_path(host_path: str, compose_dir: Path) -> str | None:
     return None  # Named volume


+def _is_socket(path: str) -> bool:
+    """Check if a path is a socket (e.g., SSH agent socket)."""
+    try:
+        return stat.S_ISSOCK(Path(path).stat().st_mode)
+    except (FileNotFoundError, PermissionError, OSError):
+        return False
+
+
 def _parse_volume_item(
     item: str | dict[str, Any],
     env: dict[str, str],
     compose_dir: Path,
 ) -> str | None:
-    """Parse a single volume item and return host path if it's a bind mount."""
+    """Parse a single volume item and return host path if it's a bind mount.
+
+    Skips socket paths (e.g., SSH_AUTH_SOCK) since they're machine-local
+    and shouldn't be validated on remote hosts.
+    """
+    host_path: str | None = None
+
     if isinstance(item, str):
         interpolated = _interpolate(item, env)
         parts = interpolated.split(":")
         if len(parts) >= _MIN_VOLUME_PARTS:
-            return _resolve_host_path(parts[0], compose_dir)
+            host_path = _resolve_host_path(parts[0], compose_dir)
     elif isinstance(item, dict) and item.get("type") == "bind":
         source = item.get("source")
         if source:
             interpolated = _interpolate(str(source), env)
-            return _resolve_host_path(interpolated, compose_dir)
-    return None
+            host_path = _resolve_host_path(interpolated, compose_dir)
+
+    # Skip sockets - they're machine-local (e.g., SSH agent)
+    if host_path and _is_socket(host_path):
+        return None
+
+    return host_path

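A quick way to see why the socket check matters: an `${SSH_AUTH_SOCK}` bind mount resolves to a Unix socket that exists only on the local machine, so requirement checks against remote hosts would always flag it as missing. The check in isolation:

    import os
    import stat
    from pathlib import Path

    def is_socket(path: str) -> bool:
        try:
            return stat.S_ISSOCK(Path(path).stat().st_mode)
        except OSError:  # missing path, permission denied, etc.
            return False

    sock = os.environ.get("SSH_AUTH_SOCK", "")
    print(sock, is_socket(sock))  # True when an SSH agent is running locally
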
 def parse_host_volumes(config: Config, service: str) -> list[str]:
@@ -194,13 +213,7 @@ def parse_host_volumes(config: Config, service: str) -> list[str]:
             paths.append(host_path)

     # Return unique paths, preserving order
-    seen: set[str] = set()
-    unique: list[str] = []
-    for p in paths:
-        if p not in seen:
-            seen.add(p)
-            unique.append(p)
-    return unique
+    return list(dict.fromkeys(paths))


 def parse_devices(config: Config, service: str) -> list[str]:
@@ -239,13 +252,7 @@ def parse_devices(config: Config, service: str) -> list[str]:
             devices.append(host_path)

     # Return unique devices, preserving order
-    seen: set[str] = set()
-    unique: list[str] = []
-    for d in devices:
-        if d not in seen:
-            seen.add(d)
-            unique.append(d)
-    return unique
+    return list(dict.fromkeys(devices))

|
||||
|
||||
@@ -10,6 +10,9 @@ from pydantic import BaseModel, Field, model_validator
|
||||
|
||||
from .paths import config_search_paths, find_config_path
|
||||
|
||||
# Supported compose filenames, in priority order
|
||||
COMPOSE_FILENAMES = ("compose.yaml", "compose.yml", "docker-compose.yml", "docker-compose.yaml")
|
||||
|
||||
|
||||
class Host(BaseModel):
|
||||
"""SSH host configuration."""
|
||||
@@ -90,17 +93,9 @@ class Config(BaseModel):
|
||||
return self.hosts[host_names[0]]
|
||||
|
||||
def get_compose_path(self, service: str) -> Path:
|
||||
"""Get compose file path for a service.
|
||||
|
||||
Tries compose.yaml first, then docker-compose.yml.
|
||||
"""
|
||||
"""Get compose file path for a service (tries compose.yaml first)."""
|
||||
service_dir = self.compose_dir / service
|
||||
for filename in (
|
||||
"compose.yaml",
|
||||
"compose.yml",
|
||||
"docker-compose.yml",
|
||||
"docker-compose.yaml",
|
||||
):
|
||||
for filename in COMPOSE_FILENAMES:
|
||||
candidate = service_dir / filename
|
||||
if candidate.exists():
|
||||
return candidate
|
||||
@@ -109,21 +104,12 @@ class Config(BaseModel):
|
||||
|
||||
def discover_compose_dirs(self) -> set[str]:
|
||||
"""Find all directories in compose_dir that contain a compose file."""
|
||||
compose_filenames = {
|
||||
"compose.yaml",
|
||||
"compose.yml",
|
||||
"docker-compose.yml",
|
||||
"docker-compose.yaml",
|
||||
}
|
||||
found: set[str] = set()
|
||||
if not self.compose_dir.exists():
|
||||
return found
|
||||
for subdir in self.compose_dir.iterdir():
|
||||
if subdir.is_dir():
|
||||
for filename in compose_filenames:
|
||||
if (subdir / filename).exists():
|
||||
found.add(subdir.name)
|
||||
break
|
||||
if subdir.is_dir() and any((subdir / f).exists() for f in COMPOSE_FILENAMES):
|
||||
found.add(subdir.name)
|
||||
return found
|
||||
|
||||
|
||||
|
||||
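Both methods above now share the same priority lookup over `COMPOSE_FILENAMES`. As a standalone sketch (the directory path is hypothetical), the resolution amounts to:

```python
from pathlib import Path

COMPOSE_FILENAMES = ("compose.yaml", "compose.yml", "docker-compose.yml", "docker-compose.yaml")

def first_compose_file(service_dir: Path) -> Path | None:
    # Return the first compose file that exists, honoring priority order.
    return next((service_dir / f for f in COMPOSE_FILENAMES if (service_dir / f).exists()), None)
```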
@@ -4,3 +4,35 @@ from rich.console import Console

console = Console(highlight=False)
err_console = Console(stderr=True, highlight=False)


# --- Message Constants ---
# Standardized message templates for consistent user-facing output

MSG_SERVICE_NOT_FOUND = "Service [cyan]{name}[/] not found in config"
MSG_HOST_NOT_FOUND = "Host [magenta]{name}[/] not found in config"
MSG_CONFIG_NOT_FOUND = "Config file not found"
MSG_DRY_RUN = "[dim](dry-run: no changes made)[/]"


# --- Message Helper Functions ---


def print_error(msg: str) -> None:
    """Print error message with ✗ prefix to stderr."""
    err_console.print(f"[red]✗[/] {msg}")


def print_success(msg: str) -> None:
    """Print success message with ✓ prefix to stdout."""
    console.print(f"[green]✓[/] {msg}")


def print_warning(msg: str) -> None:
    """Print warning message with ! prefix to stderr."""
    err_console.print(f"[yellow]![/] {msg}")


def print_hint(msg: str) -> None:
    """Print hint message in dim style to stdout."""
    console.print(f"[dim]Hint: {msg}[/]")

@@ -12,6 +12,7 @@ from typing import TYPE_CHECKING, Any
from rich.markup import escape

from .console import console, err_console
from .ssh_keys import get_key_path, get_ssh_auth_sock, get_ssh_env

if TYPE_CHECKING:
    from collections.abc import Callable
@@ -22,6 +23,85 @@ LOCAL_ADDRESSES = frozenset({"local", "localhost", "127.0.0.1", "::1"})
_DEFAULT_SSH_PORT = 22


def _print_compose_command(
    host_name: str,
    compose_dir: str,
    compose_path: str,
    compose_cmd: str,
) -> None:
    """Print the docker compose command being executed.

    Shows the host and a simplified command with relative path from compose_dir.
    """
    # Show relative path from compose_dir for cleaner output
    if compose_path.startswith(compose_dir):
        rel_path = compose_path[len(compose_dir) :].lstrip("/")
    else:
        rel_path = compose_path

    console.print(
        f"[dim][magenta]{host_name}[/magenta]: docker compose -f {rel_path} {compose_cmd}[/dim]"
    )


async def _stream_output_lines(
    reader: Any,
    prefix: str,
    *,
    is_stderr: bool = False,
) -> None:
    """Stream lines from a reader to console with a service prefix.

    Works with both asyncio.StreamReader (bytes) and asyncssh readers (str).
    If prefix is empty, output is printed without a prefix.
    """
    out = err_console if is_stderr else console
    async for line in reader:
        text = line.decode() if isinstance(line, bytes) else line
        if text.strip():
            if prefix:
                out.print(f"[cyan]\\[{prefix}][/] {escape(text)}", end="")
            else:
                out.print(escape(text), end="")


def build_ssh_command(host: Host, command: str, *, tty: bool = False) -> list[str]:
    """Build SSH command args for executing a command on a remote host.

    Args:
        host: Host configuration with address, port, user
        command: Command to run on the remote host
        tty: Whether to allocate a TTY (for interactive/progress bar commands)

    Returns:
        List of command args suitable for subprocess

    """
    ssh_args = [
        "ssh",
        "-o",
        "StrictHostKeyChecking=no",
        "-o",
        "UserKnownHostsFile=/dev/null",
        "-o",
        "LogLevel=ERROR",
    ]
    if tty:
        ssh_args.insert(1, "-tt")  # Force TTY allocation

    key_path = get_key_path()
    if key_path:
        ssh_args.extend(["-i", str(key_path)])

    if host.port != _DEFAULT_SSH_PORT:
        ssh_args.extend(["-p", str(host.port)])

    ssh_args.append(f"{host.user}@{host.address}")
    ssh_args.append(command)

    return ssh_args


@lru_cache(maxsize=1)
def _get_local_ips() -> frozenset[str]:
    """Get all IP addresses of the current machine."""
@@ -71,12 +151,32 @@ def is_local(host: Host) -> bool:
    return addr in _get_local_ips()


def ssh_connect_kwargs(host: Host) -> dict[str, Any]:
    """Get kwargs for asyncssh.connect() from a Host config."""
    kwargs: dict[str, Any] = {
        "host": host.address,
        "port": host.port,
        "username": host.user,
        "known_hosts": None,
    }
    # Add SSH agent path (auto-detect forwarded agent if needed)
    agent_path = get_ssh_auth_sock()
    if agent_path:
        kwargs["agent_path"] = agent_path
    # Add key file fallback for when SSH agent is unavailable
    key_path = get_key_path()
    if key_path:
        kwargs["client_keys"] = [str(key_path)]
    return kwargs


async def _run_local_command(
    command: str,
    service: str,
    *,
    stream: bool = True,
    raw: bool = False,
    prefix: str = "",
) -> CommandResult:
    """Run a command locally with streaming output."""
    try:
@@ -101,25 +201,9 @@ async def _run_local_command(
        )

        if stream and proc.stdout and proc.stderr:

            async def read_stream(
                reader: asyncio.StreamReader,
                prefix: str,
                *,
                is_stderr: bool = False,
            ) -> None:
                out = err_console if is_stderr else console
                while True:
                    line = await reader.readline()
                    if not line:
                        break
                    text = line.decode()
                    if text.strip():  # Skip empty lines
                        out.print(f"[cyan]\\[{prefix}][/] {escape(text)}", end="")

            await asyncio.gather(
                read_stream(proc.stdout, service),
                read_stream(proc.stderr, service, is_stderr=True),
                _stream_output_lines(proc.stdout, prefix),
                _stream_output_lines(proc.stderr, prefix, is_stderr=True),
            )

        stdout_data = b""
@@ -148,16 +232,15 @@ async def _run_ssh_command(
    *,
    stream: bool = True,
    raw: bool = False,
    prefix: str = "",
) -> CommandResult:
    """Run a command on a remote host via SSH with streaming output."""
    if raw:
        # Use native ssh with TTY for proper progress bar rendering
        ssh_args = ["ssh", "-t"]
        if host.port != _DEFAULT_SSH_PORT:
            ssh_args.extend(["-p", str(host.port)])
        ssh_args.extend([f"{host.user}@{host.address}", command])
        ssh_args = build_ssh_command(host, command, tty=True)
        # Run in thread to avoid blocking the event loop
        result = await asyncio.to_thread(subprocess.run, ssh_args, check=False)
        # Use get_ssh_env() to auto-detect SSH agent socket
        result = await asyncio.to_thread(subprocess.run, ssh_args, check=False, env=get_ssh_env())
        return CommandResult(
            service=service,
            exit_code=result.returncode,
@@ -168,29 +251,12 @@ async def _run_ssh_command(

    proc: asyncssh.SSHClientProcess[Any]
    try:
        async with asyncssh.connect(  # noqa: SIM117 - conn needed before create_process
            host.address,
            port=host.port,
            username=host.user,
            known_hosts=None,
        ) as conn:
        async with asyncssh.connect(**ssh_connect_kwargs(host)) as conn:  # noqa: SIM117
            async with conn.create_process(command) as proc:
                if stream:

                    async def read_stream(
                        reader: Any,
                        prefix: str,
                        *,
                        is_stderr: bool = False,
                    ) -> None:
                        out = err_console if is_stderr else console
                        async for line in reader:
                            if line.strip():  # Skip empty lines
                                out.print(f"[cyan]\\[{prefix}][/] {escape(line)}", end="")

                    await asyncio.gather(
                        read_stream(proc.stdout, service),
                        read_stream(proc.stderr, service, is_stderr=True),
                        _stream_output_lines(proc.stdout, prefix),
                        _stream_output_lines(proc.stderr, prefix, is_stderr=True),
                    )

                stdout_data = ""
@@ -219,20 +285,27 @@ async def run_command(
    *,
    stream: bool = True,
    raw: bool = False,
    prefix: str | None = None,
) -> CommandResult:
    """Run a command on a host (locally or via SSH).

    Args:
        host: Host configuration
        command: Command to run
        service: Service name (used as prefix in output)
        service: Service name (stored in result)
        stream: Whether to stream output (default True)
        raw: Whether to use raw mode with TTY (default False)
        prefix: Output prefix. None=use service name, ""=no prefix.

    """
    output_prefix = service if prefix is None else prefix
    if is_local(host):
        return await _run_local_command(command, service, stream=stream, raw=raw)
    return await _run_ssh_command(host, command, service, stream=stream, raw=raw)
        return await _run_local_command(
            command, service, stream=stream, raw=raw, prefix=output_prefix
        )
    return await _run_ssh_command(
        host, command, service, stream=stream, raw=raw, prefix=output_prefix
    )


async def run_compose(
@@ -242,13 +315,17 @@ async def run_compose(
    *,
    stream: bool = True,
    raw: bool = False,
    prefix: str | None = None,
) -> CommandResult:
    """Run a docker compose command for a service."""
    host = config.get_host(service)
    host_name = config.get_hosts(service)[0]
    host = config.hosts[host_name]
    compose_path = config.get_compose_path(service)

    _print_compose_command(host_name, str(config.compose_dir), str(compose_path), compose_cmd)

    command = f"docker compose -f {compose_path} {compose_cmd}"
    return await run_command(host, command, service, stream=stream, raw=raw)
    return await run_command(host, command, service, stream=stream, raw=raw, prefix=prefix)


async def run_compose_on_host(
@@ -259,6 +336,7 @@ async def run_compose_on_host(
    *,
    stream: bool = True,
    raw: bool = False,
    prefix: str | None = None,
) -> CommandResult:
    """Run a docker compose command for a service on a specific host.

@@ -267,8 +345,10 @@ async def run_compose_on_host(
    host = config.hosts[host_name]
    compose_path = config.get_compose_path(service)

    _print_compose_command(host_name, str(config.compose_dir), str(compose_path), compose_cmd)

    command = f"docker compose -f {compose_path} {compose_cmd}"
    return await run_command(host, command, service, stream=stream, raw=raw)
    return await run_command(host, command, service, stream=stream, raw=raw, prefix=prefix)


async def run_on_services(
@@ -294,10 +374,11 @@ async def _run_sequential_commands(
    *,
    stream: bool = True,
    raw: bool = False,
    prefix: str | None = None,
) -> CommandResult:
    """Run multiple compose commands sequentially for a service."""
    for cmd in commands:
        result = await run_compose(config, service, cmd, stream=stream, raw=raw)
        result = await run_compose(config, service, cmd, stream=stream, raw=raw, prefix=prefix)
        if not result.success:
            return result
    return CommandResult(service=service, exit_code=0, success=True)
@@ -310,10 +391,12 @@ async def _run_sequential_commands_multi_host(
    *,
    stream: bool = True,
    raw: bool = False,
    prefix: str | None = None,
) -> list[CommandResult]:
    """Run multiple compose commands sequentially for a multi-host service.

    Commands are run sequentially, but each command runs on all hosts in parallel.
    For multi-host services, prefix defaults to service@host format.
    """
    host_names = config.get_hosts(service)
    compose_path = config.get_compose_path(service)
@@ -323,9 +406,16 @@ async def _run_sequential_commands_multi_host(
        command = f"docker compose -f {compose_path} {cmd}"
        tasks = []
        for host_name in host_names:
            _print_compose_command(host_name, str(config.compose_dir), str(compose_path), cmd)
            host = config.hosts[host_name]
            # For multi-host services, always use service@host prefix to distinguish output
            label = f"{service}@{host_name}" if len(host_names) > 1 else service
            tasks.append(run_command(host, command, label, stream=stream, raw=raw))
            # Multi-host services always need prefixes to distinguish output from different hosts
            # (ignore empty prefix from single-service batches - we still need to distinguish hosts)
            effective_prefix = label if len(host_names) > 1 else prefix
            tasks.append(
                run_command(host, command, label, stream=stream, raw=raw, prefix=effective_prefix)
            )

        results = await asyncio.gather(*tasks)
        final_results = list(results)
@@ -350,6 +440,9 @@ async def run_sequential_on_services(
    For multi-host services, runs on all configured hosts.
    Note: raw=True only makes sense for single-service operations.
    """
    # Skip prefix for single-service operations (command line already shows context)
    prefix: str | None = "" if len(services) == 1 else None

    # Separate multi-host and single-host services for type-safe gathering
    multi_host_tasks = []
    single_host_tasks = []
@@ -358,12 +451,14 @@ async def run_sequential_on_services(
        if config.is_multi_host(service):
            multi_host_tasks.append(
                _run_sequential_commands_multi_host(
                    config, service, commands, stream=stream, raw=raw
                    config, service, commands, stream=stream, raw=raw, prefix=prefix
                )
            )
        else:
            single_host_tasks.append(
                _run_sequential_commands(config, service, commands, stream=stream, raw=raw)
                _run_sequential_commands(
                    config, service, commands, stream=stream, raw=raw, prefix=prefix
                )
            )

    # Gather results separately to maintain type safety

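For a concrete sense of what `build_ssh_command` produces, here is roughly the argv for a hypothetical host (user `deploy`, address `192.168.1.20`, port 2222) with the compose-farm key present and `tty=True`; all values are illustrative:

```python
ssh_args = [
    "ssh", "-tt",                        # "-tt" is inserted at index 1 when tty=True
    "-o", "StrictHostKeyChecking=no",
    "-o", "UserKnownHostsFile=/dev/null",
    "-o", "LogLevel=ERROR",
    "-i", "/home/deploy/.ssh/compose-farm/id_ed25519",
    "-p", "2222",                        # only added for non-default ports
    "deploy@192.168.1.20",
    "docker compose -f /opt/compose/plex/compose.yaml up -d",
]
```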
@@ -10,7 +10,7 @@ import asyncio
from typing import TYPE_CHECKING, NamedTuple

from .compose import parse_devices, parse_external_networks, parse_host_volumes
from .console import console, err_console
from .console import console, err_console, print_error, print_success, print_warning
from .executor import (
    CommandResult,
    check_networks_exist,
@@ -145,9 +145,7 @@ async def _cleanup_and_rollback(
    raw: bool = False,
) -> None:
    """Clean up failed start and attempt rollback to old host if it was running."""
    err_console.print(
        f"{prefix} [yellow]![/] Cleaning up failed start on [magenta]{target_host}[/]"
    )
    print_warning(f"{prefix} Cleaning up failed start on [magenta]{target_host}[/]")
    await run_compose(cfg, service, "down", raw=raw)

    if not was_running:
@@ -156,12 +154,12 @@ async def _cleanup_and_rollback(
        )
        return

    err_console.print(f"{prefix} [yellow]![/] Rolling back to [magenta]{current_host}[/]...")
    print_warning(f"{prefix} Rolling back to [magenta]{current_host}[/]...")
    rollback_result = await run_compose_on_host(cfg, service, current_host, "up -d", raw=raw)
    if rollback_result.success:
        console.print(f"{prefix} [green]✓[/] Rollback succeeded on [magenta]{current_host}[/]")
        print_success(f"{prefix} Rollback succeeded on [magenta]{current_host}[/]")
    else:
        err_console.print(f"{prefix} [red]✗[/] Rollback failed - service is down")
        print_error(f"{prefix} Rollback failed - service is down")


def _report_preflight_failures(
@@ -170,17 +168,15 @@ def _report_preflight_failures(
    preflight: PreflightResult,
) -> None:
    """Report pre-flight check failures."""
    err_console.print(
        f"[cyan]\\[{service}][/] [red]✗[/] Cannot start on [magenta]{target_host}[/]:"
    )
    print_error(f"[cyan]\\[{service}][/] Cannot start on [magenta]{target_host}[/]:")
    for path in preflight.missing_paths:
        err_console.print(f" [red]✗[/] missing path: {path}")
        print_error(f" missing path: {path}")
    for net in preflight.missing_networks:
        err_console.print(f" [red]✗[/] missing network: {net}")
        print_error(f" missing network: {net}")
    if preflight.missing_networks:
        err_console.print(f" [dim]hint: cf init-network {target_host}[/]")
        err_console.print(f" [dim]Hint: cf init-network {target_host}[/]")
    for dev in preflight.missing_devices:
        err_console.print(f" [red]✗[/] missing device: {dev}")
        print_error(f" missing device: {dev}")


async def _up_multi_host_service(
@@ -252,8 +248,8 @@ async def _migrate_service(
    for cmd, label in [("pull --ignore-buildable", "Pull"), ("build", "Build")]:
        result = await _run_compose_step(cfg, service, cmd, raw=raw)
        if not result.success:
            err_console.print(
                f"{prefix} [red]✗[/] {label} failed on [magenta]{target_host}[/], "
            print_error(
                f"{prefix} {label} failed on [magenta]{target_host}[/], "
                "leaving service on current host"
            )
            return result
@@ -293,9 +289,8 @@ async def _up_single_service(
            return failure
        did_migration = True
    else:
        err_console.print(
            f"{prefix} [yellow]![/] was on "
            f"[magenta]{current_host}[/] (not in config), skipping down"
        print_warning(
            f"{prefix} was on [magenta]{current_host}[/] (not in config), skipping down"
        )

    # Start on target host
@@ -391,9 +386,7 @@ async def stop_orphaned_services(cfg: Config) -> list[CommandResult]:
    for host in host_list:
        # Skip hosts no longer in config
        if host not in cfg.hosts:
            console.print(
                f" [yellow]![/] {service}@{host}: host no longer in config, skipping"
            )
            print_warning(f"{service}@{host}: host no longer in config, skipping")
            results.append(
                CommandResult(
                    service=f"{service}@{host}",
@@ -413,11 +406,11 @@ async def stop_orphaned_services(cfg: Config) -> list[CommandResult]:
            result = await task
            results.append(result)
            if result.success:
                console.print(f" [green]✓[/] {service}@{host}: stopped")
                print_success(f"{service}@{host}: stopped")
            else:
                console.print(f" [red]✗[/] {service}@{host}: {result.stderr or 'failed'}")
                print_error(f"{service}@{host}: {result.stderr or 'failed'}")
        except Exception as e:
            console.print(f" [red]✗[/] {service}@{host}: {e}")
            print_error(f"{service}@{host}: {e}")
            results.append(
                CommandResult(
                    service=f"{service}@{host}",

67
src/compose_farm/ssh_keys.py
Normal file
@@ -0,0 +1,67 @@
"""SSH key utilities for compose-farm."""

from __future__ import annotations

import os
from pathlib import Path

# Default key paths for compose-farm SSH key
# Keys are stored in a subdirectory for cleaner docker volume mounting
SSH_KEY_DIR = Path.home() / ".ssh" / "compose-farm"
SSH_KEY_PATH = SSH_KEY_DIR / "id_ed25519"
SSH_PUBKEY_PATH = SSH_KEY_PATH.with_suffix(".pub")


def get_ssh_auth_sock() -> str | None:
    """Get SSH_AUTH_SOCK, auto-detecting forwarded agent if needed.

    Checks in order:
    1. SSH_AUTH_SOCK environment variable (if socket exists)
    2. Forwarded agent sockets in ~/.ssh/agent/ (most recent first)

    Returns the socket path or None if no valid socket found.
    """
    sock = os.environ.get("SSH_AUTH_SOCK")
    if sock and Path(sock).is_socket():
        return sock

    # Try to find a forwarded SSH agent socket
    agent_dir = Path.home() / ".ssh" / "agent"
    if agent_dir.is_dir():
        sockets = sorted(
            agent_dir.glob("s.*.sshd.*"), key=lambda p: p.stat().st_mtime, reverse=True
        )
        for s in sockets:
            if s.is_socket():
                return str(s)
    return None


def get_ssh_env() -> dict[str, str]:
    """Get environment dict for SSH subprocess with auto-detected agent.

    Returns a copy of the current environment with SSH_AUTH_SOCK set
    to the auto-detected agent socket (if found).
    """
    env = os.environ.copy()
    sock = get_ssh_auth_sock()
    if sock:
        env["SSH_AUTH_SOCK"] = sock
    return env


def key_exists() -> bool:
    """Check if the compose-farm SSH key pair exists."""
    return SSH_KEY_PATH.exists() and SSH_PUBKEY_PATH.exists()


def get_key_path() -> Path | None:
    """Get the SSH key path if it exists, None otherwise."""
    return SSH_KEY_PATH if key_exists() else None


def get_pubkey_content() -> str | None:
    """Get the public key content if it exists, None otherwise."""
    if not SSH_PUBKEY_PATH.exists():
        return None
    return SSH_PUBKEY_PATH.read_text().strip()

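A short usage sketch for the new module (the host alias and command are made up): `get_ssh_env()` lets a plain `subprocess` call to `ssh` pick up a forwarded agent socket even when the inherited `SSH_AUTH_SOCK` is stale, with the key file as fallback:

```python
import subprocess

from compose_farm.ssh_keys import get_key_path, get_ssh_env

args = ["ssh", "example-host", "docker ps"]  # hypothetical host alias
key = get_key_path()
if key is not None:                          # fall back to the compose-farm key
    args[1:1] = ["-i", str(key)]
subprocess.run(args, check=False, env=get_ssh_env())
```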
@@ -8,11 +8,44 @@ from typing import TYPE_CHECKING, Any
import yaml

if TYPE_CHECKING:
    from collections.abc import Generator
    from collections.abc import Generator, Mapping

    from .config import Config


def group_services_by_host(
    services: dict[str, str | list[str]],
    hosts: Mapping[str, object],
    all_hosts: list[str] | None = None,
) -> dict[str, list[str]]:
    """Group services by their assigned host(s).

    For multi-host services (list or "all"), the service appears in multiple host lists.
    """
    by_host: dict[str, list[str]] = {h: [] for h in hosts}
    for service, host_value in services.items():
        if isinstance(host_value, list):
            for host_name in host_value:
                if host_name in by_host:
                    by_host[host_name].append(service)
        elif host_value == "all" and all_hosts:
            for host_name in all_hosts:
                if host_name in by_host:
                    by_host[host_name].append(service)
        elif host_value in by_host:
            by_host[host_value].append(service)
    return by_host


def group_running_services_by_host(
    state: dict[str, str | list[str]],
    hosts: Mapping[str, object],
) -> dict[str, list[str]]:
    """Group running services by host, filtering out hosts with no services."""
    by_host = group_services_by_host(state, hosts)
    return {h: svcs for h, svcs in by_host.items() if svcs}


def load_state(config: Config) -> dict[str, str | list[str]]:
    """Load the current deployment state.

@@ -82,50 +115,6 @@ def remove_service(config: Config, service: str) -> None:
        state.pop(service, None)


def add_service_to_host(config: Config, service: str, host: str) -> None:
    """Add a specific host to a service's state.

    For multi-host services, adds the host to the list if not present.
    For single-host services, sets the host.
    """
    with _modify_state(config) as state:
        current = state.get(service)

        if config.is_multi_host(service):
            # Multi-host: add to list if not present
            if isinstance(current, list):
                if host not in current:
                    state[service] = [*current, host]
            else:
                state[service] = [host]
        else:
            # Single-host: just set it
            state[service] = host


def remove_service_from_host(config: Config, service: str, host: str) -> None:
    """Remove a specific host from a service's state.

    For multi-host services, removes just that host from the list.
    For single-host services, removes the service entirely if host matches.
    """
    with _modify_state(config) as state:
        current = state.get(service)
        if current is None:
            return

        if isinstance(current, list):
            # Multi-host: remove this host from list
            remaining = [h for h in current if h != host]
            if remaining:
                state[service] = remaining
            else:
                state.pop(service, None)
        elif current == host:
            # Single-host: remove if matches
            state.pop(service, None)


def get_services_needing_migration(config: Config) -> list[str]:
    """Get services where current host differs from configured host.

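An illustrative call to the new grouping helper with made-up data, showing how list-valued and `"all"` entries fan a service out to several hosts:

```python
hosts = {"nas": object(), "mini": object()}  # only the keys matter here
services = {
    "plex": "nas",                           # single-host
    "uptime": ["nas", "mini"],               # explicit multi-host
    "node-exporter": "all",                  # expands via all_hosts
}

by_host = group_services_by_host(services, hosts, all_hosts=["nas", "mini"])
assert by_host == {
    "nas": ["plex", "uptime", "node-exporter"],
    "mini": ["uptime", "node-exporter"],
}
```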
@@ -2,6 +2,7 @@

from __future__ import annotations

import asyncio
import sys
from contextlib import asynccontextmanager, suppress
from typing import TYPE_CHECKING
@@ -12,19 +13,35 @@ from pydantic import ValidationError

from compose_farm.web.deps import STATIC_DIR, get_config
from compose_farm.web.routes import actions, api, pages
from compose_farm.web.streaming import TASK_TTL_SECONDS, cleanup_stale_tasks

if TYPE_CHECKING:
    from collections.abc import AsyncGenerator


async def _task_cleanup_loop() -> None:
    """Periodically clean up stale completed tasks."""
    while True:
        await asyncio.sleep(TASK_TTL_SECONDS // 2)  # Run every 5 minutes
        cleanup_stale_tasks()


@asynccontextmanager
async def lifespan(_app: FastAPI) -> AsyncGenerator[None, None]:
    """Application lifespan handler."""
    # Startup: pre-load config (ignore errors - handled per-request)
    with suppress(ValidationError, FileNotFoundError):
        get_config()

    # Start background cleanup task
    cleanup_task = asyncio.create_task(_task_cleanup_loop())

    yield
    # Shutdown: nothing to clean up

    # Shutdown: cancel cleanup task
    cleanup_task.cancel()
    with suppress(asyncio.CancelledError):
        await cleanup_task


def create_app() -> FastAPI:

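The lifespan change follows the standard asyncio pattern for owning a periodic background task: create it on startup, cancel it on shutdown, and await it while suppressing `CancelledError`. A minimal self-contained version of that pattern:

```python
import asyncio
from contextlib import suppress

async def _loop() -> None:
    while True:
        await asyncio.sleep(300)  # interval is illustrative
        print("cleanup tick")

async def main() -> None:
    task = asyncio.create_task(_loop())
    await asyncio.sleep(1)        # ... application serves requests ...
    task.cancel()
    with suppress(asyncio.CancelledError):
        await task                # swallow the cancellation on shutdown

asyncio.run(main())
```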
@@ -10,6 +10,9 @@ from pathlib import Path
from typing import TYPE_CHECKING

from fastapi.templating import Jinja2Templates
from pydantic import ValidationError

from compose_farm.executor import is_local

if TYPE_CHECKING:
    from compose_farm.config import Config
@@ -30,3 +33,18 @@ def get_config() -> Config:
def get_templates() -> Jinja2Templates:
    """Get Jinja2 templates instance."""
    return Jinja2Templates(directory=str(TEMPLATES_DIR))


def extract_config_error(exc: Exception) -> str:
    """Extract a user-friendly error message from a config exception."""
    if isinstance(exc, ValidationError):
        return "; ".join(err.get("msg", str(err)) for err in exc.errors())
    return str(exc)


def get_local_host(config: Config) -> str | None:
    """Find the local host name from config, if any."""
    for name, host in config.hosts.items():
        if is_local(host):
            return name
    return None

@@ -32,10 +32,17 @@ def _start_task(coro_factory: Callable[[str], Coroutine[Any, Any, None]]) -> str
    return task_id


async def _run_service_action(name: str, command: str) -> dict[str, Any]:
    """Run a compose command for a service."""
    config = get_config()
# Allowed service commands
ALLOWED_COMMANDS = {"up", "down", "restart", "pull", "update", "logs"}


@router.post("/service/{name}/{command}")
async def service_action(name: str, command: str) -> dict[str, Any]:
    """Run a compose command for a service (up, down, restart, pull, update, logs)."""
    if command not in ALLOWED_COMMANDS:
        raise HTTPException(status_code=404, detail=f"Unknown command '{command}'")

    config = get_config()
    if name not in config.services:
        raise HTTPException(status_code=404, detail=f"Service '{name}' not found")

@@ -43,42 +50,6 @@ async def _run_service_action(name: str, command: str) -> dict[str, Any]:
    return {"task_id": task_id, "service": name, "command": command}


@router.post("/service/{name}/up")
async def up_service(name: str) -> dict[str, Any]:
    """Start a service."""
    return await _run_service_action(name, "up")


@router.post("/service/{name}/down")
async def down_service(name: str) -> dict[str, Any]:
    """Stop a service."""
    return await _run_service_action(name, "down")


@router.post("/service/{name}/restart")
async def restart_service(name: str) -> dict[str, Any]:
    """Restart a service (down + up)."""
    return await _run_service_action(name, "restart")


@router.post("/service/{name}/pull")
async def pull_service(name: str) -> dict[str, Any]:
    """Pull latest images for a service."""
    return await _run_service_action(name, "pull")


@router.post("/service/{name}/update")
async def update_service(name: str) -> dict[str, Any]:
    """Update a service (pull + build + down + up)."""
    return await _run_service_action(name, "update")


@router.post("/service/{name}/logs")
async def logs_service(name: str) -> dict[str, Any]:
    """Show logs for a service."""
    return await _run_service_action(name, "logs")


@router.post("/apply")
async def apply_all() -> dict[str, Any]:
    """Run cf apply to reconcile all services."""

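The six per-command routes collapse into one parameterized endpoint guarded by the `ALLOWED_COMMANDS` allowlist, so unknown commands 404 instead of silently matching a route. A hypothetical client call (base URL and any router prefix are assumptions):

```python
import httpx

resp = httpx.post("http://localhost:8000/service/plex/up")
resp.raise_for_status()
print(resp.json()["task_id"])  # the action runs as a background task
```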
@@ -2,19 +2,20 @@

from __future__ import annotations

import asyncio
import contextlib
import json
from typing import TYPE_CHECKING, Annotated, Any
import shlex
from datetime import UTC, datetime
from pathlib import Path
from typing import Annotated, Any

import asyncssh
import yaml

if TYPE_CHECKING:
    from pathlib import Path

from fastapi import APIRouter, Body, HTTPException
from fastapi import APIRouter, Body, HTTPException, Query
from fastapi.responses import HTMLResponse

from compose_farm.executor import run_compose_on_host
from compose_farm.executor import is_local, run_compose_on_host, ssh_connect_kwargs
from compose_farm.paths import find_config_path
from compose_farm.state import load_state
from compose_farm.web.deps import get_config, get_templates
@@ -30,6 +31,51 @@ def _validate_yaml(content: str) -> None:
        raise HTTPException(status_code=400, detail=f"Invalid YAML: {e}") from e


def _backup_file(file_path: Path) -> Path | None:
    """Create a timestamped backup of a file if it exists and content differs.

    Backups are stored in a .backups directory alongside the file.
    Returns the backup path if created, None if no backup was needed.
    """
    if not file_path.exists():
        return None

    # Create backup directory
    backup_dir = file_path.parent / ".backups"
    backup_dir.mkdir(exist_ok=True)

    # Generate timestamped backup filename
    timestamp = datetime.now(tz=UTC).strftime("%Y%m%d_%H%M%S")
    backup_name = f"{file_path.name}.{timestamp}"
    backup_path = backup_dir / backup_name

    # Copy current content to backup
    backup_path.write_text(file_path.read_text())

    # Clean up old backups (keep last 200)
    backups = sorted(backup_dir.glob(f"{file_path.name}.*"), reverse=True)
    for old_backup in backups[200:]:
        old_backup.unlink()

    return backup_path


def _save_with_backup(file_path: Path, content: str) -> bool:
    """Save content to file, creating a backup first if content changed.

    Returns True if file was saved, False if content was unchanged.
    """
    # Check if content actually changed
    if file_path.exists():
        current_content = file_path.read_text()
        if current_content == content:
            return False  # No change, skip save
        _backup_file(file_path)

    file_path.write_text(content)
    return True


def _get_service_compose_path(name: str) -> Path:
    """Get compose path for service, raising HTTPException if not found."""
    config = get_config()
@@ -90,22 +136,34 @@ async def _get_container_states(
    # All containers should be on the same host
    host_name = containers[0]["Host"]

    result = await run_compose_on_host(config, service, host_name, "ps --format json", stream=False)
    # Use -a to include stopped/exited containers
    result = await run_compose_on_host(
        config, service, host_name, "ps -a --format json", stream=False
    )
    if not result.success:
        return containers

    # Build state map
    state_map: dict[str, str] = {}
    # Build state map: name -> (state, exit_code)
    state_map: dict[str, tuple[str, int]] = {}
    for line in result.stdout.strip().split("\n"):
        if line.strip():
            with contextlib.suppress(json.JSONDecodeError):
                data = json.loads(line)
                state_map[data.get("Name", "")] = data.get("State", "unknown")
                name = data.get("Name", "")
                state = data.get("State", "unknown")
                exit_code = data.get("ExitCode", 0)
                state_map[name] = (state, exit_code)

    # Update container states
    for c in containers:
        if c["Name"] in state_map:
            c["State"] = state_map[c["Name"]]
            state, exit_code = state_map[c["Name"]]
            c["State"] = state
            c["ExitCode"] = exit_code
        else:
            # Container not in ps output means it was never started
            c["State"] = "created"
            c["ExitCode"] = None

    return containers

@@ -183,8 +241,9 @@ async def save_compose(
    """Save compose file content."""
    compose_path = _get_service_compose_path(name)
    _validate_yaml(content)
    compose_path.write_text(content)
    return {"success": True, "message": "Compose file saved"}
    saved = _save_with_backup(compose_path, content)
    msg = "Compose file saved" if saved else "No changes to save"
    return {"success": True, "message": msg}


@router.put("/service/{name}/env")
@@ -193,8 +252,9 @@ async def save_env(
) -> dict[str, Any]:
    """Save .env file content."""
    env_path = _get_service_compose_path(name).parent / ".env"
    env_path.write_text(content)
    return {"success": True, "message": ".env file saved"}
    saved = _save_with_backup(env_path, content)
    msg = ".env file saved" if saved else "No changes to save"
    return {"success": True, "message": msg}


@router.put("/config")
@@ -207,6 +267,106 @@ async def save_config(
        raise HTTPException(status_code=404, detail="Config file not found")

    _validate_yaml(content)
    config_path.write_text(content)
    saved = _save_with_backup(config_path, content)
    msg = "Config saved" if saved else "No changes to save"
    return {"success": True, "message": msg}

    return {"success": True, "message": "Config saved"}

async def _read_file_local(path: str) -> str:
    """Read a file from the local filesystem."""
    expanded = Path(path).expanduser()
    return await asyncio.to_thread(expanded.read_text, encoding="utf-8")


async def _write_file_local(path: str, content: str) -> bool:
    """Write content to a file on the local filesystem with backup.

    Returns True if file was saved, False if content was unchanged.
    """
    expanded = Path(path).expanduser()
    return await asyncio.to_thread(_save_with_backup, expanded, content)


async def _read_file_remote(host: Any, path: str) -> str:
    """Read a file from a remote host via SSH."""
    # Expand ~ on remote by using shell
    cmd = f"cat {shlex.quote(path)}"
    if path.startswith("~/"):
        cmd = f"cat ~/{shlex.quote(path[2:])}"

    async with asyncssh.connect(**ssh_connect_kwargs(host)) as conn:
        result = await conn.run(cmd, check=True)
        stdout = result.stdout or ""
        return stdout.decode() if isinstance(stdout, bytes) else stdout


async def _write_file_remote(host: Any, path: str, content: str) -> None:
    """Write content to a file on a remote host via SSH."""
    # Expand ~ on remote: keep ~ unquoted for shell expansion, quote the rest
    target = f"~/{shlex.quote(path[2:])}" if path.startswith("~/") else shlex.quote(path)
    cmd = f"cat > {target}"

    async with asyncssh.connect(**ssh_connect_kwargs(host)) as conn:
        result = await conn.run(cmd, input=content, check=True)
        if result.returncode != 0:
            stderr = result.stderr.decode() if isinstance(result.stderr, bytes) else result.stderr
            msg = f"Failed to write file: {stderr}"
            raise RuntimeError(msg)


def _get_console_host(host: str, path: str) -> Any:
    """Validate and return host config for console file operations."""
    config = get_config()
    host_config = config.hosts.get(host)

    if not host_config:
        raise HTTPException(status_code=404, detail=f"Host '{host}' not found")
    if not path:
        raise HTTPException(status_code=400, detail="Path is required")

    return host_config


@router.get("/console/file")
async def read_console_file(
    host: Annotated[str, Query(description="Host name")],
    path: Annotated[str, Query(description="File path")],
) -> dict[str, Any]:
    """Read a file from a host for the console editor."""
    host_config = _get_console_host(host, path)

    try:
        if is_local(host_config):
            content = await _read_file_local(path)
        else:
            content = await _read_file_remote(host_config, path)
        return {"success": True, "content": content}
    except FileNotFoundError:
        raise HTTPException(status_code=404, detail=f"File not found: {path}") from None
    except PermissionError:
        raise HTTPException(status_code=403, detail=f"Permission denied: {path}") from None
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e


@router.put("/console/file")
async def write_console_file(
    host: Annotated[str, Query(description="Host name")],
    path: Annotated[str, Query(description="File path")],
    content: Annotated[str, Body(media_type="text/plain")],
) -> dict[str, Any]:
    """Write a file to a host from the console editor."""
    host_config = _get_console_host(host, path)

    try:
        if is_local(host_config):
            saved = await _write_file_local(path, content)
            msg = f"Saved: {path}" if saved else "No changes to save"
        else:
            await _write_file_remote(host_config, path, content)
            msg = f"Saved: {path}"  # Remote doesn't track changes
        return {"success": True, "message": msg}
    except PermissionError:
        raise HTTPException(status_code=403, detail=f"Permission denied: {path}") from None
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e

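The backup scheme introduced above writes timestamped copies into a `.backups` directory next to the edited file and prunes anything beyond the 200 most recent. A sketch of the resulting layout and pruning (paths are hypothetical):

```python
from pathlib import Path

# After a few saves of compose.yaml:
#   /opt/compose/plex/compose.yaml
#   /opt/compose/plex/.backups/compose.yaml.20250101_120000
#   /opt/compose/plex/.backups/compose.yaml.20250102_083000
backup_dir = Path("/opt/compose/plex/.backups")
backups = sorted(backup_dir.glob("compose.yaml.*"), reverse=True)
for old in backups[200:]:  # keep the 200 newest, as _backup_file does
    old.unlink()
```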
@@ -13,13 +13,45 @@ from compose_farm.state import (
    get_service_host,
    get_services_needing_migration,
    get_services_not_in_state,
    group_running_services_by_host,
    load_state,
)
from compose_farm.web.deps import get_config, get_templates
from compose_farm.web.deps import (
    extract_config_error,
    get_config,
    get_local_host,
    get_templates,
)

router = APIRouter()


@router.get("/console", response_class=HTMLResponse)
async def console(request: Request) -> HTMLResponse:
    """Console page with terminal and editor."""
    config = get_config()
    templates = get_templates()

    # Sort hosts with local first
    local_host = get_local_host(config)
    hosts = sorted(config.hosts.keys())
    if local_host:
        hosts = [local_host] + [h for h in hosts if h != local_host]

    # Get config path for default editor file
    config_path = str(config.config_path) if config.config_path else ""

    return templates.TemplateResponse(
        "console.html",
        {
            "request": request,
            "hosts": hosts,
            "local_host": local_host,
            "config_path": config_path,
        },
    )


@router.get("/", response_class=HTMLResponse)
async def index(request: Request) -> HTMLResponse:
    """Dashboard page - combined view of all cluster info."""
@@ -30,11 +62,7 @@ async def index(request: Request) -> HTMLResponse:
    try:
        config = get_config()
    except (ValidationError, FileNotFoundError) as e:
        # Extract error message
        if isinstance(e, ValidationError):
            config_error = "; ".join(err.get("msg", str(err)) for err in e.errors())
        else:
            config_error = str(e)
        config_error = extract_config_error(e)

        # Read raw config content for the editor
        config_path = find_config_path()
@@ -70,14 +98,8 @@ async def index(request: Request) -> HTMLResponse:
    migrations = get_services_needing_migration(config)
    not_started = get_services_not_in_state(config)

    # Group services by host
    services_by_host: dict[str, list[str]] = {}
    for svc, host in deployed.items():
        if isinstance(host, list):
            for h in host:
                services_by_host.setdefault(h, []).append(svc)
        else:
            services_by_host.setdefault(host, []).append(svc)
    # Group services by host (filter out hosts with no running services)
    services_by_host = group_running_services_by_host(deployed, config.hosts)

    # Config file content
    config_content = ""
@@ -173,6 +195,7 @@ async def sidebar_partial(request: Request) -> HTMLResponse:
            "services": sorted(config.services.keys()),
            "service_hosts": service_hosts,
            "hosts": sorted(config.hosts.keys()),
            "local_host": get_local_host(config),
            "state": state,
        },
    )
@@ -186,10 +209,7 @@ async def config_error_partial(request: Request) -> HTMLResponse:
        get_config()
        return HTMLResponse("")  # No error
    except (ValidationError, FileNotFoundError) as e:
        if isinstance(e, ValidationError):
            error = "; ".join(err.get("msg", str(err)) for err in e.errors())
        else:
            error = str(e)
        error = extract_config_error(e)
        return templates.TemplateResponse(
            "partials/config_error.html", {"request": request, "config_error": error}
        )
@@ -246,15 +266,7 @@ async def services_by_host_partial(request: Request, expanded: bool = True) -> HTMLResponse:
    templates = get_templates()

    deployed = load_state(config)

    # Group services by host
    services_by_host: dict[str, list[str]] = {}
    for svc, host in deployed.items():
        if isinstance(host, list):
            for h in host:
                services_by_host.setdefault(h, []).append(svc)
        else:
            services_by_host.setdefault(host, []).append(svc)
    services_by_host = group_running_services_by_host(deployed, config.hosts)

    return templates.TemplateResponse(
        "partials/services_by_host.html",

@@ -1,3 +1,11 @@
/* Sidebar inputs - remove focus outline (DaisyUI 5 uses outline + outline-offset) */
#sidebar .input:focus,
#sidebar .input:focus-within,
#sidebar .select:focus {
  outline: none;
  outline-offset: 0;
}

/* Editors (Monaco) - wrapper makes it resizable */
.editor-wrapper {
  resize: vertical;
@@ -53,3 +61,65 @@
    background-position: 16em center;
  }
}

/* Command palette FAB - rainbow glow effect */
@property --cmd-pos { syntax: "<number>"; inherits: true; initial-value: 100; }
@property --cmd-blur { syntax: "<number>"; inherits: true; initial-value: 10; }
@property --cmd-scale { syntax: "<number>"; inherits: true; initial-value: 1; }
@property --cmd-opacity { syntax: "<number>"; inherits: true; initial-value: 0.3; }

#cmd-fab {
  --g: linear-gradient(to right, #fff, #fff, #0ff, #00f, #8000ff, #e066a3, #f00, #ff0, #bfff80, #fff, #fff);
  all: unset;
  position: fixed;
  bottom: 1.5rem;
  right: 1.5rem;
  z-index: 50;
  cursor: pointer;
  transform: scale(var(--cmd-scale));
  transition: --cmd-pos 3s, --cmd-blur 0.3s, --cmd-opacity 0.3s, --cmd-scale 0.2s cubic-bezier(.76,-.25,.51,1.13);
}

.cmd-fab-inner {
  display: block;
  padding: 0.6em 1em;
  background: #1d232a;
  border-radius: 8px;
  font-size: 14px;
  position: relative;
}

.cmd-fab-inner > span {
  background: var(--g) no-repeat calc(var(--cmd-pos) * 1%) 0 / 900%;
  -webkit-background-clip: text;
  background-clip: text;
  -webkit-text-fill-color: transparent;
  letter-spacing: 0.15ch;
  font-weight: 600;
}

.cmd-fab-inner::before, .cmd-fab-inner::after {
  content: "";
  position: absolute;
  border-radius: 8px;
}

.cmd-fab-inner::before {
  inset: -1.5px;
  background: var(--g) no-repeat calc(var(--cmd-pos) * 1%) 0 / 900%;
  border-radius: 9px;
  z-index: -1;
  opacity: var(--cmd-opacity);
}

.cmd-fab-inner::after {
  inset: 0;
  background: #000;
  transform: translateY(10px);
  z-index: -2;
  filter: blur(calc(var(--cmd-blur) * 1px));
}

#cmd-fab:hover { --cmd-scale: 1.05; --cmd-pos: 0; --cmd-blur: 30; --cmd-opacity: 1; }
#cmd-fab:hover .cmd-fab-inner::after { background: var(--g); opacity: 0.3; }
#cmd-fab:active { --cmd-scale: 0.98; --cmd-blur: 15; }

@@ -2,6 +2,10 @@
 * Compose Farm Web UI JavaScript
 */

// ============================================================================
// CONSTANTS
// ============================================================================

// ANSI escape codes for terminal output
const ANSI = {
    RED: '\x1b[31m',
@@ -11,12 +15,6 @@ const ANSI = {
    CRLF: '\r\n'
};

// Store active terminals and editors
const terminals = {};
const editors = {};
let monacoLoaded = false;
let monacoLoading = false;

// Terminal color theme (dark mode matching PicoCSS)
const TERMINAL_THEME = {
    background: '#1a1a2e',
@@ -41,12 +39,97 @@ const TERMINAL_THEME = {
    brightWhite: '#fafafa'
};

// Language detection from file path
const LANGUAGE_MAP = {
    'yaml': 'yaml', 'yml': 'yaml',
    'json': 'json',
    'js': 'javascript', 'mjs': 'javascript',
    'ts': 'typescript', 'tsx': 'typescript',
    'py': 'python',
    'sh': 'shell', 'bash': 'shell',
    'md': 'markdown',
    'html': 'html', 'htm': 'html',
    'css': 'css',
    'sql': 'sql',
    'toml': 'toml',
    'ini': 'ini', 'conf': 'ini',
    'dockerfile': 'dockerfile',
    'env': 'plaintext'
};

// ============================================================================
// STATE
// ============================================================================

// Store active terminals and editors
const terminals = {};
const editors = {};
let monacoLoaded = false;
let monacoLoading = false;

// LocalStorage key prefix for active tasks (scoped by page)
const TASK_KEY_PREFIX = 'cf_task:';
const getTaskKey = () => TASK_KEY_PREFIX + window.location.pathname;

// Exec terminal state
let execTerminalWrapper = null; // {term, dispose}
let execWs = null;

// ============================================================================
// UTILITIES
// ============================================================================

/**
 * Get Monaco language from file path
 * @param {string} path - File path
 * @returns {string} Monaco language identifier
 */
function getLanguageFromPath(path) {
    const ext = path.split('.').pop().toLowerCase();
    return LANGUAGE_MAP[ext] || 'plaintext';
}
window.getLanguageFromPath = getLanguageFromPath;

/**
 * Create WebSocket connection with standard handlers
 * @param {string} path - WebSocket path
 * @returns {WebSocket}
 */
function createWebSocket(path) {
    const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
    return new WebSocket(`${protocol}//${window.location.host}${path}`);
}
window.createWebSocket = createWebSocket;

/**
 * Wait for xterm.js to load, then execute callback
 * @param {function} callback - Function to call when xterm is ready
 * @param {number} maxAttempts - Max attempts (default 20 = 2 seconds)
 */
function whenXtermReady(callback, maxAttempts = 20) {
    const tryInit = (attempts) => {
        if (typeof Terminal !== 'undefined' && typeof FitAddon !== 'undefined') {
            callback();
        } else if (attempts > 0) {
            setTimeout(() => tryInit(attempts - 1), 100);
        } else {
            console.error('xterm.js failed to load');
        }
    };
    tryInit(maxAttempts);
}
window.whenXtermReady = whenXtermReady;

// ============================================================================
// TERMINAL
// ============================================================================

/**
 * Create a terminal with fit addon and resize observer
 * @param {HTMLElement} container - Container element
 * @param {object} extraOptions - Additional terminal options
 * @param {function} onResize - Optional callback called with (cols, rows) after resize
 * @returns {{term: Terminal, fitAddon: FitAddon}}
 * @returns {{term: Terminal, fitAddon: FitAddon, dispose: function}}
 */
function createTerminal(container, extraOptions = {}, onResize = null) {
    container.innerHTML = '';
@@ -63,29 +146,26 @@ function createTerminal(container, extraOptions = {}, onResize = null) {
    const fitAddon = new FitAddon.FitAddon();
    term.loadAddon(fitAddon);
    term.open(container);
    fitAddon.fit();

    const handleResize = () => {
        fitAddon.fit();
        if (onResize) {
            onResize(term.cols, term.rows);
        }
        onResize?.(term.cols, term.rows);
    };

    window.addEventListener('resize', handleResize);
    new ResizeObserver(handleResize).observe(container);
    // Use ResizeObserver only (handles both container and window resize)
    const resizeObserver = new ResizeObserver(handleResize);
    resizeObserver.observe(container);

    return { term, fitAddon };
}
    handleResize(); // Initial fit

/**
 * Create WebSocket connection with standard handlers
 * @param {string} path - WebSocket path
 * @returns {WebSocket}
 */
function createWebSocket(path) {
    const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
    return new WebSocket(`${protocol}//${window.location.host}${path}`);
    return {
        term,
        fitAddon,
        dispose() {
            resizeObserver.disconnect();
            term.dispose();
        }
    };
}

/**
@@ -98,14 +178,22 @@ function initTerminal(elementId, taskId) {
        return;
    }

    const { term, fitAddon } = createTerminal(container);
    const wrapper = createTerminal(container);
    const { term } = wrapper;
    const ws = createWebSocket(`/ws/terminal/${taskId}`);

    const taskKey = getTaskKey();
    ws.onopen = () => {
        term.write(`${ANSI.DIM}[Connected]${ANSI.RESET}${ANSI.CRLF}`);
        setTerminalLoading(true);
        localStorage.setItem(taskKey, taskId);
    };
    ws.onmessage = (event) => {
        term.write(event.data);
        if (event.data.includes('[Done]') || event.data.includes('[Failed]')) {
            localStorage.removeItem(taskKey);
        }
    };
    ws.onmessage = (event) => term.write(event.data);
    ws.onclose = () => setTerminalLoading(false);
    ws.onerror = (error) => {
        term.write(`${ANSI.RED}[WebSocket Error]${ANSI.RESET}${ANSI.CRLF}`);
@@ -113,7 +201,7 @@ function initTerminal(elementId, taskId) {
        setTerminalLoading(false);
    };

    terminals[taskId] = { term, ws, fitAddon };
    terminals[taskId] = { ...wrapper, ws };
    return { term, ws };
}

@@ -122,9 +210,6 @@ window.initTerminal = initTerminal;
|
||||
/**
|
||||
* Initialize an interactive exec terminal
|
||||
*/
|
||||
let execTerminal = null;
|
||||
let execWs = null;
|
||||
|
||||
function initExecTerminal(service, container, host) {
|
||||
const containerEl = document.getElementById('exec-terminal-container');
|
||||
const terminalEl = document.getElementById('exec-terminal');
|
||||
@@ -136,9 +221,9 @@ function initExecTerminal(service, container, host) {
|
||||
|
||||
containerEl.classList.remove('hidden');
|
||||
|
||||
// Clean up existing
|
||||
// Clean up existing (use wrapper's dispose to clean up ResizeObserver)
|
||||
if (execWs) { execWs.close(); execWs = null; }
|
||||
if (execTerminal) { execTerminal.dispose(); execTerminal = null; }
|
||||
if (execTerminalWrapper) { execTerminalWrapper.dispose(); execTerminalWrapper = null; }
|
||||
|
||||
// Create WebSocket first so resize callback can use it
|
||||
execWs = createWebSocket(`/ws/exec/${service}/${container}/${host}`);
|
||||
@@ -150,8 +235,8 @@ function initExecTerminal(service, container, host) {
}
};

const { term } = createTerminal(terminalEl, { cursorBlink: true }, sendSize);
execTerminal = term;
execTerminalWrapper = createTerminal(terminalEl, { cursorBlink: true }, sendSize);
const term = execTerminalWrapper.term;

execWs.onopen = () => { sendSize(term.cols, term.rows); term.focus(); };
execWs.onmessage = (event) => term.write(event.data);
@@ -171,17 +256,32 @@ function initExecTerminal(service, container, host) {
window.initExecTerminal = initExecTerminal;

/**
* Refresh dashboard partials while preserving collapse states
* Expand terminal collapse and scroll to it
*/
function refreshDashboard() {
const isExpanded = (id) => document.getElementById(id)?.checked ?? true;
htmx.ajax('GET', '/partials/sidebar', {target: '#sidebar nav', swap: 'innerHTML'});
htmx.ajax('GET', '/partials/stats', {target: '#stats-cards', swap: 'outerHTML'});
htmx.ajax('GET', `/partials/pending?expanded=${isExpanded('pending-collapse')}`, {target: '#pending-operations', swap: 'outerHTML'});
htmx.ajax('GET', `/partials/services-by-host?expanded=${isExpanded('services-by-host-collapse')}`, {target: '#services-by-host', swap: 'outerHTML'});
htmx.ajax('GET', '/partials/config-error', {target: '#config-error', swap: 'innerHTML'});
function expandTerminal() {
const toggle = document.getElementById('terminal-toggle');
if (toggle) toggle.checked = true;

const collapse = document.getElementById('terminal-collapse');
if (collapse) {
collapse.scrollIntoView({ behavior: 'smooth', block: 'start' });
}
}

/**
* Show/hide terminal loading spinner
*/
function setTerminalLoading(loading) {
const spinner = document.getElementById('terminal-spinner');
if (spinner) {
spinner.classList.toggle('hidden', !loading);
}
}

// ============================================================================
// EDITOR (Monaco)
// ============================================================================

/**
* Load Monaco editor dynamically (only once)
*/
@@ -223,13 +323,15 @@ function loadMonaco(callback) {
* @param {HTMLElement} container - Container element
* @param {string} content - Initial content
* @param {string} language - Editor language (yaml, plaintext, etc.)
* @param {boolean} readonly - Whether editor is read-only
* @param {object} opts - Options: { readonly, onSave }
* @returns {object} Monaco editor instance
*/
function createEditor(container, content, language, readonly = false) {
function createEditor(container, content, language, opts = {}) {
const { readonly = false, onSave = null } = opts;

const options = {
value: content,
language: language,
language,
theme: 'vs-dark',
minimap: { enabled: false },
automaticLayout: true,
@@ -248,23 +350,26 @@ function createEditor(container, content, language, readonly = false) {

// Add Command+S / Ctrl+S handler for editable editors
if (!readonly) {
editor.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyS, function() {
saveAllEditors();
editor.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyS, () => {
if (onSave) {
onSave(editor);
} else {
saveAllEditors();
}
});
}

return editor;
}
window.createEditor = createEditor;

/**
* Initialize all Monaco editors on the page
*/
function initMonacoEditors() {
// Dispose existing editors
Object.values(editors).forEach(ed => {
if (ed && ed.dispose) ed.dispose();
});
Object.keys(editors).forEach(key => delete editors[key]);
Object.values(editors).forEach(ed => ed?.dispose?.());
for (const key in editors) delete editors[key];

const editorConfigs = [
{ id: 'compose-editor', language: 'yaml', readonly: false },
@@ -284,7 +389,7 @@ function initMonacoEditors() {
if (!el) return;

const content = el.dataset.content || '';
editors[id] = createEditor(el, content, language, readonly);
editors[id] = createEditor(el, content, language, { readonly });
if (!readonly) {
editors[id].saveUrl = el.dataset.saveUrl;
}
@@ -338,147 +443,158 @@ function initSaveButton() {
saveBtn.onclick = saveAllEditors;
}

// ============================================================================
// UI HELPERS
// ============================================================================

/**
* Global keyboard shortcut handler
* Refresh dashboard partials by dispatching a custom event.
* Elements with hx-trigger="cf:refresh from:body" will automatically refresh.
*/
function initKeyboardShortcuts() {
document.addEventListener('keydown', function(e) {
// Command+S (Mac) or Ctrl+S (Windows/Linux)
if ((e.metaKey || e.ctrlKey) && e.key === 's') {
// Only handle if we have editors and no Monaco editor is focused
if (Object.keys(editors).length > 0) {
// Check if any Monaco editor is focused
const focusedEditor = Object.values(editors).find(ed => ed && ed.hasTextFocus && ed.hasTextFocus());
if (!focusedEditor) {
e.preventDefault();
saveAllEditors();
}
}
}
function refreshDashboard() {
document.body.dispatchEvent(new CustomEvent('cf:refresh'));
}

/**
* Filter sidebar services by name and host
*/
function sidebarFilter() {
const q = (document.getElementById('sidebar-filter')?.value || '').toLowerCase();
const h = document.getElementById('sidebar-host-select')?.value || '';
let n = 0;
document.querySelectorAll('#sidebar-services li').forEach(li => {
const show = (!q || li.dataset.svc.includes(q)) && (!h || !li.dataset.h || li.dataset.h === h);
li.hidden = !show;
if (show) n++;
});
document.getElementById('sidebar-count').textContent = '(' + n + ')';
}
window.sidebarFilter = sidebarFilter;

// Play intro animation on command palette button
function playFabIntro() {
const fab = document.getElementById('cmd-fab');
if (!fab) return;
setTimeout(() => {
fab.style.setProperty('--cmd-pos', '0');
fab.style.setProperty('--cmd-opacity', '1');
fab.style.setProperty('--cmd-blur', '30');
setTimeout(() => {
fab.style.removeProperty('--cmd-pos');
fab.style.removeProperty('--cmd-opacity');
fab.style.removeProperty('--cmd-blur');
}, 3000);
}, 500);
}

/**
* Initialize page components
*/
function initPage() {
initMonacoEditors();
initSaveButton();
}
// ============================================================================
// COMMAND PALETTE
// ============================================================================

// Initialize on page load
document.addEventListener('DOMContentLoaded', function() {
initPage();
initKeyboardShortcuts();
});

// Re-initialize after HTMX swaps main content
document.body.addEventListener('htmx:afterSwap', function(evt) {
if (evt.detail.target.id === 'main-content') {
initPage();
}
});

/**
* Expand terminal collapse and scroll to it
*/
function expandTerminal() {
const toggle = document.getElementById('terminal-toggle');
if (toggle) toggle.checked = true;

const collapse = document.getElementById('terminal-collapse');
if (collapse) {
collapse.scrollIntoView({ behavior: 'smooth', block: 'start' });
}
}

/**
* Show/hide terminal loading spinner
*/
function setTerminalLoading(loading) {
const spinner = document.getElementById('terminal-spinner');
if (spinner) {
spinner.classList.toggle('hidden', !loading);
}
}

// Handle action responses (terminal streaming)
document.body.addEventListener('htmx:afterRequest', function(evt) {
if (!evt.detail.successful || !evt.detail.xhr) return;

const text = evt.detail.xhr.responseText;
// Only try to parse if it looks like JSON (starts with {)
if (!text || !text.trim().startsWith('{')) return;

try {
const response = JSON.parse(text);
if (response.task_id) {
// Expand terminal and scroll to it
expandTerminal();

// Wait for xterm to be loaded if needed
const tryInit = (attempts) => {
if (typeof Terminal !== 'undefined' && typeof FitAddon !== 'undefined') {
initTerminal('terminal-output', response.task_id);
} else if (attempts > 0) {
setTimeout(() => tryInit(attempts - 1), 100);
} else {
console.error('xterm.js failed to load');
}
};
tryInit(20); // Try for up to 2 seconds
}
} catch (e) {
// Not valid JSON, ignore
}
});

// Command Palette
(function() {
const dialog = document.getElementById('cmd-palette');
const input = document.getElementById('cmd-input');
const list = document.getElementById('cmd-list');
const fab = document.getElementById('cmd-fab');
const themeBtn = document.getElementById('theme-btn');
if (!dialog || !input || !list) return;

const colors = { service: '#22c55e', action: '#eab308', nav: '#3b82f6' };
// Load icons from template (rendered server-side from icons.html)
const iconTemplate = document.getElementById('cmd-icons');
const icons = {};
if (iconTemplate) {
iconTemplate.content.querySelectorAll('[data-icon]').forEach(el => {
icons[el.dataset.icon] = el.innerHTML;
});
}

// All available DaisyUI themes
const THEMES = ['light', 'dark', 'cupcake', 'bumblebee', 'emerald', 'corporate', 'synthwave', 'retro', 'cyberpunk', 'valentine', 'halloween', 'garden', 'forest', 'aqua', 'lofi', 'pastel', 'fantasy', 'wireframe', 'black', 'luxury', 'dracula', 'cmyk', 'autumn', 'business', 'acid', 'lemonade', 'night', 'coffee', 'winter', 'dim', 'nord', 'sunset', 'caramellatte', 'abyss', 'silk'];
const THEME_KEY = 'cf_theme';

const colors = { service: '#22c55e', action: '#eab308', nav: '#3b82f6', app: '#a855f7', theme: '#ec4899' };
let commands = [];
let filtered = [];
let selected = 0;
let originalTheme = null; // Store theme when palette opens for preview/restore

const post = (url) => () => htmx.ajax('POST', url, {swap: 'none'});
const nav = (url) => () => window.location.href = url;
const cmd = (type, name, desc, action) => ({ type, name, desc, action });
const nav = (url) => () => {
htmx.ajax('GET', url, {target: '#main-content', select: '#main-content', swap: 'outerHTML'}).then(() => {
history.pushState({}, '', url);
});
};
// Navigate to dashboard (if needed) and trigger action
const dashboardAction = (endpoint) => async () => {
if (window.location.pathname !== '/') {
await htmx.ajax('GET', '/', {target: '#main-content', select: '#main-content', swap: 'outerHTML'});
history.pushState({}, '', '/');
}
htmx.ajax('POST', `/api/${endpoint}`, {swap: 'none'});
};
// Apply theme and save to localStorage
const setTheme = (theme) => () => {
document.documentElement.setAttribute('data-theme', theme);
localStorage.setItem(THEME_KEY, theme);
};
// Preview theme without saving (for hover)
const previewTheme = (theme) => {
document.documentElement.setAttribute('data-theme', theme);
};
// Restore original theme (when closing without selection)
const restoreTheme = () => {
if (originalTheme) {
document.documentElement.setAttribute('data-theme', originalTheme);
}
};
// Generate color swatch HTML for a theme
const themeSwatch = (theme) => `<span class="flex gap-0.5" data-theme="${theme}"><span class="w-2 h-4 rounded-l bg-primary"></span><span class="w-2 h-4 bg-secondary"></span><span class="w-2 h-4 bg-accent"></span><span class="w-2 h-4 rounded-r bg-neutral"></span></span>`;

const cmd = (type, name, desc, action, icon = null, themeId = null) => ({ type, name, desc, action, icon, themeId });

// Reopen palette with theme filter
const openThemePicker = () => {
// Small delay to let dialog close before reopening
setTimeout(() => open('theme:'), 50);
};

function buildCommands() {
const actions = [
cmd('action', 'Apply', 'Make reality match config', post('/api/apply')),
cmd('action', 'Refresh', 'Update state from reality', post('/api/refresh')),
cmd('nav', 'Dashboard', 'Go to dashboard', nav('/')),
cmd('action', 'Apply', 'Make reality match config', dashboardAction('apply'), icons.check),
cmd('action', 'Refresh', 'Update state from reality', dashboardAction('refresh'), icons.refresh_cw),
cmd('app', 'Theme', 'Change color theme', openThemePicker, icons.palette),
cmd('app', 'Dashboard', 'Go to dashboard', nav('/'), icons.home),
cmd('app', 'Console', 'Go to console', nav('/console'), icons.terminal),
];

// Add service-specific actions if on a service page
const match = window.location.pathname.match(/^\/service\/(.+)$/);
if (match) {
const svc = decodeURIComponent(match[1]);
const svcCmd = (name, desc, endpoint) => cmd('service', name, `${desc} ${svc}`, post(`/api/service/${svc}/${endpoint}`));
const svcCmd = (name, desc, endpoint, icon) => cmd('service', name, `${desc} ${svc}`, post(`/api/service/${svc}/${endpoint}`), icon);
actions.unshift(
svcCmd('Up', 'Start', 'up'),
svcCmd('Down', 'Stop', 'down'),
svcCmd('Restart', 'Restart', 'restart'),
svcCmd('Pull', 'Pull', 'pull'),
svcCmd('Update', 'Pull + restart', 'update'),
svcCmd('Logs', 'View logs for', 'logs'),
svcCmd('Up', 'Start', 'up', icons.play),
svcCmd('Down', 'Stop', 'down', icons.square),
svcCmd('Restart', 'Restart', 'restart', icons.rotate_cw),
svcCmd('Pull', 'Pull', 'pull', icons.cloud_download),
svcCmd('Update', 'Pull + restart', 'update', icons.refresh_cw),
svcCmd('Logs', 'View logs for', 'logs', icons.file_text),
);
}

// Add nav commands for all services from sidebar
const services = [...document.querySelectorAll('#sidebar-services li[data-svc] a[href]')].map(a => {
const name = a.getAttribute('href').replace('/service/', '');
return cmd('nav', name, 'Go to service', nav(`/service/${name}`));
return cmd('nav', name, 'Go to service', nav(`/service/${name}`), icons.box);
});

commands = [...actions, ...services];
// Add theme commands with color swatches
const currentTheme = document.documentElement.getAttribute('data-theme') || 'dark';
const themeCommands = THEMES.map(theme =>
cmd('theme', `theme: ${theme}`, theme === currentTheme ? '(current)' : 'Switch theme', setTheme(theme), themeSwatch(theme), theme)
);

commands = [...actions, ...services, ...themeCommands];
}

function filter() {
|
||||
|
||||
function render() {
|
||||
list.innerHTML = filtered.map((c, i) => `
|
||||
<a class="flex justify-between items-center px-3 py-2 rounded-r cursor-pointer hover:bg-base-200 border-l-4 ${i === selected ? 'bg-base-300' : ''}" style="border-left-color: ${colors[c.type] || '#666'}" data-idx="${i}">
|
||||
<span><span class="opacity-50 text-xs mr-2">${c.type}</span>${c.name}</span>
|
||||
<a class="flex justify-between items-center px-3 py-2 rounded-r cursor-pointer hover:bg-base-200 border-l-4 ${i === selected ? 'bg-base-300' : ''}" style="border-left-color: ${colors[c.type] || '#666'}" data-idx="${i}"${c.themeId ? ` data-theme-id="${c.themeId}"` : ''}>
|
||||
<span class="flex items-center gap-2">${c.icon || ''}<span>${c.name}</span></span>
|
||||
<span class="opacity-40 text-xs">${c.desc}</span>
|
||||
</a>
|
||||
`).join('') || '<div class="opacity-50 p-2">No matches</div>';
|
||||
// Scroll selected item into view
|
||||
const sel = list.querySelector(`[data-idx="${selected}"]`);
|
||||
if (sel) sel.scrollIntoView({ block: 'nearest' });
|
||||
// Preview theme if selected item is a theme command
|
||||
const selectedCmd = filtered[selected];
|
||||
if (selectedCmd?.themeId) {
|
||||
previewTheme(selectedCmd.themeId);
|
||||
} else if (originalTheme) {
|
||||
// Restore original when navigating away from theme commands
|
||||
previewTheme(originalTheme);
|
||||
}
|
||||
}
|
||||
|
||||
function open() {
|
||||
function open(initialFilter = '') {
|
||||
// Store original theme for preview/restore
|
||||
originalTheme = document.documentElement.getAttribute('data-theme') || 'dark';
|
||||
buildCommands();
|
||||
selected = 0;
|
||||
input.value = '';
|
||||
input.value = initialFilter;
|
||||
filter();
|
||||
// If opening theme picker, select current theme
|
||||
if (initialFilter === 'theme:') {
|
||||
const currentIdx = filtered.findIndex(c => c.themeId === originalTheme);
|
||||
if (currentIdx >= 0) selected = currentIdx;
|
||||
}
|
||||
render();
|
||||
dialog.showModal();
|
||||
input.focus();
|
||||
}
|
||||
|
||||
function close() {
|
||||
dialog.close();
|
||||
restoreTheme();
|
||||
}
|
||||
|
||||
function exec() {
|
||||
if (filtered[selected]) {
|
||||
const cmd = filtered[selected];
|
||||
if (cmd) {
|
||||
if (cmd.themeId) {
|
||||
// Theme command commits the previewed choice.
|
||||
originalTheme = null;
|
||||
}
|
||||
dialog.close();
|
||||
filtered[selected].action();
|
||||
cmd.action();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -541,6 +685,122 @@ document.body.addEventListener('htmx:afterRequest', function(evt) {
|
||||
}
|
||||
});
|
||||
|
||||
// Hover previews theme without changing selection
|
||||
list.addEventListener('mouseover', e => {
|
||||
const a = e.target.closest('a[data-theme-id]');
|
||||
if (a) previewTheme(a.dataset.themeId);
|
||||
});
|
||||
|
||||
// Mouse leaving list restores to selected item's theme (or original)
|
||||
list.addEventListener('mouseleave', () => {
|
||||
const cmd = filtered[selected];
|
||||
previewTheme(cmd?.themeId || originalTheme);
|
||||
});
|
||||
|
||||
// Restore theme when dialog closes without selection (Escape, backdrop click)
|
||||
dialog.addEventListener('close', () => {
|
||||
if (originalTheme) {
|
||||
restoreTheme();
|
||||
originalTheme = null;
|
||||
}
|
||||
});
|
||||
|
||||
// FAB click to open
|
||||
if (fab) fab.addEventListener('click', open);
|
||||
if (fab) fab.addEventListener('click', () => open());
|
||||
|
||||
// Theme button opens palette with "theme:" filter
|
||||
if (themeBtn) themeBtn.addEventListener('click', () => open('theme:'));
|
||||
})();
|
||||
|
||||
// ============================================================================
|
||||
// THEME PERSISTENCE
|
||||
// ============================================================================
|
||||
|
||||
// Restore saved theme on load (also handled in inline script to prevent flash)
|
||||
(function() {
|
||||
const saved = localStorage.getItem('cf_theme');
|
||||
if (saved) document.documentElement.setAttribute('data-theme', saved);
|
||||
})();
|
||||
|
||||
// ============================================================================
|
||||
// INITIALIZATION
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Global keyboard shortcut handler
|
||||
*/
|
||||
function initKeyboardShortcuts() {
|
||||
document.addEventListener('keydown', function(e) {
|
||||
// Command+S (Mac) or Ctrl+S (Windows/Linux)
|
||||
if ((e.metaKey || e.ctrlKey) && e.key === 's') {
|
||||
// Only handle if we have editors and no Monaco editor is focused
|
||||
if (Object.keys(editors).length > 0) {
|
||||
// Check if any Monaco editor is focused
|
||||
const focusedEditor = Object.values(editors).find(ed => ed?.hasTextFocus?.());
|
||||
if (!focusedEditor) {
|
||||
e.preventDefault();
|
||||
saveAllEditors();
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize page components
|
||||
*/
|
||||
function initPage() {
|
||||
initMonacoEditors();
|
||||
initSaveButton();
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempt to reconnect to an active task from localStorage
|
||||
*/
|
||||
function tryReconnectToTask() {
|
||||
const taskId = localStorage.getItem(getTaskKey());
|
||||
if (!taskId) return;
|
||||
|
||||
whenXtermReady(() => {
|
||||
expandTerminal();
|
||||
initTerminal('terminal-output', taskId);
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize on page load
|
||||
document.addEventListener('DOMContentLoaded', function() {
|
||||
initPage();
|
||||
initKeyboardShortcuts();
|
||||
playFabIntro();
|
||||
|
||||
// Try to reconnect to any active task
|
||||
tryReconnectToTask();
|
||||
});
|
||||
|
||||
// Re-initialize after HTMX swaps main content
|
||||
document.body.addEventListener('htmx:afterSwap', function(evt) {
|
||||
if (evt.detail.target.id === 'main-content') {
|
||||
initPage();
|
||||
// Try to reconnect when navigating back to dashboard
|
||||
tryReconnectToTask();
|
||||
}
|
||||
});
|
||||
|
||||
// Handle action responses (terminal streaming)
|
||||
document.body.addEventListener('htmx:afterRequest', function(evt) {
|
||||
if (!evt.detail.successful || !evt.detail.xhr) return;
|
||||
|
||||
const text = evt.detail.xhr.responseText;
|
||||
// Only try to parse if it looks like JSON (starts with {)
|
||||
if (!text || !text.trim().startsWith('{')) return;
|
||||
|
||||
try {
|
||||
const response = JSON.parse(text);
|
||||
if (response.task_id) {
|
||||
expandTerminal();
|
||||
whenXtermReady(() => initTerminal('terminal-output', response.task_id));
|
||||
}
|
||||
} catch (e) {
|
||||
// Not valid JSON, ignore
|
||||
}
|
||||
});
|
||||
|
||||
@@ -4,12 +4,18 @@ from __future__ import annotations

import asyncio
import os
from pathlib import Path
import time
from typing import TYPE_CHECKING, Any

from compose_farm.executor import build_ssh_command
from compose_farm.ssh_keys import get_ssh_auth_sock

if TYPE_CHECKING:
from compose_farm.config import Config

# Environment variable to identify the web service (for self-update detection)
CF_WEB_SERVICE = os.environ.get("CF_WEB_SERVICE", "")

# ANSI escape codes for terminal output
RED = "\x1b[31m"
GREEN = "\x1b[32m"
@@ -17,28 +23,28 @@ DIM = "\x1b[2m"
RESET = "\x1b[0m"
CRLF = "\r\n"

def _get_ssh_auth_sock() -> str | None:
"""Get SSH_AUTH_SOCK, auto-detecting forwarded agent if needed."""
sock = os.environ.get("SSH_AUTH_SOCK")
if sock and Path(sock).is_socket():
return sock

# Try to find a forwarded SSH agent socket
agent_dir = Path.home() / ".ssh" / "agent"
if agent_dir.is_dir():
sockets = sorted(
agent_dir.glob("s.*.sshd.*"), key=lambda p: p.stat().st_mtime, reverse=True
)
for s in sockets:
if s.is_socket():
return str(s)
return None

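Note: the hunk above removes this helper in favor of the shared `get_ssh_auth_sock` import. As a minimal standalone sketch of the same detection order (environment variable first, then newest forwarded-agent socket), assuming the `~/.ssh/agent/s.*.sshd.*` layout is this project's own convention rather than a general SSH standard:

import os
from pathlib import Path

def find_agent_socket() -> str | None:
    """Prefer $SSH_AUTH_SOCK; otherwise pick the newest forwarded-agent socket."""
    sock = os.environ.get("SSH_AUTH_SOCK")
    if sock and Path(sock).is_socket():
        return sock
    agent_dir = Path.home() / ".ssh" / "agent"
    if agent_dir.is_dir():
        # Newest socket first, since stale sockets can linger after disconnects
        for s in sorted(agent_dir.glob("s.*.sshd.*"),
                        key=lambda p: p.stat().st_mtime, reverse=True):
            if s.is_socket():
                return str(s)
    return None

print(find_agent_socket())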
# In-memory task registry
tasks: dict[str, dict[str, Any]] = {}

# How long to keep completed tasks (10 minutes)
TASK_TTL_SECONDS = 600

def cleanup_stale_tasks() -> int:
"""Remove tasks that completed more than TASK_TTL_SECONDS ago.

Returns the number of tasks removed.
"""
cutoff = time.time() - TASK_TTL_SECONDS
stale = [
tid
for tid, task in tasks.items()
if task.get("completed_at") and task["completed_at"] < cutoff
]
for tid in stale:
tasks.pop(tid, None)
return len(stale)

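Note: the TTL logic keeps running tasks (no `completed_at`) alive forever and only drops finished ones once they cross the cutoff. A self-contained illustration against a throwaway registry (the task names here are made up):

import time

TASK_TTL_SECONDS = 600
tasks = {
    "old": {"status": "completed", "completed_at": time.time() - 700, "output": []},
    "new": {"status": "completed", "completed_at": time.time() - 10, "output": []},
    "live": {"status": "running", "output": []},  # no completed_at -> never expires
}

cutoff = time.time() - TASK_TTL_SECONDS
stale = [tid for tid, t in tasks.items()
         if t.get("completed_at") and t["completed_at"] < cutoff]
for tid in stale:
    tasks.pop(tid, None)

assert set(tasks) == {"new", "live"}  # only "old" crossed the 10-minute cutoff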
async def stream_to_task(task_id: str, message: str) -> None:
"""Send a message to a task's output buffer."""
@@ -46,55 +52,106 @@ async def stream_to_task(task_id: str, message: str) -> None:
tasks[task_id]["output"].append(message)

async def _stream_subprocess(task_id: str, args: list[str], env: dict[str, str]) -> int:
"""Run subprocess and stream output to task buffer. Returns exit code."""
process = await asyncio.create_subprocess_exec(
*args,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.STDOUT,
env=env,
)
if process.stdout:
async for line in process.stdout:
text = line.decode("utf-8", errors="replace")
# Convert \n to \r\n for xterm.js
if text.endswith("\n") and not text.endswith("\r\n"):
text = text[:-1] + "\r\n"
await stream_to_task(task_id, text)
return await process.wait()

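Note: the `\n` to `\r\n` conversion exists because xterm.js treats a bare line feed as "move down one row" without returning the cursor to column 0; without the carriage return, output staircases across the screen. The normalization step in isolation:

def to_crlf(line: str) -> str:
    """Normalize a single line's ending for xterm.js, which expects CR+LF."""
    if line.endswith("\n") and not line.endswith("\r\n"):
        return line[:-1] + "\r\n"
    return line

assert to_crlf("hello\n") == "hello\r\n"
assert to_crlf("already\r\n") == "already\r\n"  # left untouched
assert to_crlf("partial") == "partial"          # no trailing newline, no change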
async def run_cli_streaming(
config: Config,
args: list[str],
task_id: str,
) -> None:
"""Run a cf CLI command as subprocess and stream output to task buffer.

This reuses all CLI logic including Rich formatting, progress bars, etc.
The subprocess gets a pseudo-TTY via FORCE_COLOR so Rich outputs ANSI codes.
"""
"""Run a cf CLI command as subprocess and stream output to task buffer."""
try:
# Build command - config option goes after the subcommand
cmd = ["cf", *args, f"--config={config.config_path}"]
await stream_to_task(task_id, f"{DIM}$ {' '.join(['cf', *args])}{RESET}{CRLF}")

# Show command being executed
cmd_display = " ".join(["cf", *args])
await stream_to_task(task_id, f"{DIM}$ {cmd_display}{RESET}{CRLF}")

# Force color output even though there's no real TTY
# Set COLUMNS for Rich/Typer to format output correctly
env = {"FORCE_COLOR": "1", "TERM": "xterm-256color", "COLUMNS": "120"}

# Ensure SSH agent is available (auto-detect if needed)
ssh_sock = _get_ssh_auth_sock()
if ssh_sock:
# Build environment with color support and SSH agent
env = {**os.environ, "FORCE_COLOR": "1", "TERM": "xterm-256color", "COLUMNS": "120"}
if ssh_sock := get_ssh_auth_sock():
env["SSH_AUTH_SOCK"] = ssh_sock

process = await asyncio.create_subprocess_exec(
*cmd,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.STDOUT,
env={**os.environ, **env},
)

# Stream output line by line
if process.stdout:
async for line in process.stdout:
text = line.decode("utf-8", errors="replace")
# Convert \n to \r\n for xterm.js
if text.endswith("\n") and not text.endswith("\r\n"):
text = text[:-1] + "\r\n"
await stream_to_task(task_id, text)

exit_code = await process.wait()
exit_code = await _stream_subprocess(task_id, cmd, env)
tasks[task_id]["status"] = "completed" if exit_code == 0 else "failed"
tasks[task_id]["completed_at"] = time.time()

except Exception as e:
await stream_to_task(task_id, f"{RED}Error: {e}{RESET}{CRLF}")
tasks[task_id]["status"] = "failed"
tasks[task_id]["completed_at"] = time.time()

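Note: the environment trick is what keeps ANSI colors in piped output. Rich disables color when stdout is not a TTY, and FORCE_COLOR overrides that detection, while COLUMNS pins the wrap width since there is no terminal to measure. A hedged sketch (the inline `rich` one-liner is illustrative only and assumes rich is installed):

import os
import subprocess

# Override Rich's "is this a TTY?" detection so captured output keeps ANSI codes.
env = {**os.environ, "FORCE_COLOR": "1", "TERM": "xterm-256color", "COLUMNS": "120"}

out = subprocess.run(
    ["python", "-c", "from rich import print; print('[red]hi[/red]')"],
    capture_output=True, env=env, text=True,
).stdout
print(repr(out))  # expect something like '\x1b[31mhi\x1b[0m\n'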
def _is_self_update(service: str, command: str) -> bool:
"""Check if this is a self-update (updating the web service itself).

Self-updates need special handling because running 'down' on the container
we're running in would kill the process before 'up' can execute.
"""
if not CF_WEB_SERVICE or service != CF_WEB_SERVICE:
return False
# Commands that involve 'down' need SSH: update, restart, down
return command in ("update", "restart", "down")

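Note: the guard only trips for commands that tear the container down; read-only commands stay in-process. An illustration, assuming a hypothetical service name "farm-web" for CF_WEB_SERVICE:

CF_WEB_SERVICE = "farm-web"  # hypothetical value for illustration

def is_self_update(service: str, command: str) -> bool:
    if not CF_WEB_SERVICE or service != CF_WEB_SERVICE:
        return False
    return command in ("update", "restart", "down")

assert is_self_update("farm-web", "update") is True    # would kill our own container
assert is_self_update("farm-web", "logs") is False     # read-only, safe in-process
assert is_self_update("other-app", "update") is False  # someone else's container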
async def _run_cli_via_ssh(
config: Config,
args: list[str],
task_id: str,
) -> None:
"""Run a cf CLI command via SSH for self-updates (survives container restart)."""
try:
host = config.get_host(CF_WEB_SERVICE)
cf_cmd = f"cf {' '.join(args)} --config={config.config_path}"
# Include task_id to prevent collision with concurrent updates
log_file = f"/tmp/cf-self-update-{task_id}.log"  # noqa: S108

# setsid detaches command; tail streams output until SSH dies
remote_cmd = (
f"rm -f {log_file} && "
f"PATH=$HOME/.local/bin:/usr/local/bin:$PATH "
f"setsid sh -c '{cf_cmd} > {log_file} 2>&1' & "
f"sleep 0.3 && tail -f {log_file} 2>/dev/null"
)

await stream_to_task(task_id, f"{DIM}$ {cf_cmd}{RESET}{CRLF}")
await stream_to_task(task_id, f"{GREEN}Running via SSH (detached with setsid){RESET}{CRLF}")

ssh_args = build_ssh_command(host, remote_cmd, tty=False)
env = {**os.environ}
if ssh_sock := get_ssh_auth_sock():
env["SSH_AUTH_SOCK"] = ssh_sock

exit_code = await _stream_subprocess(task_id, ssh_args, env)

# Exit code 255 = SSH closed (container died during down) - expected for self-updates
if exit_code == 255:  # noqa: PLR2004
await stream_to_task(
task_id,
f"{CRLF}{GREEN}Container restarting... refresh the page in a few seconds.{RESET}{CRLF}",
)
tasks[task_id]["status"] = "completed"
else:
tasks[task_id]["status"] = "completed" if exit_code == 0 else "failed"
tasks[task_id]["completed_at"] = time.time()

except Exception as e:
await stream_to_task(task_id, f"{RED}Error: {e}{RESET}{CRLF}")
tasks[task_id]["status"] = "failed"
tasks[task_id]["completed_at"] = time.time()

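Note: the detach-and-tail shell line is the core of the self-update trick: setsid reparents the `cf` process away from the SSH session so it survives when the web container (and therefore the SSH client) dies, while `tail -f` relays the log until the connection drops with exit code 255. A sketch of the same construction; it substitutes `shlex.quote` for the literal single quotes used above, and the command and log path are made-up examples:

import shlex

def detached_remote_cmd(cf_cmd: str, log_file: str) -> str:
    """Build a detach-and-tail shell line in the style of the hunk above."""
    return (
        f"rm -f {log_file} && "
        f"PATH=$HOME/.local/bin:/usr/local/bin:$PATH "
        # setsid puts the command in its own session, so SSH dying won't kill it
        f"setsid sh -c {shlex.quote(f'{cf_cmd} > {log_file} 2>&1')} & "
        # give the log a moment to exist, then stream it back over the session
        f"sleep 0.3 && tail -f {log_file} 2>/dev/null"
    )

print(detached_remote_cmd("cf update farm-web", "/tmp/cf-self-update-demo.log"))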
async def run_compose_streaming(
@@ -111,4 +168,9 @@ async def run_compose_streaming(

# Build CLI args
cli_args = [cli_cmd, service, *extra_args]
await run_cli_streaming(config, cli_args, task_id)

# Use SSH for self-updates to survive container restart
if _is_self_update(service, cli_cmd):
await _run_cli_via_ssh(config, cli_args, task_id)
else:
await run_cli_streaming(config, cli_args, task_id)

@@ -1,4 +1,4 @@
{% from "partials/icons.html" import github, hamburger %}
{% from "partials/icons.html" import github, hamburger, palette %}
<!DOCTYPE html>
<html lang="en" data-theme="dark">
<head>
@@ -8,6 +8,7 @@

<!-- daisyUI + Tailwind -->
<link href="https://cdn.jsdelivr.net/npm/daisyui@5" data-vendor="daisyui.css" rel="stylesheet" type="text/css" />
<link href="https://cdn.jsdelivr.net/npm/daisyui@5/themes.css" data-vendor="daisyui-themes.css" rel="stylesheet" type="text/css" />
<script src="https://cdn.jsdelivr.net/npm/@tailwindcss/browser@4" data-vendor="tailwind.js"></script>

<!-- xterm.js -->
@@ -15,6 +16,14 @@

<!-- Custom styles -->
<link rel="stylesheet" href="/static/app.css">

<!-- Apply saved theme before render to prevent flash -->
<script>
(function() {
const t = localStorage.getItem('cf_theme');
if (t) document.documentElement.setAttribute('data-theme', t);
})();
</script>
</head>
<body class="min-h-screen bg-base-200">
<div class="drawer lg:drawer-open">
@@ -30,7 +39,7 @@
<span class="font-semibold rainbow-hover">Compose Farm</span>
</header>

<main id="main-content" class="flex-1 p-6 overflow-y-auto" hx-boost="true" hx-target="#main-content" hx-select="#main-content" hx-swap="outerHTML">
<main id="main-content" class="flex-1 p-6 overflow-y-auto">
{% block content %}{% endblock %}
</main>
</div>
@@ -45,9 +54,12 @@
<a href="https://github.com/basnijholt/compose-farm" target="_blank" title="GitHub" class="opacity-50 hover:opacity-100 transition-opacity">
{{ github() }}
</a>
<button type="button" id="theme-btn" class="opacity-50 hover:opacity-100 transition-opacity cursor-pointer" title="Change theme (opens command palette)">
{{ palette() }}
</button>
</h2>
</header>
<nav class="flex-1 overflow-y-auto p-2" hx-get="/partials/sidebar" hx-trigger="load" hx-swap="innerHTML">
<nav class="flex-1 overflow-y-auto p-2" hx-get="/partials/sidebar" hx-trigger="load, cf:refresh from:body" hx-swap="innerHTML">
<span class="loading loading-spinner loading-sm"></span> Loading...
</nav>
</aside>

src/compose_farm/web/templates/console.html (new file, 237 lines)
@@ -0,0 +1,237 @@
{% extends "base.html" %}
{% from "partials/components.html" import page_header, collapse %}
{% from "partials/icons.html" import terminal, file_code, save %}
{% block title %}Console - Compose Farm{% endblock %}

{% block content %}
<div class="max-w-6xl">
{{ page_header("Console", "Terminal and editor access") }}

<!-- Host Selector -->
<div class="flex items-center gap-4 mb-4">
<label class="font-semibold">Host:</label>
<select id="console-host-select" class="select select-sm select-bordered">
{% for name in hosts %}
<option value="{{ name }}">{{ name }}{% if name == local_host %} (local){% endif %}</option>
{% endfor %}
</select>
<button id="console-connect-btn" class="btn btn-sm btn-primary" onclick="connectConsole()">Connect</button>
<span id="console-status" class="text-sm opacity-60"></span>
</div>

<!-- Terminal -->
{% call collapse("Terminal", checked=True, icon=terminal(), subtitle="Full shell access to selected host") %}
<div id="console-terminal" class="w-full bg-base-300 rounded-lg overflow-hidden resize-y" style="height: 384px; min-height: 200px;"></div>
{% endcall %}

<!-- Editor -->
{% call collapse("Editor", checked=True, icon=file_code()) %}
<div class="flex items-center justify-between mb-2">
<div class="flex items-center gap-4">
<input type="text" id="console-file-path" class="input input-sm input-bordered w-96" placeholder="Enter file path (e.g., ~/docker-compose.yaml)" value="{{ config_path }}">
<button class="btn btn-sm btn-outline" onclick="loadFile()">Open</button>
</div>
<div class="flex items-center gap-2">
<span id="editor-status" class="text-sm opacity-60"></span>
<button id="console-save-btn" class="btn btn-sm btn-primary" onclick="saveFile()">{{ save() }} Save</button>
</div>
</div>
<div id="console-editor" class="resize-y overflow-hidden rounded-lg" style="height: 512px; min-height: 200px;"></div>
{% endcall %}
</div>

<script>
// Use var to allow re-declaration on HTMX navigation
var consoleTerminalWrapper = null; // {term, dispose}
var consoleWs = null;
var consoleEditor = null;
var currentFilePath = null;
var currentHost = null;

// Helper to show status with monospace path
function setEditorStatus(prefix, path) {
const statusEl = document.getElementById('editor-status');
const escaped = path.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
statusEl.innerHTML = `${prefix} <code class="font-mono">${escaped}</code>`;
}

function connectConsole() {
const hostSelect = document.getElementById('console-host-select');
const host = hostSelect.value;
const statusEl = document.getElementById('console-status');
const terminalEl = document.getElementById('console-terminal');

if (!host) {
statusEl.textContent = 'Please select a host';
return;
}

currentHost = host;

// Clean up existing connection (use wrapper's dispose to clean up ResizeObserver)
if (consoleWs) {
consoleWs.close();
consoleWs = null;
}
if (consoleTerminalWrapper) {
consoleTerminalWrapper.dispose();
consoleTerminalWrapper = null;
}

statusEl.textContent = 'Connecting...';

// Create WebSocket
consoleWs = createWebSocket(`/ws/shell/${host}`);

// Resize callback - createTerminal's ResizeObserver calls this on container resize
const sendSize = (cols, rows) => {
if (consoleWs && consoleWs.readyState === WebSocket.OPEN) {
consoleWs.send(JSON.stringify({ type: 'resize', cols, rows }));
}
};

// Create terminal with resize callback
consoleTerminalWrapper = createTerminal(terminalEl, { cursorBlink: true }, sendSize);
const term = consoleTerminalWrapper.term;

consoleWs.onopen = () => {
statusEl.textContent = `Connected to ${host}`;
sendSize(term.cols, term.rows);
term.focus();
// Auto-load the default file once editor is ready
const pathInput = document.getElementById('console-file-path');
if (pathInput && pathInput.value) {
const tryLoad = () => consoleEditor ? loadFile() : setTimeout(tryLoad, 100);
tryLoad();
}
};

consoleWs.onmessage = (event) => term.write(event.data);

consoleWs.onclose = () => {
statusEl.textContent = 'Disconnected';
term.write(`${ANSI.CRLF}${ANSI.DIM}[Connection closed]${ANSI.RESET}${ANSI.CRLF}`);
};

consoleWs.onerror = (error) => {
statusEl.textContent = 'Connection error';
term.write(`${ANSI.RED}[WebSocket Error]${ANSI.RESET}${ANSI.CRLF}`);
console.error('Console WebSocket error:', error);
};

// Send input to WebSocket
term.onData((data) => {
if (consoleWs && consoleWs.readyState === WebSocket.OPEN) {
consoleWs.send(data);
}
});
}

function initConsoleEditor() {
const editorEl = document.getElementById('console-editor');
if (!editorEl || consoleEditor) return;

loadMonaco(() => {
consoleEditor = createEditor(editorEl, '', 'plaintext', { onSave: saveFile });
});
}

async function loadFile() {
const pathInput = document.getElementById('console-file-path');
const path = pathInput.value.trim();
const statusEl = document.getElementById('editor-status');

if (!path) {
statusEl.textContent = 'Enter a file path';
return;
}

if (!currentHost) {
statusEl.textContent = 'Connect to a host first';
return;
}

setEditorStatus('Loading', path + '...');

try {
const response = await fetch(`/api/console/file?host=${encodeURIComponent(currentHost)}&path=${encodeURIComponent(path)}`);
const data = await response.json();

if (!response.ok || !data.success) {
statusEl.textContent = data.detail || 'Failed to load file';
return;
}

const language = getLanguageFromPath(path);

if (consoleEditor) {
consoleEditor.setValue(data.content);
monaco.editor.setModelLanguage(consoleEditor.getModel(), language);
currentFilePath = path; // Only set after content is loaded
setEditorStatus('Loaded:', path);
} else {
statusEl.textContent = 'Editor not ready';
}
} catch (e) {
statusEl.textContent = `Error: ${e.message}`;
}
}

async function saveFile() {
const statusEl = document.getElementById('editor-status');

if (!currentFilePath) {
statusEl.textContent = 'No file loaded';
return;
}

if (!currentHost) {
statusEl.textContent = 'Not connected to a host';
return;
}

if (!consoleEditor) {
statusEl.textContent = 'Editor not ready';
return;
}

setEditorStatus('Saving', currentFilePath + '...');

try {
const content = consoleEditor.getValue();
const response = await fetch(`/api/console/file?host=${encodeURIComponent(currentHost)}&path=${encodeURIComponent(currentFilePath)}`, {
method: 'PUT',
headers: { 'Content-Type': 'text/plain' },
body: content
});
const data = await response.json();

if (!response.ok || !data.success) {
statusEl.textContent = data.detail || 'Failed to save file';
return;
}

setEditorStatus('Saved:', currentFilePath);
} catch (e) {
statusEl.textContent = `Error: ${e.message}`;
}
}

// Initialize editor and auto-connect to first host
function init() {
initConsoleEditor();
const hostSelect = document.getElementById('console-host-select');
if (hostSelect && hostSelect.options.length > 0) {
connectConsole();
}
}

// On HTMX navigation, dependencies (app.js) are already loaded.
// On hard refresh, this script runs before app.js, so wait for DOMContentLoaded.
if (typeof createTerminal === 'function') {
init();
} else {
document.addEventListener('DOMContentLoaded', init);
}
</script>
{% endblock content %}
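Note: the editor round-trips file contents through a plain GET/PUT pair. A hedged client-side sketch of the same exchange from Python, with the endpoint shape taken from the fetch calls above; the base URL and host name are made-up examples, and the `requests` package is assumed to be installed with the web UI running:

import requests

BASE = "http://localhost:8000"  # hypothetical dev address for the web UI
params = {"host": "nas", "path": "~/docker-compose.yaml"}  # "nas" is a made-up host name

# GET returns {"success": true, "content": "..."} per the loadFile() handler above
data = requests.get(f"{BASE}/api/console/file", params=params).json()
assert data["success"]

# PUT sends the raw body back with a text/plain content type, like saveFile()
resp = requests.put(f"{BASE}/api/console/file", params=params,
                    data=data["content"], headers={"Content-Type": "text/plain"})
print(resp.json())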
@@ -8,7 +8,10 @@
{{ page_header("Compose Farm", "Cluster overview and management") }}

<!-- Stats Cards -->
{% include "partials/stats.html" %}
<div id="stats-cards" class="grid grid-cols-2 md:grid-cols-4 gap-4 mb-6"
hx-get="/partials/stats" hx-trigger="cf:refresh from:body" hx-swap="innerHTML">
{% include "partials/stats.html" %}
</div>

<!-- Global Actions -->
<div class="flex flex-wrap gap-2 mb-6">
@@ -20,7 +23,10 @@
{% include "partials/terminal.html" %}

<!-- Config Error Banner -->
<div id="config-error">
<div id="config-error"
hx-get="/partials/config-error"
hx-trigger="cf:refresh from:body"
hx-swap="innerHTML">
{% if config_error %}
{% include "partials/config_error.html" %}
{% endif %}
@@ -34,10 +40,16 @@
{% endcall %}

<!-- Pending Operations -->
{% include "partials/pending.html" %}
<div id="pending-operations"
hx-get="/partials/pending" hx-trigger="cf:refresh from:body" hx-swap="innerHTML">
{% include "partials/pending.html" %}
</div>

<!-- Services by Host -->
{% include "partials/services_by_host.html" %}
<div id="services-by-host"
hx-get="/partials/services-by-host" hx-trigger="cf:refresh from:body" hx-swap="innerHTML">
{% include "partials/services_by_host.html" %}
</div>

<!-- Hosts Configuration -->
{% call collapse("Hosts (" ~ (hosts | length) ~ ")", icon=server()) %}

@@ -1,4 +1,19 @@
{% from "partials/icons.html" import search, command %}
{% from "partials/icons.html" import search, play, square, rotate_cw, cloud_download, refresh_cw, file_text, check, home, terminal, box, palette %}

<!-- Icons for command palette (referenced by JS) -->
<template id="cmd-icons">
<span data-icon="play">{{ play() }}</span>
<span data-icon="square">{{ square() }}</span>
<span data-icon="rotate_cw">{{ rotate_cw() }}</span>
<span data-icon="cloud_download">{{ cloud_download() }}</span>
<span data-icon="refresh_cw">{{ refresh_cw() }}</span>
<span data-icon="file_text">{{ file_text() }}</span>
<span data-icon="check">{{ check() }}</span>
<span data-icon="home">{{ home() }}</span>
<span data-icon="terminal">{{ terminal() }}</span>
<span data-icon="box">{{ box() }}</span>
<span data-icon="palette">{{ palette() }}</span>
</template>
<dialog id="cmd-palette" class="modal">
<div class="modal-box max-w-lg p-0">
<label class="input input-lg bg-base-100 border-0 border-b border-base-300 w-full rounded-none rounded-t-box sticky top-0 z-10 focus-within:outline-none">
@@ -14,6 +29,8 @@
</dialog>

<!-- Floating button to open command palette -->
<button id="cmd-fab" class="btn btn-circle glass shadow-lg fixed bottom-6 right-6 z-50 hover:ring hover:ring-base-content/50" title="Command Palette (⌘K)">
{{ command(24) }}
<button id="cmd-fab" class="fixed bottom-6 right-6 z-50" title="Command Palette (⌘K)">
<div class="cmd-fab-inner">
<span>⌘ + K</span>
</div>
</button>

@@ -9,12 +9,13 @@
{% endmacro %}

{# Collapsible section #}
{% macro collapse(title, id=None, checked=False, badge=None, icon=None) %}
{% macro collapse(title, id=None, checked=False, badge=None, icon=None, subtitle=None) %}
<div class="collapse collapse-arrow bg-base-100 shadow mb-4">
<input type="checkbox" {% if id %}id="{{ id }}"{% endif %} {% if checked %}checked{% endif %} />
<div class="collapse-title font-medium flex items-center gap-2">
<div class="collapse-title font-semibold flex items-center gap-2">
{% if icon %}{{ icon }}{% endif %}{{ title }}
{% if badge %}<code class="text-xs ml-2 opacity-60">{{ badge }}</code>{% endif %}
{% if subtitle %}<span class="text-xs opacity-50 font-normal">{{ subtitle }}</span>{% endif %}
</div>
<div class="collapse-content">
{{ caller() }}

@@ -6,6 +6,14 @@
<span class="badge badge-success">running</span>
{% elif container.State == "unknown" %}
<span class="badge badge-ghost"><span class="loading loading-spinner loading-xs"></span></span>
{% elif container.State == "exited" %}
{% if container.ExitCode == 0 %}
<span class="badge badge-neutral">exited (0)</span>
{% else %}
<span class="badge badge-error">exited ({{ container.ExitCode }})</span>
{% endif %}
{% elif container.State == "created" %}
<span class="badge badge-neutral">created</span>
{% else %}
<span class="badge badge-warning">{{ container.State }}</span>
{% endif %}

@@ -146,3 +146,9 @@
<path d="m21.73 18-8-14a2 2 0 0 0-3.48 0l-8 14A2 2 0 0 0 4 21h16a2 2 0 0 0 1.73-3"/><path d="M12 9v4"/><path d="M12 17h.01"/>
</svg>
{% endmacro %}

{% macro palette(size=16) %}
<svg xmlns="http://www.w3.org/2000/svg" width="{{ size }}" height="{{ size }}" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<circle cx="13.5" cy="6.5" r="0.5" fill="currentColor"/><circle cx="17.5" cy="10.5" r="0.5" fill="currentColor"/><circle cx="8.5" cy="7.5" r="0.5" fill="currentColor"/><circle cx="6.5" cy="12.5" r="0.5" fill="currentColor"/><path d="M12 2C6.5 2 2 6.5 2 12s4.5 10 10 10c.926 0 1.648-.746 1.648-1.688 0-.437-.18-.835-.437-1.125-.29-.289-.438-.652-.438-1.125a1.64 1.64 0 0 1 1.668-1.668h1.996c3.051 0 5.555-2.503 5.555-5.555C21.965 6.012 17.461 2 12 2z"/>
</svg>
{% endmacro %}

@@ -1,5 +1,5 @@
{% from "partials/components.html" import collapse %}
<div id="pending-operations">
{% from "partials/icons.html" import circle_check %}
{% if orphaned or migrations or not_started %}
{% call collapse("Pending Operations", id="pending-collapse", checked=expanded|default(true)) %}
{% if orphaned %}
@@ -31,8 +31,7 @@
{% endcall %}
{% else %}
<div role="alert" class="alert alert-success mb-4">
<svg xmlns="http://www.w3.org/2000/svg" class="stroke-current shrink-0 h-6 w-6" fill="none" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" /></svg>
<span class="shrink-0">{{ circle_check(24) }}</span>
<span>All services are in sync with configuration.</span>
</div>
{% endif %}
</div>

@@ -1,6 +1,5 @@
{% from "partials/components.html" import collapse %}
{% from "partials/icons.html" import layers, search %}
<div id="services-by-host">
{% call collapse("Services by Host", id="services-by-host-collapse", checked=expanded|default(true), icon=layers()) %}
<div class="flex flex-wrap gap-2 mb-4 items-center">
<label class="input input-sm input-bordered flex items-center gap-2 bg-base-200">
@@ -38,4 +37,3 @@
}
</script>
{% endcall %}
</div>

@@ -1,8 +1,9 @@
{% from "partials/icons.html" import home, search %}
<!-- Dashboard Link -->
{% from "partials/icons.html" import home, search, terminal %}
<!-- Navigation Links -->
<div class="mb-4">
<ul class="menu" hx-boost="true" hx-target="#main-content" hx-select="#main-content" hx-swap="outerHTML">
<li><a href="/" class="font-semibold">{{ home() }} Dashboard</a></li>
<li><a href="/console" class="font-semibold">{{ terminal() }} Console</a></li>
</ul>
</div>

@@ -10,12 +11,12 @@
<div class="mb-4">
<h4 class="text-xs uppercase tracking-wide text-base-content/60 px-3 py-1">Services <span class="opacity-50" id="sidebar-count">({{ services | length }})</span></h4>
<div class="px-2 mb-2 flex flex-col gap-1">
<label class="input input-xs input-bordered flex items-center gap-2 bg-base-200">
<label class="input input-xs flex items-center gap-2 bg-base-200">
{{ search(14) }}<input type="text" id="sidebar-filter" placeholder="Filter..." onkeyup="sidebarFilter()" />
</label>
<select id="sidebar-host-select" class="select select-xs select-bordered bg-base-200 w-full" onchange="sidebarFilter()">
<select id="sidebar-host-select" class="select select-xs bg-base-200 w-full" onchange="sidebarFilter()">
<option value="">All hosts</option>
{% for h in hosts %}<option value="{{ h }}">{{ h }}</option>{% endfor %}
{% for h in hosts %}<option value="{{ h }}">{{ h }}{% if h == local_host %} (local){% endif %}</option>{% endfor %}
</select>
</div>
<ul class="menu menu-sm" id="sidebar-services" hx-boost="true" hx-target="#main-content" hx-select="#main-content" hx-swap="outerHTML">
@@ -30,16 +31,3 @@
{% endfor %}
</ul>
</div>
<script>
function sidebarFilter() {
const q = (document.getElementById('sidebar-filter')?.value || '').toLowerCase();
const h = document.getElementById('sidebar-host-select')?.value || '';
let n = 0;
document.querySelectorAll('#sidebar-services li').forEach(li => {
const show = (!q || li.dataset.svc.includes(q)) && (!h || !li.dataset.h || li.dataset.h === h);
li.hidden = !show;
if (show) n++;
});
document.getElementById('sidebar-count').textContent = '(' + n + ')';
}
</script>

@@ -1,8 +1,6 @@
{% from "partials/components.html" import stat_card %}
{% from "partials/icons.html" import server, layers, circle_check, circle_x %}
<div id="stats-cards" class="grid grid-cols-2 md:grid-cols-4 gap-4 mb-6">
{{ stat_card("Hosts", hosts | length, icon=server()) }}
{{ stat_card("Services", services | length, icon=layers()) }}
{{ stat_card("Running", running_count, "success", circle_check()) }}
{{ stat_card("Stopped", stopped_count, icon=circle_x()) }}
</div>
{{ stat_card("Hosts", hosts | length, icon=server()) }}
{{ stat_card("Services", services | length, icon=layers()) }}
{{ stat_card("Running", running_count, "success", circle_check()) }}
{{ stat_card("Stopped", stopped_count, icon=circle_x()) }}

@@ -8,6 +8,7 @@ import fcntl
import json
import os
import pty
import shlex
import signal
import struct
import termios
@@ -16,10 +17,13 @@ from typing import TYPE_CHECKING, Any
import asyncssh
from fastapi import APIRouter, WebSocket, WebSocketDisconnect

from compose_farm.executor import is_local
from compose_farm.executor import is_local, ssh_connect_kwargs
from compose_farm.web.deps import get_config
from compose_farm.web.streaming import CRLF, DIM, GREEN, RED, RESET, tasks

# Shell command to prefer bash over sh
SHELL_FALLBACK = "command -v bash >/dev/null && exec bash || exec sh"

if TYPE_CHECKING:
from compose_farm.config import Host

@@ -121,16 +125,26 @@ async def _bridge_websocket_to_ssh(
proc.terminate()


async def _run_local_exec(websocket: WebSocket, exec_cmd: str) -> None:
"""Run docker exec locally with PTY."""
def _make_controlling_tty(slave_fd: int) -> None:
"""Set up the slave PTY as the controlling terminal for the child process."""
# Create a new session
os.setsid()
# Make the slave fd the controlling terminal
fcntl.ioctl(slave_fd, termios.TIOCSCTTY, 0)


async def _run_local_exec(websocket: WebSocket, argv: list[str]) -> None:
"""Run command locally with PTY using argv list (no shell interpretation)."""
master_fd, slave_fd = pty.openpty()

proc = await asyncio.create_subprocess_shell(
exec_cmd,
proc = await asyncio.create_subprocess_exec(
*argv,
stdin=slave_fd,
stdout=slave_fd,
stderr=slave_fd,
close_fds=True,
preexec_fn=lambda: _make_controlling_tty(slave_fd),
start_new_session=False,  # We handle setsid in preexec_fn
)
os.close(slave_fd)

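Note: attaching the slave end as a controlling terminal (setsid + TIOCSCTTY) is what makes job control and Ctrl-C work inside the spawned shell. A minimal POSIX-only sketch of the same pattern with the standard library, independent of this codebase:

import fcntl
import os
import pty
import subprocess
import termios

master_fd, slave_fd = pty.openpty()

def make_controlling_tty() -> None:
    # New session first, then claim the slave PTY as the controlling terminal.
    os.setsid()
    fcntl.ioctl(slave_fd, termios.TIOCSCTTY, 0)

proc = subprocess.Popen(["sh", "-c", "tty"], stdin=slave_fd, stdout=slave_fd,
                        stderr=slave_fd, preexec_fn=make_controlling_tty)
os.close(slave_fd)  # parent keeps only the master end
proc.wait()
print(os.read(master_fd, 1024).decode())  # prints the pty device, e.g. /dev/pts/N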
@@ -141,13 +155,14 @@ async def _run_local_exec(websocket: WebSocket, exec_cmd: str) -> None:
await _bridge_websocket_to_fd(websocket, master_fd, proc)


async def _run_remote_exec(websocket: WebSocket, host: Host, exec_cmd: str) -> None:
async def _run_remote_exec(
websocket: WebSocket, host: Host, exec_cmd: str, *, agent_forwarding: bool = False
) -> None:
"""Run docker exec on remote host via SSH with PTY."""
# ssh_connect_kwargs includes agent_path and client_keys fallback
async with asyncssh.connect(
host.address,
port=host.port,
username=host.user,
known_hosts=None,
**ssh_connect_kwargs(host),
agent_forwarding=agent_forwarding,
) as conn:
proc: asyncssh.SSHClientProcess[Any] = await conn.create_process(
exec_cmd,
@@ -170,11 +185,15 @@ async def _run_exec_session(
await websocket.send_text(f"{RED}Host '{host_name}' not found{RESET}{CRLF}")
return

exec_cmd = f"docker exec -it {container} /bin/sh -c 'command -v bash >/dev/null && exec bash || exec sh'"

if is_local(host):
await _run_local_exec(websocket, exec_cmd)
# Local: use argv list (no shell interpretation)
argv = ["docker", "exec", "-it", container, "/bin/sh", "-c", SHELL_FALLBACK]
await _run_local_exec(websocket, argv)
else:
# Remote: quote container name to prevent injection
exec_cmd = (
f"docker exec -it {shlex.quote(container)} /bin/sh -c {shlex.quote(SHELL_FALLBACK)}"
)
await _run_remote_exec(websocket, host, exec_cmd)

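Note: the hunk above closes a shell-injection hole: locally the container name is passed as a plain argv element, and remotely it is wrapped in shlex.quote before being embedded in a shell string. A quick demonstration of what the quoting prevents, using a made-up hostile name:

import shlex

SHELL_FALLBACK = "command -v bash >/dev/null && exec bash || exec sh"

hostile = "app'; rm -rf /tmp/x; echo '"  # attacker-controlled container name

unsafe = f"docker exec -it {hostile} /bin/sh -c '{SHELL_FALLBACK}'"
safe = f"docker exec -it {shlex.quote(hostile)} /bin/sh -c {shlex.quote(SHELL_FALLBACK)}"

print(unsafe)  # the embedded quote breaks out and would run the rm command
print(safe)    # the whole name stays a single inert argument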
@@ -202,13 +221,59 @@ async def exec_websocket(
await websocket.close()


async def _run_shell_session(
websocket: WebSocket,
host_name: str,
) -> None:
"""Run an interactive shell session on a host over WebSocket."""
config = get_config()
host = config.hosts.get(host_name)
if not host:
await websocket.send_text(f"{RED}Host '{host_name}' not found{RESET}{CRLF}")
return

# Start interactive shell in home directory (avoid login shell to prevent job control warnings)
shell_cmd = "cd ~ && exec bash -i 2>/dev/null || exec sh -i"

if is_local(host):
# Local: use argv list with shell -c to interpret the command
argv = ["/bin/sh", "-c", shell_cmd]
await _run_local_exec(websocket, argv)
else:
await _run_remote_exec(websocket, host, shell_cmd, agent_forwarding=True)


@router.websocket("/ws/shell/{host}")
async def shell_websocket(
websocket: WebSocket,
host: str,
) -> None:
"""WebSocket endpoint for interactive host shell access."""
await websocket.accept()

try:
await websocket.send_text(f"{DIM}[Connecting to {host}...]{RESET}{CRLF}")
await _run_shell_session(websocket, host)
await websocket.send_text(f"{CRLF}{DIM}[Disconnected]{RESET}{CRLF}")
except WebSocketDisconnect:
pass
except Exception as e:
with contextlib.suppress(Exception):
await websocket.send_text(f"{RED}Error: {e}{RESET}{CRLF}")
finally:
with contextlib.suppress(Exception):
await websocket.close()


@router.websocket("/ws/terminal/{task_id}")
async def terminal_websocket(websocket: WebSocket, task_id: str) -> None:
"""WebSocket endpoint for terminal streaming."""
await websocket.accept()

if task_id not in tasks:
await websocket.send_text(f"{RED}Error: Task not found{RESET}{CRLF}")
await websocket.send_text(
f"{DIM}Task not found (expired or container restarted).{RESET}{CRLF}"
)
await websocket.close(code=4004)
return

@@ -232,5 +297,4 @@ async def terminal_websocket(websocket: WebSocket, task_id: str) -> None:
|
||||
await asyncio.sleep(0.05)
|
||||
except WebSocketDisconnect:
|
||||
pass
|
||||
finally:
|
||||
tasks.pop(task_id, None)
|
||||
# Task stays in memory for reconnection; cleanup_stale_tasks() handles expiry
|
||||
|
||||
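Note: a minimal, runnable illustration (not part of the diff) of the injection risk this change closes. With a hostile container name, the old f-string form hands the name to a shell unquoted, while an argv list or shlex.quote keeps it inert; the container name below is hypothetical:

import shlex

SHELL_FALLBACK = "command -v bash >/dev/null && exec bash || exec sh"
container = "web; rm -rf /"  # hypothetical hostile container name

# Old local path: the name is pasted into a string that a shell will parse.
unsafe = f"docker exec -it {container} /bin/sh -c '{SHELL_FALLBACK}'"

# New remote path: every untrusted token is quoted before SSH runs it.
safe_remote = f"docker exec -it {shlex.quote(container)} /bin/sh -c {shlex.quote(SHELL_FALLBACK)}"

# New local path: an argv list is exec'd directly; no shell ever parses the name.
safe_local = ["docker", "exec", "-it", container, "/bin/sh", "-c", SHELL_FALLBACK]

print(unsafe)       # ...exec -it web; rm -rf / ...  <- a second command appears
print(safe_remote)  # ...exec -it 'web; rm -rf /' ... <- one quoted argument
print(safe_local)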
@@ -150,7 +150,7 @@ class TestLogsHostFilter:
         mock_run_async, _ = _mock_run_async_factory(["svc1", "svc2"])

         with (
-            patch("compose_farm.cli.monitoring.load_config_or_exit", return_value=cfg),
+            patch("compose_farm.cli.common.load_config_or_exit", return_value=cfg),
             patch("compose_farm.cli.monitoring.run_async", side_effect=mock_run_async),
             patch("compose_farm.cli.monitoring.run_on_services") as mock_run,
         ):
@@ -174,7 +174,7 @@ class TestLogsHostFilter:
         mock_run_async, _ = _mock_run_async_factory(["svc1", "svc2"])

         with (
-            patch("compose_farm.cli.monitoring.load_config_or_exit", return_value=cfg),
+            patch("compose_farm.cli.common.load_config_or_exit", return_value=cfg),
             patch("compose_farm.cli.monitoring.run_async", side_effect=mock_run_async),
             patch("compose_farm.cli.monitoring.run_on_services") as mock_run,
         ):
114  tests/test_cli_ssh.py  Normal file
@@ -0,0 +1,114 @@
"""Tests for CLI ssh commands."""

from pathlib import Path
from unittest.mock import patch

from typer.testing import CliRunner

from compose_farm.cli.app import app

runner = CliRunner()


class TestSshKeygen:
    """Tests for cf ssh keygen command."""

    def test_keygen_generates_key(self, tmp_path: Path) -> None:
        """Generate SSH key when none exists."""
        key_path = tmp_path / "compose-farm"
        pubkey_path = tmp_path / "compose-farm.pub"

        with (
            patch("compose_farm.cli.ssh.SSH_KEY_PATH", key_path),
            patch("compose_farm.cli.ssh.SSH_PUBKEY_PATH", pubkey_path),
            patch("compose_farm.cli.ssh.key_exists", return_value=False),
        ):
            result = runner.invoke(app, ["ssh", "keygen"])

        # Command runs (may fail if ssh-keygen not available in test env)
        assert result.exit_code in (0, 1)

    def test_keygen_skips_if_exists(self, tmp_path: Path) -> None:
        """Skip key generation if key already exists."""
        key_path = tmp_path / "compose-farm"
        pubkey_path = tmp_path / "compose-farm.pub"

        with (
            patch("compose_farm.cli.ssh.SSH_KEY_PATH", key_path),
            patch("compose_farm.cli.ssh.SSH_PUBKEY_PATH", pubkey_path),
            patch("compose_farm.cli.ssh.key_exists", return_value=True),
        ):
            result = runner.invoke(app, ["ssh", "keygen"])

        assert "already exists" in result.output


class TestSshStatus:
    """Tests for cf ssh status command."""

    def test_status_shows_no_key(self, tmp_path: Path) -> None:
        """Show message when no key exists."""
        config_file = tmp_path / "compose-farm.yaml"
        config_file.write_text("""
hosts:
  local:
    address: localhost
services:
  test: local
""")

        with patch("compose_farm.cli.ssh.key_exists", return_value=False):
            result = runner.invoke(app, ["ssh", "status", f"--config={config_file}"])

        assert "No key found" in result.output

    def test_status_shows_key_exists(self, tmp_path: Path) -> None:
        """Show key info when key exists."""
        config_file = tmp_path / "compose-farm.yaml"
        config_file.write_text("""
hosts:
  local:
    address: localhost
services:
  test: local
""")

        with (
            patch("compose_farm.cli.ssh.key_exists", return_value=True),
            patch("compose_farm.cli.ssh.get_pubkey_content", return_value="ssh-ed25519 AAAA..."),
        ):
            result = runner.invoke(app, ["ssh", "status", f"--config={config_file}"])

        assert "Key exists" in result.output


class TestSshSetup:
    """Tests for cf ssh setup command."""

    def test_setup_no_remote_hosts(self, tmp_path: Path) -> None:
        """Show message when no remote hosts configured."""
        config_file = tmp_path / "compose-farm.yaml"
        config_file.write_text("""
hosts:
  local:
    address: localhost
services:
  test: local
""")

        result = runner.invoke(app, ["ssh", "setup", f"--config={config_file}"])

        assert "No remote hosts" in result.output


class TestSshHelp:
    """Tests for cf ssh help."""

    def test_ssh_help(self) -> None:
        """Show help for ssh command."""
        result = runner.invoke(app, ["ssh", "--help"])

        assert result.exit_code == 0
        assert "setup" in result.output
        assert "status" in result.output
        assert "keygen" in result.output

245  tests/test_ssh_keys.py  Normal file
@@ -0,0 +1,245 @@
"""Tests for ssh_keys module."""

import os
from pathlib import Path
from unittest.mock import MagicMock, patch

from compose_farm.config import Host
from compose_farm.executor import ssh_connect_kwargs
from compose_farm.ssh_keys import (
    SSH_KEY_PATH,
    get_key_path,
    get_pubkey_content,
    get_ssh_auth_sock,
    get_ssh_env,
    key_exists,
)


class TestGetSshAuthSock:
    """Tests for get_ssh_auth_sock function."""

    def test_returns_env_var_when_socket_exists(self) -> None:
        """Return SSH_AUTH_SOCK env var if the socket exists."""
        mock_path = MagicMock()
        mock_path.is_socket.return_value = True

        with (
            patch.dict(os.environ, {"SSH_AUTH_SOCK": "/tmp/agent.sock"}),
            patch("compose_farm.ssh_keys.Path", return_value=mock_path),
        ):
            result = get_ssh_auth_sock()
        assert result == "/tmp/agent.sock"

    def test_returns_none_when_env_var_not_socket(self, tmp_path: Path) -> None:
        """Return None if SSH_AUTH_SOCK points to non-socket."""
        regular_file = tmp_path / "not_a_socket"
        regular_file.touch()
        with (
            patch.dict(os.environ, {"SSH_AUTH_SOCK": str(regular_file)}),
            patch("compose_farm.ssh_keys.Path.home", return_value=tmp_path),
        ):
            # Should fall through to agent dir check, which won't exist
            result = get_ssh_auth_sock()
        assert result is None

    def test_finds_agent_in_ssh_agent_dir(self, tmp_path: Path) -> None:
        """Find agent socket in ~/.ssh/agent/ directory."""
        # Create agent directory structure with a regular file
        agent_dir = tmp_path / ".ssh" / "agent"
        agent_dir.mkdir(parents=True)
        sock_path = agent_dir / "s.12345.sshd.67890"
        sock_path.touch()  # Create as regular file

        with (
            patch.dict(os.environ, {}, clear=False),
            patch("compose_farm.ssh_keys.Path.home", return_value=tmp_path),
            patch.object(Path, "is_socket", return_value=True),
        ):
            os.environ.pop("SSH_AUTH_SOCK", None)
            result = get_ssh_auth_sock()
        assert result == str(sock_path)

    def test_returns_none_when_no_agent_found(self, tmp_path: Path) -> None:
        """Return None when no SSH agent socket is found."""
        with (
            patch.dict(os.environ, {}, clear=False),
            patch("compose_farm.ssh_keys.Path.home", return_value=tmp_path),
        ):
            os.environ.pop("SSH_AUTH_SOCK", None)
            result = get_ssh_auth_sock()
        assert result is None


class TestGetSshEnv:
    """Tests for get_ssh_env function."""

    def test_returns_env_with_ssh_auth_sock(self) -> None:
        """Return env dict with SSH_AUTH_SOCK set."""
        with patch("compose_farm.ssh_keys.get_ssh_auth_sock", return_value="/tmp/agent.sock"):
            result = get_ssh_env()
        assert result["SSH_AUTH_SOCK"] == "/tmp/agent.sock"
        # Should include other env vars too
        assert "PATH" in result or len(result) > 1

    def test_returns_env_without_ssh_auth_sock_when_none(self, tmp_path: Path) -> None:
        """Return env without SSH_AUTH_SOCK when no agent found."""
        with (
            patch.dict(os.environ, {}, clear=False),
            patch("compose_farm.ssh_keys.Path.home", return_value=tmp_path),
        ):
            os.environ.pop("SSH_AUTH_SOCK", None)
            result = get_ssh_env()
        # SSH_AUTH_SOCK should not be set if no agent found
        assert result.get("SSH_AUTH_SOCK") is None


class TestKeyExists:
    """Tests for key_exists function."""

    def test_returns_true_when_both_keys_exist(self, tmp_path: Path) -> None:
        """Return True when both private and public keys exist."""
        key_path = tmp_path / "compose-farm"
        pubkey_path = tmp_path / "compose-farm.pub"
        key_path.touch()
        pubkey_path.touch()

        with (
            patch("compose_farm.ssh_keys.SSH_KEY_PATH", key_path),
            patch("compose_farm.ssh_keys.SSH_PUBKEY_PATH", pubkey_path),
        ):
            assert key_exists() is True

    def test_returns_false_when_private_key_missing(self, tmp_path: Path) -> None:
        """Return False when private key doesn't exist."""
        key_path = tmp_path / "compose-farm"
        pubkey_path = tmp_path / "compose-farm.pub"
        pubkey_path.touch()  # Only public key exists

        with (
            patch("compose_farm.ssh_keys.SSH_KEY_PATH", key_path),
            patch("compose_farm.ssh_keys.SSH_PUBKEY_PATH", pubkey_path),
        ):
            assert key_exists() is False

    def test_returns_false_when_public_key_missing(self, tmp_path: Path) -> None:
        """Return False when public key doesn't exist."""
        key_path = tmp_path / "compose-farm"
        pubkey_path = tmp_path / "compose-farm.pub"
        key_path.touch()  # Only private key exists

        with (
            patch("compose_farm.ssh_keys.SSH_KEY_PATH", key_path),
            patch("compose_farm.ssh_keys.SSH_PUBKEY_PATH", pubkey_path),
        ):
            assert key_exists() is False


class TestGetKeyPath:
    """Tests for get_key_path function."""

    def test_returns_path_when_key_exists(self) -> None:
        """Return key path when key exists."""
        with patch("compose_farm.ssh_keys.key_exists", return_value=True):
            result = get_key_path()
        assert result == SSH_KEY_PATH

    def test_returns_none_when_key_missing(self) -> None:
        """Return None when key doesn't exist."""
        with patch("compose_farm.ssh_keys.key_exists", return_value=False):
            result = get_key_path()
        assert result is None


class TestGetPubkeyContent:
    """Tests for get_pubkey_content function."""

    def test_returns_content_when_exists(self, tmp_path: Path) -> None:
        """Return public key content when file exists."""
        pubkey_content = "ssh-ed25519 AAAA... compose-farm"
        pubkey_path = tmp_path / "compose-farm.pub"
        pubkey_path.write_text(pubkey_content + "\n")

        with patch("compose_farm.ssh_keys.SSH_PUBKEY_PATH", pubkey_path):
            result = get_pubkey_content()
        assert result == pubkey_content

    def test_returns_none_when_missing(self, tmp_path: Path) -> None:
        """Return None when public key doesn't exist."""
        pubkey_path = tmp_path / "compose-farm.pub"  # Doesn't exist

        with patch("compose_farm.ssh_keys.SSH_PUBKEY_PATH", pubkey_path):
            result = get_pubkey_content()
        assert result is None


class TestSshConnectKwargs:
    """Tests for ssh_connect_kwargs function."""

    def test_basic_kwargs(self) -> None:
        """Return basic connection kwargs."""
        host = Host(address="example.com", port=22, user="testuser")

        with (
            patch("compose_farm.executor.get_ssh_auth_sock", return_value=None),
            patch("compose_farm.executor.get_key_path", return_value=None),
        ):
            result = ssh_connect_kwargs(host)

        assert result["host"] == "example.com"
        assert result["port"] == 22
        assert result["username"] == "testuser"
        assert result["known_hosts"] is None
        assert "agent_path" not in result
        assert "client_keys" not in result

    def test_includes_agent_path_when_available(self) -> None:
        """Include agent_path when SSH agent is available."""
        host = Host(address="example.com")

        with (
            patch("compose_farm.executor.get_ssh_auth_sock", return_value="/tmp/agent.sock"),
            patch("compose_farm.executor.get_key_path", return_value=None),
        ):
            result = ssh_connect_kwargs(host)

        assert result["agent_path"] == "/tmp/agent.sock"

    def test_includes_client_keys_when_key_exists(self, tmp_path: Path) -> None:
        """Include client_keys when compose-farm key exists."""
        host = Host(address="example.com")
        key_path = tmp_path / "compose-farm"

        with (
            patch("compose_farm.executor.get_ssh_auth_sock", return_value=None),
            patch("compose_farm.executor.get_key_path", return_value=key_path),
        ):
            result = ssh_connect_kwargs(host)

        assert result["client_keys"] == [str(key_path)]

    def test_includes_both_agent_and_key(self, tmp_path: Path) -> None:
        """Include both agent_path and client_keys when both available."""
        host = Host(address="example.com")
        key_path = tmp_path / "compose-farm"

        with (
            patch("compose_farm.executor.get_ssh_auth_sock", return_value="/tmp/agent.sock"),
            patch("compose_farm.executor.get_key_path", return_value=key_path),
        ):
            result = ssh_connect_kwargs(host)

        assert result["agent_path"] == "/tmp/agent.sock"
        assert result["client_keys"] == [str(key_path)]

    def test_custom_port(self) -> None:
        """Handle custom SSH port."""
        host = Host(address="example.com", port=2222)

        with (
            patch("compose_farm.executor.get_ssh_auth_sock", return_value=None),
            patch("compose_farm.executor.get_key_path", return_value=None),
        ):
            result = ssh_connect_kwargs(host)

        assert result["port"] == 2222

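Note: the assertions above pin down the shape of ssh_connect_kwargs. A minimal sketch consistent with those tests — the actual implementation lives in compose_farm.executor and may differ in details (ssh_connect_kwargs_sketch is a hypothetical name):

from __future__ import annotations

from typing import Any

from compose_farm.config import Host
from compose_farm.ssh_keys import get_key_path, get_ssh_auth_sock


def ssh_connect_kwargs_sketch(host: Host) -> dict[str, Any]:
    """Build asyncssh.connect() kwargs; agent and key are optional fallbacks."""
    kwargs: dict[str, Any] = {
        "host": host.address,
        "port": host.port,
        "username": host.user,
        "known_hosts": None,  # hosts come from user config; skip known_hosts checks
    }
    sock = get_ssh_auth_sock()
    if sock is not None:
        kwargs["agent_path"] = sock  # prefer the running SSH agent
    key = get_key_path()
    if key is not None:
        kwargs["client_keys"] = [str(key)]  # fall back to the compose-farm key
    return kwargs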
54  tests/web/test_backup.py  Normal file
@@ -0,0 +1,54 @@
"""Tests for file backup functionality."""

from pathlib import Path

from compose_farm.web.routes.api import _backup_file, _save_with_backup


def test_backup_creates_timestamped_file(tmp_path: Path) -> None:
    """Test that backup creates file in .backups with correct content."""
    test_file = tmp_path / "test.yaml"
    test_file.write_text("original content")

    backup_path = _backup_file(test_file)

    assert backup_path is not None
    assert backup_path.parent.name == ".backups"
    assert backup_path.name.startswith("test.yaml.")
    assert backup_path.read_text() == "original content"


def test_backup_returns_none_for_nonexistent_file(tmp_path: Path) -> None:
    """Test that backup returns None if file doesn't exist."""
    assert _backup_file(tmp_path / "nonexistent.yaml") is None


def test_save_creates_new_file(tmp_path: Path) -> None:
    """Test that save creates new file without backup."""
    test_file = tmp_path / "new.yaml"

    assert _save_with_backup(test_file, "content") is True
    assert test_file.read_text() == "content"
    assert not (tmp_path / ".backups").exists()


def test_save_skips_unchanged_content(tmp_path: Path) -> None:
    """Test that save returns False and creates no backup if unchanged."""
    test_file = tmp_path / "test.yaml"
    test_file.write_text("same")

    assert _save_with_backup(test_file, "same") is False
    assert not (tmp_path / ".backups").exists()


def test_save_creates_backup_before_overwrite(tmp_path: Path) -> None:
    """Test that save backs up original before overwriting."""
    test_file = tmp_path / "test.yaml"
    test_file.write_text("original")

    assert _save_with_backup(test_file, "new") is True
    assert test_file.read_text() == "new"

    backups = list((tmp_path / ".backups").glob("test.yaml.*"))
    assert len(backups) == 1
    assert backups[0].read_text() == "original"

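Note: a minimal sketch of the two helpers these tests exercise, assuming a timestamp-suffixed copy in a sibling .backups directory; the actual code in compose_farm.web.routes.api may differ in details such as the timestamp format (the _sketch names are hypothetical):

from __future__ import annotations

import shutil
import time
from pathlib import Path


def _backup_file_sketch(path: Path) -> Path | None:
    """Copy path into .backups/ with a timestamp suffix; None if it doesn't exist."""
    if not path.exists():
        return None
    backup_dir = path.parent / ".backups"
    backup_dir.mkdir(exist_ok=True)
    backup = backup_dir / f"{path.name}.{time.strftime('%Y%m%d-%H%M%S')}"
    shutil.copy2(path, backup)
    return backup


def _save_with_backup_sketch(path: Path, content: str) -> bool:
    """Write content, backing up the old version first; False if unchanged."""
    if path.exists() and path.read_text() == content:
        return False  # unchanged saves make no backup
    _backup_file_sketch(path)  # no-op for a brand-new file
    path.write_text(content)
    return True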
@@ -75,14 +75,32 @@ class TestRenderContainers:

         assert "loading-spinner" in html

+    def test_render_exited_success(self, mock_config: Config) -> None:
+        from compose_farm.web.routes.api import _render_containers
+
+        containers = [{"Name": "plex", "State": "exited", "ExitCode": 0}]
+        html = _render_containers("plex", "server-1", containers)
+
+        assert "badge-neutral" in html
+        assert "exited (0)" in html
+
+    def test_render_exited_error(self, mock_config: Config) -> None:
+        from compose_farm.web.routes.api import _render_containers
+
+        containers = [{"Name": "plex", "State": "exited", "ExitCode": 1}]
+        html = _render_containers("plex", "server-1", containers)
+
+        assert "badge-error" in html
+        assert "exited (1)" in html
+
     def test_render_other_state(self, mock_config: Config) -> None:
         from compose_farm.web.routes.api import _render_containers

-        containers = [{"Name": "plex", "State": "exited"}]
+        containers = [{"Name": "plex", "State": "restarting"}]
         html = _render_containers("plex", "server-1", containers)

         assert "badge-warning" in html
-        assert "exited" in html
+        assert "restarting" in html

     def test_render_with_header(self, mock_config: Config) -> None:
         from compose_farm.web.routes.api import _render_containers
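Note: the assertions above imply a small state-to-badge mapping. A sketch of that logic under stated assumptions — badge_for_state is a hypothetical helper, the real _render_containers renders full HTML, and the running -> badge-success case is a guess not covered by this diff:

def badge_for_state(state: str, exit_code: int | None = None) -> tuple[str, str]:
    """Map a container state to a (badge CSS class, label) pair."""
    if state == "exited" and exit_code is not None:
        # Clean exit is neutral; any non-zero code is an error.
        cls = "badge-neutral" if exit_code == 0 else "badge-error"
        return cls, f"exited ({exit_code})"
    if state == "running":
        return "badge-success", state  # assumption, not asserted in this diff
    return "badge-warning", state  # e.g. "restarting"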
1954  tests/web/test_htmx_browser.py  Normal file
File diff suppressed because it is too large
111  tests/web/test_template_context.py  Normal file
@@ -0,0 +1,111 @@
"""Tests to verify template context variables match what templates expect.

Uses runtime validation by actually rendering templates and catching errors.
"""

from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING

import pytest
from fastapi.testclient import TestClient

if TYPE_CHECKING:
    from compose_farm.config import Config


@pytest.fixture
def mock_config(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Config:
    """Create a minimal mock config for template testing."""
    compose_dir = tmp_path / "compose"
    compose_dir.mkdir()

    # Create minimal service directory
    svc_dir = compose_dir / "test-service"
    svc_dir.mkdir()
    (svc_dir / "compose.yaml").write_text("services:\n  app:\n    image: nginx\n")

    config_path = tmp_path / "compose-farm.yaml"
    config_path.write_text(f"""
compose_dir: {compose_dir}
hosts:
  local-host:
    address: localhost
services:
  test-service: local-host
""")

    state_path = tmp_path / "compose-farm-state.yaml"
    state_path.write_text("deployed:\n  test-service: local-host\n")

    from compose_farm.config import load_config

    config = load_config(config_path)

    # Patch get_config in all relevant modules
    from compose_farm.web import deps
    from compose_farm.web.routes import api, pages

    monkeypatch.setattr(deps, "get_config", lambda: config)
    monkeypatch.setattr(api, "get_config", lambda: config)
    monkeypatch.setattr(pages, "get_config", lambda: config)

    return config


@pytest.fixture
def client(mock_config: Config) -> TestClient:
    """Create a test client with mocked config."""
    from compose_farm.web.app import create_app

    return TestClient(create_app())


class TestPageTemplatesRender:
    """Test that page templates render without missing variables."""

    def test_index_renders(self, client: TestClient) -> None:
        """Test index page renders without errors."""
        response = client.get("/")
        assert response.status_code == 200
        assert "Compose Farm" in response.text

    def test_console_renders(self, client: TestClient) -> None:
        """Test console page renders without errors."""
        response = client.get("/console")
        assert response.status_code == 200
        assert "Console" in response.text
        assert "Terminal" in response.text

    def test_service_detail_renders(self, client: TestClient) -> None:
        """Test service detail page renders without errors."""
        response = client.get("/service/test-service")
        assert response.status_code == 200
        assert "test-service" in response.text


class TestPartialTemplatesRender:
    """Test that partial templates render without missing variables."""

    def test_sidebar_renders(self, client: TestClient) -> None:
        """Test sidebar partial renders without errors."""
        response = client.get("/partials/sidebar")
        assert response.status_code == 200
        assert "Dashboard" in response.text
        assert "Console" in response.text

    def test_stats_renders(self, client: TestClient) -> None:
        """Test stats partial renders without errors."""
        response = client.get("/partials/stats")
        assert response.status_code == 200

    def test_pending_renders(self, client: TestClient) -> None:
        """Test pending partial renders without errors."""
        response = client.get("/partials/pending")
        assert response.status_code == 200

    def test_services_by_host_renders(self, client: TestClient) -> None:
        """Test services_by_host partial renders without errors."""
        response = client.get("/partials/services-by-host")
        assert response.status_code == 200

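Note: these tests only catch missing context variables because rendering happens to raise at runtime. A common complement (a sketch, not something this diff adds) is to make Jinja fail loudly on any undefined variable:

from jinja2 import Environment, StrictUndefined

# With StrictUndefined, referencing a variable the route forgot to pass
# raises UndefinedError instead of silently rendering an empty string.
env = Environment(undefined=StrictUndefined)
template = env.from_string("Hello {{ name }}!")
print(template.render(name="compose-farm"))  # OK
# template.render()  # would raise jinja2.exceptions.UndefinedError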
241  uv.lock  generated
@@ -134,6 +134,79 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" },
]

[[package]]
name = "charset-normalizer"
version = "3.4.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" },
    { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" },
    { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" },
    { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" },
    { url = "https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" },
    { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" },
    { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" },
    { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" },
    { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" },
    { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, upload-time = "2025-10-14T04:40:44.547Z" },
    { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" },
    { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" },
    { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" },
    { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" },
    { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" },
    { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" },
    { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" },
    { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" },
    { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" },
    { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" },
    { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" },
    { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" },
    { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" },
    { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" },
    { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" },
    { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" },
    { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" },
    { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" },
    { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" },
    { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" },
    { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" },
    { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" },
    { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
    { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
    { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
    { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
    { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
    { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
    { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
    { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
    { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
    { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
    { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
    { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
    { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
    { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
    { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
    { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
    { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
    { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
    { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
    { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
    { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
    { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
    { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
    { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
    { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
    { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
    { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
    { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
    { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
    { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
    { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
    { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
    { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
]

[[package]]
name = "click"
version = "8.3.1"
@@ -184,6 +257,8 @@ dev = [
    { name = "pytest" },
    { name = "pytest-asyncio" },
    { name = "pytest-cov" },
    { name = "pytest-playwright" },
    { name = "pytest-xdist" },
    { name = "ruff" },
    { name = "types-pyyaml" },
    { name = "uvicorn", extra = ["standard"] },
@@ -214,6 +289,8 @@ dev = [
    { name = "pytest", specifier = ">=9.0.2" },
    { name = "pytest-asyncio", specifier = ">=1.3.0" },
    { name = "pytest-cov", specifier = ">=6.0.0" },
    { name = "pytest-playwright", specifier = ">=0.7.0" },
    { name = "pytest-xdist", specifier = ">=3.0.0" },
    { name = "ruff", specifier = ">=0.14.8" },
    { name = "types-pyyaml", specifier = ">=6.0.12.20250915" },
    { name = "uvicorn", extras = ["standard"], specifier = ">=0.27.0" },
@@ -405,6 +482,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" },
]

[[package]]
name = "execnet"
version = "2.1.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" },
]

[[package]]
name = "fastapi"
version = "0.125.0"
@@ -573,6 +659,53 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/7f/a1a97644e39e7316d850784c642093c99df1290a460df4ede27659056834/filelock-3.20.1-py3-none-any.whl", hash = "sha256:15d9e9a67306188a44baa72f569d2bfd803076269365fdea0934385da4dc361a", size = 16666, upload-time = "2025-12-15T23:54:26.874Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "greenlet"
|
||||
version = "3.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651, upload-time = "2025-12-04T14:49:44.05Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/cb/48e964c452ca2b92175a9b2dca037a553036cb053ba69e284650ce755f13/greenlet-3.3.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e29f3018580e8412d6aaf5641bb7745d38c85228dacf51a73bd4e26ddf2a6a8e", size = 274908, upload-time = "2025-12-04T14:23:26.435Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/da/38d7bff4d0277b594ec557f479d65272a893f1f2a716cad91efeb8680953/greenlet-3.3.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a687205fb22794e838f947e2194c0566d3812966b41c78709554aa883183fb62", size = 577113, upload-time = "2025-12-04T14:50:05.493Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/f2/89c5eb0faddc3ff014f1c04467d67dee0d1d334ab81fadbf3744847f8a8a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4243050a88ba61842186cb9e63c7dfa677ec146160b0efd73b855a3d9c7fcf32", size = 590338, upload-time = "2025-12-04T14:57:41.136Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/d7/db0a5085035d05134f8c089643da2b44cc9b80647c39e93129c5ef170d8f/greenlet-3.3.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:670d0f94cd302d81796e37299bcd04b95d62403883b24225c6b5271466612f45", size = 601098, upload-time = "2025-12-04T15:07:11.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/a6/e959a127b630a58e23529972dbc868c107f9d583b5a9f878fb858c46bc1a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cb3a8ec3db4a3b0eb8a3c25436c2d49e3505821802074969db017b87bc6a948", size = 590206, upload-time = "2025-12-04T14:26:01.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/60/29035719feb91798693023608447283b266b12efc576ed013dd9442364bb/greenlet-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2de5a0b09eab81fc6a382791b995b1ccf2b172a9fec934747a7a23d2ff291794", size = 1550668, upload-time = "2025-12-04T15:04:22.439Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/5f/783a23754b691bfa86bd72c3033aa107490deac9b2ef190837b860996c9f/greenlet-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4449a736606bd30f27f8e1ff4678ee193bc47f6ca810d705981cfffd6ce0d8c5", size = 1615483, upload-time = "2025-12-04T14:27:28.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/d5/c339b3b4bc8198b7caa4f2bd9fd685ac9f29795816d8db112da3d04175bb/greenlet-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:7652ee180d16d447a683c04e4c5f6441bae7ba7b17ffd9f6b3aff4605e9e6f71", size = 301164, upload-time = "2025-12-04T14:42:51.577Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" },
{ url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297, upload-time = "2025-12-04T15:07:13.552Z" },
{ url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" },
{ url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" },
{ url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" },
{ url = "https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964, upload-time = "2025-12-04T14:36:58.316Z" },
{ url = "https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140, upload-time = "2025-12-04T14:23:01.282Z" },
{ url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219, upload-time = "2025-12-04T14:50:08.309Z" },
{ url = "https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", size = 610211, upload-time = "2025-12-04T14:57:43.968Z" },
{ url = "https://files.pythonhosted.org/packages/79/07/c47a82d881319ec18a4510bb30463ed6891f2ad2c1901ed5ec23d3de351f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30a6e28487a790417d036088b3bcb3f3ac7d8babaa7d0139edbaddebf3af9492", size = 624311, upload-time = "2025-12-04T15:07:14.697Z" },
{ url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833, upload-time = "2025-12-04T14:26:03.669Z" },
{ url = "https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256, upload-time = "2025-12-04T15:04:25.276Z" },
{ url = "https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483, upload-time = "2025-12-04T14:27:30.804Z" },
{ url = "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833, upload-time = "2025-12-04T14:32:23.929Z" },
{ url = "https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671, upload-time = "2025-12-04T14:23:05.267Z" },
{ url = "https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360, upload-time = "2025-12-04T14:50:10.026Z" },
{ url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160, upload-time = "2025-12-04T14:57:45.41Z" },
{ url = "https://files.pythonhosted.org/packages/93/79/d2c70cae6e823fac36c3bbc9077962105052b7ef81db2f01ec3b9bf17e2b/greenlet-3.3.0-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dcd2bdbd444ff340e8d6bdf54d2f206ccddbb3ccfdcd3c25bf4afaa7b8f0cf45", size = 671388, upload-time = "2025-12-04T15:07:15.789Z" },
{ url = "https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166, upload-time = "2025-12-04T14:26:05.099Z" },
{ url = "https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193, upload-time = "2025-12-04T15:04:27.041Z" },
{ url = "https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653, upload-time = "2025-12-04T14:27:32.366Z" },
{ url = "https://files.pythonhosted.org/packages/7c/9a/9030e6f9aa8fd7808e9c31ba4c38f87c4f8ec324ee67431d181fe396d705/greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170", size = 305387, upload-time = "2025-12-04T14:26:51.063Z" },
{ url = "https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638, upload-time = "2025-12-04T14:25:20.941Z" },
{ url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145, upload-time = "2025-12-04T14:50:11.039Z" },
{ url = "https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236, upload-time = "2025-12-04T14:57:47.007Z" },
{ url = "https://files.pythonhosted.org/packages/69/cc/1e4bae2e45ca2fa55299f4e85854606a78ecc37fead20d69322f96000504/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2662433acbca297c9153a4023fe2161c8dcfdcc91f10433171cf7e7d94ba2221", size = 662506, upload-time = "2025-12-04T15:07:16.906Z" },
{ url = "https://files.pythonhosted.org/packages/57/b9/f8025d71a6085c441a7eaff0fd928bbb275a6633773667023d19179fe815/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3c6e9b9c1527a78520357de498b0e709fb9e2f49c3a513afd5a249007261911b", size = 653783, upload-time = "2025-12-04T14:26:06.225Z" },
{ url = "https://files.pythonhosted.org/packages/f6/c7/876a8c7a7485d5d6b5c6821201d542ef28be645aa024cfe1145b35c120c1/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:286d093f95ec98fdd92fcb955003b8a3d054b4e2cab3e2707a5039e7b50520fd", size = 1614857, upload-time = "2025-12-04T15:04:28.484Z" },
{ url = "https://files.pythonhosted.org/packages/4f/dc/041be1dff9f23dac5f48a43323cd0789cb798342011c19a248d9c9335536/greenlet-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c10513330af5b8ae16f023e8ddbfb486ab355d04467c4679c5cfe4659975dd9", size = 1676034, upload-time = "2025-12-04T14:27:33.531Z" },
]

[[package]]
name = "h11"
version = "0.16.0"
@@ -936,6 +1069,25 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" },
]

[[package]]
name = "playwright"
version = "1.57.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "greenlet" },
{ name = "pyee" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/ed/b6/e17543cea8290ae4dced10be21d5a43c360096aa2cce0aa7039e60c50df3/playwright-1.57.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:9351c1ac3dfd9b3820fe7fc4340d96c0d3736bb68097b9b7a69bd45d25e9370c", size = 41985039, upload-time = "2025-12-09T08:06:18.408Z" },
{ url = "https://files.pythonhosted.org/packages/8b/04/ef95b67e1ff59c080b2effd1a9a96984d6953f667c91dfe9d77c838fc956/playwright-1.57.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4a9d65027bce48eeba842408bcc1421502dfd7e41e28d207e94260fa93ca67e", size = 40775575, upload-time = "2025-12-09T08:06:22.105Z" },
{ url = "https://files.pythonhosted.org/packages/60/bd/5563850322a663956c927eefcf1457d12917e8f118c214410e815f2147d1/playwright-1.57.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:99104771abc4eafee48f47dac2369e0015516dc1ce8c409807d2dd440828b9a4", size = 41985042, upload-time = "2025-12-09T08:06:25.357Z" },
{ url = "https://files.pythonhosted.org/packages/56/61/3a803cb5ae0321715bfd5247ea871d25b32c8f372aeb70550a90c5f586df/playwright-1.57.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:284ed5a706b7c389a06caa431b2f0ba9ac4130113c3a779767dda758c2497bb1", size = 45975252, upload-time = "2025-12-09T08:06:29.186Z" },
{ url = "https://files.pythonhosted.org/packages/83/d7/b72eb59dfbea0013a7f9731878df8c670f5f35318cedb010c8a30292c118/playwright-1.57.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a1bae6c0a07839cdeaddbc0756b3b2b85e476c07945f64ece08f1f956a86f1", size = 45706917, upload-time = "2025-12-09T08:06:32.549Z" },
{ url = "https://files.pythonhosted.org/packages/e4/09/3fc9ebd7c95ee54ba6a68d5c0bc23e449f7235f4603fc60534a364934c16/playwright-1.57.0-py3-none-win32.whl", hash = "sha256:1dd93b265688da46e91ecb0606d36f777f8eadcf7fbef12f6426b20bf0c9137c", size = 36553860, upload-time = "2025-12-09T08:06:35.864Z" },
{ url = "https://files.pythonhosted.org/packages/58/d4/dcdfd2a33096aeda6ca0d15584800443dd2be64becca8f315634044b135b/playwright-1.57.0-py3-none-win_amd64.whl", hash = "sha256:6caefb08ed2c6f29d33b8088d05d09376946e49a73be19271c8cd5384b82b14c", size = 36553864, upload-time = "2025-12-09T08:06:38.915Z" },
{ url = "https://files.pythonhosted.org/packages/6a/60/fe31d7e6b8907789dcb0584f88be741ba388413e4fbce35f1eba4e3073de/playwright-1.57.0-py3-none-win_arm64.whl", hash = "sha256:5f065f5a133dbc15e6e7c71e7bc04f258195755b1c32a432b792e28338c8335e", size = 32837940, upload-time = "2025-12-09T08:06:42.268Z" },
]

[[package]]
name = "pluggy"
version = "1.6.0"
@@ -1087,6 +1239,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" },
]

[[package]]
name = "pyee"
version = "13.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/95/03/1fd98d5841cd7964a27d729ccf2199602fe05eb7a405c1462eb7277945ed/pyee-13.0.0.tar.gz", hash = "sha256:b391e3c5a434d1f5118a25615001dbc8f669cf410ab67d04c4d4e07c55481c37", size = 31250, upload-time = "2025-03-17T18:53:15.955Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9b/4d/b9add7c84060d4c1906abe9a7e5359f2a60f7a9a4f67268b2766673427d8/pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498", size = 15730, upload-time = "2025-03-17T18:53:14.532Z" },
]

[[package]]
name = "pygments"
version = "2.19.2"
@@ -1125,6 +1289,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" },
]

[[package]]
name = "pytest-base-url"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pytest" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ae/1a/b64ac368de6b993135cb70ca4e5d958a5c268094a3a2a4cac6f0021b6c4f/pytest_base_url-2.1.0.tar.gz", hash = "sha256:02748589a54f9e63fcbe62301d6b0496da0d10231b753e950c63e03aee745d45", size = 6702, upload-time = "2024-01-31T22:43:00.81Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/98/1c/b00940ab9eb8ede7897443b771987f2f4a76f06be02f1b3f01eb7567e24a/pytest_base_url-2.1.0-py3-none-any.whl", hash = "sha256:3ad15611778764d451927b2a53240c1a7a591b521ea44cebfe45849d2d2812e6", size = 5302, upload-time = "2024-01-31T22:42:58.897Z" },
]

[[package]]
name = "pytest-cov"
version = "7.0.0"
@@ -1139,6 +1316,34 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
]

[[package]]
name = "pytest-playwright"
version = "0.7.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "playwright" },
{ name = "pytest" },
{ name = "pytest-base-url" },
{ name = "python-slugify" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e8/6b/913e36aa421b35689ec95ed953ff7e8df3f2ee1c7b8ab2a3f1fd39d95faf/pytest_playwright-0.7.2.tar.gz", hash = "sha256:247b61123b28c7e8febb993a187a07e54f14a9aa04edc166f7a976d88f04c770", size = 16928, upload-time = "2025-11-24T03:43:22.53Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/61/4d333d8354ea2bea2c2f01bad0a4aa3c1262de20e1241f78e73360e9b620/pytest_playwright-0.7.2-py3-none-any.whl", hash = "sha256:8084e015b2b3ecff483c2160f1c8219b38b66c0d4578b23c0f700d1b0240ea38", size = 16881, upload-time = "2025-11-24T03:43:24.423Z" },
]

[[package]]
name = "pytest-xdist"
version = "3.8.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "execnet" },
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" },
]

[[package]]
name = "python-dotenv"
version = "1.2.1"
@@ -1157,6 +1362,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" },
]

[[package]]
name = "python-slugify"
version = "8.0.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "text-unidecode" },
]
sdist = { url = "https://files.pythonhosted.org/packages/87/c7/5e1547c44e31da50a460df93af11a535ace568ef89d7a811069ead340c4a/python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856", size = 10921, upload-time = "2024-02-08T18:32:45.488Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a4/62/02da182e544a51a5c3ccf4b03ab79df279f9c60c5e82d5e8bec7ca26ac11/python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8", size = 10051, upload-time = "2024-02-08T18:32:43.911Z" },
]

[[package]]
name = "pyyaml"
version = "6.0.3"
@@ -1212,6 +1429,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
]

[[package]]
name = "requests"
version = "2.32.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "charset-normalizer" },
{ name = "idna" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]

[[package]]
name = "rich"
version = "14.2.0"
@@ -1395,6 +1627,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" },
]

[[package]]
name = "text-unidecode"
version = "1.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ab/e2/e9a00f0ccb71718418230718b3d900e71a5d16e701a3dae079a21e9cd8f8/text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93", size = 76885, upload-time = "2019-08-30T21:36:45.405Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a6/a5/c0b6468d3824fe3fde30dbb5e1f687b291608f9473681bbf7dabbf5a87d7/text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8", size = 78154, upload-time = "2019-08-30T21:37:03.543Z" },
]

[[package]]
name = "tomli"
version = "2.3.0"
75
zensical.toml
Normal file
@@ -0,0 +1,75 @@
# Compose Farm Documentation
# Built with Zensical - https://zensical.org

[project]
site_name = "Compose Farm"
site_description = "A minimal CLI tool to run Docker Compose commands across multiple hosts via SSH"
site_author = "Bas Nijholt"
site_url = "https://compose-farm.nijho.lt/"
copyright = "Copyright © 2025 Bas Nijholt"

repo_url = "https://github.com/basnijholt/compose-farm"
repo_name = "GitHub"
edit_uri = "edit/main/docs"

nav = [
    { "Home" = "index.md" },
    { "Getting Started" = "getting-started.md" },
    { "Configuration" = "configuration.md" },
    { "Commands" = "commands.md" },
    { "Architecture" = "architecture.md" },
    { "Traefik Integration" = "traefik.md" },
    { "Best Practices" = "best-practices.md" },
]

[project.theme]
language = "en"

features = [
    "announce.dismiss",
    "content.action.edit",
    "content.action.view",
    "content.code.annotate",
    "content.code.copy",
    "content.code.select",
    "content.footnote.tooltips",
    "content.tabs.link",
    "content.tooltips",
    "navigation.footer",
    "navigation.indexes",
    "navigation.instant",
    "navigation.instant.prefetch",
    "navigation.path",
    "navigation.sections",
    "navigation.top",
    "navigation.tracking",
    "search.highlight",
]

[[project.theme.palette]]
scheme = "default"
primary = "teal"
toggle.icon = "lucide/sun"
toggle.name = "Switch to dark mode"

[[project.theme.palette]]
scheme = "slate"
primary = "teal"
toggle.icon = "lucide/moon"
toggle.name = "Switch to light mode"

[project.theme.font]
text = "Inter"
code = "JetBrains Mono"

[project.theme.icon]
logo = "lucide/server"
repo = "lucide/github"

[[project.extra.social]]
icon = "fontawesome/brands/github"
link = "https://github.com/basnijholt/compose-farm"

[[project.extra.social]]
icon = "fontawesome/brands/python"
link = "https://pypi.org/project/compose-farm/"
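Usage note: a minimal sketch of previewing the documentation site this file configures, assuming Zensical is installable from PyPI and exposes mkdocs-style serve/build subcommands (see https://zensical.org); the exact invocations are an assumption, not taken from this repository.

    # hypothetical local preview and build, run from the repository root
    uvx zensical serve    # serve the site with live reload while editing docs/
    uvx zensical build    # write the static site output (e.g. for GitHub Pages)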