mirror of
https://github.com/basnijholt/compose-farm.git
synced 2026-02-03 14:13:26 +00:00
Compare commits
207 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5afda8cbb2 | ||
|
|
1bbf324f1e | ||
|
|
1be5b987a2 | ||
|
|
6b684b19f2 | ||
|
|
4a37982e30 | ||
|
|
55cb44e0e7 | ||
|
|
5c242d08bf | ||
|
|
5bf65d3849 | ||
|
|
21d5dfa175 | ||
|
|
e49ad29999 | ||
|
|
cdbe74ed89 | ||
|
|
129970379c | ||
|
|
c5c47d14dd | ||
|
|
95f19e7333 | ||
|
|
9c6edd3f18 | ||
|
|
bda9210354 | ||
|
|
f57951e8dc | ||
|
|
ba8c04caf8 | ||
|
|
ff0658117d | ||
|
|
920b593d5f | ||
|
|
27d9b08ce2 | ||
|
|
700cdacb4d | ||
|
|
3c7a532704 | ||
|
|
6048f37ad5 | ||
|
|
f18952633f | ||
|
|
437257e631 | ||
|
|
c720170f26 | ||
|
|
d9c03d6509 | ||
|
|
3b7066711f | ||
|
|
6a630c40a1 | ||
|
|
9f9c042b66 | ||
|
|
2a6d7d0b85 | ||
|
|
6d813ccd84 | ||
|
|
af9c760fb8 | ||
|
|
90656b05e3 | ||
|
|
d7a3d4e8c7 | ||
|
|
35f0b8bf99 | ||
|
|
be6b391121 | ||
|
|
7f56ba6a41 | ||
|
|
4b3d7a861e | ||
|
|
affed2edcf | ||
|
|
34642e8b8e | ||
|
|
4c8b6c5209 | ||
|
|
2b38ed28c0 | ||
|
|
26b57895ce | ||
|
|
367da13fae | ||
|
|
d6ecd42559 | ||
|
|
233c33fa52 | ||
|
|
43974c5743 | ||
|
|
cf94a62f37 | ||
|
|
81b4074827 | ||
|
|
455657c8df | ||
|
|
ee5a92788a | ||
|
|
2ba396a419 | ||
|
|
7144d58160 | ||
|
|
279fa2e5ef | ||
|
|
dbe0b8b597 | ||
|
|
b7315d255a | ||
|
|
f003d2931f | ||
|
|
6f7c557065 | ||
|
|
ecb6ee46b1 | ||
|
|
354967010f | ||
|
|
57122f31a3 | ||
|
|
cbbcec0d14 | ||
|
|
de38c35b8a | ||
|
|
def996ddf4 | ||
|
|
790e32e96b | ||
|
|
fd75c4d87f | ||
|
|
411a99cbc4 | ||
|
|
d2c6ab72b2 | ||
|
|
3656584eda | ||
|
|
8be370098d | ||
|
|
45057cb6df | ||
|
|
3f24484d60 | ||
|
|
b6d50a22b4 | ||
|
|
8a658210e1 | ||
|
|
583aaaa080 | ||
|
|
22ca4f64e8 | ||
|
|
32e798fcaa | ||
|
|
ced81c8b50 | ||
|
|
7ec4b71101 | ||
|
|
94aa58d380 | ||
|
|
f8d88e6f97 | ||
|
|
a95f6309b0 | ||
|
|
502de018af | ||
|
|
a3e8daad33 | ||
|
|
78a2f65c94 | ||
|
|
1689a6833a | ||
|
|
6d2f32eadf | ||
|
|
c549dd50c9 | ||
|
|
82312e9421 | ||
|
|
e13b367188 | ||
|
|
d73049cc1b | ||
|
|
4373b23cd3 | ||
|
|
73eb6ccf41 | ||
|
|
6ca48d0d56 | ||
|
|
b82599005e | ||
|
|
b044053674 | ||
|
|
e4f03bcd94 | ||
|
|
ac3797912f | ||
|
|
429a1f6e7e | ||
|
|
fab20e0796 | ||
|
|
1bc6baa0b0 | ||
|
|
996e0748f8 | ||
|
|
ca46fdfaa4 | ||
|
|
b480797e5b | ||
|
|
c47fdf847e | ||
|
|
3ca9562013 | ||
|
|
3104d5de28 | ||
|
|
fd141cbc8c | ||
|
|
aa0c15b6b3 | ||
|
|
4630a3e551 | ||
|
|
b70d5c52f1 | ||
|
|
5d8635ba7b | ||
|
|
27dad9d9d5 | ||
|
|
abb4417b15 | ||
|
|
388cca5591 | ||
|
|
8aa019e25f | ||
|
|
e4061cfbde | ||
|
|
9a1f20e2d4 | ||
|
|
3b45736729 | ||
|
|
1d88fa450a | ||
|
|
31ee6be163 | ||
|
|
096a2ca5f4 | ||
|
|
fb04f6f64d | ||
|
|
d8e54aa347 | ||
|
|
b2b6b421ba | ||
|
|
c6b35f02f0 | ||
|
|
7e43b0a6b8 | ||
|
|
2915b287ba | ||
|
|
ae561db0c9 | ||
|
|
2d132747c4 | ||
|
|
2848163a04 | ||
|
|
76aa6e11d2 | ||
|
|
d377df15b4 | ||
|
|
334c17cc28 | ||
|
|
f148b5bd3a | ||
|
|
54af649d76 | ||
|
|
f6e5a5fa56 | ||
|
|
01aa24d0db | ||
|
|
3e702ef72e | ||
|
|
a31218f7e5 | ||
|
|
5decb3ed95 | ||
|
|
da61436fbb | ||
|
|
b6025af0c8 | ||
|
|
ab914677c4 | ||
|
|
c0b421f812 | ||
|
|
2a446c800f | ||
|
|
dc541c0298 | ||
|
|
4d9b8b5ba4 | ||
|
|
566a07d3a4 | ||
|
|
921ce6f13a | ||
|
|
708e09a8cc | ||
|
|
04154b84f6 | ||
|
|
2bc9b09e58 | ||
|
|
16d517dcd0 | ||
|
|
5e8d09b010 | ||
|
|
6fc3535449 | ||
|
|
9158dba0ce | ||
|
|
7b2c431ca3 | ||
|
|
9deb460cfc | ||
|
|
2ce6f2473b | ||
|
|
04d8444168 | ||
|
|
b539c4ba76 | ||
|
|
473bc089c7 | ||
|
|
50f405eb77 | ||
|
|
fd0d3bcbcf | ||
|
|
f2e8ab0387 | ||
|
|
dfbf2748c7 | ||
|
|
57b0ba5916 | ||
|
|
e668fb0faf | ||
|
|
2702203cb5 | ||
|
|
27f17a2451 | ||
|
|
98c2492d21 | ||
|
|
04339cbb9a | ||
|
|
cdb3b1d257 | ||
|
|
0913769729 | ||
|
|
3a1d5b77b5 | ||
|
|
e12002ce86 | ||
|
|
676a6fe72d | ||
|
|
f29f8938fe | ||
|
|
4c0e147786 | ||
|
|
cba61118de | ||
|
|
32dc6b3665 | ||
|
|
7d98e664e9 | ||
|
|
6763403700 | ||
|
|
feb0e13bfd | ||
|
|
b86f6d190f | ||
|
|
5ed15b5445 | ||
|
|
761b6dd2d1 | ||
|
|
e86c2b6d47 | ||
|
|
9353b74c35 | ||
|
|
b7e8e0f3a9 | ||
|
|
b6c02587bc | ||
|
|
d412c42ca4 | ||
|
|
13e0adbbb9 | ||
|
|
68c41eb37c | ||
|
|
8af088bb5d | ||
|
|
1308eeca12 | ||
|
|
a66a68f395 | ||
|
|
6ea25c862e | ||
|
|
280524b546 | ||
|
|
db9360771b | ||
|
|
c7590ed0b7 | ||
|
|
bb563b9d4b | ||
|
|
fe160ee116 | ||
|
|
4c7f49414f |
88
.github/check_readme_commands.py
vendored
Executable file
88
.github/check_readme_commands.py
vendored
Executable file
@@ -0,0 +1,88 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Check that all CLI commands are documented in the README."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import typer
|
||||
|
||||
from compose_farm.cli import app
|
||||
|
||||
|
||||
def get_all_commands(typer_app: typer.Typer, prefix: str = "cf") -> set[str]:
|
||||
"""Extract all command names from a Typer app, including nested subcommands."""
|
||||
commands = set()
|
||||
|
||||
# Get registered commands (skip hidden ones like aliases)
|
||||
for command in typer_app.registered_commands:
|
||||
if command.hidden:
|
||||
continue
|
||||
name = command.name
|
||||
if not name and command.callback:
|
||||
name = command.callback.__name__
|
||||
if name:
|
||||
commands.add(f"{prefix} {name}")
|
||||
|
||||
# Get registered sub-apps (like 'config')
|
||||
for group in typer_app.registered_groups:
|
||||
sub_app = group.typer_instance
|
||||
sub_name = group.name
|
||||
if sub_app and sub_name:
|
||||
commands.add(f"{prefix} {sub_name}")
|
||||
# Don't recurse into subcommands - we only document the top-level subcommand
|
||||
|
||||
return commands
|
||||
|
||||
|
||||
def get_documented_commands(readme_path: Path) -> set[str]:
|
||||
"""Extract commands documented in README from help output sections."""
|
||||
content = readme_path.read_text()
|
||||
|
||||
# Match patterns like: <code>cf command --help</code>
|
||||
pattern = r"<code>(cf\s+[\w-]+)\s+--help</code>"
|
||||
matches = re.findall(pattern, content)
|
||||
|
||||
return set(matches)
|
||||
|
||||
|
||||
def main() -> int:
|
||||
"""Check that all CLI commands are documented in the README."""
|
||||
readme_path = Path(__file__).parent.parent / "README.md"
|
||||
|
||||
if not readme_path.exists():
|
||||
print(f"ERROR: README.md not found at {readme_path}")
|
||||
return 1
|
||||
|
||||
cli_commands = get_all_commands(app)
|
||||
documented_commands = get_documented_commands(readme_path)
|
||||
|
||||
# Also check for the main 'cf' help
|
||||
if "<code>cf --help</code>" in readme_path.read_text():
|
||||
documented_commands.add("cf")
|
||||
cli_commands.add("cf")
|
||||
|
||||
missing = cli_commands - documented_commands
|
||||
extra = documented_commands - cli_commands
|
||||
|
||||
if missing or extra:
|
||||
if missing:
|
||||
print("ERROR: Commands missing from README --help documentation:")
|
||||
for cmd in sorted(missing):
|
||||
print(f" - {cmd}")
|
||||
if extra:
|
||||
print("WARNING: Commands documented but not in CLI:")
|
||||
for cmd in sorted(extra):
|
||||
print(f" - {cmd}")
|
||||
return 1
|
||||
|
||||
print(f"✓ All {len(cli_commands)} commands documented in README")
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
18
.github/workflows/ci.yml
vendored
18
.github/workflows/ci.yml
vendored
@@ -16,10 +16,10 @@ jobs:
|
||||
python-version: ["3.11", "3.12", "3.13"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
run: uv python install ${{ matrix.python-version }}
|
||||
@@ -39,10 +39,10 @@ jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
|
||||
- name: Set up Python
|
||||
run: uv python install 3.12
|
||||
@@ -50,11 +50,5 @@ jobs:
|
||||
- name: Install dependencies
|
||||
run: uv sync --all-extras --dev
|
||||
|
||||
- name: Run ruff check
|
||||
run: uv run ruff check .
|
||||
|
||||
- name: Run ruff format check
|
||||
run: uv run ruff format --check .
|
||||
|
||||
- name: Run mypy
|
||||
run: uv run mypy src/compose_farm
|
||||
- name: Run pre-commit (via prek)
|
||||
uses: j178/prek-action@v1
|
||||
|
||||
92
.github/workflows/docker.yml
vendored
Normal file
92
.github/workflows/docker.yml
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
name: Build and Push Docker Image
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows: ["Upload Python Package"]
|
||||
types: [completed]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: 'Version to build (leave empty for latest)'
|
||||
required: false
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-latest
|
||||
# Only run if PyPI upload succeeded (or manual dispatch)
|
||||
if: ${{ github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success' }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract version
|
||||
id: version
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" = "workflow_run" ]; then
|
||||
# Get version from the tag that triggered the release
|
||||
VERSION="${{ github.event.workflow_run.head_branch }}"
|
||||
# Strip 'v' prefix if present
|
||||
VERSION="${VERSION#v}"
|
||||
elif [ -n "${{ github.event.inputs.version }}" ]; then
|
||||
VERSION="${{ github.event.inputs.version }}"
|
||||
else
|
||||
VERSION=""
|
||||
fi
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Wait for PyPI
|
||||
if: steps.version.outputs.version != ''
|
||||
run: |
|
||||
VERSION="${{ steps.version.outputs.version }}"
|
||||
echo "Waiting for compose-farm==$VERSION on PyPI..."
|
||||
for i in {1..30}; do
|
||||
if curl -sf "https://pypi.org/pypi/compose-farm/$VERSION/json" > /dev/null; then
|
||||
echo "✓ Version $VERSION available on PyPI"
|
||||
exit 0
|
||||
fi
|
||||
echo "Attempt $i: not yet available, waiting 10s..."
|
||||
sleep 10
|
||||
done
|
||||
echo "✗ Timeout waiting for PyPI"
|
||||
exit 1
|
||||
|
||||
- name: Extract metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=semver,pattern={{version}},value=v${{ steps.version.outputs.version }}
|
||||
type=semver,pattern={{major}}.{{minor}},value=v${{ steps.version.outputs.version }}
|
||||
type=semver,pattern={{major}},value=v${{ steps.version.outputs.version }}
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
VERSION=${{ steps.version.outputs.version }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
4
.github/workflows/release.yml
vendored
4
.github/workflows/release.yml
vendored
@@ -13,9 +13,9 @@ jobs:
|
||||
permissions:
|
||||
id-token: write
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
- name: Build
|
||||
run: uv build
|
||||
- name: Publish package distributions to PyPI
|
||||
|
||||
6
.github/workflows/update-readme.yml
vendored
6
.github/workflows/update-readme.yml
vendored
@@ -11,16 +11,16 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
uses: actions/setup-python@v6
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v6
|
||||
uses: astral-sh/setup-uv@v7
|
||||
|
||||
- name: Run markdown-code-runner
|
||||
env:
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -42,3 +42,5 @@ htmlcov/
|
||||
compose-farm.yaml
|
||||
!examples/compose-farm.yaml
|
||||
coverage.xml
|
||||
.env
|
||||
homepage/
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
repos:
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: check-readme-commands
|
||||
name: Check README documents all CLI commands
|
||||
entry: uv run python .github/check_readme_commands.py
|
||||
language: system
|
||||
files: ^(README\.md|src/compose_farm/cli/.*)$
|
||||
pass_filenames: false
|
||||
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
@@ -10,7 +19,7 @@ repos:
|
||||
- id: debug-statements
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.8.4
|
||||
rev: v0.14.9
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: [--fix]
|
||||
|
||||
54
CLAUDE.md
54
CLAUDE.md
@@ -10,19 +10,34 @@
|
||||
|
||||
```
|
||||
compose_farm/
|
||||
├── config.py # Pydantic models, YAML loading
|
||||
├── ssh.py # asyncssh execution, streaming
|
||||
└── cli.py # Typer commands
|
||||
├── cli/ # CLI subpackage
|
||||
│ ├── __init__.py # Imports modules to trigger command registration
|
||||
│ ├── app.py # Shared Typer app instance, version callback
|
||||
│ ├── common.py # Shared helpers, options, progress bar utilities
|
||||
│ ├── config.py # Config subcommand (init, show, path, validate, edit)
|
||||
│ ├── lifecycle.py # up, down, pull, restart, update, apply commands
|
||||
│ ├── management.py # refresh, check, init-network, traefik-file commands
|
||||
│ └── monitoring.py # logs, ps, stats commands
|
||||
├── config.py # Pydantic models, YAML loading
|
||||
├── compose.py # Compose file parsing (.env, ports, volumes, networks)
|
||||
├── console.py # Shared Rich console instances
|
||||
├── executor.py # SSH/local command execution, streaming output
|
||||
├── operations.py # Business logic (up, migrate, discover, preflight checks)
|
||||
├── state.py # Deployment state tracking (which service on which host)
|
||||
├── logs.py # Image digest snapshots (dockerfarm-log.toml)
|
||||
└── traefik.py # Traefik file-provider config generation from labels
|
||||
```
|
||||
|
||||
## Key Design Decisions
|
||||
|
||||
1. **asyncssh over Paramiko/Fabric**: Native async support, built-in streaming
|
||||
1. **Hybrid SSH approach**: asyncssh for parallel streaming with prefixes; native `ssh -t` for raw mode (progress bars)
|
||||
2. **Parallel by default**: Multiple services run concurrently via `asyncio.gather`
|
||||
3. **Streaming output**: Real-time stdout/stderr with `[service]` prefix
|
||||
3. **Streaming output**: Real-time stdout/stderr with `[service]` prefix using Rich
|
||||
4. **SSH key auth only**: Uses ssh-agent, no password handling (YAGNI)
|
||||
5. **NFS assumption**: Compose files at same path on all hosts
|
||||
6. **Local execution**: When host is `localhost`/`local`, skip SSH and run locally
|
||||
6. **Local IP auto-detection**: Skips SSH when target host matches local machine's IP
|
||||
7. **State tracking**: Tracks where services are deployed for auto-migration
|
||||
8. **Pre-flight checks**: Verifies NFS mounts and Docker networks exist before starting/migrating
|
||||
|
||||
## Communication Notes
|
||||
|
||||
@@ -31,17 +46,26 @@ compose_farm/
|
||||
## Git Safety
|
||||
|
||||
- Never amend commits.
|
||||
- Never merge into a branch; prefer fast-forward or rebase as directed.
|
||||
- **NEVER merge anything into main.** Always commit directly or use fast-forward/rebase.
|
||||
- Never force push.
|
||||
|
||||
## Commands Quick Reference
|
||||
|
||||
| Command | Docker Compose Equivalent |
|
||||
|---------|--------------------------|
|
||||
| `up` | `docker compose up -d` |
|
||||
| `down` | `docker compose down` |
|
||||
| `pull` | `docker compose pull` |
|
||||
| `restart` | `down` + `up -d` |
|
||||
CLI available as `cf` or `compose-farm`.
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `up` | Start services (`docker compose up -d`), auto-migrates if host changed |
|
||||
| `down` | Stop services (`docker compose down`). Use `--orphaned` to stop services removed from config |
|
||||
| `pull` | Pull latest images |
|
||||
| `restart` | `down` + `up -d` |
|
||||
| `update` | `pull` + `down` + `up -d` |
|
||||
| `logs` | `docker compose logs` |
|
||||
| `ps` | `docker compose ps` |
|
||||
| `apply` | Make reality match config: migrate services + stop orphans. Use `--dry-run` to preview |
|
||||
| `logs` | Show service logs |
|
||||
| `ps` | Show status of all services |
|
||||
| `stats` | Show overview (hosts, services, pending migrations; `--live` for container counts) |
|
||||
| `refresh` | Update state from reality: discover running services, capture image digests |
|
||||
| `check` | Validate config, traefik labels, mounts, networks; show host compatibility |
|
||||
| `init-network` | Create Docker network on hosts with consistent subnet/gateway |
|
||||
| `traefik-file` | Generate Traefik file-provider config from compose labels |
|
||||
| `config` | Manage config files (init, show, path, validate, edit) |
|
||||
|
||||
16
Dockerfile
Normal file
16
Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
FROM ghcr.io/astral-sh/uv:python3.14-alpine
|
||||
|
||||
# Install SSH client (required for remote host connections)
|
||||
RUN apk add --no-cache openssh-client
|
||||
|
||||
# Install compose-farm from PyPI
|
||||
ARG VERSION
|
||||
RUN uv tool install compose-farm${VERSION:+==$VERSION}
|
||||
|
||||
# Add uv tool bin to PATH
|
||||
ENV PATH="/root/.local/bin:$PATH"
|
||||
|
||||
# Default entrypoint
|
||||
ENTRYPOINT ["cf"]
|
||||
CMD ["--help"]
|
||||
21
LICENSE
Normal file
21
LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2025 Bas Nijholt
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
35
PLAN.md
35
PLAN.md
@@ -1,35 +0,0 @@
|
||||
# Compose Farm – Traefik Multihost Ingress Plan
|
||||
|
||||
## Goal
|
||||
Generate a Traefik file-provider fragment from existing docker-compose Traefik labels (no config duplication) so a single front-door Traefik on 192.168.1.66 with wildcard `*.lab.mydomain.org` can route to services running on other hosts. Keep the current simplicity (SSH + docker compose); no Swarm/K8s.
|
||||
|
||||
## Requirements
|
||||
- Traefik stays on main host; keep current `dynamic.yml` and Docker provider for local containers.
|
||||
- Add a watched directory provider (any path works) and load a generated fragment (e.g., `compose-farm.generated.yml`).
|
||||
- No edits to compose files: reuse existing `traefik.*` labels as the single source of truth; Compose Farm only reads them.
|
||||
- Generator infers routing from labels and reachability from `ports:` mappings; prefer host-published ports so Traefik can reach services across hosts. Upstreams point to `<host address>:<published host port>`; warn if no published port is found.
|
||||
- Only minimal data in `compose-farm.yaml`: hosts map and service→host mapping (already present).
|
||||
- No new orchestration/discovery layers; respect KISS/YAGNI/DRY.
|
||||
|
||||
## Non-Goals
|
||||
- No Swarm/Kubernetes adoption.
|
||||
- No global Docker provider across hosts.
|
||||
- No health checks/service discovery layer.
|
||||
|
||||
## Current State (Dec 2025)
|
||||
- Compose Farm: Typer CLI wrapping `docker compose` over SSH; config in `compose-farm.yaml`; parallel by default; snapshot/log tooling present.
|
||||
- Traefik: single instance on 192.168.1.66, wildcard `*.lab.mydomain.org`, Docker provider for local services, file provider via `dynamic.yml` already in use.
|
||||
|
||||
## Proposed Implementation Steps
|
||||
1) Add generator command: `compose-farm traefik-file --output <path>`.
|
||||
2) Resolve per-service host from `compose-farm.yaml`; read compose file at `{compose_dir}/{service}/docker-compose.yml`.
|
||||
3) Parse `traefik.*` labels to build routers/services/middlewares as in compose; map container port to published host port (from `ports:`) to form upstream URLs with host address.
|
||||
4) Emit file-provider YAML to the watched directory (recommended default: `/mnt/data/traefik/dynamic.d/compose-farm.generated.yml`, but user chooses via `--output`).
|
||||
5) Warnings: if no published port is found, warn that cross-host reachability requires L3 reachability to container IPs.
|
||||
6) Tests: label parsing, port mapping, YAML render; scenario with published port; scenario without published port.
|
||||
7) Docs: update README/CLAUDE to describe directory provider flags and the generator workflow; note that compose files remain unchanged.
|
||||
|
||||
## Open Questions
|
||||
- How to derive target host address: use `hosts.<name>.address` verbatim, or allow override per service? (Default: use host address.)
|
||||
- Should we support multiple hosts/backends per service for LB/HA? (Start with single server.)
|
||||
- Where to store generated file by default? (Default to user-specified `--output`; maybe fallback to `./compose-farm-traefik.yml`.)
|
||||
@@ -3,23 +3,28 @@
|
||||
|
||||
compose_dir: /opt/compose
|
||||
|
||||
# Optional: Auto-regenerate Traefik file-provider config after up/down/restart/update
|
||||
traefik_file: /opt/traefik/dynamic.d/compose-farm.yml
|
||||
traefik_service: traefik # Skip services on same host (docker provider handles them)
|
||||
|
||||
hosts:
|
||||
# Full form with all options
|
||||
nas01:
|
||||
server-1:
|
||||
address: 192.168.1.10
|
||||
user: docker
|
||||
port: 22
|
||||
|
||||
# Short form (just address, user defaults to current user)
|
||||
nas02: 192.168.1.11
|
||||
server-2: 192.168.1.11
|
||||
|
||||
# Local execution (no SSH)
|
||||
local: localhost
|
||||
|
||||
services:
|
||||
# Map service names to hosts
|
||||
# Compose file expected at: {compose_dir}/{service}/docker-compose.yml
|
||||
plex: nas01
|
||||
jellyfin: nas02
|
||||
sonarr: nas01
|
||||
radarr: nas02
|
||||
# Compose file expected at: {compose_dir}/{service}/compose.yaml
|
||||
traefik: server-1 # Traefik runs here
|
||||
plex: server-2 # Services on other hosts get file-provider entries
|
||||
jellyfin: server-2
|
||||
sonarr: server-1
|
||||
radarr: local
|
||||
|
||||
34
docker-compose.yml
Normal file
34
docker-compose.yml
Normal file
@@ -0,0 +1,34 @@
|
||||
services:
|
||||
cf:
|
||||
image: ghcr.io/basnijholt/compose-farm:latest
|
||||
volumes:
|
||||
- ${SSH_AUTH_SOCK}:/ssh-agent:ro
|
||||
# Compose directory (contains compose files AND compose-farm.yaml config)
|
||||
- ${CF_COMPOSE_DIR:-/opt/stacks}:${CF_COMPOSE_DIR:-/opt/stacks}
|
||||
environment:
|
||||
- SSH_AUTH_SOCK=/ssh-agent
|
||||
# Config file path (state stored alongside it)
|
||||
- CF_CONFIG=${CF_COMPOSE_DIR:-/opt/stacks}/compose-farm.yaml
|
||||
|
||||
web:
|
||||
image: ghcr.io/basnijholt/compose-farm:latest
|
||||
command: cf web --host 0.0.0.0 --port 9000
|
||||
volumes:
|
||||
- ${SSH_AUTH_SOCK}:/ssh-agent:ro
|
||||
- ${CF_COMPOSE_DIR:-/opt/stacks}:${CF_COMPOSE_DIR:-/opt/stacks}
|
||||
environment:
|
||||
- SSH_AUTH_SOCK=/ssh-agent
|
||||
- CF_CONFIG=${CF_COMPOSE_DIR:-/opt/stacks}/compose-farm.yaml
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.http.routers.compose-farm.rule=Host(`compose-farm.${DOMAIN}`)
|
||||
- traefik.http.routers.compose-farm.entrypoints=websecure
|
||||
- traefik.http.routers.compose-farm-local.rule=Host(`compose-farm.local`)
|
||||
- traefik.http.routers.compose-farm-local.entrypoints=web
|
||||
- traefik.http.services.compose-farm.loadbalancer.server.port=9000
|
||||
networks:
|
||||
- mynetwork
|
||||
|
||||
networks:
|
||||
mynetwork:
|
||||
external: true
|
||||
90
docs/dev/docker-swarm-network.md
Normal file
90
docs/dev/docker-swarm-network.md
Normal file
@@ -0,0 +1,90 @@
|
||||
# Docker Swarm Overlay Networks with Compose Farm
|
||||
|
||||
Notes from testing Docker Swarm's attachable overlay networks as a way to get cross-host container networking while still using `docker compose`.
|
||||
|
||||
## The Idea
|
||||
|
||||
Docker Swarm overlay networks can be made "attachable", allowing regular `docker compose` containers (not just swarm services) to join them. This would give us:
|
||||
|
||||
- Cross-host Docker DNS (containers find each other by name)
|
||||
- No need to publish ports for inter-container communication
|
||||
- Keep using `docker compose up` instead of `docker stack deploy`
|
||||
|
||||
## Setup Steps
|
||||
|
||||
```bash
|
||||
# On manager node
|
||||
docker swarm init --advertise-addr <manager-ip>
|
||||
|
||||
# On worker nodes (use token from init output)
|
||||
docker swarm join --token <token> <manager-ip>:2377
|
||||
|
||||
# Create attachable overlay network (on manager)
|
||||
docker network create --driver overlay --attachable my-network
|
||||
|
||||
# In compose files, add the network
|
||||
networks:
|
||||
my-network:
|
||||
external: true
|
||||
```
|
||||
|
||||
## Required Ports
|
||||
|
||||
Docker Swarm requires these ports open **bidirectionally** between all nodes:
|
||||
|
||||
| Port | Protocol | Purpose |
|
||||
|------|----------|---------|
|
||||
| 2377 | TCP | Cluster management |
|
||||
| 7946 | TCP + UDP | Node communication |
|
||||
| 4789 | UDP | Overlay network traffic (VXLAN) |
|
||||
|
||||
## Test Results (2024-12-13)
|
||||
|
||||
- docker-debian (192.168.1.66) as manager
|
||||
- dev-lxc (192.168.1.167) as worker
|
||||
|
||||
### What worked
|
||||
|
||||
- Swarm init and join
|
||||
- Overlay network creation
|
||||
- Nodes showed as Ready
|
||||
|
||||
### What failed
|
||||
|
||||
- Container on dev-lxc couldn't attach to overlay network
|
||||
- Error: `attaching to network failed... context deadline exceeded`
|
||||
- Cause: Port 7946 blocked from docker-debian → dev-lxc
|
||||
|
||||
### Root cause
|
||||
|
||||
Firewall on dev-lxc wasn't configured to allow swarm ports. Opening these ports requires sudo access on each node.
|
||||
|
||||
## Conclusion
|
||||
|
||||
Docker Swarm overlay networks are **not plug-and-play**. Requirements:
|
||||
|
||||
1. Swarm init/join on all nodes
|
||||
2. Firewall rules on all nodes (needs sudo/root)
|
||||
3. All nodes must have bidirectional connectivity on 3 ports
|
||||
|
||||
For a simpler alternative, consider:
|
||||
|
||||
- **Tailscale**: VPN mesh, containers use host's Tailscale IP
|
||||
- **Host networking + published ports**: What compose-farm does today
|
||||
- **Keep dependent services together**: Avoid cross-host networking entirely
|
||||
|
||||
## Future Work
|
||||
|
||||
If we decide to support overlay networks:
|
||||
|
||||
1. Add a `compose-farm network create` command that:
|
||||
- Initializes swarm if needed
|
||||
- Creates attachable overlay network
|
||||
- Documents required firewall rules
|
||||
|
||||
2. Add network config to compose-farm.yaml:
|
||||
```yaml
|
||||
overlay_network: compose-farm-net
|
||||
```
|
||||
|
||||
3. Auto-inject network into compose files (or document manual setup)
|
||||
128
docs/dev/future-improvements.md
Normal file
128
docs/dev/future-improvements.md
Normal file
@@ -0,0 +1,128 @@
|
||||
# Future Improvements
|
||||
|
||||
Low-priority improvements identified during code review. These are not currently causing issues but could be addressed if they become pain points.
|
||||
|
||||
## 1. State Module Efficiency (LOW)
|
||||
|
||||
**Current:** Every state operation reads and writes the entire file.
|
||||
|
||||
```python
|
||||
def set_service_host(config, service, host):
|
||||
state = load_state(config) # Read file
|
||||
state[service] = host
|
||||
save_state(config, state) # Write file
|
||||
```
|
||||
|
||||
**Impact:** With 87 services, this is fine. With 1000+, it would be slow.
|
||||
|
||||
**Potential fix:** Add batch operations:
|
||||
```python
|
||||
def update_state(config, updates: dict[str, str | None]) -> None:
|
||||
"""Batch update: set services to hosts, None means remove."""
|
||||
state = load_state(config)
|
||||
for service, host in updates.items():
|
||||
if host is None:
|
||||
state.pop(service, None)
|
||||
else:
|
||||
state[service] = host
|
||||
save_state(config, state)
|
||||
```
|
||||
|
||||
**When to do:** Only if state operations become noticeably slow.
|
||||
|
||||
---
|
||||
|
||||
## 2. Remote-Aware Compose Path Resolution (LOW)
|
||||
|
||||
**Current:** `config.get_compose_path()` checks if files exist on the local filesystem:
|
||||
|
||||
```python
|
||||
def get_compose_path(self, service: str) -> Path:
|
||||
for filename in ("compose.yaml", "compose.yml", ...):
|
||||
candidate = service_dir / filename
|
||||
if candidate.exists(): # Local check!
|
||||
return candidate
|
||||
```
|
||||
|
||||
**Why this works:** NFS/shared storage means local = remote.
|
||||
|
||||
**Why it could break:** If running compose-farm from a machine without the NFS mount, it returns `compose.yaml` (the default) even if `docker-compose.yml` exists on the remote host.
|
||||
|
||||
**Potential fix:** Query the remote host for file existence, or accept this limitation and document it.
|
||||
|
||||
**When to do:** Only if users need to run compose-farm from non-NFS machines.
|
||||
|
||||
---
|
||||
|
||||
## 3. Add Integration Tests for CLI Commands (MEDIUM)
|
||||
|
||||
**Current:** No integration tests for the actual CLI commands. Tests cover the underlying functions but not the Typer commands themselves.
|
||||
|
||||
**Potential fix:** Add integration tests using `CliRunner` from Typer:
|
||||
|
||||
```python
|
||||
from typer.testing import CliRunner
|
||||
from compose_farm.cli import app
|
||||
|
||||
runner = CliRunner()
|
||||
|
||||
def test_check_command_validates_config():
|
||||
result = runner.invoke(app, ["check", "--local"])
|
||||
assert result.exit_code == 0
|
||||
```
|
||||
|
||||
**When to do:** When CLI behavior becomes complex enough to warrant dedicated testing.
|
||||
|
||||
---
|
||||
|
||||
## 4. Add Tests for operations.py (MEDIUM)
|
||||
|
||||
**Current:** Operations module has 30% coverage. Most logic is tested indirectly through test_sync.py.
|
||||
|
||||
**Potential fix:** Add dedicated tests for:
|
||||
- `up_services()` with migration scenarios
|
||||
- `preflight_check()`
|
||||
- `check_host_compatibility()`
|
||||
|
||||
**When to do:** When adding new operations or modifying migration logic.
|
||||
|
||||
---
|
||||
|
||||
## 5. Consider Structured Logging (LOW)
|
||||
|
||||
**Current:** Operations print directly to console using Rich. This couples the operations module to the Rich library.
|
||||
|
||||
**Potential fix:** Use Python's logging module with a custom Rich handler:
|
||||
|
||||
```python
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# In operations:
|
||||
logger.info("Migrating %s from %s to %s", service, old_host, new_host)
|
||||
|
||||
# In cli.py - configure Rich handler:
|
||||
from rich.logging import RichHandler
|
||||
logging.basicConfig(handlers=[RichHandler()])
|
||||
```
|
||||
|
||||
**Benefits:**
|
||||
- Operations become testable without capturing stdout
|
||||
- Logs can be redirected to files
|
||||
- Log levels provide filtering
|
||||
|
||||
**When to do:** Only if console output coupling becomes a problem for testing or extensibility.
|
||||
|
||||
---
|
||||
|
||||
## Design Decisions to Keep
|
||||
|
||||
These patterns are working well and should be preserved:
|
||||
|
||||
1. **asyncio + asyncssh** - Solid async foundation
|
||||
2. **Pydantic models** - Clean validation
|
||||
3. **Rich for output** - Good UX
|
||||
4. **Test structure** - Good coverage
|
||||
5. **Module separation** - cli/operations/executor/compose pattern
|
||||
6. **KISS principle** - Don't over-engineer
|
||||
79
docs/reddit-post.md
Normal file
79
docs/reddit-post.md
Normal file
@@ -0,0 +1,79 @@
|
||||
# Title options
|
||||
|
||||
- Multi-host Docker Compose without Kubernetes or file changes
|
||||
- I built a CLI to run Docker Compose across hosts. Zero changes to your files.
|
||||
- I made a CLI to run Docker Compose across multiple hosts without Kubernetes or Swarm
|
||||
---
|
||||
|
||||
I've been running 100+ Docker Compose stacks on a single machine, and it kept running out of memory. I needed to spread services across multiple hosts, but:
|
||||
|
||||
- **Kubernetes** felt like overkill. I don't need pods, ingress controllers, or 10x more YAML.
|
||||
- **Docker Swarm** is basically in maintenance mode.
|
||||
- Both require rewriting my compose files.
|
||||
|
||||
So I built **Compose Farm**, a simple CLI that runs `docker compose` commands over SSH. No agents, no cluster setup, no changes to your existing compose files.
|
||||
|
||||
## How it works
|
||||
|
||||
One YAML file maps services to hosts:
|
||||
|
||||
```yaml
|
||||
compose_dir: /opt/stacks
|
||||
|
||||
hosts:
|
||||
nuc: 192.168.1.10
|
||||
hp: 192.168.1.11
|
||||
|
||||
services:
|
||||
plex: nuc
|
||||
jellyfin: hp
|
||||
sonarr: nuc
|
||||
radarr: nuc
|
||||
```
|
||||
|
||||
Then just:
|
||||
|
||||
```bash
|
||||
cf up plex # runs on nuc via SSH
|
||||
cf apply # makes config state match desired state on all hosts (like Terraform apply)
|
||||
cf up --all # starts everything on their assigned hosts
|
||||
cf logs -f plex # streams logs
|
||||
cf ps # shows status across all hosts
|
||||
```
|
||||
|
||||
## Auto-migration
|
||||
|
||||
Change a service's host in the config and run `cf up`. It stops the service on the old host and starts it on the new one. No manual SSH needed.
|
||||
|
||||
```yaml
|
||||
# Before
|
||||
plex: nuc
|
||||
|
||||
# After (just change this)
|
||||
plex: hp
|
||||
```
|
||||
|
||||
```bash
|
||||
cf up plex # migrates automatically
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
- SSH key auth to your hosts
|
||||
- Same paths on all hosts (I use NFS from my NAS)
|
||||
- That's it. No agents, no daemons.
|
||||
|
||||
## What it doesn't do
|
||||
|
||||
- No high availability (if a host goes down, services don't auto-migrate)
|
||||
- No overlay networking (containers on different hosts can't talk via Docker DNS)
|
||||
- No health checks or automatic restarts
|
||||
|
||||
It's a convenience wrapper around `docker compose` + SSH. If you need failover or cross-host container networking, you probably do need Swarm or Kubernetes.
|
||||
|
||||
## Links
|
||||
|
||||
- GitHub: https://github.com/basnijholt/compose-farm
|
||||
- Install: `uv tool install compose-farm` or `pip install compose-farm`
|
||||
|
||||
Happy to answer questions or take feedback!
|
||||
169
docs/truenas-nested-nfs.md
Normal file
169
docs/truenas-nested-nfs.md
Normal file
@@ -0,0 +1,169 @@
|
||||
# TrueNAS NFS: Accessing Child ZFS Datasets
|
||||
|
||||
When NFS-exporting a parent ZFS dataset on TrueNAS, child datasets appear as **empty directories** to NFS clients. This document explains the problem and provides a workaround.
|
||||
|
||||
## The Problem
|
||||
|
||||
TrueNAS structures storage as ZFS datasets. A common pattern is:
|
||||
|
||||
```
|
||||
tank/data <- parent dataset (NFS exported)
|
||||
tank/data/app1 <- child dataset
|
||||
tank/data/app2 <- child dataset
|
||||
```
|
||||
|
||||
When you create an NFS share for `tank/data`, clients mount it and see the `app1/` and `app2/` directories—but they're empty. This happens because each ZFS dataset is a separate filesystem, and NFS doesn't traverse into child filesystems by default.
|
||||
|
||||
## The Solution: `crossmnt`
|
||||
|
||||
The NFS `crossmnt` export option tells the server to allow clients to traverse into child filesystems. However, TrueNAS doesn't expose this option in the UI.
|
||||
|
||||
### Workaround Script
|
||||
|
||||
This Python script injects `crossmnt` into `/etc/exports`:
|
||||
|
||||
```python
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Add crossmnt to TrueNAS NFS exports for child dataset visibility.
|
||||
|
||||
Usage: fix-nfs-crossmnt.py /mnt/pool/dataset
|
||||
|
||||
Setup:
|
||||
1. scp fix-nfs-crossmnt.py root@truenas.local:/root/
|
||||
2. chmod +x /root/fix-nfs-crossmnt.py
|
||||
3. Test: /root/fix-nfs-crossmnt.py /mnt/pool/dataset
|
||||
4. Add cron job: TrueNAS UI > System > Advanced > Cron Jobs
|
||||
Command: /root/fix-nfs-crossmnt.py /mnt/pool/dataset
|
||||
Schedule: */5 * * * *
|
||||
"""
|
||||
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
EXPORTS_FILE = Path("/etc/exports")
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) != 2:
|
||||
print(f"Usage: {sys.argv[0]} /mnt/pool/dataset", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
export_path = sys.argv[1]
|
||||
content = EXPORTS_FILE.read_text()
|
||||
|
||||
if f'"{export_path}"' not in content:
|
||||
print(f"ERROR: {export_path} not found in {EXPORTS_FILE}", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
lines = content.splitlines()
|
||||
result = []
|
||||
in_block = False
|
||||
modified = False
|
||||
|
||||
for line in lines:
|
||||
if f'"{export_path}"' in line:
|
||||
in_block = True
|
||||
elif line.startswith('"'):
|
||||
in_block = False
|
||||
|
||||
if in_block and line[:1] in (" ", "\t") and "crossmnt" not in line:
|
||||
            line = re.sub(
                r"\)(\\\s*)?$",
                lambda m: ",crossmnt)" + (m.group(1) or ""),
                line,
            )
|
||||
modified = True
|
||||
|
||||
result.append(line)
|
||||
|
||||
if not modified:
|
||||
return 0 # Already applied
|
||||
|
||||
EXPORTS_FILE.write_text("\n".join(result) + "\n")
|
||||
subprocess.run(["exportfs", "-ra"], check=True)
|
||||
print(f"Added crossmnt to {export_path}")
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
```
|
||||
|
||||
## Setup Instructions
|
||||
|
||||
### 1. Copy the script to TrueNAS
|
||||
|
||||
```bash
|
||||
scp fix-nfs-crossmnt.py root@truenas.local:/root/
|
||||
ssh root@truenas.local chmod +x /root/fix-nfs-crossmnt.py
|
||||
```
|
||||
|
||||
### 2. Test manually
|
||||
|
||||
```bash
|
||||
ssh root@truenas.local
|
||||
|
||||
# Run the script
|
||||
/root/fix-nfs-crossmnt.py /mnt/tank/data
|
||||
|
||||
# Verify crossmnt was added
|
||||
cat /etc/exports
|
||||
```
|
||||
|
||||
You should see `,crossmnt` added to the client options:
|
||||
|
||||
```
|
||||
"/mnt/tank/data"\
|
||||
192.168.1.10(sec=sys,rw,no_subtree_check,crossmnt)\
|
||||
192.168.1.11(sec=sys,rw,no_subtree_check,crossmnt)
|
||||
```
|
||||
|
||||
### 3. Verify on NFS client
|
||||
|
||||
```bash
|
||||
# Before: empty directory
|
||||
ls /mnt/data/app1/
|
||||
# (nothing)
|
||||
|
||||
# After: actual contents visible
|
||||
ls /mnt/data/app1/
|
||||
# config.yaml data/ logs/
|
||||
```
|
||||
|
||||
### 4. Make it persistent
|
||||
|
||||
TrueNAS regenerates `/etc/exports` when you modify NFS shares in the UI. To survive this, set up a cron job:
|
||||
|
||||
1. Go to **TrueNAS UI → System → Advanced → Cron Jobs → Add**
|
||||
2. Configure:
|
||||
- **Description:** Fix NFS crossmnt
|
||||
- **Command:** `/root/fix-nfs-crossmnt.py /mnt/tank/data`
|
||||
- **Run As User:** root
|
||||
- **Schedule:** `*/5 * * * *` (every 5 minutes)
|
||||
- **Enabled:** checked
|
||||
3. Save
|
||||
|
||||
The script is idempotent—it only modifies the file if `crossmnt` is missing, and skips the write entirely if already applied.
|
||||
|
||||
## How It Works
|
||||
|
||||
1. Parses `/etc/exports` to find the specified export block
|
||||
2. Adds `,crossmnt` before the closing `)` on each client line
|
||||
3. Writes the file only if changes were made
|
||||
4. Runs `exportfs -ra` to reload the NFS configuration
|
||||
|
||||
## Why Not Use SMB Instead?
|
||||
|
||||
SMB handles child datasets seamlessly, but:
|
||||
|
||||
- NFS is simpler for Linux-to-Linux with matching UIDs
|
||||
- SMB requires more complex permission mapping for Docker volumes
|
||||
- Many existing setups already use NFS
|
||||
|
||||
## Related Links
|
||||
|
||||
- [TrueNAS Forum: Add crossmnt option to NFS exports](https://forums.truenas.com/t/add-crossmnt-option-to-nfs-exports/10573)
|
||||
- [exports(5) man page](https://man7.org/linux/man-pages/man5/exports.5.html) - see `crossmnt` option
|
||||
|
||||
## Tested On
|
||||
|
||||
- TrueNAS SCALE 24.10
|
||||
65
docs/truenas-nfs-root-squash.md
Normal file
65
docs/truenas-nfs-root-squash.md
Normal file
@@ -0,0 +1,65 @@
|
||||
# TrueNAS NFS: Disabling Root Squash
|
||||
|
||||
When running Docker containers on NFS-mounted storage, containers that run as root will fail to write files unless root squash is disabled. This document explains the problem and solution.
|
||||
|
||||
## The Problem
|
||||
|
||||
By default, NFS uses "root squash" which maps the root user (UID 0) on clients to `nobody` on the server. This is a security feature to prevent remote root users from having root access to the NFS server's files.
|
||||
|
||||
However, many Docker containers run as root internally. When these containers try to write to NFS-mounted volumes, the writes fail with "Permission denied" because the NFS server sees them as `nobody`, not `root`.
|
||||
|
||||
Example error in container logs:
|
||||
```
|
||||
System.UnauthorizedAccessException: Access to the path '/data' is denied.
|
||||
Error: EACCES: permission denied, mkdir '/app/data'
|
||||
```
|
||||
|
||||
## The Solution
|
||||
|
||||
In TrueNAS, configure the NFS share to map remote root to local root:
|
||||
|
||||
### TrueNAS SCALE UI
|
||||
|
||||
1. Go to **Shares → NFS**
|
||||
2. Edit your share
|
||||
3. Under **Advanced Options**:
|
||||
- **Maproot User**: `root`
|
||||
- **Maproot Group**: `wheel`
|
||||
4. Save
|
||||
|
||||
### Result in /etc/exports
|
||||
|
||||
```
|
||||
"/mnt/pool/data"\
|
||||
192.168.1.25(sec=sys,rw,no_root_squash,no_subtree_check)\
|
||||
192.168.1.26(sec=sys,rw,no_root_squash,no_subtree_check)
|
||||
```
|
||||
|
||||
The `no_root_squash` option means remote root is treated as root on the server.
|
||||
|
||||
## Why `wheel`?
|
||||
|
||||
TrueNAS follows the FreeBSD convention, where the root user's primary group is named `wheel` (GID 0) rather than `root` as on typical Linux systems. Either way, `root:wheel` maps to `0:0`.
|
||||
|
||||
## Security Considerations
|
||||
|
||||
Disabling root squash means any machine that can mount the NFS share has full root access to those files. This is acceptable when:
|
||||
|
||||
- The NFS clients are on a trusted private network
|
||||
- Only known hosts (by IP) are allowed to mount the share
|
||||
- The data isn't security-critical
|
||||
|
||||
For home lab setups with Docker containers, this is typically fine.
|
||||
|
||||
## Alternative: Run Containers as Non-Root
|
||||
|
||||
If you prefer to keep root squash enabled, you can run containers as a non-root user:
|
||||
|
||||
1. **LinuxServer.io images**: Set `PUID=1000` and `PGID=1000` environment variables
|
||||
2. **Other images**: Add `user: "1000:1000"` to the compose service
|
||||
|
||||
However, not all containers support running as non-root (they may need to bind to privileged ports, create system directories, etc.).
|
||||
|
||||
## Tested On
|
||||
|
||||
- TrueNAS SCALE 24.10
|
||||
@@ -1,42 +1,171 @@
|
||||
# Compose Farm Examples
|
||||
|
||||
This folder contains example Docker Compose services for testing Compose Farm locally.
|
||||
Real-world examples demonstrating compose-farm patterns for multi-host Docker deployments.
|
||||
|
||||
## Services
|
||||
|
||||
| Service | Type | Demonstrates |
|
||||
|---------|------|--------------|
|
||||
| [traefik](traefik/) | Infrastructure | Reverse proxy, Let's Encrypt, file-provider |
|
||||
| [mealie](mealie/) | Single container | Traefik labels, resource limits, environment vars |
|
||||
| [uptime-kuma](uptime-kuma/) | Single container | Docker socket, user mapping, custom DNS |
|
||||
| [paperless-ngx](paperless-ngx/) | Multi-container | Redis + App stack (SQLite) |
|
||||
| [autokuma](autokuma/) | Multi-host | Demonstrates `all` keyword (runs on every host) |
|
||||
|
||||
## Key Patterns
|
||||
|
||||
### External Network
|
||||
|
||||
All services connect to a shared external network for inter-service communication:
|
||||
|
||||
```yaml
|
||||
networks:
|
||||
mynetwork:
|
||||
external: true
|
||||
```
|
||||
|
||||
Create it on each host with consistent settings:
|
||||
|
||||
```bash
|
||||
compose-farm init-network --network mynetwork --subnet 172.20.0.0/16
|
||||
```
|
||||
|
||||
### Traefik Labels (Dual Routes)
|
||||
|
||||
Services expose two routes for different access patterns:
|
||||
|
||||
1. **HTTPS route** (`websecure` entrypoint): For your custom domain with Let's Encrypt TLS
|
||||
2. **HTTP route** (`web` entrypoint): For `.local` domains on your LAN (no TLS needed)
|
||||
|
||||
This pattern allows accessing services via:
|
||||
- `https://mealie.example.com` - from anywhere, with TLS
|
||||
- `http://mealie.local` - from your local network, no TLS overhead
|
||||
|
||||
```yaml
|
||||
labels:
|
||||
# HTTPS route for custom domain (e.g., mealie.example.com)
|
||||
- traefik.enable=true
|
||||
- traefik.http.routers.myapp.rule=Host(`myapp.${DOMAIN}`)
|
||||
- traefik.http.routers.myapp.entrypoints=websecure
|
||||
- traefik.http.services.myapp.loadbalancer.server.port=8080
|
||||
# HTTP route for .local domain (e.g., myapp.local)
|
||||
- traefik.http.routers.myapp-local.rule=Host(`myapp.local`)
|
||||
- traefik.http.routers.myapp-local.entrypoints=web
|
||||
```
|
||||
|
||||
> **Note:** `.local` domains require local DNS (e.g., Pi-hole, Technitium) to resolve to your Traefik host.
|
||||
|
||||
### Environment Variables
|
||||
|
||||
Each service has a `.env` file for secrets and domain configuration.
|
||||
Edit these files to set your domain and credentials:
|
||||
|
||||
```bash
|
||||
# Example: set your domain
|
||||
echo "DOMAIN=example.com" > mealie/.env
|
||||
```
|
||||
|
||||
Variables like `${DOMAIN}` are substituted at runtime by Docker Compose.
|
||||
|
||||
### NFS Volume Mounts
|
||||
|
||||
All data is stored on shared NFS storage at `/mnt/data/`:
|
||||
|
||||
```yaml
|
||||
volumes:
|
||||
- /mnt/data/myapp:/app/data
|
||||
```
|
||||
|
||||
This allows services to migrate between hosts without data loss.
|
||||
|
||||
### Multi-Host Services
|
||||
|
||||
Services that need to run on every host (e.g., monitoring agents):
|
||||
|
||||
```yaml
|
||||
# In compose-farm.yaml
|
||||
services:
|
||||
autokuma: all # Runs on every configured host
|
||||
```
|
||||
|
||||
### Multi-Container Stacks
|
||||
|
||||
Database-backed apps with multiple services:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
redis:
|
||||
image: redis:7
|
||||
app:
|
||||
depends_on:
|
||||
- redis
|
||||
```
|
||||
|
||||
> **NFS + PostgreSQL Warning:** PostgreSQL should NOT run on NFS storage due to
|
||||
> fsync and file locking issues. Use SQLite (safe for single-writer on NFS) or
|
||||
> keep PostgreSQL data on local volumes (non-migratable).
|
||||
|
||||
### AutoKuma Labels (Optional)
|
||||
|
||||
The autokuma example demonstrates compose-farm's **multi-host feature** - running the same service on all hosts using the `all` keyword. AutoKuma itself is not part of compose-farm; it's just a good example because it needs to run on every host to monitor local Docker containers.
|
||||
|
||||
[AutoKuma](https://github.com/BigBoot/AutoKuma) automatically creates Uptime Kuma monitors from Docker labels:
|
||||
|
||||
```yaml
|
||||
labels:
|
||||
- kuma.myapp.http.name=My App
|
||||
- kuma.myapp.http.url=https://myapp.${DOMAIN}
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
cd examples
|
||||
|
||||
# Check status of all services
|
||||
# 1. Create the shared network on all hosts
|
||||
compose-farm init-network
|
||||
|
||||
# 2. Start Traefik first (the reverse proxy)
|
||||
compose-farm up traefik
|
||||
|
||||
# 3. Start other services
|
||||
compose-farm up mealie uptime-kuma
|
||||
|
||||
# 4. Check status
|
||||
compose-farm ps
|
||||
|
||||
# Pull images
|
||||
compose-farm pull --all
|
||||
# 5. Generate Traefik file-provider config for cross-host routing
|
||||
compose-farm traefik-file --all
|
||||
|
||||
# Start hello-world (runs and exits)
|
||||
compose-farm up hello
|
||||
# 6. View logs
|
||||
compose-farm logs mealie
|
||||
|
||||
# Start nginx (stays running)
|
||||
compose-farm up nginx
|
||||
|
||||
# Check nginx is running
|
||||
curl localhost:8080
|
||||
|
||||
# View logs
|
||||
compose-farm logs nginx
|
||||
|
||||
# Stop nginx
|
||||
compose-farm down nginx
|
||||
|
||||
# Update all (pull + restart)
|
||||
compose-farm update --all
|
||||
# 7. Stop everything
|
||||
compose-farm down --all
|
||||
```
|
||||
|
||||
## Services
|
||||
## Configuration
|
||||
|
||||
- **hello**: Simple hello-world container (exits immediately)
|
||||
- **nginx**: Nginx web server on port 8080
|
||||
The `compose-farm.yaml` shows a multi-host setup:
|
||||
|
||||
## Config
|
||||
- **primary** (192.168.1.10): Runs Traefik and heavy services
|
||||
- **secondary** (192.168.1.11): Runs lighter services
|
||||
- **autokuma**: Runs on ALL hosts to monitor local containers
|
||||
|
||||
The `compose-farm.yaml` in this directory configures both services to run locally (no SSH).
|
||||
When Traefik runs on `primary` and a service runs on `secondary`, compose-farm
|
||||
automatically generates file-provider config so Traefik can route to it.
|
||||
|
||||
## Traefik File-Provider
|
||||
|
||||
When services run on different hosts than Traefik, use `traefik-file` to generate routing config:
|
||||
|
||||
```bash
|
||||
# Generate config for all services
|
||||
compose-farm traefik-file --all -o traefik/dynamic.d/compose-farm.yml
|
||||
|
||||
# Or configure auto-generation in compose-farm.yaml:
|
||||
traefik_file: /opt/stacks/traefik/dynamic.d/compose-farm.yml
|
||||
traefik_service: traefik
|
||||
```
|
||||
|
||||
With `traefik_file` configured, compose-farm automatically regenerates the config after `up`, `down`, `restart`, and `update` commands.
|
||||
|
||||
4
examples/autokuma/.env
Normal file
4
examples/autokuma/.env
Normal file
@@ -0,0 +1,4 @@
|
||||
# Copy to .env and fill in your values
|
||||
DOMAIN=example.com
|
||||
UPTIME_KUMA_USERNAME=admin
|
||||
UPTIME_KUMA_PASSWORD=your-uptime-kuma-password
|
||||
31
examples/autokuma/compose.yaml
Normal file
31
examples/autokuma/compose.yaml
Normal file
@@ -0,0 +1,31 @@
|
||||
# AutoKuma - Automatic Uptime Kuma monitor creation from Docker labels
|
||||
# Demonstrates: Multi-host service (runs on ALL hosts)
|
||||
#
|
||||
# This service monitors Docker containers on each host and automatically
|
||||
# creates Uptime Kuma monitors based on container labels.
|
||||
#
|
||||
# In compose-farm.yaml, configure as:
|
||||
# autokuma: all
|
||||
#
|
||||
# This runs the same container on every host, so each host's local
|
||||
# Docker socket is monitored.
|
||||
name: autokuma
|
||||
services:
|
||||
autokuma:
|
||||
image: ghcr.io/bigboot/autokuma:latest
|
||||
container_name: autokuma
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
# Connect to your Uptime Kuma instance
|
||||
AUTOKUMA__KUMA__URL: https://uptime.${DOMAIN}
|
||||
AUTOKUMA__KUMA__USERNAME: ${UPTIME_KUMA_USERNAME}
|
||||
AUTOKUMA__KUMA__PASSWORD: ${UPTIME_KUMA_PASSWORD}
|
||||
# Tag for auto-created monitors
|
||||
AUTOKUMA__TAG__NAME: autokuma
|
||||
AUTOKUMA__TAG__COLOR: "#10B981"
|
||||
volumes:
|
||||
# Access local Docker socket to discover containers
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
# Custom DNS for resolving internal domains
|
||||
dns:
|
||||
- 192.168.1.1 # Your local DNS server
|
||||
9
examples/compose-farm-state.yaml
Normal file
9
examples/compose-farm-state.yaml
Normal file
@@ -0,0 +1,9 @@
|
||||
deployed:
|
||||
autokuma:
|
||||
- primary
|
||||
- secondary
|
||||
- local
|
||||
mealie: secondary
|
||||
paperless-ngx: primary
|
||||
traefik: primary
|
||||
uptime-kuma: secondary
|
||||
@@ -1,11 +1,40 @@
|
||||
# Example Compose Farm config for local testing
|
||||
# Run from the examples directory: cd examples && compose-farm ps
|
||||
# Example Compose Farm configuration
|
||||
# Demonstrates a multi-host setup with NFS shared storage
|
||||
#
|
||||
# To test locally: Update the host addresses and run from the examples directory
|
||||
|
||||
compose_dir: .
|
||||
compose_dir: /opt/stacks/compose-farm/examples
|
||||
|
||||
# Auto-regenerate Traefik file-provider config after up/down/restart/update
|
||||
traefik_file: /opt/stacks/compose-farm/examples/traefik/dynamic.d/compose-farm.yml
|
||||
traefik_service: traefik # Skip Traefik's host in file-provider (docker provider handles it)
|
||||
|
||||
hosts:
|
||||
# Primary server - runs Traefik and most services
|
||||
# Full form with all options
|
||||
primary:
|
||||
address: 192.168.1.10
|
||||
user: deploy
|
||||
port: 22
|
||||
|
||||
# Secondary server - runs some services for load distribution
|
||||
# Short form (user defaults to current user, port defaults to 22)
|
||||
secondary: 192.168.1.11
|
||||
|
||||
# Local execution (no SSH) - for testing or when running on the host itself
|
||||
local: localhost
|
||||
|
||||
services:
|
||||
hello: local
|
||||
nginx: local
|
||||
# Infrastructure (runs on primary where Traefik is)
|
||||
traefik: primary
|
||||
|
||||
# Multi-host services (runs on ALL hosts)
|
||||
# AutoKuma monitors Docker containers on each host
|
||||
autokuma: all
|
||||
|
||||
# Primary server services
|
||||
paperless-ngx: primary
|
||||
|
||||
# Secondary server services (distributed for performance)
|
||||
mealie: secondary
|
||||
uptime-kuma: secondary
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
services:
|
||||
hello:
|
||||
image: hello-world
|
||||
container_name: sdc-hello
|
||||
2
examples/mealie/.env
Normal file
2
examples/mealie/.env
Normal file
@@ -0,0 +1,2 @@
|
||||
# Copy to .env and fill in your values
|
||||
DOMAIN=example.com
|
||||
47
examples/mealie/compose.yaml
Normal file
47
examples/mealie/compose.yaml
Normal file
@@ -0,0 +1,47 @@
|
||||
# Mealie - Recipe manager
|
||||
# Simple single-container service with Traefik labels
|
||||
#
|
||||
# Demonstrates:
|
||||
# - HTTPS route: mealie.${DOMAIN} (e.g., mealie.example.com) with Let's Encrypt
|
||||
# - HTTP route: mealie.local for LAN access without TLS
|
||||
# - External network, resource limits, environment variables
|
||||
name: mealie
|
||||
services:
|
||||
mealie:
|
||||
image: ghcr.io/mealie-recipes/mealie:latest
|
||||
container_name: mealie
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- mynetwork
|
||||
ports:
|
||||
- "9925:9000"
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
memory: 1000M
|
||||
volumes:
|
||||
- /mnt/data/mealie:/app/data
|
||||
environment:
|
||||
ALLOW_SIGNUP: "false"
|
||||
PUID: 1000
|
||||
PGID: 1000
|
||||
TZ: America/Los_Angeles
|
||||
MAX_WORKERS: 1
|
||||
WEB_CONCURRENCY: 1
|
||||
BASE_URL: https://mealie.${DOMAIN}
|
||||
labels:
|
||||
# HTTPS route: mealie.example.com (requires DOMAIN in .env)
|
||||
- traefik.enable=true
|
||||
- traefik.http.routers.mealie.rule=Host(`mealie.${DOMAIN}`)
|
||||
- traefik.http.routers.mealie.entrypoints=websecure
|
||||
- traefik.http.services.mealie.loadbalancer.server.port=9000
|
||||
# HTTP route: mealie.local (for LAN access, no TLS)
|
||||
- traefik.http.routers.mealie-local.rule=Host(`mealie.local`)
|
||||
- traefik.http.routers.mealie-local.entrypoints=web
|
||||
# AutoKuma: automatically create Uptime Kuma monitor
|
||||
- kuma.mealie.http.name=Mealie
|
||||
- kuma.mealie.http.url=https://mealie.${DOMAIN}
|
||||
|
||||
networks:
|
||||
mynetwork:
|
||||
external: true
|
||||
@@ -1,6 +0,0 @@
|
||||
services:
|
||||
nginx:
|
||||
image: nginx:alpine
|
||||
container_name: sdc-nginx
|
||||
ports:
|
||||
- "8080:80"
|
||||
3
examples/paperless-ngx/.env
Normal file
3
examples/paperless-ngx/.env
Normal file
@@ -0,0 +1,3 @@
|
||||
# Copy to .env and fill in your values
|
||||
DOMAIN=example.com
|
||||
PAPERLESS_SECRET_KEY=change-me-to-a-random-string
|
||||
60
examples/paperless-ngx/compose.yaml
Normal file
60
examples/paperless-ngx/compose.yaml
Normal file
@@ -0,0 +1,60 @@
|
||||
# Paperless-ngx - Document management system
|
||||
#
|
||||
# Demonstrates:
|
||||
# - HTTPS route: paperless.${DOMAIN} (e.g., paperless.example.com) with Let's Encrypt
|
||||
# - HTTP route: paperless.local for LAN access without TLS
|
||||
# - Multi-container stack (Redis + App with SQLite)
|
||||
#
|
||||
# NOTE: This example uses SQLite (the default) instead of PostgreSQL.
|
||||
# PostgreSQL should NOT be used with NFS storage due to fsync/locking issues.
|
||||
# If you need PostgreSQL, use local volumes for the database.
|
||||
name: paperless-ngx
|
||||
services:
|
||||
redis:
|
||||
image: redis:8
|
||||
container_name: paperless-redis
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- mynetwork
|
||||
volumes:
|
||||
- /mnt/data/paperless/redis:/data
|
||||
|
||||
paperless:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
container_name: paperless
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
- redis
|
||||
networks:
|
||||
- mynetwork
|
||||
ports:
|
||||
- "8000:8000"
|
||||
volumes:
|
||||
# SQLite database stored here (safe on NFS for single-writer)
|
||||
- /mnt/data/paperless/data:/usr/src/paperless/data
|
||||
- /mnt/data/paperless/media:/usr/src/paperless/media
|
||||
- /mnt/data/paperless/export:/usr/src/paperless/export
|
||||
- /mnt/data/paperless/consume:/usr/src/paperless/consume
|
||||
environment:
|
||||
PAPERLESS_REDIS: redis://redis:6379
|
||||
PAPERLESS_URL: https://paperless.${DOMAIN}
|
||||
PAPERLESS_SECRET_KEY: ${PAPERLESS_SECRET_KEY}
|
||||
USERMAP_UID: 1000
|
||||
USERMAP_GID: 1000
|
||||
labels:
|
||||
# HTTPS route: paperless.example.com (requires DOMAIN in .env)
|
||||
- traefik.enable=true
|
||||
- traefik.http.routers.paperless.rule=Host(`paperless.${DOMAIN}`)
|
||||
- traefik.http.routers.paperless.entrypoints=websecure
|
||||
- traefik.http.services.paperless.loadbalancer.server.port=8000
|
||||
- traefik.docker.network=mynetwork
|
||||
# HTTP route: paperless.local (for LAN access, no TLS)
|
||||
- traefik.http.routers.paperless-local.rule=Host(`paperless.local`)
|
||||
- traefik.http.routers.paperless-local.entrypoints=web
|
||||
# AutoKuma: automatically create Uptime Kuma monitor
|
||||
- kuma.paperless.http.name=Paperless
|
||||
- kuma.paperless.http.url=https://paperless.${DOMAIN}
|
||||
|
||||
networks:
|
||||
mynetwork:
|
||||
external: true
|
||||
5
examples/traefik/.env
Normal file
5
examples/traefik/.env
Normal file
@@ -0,0 +1,5 @@
|
||||
# Copy to .env and fill in your values
|
||||
DOMAIN=example.com
|
||||
ACME_EMAIL=you@example.com
|
||||
CF_API_EMAIL=you@example.com
|
||||
CF_API_KEY=your-cloudflare-api-key
|
||||
58
examples/traefik/compose.yaml
Normal file
58
examples/traefik/compose.yaml
Normal file
@@ -0,0 +1,58 @@
|
||||
# Traefik reverse proxy with Let's Encrypt and file-provider support
|
||||
# This is the foundation service - other services route through it
|
||||
#
|
||||
# Entrypoints:
|
||||
# - web (port 80): HTTP for .local domains (no TLS needed on LAN)
|
||||
# - websecure (port 443): HTTPS with Let's Encrypt for custom domains
|
||||
name: traefik
|
||||
services:
|
||||
traefik:
|
||||
image: traefik:v3.2
|
||||
container_name: traefik
|
||||
command:
|
||||
- --api.dashboard=true
|
||||
- --providers.docker=true
|
||||
- --providers.docker.exposedbydefault=false
|
||||
- --providers.docker.network=mynetwork
|
||||
# File provider for routing to services on other hosts
|
||||
- --providers.file.directory=/dynamic.d
|
||||
- --providers.file.watch=true
|
||||
# HTTP entrypoint for .local domains (LAN access, no TLS)
|
||||
- --entrypoints.web.address=:80
|
||||
# HTTPS entrypoint for custom domains (with Let's Encrypt TLS)
|
||||
- --entrypoints.websecure.address=:443
|
||||
- --entrypoints.websecure.asDefault=true
|
||||
- --entrypoints.websecure.http.tls.certresolver=letsencrypt
|
||||
# Let's Encrypt DNS challenge (using Cloudflare as example)
|
||||
- --certificatesresolvers.letsencrypt.acme.email=${ACME_EMAIL}
|
||||
- --certificatesresolvers.letsencrypt.acme.storage=/letsencrypt/acme.json
|
||||
- --certificatesresolvers.letsencrypt.acme.dnschallenge.provider=cloudflare
|
||||
- --certificatesresolvers.letsencrypt.acme.dnschallenge.resolvers=1.1.1.1:53
|
||||
environment:
|
||||
# Cloudflare API token for DNS challenge
|
||||
CF_API_EMAIL: ${CF_API_EMAIL}
|
||||
CF_API_KEY: ${CF_API_KEY}
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
- "8080:8080" # Dashboard
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
- /mnt/data/traefik/letsencrypt:/letsencrypt
|
||||
- ./dynamic.d:/dynamic.d:ro
|
||||
networks:
|
||||
- mynetwork
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
# Dashboard accessible at traefik.yourdomain.com
|
||||
- traefik.http.routers.traefik.rule=Host(`traefik.${DOMAIN}`)
|
||||
- traefik.http.routers.traefik.entrypoints=websecure
|
||||
- traefik.http.routers.traefik.service=api@internal
|
||||
# AutoKuma: automatically create Uptime Kuma monitor
|
||||
- kuma.traefik.http.name=Traefik
|
||||
- kuma.traefik.http.url=https://traefik.${DOMAIN}
|
||||
|
||||
networks:
|
||||
mynetwork:
|
||||
external: true
|
||||
40
examples/traefik/dynamic.d/compose-farm.yml
Normal file
40
examples/traefik/dynamic.d/compose-farm.yml
Normal file
@@ -0,0 +1,40 @@
|
||||
# Auto-generated by compose-farm
|
||||
# https://github.com/basnijholt/compose-farm
|
||||
#
|
||||
# This file routes traffic to services running on hosts other than Traefik's host.
|
||||
# Services on Traefik's host use the Docker provider directly.
|
||||
#
|
||||
# Regenerate with: compose-farm traefik-file --all -o <this-file>
|
||||
# Or configure traefik_file in compose-farm.yaml for automatic updates.
|
||||
|
||||
http:
|
||||
routers:
|
||||
mealie:
|
||||
rule: Host(`mealie.example.com`)
|
||||
entrypoints:
|
||||
- websecure
|
||||
service: mealie
|
||||
mealie-local:
|
||||
rule: Host(`mealie.local`)
|
||||
entrypoints:
|
||||
- web
|
||||
service: mealie
|
||||
uptime:
|
||||
rule: Host(`uptime.example.com`)
|
||||
entrypoints:
|
||||
- websecure
|
||||
service: uptime
|
||||
uptime-local:
|
||||
rule: Host(`uptime.local`)
|
||||
entrypoints:
|
||||
- web
|
||||
service: uptime
|
||||
services:
|
||||
mealie:
|
||||
loadbalancer:
|
||||
servers:
|
||||
- url: http://192.168.1.11:9925
|
||||
uptime:
|
||||
loadbalancer:
|
||||
servers:
|
||||
- url: http://192.168.1.11:3001
|
||||
2
examples/uptime-kuma/.env
Normal file
2
examples/uptime-kuma/.env
Normal file
@@ -0,0 +1,2 @@
|
||||
# Copy to .env and fill in your values
|
||||
DOMAIN=example.com
|
||||
43
examples/uptime-kuma/compose.yaml
Normal file
43
examples/uptime-kuma/compose.yaml
Normal file
@@ -0,0 +1,43 @@
|
||||
# Uptime Kuma - Monitoring dashboard
|
||||
#
|
||||
# Demonstrates:
|
||||
# - HTTPS route: uptime.${DOMAIN} (e.g., uptime.example.com) with Let's Encrypt
|
||||
# - HTTP route: uptime.local for LAN access without TLS
|
||||
# - Docker socket access, user mapping for NFS, custom DNS
|
||||
name: uptime-kuma
|
||||
services:
|
||||
uptime-kuma:
|
||||
image: louislam/uptime-kuma:2
|
||||
container_name: uptime-kuma
|
||||
restart: unless-stopped
|
||||
# Run as non-root user (important for NFS volumes)
|
||||
user: "1000:1000"
|
||||
networks:
|
||||
- mynetwork
|
||||
ports:
|
||||
- "3001:3001"
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
- /mnt/data/uptime-kuma:/app/data
|
||||
environment:
|
||||
PUID: 1000
|
||||
PGID: 1000
|
||||
# Custom DNS for internal domain resolution
|
||||
dns:
|
||||
- 192.168.1.1 # Your local DNS server
|
||||
labels:
|
||||
# HTTPS route: uptime.example.com (requires DOMAIN in .env)
|
||||
- traefik.enable=true
|
||||
- traefik.http.routers.uptime.rule=Host(`uptime.${DOMAIN}`)
|
||||
- traefik.http.routers.uptime.entrypoints=websecure
|
||||
- traefik.http.services.uptime.loadbalancer.server.port=3001
|
||||
# HTTP route: uptime.local (for LAN access, no TLS)
|
||||
- traefik.http.routers.uptime-local.rule=Host(`uptime.local`)
|
||||
- traefik.http.routers.uptime-local.entrypoints=web
|
||||
# AutoKuma: automatically create Uptime Kuma monitor
|
||||
- kuma.uptime.http.name=Uptime Kuma
|
||||
- kuma.uptime.http.url=https://uptime.${DOMAIN}
|
||||
|
||||
networks:
|
||||
mynetwork:
|
||||
external: true
|
||||
@@ -3,19 +3,68 @@ name = "compose-farm"
|
||||
dynamic = ["version"]
|
||||
description = "Compose Farm - run docker compose commands across multiple hosts"
|
||||
readme = "README.md"
|
||||
license = "MIT"
|
||||
license-files = ["LICENSE"]
|
||||
authors = [
|
||||
{ name = "Bas Nijholt", email = "bas@nijho.lt" }
|
||||
]
|
||||
maintainers = [
|
||||
{ name = "Bas Nijholt", email = "bas@nijho.lt" }
|
||||
]
|
||||
requires-python = ">=3.11"
|
||||
keywords = [
|
||||
"docker",
|
||||
"docker-compose",
|
||||
"ssh",
|
||||
"devops",
|
||||
"deployment",
|
||||
"container",
|
||||
"orchestration",
|
||||
"multi-host",
|
||||
"homelab",
|
||||
"self-hosted",
|
||||
]
|
||||
classifiers = [
|
||||
"Development Status :: 4 - Beta",
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
"Intended Audience :: System Administrators",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Programming Language :: Python :: 3.14",
|
||||
"Topic :: System :: Systems Administration",
|
||||
"Topic :: Utilities",
|
||||
"Typing :: Typed",
|
||||
]
|
||||
dependencies = [
|
||||
"typer>=0.9.0",
|
||||
"pydantic>=2.0.0",
|
||||
"asyncssh>=2.14.0",
|
||||
"pyyaml>=6.0",
|
||||
"rich>=13.0.0",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
web = [
|
||||
"fastapi[standard]>=0.109.0",
|
||||
"jinja2>=3.1.0",
|
||||
"websockets>=12.0",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://github.com/basnijholt/compose-farm"
|
||||
Repository = "https://github.com/basnijholt/compose-farm"
|
||||
Documentation = "https://github.com/basnijholt/compose-farm#readme"
|
||||
Issues = "https://github.com/basnijholt/compose-farm/issues"
|
||||
Changelog = "https://github.com/basnijholt/compose-farm/releases"
|
||||
|
||||
[project.scripts]
|
||||
compose-farm = "compose_farm.cli:app"
|
||||
cf = "compose_farm.cli:app"
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling", "hatch-vcs"]
|
||||
@@ -59,7 +108,7 @@ ignore = [
|
||||
]
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"tests/*" = ["S101", "PLR2004", "S108", "D102", "D103"] # relaxed docstrings + asserts in tests
|
||||
"tests/*" = ["S101", "PLR2004", "S108", "D102", "D103", "PLC0415", "ARG001", "ARG002", "TC003"] # relaxed for tests
|
||||
|
||||
[tool.ruff.lint.mccabe]
|
||||
max-complexity = 18
|
||||
@@ -77,6 +126,10 @@ ignore_missing_imports = true
|
||||
module = "tests.*"
|
||||
disallow_untyped_decorators = false
|
||||
|
||||
[[tool.mypy.overrides]]
|
||||
module = "compose_farm.web.*"
|
||||
disallow_untyped_decorators = false
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
asyncio_mode = "auto"
|
||||
testpaths = ["tests"]
|
||||
@@ -111,4 +164,11 @@ dev = [
|
||||
"ruff>=0.14.8",
|
||||
"types-pyyaml>=6.0.12.20250915",
|
||||
"markdown-code-runner>=0.7.0",
|
||||
# Web deps for type checking (these ship with inline types)
|
||||
"fastapi>=0.109.0",
|
||||
"uvicorn[standard]>=0.27.0",
|
||||
"jinja2>=3.1.0",
|
||||
"websockets>=12.0",
|
||||
# For FastAPI TestClient
|
||||
"httpx>=0.28.0",
|
||||
]
|
||||
|
||||
@@ -1,380 +0,0 @@
|
||||
"""CLI interface using Typer."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Annotated, TypeVar
|
||||
|
||||
import typer
|
||||
import yaml
|
||||
|
||||
from . import __version__
|
||||
from .config import Config, load_config
|
||||
from .logs import snapshot_services
|
||||
from .ssh import (
|
||||
CommandResult,
|
||||
check_service_running,
|
||||
run_compose,
|
||||
run_compose_on_host,
|
||||
run_on_services,
|
||||
run_sequential_on_services,
|
||||
)
|
||||
from .state import get_service_host, load_state, remove_service, save_state, set_service_host
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Coroutine
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
def _version_callback(value: bool) -> None:
|
||||
"""Print version and exit."""
|
||||
if value:
|
||||
typer.echo(f"compose-farm {__version__}")
|
||||
raise typer.Exit
|
||||
|
||||
|
||||
app = typer.Typer(
|
||||
name="compose-farm",
|
||||
help="Compose Farm - run docker compose commands across multiple hosts",
|
||||
no_args_is_help=True,
|
||||
)
|
||||
|
||||
|
||||
@app.callback()
|
||||
def main(
|
||||
version: Annotated[
|
||||
bool,
|
||||
typer.Option(
|
||||
"--version",
|
||||
"-v",
|
||||
help="Show version and exit",
|
||||
callback=_version_callback,
|
||||
is_eager=True,
|
||||
),
|
||||
] = False,
|
||||
) -> None:
|
||||
"""Compose Farm - run docker compose commands across multiple hosts."""
|
||||
|
||||
|
||||
def _get_services(
|
||||
services: list[str],
|
||||
all_services: bool,
|
||||
config_path: Path | None,
|
||||
) -> tuple[list[str], Config]:
|
||||
"""Resolve service list and load config."""
|
||||
config = load_config(config_path)
|
||||
|
||||
if all_services:
|
||||
return list(config.services.keys()), config
|
||||
if not services:
|
||||
typer.echo("Error: Specify services or use --all", err=True)
|
||||
raise typer.Exit(1)
|
||||
return list(services), config
|
||||
|
||||
|
||||
def _run_async(coro: Coroutine[None, None, T]) -> T:
|
||||
"""Run async coroutine."""
|
||||
return asyncio.run(coro)
|
||||
|
||||
|
||||
def _report_results(results: list[CommandResult]) -> None:
|
||||
"""Report command results and exit with appropriate code."""
|
||||
failed = [r for r in results if not r.success]
|
||||
if failed:
|
||||
for r in failed:
|
||||
typer.echo(f"[{r.service}] Failed with exit code {r.exit_code}", err=True)
|
||||
raise typer.Exit(1)
|
||||
|
||||
|
||||
ServicesArg = Annotated[
|
||||
list[str] | None,
|
||||
typer.Argument(help="Services to operate on"),
|
||||
]
|
||||
AllOption = Annotated[
|
||||
bool,
|
||||
typer.Option("--all", "-a", help="Run on all services"),
|
||||
]
|
||||
ConfigOption = Annotated[
|
||||
Path | None,
|
||||
typer.Option("--config", "-c", help="Path to config file"),
|
||||
]
|
||||
LogPathOption = Annotated[
|
||||
Path | None,
|
||||
typer.Option("--log-path", "-l", help="Path to Dockerfarm TOML log"),
|
||||
]
|
||||
|
||||
|
||||
async def _up_with_migration(
|
||||
cfg: Config,
|
||||
services: list[str],
|
||||
) -> list[CommandResult]:
|
||||
"""Start services with automatic migration if host changed."""
|
||||
results: list[CommandResult] = []
|
||||
|
||||
for service in services:
|
||||
target_host = cfg.services[service]
|
||||
current_host = get_service_host(service)
|
||||
|
||||
# If service is deployed elsewhere, migrate it
|
||||
if current_host and current_host != target_host:
|
||||
if current_host in cfg.hosts:
|
||||
typer.echo(f"[{service}] Migrating from {current_host} to {target_host}...")
|
||||
down_result = await run_compose_on_host(cfg, service, current_host, "down")
|
||||
if not down_result.success:
|
||||
results.append(down_result)
|
||||
continue
|
||||
else:
|
||||
typer.echo(
|
||||
f"[{service}] Warning: was on {current_host} (not in config), skipping down",
|
||||
err=True,
|
||||
)
|
||||
|
||||
# Start on target host
|
||||
up_result = await run_compose(cfg, service, "up -d")
|
||||
results.append(up_result)
|
||||
|
||||
# Update state on success
|
||||
if up_result.success:
|
||||
set_service_host(service, target_host)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
@app.command()
|
||||
def up(
|
||||
services: ServicesArg = None,
|
||||
all_services: AllOption = False,
|
||||
config: ConfigOption = None,
|
||||
) -> None:
|
||||
"""Start services (docker compose up -d). Auto-migrates if host changed."""
|
||||
svc_list, cfg = _get_services(services or [], all_services, config)
|
||||
results = _run_async(_up_with_migration(cfg, svc_list))
|
||||
_report_results(results)
|
||||
|
||||
|
||||
@app.command()
|
||||
def down(
|
||||
services: ServicesArg = None,
|
||||
all_services: AllOption = False,
|
||||
config: ConfigOption = None,
|
||||
) -> None:
|
||||
"""Stop services (docker compose down)."""
|
||||
svc_list, cfg = _get_services(services or [], all_services, config)
|
||||
results = _run_async(run_on_services(cfg, svc_list, "down"))
|
||||
|
||||
# Remove from state on success
|
||||
for result in results:
|
||||
if result.success:
|
||||
remove_service(result.service)
|
||||
|
||||
_report_results(results)
|
||||
|
||||
|
||||
@app.command()
|
||||
def pull(
|
||||
services: ServicesArg = None,
|
||||
all_services: AllOption = False,
|
||||
config: ConfigOption = None,
|
||||
) -> None:
|
||||
"""Pull latest images (docker compose pull)."""
|
||||
svc_list, cfg = _get_services(services or [], all_services, config)
|
||||
results = _run_async(run_on_services(cfg, svc_list, "pull"))
|
||||
_report_results(results)
|
||||
|
||||
|
||||
@app.command()
|
||||
def restart(
|
||||
services: ServicesArg = None,
|
||||
all_services: AllOption = False,
|
||||
config: ConfigOption = None,
|
||||
) -> None:
|
||||
"""Restart services (down + up)."""
|
||||
svc_list, cfg = _get_services(services or [], all_services, config)
|
||||
results = _run_async(run_sequential_on_services(cfg, svc_list, ["down", "up -d"]))
|
||||
_report_results(results)
|
||||
|
||||
|
||||
@app.command()
|
||||
def update(
|
||||
services: ServicesArg = None,
|
||||
all_services: AllOption = False,
|
||||
config: ConfigOption = None,
|
||||
) -> None:
|
||||
"""Update services (pull + down + up)."""
|
||||
svc_list, cfg = _get_services(services or [], all_services, config)
|
||||
results = _run_async(run_sequential_on_services(cfg, svc_list, ["pull", "down", "up -d"]))
|
||||
_report_results(results)
|
||||
|
||||
|
||||
@app.command()
|
||||
def logs(
|
||||
services: ServicesArg = None,
|
||||
all_services: AllOption = False,
|
||||
follow: Annotated[bool, typer.Option("--follow", "-f", help="Follow logs")] = False,
|
||||
tail: Annotated[int, typer.Option("--tail", "-n", help="Number of lines")] = 100,
|
||||
config: ConfigOption = None,
|
||||
) -> None:
|
||||
"""Show service logs."""
|
||||
svc_list, cfg = _get_services(services or [], all_services, config)
|
||||
cmd = f"logs --tail {tail}"
|
||||
if follow:
|
||||
cmd += " -f"
|
||||
results = _run_async(run_on_services(cfg, svc_list, cmd))
|
||||
_report_results(results)
|
||||
|
||||
|
||||
@app.command()
|
||||
def ps(
|
||||
config: ConfigOption = None,
|
||||
) -> None:
|
||||
"""Show status of all services."""
|
||||
cfg = load_config(config)
|
||||
results = _run_async(run_on_services(cfg, list(cfg.services.keys()), "ps"))
|
||||
_report_results(results)
|
||||
|
||||
|
||||
@app.command("traefik-file")
|
||||
def traefik_file(
|
||||
services: ServicesArg = None,
|
||||
all_services: AllOption = False,
|
||||
output: Annotated[
|
||||
Path | None,
|
||||
typer.Option(
|
||||
"--output",
|
||||
"-o",
|
||||
help="Write Traefik file-provider YAML to this path (stdout if omitted)",
|
||||
),
|
||||
] = None,
|
||||
config: ConfigOption = None,
|
||||
) -> None:
|
||||
"""Generate a Traefik file-provider fragment from compose Traefik labels."""
|
||||
from .traefik import generate_traefik_config
|
||||
|
||||
svc_list, cfg = _get_services(services or [], all_services, config)
|
||||
try:
|
||||
dynamic, warnings = generate_traefik_config(cfg, svc_list)
|
||||
except (FileNotFoundError, ValueError) as exc:
|
||||
typer.echo(str(exc), err=True)
|
||||
raise typer.Exit(1) from exc
|
||||
|
||||
rendered = yaml.safe_dump(dynamic, sort_keys=False)
|
||||
|
||||
if output:
|
||||
output.parent.mkdir(parents=True, exist_ok=True)
|
||||
output.write_text(rendered)
|
||||
typer.echo(f"Traefik config written to {output}")
|
||||
else:
|
||||
typer.echo(rendered)
|
||||
|
||||
for warning in warnings:
|
||||
typer.echo(warning, err=True)
|
||||
|
||||
|
||||
async def _discover_running_services(cfg: Config) -> dict[str, str]:
|
||||
"""Discover which services are running on which hosts.
|
||||
|
||||
Returns a dict mapping service names to host names for running services.
|
||||
"""
|
||||
discovered: dict[str, str] = {}
|
||||
|
||||
for service, assigned_host in cfg.services.items():
|
||||
# Check assigned host first (most common case)
|
||||
if await check_service_running(cfg, service, assigned_host):
|
||||
discovered[service] = assigned_host
|
||||
continue
|
||||
|
||||
# Check other hosts in case service was migrated but state is stale
|
||||
for host_name in cfg.hosts:
|
||||
if host_name == assigned_host:
|
||||
continue
|
||||
if await check_service_running(cfg, service, host_name):
|
||||
discovered[service] = host_name
|
||||
break
|
||||
|
||||
return discovered
|
||||
|
||||
|
||||
def _report_sync_changes(
|
||||
added: list[str],
|
||||
removed: list[str],
|
||||
changed: list[tuple[str, str, str]],
|
||||
discovered: dict[str, str],
|
||||
current_state: dict[str, str],
|
||||
) -> None:
|
||||
"""Report sync changes to the user."""
|
||||
if added:
|
||||
typer.echo(f"\nNew services found ({len(added)}):")
|
||||
for service in sorted(added):
|
||||
typer.echo(f" + {service} on {discovered[service]}")
|
||||
|
||||
if changed:
|
||||
typer.echo(f"\nServices on different hosts ({len(changed)}):")
|
||||
for service, old_host, new_host in sorted(changed):
|
||||
typer.echo(f" ~ {service}: {old_host} -> {new_host}")
|
||||
|
||||
if removed:
|
||||
typer.echo(f"\nServices no longer running ({len(removed)}):")
|
||||
for service in sorted(removed):
|
||||
typer.echo(f" - {service} (was on {current_state[service]})")
|
||||
|
||||
|
||||
@app.command()
|
||||
def sync(
|
||||
config: ConfigOption = None,
|
||||
log_path: LogPathOption = None,
|
||||
dry_run: Annotated[
|
||||
bool,
|
||||
typer.Option("--dry-run", "-n", help="Show what would be synced without writing"),
|
||||
] = False,
|
||||
) -> None:
|
||||
"""Sync local state with running services.
|
||||
|
||||
Discovers which services are running on which hosts, updates the state
|
||||
file, and captures image digests. Combines service discovery with
|
||||
image snapshot into a single command.
|
||||
"""
|
||||
cfg = load_config(config)
|
||||
current_state = load_state()
|
||||
|
||||
typer.echo("Discovering running services...")
|
||||
discovered = _run_async(_discover_running_services(cfg))
|
||||
|
||||
# Calculate changes
|
||||
added = [s for s in discovered if s not in current_state]
|
||||
removed = [s for s in current_state if s not in discovered]
|
||||
changed = [
|
||||
(s, current_state[s], discovered[s])
|
||||
for s in discovered
|
||||
if s in current_state and current_state[s] != discovered[s]
|
||||
]
|
||||
|
||||
# Report state changes
|
||||
state_changed = bool(added or removed or changed)
|
||||
if state_changed:
|
||||
_report_sync_changes(added, removed, changed, discovered, current_state)
|
||||
else:
|
||||
typer.echo("State is already in sync.")
|
||||
|
||||
if dry_run:
|
||||
typer.echo("\n(dry-run: no changes made)")
|
||||
return
|
||||
|
||||
# Update state file
|
||||
if state_changed:
|
||||
save_state(discovered)
|
||||
typer.echo(f"\nState updated: {len(discovered)} services tracked.")
|
||||
|
||||
# Capture image digests for running services
|
||||
if discovered:
|
||||
typer.echo("\nCapturing image digests...")
|
||||
try:
|
||||
path = _run_async(snapshot_services(cfg, list(discovered.keys()), log_path=log_path))
|
||||
typer.echo(f"Digests written to {path}")
|
||||
except RuntimeError as exc:
|
||||
typer.echo(f"Warning: {exc}", err=True)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
app()
|
||||
20
src/compose_farm/cli/__init__.py
Normal file
20
src/compose_farm/cli/__init__.py
Normal file
@@ -0,0 +1,20 @@
|
||||
"""CLI interface using Typer."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
# Import command modules to trigger registration via @app.command() decorators
|
||||
from compose_farm.cli import (
|
||||
config, # noqa: F401
|
||||
lifecycle, # noqa: F401
|
||||
management, # noqa: F401
|
||||
monitoring, # noqa: F401
|
||||
web, # noqa: F401
|
||||
)
|
||||
|
||||
# Import the shared app instance
|
||||
from compose_farm.cli.app import app
|
||||
|
||||
__all__ = ["app"]
|
||||
|
||||
if __name__ == "__main__":
|
||||
app()
|
||||
42
src/compose_farm/cli/app.py
Normal file
42
src/compose_farm/cli/app.py
Normal file
@@ -0,0 +1,42 @@
|
||||
"""Shared Typer app instance."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Annotated
|
||||
|
||||
import typer
|
||||
|
||||
from compose_farm import __version__
|
||||
|
||||
__all__ = ["app"]
|
||||
|
||||
|
||||
def _version_callback(value: bool) -> None:
|
||||
"""Print version and exit."""
|
||||
if value:
|
||||
typer.echo(f"compose-farm {__version__}")
|
||||
raise typer.Exit
|
||||
|
||||
|
||||
app = typer.Typer(
|
||||
name="compose-farm",
|
||||
help="Compose Farm - run docker compose commands across multiple hosts",
|
||||
no_args_is_help=True,
|
||||
context_settings={"help_option_names": ["-h", "--help"]},
|
||||
)
|
||||
|
||||
|
||||
@app.callback()
|
||||
def main(
|
||||
version: Annotated[
|
||||
bool,
|
||||
typer.Option(
|
||||
"--version",
|
||||
"-v",
|
||||
help="Show version and exit",
|
||||
callback=_version_callback,
|
||||
is_eager=True,
|
||||
),
|
||||
] = False,
|
||||
) -> None:
|
||||
"""Compose Farm - run docker compose commands across multiple hosts."""
|
||||
243
src/compose_farm/cli/common.py
Normal file
243
src/compose_farm/cli/common.py
Normal file
@@ -0,0 +1,243 @@
|
||||
"""Shared CLI helpers, options, and utilities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import contextlib
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Annotated, TypeVar
|
||||
|
||||
import typer
|
||||
from rich.progress import (
|
||||
BarColumn,
|
||||
MofNCompleteColumn,
|
||||
Progress,
|
||||
SpinnerColumn,
|
||||
TaskID,
|
||||
TextColumn,
|
||||
TimeElapsedColumn,
|
||||
)
|
||||
|
||||
from compose_farm.console import console, err_console
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable, Coroutine, Generator
|
||||
|
||||
from compose_farm.config import Config
|
||||
from compose_farm.executor import CommandResult
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
# --- Shared CLI Options ---
|
||||
ServicesArg = Annotated[
|
||||
list[str] | None,
|
||||
typer.Argument(help="Services to operate on"),
|
||||
]
|
||||
AllOption = Annotated[
|
||||
bool,
|
||||
typer.Option("--all", "-a", help="Run on all services"),
|
||||
]
|
||||
ConfigOption = Annotated[
|
||||
Path | None,
|
||||
typer.Option("--config", "-c", help="Path to config file"),
|
||||
]
|
||||
LogPathOption = Annotated[
|
||||
Path | None,
|
||||
typer.Option("--log-path", "-l", help="Path to Dockerfarm TOML log"),
|
||||
]
|
||||
HostOption = Annotated[
|
||||
str | None,
|
||||
typer.Option("--host", "-H", help="Filter to services on this host"),
|
||||
]
|
||||
|
||||
# --- Constants (internal) ---
|
||||
_MISSING_PATH_PREVIEW_LIMIT = 2
|
||||
_STATS_PREVIEW_LIMIT = 3 # Max number of pending migrations to show by name
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def progress_bar(
|
||||
label: str, total: int, *, initial_description: str = "[dim]connecting...[/]"
|
||||
) -> Generator[tuple[Progress, TaskID], None, None]:
|
||||
"""Create a standardized progress bar with consistent styling.
|
||||
|
||||
Yields (progress, task_id). Use progress.update(task_id, advance=1, description=...)
|
||||
to advance.
|
||||
"""
|
||||
with Progress(
|
||||
SpinnerColumn(),
|
||||
TextColumn(f"[bold blue]{label}[/]"),
|
||||
BarColumn(),
|
||||
MofNCompleteColumn(),
|
||||
TextColumn("•"),
|
||||
TimeElapsedColumn(),
|
||||
TextColumn("•"),
|
||||
TextColumn("[progress.description]{task.description}"),
|
||||
console=console,
|
||||
transient=True,
|
||||
) as progress:
|
||||
task_id = progress.add_task(initial_description, total=total)
|
||||
yield progress, task_id
|
||||
|
||||
|
||||
def load_config_or_exit(config_path: Path | None) -> Config:
|
||||
"""Load config or exit with a friendly error message."""
|
||||
# Lazy import: pydantic adds ~50ms to startup, only load when actually needed
|
||||
from compose_farm.config import load_config # noqa: PLC0415
|
||||
|
||||
try:
|
||||
return load_config(config_path)
|
||||
except FileNotFoundError as e:
|
||||
err_console.print(f"[red]✗[/] {e}")
|
||||
raise typer.Exit(1) from e
|
||||
|
||||
|
||||
def get_services(
|
||||
services: list[str],
|
||||
all_services: bool,
|
||||
config_path: Path | None,
|
||||
) -> tuple[list[str], Config]:
|
||||
"""Resolve service list and load config.
|
||||
|
||||
Supports "." as shorthand for the current directory name.
|
||||
"""
|
||||
config = load_config_or_exit(config_path)
|
||||
|
||||
if all_services:
|
||||
return list(config.services.keys()), config
|
||||
if not services:
|
||||
err_console.print("[red]✗[/] Specify services or use --all")
|
||||
raise typer.Exit(1)
|
||||
|
||||
# Resolve "." to current directory name
|
||||
resolved = [Path.cwd().name if svc == "." else svc for svc in services]
|
||||
|
||||
# Validate all services exist in config
|
||||
unknown = [svc for svc in resolved if svc not in config.services]
|
||||
if unknown:
|
||||
for svc in unknown:
|
||||
err_console.print(f"[red]✗[/] Unknown service: [cyan]{svc}[/]")
|
||||
err_console.print("[dim]Hint: Add the service to compose-farm.yaml or use --all[/]")
|
||||
raise typer.Exit(1)
|
||||
|
||||
return resolved, config
|
||||
|
||||
|
||||
def run_async(coro: Coroutine[None, None, _T]) -> _T:
|
||||
"""Run async coroutine."""
|
||||
try:
|
||||
return asyncio.run(coro)
|
||||
except KeyboardInterrupt:
|
||||
console.print("\n[yellow]Interrupted[/]")
|
||||
raise typer.Exit(130) from None # Standard exit code for SIGINT
|
||||
|
||||
|
||||
def report_results(results: list[CommandResult]) -> None:
|
||||
"""Report command results and exit with appropriate code."""
|
||||
succeeded = [r for r in results if r.success]
|
||||
failed = [r for r in results if not r.success]
|
||||
|
||||
# Always print summary when there are multiple results
|
||||
if len(results) > 1:
|
||||
console.print() # Blank line before summary
|
||||
if failed:
|
||||
for r in failed:
|
||||
err_console.print(
|
||||
f"[red]✗[/] [cyan]{r.service}[/] failed with exit code {r.exit_code}"
|
||||
)
|
||||
console.print()
|
||||
console.print(
|
||||
f"[green]✓[/] {len(succeeded)}/{len(results)} services succeeded, "
|
||||
f"[red]✗[/] {len(failed)} failed"
|
||||
)
|
||||
else:
|
||||
console.print(f"[green]✓[/] All {len(results)} services succeeded")
|
||||
|
||||
elif failed:
|
||||
# Single service failed
|
||||
r = failed[0]
|
||||
err_console.print(f"[red]✗[/] [cyan]{r.service}[/] failed with exit code {r.exit_code}")
|
||||
|
||||
if failed:
|
||||
raise typer.Exit(1)
|
||||
|
||||
|
||||
def maybe_regenerate_traefik(
|
||||
cfg: Config,
|
||||
results: list[CommandResult] | None = None,
|
||||
) -> None:
|
||||
"""Regenerate traefik config if traefik_file is configured.
|
||||
|
||||
If results are provided, skips regeneration if all services failed.
|
||||
"""
|
||||
if cfg.traefik_file is None:
|
||||
return
|
||||
|
||||
# Skip if all services failed
|
||||
if results and not any(r.success for r in results):
|
||||
return
|
||||
|
||||
# Lazy import: traefik/yaml adds startup time, only load when traefik_file is configured
|
||||
from compose_farm.traefik import ( # noqa: PLC0415
|
||||
generate_traefik_config,
|
||||
render_traefik_config,
|
||||
)
|
||||
|
||||
try:
|
||||
dynamic, warnings = generate_traefik_config(cfg, list(cfg.services.keys()))
|
||||
new_content = render_traefik_config(dynamic)
|
||||
|
||||
# Check if content changed
|
||||
old_content = ""
|
||||
if cfg.traefik_file.exists():
|
||||
old_content = cfg.traefik_file.read_text()
|
||||
|
||||
if new_content != old_content:
|
||||
cfg.traefik_file.parent.mkdir(parents=True, exist_ok=True)
|
||||
cfg.traefik_file.write_text(new_content)
|
||||
console.print() # Ensure we're on a new line after streaming output
|
||||
console.print(f"[green]✓[/] Traefik config updated: {cfg.traefik_file}")
|
||||
|
||||
for warning in warnings:
|
||||
err_console.print(f"[yellow]![/] {warning}")
|
||||
except (FileNotFoundError, ValueError) as exc:
|
||||
err_console.print(f"[yellow]![/] Failed to update traefik config: {exc}")
|
||||
|
||||
|
||||
def validate_host_for_service(cfg: Config, service: str, host: str) -> None:
|
||||
"""Validate that a host is valid for a service."""
|
||||
if host not in cfg.hosts:
|
||||
err_console.print(f"[red]✗[/] Host '{host}' not found in config")
|
||||
raise typer.Exit(1)
|
||||
allowed_hosts = cfg.get_hosts(service)
|
||||
if host not in allowed_hosts:
|
||||
err_console.print(
|
||||
f"[red]✗[/] Service '{service}' is not configured for host '{host}' "
|
||||
f"(configured: {', '.join(allowed_hosts)})"
|
||||
)
|
||||
raise typer.Exit(1)
|
||||
|
||||
|
||||
def run_host_operation(
|
||||
cfg: Config,
|
||||
svc_list: list[str],
|
||||
host: str,
|
||||
command: str,
|
||||
action_verb: str,
|
||||
state_callback: Callable[[Config, str, str], None],
|
||||
) -> None:
|
||||
"""Run an operation on a specific host for multiple services."""
|
||||
from compose_farm.executor import run_compose_on_host # noqa: PLC0415
|
||||
|
||||
results: list[CommandResult] = []
|
||||
for service in svc_list:
|
||||
validate_host_for_service(cfg, service, host)
|
||||
console.print(f"[cyan]\\[{service}][/] {action_verb} on [magenta]{host}[/]...")
|
||||
result = run_async(run_compose_on_host(cfg, service, host, command, raw=True))
|
||||
print() # Newline after raw output
|
||||
results.append(result)
|
||||
if result.success:
|
||||
state_callback(cfg, service, host)
|
||||
maybe_regenerate_traefik(cfg, results)
|
||||
report_results(results)
|
||||
323
src/compose_farm/cli/config.py
Normal file
323
src/compose_farm/cli/config.py
Normal file
@@ -0,0 +1,323 @@
|
||||
"""Configuration management commands for compose-farm."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import platform
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
from importlib import resources
|
||||
from pathlib import Path
|
||||
from typing import Annotated
|
||||
|
||||
import typer
|
||||
|
||||
from compose_farm.cli.app import app
|
||||
from compose_farm.console import console, err_console
|
||||
from compose_farm.paths import config_search_paths, default_config_path
|
||||
|
||||
config_app = typer.Typer(
|
||||
name="config",
|
||||
help="Manage compose-farm configuration files.",
|
||||
no_args_is_help=True,
|
||||
)
|
||||
|
||||
|
||||
# --- CLI Options (same pattern as cli.py) ---
|
||||
_PathOption = Annotated[
|
||||
Path | None,
|
||||
typer.Option("--path", "-p", help="Path to config file. Uses auto-detection if not specified."),
|
||||
]
|
||||
_ForceOption = Annotated[
|
||||
bool,
|
||||
typer.Option("--force", "-f", help="Overwrite existing config without confirmation."),
|
||||
]
|
||||
_RawOption = Annotated[
|
||||
bool,
|
||||
typer.Option("--raw", "-r", help="Output raw file contents (for copy-paste)."),
|
||||
]
|
||||
|
||||
|
||||
def _get_editor() -> str:
|
||||
"""Get the user's preferred editor.
|
||||
|
||||
Checks $EDITOR, then $VISUAL, then falls back to platform defaults.
|
||||
"""
|
||||
for env_var in ("EDITOR", "VISUAL"):
|
||||
editor = os.environ.get(env_var)
|
||||
if editor:
|
||||
return editor
|
||||
|
||||
if platform.system() == "Windows":
|
||||
return "notepad"
|
||||
|
||||
# Try common editors on Unix-like systems
|
||||
for editor in ("nano", "vim", "vi"):
|
||||
if shutil.which(editor):
|
||||
return editor
|
||||
|
||||
return "vi"
|
||||
|
||||
|
||||
def _generate_template() -> str:
|
||||
"""Generate a config template with documented schema."""
|
||||
try:
|
||||
template_file = resources.files("compose_farm") / "example-config.yaml"
|
||||
return template_file.read_text(encoding="utf-8")
|
||||
except FileNotFoundError as e:
|
||||
err_console.print("[red]Example config template is missing from the package.[/red]")
|
||||
err_console.print("Reinstall compose-farm or report this issue.")
|
||||
raise typer.Exit(1) from e
|
||||
|
||||
|
||||
def _get_config_file(path: Path | None) -> Path | None:
|
||||
"""Resolve config path, or auto-detect from standard locations."""
|
||||
if path:
|
||||
return path.expanduser().resolve()
|
||||
|
||||
# Check environment variable
|
||||
if env_path := os.environ.get("CF_CONFIG"):
|
||||
p = Path(env_path)
|
||||
if p.exists():
|
||||
return p.resolve()
|
||||
|
||||
# Check standard locations
|
||||
for p in config_search_paths():
|
||||
if p.exists():
|
||||
return p.resolve()
|
||||
|
||||
return None
|
||||
|
||||
|
||||
@config_app.command("init")
def config_init(
    path: _PathOption = None,
    force: _ForceOption = False,
) -> None:
    """Create a new config file with documented example.

    The generated config file serves as a template showing all available
    options with explanatory comments.
    """
    target_path = (path.expanduser().resolve() if path else None) or default_config_path()

    # Refuse to silently clobber an existing file: require --force or an
    # interactive confirmation.
    if target_path.exists() and not force:
        console.print(
            f"[bold yellow]Config file already exists at:[/bold yellow] [cyan]{target_path}[/cyan]",
        )
        if not typer.confirm("Overwrite existing config file?"):
            console.print("[dim]Aborted.[/dim]")
            raise typer.Exit(0)

    # Ensure the destination directory exists, then write the template.
    target_path.parent.mkdir(parents=True, exist_ok=True)
    target_path.write_text(_generate_template(), encoding="utf-8")

    console.print(f"[green]✓[/] Config file created at: {target_path}")
    console.print("\n[dim]Edit the file to customize your settings:[/dim]")
    console.print(" [cyan]cf config edit[/cyan]")
|
||||
|
||||
|
||||
@config_app.command("edit")
def config_edit(
    path: _PathOption = None,
) -> None:
    """Open the config file in your default editor.

    The editor is determined by: $EDITOR > $VISUAL > platform default.
    """
    config_file = _get_config_file(path)

    # Nothing found via --path, $CF_CONFIG, or the search paths: show the
    # user where we looked and how to create a config.
    if config_file is None:
        console.print("[yellow]No config file found.[/yellow]")
        console.print("\nRun [bold cyan]cf config init[/bold cyan] to create one.")
        console.print("\nSearched locations:")
        for p in config_search_paths():
            console.print(f" - {p}")
        raise typer.Exit(1)

    # An explicitly provided --path may point at a nonexistent file.
    if not config_file.exists():
        console.print("[yellow]Config file not found.[/yellow]")
        console.print(f"\nProvided path does not exist: [cyan]{config_file}[/cyan]")
        console.print("\nRun [bold cyan]cf config init[/bold cyan] to create one.")
        raise typer.Exit(1)

    editor = _get_editor()
    console.print(f"[dim]Opening {config_file} with {editor}...[/dim]")

    # $EDITOR may contain arguments (e.g. "code --wait"): split shell-style.
    # posix=False on Windows so backslashes in paths are not treated as escapes.
    try:
        editor_cmd = shlex.split(editor, posix=os.name != "nt")
    except ValueError as e:
        err_console.print("[red]Invalid editor command. Check $EDITOR/$VISUAL.[/red]")
        raise typer.Exit(1) from e

    if not editor_cmd:
        err_console.print("[red]Editor command is empty.[/red]")
        raise typer.Exit(1)

    try:
        subprocess.run([*editor_cmd, str(config_file)], check=True)
    except FileNotFoundError:
        err_console.print(f"[red]Editor '{editor_cmd[0]}' not found.[/red]")
        err_console.print("Set $EDITOR environment variable to your preferred editor.")
        raise typer.Exit(1) from None
    except subprocess.CalledProcessError as e:
        # Propagate the editor's own exit code to our caller.
        err_console.print(f"[red]Editor exited with error code {e.returncode}[/red]")
        raise typer.Exit(e.returncode) from None
|
||||
|
||||
|
||||
@config_app.command("show")
def config_show(
    path: _PathOption = None,
    raw: _RawOption = False,
) -> None:
    """Display the config file location and contents."""
    config_file = _get_config_file(path)

    # No config anywhere: list search locations. Exit 0 — absence of a
    # config is not an error for a read-only "show".
    if config_file is None:
        console.print("[yellow]No config file found.[/yellow]")
        console.print("\nSearched locations:")
        for p in config_search_paths():
            status = "[green]exists[/green]" if p.exists() else "[dim]not found[/dim]"
            console.print(f" - {p} ({status})")
        console.print("\nRun [bold cyan]cf config init[/bold cyan] to create one.")
        raise typer.Exit(0)

    # Explicit --path pointing at a missing file is an error (exit 1).
    if not config_file.exists():
        console.print("[yellow]Config file not found.[/yellow]")
        console.print(f"\nProvided path does not exist: [cyan]{config_file}[/cyan]")
        console.print("\nRun [bold cyan]cf config init[/bold cyan] to create one.")
        raise typer.Exit(1)

    content = config_file.read_text(encoding="utf-8")

    # --raw: plain stdout with no rich markup so output pipes cleanly.
    if raw:
        print(content, end="")
        return

    # Lazy import: rich.syntax is only needed on the pretty-print path.
    from rich.syntax import Syntax  # noqa: PLC0415

    console.print(f"[bold green]Config file:[/bold green] [cyan]{config_file}[/cyan]")
    console.print()
    syntax = Syntax(content, "yaml", theme="monokai", line_numbers=True, word_wrap=True)
    console.print(syntax)
    console.print()
    console.print("[dim]Tip: Use -r for copy-paste friendly output[/dim]")
|
||||
|
||||
|
||||
@config_app.command("path")
def config_path(
    path: _PathOption = None,
) -> None:
    """Print the config file path (useful for scripting)."""
    config_file = _get_config_file(path)

    if config_file is not None:
        # Plain print (no rich markup) so the path pipes cleanly.
        print(config_file)
        return

    console.print("[yellow]No config file found.[/yellow]")
    console.print("\nSearched locations:")
    for candidate in config_search_paths():
        status = "[green]exists[/green]" if candidate.exists() else "[dim]not found[/dim]"
        console.print(f" - {candidate} ({status})")
    raise typer.Exit(1)
|
||||
|
||||
|
||||
@config_app.command("validate")
def config_validate(
    path: _PathOption = None,
) -> None:
    """Validate the config file syntax and schema."""
    config_file = _get_config_file(path)

    if config_file is None:
        err_console.print("[red]✗[/] No config file found")
        raise typer.Exit(1)

    # Lazy import: pydantic adds ~50ms to startup, only load when actually needed
    from compose_farm.config import load_config  # noqa: PLC0415

    try:
        cfg = load_config(config_file)
    except FileNotFoundError as e:
        err_console.print(f"[red]✗[/] {e}")
        raise typer.Exit(1) from e
    except Exception as e:  # broad: surface any parse/validation error as exit 1
        err_console.print(f"[red]✗[/] Invalid config: {e}")
        raise typer.Exit(1) from e

    # Summarize what was parsed so the user can sanity-check the counts.
    console.print(f"[green]✓[/] Valid config: {config_file}")
    console.print(f" Hosts: {len(cfg.hosts)}")
    console.print(f" Services: {len(cfg.services)}")
|
||||
|
||||
|
||||
@config_app.command("symlink")
def config_symlink(
    target: Annotated[
        Path | None,
        typer.Argument(help="Config file to link to. Defaults to ./compose-farm.yaml"),
    ] = None,
    force: _ForceOption = False,
) -> None:
    """Create a symlink from the default config location to a config file.

    This makes a local config file discoverable globally without copying.
    Always uses absolute paths to avoid broken symlinks.

    Examples:
        cf config symlink # Link to ./compose-farm.yaml
        cf config symlink /opt/compose/config.yaml # Link to specific file

    """
    # Default to compose-farm.yaml in current directory
    target_path = (target or Path("compose-farm.yaml")).expanduser().resolve()

    if not target_path.exists():
        err_console.print(f"[red]✗[/] Target config file not found: {target_path}")
        raise typer.Exit(1)

    if not target_path.is_file():
        err_console.print(f"[red]✗[/] Target is not a file: {target_path}")
        raise typer.Exit(1)

    symlink_path = default_config_path()

    # is_symlink() is checked separately because exists() follows the link
    # and returns False for a broken symlink.
    if symlink_path.exists() or symlink_path.is_symlink():
        if symlink_path.is_symlink():
            # Broken link -> None (resolve() on a dangling link is unreliable).
            current_target = symlink_path.resolve() if symlink_path.exists() else None
            if current_target == target_path:
                console.print(f"[green]✓[/] Symlink already points to: {target_path}")
                return
            # Update existing symlink (confirm unless --force).
            if not force:
                existing = symlink_path.readlink()
                console.print(f"[yellow]Symlink exists:[/] {symlink_path} -> {existing}")
                if not typer.confirm(f"Update to point to {target_path}?"):
                    console.print("[dim]Aborted.[/dim]")
                    raise typer.Exit(0)
            symlink_path.unlink()
        else:
            # Regular file exists: never delete user data automatically.
            err_console.print(f"[red]✗[/] A regular file exists at: {symlink_path}")
            err_console.print(" Back it up or remove it first, then retry.")
            raise typer.Exit(1)

    # Create parent directories
    symlink_path.parent.mkdir(parents=True, exist_ok=True)

    # Create symlink with absolute path
    symlink_path.symlink_to(target_path)

    console.print("[green]✓[/] Created symlink:")
    console.print(f" {symlink_path}")
    console.print(f" -> {target_path}")
|
||||
|
||||
|
||||
# Register the `config` command group on the shared top-level app.
app.add_typer(config_app, name="config", rich_help_panel="Configuration")
|
||||
# ==== begin file: src/compose_farm/cli/lifecycle.py (new file, 299 lines) ====
|
||||
"""Lifecycle commands: up, down, pull, restart, update, apply."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Annotated
|
||||
|
||||
import typer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from compose_farm.config import Config
|
||||
|
||||
from compose_farm.cli.app import app
|
||||
from compose_farm.cli.common import (
|
||||
AllOption,
|
||||
ConfigOption,
|
||||
HostOption,
|
||||
ServicesArg,
|
||||
get_services,
|
||||
load_config_or_exit,
|
||||
maybe_regenerate_traefik,
|
||||
report_results,
|
||||
run_async,
|
||||
run_host_operation,
|
||||
)
|
||||
from compose_farm.console import console, err_console
|
||||
from compose_farm.executor import run_on_services, run_sequential_on_services
|
||||
from compose_farm.operations import stop_orphaned_services, up_services
|
||||
from compose_farm.state import (
|
||||
add_service_to_host,
|
||||
get_orphaned_services,
|
||||
get_service_host,
|
||||
get_services_needing_migration,
|
||||
get_services_not_in_state,
|
||||
remove_service,
|
||||
remove_service_from_host,
|
||||
)
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Lifecycle")
def up(
    services: ServicesArg = None,
    all_services: AllOption = False,
    host: HostOption = None,
    config: ConfigOption = None,
) -> None:
    """Start services (docker compose up -d). Auto-migrates if host changed."""
    selected, cfg = get_services(services or [], all_services, config)

    # Constrained to a single host: skip the migration machinery entirely.
    if host:
        run_host_operation(cfg, selected, host, "up -d", "Starting", add_service_to_host)
        return

    # Default path: up_services handles cross-host migration and state updates.
    outcome = run_async(up_services(cfg, selected, raw=True))
    maybe_regenerate_traefik(cfg, outcome)
    report_results(outcome)
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Lifecycle")
def down(
    services: ServicesArg = None,
    all_services: AllOption = False,
    orphaned: Annotated[
        bool,
        typer.Option(
            "--orphaned", help="Stop orphaned services (in state but removed from config)"
        ),
    ] = False,
    host: HostOption = None,
    config: ConfigOption = None,
) -> None:
    """Stop services (docker compose down)."""
    # --orphaned is a standalone mode operating on state-only services; it is
    # mutually exclusive with explicit service/host selection.
    if orphaned:
        if services or all_services or host:
            err_console.print("[red]✗[/] Cannot use --orphaned with services, --all, or --host")
            raise typer.Exit(1)

        cfg = load_config_or_exit(config)
        orphaned_services = get_orphaned_services(cfg)

        if not orphaned_services:
            console.print("[green]✓[/] No orphaned services to stop")
            return

        console.print(
            f"[yellow]Stopping {len(orphaned_services)} orphaned service(s):[/] "
            f"{', '.join(orphaned_services.keys())}"
        )
        results = run_async(stop_orphaned_services(cfg))
        report_results(results)
        return

    svc_list, cfg = get_services(services or [], all_services, config)

    # Per-host operation: run on specific host only
    if host:
        run_host_operation(cfg, svc_list, host, "down", "Stopping", remove_service_from_host)
        return

    # Normal operation: stream raw compose output only for a single service.
    raw = len(svc_list) == 1
    results = run_async(run_on_services(cfg, svc_list, "down", raw=raw))

    # Remove from state on success.
    # For multi-host services, result.service is "svc@host", extract base name
    # and deduplicate so each service is removed from state at most once.
    removed_services: set[str] = set()
    for result in results:
        if result.success:
            base_service = result.service.split("@")[0]
            if base_service not in removed_services:
                remove_service(cfg, base_service)
                removed_services.add(base_service)

    maybe_regenerate_traefik(cfg, results)
    report_results(results)
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Lifecycle")
def pull(
    services: ServicesArg = None,
    all_services: AllOption = False,
    config: ConfigOption = None,
) -> None:
    """Pull latest images (docker compose pull)."""
    selected, cfg = get_services(services or [], all_services, config)
    # Stream raw compose output only when a single service is targeted.
    stream_raw = len(selected) == 1
    outcome = run_async(run_on_services(cfg, selected, "pull", raw=stream_raw))
    report_results(outcome)
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Lifecycle")
def restart(
    services: ServicesArg = None,
    all_services: AllOption = False,
    config: ConfigOption = None,
) -> None:
    """Restart services (down + up)."""
    selected, cfg = get_services(services or [], all_services, config)
    # Stream raw compose output only when a single service is targeted.
    stream_raw = len(selected) == 1
    steps = ["down", "up -d"]
    outcome = run_async(run_sequential_on_services(cfg, selected, steps, raw=stream_raw))
    maybe_regenerate_traefik(cfg, outcome)
    report_results(outcome)
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Lifecycle")
def update(
    services: ServicesArg = None,
    all_services: AllOption = False,
    config: ConfigOption = None,
) -> None:
    """Update services (pull + build + down + up)."""
    selected, cfg = get_services(services or [], all_services, config)
    # Stream raw compose output only when a single service is targeted.
    stream_raw = len(selected) == 1
    # `pull --ignore-buildable` skips locally-built images; `build` refreshes
    # those instead, then the service is bounced with down/up.
    steps = ["pull --ignore-buildable", "build", "down", "up -d"]
    outcome = run_async(run_sequential_on_services(cfg, selected, steps, raw=stream_raw))
    maybe_regenerate_traefik(cfg, outcome)
    report_results(outcome)
|
||||
|
||||
|
||||
def _format_host(host: str | list[str]) -> str:
|
||||
"""Format a host value for display."""
|
||||
if isinstance(host, list):
|
||||
return ", ".join(host)
|
||||
return host
|
||||
|
||||
|
||||
def _report_pending_migrations(cfg: Config, migrations: list[str]) -> None:
    """Print each service that must move, with current and target host."""
    console.print(f"[cyan]Services to migrate ({len(migrations)}):[/]")
    for name in migrations:
        origin = get_service_host(cfg, name)
        destination = cfg.get_hosts(name)[0]
        console.print(f" [cyan]{name}[/]: [magenta]{origin}[/] → [magenta]{destination}[/]")
|
||||
|
||||
|
||||
def _report_pending_orphans(orphaned: dict[str, str | list[str]]) -> None:
    """List the orphaned services that are about to be stopped."""
    console.print(f"[yellow]Orphaned services to stop ({len(orphaned)}):[/]")
    for name, hosts in orphaned.items():
        console.print(f" [cyan]{name}[/] on [magenta]{_format_host(hosts)}[/]")
|
||||
|
||||
|
||||
def _report_pending_starts(cfg: Config, missing: list[str]) -> None:
    """List services that are not yet running and their configured hosts."""
    console.print(f"[green]Services to start ({len(missing)}):[/]")
    for name in missing:
        destination = _format_host(cfg.get_hosts(name))
        console.print(f" [cyan]{name}[/] on [magenta]{destination}[/]")
|
||||
|
||||
|
||||
def _report_pending_refresh(cfg: Config, to_refresh: list[str]) -> None:
    """List already-running services that will be re-upped (--full)."""
    console.print(f"[blue]Services to refresh ({len(to_refresh)}):[/]")
    for name in to_refresh:
        destination = _format_host(cfg.get_hosts(name))
        console.print(f" [cyan]{name}[/] on [magenta]{destination}[/]")
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Lifecycle")
def apply(
    dry_run: Annotated[
        bool,
        typer.Option("--dry-run", "-n", help="Show what would change without executing"),
    ] = False,
    no_orphans: Annotated[
        bool,
        typer.Option("--no-orphans", help="Only migrate, don't stop orphaned services"),
    ] = False,
    full: Annotated[
        bool,
        typer.Option("--full", "-f", help="Also run up on all services to apply config changes"),
    ] = False,
    config: ConfigOption = None,
) -> None:
    """Make reality match config (start, migrate, stop as needed).

    This is the "reconcile" command that ensures running services match your
    config file. It will:

    1. Stop orphaned services (in state but removed from config)
    2. Migrate services on wrong host (host in state ≠ host in config)
    3. Start missing services (in config but not in state)

    Use --dry-run to preview changes before applying.
    Use --no-orphans to only migrate/start without stopping orphaned services.
    Use --full to also run 'up' on all services (picks up compose/env changes).
    """
    cfg = load_config_or_exit(config)
    # Diff recorded state against the config to compute the work lists.
    orphaned = get_orphaned_services(cfg)
    migrations = get_services_needing_migration(cfg)
    missing = get_services_not_in_state(cfg)

    # For --full: refresh all services not already being started/migrated
    handled = set(migrations) | set(missing)
    to_refresh = [svc for svc in cfg.services if svc not in handled] if full else []

    has_orphans = bool(orphaned) and not no_orphans
    has_migrations = bool(migrations)
    has_missing = bool(missing)
    has_refresh = bool(to_refresh)

    if not has_orphans and not has_migrations and not has_missing and not has_refresh:
        console.print("[green]✓[/] Nothing to apply - reality matches config")
        return

    # Report the full plan before touching anything.
    if has_orphans:
        _report_pending_orphans(orphaned)
    if has_migrations:
        _report_pending_migrations(cfg, migrations)
    if has_missing:
        _report_pending_starts(cfg, missing)
    if has_refresh:
        _report_pending_refresh(cfg, to_refresh)

    if dry_run:
        console.print("\n[dim](dry-run: no changes made)[/]")
        return

    # Execute changes
    console.print()
    all_results = []

    # 1. Stop orphaned services first
    if has_orphans:
        console.print("[yellow]Stopping orphaned services...[/]")
        all_results.extend(run_async(stop_orphaned_services(cfg)))

    # 2. Migrate services on wrong host
    if has_migrations:
        console.print("[cyan]Migrating services...[/]")
        migrate_results = run_async(up_services(cfg, migrations, raw=True))
        all_results.extend(migrate_results)
        maybe_regenerate_traefik(cfg, migrate_results)

    # 3. Start missing services (reuse up_services which handles state updates)
    if has_missing:
        console.print("[green]Starting missing services...[/]")
        start_results = run_async(up_services(cfg, missing, raw=True))
        all_results.extend(start_results)
        maybe_regenerate_traefik(cfg, start_results)

    # 4. Refresh remaining services (--full: run up to apply config changes)
    if has_refresh:
        console.print("[blue]Refreshing services...[/]")
        refresh_results = run_async(up_services(cfg, to_refresh, raw=True))
        all_results.extend(refresh_results)
        maybe_regenerate_traefik(cfg, refresh_results)

    report_results(all_results)
|
||||
|
||||
|
||||
# Alias: `cf a` behaves exactly like `cf apply` (hidden from help output).
app.command("a", hidden=True)(apply)
|
||||
# ==== begin file: src/compose_farm/cli/management.py (new file, 632 lines) ====
|
||||
"""Management commands: sync, check, init-network, traefik-file."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path # noqa: TC003
|
||||
from typing import TYPE_CHECKING, Annotated
|
||||
|
||||
import typer
|
||||
from rich.progress import Progress, TaskID # noqa: TC002
|
||||
|
||||
from compose_farm.cli.app import app
|
||||
from compose_farm.cli.common import (
|
||||
_MISSING_PATH_PREVIEW_LIMIT,
|
||||
AllOption,
|
||||
ConfigOption,
|
||||
LogPathOption,
|
||||
ServicesArg,
|
||||
get_services,
|
||||
load_config_or_exit,
|
||||
progress_bar,
|
||||
run_async,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from compose_farm.config import Config
|
||||
|
||||
from compose_farm.console import console, err_console
|
||||
from compose_farm.executor import (
|
||||
CommandResult,
|
||||
is_local,
|
||||
run_command,
|
||||
)
|
||||
from compose_farm.logs import (
|
||||
DEFAULT_LOG_PATH,
|
||||
SnapshotEntry,
|
||||
collect_service_entries,
|
||||
isoformat,
|
||||
load_existing_entries,
|
||||
merge_entries,
|
||||
write_toml,
|
||||
)
|
||||
from compose_farm.operations import (
|
||||
check_host_compatibility,
|
||||
check_service_requirements,
|
||||
discover_service_host,
|
||||
)
|
||||
from compose_farm.state import get_orphaned_services, load_state, save_state
|
||||
from compose_farm.traefik import generate_traefik_config, render_traefik_config
|
||||
|
||||
# --- Sync helpers ---
|
||||
|
||||
|
||||
def _discover_services(cfg: Config) -> dict[str, str | list[str]]:
    """Discover running services with a progress bar.

    Returns a mapping of service name -> host (or hosts) where it was found
    running; services not running anywhere are omitted from the result.
    """

    async def gather_with_progress(
        progress: Progress, task_id: TaskID
    ) -> dict[str, str | list[str]]:
        # Query all services concurrently; advance the bar as each finishes.
        tasks = [asyncio.create_task(discover_service_host(cfg, s)) for s in cfg.services]
        discovered: dict[str, str | list[str]] = {}
        for coro in asyncio.as_completed(tasks):
            service, host = await coro
            if host is not None:
                discovered[service] = host
            progress.update(task_id, advance=1, description=f"[cyan]{service}[/]")
        return discovered

    with progress_bar("Discovering", len(cfg.services)) as (progress, task_id):
        return asyncio.run(gather_with_progress(progress, task_id))
|
||||
|
||||
|
||||
def _snapshot_services(
    cfg: Config,
    services: list[str],
    log_path: Path | None,
) -> Path:
    """Capture image digests with a progress bar.

    Collects snapshot entries for each service concurrently, merges them
    into any existing log, and writes the result as TOML.

    Args:
        cfg: loaded compose-farm config.
        services: service names to snapshot.
        log_path: destination log file; falls back to DEFAULT_LOG_PATH.

    Returns:
        The path the snapshot log was written to.

    Raises:
        RuntimeError: if no image digests could be captured at all.
    """

    async def collect_service(service: str, now: datetime) -> tuple[str, list[SnapshotEntry]]:
        # Return the service name alongside its entries so the caller can
        # attribute progress updates correctly. Per-service failures are
        # swallowed: one unreachable host must not abort the whole snapshot.
        try:
            return service, await collect_service_entries(cfg, service, now=now)
        except RuntimeError:
            return service, []

    async def gather_with_progress(
        progress: Progress, task_id: TaskID, now: datetime, svc_list: list[str]
    ) -> list[SnapshotEntry]:
        # Each coroutine yields (service, entries), so the completed service
        # is known directly — no scanning of done tasks or private Task
        # attributes needed to drive the progress description.
        tasks = [asyncio.create_task(collect_service(s, now)) for s in svc_list]
        all_entries: list[SnapshotEntry] = []
        for coro in asyncio.as_completed(tasks):
            service, entries = await coro
            all_entries.extend(entries)
            progress.update(task_id, advance=1, description=f"[cyan]{service}[/]")
        return all_entries

    effective_log_path = log_path or DEFAULT_LOG_PATH
    now_dt = datetime.now(UTC)
    now_iso = isoformat(now_dt)

    with progress_bar("Capturing", len(services)) as (progress, task_id):
        snapshot_entries = asyncio.run(gather_with_progress(progress, task_id, now_dt, services))

    if not snapshot_entries:
        msg = "No image digests were captured"
        raise RuntimeError(msg)

    # Merge with any previous snapshot so history is preserved.
    existing_entries = load_existing_entries(effective_log_path)
    merged_entries = merge_entries(existing_entries, snapshot_entries, now_iso=now_iso)
    meta = {"generated_at": now_iso, "compose_dir": str(cfg.compose_dir)}
    write_toml(effective_log_path, meta=meta, entries=merged_entries)
    return effective_log_path
|
||||
|
||||
|
||||
def _format_host(host: str | list[str]) -> str:
|
||||
"""Format a host value for display."""
|
||||
if isinstance(host, list):
|
||||
return ", ".join(host)
|
||||
return host
|
||||
|
||||
|
||||
def _report_sync_changes(
    added: list[str],
    removed: list[str],
    changed: list[tuple[str, str | list[str], str | list[str]]],
    discovered: dict[str, str | list[str]],
    current_state: dict[str, str | list[str]],
) -> None:
    """Report sync changes to the user.

    Args:
        added: services found running but absent from recorded state.
        removed: services in recorded state but no longer running.
        changed: (service, old_host, new_host) tuples for moved services.
        discovered: live service -> host mapping from discovery.
        current_state: previously recorded service -> host mapping.
    """
    if added:
        console.print(f"\nNew services found ({len(added)}):")
        for service in sorted(added):
            host_str = _format_host(discovered[service])
            console.print(f" [green]+[/] [cyan]{service}[/] on [magenta]{host_str}[/]")

    if changed:
        console.print(f"\nServices on different hosts ({len(changed)}):")
        for service, old_host, new_host in sorted(changed):
            old_str = _format_host(old_host)
            new_str = _format_host(new_host)
            console.print(
                f" [yellow]~[/] [cyan]{service}[/]: [magenta]{old_str}[/] → [magenta]{new_str}[/]"
            )

    if removed:
        console.print(f"\nServices no longer running ({len(removed)}):")
        for service in sorted(removed):
            # Show the last host the service was recorded on.
            host_str = _format_host(current_state[service])
            console.print(f" [red]-[/] [cyan]{service}[/] (was on [magenta]{host_str}[/])")
|
||||
|
||||
|
||||
# --- Check helpers ---
|
||||
|
||||
|
||||
def _check_ssh_connectivity(cfg: Config) -> list[str]:
    """Check SSH connectivity to all hosts. Returns list of unreachable hosts."""
    # Filter out local hosts - no SSH needed
    remote_hosts = [h for h in cfg.hosts if not is_local(cfg.hosts[h])]

    if not remote_hosts:
        return []

    console.print()  # Spacing before progress bar

    async def check_host(host_name: str) -> tuple[str, bool]:
        # A trivial remote command doubles as a reachability probe.
        host = cfg.hosts[host_name]
        result = await run_command(host, "echo ok", host_name, stream=False)
        return host_name, result.success

    async def gather_with_progress(progress: Progress, task_id: TaskID) -> list[str]:
        # Probe all hosts concurrently; advance the bar as each completes.
        tasks = [asyncio.create_task(check_host(h)) for h in remote_hosts]
        unreachable: list[str] = []
        for coro in asyncio.as_completed(tasks):
            host_name, success = await coro
            if not success:
                unreachable.append(host_name)
            progress.update(task_id, advance=1, description=f"[cyan]{host_name}[/]")
        return unreachable

    with progress_bar("Checking SSH connectivity", len(remote_hosts)) as (progress, task_id):
        return asyncio.run(gather_with_progress(progress, task_id))
|
||||
|
||||
|
||||
def _check_service_requirements(
    cfg: Config,
    services: list[str],
) -> tuple[list[tuple[str, str, str]], list[tuple[str, str, str]], list[tuple[str, str, str]]]:
    """Check mounts, networks, and devices for all services with a progress bar.

    Returns (mount_errors, network_errors, device_errors) where each is a list of
    (service, host, missing_item) tuples.
    """

    async def check_service(
        service: str,
    ) -> tuple[
        str,
        list[tuple[str, str, str]],
        list[tuple[str, str, str]],
        list[tuple[str, str, str]],
    ]:
        """Check requirements for a single service on all its hosts."""
        host_names = cfg.get_hosts(service)
        mount_errors: list[tuple[str, str, str]] = []
        network_errors: list[tuple[str, str, str]] = []
        device_errors: list[tuple[str, str, str]] = []

        # Hosts are checked sequentially within a service; services themselves
        # run concurrently (see gather_with_progress below).
        for host_name in host_names:
            missing_paths, missing_nets, missing_devs = await check_service_requirements(
                cfg, service, host_name
            )
            mount_errors.extend((service, host_name, p) for p in missing_paths)
            network_errors.extend((service, host_name, n) for n in missing_nets)
            device_errors.extend((service, host_name, d) for d in missing_devs)

        return service, mount_errors, network_errors, device_errors

    async def gather_with_progress(
        progress: Progress, task_id: TaskID
    ) -> tuple[list[tuple[str, str, str]], list[tuple[str, str, str]], list[tuple[str, str, str]]]:
        tasks = [asyncio.create_task(check_service(s)) for s in services]
        all_mount_errors: list[tuple[str, str, str]] = []
        all_network_errors: list[tuple[str, str, str]] = []
        all_device_errors: list[tuple[str, str, str]] = []

        for coro in asyncio.as_completed(tasks):
            service, mount_errs, net_errs, dev_errs = await coro
            all_mount_errors.extend(mount_errs)
            all_network_errors.extend(net_errs)
            all_device_errors.extend(dev_errs)
            progress.update(task_id, advance=1, description=f"[cyan]{service}[/]")

        return all_mount_errors, all_network_errors, all_device_errors

    with progress_bar(
        "Checking requirements", len(services), initial_description="[dim]checking...[/]"
    ) as (progress, task_id):
        return asyncio.run(gather_with_progress(progress, task_id))
|
||||
|
||||
|
||||
def _report_config_status(cfg: Config) -> bool:
    """Report drift between the config file and compose dirs on disk.

    Returns True when errors were found, i.e. at least one configured
    service has no compose file on disk.
    """
    configured = set(cfg.services.keys())
    on_disk = cfg.discover_compose_dirs()
    unmanaged = sorted(on_disk - configured)
    missing_from_disk = sorted(configured - on_disk)

    # Happy path: no drift in either direction.
    if not unmanaged and not missing_from_disk:
        console.print("[green]✓[/] Config matches disk")
        return False

    if unmanaged:
        console.print(f"\n[yellow]Unmanaged[/] (on disk but not in config, {len(unmanaged)}):")
        for name in unmanaged:
            console.print(f" [yellow]+[/] [cyan]{name}[/]")

    if missing_from_disk:
        console.print(f"\n[red]In config but no compose file[/] ({len(missing_from_disk)}):")
        for name in missing_from_disk:
            console.print(f" [red]-[/] [cyan]{name}[/]")

    return bool(missing_from_disk)
|
||||
|
||||
|
||||
def _report_orphaned_services(cfg: Config) -> bool:
    """Warn about services tracked in state that vanished from the config.

    Returns True when any orphans were found.
    """
    orphaned = get_orphaned_services(cfg)
    if not orphaned:
        return False

    console.print("\n[yellow]Orphaned services[/] (in state but not in config):")
    console.print(
        "[dim]Run 'cf apply' to stop them, or 'cf down --orphaned' for just orphans.[/]"
    )
    for name, hosts in sorted(orphaned.items()):
        host_str = hosts if isinstance(hosts, str) else ", ".join(hosts)
        console.print(f" [yellow]![/] [cyan]{name}[/] on [magenta]{host_str}[/]")
    return True
|
||||
|
||||
|
||||
def _report_traefik_status(cfg: Config, services: list[str]) -> None:
    """Validate traefik labels and print warnings.

    Silently returns when traefik is not configured for this setup.
    """
    try:
        _, warnings = generate_traefik_config(cfg, services, check_all=True)
    except (FileNotFoundError, ValueError):
        # Traefik not set up - nothing to validate or report.
        return

    if not warnings:
        console.print("[green]✓[/] Traefik labels valid")
        return

    console.print(f"\n[yellow]Traefik issues[/] ({len(warnings)}):")
    for warning in warnings:
        console.print(f" [yellow]![/] {warning}")
|
||||
|
||||
|
||||
def _report_mount_errors(mount_errors: list[tuple[str, str, str]]) -> None:
    """Report missing bind-mount paths grouped by (service, host).

    Args:
        mount_errors: Tuples of (service, host, path), one per missing path.
    """
    # Group by (service, host) rather than service alone: a multi-host
    # service previously had all its paths attributed to the first host seen.
    by_service_host: dict[tuple[str, str], list[str]] = {}
    for svc, host, path in mount_errors:
        by_service_host.setdefault((svc, host), []).append(path)

    console.print(f"[red]Missing mounts[/] ({len(mount_errors)}):")
    for (svc, host), paths in sorted(by_service_host.items()):
        console.print(f"  [cyan]{svc}[/] on [magenta]{host}[/]:")
        for path in paths:
            console.print(f"    [red]✗[/] {path}")
|
||||
|
||||
|
||||
def _report_network_errors(network_errors: list[tuple[str, str, str]]) -> None:
    """Report missing Docker networks grouped by (service, host).

    Args:
        network_errors: Tuples of (service, host, network), one per missing network.
    """
    # Group by (service, host) rather than service alone: a multi-host
    # service previously had all networks attributed to the first host seen.
    by_service_host: dict[tuple[str, str], list[str]] = {}
    for svc, host, net in network_errors:
        by_service_host.setdefault((svc, host), []).append(net)

    console.print(f"[red]Missing networks[/] ({len(network_errors)}):")
    for (svc, host), networks in sorted(by_service_host.items()):
        console.print(f"  [cyan]{svc}[/] on [magenta]{host}[/]:")
        for net in networks:
            console.print(f"    [red]✗[/] {net}")
|
||||
|
||||
|
||||
def _report_device_errors(device_errors: list[tuple[str, str, str]]) -> None:
    """Report missing host devices grouped by (service, host).

    Args:
        device_errors: Tuples of (service, host, device), one per missing device.
    """
    # Group by (service, host) rather than service alone: a multi-host
    # service previously had all devices attributed to the first host seen.
    by_service_host: dict[tuple[str, str], list[str]] = {}
    for svc, host, dev in device_errors:
        by_service_host.setdefault((svc, host), []).append(dev)

    console.print(f"[red]Missing devices[/] ({len(device_errors)}):")
    for (svc, host), devices in sorted(by_service_host.items()):
        console.print(f"  [cyan]{svc}[/] on [magenta]{host}[/]:")
        for dev in devices:
            console.print(f"    [red]✗[/] {dev}")
|
||||
|
||||
|
||||
def _report_ssh_status(unreachable_hosts: list[str]) -> bool:
|
||||
"""Report SSH connectivity status. Returns True if there are errors."""
|
||||
if unreachable_hosts:
|
||||
console.print(f"[red]Unreachable hosts[/] ({len(unreachable_hosts)}):")
|
||||
for host in sorted(unreachable_hosts):
|
||||
console.print(f" [red]✗[/] [magenta]{host}[/]")
|
||||
return True
|
||||
console.print("[green]✓[/] All hosts reachable")
|
||||
return False
|
||||
|
||||
|
||||
def _report_host_compatibility(
    compat: dict[str, tuple[int, int, list[str]]],
    assigned_hosts: list[str],
) -> None:
    """Print per-host path compatibility (found/total) for one service."""
    for host_name, (found, total, missing) in sorted(compat.items()):
        suffix = " [dim](assigned)[/]" if host_name in assigned_hosts else ""

        if found == total:
            console.print(f"  [green]✓[/] [magenta]{host_name}[/] {found}/{total}{suffix}")
            continue

        # Show at most _MISSING_PATH_PREVIEW_LIMIT paths, then a "+N more" tail.
        preview = ", ".join(missing[:_MISSING_PATH_PREVIEW_LIMIT])
        overflow = len(missing) - _MISSING_PATH_PREVIEW_LIMIT
        if overflow > 0:
            preview = f"{preview}, +{overflow} more"
        console.print(
            f"  [red]✗[/] [magenta]{host_name}[/] {found}/{total} "
            f"[dim](missing: {preview})[/]{suffix}"
        )
|
||||
|
||||
|
||||
def _run_remote_checks(cfg: Config, svc_list: list[str], *, show_host_compat: bool) -> bool:
    """Run SSH-based checks for mounts, networks, and host compatibility.

    Args:
        cfg: Loaded farm configuration.
        svc_list: Services whose requirements should be verified.
        show_host_compat: When True, additionally print a per-host
            compatibility breakdown for every service in *svc_list*.

    Returns True if any errors were found.
    """
    has_errors = False

    # Check SSH connectivity first
    if _report_ssh_status(_check_ssh_connectivity(cfg)):
        has_errors = True

    console.print()  # Spacing before mounts/networks check

    # Check mounts, networks, and devices
    mount_errors, network_errors, device_errors = _check_service_requirements(cfg, svc_list)

    if mount_errors:
        _report_mount_errors(mount_errors)
        has_errors = True
    if network_errors:
        _report_network_errors(network_errors)
        has_errors = True
    if device_errors:
        _report_device_errors(device_errors)
        has_errors = True
    if not mount_errors and not network_errors and not device_errors:
        console.print("[green]✓[/] All mounts, networks, and devices exist")

    # Host compatibility is informational only; it never flips has_errors.
    if show_host_compat:
        for service in svc_list:
            console.print(f"\n[bold]Host compatibility for[/] [cyan]{service}[/]:")
            compat = run_async(check_host_compatibility(cfg, service))
            assigned_hosts = cfg.get_hosts(service)
            _report_host_compatibility(compat, assigned_hosts)

    return has_errors
|
||||
|
||||
|
||||
# Default network settings for cross-host Docker networking.
# Used as fallbacks by the `init-network` command when no flags are given.
_DEFAULT_NETWORK_NAME = "mynetwork"
_DEFAULT_NETWORK_SUBNET = "172.20.0.0/16"
_DEFAULT_NETWORK_GATEWAY = "172.20.0.1"
|
||||
|
||||
|
||||
@app.command("traefik-file", rich_help_panel="Configuration")
def traefik_file(
    services: ServicesArg = None,
    all_services: AllOption = False,
    output: Annotated[
        Path | None,
        typer.Option(
            "--output",
            "-o",
            help="Write Traefik file-provider YAML to this path (stdout if omitted)",
        ),
    ] = None,
    config: ConfigOption = None,
) -> None:
    """Generate a Traefik file-provider fragment from compose Traefik labels.

    Renders the selected services' Traefik labels as file-provider YAML,
    written to --output or printed to stdout. Warnings are printed to
    stderr even on success.

    Raises:
        typer.Exit: With code 1 when config generation fails.
    """
    svc_list, cfg = get_services(services or [], all_services, config)
    try:
        dynamic, warnings = generate_traefik_config(cfg, svc_list)
    except (FileNotFoundError, ValueError) as exc:
        err_console.print(f"[red]✗[/] {exc}")
        raise typer.Exit(1) from exc

    rendered = render_traefik_config(dynamic)

    if output:
        # Create any missing parent directories before writing.
        output.parent.mkdir(parents=True, exist_ok=True)
        output.write_text(rendered)
        console.print(f"[green]✓[/] Traefik config written to {output}")
    else:
        console.print(rendered)

    # Warnings go to stderr so they don't pollute piped stdout output.
    for warning in warnings:
        err_console.print(f"[yellow]![/] {warning}")
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Configuration")
def refresh(
    config: ConfigOption = None,
    log_path: LogPathOption = None,
    dry_run: Annotated[
        bool,
        typer.Option("--dry-run", "-n", help="Show what would change without writing"),
    ] = False,
) -> None:
    """Update local state from running services.

    Discovers which services are running on which hosts, updates the state
    file, and captures image digests. This is a read operation - it updates
    your local state to match reality, not the other way around.

    Use 'cf apply' to make reality match your config (stop orphans, migrate).
    """
    cfg = load_config_or_exit(config)
    current_state = load_state(cfg)

    discovered = _discover_services(cfg)

    # Calculate changes
    added = [s for s in discovered if s not in current_state]
    removed = [s for s in current_state if s not in discovered]
    changed = [
        (s, current_state[s], discovered[s])
        for s in discovered
        if s in current_state and current_state[s] != discovered[s]
    ]

    # Report state changes
    state_changed = bool(added or removed or changed)
    if state_changed:
        _report_sync_changes(added, removed, changed, discovered, current_state)
    else:
        console.print("[green]✓[/] State is already in sync.")

    if dry_run:
        # Report only; leave the state file and digest snapshot untouched.
        console.print("\n[dim](dry-run: no changes made)[/]")
        return

    # Update state file
    if state_changed:
        save_state(cfg, discovered)
        console.print(f"\n[green]✓[/] State updated: {len(discovered)} services tracked.")

    # Capture image digests for running services
    # (runs even when the state file was already in sync).
    if discovered:
        try:
            path = _snapshot_services(cfg, list(discovered.keys()), log_path)
            console.print(f"[green]✓[/] Digests written to {path}")
        except RuntimeError as exc:
            # Digest capture is best-effort: warn but do not fail the command.
            err_console.print(f"[yellow]![/] {exc}")
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Configuration")
def check(
    services: ServicesArg = None,
    local: Annotated[
        bool,
        typer.Option("--local", help="Skip SSH-based checks (faster)"),
    ] = False,
    config: ConfigOption = None,
) -> None:
    """Validate configuration, traefik labels, mounts, and networks.

    Without arguments: validates all services against configured hosts.
    With service arguments: validates specific services and shows host compatibility.

    Use --local to skip SSH-based checks for faster validation.

    Raises:
        typer.Exit: With code 1 on unknown service names or when any
            check reports an error.
    """
    cfg = load_config_or_exit(config)

    # Determine which services to check and whether to show host compatibility
    if services:
        svc_list = list(services)
        invalid = [s for s in svc_list if s not in cfg.services]
        if invalid:
            for svc in invalid:
                err_console.print(f"[red]✗[/] Service '{svc}' not found in config")
            raise typer.Exit(1)
        show_host_compat = True
    else:
        svc_list = list(cfg.services.keys())
        show_host_compat = False

    # Run checks
    has_errors = _report_config_status(cfg)
    # Traefik issues are reported but do not affect the exit code.
    _report_traefik_status(cfg, svc_list)

    if not local and _run_remote_checks(cfg, svc_list, show_host_compat=show_host_compat):
        has_errors = True

    # Check for orphaned services (in state but removed from config)
    if _report_orphaned_services(cfg):
        has_errors = True

    if has_errors:
        raise typer.Exit(1)
|
||||
|
||||
|
||||
@app.command("init-network", rich_help_panel="Configuration")
def init_network(
    hosts: Annotated[
        list[str] | None,
        typer.Argument(help="Hosts to create network on (default: all)"),
    ] = None,
    network: Annotated[
        str,
        typer.Option("--network", "-n", help="Network name"),
    ] = _DEFAULT_NETWORK_NAME,
    subnet: Annotated[
        str,
        typer.Option("--subnet", "-s", help="Network subnet"),
    ] = _DEFAULT_NETWORK_SUBNET,
    gateway: Annotated[
        str,
        typer.Option("--gateway", "-g", help="Network gateway"),
    ] = _DEFAULT_NETWORK_GATEWAY,
    config: ConfigOption = None,
) -> None:
    """Create Docker network on hosts with consistent settings.

    Creates an external Docker network that services can use for cross-host
    communication. Uses the same subnet/gateway on all hosts to ensure
    consistent networking. Creation is idempotent: an existing network with
    the same name counts as success.

    Raises:
        typer.Exit: With code 1 on unknown host names or any creation failure.
    """
    cfg = load_config_or_exit(config)

    # Validate all host names before touching any host.
    target_hosts = list(hosts) if hosts else list(cfg.hosts.keys())
    invalid = [h for h in target_hosts if h not in cfg.hosts]
    if invalid:
        for h in invalid:
            err_console.print(f"[red]✗[/] Host '{h}' not found in config")
        raise typer.Exit(1)

    # NOTE(review): network/subnet/gateway are single-quoted into a remote
    # shell command; a value containing a single quote would break quoting.
    # Assumed safe as these come from CLI flags/config - confirm they can
    # never carry untrusted input.
    async def create_network_on_host(host_name: str) -> CommandResult:
        host = cfg.hosts[host_name]
        # Check if network already exists
        check_cmd = f"docker network inspect '{network}' >/dev/null 2>&1"
        check_result = await run_command(host, check_cmd, host_name, stream=False)

        if check_result.success:
            # Idempotent: an existing network is treated as success.
            console.print(f"[cyan]\\[{host_name}][/] Network '{network}' already exists")
            return CommandResult(service=host_name, exit_code=0, success=True)

        # Create the network
        create_cmd = (
            f"docker network create "
            f"--driver bridge "
            f"--subnet '{subnet}' "
            f"--gateway '{gateway}' "
            f"'{network}'"
        )
        result = await run_command(host, create_cmd, host_name, stream=False)

        if result.success:
            console.print(f"[cyan]\\[{host_name}][/] [green]✓[/] Created network '{network}'")
        else:
            err_console.print(
                f"[cyan]\\[{host_name}][/] [red]✗[/] Failed to create network: "
                f"{result.stderr.strip()}"
            )

        return result

    async def run_all() -> list[CommandResult]:
        # Fan out to all target hosts concurrently.
        return await asyncio.gather(*[create_network_on_host(h) for h in target_hosts])

    results = run_async(run_all())
    failed = [r for r in results if not r.success]
    if failed:
        raise typer.Exit(1)
|
||||
236
src/compose_farm/cli/monitoring.py
Normal file
236
src/compose_farm/cli/monitoring.py
Normal file
@@ -0,0 +1,236 @@
|
||||
"""Monitoring commands: logs, ps, stats."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import contextlib
|
||||
from typing import TYPE_CHECKING, Annotated
|
||||
|
||||
import typer
|
||||
from rich.progress import Progress, TaskID # noqa: TC002
|
||||
from rich.table import Table
|
||||
|
||||
from compose_farm.cli.app import app
|
||||
from compose_farm.cli.common import (
|
||||
_STATS_PREVIEW_LIMIT,
|
||||
AllOption,
|
||||
ConfigOption,
|
||||
HostOption,
|
||||
ServicesArg,
|
||||
get_services,
|
||||
load_config_or_exit,
|
||||
progress_bar,
|
||||
report_results,
|
||||
run_async,
|
||||
)
|
||||
from compose_farm.console import console, err_console
|
||||
from compose_farm.executor import run_command, run_on_services
|
||||
from compose_farm.state import get_services_needing_migration, load_state
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Mapping
|
||||
|
||||
from compose_farm.config import Config
|
||||
|
||||
|
||||
def _group_services_by_host(
    services: dict[str, str | list[str]],
    hosts: Mapping[str, object],
    all_hosts: list[str] | None = None,
) -> dict[str, list[str]]:
    """Group services by their assigned host(s).

    A multi-host service (an explicit list, or the "all" keyword when
    *all_hosts* is provided) appears under every matching host. Hosts not
    present in *hosts* are silently ignored.
    """
    grouped: dict[str, list[str]] = {name: [] for name in hosts}

    for service, assignment in services.items():
        if isinstance(assignment, list):
            # Explicit list of hosts.
            targets = assignment
        elif assignment == "all" and all_hosts:
            # "all" keyword expands to every known host.
            targets = all_hosts
        else:
            # Plain single-host assignment.
            targets = [assignment]
        for host_name in targets:
            if host_name in grouped:
                grouped[host_name].append(service)

    return grouped
|
||||
|
||||
|
||||
def _get_container_counts(cfg: Config) -> dict[str, int]:
    """Get container counts from all hosts with a progress bar.

    Runs ``docker ps -q | wc -l`` concurrently on every configured host.
    A host that fails (SSH error or unparsable output) reports a count of 0.
    """

    async def get_count(host_name: str) -> tuple[str, int]:
        # One SSH round-trip per host; counts running containers only.
        host = cfg.hosts[host_name]
        result = await run_command(host, "docker ps -q | wc -l", host_name, stream=False)
        count = 0
        if result.success:
            with contextlib.suppress(ValueError):
                count = int(result.stdout.strip())
        return host_name, count

    async def gather_with_progress(progress: Progress, task_id: TaskID) -> dict[str, int]:
        hosts = list(cfg.hosts.keys())
        tasks = [asyncio.create_task(get_count(h)) for h in hosts]
        results: dict[str, int] = {}
        # as_completed lets the bar advance as each host answers, in any order.
        for coro in asyncio.as_completed(tasks):
            host_name, count = await coro
            results[host_name] = count
            progress.update(task_id, advance=1, description=f"[cyan]{host_name}[/]")
        return results

    with progress_bar("Querying hosts", len(cfg.hosts)) as (progress, task_id):
        return asyncio.run(gather_with_progress(progress, task_id))
|
||||
|
||||
|
||||
def _build_host_table(
    cfg: Config,
    services_by_host: dict[str, list[str]],
    running_by_host: dict[str, list[str]],
    container_counts: dict[str, int],
    *,
    show_containers: bool,
) -> Table:
    """Build the per-host overview table, optionally with container counts."""
    table = Table(title="Hosts", show_header=True, header_style="bold cyan")
    table.add_column("Host", style="magenta")
    table.add_column("Address")
    table.add_column("Configured", justify="right")
    table.add_column("Running", justify="right")
    if show_containers:
        table.add_column("Containers", justify="right")

    for host_name in sorted(cfg.hosts.keys()):
        running_count = len(running_by_host[host_name])
        cells = [
            host_name,
            cfg.hosts[host_name].address,
            str(len(services_by_host[host_name])),
            # Zeros are dimmed so busy hosts stand out.
            str(running_count) if running_count > 0 else "[dim]0[/]",
        ]
        if show_containers:
            containers = container_counts.get(host_name, 0)
            cells.append(str(containers) if containers > 0 else "[dim]0[/]")
        table.add_row(*cells)

    return table
|
||||
|
||||
|
||||
def _build_summary_table(
    cfg: Config, state: dict[str, str | list[str]], pending: list[str]
) -> Table:
    """Build the key/value summary table shown by the stats command."""
    on_disk = cfg.discover_compose_dirs()

    table = Table(title="Summary", show_header=False)
    table.add_column("Label", style="dim")
    table.add_column("Value", style="bold")

    for label, value in (
        ("Total hosts", str(len(cfg.hosts))),
        ("Services (configured)", str(len(cfg.services))),
        ("Services (tracked)", str(len(state))),
        ("Compose files on disk", str(len(on_disk))),
    ):
        table.add_row(label, value)

    if not pending:
        table.add_row("Pending migrations", "[green]0[/]")
        return table

    # Preview a handful of pending names; elide the rest.
    shown = ", ".join(pending[:_STATS_PREVIEW_LIMIT])
    ellipsis = "..." if len(pending) > _STATS_PREVIEW_LIMIT else ""
    table.add_row("Pending migrations", f"[yellow]{len(pending)}[/] ({shown}{ellipsis})")
    return table
|
||||
|
||||
|
||||
# --- Command functions ---
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Monitoring")
def logs(
    services: ServicesArg = None,
    all_services: AllOption = False,
    host: HostOption = None,
    follow: Annotated[bool, typer.Option("--follow", "-f", help="Follow logs")] = False,
    tail: Annotated[
        int | None,
        typer.Option("--tail", "-n", help="Number of lines (default: 20 for --all, 100 otherwise)"),
    ] = None,
    config: ConfigOption = None,
) -> None:
    """Show service logs.

    Services can be selected explicitly, with --all, or via --host (all
    services configured for that host). --all and --host are mutually
    exclusive.

    Raises:
        typer.Exit: With code 1 on conflicting options or an unknown host.
    """
    if all_services and host is not None:
        err_console.print("[red]✗[/] Cannot use --all and --host together")
        raise typer.Exit(1)

    cfg = load_config_or_exit(config)

    # Determine service list based on options
    if host is not None:
        if host not in cfg.hosts:
            err_console.print(f"[red]✗[/] Host '{host}' not found in config")
            raise typer.Exit(1)
        # Include services where host is in the list of configured hosts
        svc_list = [s for s in cfg.services if host in cfg.get_hosts(s)]
        if not svc_list:
            err_console.print(f"[yellow]![/] No services configured for host '{host}'")
            return
    else:
        svc_list, cfg = get_services(services or [], all_services, config)

    # Default to fewer lines when showing multiple services
    many_services = all_services or host is not None or len(svc_list) > 1
    effective_tail = tail if tail is not None else (20 if many_services else 100)
    cmd = f"logs --tail {effective_tail}"
    if follow:
        cmd += " -f"
    results = run_async(run_on_services(cfg, svc_list, cmd))
    report_results(results)
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Monitoring")
def ps(
    config: ConfigOption = None,
) -> None:
    """Show status of all services."""
    cfg = load_config_or_exit(config)
    # `docker compose ps` on every configured service, results summarized.
    all_services = list(cfg.services.keys())
    report_results(run_async(run_on_services(cfg, all_services, "ps")))
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Monitoring")
def stats(
    live: Annotated[
        bool,
        typer.Option("--live", "-l", help="Query Docker for live container stats"),
    ] = False,
    config: ConfigOption = None,
) -> None:
    """Show overview statistics for hosts and services.

    Without --live: Shows config/state info (hosts, services, pending migrations).
    With --live: Also queries Docker on each host for container counts.
    """
    cfg = load_config_or_exit(config)
    state = load_state(cfg)
    pending = get_services_needing_migration(cfg)

    # Group configured vs. currently-tracked services per host.
    all_hosts = list(cfg.hosts.keys())
    services_by_host = _group_services_by_host(cfg.services, cfg.hosts, all_hosts)
    running_by_host = _group_services_by_host(state, cfg.hosts, all_hosts)

    # Live counts require an SSH round-trip per host, so they're opt-in.
    container_counts: dict[str, int] = {}
    if live:
        container_counts = _get_container_counts(cfg)

    host_table = _build_host_table(
        cfg, services_by_host, running_by_host, container_counts, show_containers=live
    )
    console.print(host_table)

    console.print()
    console.print(_build_summary_table(cfg, state, pending))
|
||||
48
src/compose_farm/cli/web.py
Normal file
48
src/compose_farm/cli/web.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""Web server command."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Annotated
|
||||
|
||||
import typer
|
||||
|
||||
from compose_farm.cli.app import app
|
||||
from compose_farm.console import console
|
||||
|
||||
|
||||
@app.command(rich_help_panel="Server")
def web(
    host: Annotated[
        str,
        typer.Option("--host", "-H", help="Host to bind to"),
    ] = "0.0.0.0",  # noqa: S104
    port: Annotated[
        int,
        typer.Option("--port", "-p", help="Port to listen on"),
    ] = 8000,
    reload: Annotated[
        bool,
        typer.Option("--reload", "-r", help="Enable auto-reload for development"),
    ] = False,
) -> None:
    """Start the web UI server.

    Raises:
        typer.Exit: With code 1 when the optional web dependencies
            (uvicorn) are not installed.
    """
    # Lazy import: uvicorn ships with the optional [web] extra only.
    try:
        import uvicorn  # noqa: PLC0415
    except ImportError:
        console.print(
            "[red]Error:[/] Web dependencies not installed. "
            "Install with: [cyan]pip install compose-farm[web][/]"
        )
        raise typer.Exit(1) from None

    console.print(f"[green]Starting Compose Farm Web UI[/] at http://{host}:{port}")
    console.print("[dim]Press Ctrl+C to stop[/]")

    # factory=True: the import string names an app factory, which --reload
    # needs so worker processes can re-create the app.
    uvicorn.run(
        "compose_farm.web:create_app",
        factory=True,
        host=host,
        port=port,
        reload=reload,
        log_level="info",
    )
|
||||
331
src/compose_farm/compose.py
Normal file
331
src/compose_farm/compose.py
Normal file
@@ -0,0 +1,331 @@
|
||||
"""Compose file parsing utilities.
|
||||
|
||||
Handles .env loading, variable interpolation, port/volume/network extraction.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import yaml
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pathlib import Path
|
||||
|
||||
from .config import Config
|
||||
|
||||
# Port parsing constants
_SINGLE_PART = 1  # "8080" - container port only
_PUBLISHED_TARGET_PARTS = 2  # "8080:80" - published:target
_HOST_PUBLISHED_PARTS = 3  # "0.0.0.0:8080:80" - host_ip:published:target
_MIN_VOLUME_PARTS = 2  # "src:dst" - minimum parts for a volume mapping

# Matches ${VAR} and ${VAR:-default} compose-style variable references.
_VAR_PATTERN = re.compile(r"\$\{([A-Za-z_][A-Za-z0-9_]*)(?::-(.*?))?\}")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class PortMapping:
    """Port mapping for a compose service."""

    # Container-side port.
    target: int
    # Host-side port, or None when the port is not published.
    published: int | None
|
||||
|
||||
def _load_env(compose_path: Path) -> dict[str, str]:
    """Load environment variables for compose interpolation.

    Reads KEY=VALUE pairs from a .env file in the same directory as the
    compose file, then overlays the current process environment (which
    takes precedence).

    Args:
        compose_path: Path to the compose file; its parent directory is
            searched for the .env file.

    Returns:
        Mapping of variable names to string values.
    """
    env: dict[str, str] = {}
    env_path = compose_path.parent / ".env"
    if env_path.exists():
        for line in env_path.read_text().splitlines():
            stripped = line.strip()
            # Skip blanks, comments, and lines without an assignment.
            if not stripped or stripped.startswith("#") or "=" not in stripped:
                continue
            key, value = stripped.split("=", 1)
            key = key.strip()
            value = value.strip()
            # Strip one matching pair of surrounding quotes. The length guard
            # keeps a lone quote character (value == '"' or "'") from
            # collapsing to an empty string.
            if len(value) >= 2 and (
                (value.startswith('"') and value.endswith('"'))
                or (value.startswith("'") and value.endswith("'"))
            ):
                value = value[1:-1]
            env[key] = value
    # The process environment wins over .env entries.
    env.update({k: v for k, v in os.environ.items() if isinstance(v, str)})
    return env
|
||||
|
||||
|
||||
def _interpolate(value: str, env: dict[str, str]) -> str:
    """Expand ${VAR} and ${VAR:-default} references in *value* using *env*.

    An unset or empty variable falls back to its default (or "" when no
    default was given).
    """

    def _substitute(match: re.Match[str]) -> str:
        name, fallback = match.group(1), match.group(2)
        current = env.get(name)
        # An empty value counts as unset, matching ${VAR:-default} semantics.
        return current if current else (fallback or "")

    return _VAR_PATTERN.sub(_substitute, value)
|
||||
|
||||
|
||||
def _parse_ports(raw: Any, env: dict[str, str]) -> list[PortMapping]:  # noqa: PLR0912
    """Parse port specifications from compose file.

    Handles string formats like "8080", "8080:80", "0.0.0.0:8080:80",
    and dict formats with target/published keys. Entries that cannot be
    parsed into at least a target port are silently skipped.
    """
    if raw is None:
        return []
    mappings: list[PortMapping] = []

    # Accept a single entry or a list of entries.
    items = raw if isinstance(raw, list) else [raw]

    for item in items:
        if isinstance(item, str):
            interpolated = _interpolate(item, env)
            # Drop a protocol suffix such as "/tcp" or "/udp".
            port_spec, _, _ = interpolated.partition("/")
            parts = port_spec.split(":")
            published: int | None = None
            target: int | None = None

            # "8080" - container port only, nothing published.
            if len(parts) == _SINGLE_PART and parts[0].isdigit():
                target = int(parts[0])
            # "8080:80" - published:target.
            elif (
                len(parts) == _PUBLISHED_TARGET_PARTS and parts[0].isdigit() and parts[1].isdigit()
            ):
                published = int(parts[0])
                target = int(parts[1])
            # "0.0.0.0:8080:80" - host_ip:published:target (ip ignored).
            elif (
                len(parts) == _HOST_PUBLISHED_PARTS and parts[-2].isdigit() and parts[-1].isdigit()
            ):
                published = int(parts[-2])
                target = int(parts[-1])

            if target is not None:
                mappings.append(PortMapping(target=target, published=published))
        elif isinstance(item, dict):
            # Long syntax: {target: ..., published: ...}; values may be
            # strings needing interpolation.
            target_raw = item.get("target")
            if isinstance(target_raw, str):
                target_raw = _interpolate(target_raw, env)
            if target_raw is None:
                continue
            try:
                target_val = int(str(target_raw))
            except (TypeError, ValueError):
                continue

            published_raw = item.get("published")
            if isinstance(published_raw, str):
                published_raw = _interpolate(published_raw, env)
            published_val: int | None
            try:
                published_val = int(str(published_raw)) if published_raw is not None else None
            except (TypeError, ValueError):
                # A malformed published port degrades to "not published".
                published_val = None
            mappings.append(PortMapping(target=target_val, published=published_val))

    return mappings
|
||||
|
||||
|
||||
def _resolve_host_path(host_path: str, compose_dir: Path) -> str | None:
    """Resolve a volume-mount source to an absolute host path.

    Returns None for named volumes (anything that is neither absolute
    nor explicitly relative).
    """
    if host_path.startswith("/"):
        # Already an absolute host path.
        return host_path
    if not host_path.startswith(("./", "../")):
        return None  # Named volume
    # Explicit relative path: resolve against the compose file directory.
    return str((compose_dir / host_path).resolve())
|
||||
|
||||
|
||||
def _parse_volume_item(
    item: str | dict[str, Any],
    env: dict[str, str],
    compose_dir: Path,
) -> str | None:
    """Return the host path of a bind-mount volume entry, else None."""
    if isinstance(item, str):
        # Short syntax: "host:container[:mode]".
        expanded = _interpolate(item, env)
        source, sep, _rest = expanded.partition(":")
        if sep:
            return _resolve_host_path(source, compose_dir)
        return None

    if isinstance(item, dict) and item.get("type") == "bind":
        # Long syntax: {type: bind, source: ..., target: ...}.
        raw_source = item.get("source")
        if raw_source:
            return _resolve_host_path(_interpolate(str(raw_source), env), compose_dir)

    return None
|
||||
|
||||
|
||||
def parse_host_volumes(config: Config, service: str) -> list[str]:
    """Extract host bind mount paths from a service's compose file.

    Returns absolute host paths used as volume mounts, in first-seen order
    with duplicates removed. Named volumes are skipped and relative paths
    are resolved against the compose file's directory. A missing compose
    file or malformed services section yields an empty list.
    """
    compose_path = config.get_compose_path(service)
    if not compose_path.exists():
        return []

    env = _load_env(compose_path)
    compose_data = yaml.safe_load(compose_path.read_text()) or {}
    raw_services = compose_data.get("services", {})
    if not isinstance(raw_services, dict):
        return []

    paths: list[str] = []
    compose_dir = compose_path.parent

    for definition in raw_services.values():
        if not isinstance(definition, dict):
            continue

        volumes = definition.get("volumes")
        if not volumes:
            continue

        items = volumes if isinstance(volumes, list) else [volumes]
        for item in items:
            host_path = _parse_volume_item(item, env, compose_dir)
            if host_path:
                paths.append(host_path)

    # dict.fromkeys dedupes while preserving insertion order.
    return list(dict.fromkeys(paths))
|
||||
|
||||
|
||||
def parse_devices(config: Config, service: str) -> list[str]:
    """Extract host device paths from a service's compose file.

    Returns host device paths (e.g., /dev/dri, /dev/dri/renderD128), in
    first-seen order with duplicates removed. Only sources under /dev/ are
    reported. A missing compose file or malformed services section yields
    an empty list.
    """
    compose_path = config.get_compose_path(service)
    if not compose_path.exists():
        return []

    env = _load_env(compose_path)
    compose_data = yaml.safe_load(compose_path.read_text()) or {}
    raw_services = compose_data.get("services", {})
    if not isinstance(raw_services, dict):
        return []

    devices: list[str] = []
    for definition in raw_services.values():
        if not isinstance(definition, dict):
            continue

        device_list = definition.get("devices")
        if not isinstance(device_list, list):
            continue

        for item in device_list:
            if not isinstance(item, str):
                continue
            # Format: host_path:container_path[:options]
            host_path = _interpolate(item, env).split(":")[0]
            if host_path.startswith("/dev/"):
                devices.append(host_path)

    # dict.fromkeys dedupes while preserving insertion order.
    return list(dict.fromkeys(devices))
|
||||
|
||||
|
||||
def parse_external_networks(config: Config, service: str) -> list[str]:
    """Extract external network names from a service's compose file.

    Returns the names of top-level networks declared with ``external: true``.
    A missing compose file or malformed networks section yields an empty list.
    """
    compose_path = config.get_compose_path(service)
    if not compose_path.exists():
        return []

    compose_data = yaml.safe_load(compose_path.read_text()) or {}
    networks = compose_data.get("networks", {})
    if not isinstance(networks, dict):
        return []

    return [
        name
        for name, definition in networks.items()
        if isinstance(definition, dict) and definition.get("external") is True
    ]
|
||||
|
||||
|
||||
def load_compose_services(
    config: Config,
    stack: str,
) -> tuple[dict[str, Any], dict[str, str], str]:
    """Load services from a compose file with environment interpolation.

    Returns (services_dict, env_dict, host_address).
    """
    path = config.get_compose_path(stack)
    if not path.exists():
        raise FileNotFoundError(f"[{stack}] Compose file not found: {path}")

    env = _load_env(path)
    address = config.get_host(stack).address
    data = yaml.safe_load(path.read_text()) or {}
    services = data.get("services", {})
    # A malformed `services:` section (non-mapping) degrades to empty.
    if not isinstance(services, dict):
        return {}, env, address
    return services, env, address
|
||||
|
||||
|
||||
def normalize_labels(raw: Any, env: dict[str, str]) -> dict[str, str]:
    """Normalize labels from list or dict format, with interpolation."""
    if raw is None:
        return {}
    if isinstance(raw, dict):
        # Mapping form: interpolate both keys and values; skip None keys.
        normalized: dict[str, str] = {}
        for key, value in raw.items():
            if key is None:
                continue
            normalized[_interpolate(str(key), env)] = _interpolate(str(value), env)
        return normalized
    if isinstance(raw, list):
        # List form: each entry is "key=value"; anything else is ignored.
        normalized = {}
        for entry in raw:
            if not isinstance(entry, str) or "=" not in entry:
                continue
            left, right = entry.split("=", 1)
            normalized[_interpolate(left.strip(), env)] = _interpolate(right.strip(), env)
        return normalized
    return {}
|
||||
|
||||
|
||||
def get_ports_for_service(
    definition: dict[str, Any],
    all_services: dict[str, Any],
    env: dict[str, str],
) -> list[PortMapping]:
    """Get ports for a service, following network_mode: service:X if present."""
    mode = definition.get("network_mode", "")
    prefix = "service:"
    if isinstance(mode, str) and mode.startswith(prefix):
        # The service shares another service's network namespace, so the
        # published ports live on that referenced service instead.
        target = all_services.get(mode[len(prefix) :])
        if isinstance(target, dict):
            return _parse_ports(target.get("ports"), env)
    return _parse_ports(definition.get("ports"), env)
|
||||
@@ -3,11 +3,14 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import getpass
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
from pydantic import BaseModel, Field, model_validator
|
||||
|
||||
from .paths import xdg_config_home
|
||||
|
||||
|
||||
class Host(BaseModel):
|
||||
"""SSH host configuration."""
|
||||
@@ -22,23 +25,70 @@ class Config(BaseModel):
|
||||
|
||||
compose_dir: Path = Path("/opt/compose")
|
||||
hosts: dict[str, Host]
|
||||
services: dict[str, str] # service_name -> host_name
|
||||
services: dict[str, str | list[str]] # service_name -> host_name or list of hosts
|
||||
traefik_file: Path | None = None # Auto-regenerate traefik config after up/down
|
||||
traefik_service: str | None = None # Service name for Traefik (skip its host in file-provider)
|
||||
config_path: Path = Path() # Set by load_config()
|
||||
|
||||
def get_state_path(self) -> Path:
|
||||
"""Get the state file path (stored alongside config)."""
|
||||
return self.config_path.parent / "compose-farm-state.yaml"
|
||||
|
||||
@model_validator(mode="after")
|
||||
def validate_service_hosts(self) -> Config:
|
||||
"""Ensure all services reference valid hosts."""
|
||||
for service, host_name in self.services.items():
|
||||
if host_name not in self.hosts:
|
||||
msg = f"Service '{service}' references unknown host '{host_name}'"
|
||||
raise ValueError(msg)
|
||||
def validate_hosts_and_services(self) -> Config:
|
||||
"""Validate host names and service configurations."""
|
||||
# "all" is reserved keyword, cannot be used as host name
|
||||
if "all" in self.hosts:
|
||||
msg = "'all' is a reserved keyword and cannot be used as a host name"
|
||||
raise ValueError(msg)
|
||||
|
||||
for service, host_value in self.services.items():
|
||||
# Validate list configurations
|
||||
if isinstance(host_value, list):
|
||||
if not host_value:
|
||||
msg = f"Service '{service}' has empty host list"
|
||||
raise ValueError(msg)
|
||||
if len(host_value) != len(set(host_value)):
|
||||
msg = f"Service '{service}' has duplicate hosts in list"
|
||||
raise ValueError(msg)
|
||||
|
||||
# Validate all referenced hosts exist
|
||||
host_names = self.get_hosts(service)
|
||||
for host_name in host_names:
|
||||
if host_name not in self.hosts:
|
||||
msg = f"Service '{service}' references unknown host '{host_name}'"
|
||||
raise ValueError(msg)
|
||||
return self
|
||||
|
||||
def get_host(self, service: str) -> Host:
|
||||
"""Get host config for a service."""
|
||||
def get_hosts(self, service: str) -> list[str]:
|
||||
"""Get list of host names for a service.
|
||||
|
||||
Supports:
|
||||
- Single host: "truenas-debian" -> ["truenas-debian"]
|
||||
- All hosts: "all" -> list of all configured hosts
|
||||
- Explicit list: ["host1", "host2"] -> ["host1", "host2"]
|
||||
"""
|
||||
if service not in self.services:
|
||||
msg = f"Unknown service: {service}"
|
||||
raise ValueError(msg)
|
||||
return self.hosts[self.services[service]]
|
||||
host_value = self.services[service]
|
||||
if isinstance(host_value, list):
|
||||
return host_value
|
||||
if host_value == "all":
|
||||
return list(self.hosts.keys())
|
||||
return [host_value]
|
||||
|
||||
def is_multi_host(self, service: str) -> bool:
|
||||
"""Check if a service runs on multiple hosts."""
|
||||
return len(self.get_hosts(service)) > 1
|
||||
|
||||
def get_host(self, service: str) -> Host:
|
||||
"""Get host config for a service (first host if multi-host)."""
|
||||
if service not in self.services:
|
||||
msg = f"Unknown service: {service}"
|
||||
raise ValueError(msg)
|
||||
host_names = self.get_hosts(service)
|
||||
return self.hosts[host_names[0]]
|
||||
|
||||
def get_compose_path(self, service: str) -> Path:
|
||||
"""Get compose file path for a service.
|
||||
@@ -58,6 +108,25 @@ class Config(BaseModel):
|
||||
# Default to compose.yaml if none exist (will error later)
|
||||
return service_dir / "compose.yaml"
|
||||
|
||||
def discover_compose_dirs(self) -> set[str]:
|
||||
"""Find all directories in compose_dir that contain a compose file."""
|
||||
compose_filenames = {
|
||||
"compose.yaml",
|
||||
"compose.yml",
|
||||
"docker-compose.yml",
|
||||
"docker-compose.yaml",
|
||||
}
|
||||
found: set[str] = set()
|
||||
if not self.compose_dir.exists():
|
||||
return found
|
||||
for subdir in self.compose_dir.iterdir():
|
||||
if subdir.is_dir():
|
||||
for filename in compose_filenames:
|
||||
if (subdir / filename).exists():
|
||||
found.add(subdir.name)
|
||||
break
|
||||
return found
|
||||
|
||||
|
||||
def _parse_hosts(raw_hosts: dict[str, str | dict[str, str | int]]) -> dict[str, Host]:
|
||||
"""Parse hosts from config, handling both simple and full forms."""
|
||||
@@ -76,17 +145,20 @@ def load_config(path: Path | None = None) -> Config:
|
||||
"""Load configuration from YAML file.
|
||||
|
||||
Search order:
|
||||
1. Explicit path if provided
|
||||
2. ./compose-farm.yaml
|
||||
3. ~/.config/compose-farm/compose-farm.yaml
|
||||
1. Explicit path if provided via --config
|
||||
2. CF_CONFIG environment variable
|
||||
3. ./compose-farm.yaml
|
||||
4. $XDG_CONFIG_HOME/compose-farm/compose-farm.yaml (defaults to ~/.config)
|
||||
"""
|
||||
search_paths = [
|
||||
Path("compose-farm.yaml"),
|
||||
Path.home() / ".config" / "compose-farm" / "compose-farm.yaml",
|
||||
xdg_config_home() / "compose-farm" / "compose-farm.yaml",
|
||||
]
|
||||
|
||||
if path:
|
||||
config_path = path
|
||||
elif env_path := os.environ.get("CF_CONFIG"):
|
||||
config_path = Path(env_path)
|
||||
else:
|
||||
config_path = None
|
||||
for p in search_paths:
|
||||
@@ -98,10 +170,18 @@ def load_config(path: Path | None = None) -> Config:
|
||||
msg = f"Config file not found. Searched: {', '.join(str(p) for p in search_paths)}"
|
||||
raise FileNotFoundError(msg)
|
||||
|
||||
if config_path.is_dir():
|
||||
msg = (
|
||||
f"Config path is a directory, not a file: {config_path}\n"
|
||||
"This often happens when Docker creates an empty directory for a missing mount."
|
||||
)
|
||||
raise FileNotFoundError(msg)
|
||||
|
||||
with config_path.open() as f:
|
||||
raw = yaml.safe_load(f)
|
||||
|
||||
# Parse hosts with flexible format support
|
||||
raw["hosts"] = _parse_hosts(raw.get("hosts", {}))
|
||||
raw["config_path"] = config_path.resolve()
|
||||
|
||||
return Config(**raw)
|
||||
|
||||
6
src/compose_farm/console.py
Normal file
6
src/compose_farm/console.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""Shared console instances for consistent output styling."""
|
||||
|
||||
from rich.console import Console
|
||||
|
||||
console = Console(highlight=False)
|
||||
err_console = Console(stderr=True, highlight=False)
|
||||
89
src/compose_farm/example-config.yaml
Normal file
89
src/compose_farm/example-config.yaml
Normal file
@@ -0,0 +1,89 @@
|
||||
# Compose Farm configuration
|
||||
# Documentation: https://github.com/basnijholt/compose-farm
|
||||
#
|
||||
# This file configures compose-farm to manage Docker Compose services
|
||||
# across multiple hosts via SSH.
|
||||
#
|
||||
# Place this file at:
|
||||
# - ./compose-farm.yaml (current directory)
|
||||
# - ~/.config/compose-farm/compose-farm.yaml
|
||||
# - Or specify with: cf --config /path/to/config.yaml
|
||||
# - Or set CF_CONFIG environment variable
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# compose_dir: Directory containing service subdirectories with compose files
|
||||
# ------------------------------------------------------------------------------
|
||||
# Each subdirectory should contain a compose.yaml (or docker-compose.yml).
|
||||
# This path must be the same on all hosts (NFS mount recommended).
|
||||
#
|
||||
compose_dir: /opt/compose
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# hosts: SSH connection details for each host
|
||||
# ------------------------------------------------------------------------------
|
||||
# Simple form:
|
||||
# hostname: ip-or-fqdn
|
||||
#
|
||||
# Full form:
|
||||
# hostname:
|
||||
# address: ip-or-fqdn
|
||||
# user: ssh-username # default: current user
|
||||
# port: 22 # default: 22
|
||||
#
|
||||
# Note: "all" is a reserved keyword and cannot be used as a host name.
|
||||
#
|
||||
hosts:
|
||||
# Example: simple form (uses current user, port 22)
|
||||
server1: 192.168.1.10
|
||||
|
||||
# Example: full form with explicit user
|
||||
server2:
|
||||
address: 192.168.1.20
|
||||
user: admin
|
||||
|
||||
# Example: full form with custom port
|
||||
server3:
|
||||
address: 192.168.1.30
|
||||
user: root
|
||||
port: 2222
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# services: Map service names to their target host(s)
|
||||
# ------------------------------------------------------------------------------
|
||||
# Each service name must match a subdirectory in compose_dir.
|
||||
#
|
||||
# Single host:
|
||||
# service-name: hostname
|
||||
#
|
||||
# Multiple hosts (explicit list):
|
||||
# service-name: [host1, host2]
|
||||
#
|
||||
# All hosts:
|
||||
# service-name: all
|
||||
#
|
||||
services:
|
||||
# Example: service runs on a single host
|
||||
nginx: server1
|
||||
postgres: server2
|
||||
|
||||
# Example: service runs on multiple specific hosts
|
||||
# prometheus: [server1, server2]
|
||||
|
||||
# Example: service runs on ALL hosts (e.g., monitoring agents)
|
||||
# node-exporter: all
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# traefik_file: (optional) Auto-generate Traefik file-provider config
|
||||
# ------------------------------------------------------------------------------
|
||||
# When set, compose-farm automatically regenerates this file after
|
||||
# up/down/restart/update commands. Traefik watches this file for changes.
|
||||
#
|
||||
# traefik_file: /opt/compose/traefik/dynamic.d/compose-farm.yml
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# traefik_service: (optional) Service name running Traefik
|
||||
# ------------------------------------------------------------------------------
|
||||
# When generating traefik_file, services on the same host as Traefik are
|
||||
# skipped (they're handled by Traefik's Docker provider directly).
|
||||
#
|
||||
# traefik_service: traefik
|
||||
470
src/compose_farm/executor.py
Normal file
470
src/compose_farm/executor.py
Normal file
@@ -0,0 +1,470 @@
|
||||
"""Command execution via SSH or locally."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import socket
|
||||
import subprocess
|
||||
from dataclasses import dataclass
|
||||
from functools import lru_cache
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from rich.markup import escape
|
||||
|
||||
from .console import console, err_console
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from .config import Config, Host
|
||||
|
||||
LOCAL_ADDRESSES = frozenset({"local", "localhost", "127.0.0.1", "::1"})
|
||||
_DEFAULT_SSH_PORT = 22
|
||||
|
||||
|
||||
@lru_cache(maxsize=1)
|
||||
def _get_local_ips() -> frozenset[str]:
|
||||
"""Get all IP addresses of the current machine."""
|
||||
ips: set[str] = set()
|
||||
try:
|
||||
hostname = socket.gethostname()
|
||||
# Get all addresses for hostname
|
||||
for info in socket.getaddrinfo(hostname, None):
|
||||
addr = info[4][0]
|
||||
if isinstance(addr, str):
|
||||
ips.add(addr)
|
||||
# Also try getting the default outbound IP
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
|
||||
s.connect(("8.8.8.8", 80))
|
||||
ips.add(s.getsockname()[0])
|
||||
except OSError:
|
||||
pass
|
||||
return frozenset(ips)
|
||||
|
||||
|
||||
@dataclass
class CommandResult:
    """Result of a command execution."""

    service: str
    exit_code: int
    success: bool
    stdout: str = ""
    stderr: str = ""

    # SSH returns 255 when connection is closed unexpectedly (e.g., Ctrl+C)
    _SSH_CONNECTION_CLOSED = 255

    @property
    def interrupted(self) -> bool:
        """Check if command was killed by SIGINT (Ctrl+C)."""
        # Signal deaths surface as negative exit codes (-2 == SIGINT); a
        # dropped SSH connection reports 255, which we treat the same way.
        if self.exit_code < 0:
            return True
        return self.exit_code == self._SSH_CONNECTION_CLOSED
|
||||
|
||||
|
||||
def is_local(host: Host) -> bool:
    """Check if host should run locally (no SSH)."""
    address = host.address.lower()
    # Either a well-known loopback alias, or one of this machine's own IPs.
    return address in LOCAL_ADDRESSES or address in _get_local_ips()
|
||||
|
||||
|
||||
async def _run_local_command(
    command: str,
    service: str,
    *,
    stream: bool = True,
    raw: bool = False,
) -> CommandResult:
    """Run a command locally with streaming output.

    Args:
        command: Shell command to execute.
        service: Service name, used as the output prefix.
        stream: When True, echo stdout/stderr line-by-line as it arrives;
            when False, capture output into the returned CommandResult.
        raw: When True, inherit the parent's stdout/stderr so carriage
            returns (progress bars) render correctly; no capture.

    Returns:
        CommandResult with exit code and, when stream=False, captured output.
    """
    try:
        if raw:
            # Run with inherited stdout/stderr for proper \r handling
            proc = await asyncio.create_subprocess_shell(
                command,
                stdout=None,  # Inherit
                stderr=None,  # Inherit
            )
            await proc.wait()
            # raw mode never captures output — stdout/stderr stay empty.
            return CommandResult(
                service=service,
                exit_code=proc.returncode or 0,
                success=proc.returncode == 0,
            )

        proc = await asyncio.create_subprocess_shell(
            command,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )

        if stream and proc.stdout and proc.stderr:

            async def read_stream(
                reader: asyncio.StreamReader,
                prefix: str,
                *,
                is_stderr: bool = False,
            ) -> None:
                # Echo each non-empty line with a "[service]" prefix; stderr
                # lines go to the stderr console.
                out = err_console if is_stderr else console
                while True:
                    line = await reader.readline()
                    if not line:
                        break
                    text = line.decode()
                    if text.strip():  # Skip empty lines
                        out.print(f"[cyan]\\[{prefix}][/] {escape(text)}", end="")

            # Drain both pipes concurrently so neither can fill and block
            # the child process.
            await asyncio.gather(
                read_stream(proc.stdout, service),
                read_stream(proc.stderr, service, is_stderr=True),
            )

        stdout_data = b""
        stderr_data = b""
        if not stream:
            # Not streaming: capture everything at once.
            stdout_data, stderr_data = await proc.communicate()
        else:
            # Streaming already consumed the pipes; just reap the child.
            await proc.wait()

        return CommandResult(
            service=service,
            exit_code=proc.returncode or 0,
            success=proc.returncode == 0,
            stdout=stdout_data.decode() if stdout_data else "",
            stderr=stderr_data.decode() if stderr_data else "",
        )
    except OSError as e:
        # Process could not be spawned (e.g. resource limits); report as failure.
        err_console.print(f"[cyan]\\[{service}][/] [red]Local error:[/] {e}")
        return CommandResult(service=service, exit_code=1, success=False)
|
||||
|
||||
|
||||
async def _run_ssh_command(
    host: Host,
    command: str,
    service: str,
    *,
    stream: bool = True,
    raw: bool = False,
) -> CommandResult:
    """Run a command on a remote host via SSH with streaming output.

    Args:
        host: SSH host configuration (address, port, user).
        command: Shell command to execute remotely.
        service: Service name, used as the output prefix.
        stream: When True, echo remote stdout/stderr as it arrives; when
            False, capture output into the returned CommandResult.
        raw: When True, shell out to the native `ssh -t` binary with a TTY
            so progress bars render; output is neither captured nor prefixed.

    Returns:
        CommandResult with the remote exit status and, when stream=False,
        captured output. SSH/network errors yield exit_code=1.
    """
    if raw:
        # Use native ssh with TTY for proper progress bar rendering
        ssh_args = ["ssh", "-t"]
        if host.port != _DEFAULT_SSH_PORT:
            ssh_args.extend(["-p", str(host.port)])
        ssh_args.extend([f"{host.user}@{host.address}", command])
        # Run in thread to avoid blocking the event loop
        result = await asyncio.to_thread(subprocess.run, ssh_args, check=False)
        return CommandResult(
            service=service,
            exit_code=result.returncode,
            success=result.returncode == 0,
        )

    import asyncssh  # noqa: PLC0415 - lazy import for faster CLI startup

    proc: asyncssh.SSHClientProcess[Any]
    try:
        # NOTE(review): known_hosts=None disables host-key verification —
        # presumably deliberate for farm-internal hosts; confirm.
        async with asyncssh.connect(  # noqa: SIM117 - conn needed before create_process
            host.address,
            port=host.port,
            username=host.user,
            known_hosts=None,
        ) as conn:
            async with conn.create_process(command) as proc:
                if stream:

                    async def read_stream(
                        reader: Any,
                        prefix: str,
                        *,
                        is_stderr: bool = False,
                    ) -> None:
                        # Echo each non-empty line with a "[service]" prefix;
                        # stderr lines go to the stderr console.
                        out = err_console if is_stderr else console
                        async for line in reader:
                            if line.strip():  # Skip empty lines
                                out.print(f"[cyan]\\[{prefix}][/] {escape(line)}", end="")

                    # Drain both remote streams concurrently.
                    await asyncio.gather(
                        read_stream(proc.stdout, service),
                        read_stream(proc.stderr, service, is_stderr=True),
                    )

                stdout_data = ""
                stderr_data = ""
                if not stream:
                    # Not streaming: read full output after process setup.
                    stdout_data = await proc.stdout.read()
                    stderr_data = await proc.stderr.read()

                await proc.wait()
                return CommandResult(
                    service=service,
                    exit_code=proc.exit_status or 0,
                    success=proc.exit_status == 0,
                    stdout=stdout_data,
                    stderr=stderr_data,
                )
    except (OSError, asyncssh.Error) as e:
        # Connection/auth/protocol failures are reported as a failed command.
        err_console.print(f"[cyan]\\[{service}][/] [red]SSH error:[/] {e}")
        return CommandResult(service=service, exit_code=1, success=False)
|
||||
|
||||
|
||||
async def run_command(
    host: Host,
    command: str,
    service: str,
    *,
    stream: bool = True,
    raw: bool = False,
) -> CommandResult:
    """Run a command on a host (locally or via SSH).

    Args:
        host: Host configuration
        command: Command to run
        service: Service name (used as prefix in output)
        stream: Whether to stream output (default True)
        raw: Whether to use raw mode with TTY (default False)

    """
    # Dispatch on whether the host resolves to this machine.
    if not is_local(host):
        return await _run_ssh_command(host, command, service, stream=stream, raw=raw)
    return await _run_local_command(command, service, stream=stream, raw=raw)
|
||||
|
||||
|
||||
async def run_compose(
    config: Config,
    service: str,
    compose_cmd: str,
    *,
    stream: bool = True,
    raw: bool = False,
) -> CommandResult:
    """Run a docker compose command for a service."""
    full_command = f"docker compose -f {config.get_compose_path(service)} {compose_cmd}"
    return await run_command(
        config.get_host(service), full_command, service, stream=stream, raw=raw
    )
|
||||
|
||||
|
||||
async def run_compose_on_host(
    config: Config,
    service: str,
    host_name: str,
    compose_cmd: str,
    *,
    stream: bool = True,
    raw: bool = False,
) -> CommandResult:
    """Run a docker compose command for a service on a specific host.

    Used for migration - running 'down' on the old host before 'up' on new host.
    """
    full_command = f"docker compose -f {config.get_compose_path(service)} {compose_cmd}"
    return await run_command(
        config.hosts[host_name], full_command, service, stream=stream, raw=raw
    )
|
||||
|
||||
|
||||
async def run_on_services(
    config: Config,
    services: list[str],
    compose_cmd: str,
    *,
    stream: bool = True,
    raw: bool = False,
) -> list[CommandResult]:
    """Run a docker compose command on multiple services in parallel.

    For multi-host services, runs on all configured hosts.
    Note: raw=True only makes sense for single-service operations.
    """
    # A single command is just the one-element case of the sequential runner.
    return await run_sequential_on_services(
        config,
        services,
        [compose_cmd],
        stream=stream,
        raw=raw,
    )
|
||||
|
||||
|
||||
async def _run_sequential_commands(
    config: Config,
    service: str,
    commands: list[str],
    *,
    stream: bool = True,
    raw: bool = False,
) -> CommandResult:
    """Run multiple compose commands sequentially for a service."""
    for compose_cmd in commands:
        step = await run_compose(config, service, compose_cmd, stream=stream, raw=raw)
        if not step.success:
            # Stop at the first failing step and surface its result.
            return step
    # All steps succeeded; report a synthetic success (per-step output was
    # already streamed/handled by run_compose).
    return CommandResult(service=service, exit_code=0, success=True)
|
||||
|
||||
|
||||
async def _run_sequential_commands_multi_host(
    config: Config,
    service: str,
    commands: list[str],
    *,
    stream: bool = True,
    raw: bool = False,
) -> list[CommandResult]:
    """Run multiple compose commands sequentially for a multi-host service.

    Commands are run sequentially, but each command runs on all hosts in parallel.
    """
    host_names = config.get_hosts(service)
    compose_file = config.get_compose_path(service)
    latest: list[CommandResult] = []

    for compose_cmd in commands:
        full_command = f"docker compose -f {compose_file} {compose_cmd}"
        coros = []
        for name in host_names:
            # Disambiguate output per host only when there is more than one.
            label = f"{service}@{name}" if len(host_names) > 1 else service
            coros.append(
                run_command(config.hosts[name], full_command, label, stream=stream, raw=raw)
            )

        latest = list(await asyncio.gather(*coros))

        # Abort the remaining commands if any host failed this step.
        if not all(r.success for r in latest):
            return latest

    return latest
|
||||
|
||||
|
||||
async def run_sequential_on_services(
    config: Config,
    services: list[str],
    commands: list[str],
    *,
    stream: bool = True,
    raw: bool = False,
) -> list[CommandResult]:
    """Run sequential commands on multiple services in parallel.

    For multi-host services, runs on all configured hosts.
    Note: raw=True only makes sense for single-service operations.
    """
    # Split by host cardinality: the multi-host path returns a list of
    # results per service, the single-host path a single result.
    multi = [s for s in services if config.is_multi_host(s)]
    single = [s for s in services if not config.is_multi_host(s)]

    combined: list[CommandResult] = []

    if multi:
        per_service_lists = await asyncio.gather(
            *(
                _run_sequential_commands_multi_host(config, s, commands, stream=stream, raw=raw)
                for s in multi
            )
        )
        for result_list in per_service_lists:
            combined.extend(result_list)

    if single:
        combined.extend(
            await asyncio.gather(
                *(
                    _run_sequential_commands(config, s, commands, stream=stream, raw=raw)
                    for s in single
                )
            )
        )

    return combined
|
||||
|
||||
|
||||
async def check_service_running(
    config: Config,
    service: str,
    host_name: str,
) -> bool:
    """Check if a service has running containers on a specific host."""
    compose_file = config.get_compose_path(service)
    # -q prints only container IDs; any output means something is running.
    command = f"docker compose -f {compose_file} ps --status running -q"
    result = await run_command(config.hosts[host_name], command, service, stream=False)
    if not result.success:
        return False
    return bool(result.stdout.strip())
|
||||
|
||||
|
||||
async def _batch_check_existence(
    config: Config,
    host_name: str,
    items: list[str],
    cmd_template: Callable[[str], str],
    context: str,
) -> dict[str, bool]:
    """Check existence of multiple items on a host using a command template."""
    if not items:
        return {}

    # Escape single quotes for shell, then join all probes into one command
    # so we make a single round-trip to the host.
    probes = [cmd_template(item.replace("'", "'\\''")) for item in items]
    result = await run_command(
        config.hosts[host_name], "; ".join(probes), context, stream=False
    )

    # Items with no matching Y:/N: line default to "missing".
    status: dict[str, bool] = dict.fromkeys(items, False)
    for raw_line in result.stdout.splitlines():
        stripped = raw_line.strip()
        if stripped[:2] == "Y:":
            status[stripped[2:]] = True
        elif stripped[:2] == "N:":
            status[stripped[2:]] = False

    return status
|
||||
|
||||
|
||||
async def check_paths_exist(
    config: Config,
    host_name: str,
    paths: list[str],
) -> dict[str, bool]:
    """Check if multiple paths exist on a specific host.

    Returns a dict mapping path -> exists.
    Handles permission denied as "exists" (path is there, just not accessible).
    """

    # Only report missing if stat says "No such file", otherwise assume exists
    # (handles permission denied correctly - path exists, just not accessible)
    def probe(esc: str) -> str:
        return f"stat '{esc}' 2>&1 | grep -q 'No such file' && echo 'N:{esc}' || echo 'Y:{esc}'"

    return await _batch_check_existence(config, host_name, paths, probe, "mount-check")
|
||||
|
||||
|
||||
async def check_networks_exist(
    config: Config,
    host_name: str,
    networks: list[str],
) -> dict[str, bool]:
    """Check if Docker networks exist on a specific host.

    Returns a dict mapping network_name -> exists.
    """

    # `docker network inspect` exits 0 only when the network exists.
    def probe(esc: str) -> str:
        return f"docker network inspect '{esc}' >/dev/null 2>&1 && echo 'Y:{esc}' || echo 'N:{esc}'"

    return await _batch_check_existence(config, host_name, networks, probe, "network-check")
|
||||
@@ -6,20 +6,21 @@ import json
|
||||
import tomllib
|
||||
from dataclasses import dataclass
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from .ssh import run_compose
|
||||
from .executor import run_compose
|
||||
from .paths import xdg_config_home
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Awaitable, Callable, Iterable
|
||||
from pathlib import Path
|
||||
|
||||
from .config import Config
|
||||
from .ssh import CommandResult
|
||||
from .executor import CommandResult
|
||||
|
||||
|
||||
DEFAULT_LOG_PATH = Path.home() / ".config" / "compose-farm" / "dockerfarm-log.toml"
|
||||
DIGEST_HEX_LENGTH = 64
|
||||
DEFAULT_LOG_PATH = xdg_config_home() / "compose-farm" / "dockerfarm-log.toml"
|
||||
_DIGEST_HEX_LENGTH = 64
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -46,7 +47,8 @@ class SnapshotEntry:
|
||||
}
|
||||
|
||||
|
||||
def _isoformat(dt: datetime) -> str:
|
||||
def isoformat(dt: datetime) -> str:
|
||||
"""Format a datetime as an ISO 8601 string with Z suffix for UTC."""
|
||||
return dt.astimezone(UTC).replace(microsecond=0).isoformat().replace("+00:00", "Z")
|
||||
|
||||
|
||||
@@ -95,13 +97,13 @@ def _extract_image_fields(record: dict[str, Any]) -> tuple[str, str]:
|
||||
or ""
|
||||
)
|
||||
|
||||
if digest and not digest.startswith("sha256:") and len(digest) == DIGEST_HEX_LENGTH:
|
||||
if digest and not digest.startswith("sha256:") and len(digest) == _DIGEST_HEX_LENGTH:
|
||||
digest = f"sha256:{digest}"
|
||||
|
||||
return image, digest
|
||||
|
||||
|
||||
async def _collect_service_entries(
|
||||
async def collect_service_entries(
|
||||
config: Config,
|
||||
service: str,
|
||||
*,
|
||||
@@ -116,7 +118,8 @@ async def _collect_service_entries(
|
||||
raise RuntimeError(error)
|
||||
|
||||
records = _parse_images_output(result.stdout)
|
||||
host_name = config.services[service]
|
||||
# Use first host for snapshots (multi-host services use same images on all hosts)
|
||||
host_name = config.get_hosts(service)[0]
|
||||
compose_path = config.get_compose_path(service)
|
||||
|
||||
entries: list[SnapshotEntry] = []
|
||||
@@ -137,19 +140,21 @@ async def _collect_service_entries(
|
||||
return entries
|
||||
|
||||
|
||||
def _load_existing_entries(log_path: Path) -> list[dict[str, str]]:
|
||||
def load_existing_entries(log_path: Path) -> list[dict[str, str]]:
|
||||
"""Load existing snapshot entries from a TOML log file."""
|
||||
if not log_path.exists():
|
||||
return []
|
||||
data = tomllib.loads(log_path.read_text())
|
||||
return list(data.get("entries", []))
|
||||
|
||||
|
||||
def _merge_entries(
|
||||
def merge_entries(
|
||||
existing: Iterable[dict[str, str]],
|
||||
new_entries: Iterable[SnapshotEntry],
|
||||
*,
|
||||
now_iso: str,
|
||||
) -> list[dict[str, str]]:
|
||||
"""Merge new snapshot entries with existing ones, preserving first_seen timestamps."""
|
||||
merged: dict[tuple[str, str, str], dict[str, str]] = {
|
||||
(e["service"], e["host"], e["digest"]): dict(e) for e in existing
|
||||
}
|
||||
@@ -162,7 +167,8 @@ def _merge_entries(
|
||||
return list(merged.values())
|
||||
|
||||
|
||||
def _write_toml(log_path: Path, *, meta: dict[str, str], entries: list[dict[str, str]]) -> None:
|
||||
def write_toml(log_path: Path, *, meta: dict[str, str], entries: list[dict[str, str]]) -> None:
|
||||
"""Write snapshot entries to a TOML log file."""
|
||||
lines: list[str] = ["[meta]"]
|
||||
lines.extend(f'{key} = "{_escape(meta[key])}"' for key in sorted(meta))
|
||||
|
||||
@@ -187,45 +193,3 @@ def _write_toml(log_path: Path, *, meta: dict[str, str], entries: list[dict[str,
|
||||
content = "\n".join(lines).rstrip() + "\n"
|
||||
log_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
log_path.write_text(content)
|
||||
|
||||
|
||||
async def snapshot_services(
    config: Config,
    services: list[str],
    *,
    log_path: Path | None = None,
    now: datetime | None = None,
    run_compose_fn: Callable[..., Awaitable[CommandResult]] = run_compose,
) -> Path:
    """Capture current image digests for services and write them to a TOML log.

    - Preserves the earliest `first_seen` per (service, host, digest)
    - Updates `last_seen` for digests observed in this snapshot
    - Leaves untouched digests that were not part of this run (history is kept)
    """
    if not services:
        error = "No services specified for snapshot"
        raise RuntimeError(error)

    target_path = log_path or DEFAULT_LOG_PATH
    moment = now or datetime.now(UTC)
    moment_iso = _isoformat(moment)

    # Read the prior log before capturing, so history is merged, not replaced.
    prior_entries = _load_existing_entries(target_path)

    captured: list[SnapshotEntry] = []
    for name in services:
        service_entries = await _collect_service_entries(
            config, name, now=moment, run_compose_fn=run_compose_fn
        )
        captured.extend(service_entries)

    if not captured:
        error = "No image digests were captured"
        raise RuntimeError(error)

    merged = _merge_entries(prior_entries, captured, now_iso=moment_iso)
    _write_toml(
        target_path,
        meta={"generated_at": moment_iso, "compose_dir": str(config.compose_dir)},
        entries=merged,
    )
    return target_path
|
||||
|
||||
441
src/compose_farm/operations.py
Normal file
441
src/compose_farm/operations.py
Normal file
@@ -0,0 +1,441 @@
|
||||
"""High-level operations for compose-farm.
|
||||
|
||||
Contains the business logic for up, down, sync, check, and migration operations.
|
||||
CLI commands are thin wrappers around these functions.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import TYPE_CHECKING, NamedTuple
|
||||
|
||||
from .compose import parse_devices, parse_external_networks, parse_host_volumes
|
||||
from .console import console, err_console
|
||||
from .executor import (
|
||||
CommandResult,
|
||||
check_networks_exist,
|
||||
check_paths_exist,
|
||||
check_service_running,
|
||||
run_command,
|
||||
run_compose,
|
||||
run_compose_on_host,
|
||||
)
|
||||
from .state import (
|
||||
get_orphaned_services,
|
||||
get_service_host,
|
||||
remove_service,
|
||||
set_multi_host_service,
|
||||
set_service_host,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .config import Config
|
||||
|
||||
|
||||
class OperationInterruptedError(Exception):
    """Raised when a command is interrupted by Ctrl+C.

    Internal control-flow signal: raised when a compose step reports
    ``result.interrupted`` and translated back into ``KeyboardInterrupt``
    at the top-level operation boundary.
    """
|
||||
|
||||
|
||||
class PreflightResult(NamedTuple):
    """Result of pre-flight checks for a service on a host.

    Each field lists the requirements of one category that the host was
    found NOT to satisfy; all three empty means the host can run the service.
    """

    missing_paths: list[str]
    missing_networks: list[str]
    missing_devices: list[str]

    @property
    def ok(self) -> bool:
        """Return True if all checks passed."""
        has_failures = bool(
            self.missing_paths or self.missing_networks or self.missing_devices
        )
        return not has_failures
|
||||
|
||||
|
||||
async def _run_compose_step(
    cfg: Config,
    service: str,
    command: str,
    *,
    raw: bool,
    host: str | None = None,
) -> CommandResult:
    """Run a compose command, handle raw output newline, and check for interrupts.

    Dispatches to the pinned-host runner when ``host`` is given, otherwise to
    the service's configured host.  Raises OperationInterruptedError when the
    result reports an interrupt so callers can unwind uniformly.
    """
    if host is None:
        result = await run_compose(cfg, service, command, raw=raw)
    else:
        result = await run_compose_on_host(cfg, service, host, command, raw=raw)
    if raw:
        print()  # Ensure newline after raw output
    if result.interrupted:
        raise OperationInterruptedError
    return result
|
||||
|
||||
|
||||
def get_service_paths(cfg: Config, service: str) -> list[str]:
    """Get all required paths for a service (compose_dir + volumes)."""
    # The compose directory itself is always required, then any host volumes.
    return [str(cfg.compose_dir), *parse_host_volumes(cfg, service)]
|
||||
|
||||
|
||||
async def discover_service_host(cfg: Config, service: str) -> tuple[str, str | list[str] | None]:
    """Discover where a service is running.

    For multi-host services, checks all assigned hosts in parallel.
    For single-host, checks assigned host first, then others.

    Returns (service_name, host_or_hosts_or_none).
    """
    assigned = cfg.get_hosts(service)

    if cfg.is_multi_host(service):
        # Probe every assigned host concurrently.
        flags = await asyncio.gather(
            *(check_service_running(cfg, service, name) for name in assigned)
        )
        active = [name for name, up in zip(assigned, flags, strict=True) if up]
        return service, active or None

    # Single-host: try the assigned host before scanning the remaining hosts.
    primary = assigned[0]
    if await check_service_running(cfg, service, primary):
        return service, primary
    for candidate in cfg.hosts:
        if candidate == primary:
            continue
        if await check_service_running(cfg, service, candidate):
            return service, candidate
    return service, None
|
||||
|
||||
|
||||
async def check_service_requirements(
    cfg: Config,
    service: str,
    host_name: str,
) -> PreflightResult:
    """Check if a service can run on a specific host.

    Verifies that all required paths (volumes), networks, and devices exist.
    """

    def _absent(found_map: dict[str, bool]) -> list[str]:
        # Keep only the entries the host reported as missing.
        return [item for item, present in found_map.items() if not present]

    # Mount paths (compose dir + host volumes).
    missing_paths = _absent(
        await check_paths_exist(cfg, host_name, get_service_paths(cfg, service))
    )

    # External networks (only queried when the compose file declares any).
    networks = parse_external_networks(cfg, service)
    missing_networks: list[str] = []
    if networks:
        missing_networks = _absent(await check_networks_exist(cfg, host_name, networks))

    # Device nodes (only queried when the compose file declares any).
    devices = parse_devices(cfg, service)
    missing_devices: list[str] = []
    if devices:
        missing_devices = _absent(await check_paths_exist(cfg, host_name, devices))

    return PreflightResult(missing_paths, missing_networks, missing_devices)
|
||||
|
||||
|
||||
async def _cleanup_and_rollback(
    cfg: Config,
    service: str,
    target_host: str,
    current_host: str,
    prefix: str,
    *,
    was_running: bool,
    raw: bool = False,
) -> None:
    """Clean up failed start and attempt rollback to old host if it was running."""
    err_console.print(
        f"{prefix} [yellow]![/] Cleaning up failed start on [magenta]{target_host}[/]"
    )
    # Tear down whatever the failed `up` left behind on the target.
    await run_compose(cfg, service, "down", raw=raw)

    if not was_running:
        # Nothing to restore: the service was already stopped on the old host.
        err_console.print(
            f"{prefix} [dim]Service was not running on [magenta]{current_host}[/], skipping rollback[/]"
        )
        return

    err_console.print(f"{prefix} [yellow]![/] Rolling back to [magenta]{current_host}[/]...")
    rollback = await run_compose_on_host(cfg, service, current_host, "up -d", raw=raw)
    if not rollback.success:
        err_console.print(f"{prefix} [red]✗[/] Rollback failed - service is down")
        return
    console.print(f"{prefix} [green]✓[/] Rollback succeeded on [magenta]{current_host}[/]")
|
||||
|
||||
|
||||
def _report_preflight_failures(
    service: str,
    target_host: str,
    preflight: PreflightResult,
) -> None:
    """Report pre-flight check failures."""
    err_console.print(
        f"[cyan]\\[{service}][/] [red]✗[/] Cannot start on [magenta]{target_host}[/]:"
    )
    for item in preflight.missing_paths:
        err_console.print(f" [red]✗[/] missing path: {item}")
    for item in preflight.missing_networks:
        err_console.print(f" [red]✗[/] missing network: {item}")
    if preflight.missing_networks:
        # Point at the command that creates the shared network.
        err_console.print(f" [dim]hint: cf init-network {target_host}[/]")
    for item in preflight.missing_devices:
        err_console.print(f" [red]✗[/] missing device: {item}")
|
||||
|
||||
|
||||
async def _up_multi_host_service(
    cfg: Config,
    service: str,
    prefix: str,
    *,
    raw: bool = False,
) -> list[CommandResult]:
    """Start a multi-host service on all configured hosts.

    Pre-flight checks run on every host before anything is started; all
    failures are reported together so the operator sees the full picture in
    one run (previously the check bailed on the first failing host, hiding
    problems on the remaining hosts).  If any host fails pre-flight, nothing
    is started and one failure result per failing host is returned.
    Otherwise the service is started host-by-host and partial success is
    recorded in state.
    """
    host_names = cfg.get_hosts(service)
    compose_path = cfg.get_compose_path(service)
    command = f"docker compose -f {compose_path} up -d"

    # Pre-flight checks on all hosts; collect every failure before bailing.
    preflight_failures: list[CommandResult] = []
    for host_name in host_names:
        preflight = await check_service_requirements(cfg, service, host_name)
        if not preflight.ok:
            _report_preflight_failures(service, host_name, preflight)
            preflight_failures.append(
                CommandResult(service=f"{service}@{host_name}", exit_code=1, success=False)
            )
    if preflight_failures:
        return preflight_failures

    # Start on all hosts
    hosts_str = ", ".join(f"[magenta]{h}[/]" for h in host_names)
    console.print(f"{prefix} Starting on {hosts_str}...")

    results: list[CommandResult] = []
    succeeded_hosts: list[str] = []
    for host_name in host_names:
        host = cfg.hosts[host_name]
        label = f"{service}@{host_name}"
        result = await run_command(host, command, label, stream=not raw, raw=raw)
        if raw:
            print()  # Ensure newline after raw output
        results.append(result)
        if result.success:
            succeeded_hosts.append(host_name)

    # Update state with hosts that succeeded (partial success is tracked)
    if succeeded_hosts:
        set_multi_host_service(cfg, service, succeeded_hosts)

    return results
|
||||
|
||||
|
||||
async def _migrate_service(
    cfg: Config,
    service: str,
    current_host: str,
    target_host: str,
    prefix: str,
    *,
    raw: bool = False,
) -> CommandResult | None:
    """Migrate a service from current_host to target_host.

    Pre-pulls/builds images on target, then stops service on current host.
    Returns failure result if migration prep fails, None on success.
    """
    console.print(
        f"{prefix} Migrating from [magenta]{current_host}[/] → [magenta]{target_host}[/]..."
    )

    # Prepare images on target host before stopping old service to minimize downtime.
    # Pull handles image-based services; build handles Dockerfile-based services.
    # --ignore-buildable makes pull skip images that have build: defined.
    prep_steps = (("pull --ignore-buildable", "Pull"), ("build", "Build"))
    for cmd, label in prep_steps:
        step = await _run_compose_step(cfg, service, cmd, raw=raw)
        if step.success:
            continue
        err_console.print(
            f"{prefix} [red]✗[/] {label} failed on [magenta]{target_host}[/], "
            "leaving service on current host"
        )
        return step

    # Stop on current host
    down = await _run_compose_step(cfg, service, "down", raw=raw, host=current_host)
    return None if down.success else down
|
||||
|
||||
|
||||
async def _up_single_service(
    cfg: Config,
    service: str,
    prefix: str,
    *,
    raw: bool,
) -> CommandResult:
    """Start a single-host service with migration support.

    Flow: pre-flight check on the configured target host; if the state file
    tracks the service on a different host, migrate it (pull/build on the
    target, then `down` on the old host); finally `up -d` on the target.
    On success the state file is updated; on a failed start after a
    migration, the target is cleaned up and the service is rolled back to
    its previous host.
    """
    target_host = cfg.get_hosts(service)[0]
    current_host = get_service_host(cfg, service)

    # Pre-flight check: verify paths, networks, and devices exist on target
    preflight = await check_service_requirements(cfg, service, target_host)
    if not preflight.ok:
        _report_preflight_failures(service, target_host, preflight)
        return CommandResult(service=service, exit_code=1, success=False)

    # If service is deployed elsewhere, migrate it
    did_migration = False
    was_running = False
    if current_host and current_host != target_host:
        if current_host in cfg.hosts:
            # Remember whether it was actually up so rollback can be skipped
            # for services that were tracked but stopped.
            was_running = await check_service_running(cfg, service, current_host)
            failure = await _migrate_service(
                cfg, service, current_host, target_host, prefix, raw=raw
            )
            if failure:
                return failure
            did_migration = True
        else:
            # Tracked host was removed from config: we cannot stop it there.
            err_console.print(
                f"{prefix} [yellow]![/] was on "
                f"[magenta]{current_host}[/] (not in config), skipping down"
            )

    # Start on target host
    console.print(f"{prefix} Starting on [magenta]{target_host}[/]...")
    up_result = await _run_compose_step(cfg, service, "up -d", raw=raw)

    # Update state on success, or rollback on failure
    if up_result.success:
        set_service_host(cfg, service, target_host)
    elif did_migration and current_host:
        await _cleanup_and_rollback(
            cfg,
            service,
            target_host,
            current_host,
            prefix,
            was_running=was_running,
            raw=raw,
        )

    return up_result
|
||||
|
||||
|
||||
async def up_services(
    cfg: Config,
    services: list[str],
    *,
    raw: bool = False,
) -> list[CommandResult]:
    """Start services with automatic migration if host changed.

    Services are processed one at a time; an interrupt inside any step is
    surfaced to the caller as KeyboardInterrupt.
    """
    total = len(services)
    outcome: list[CommandResult] = []

    try:
        for position, service in enumerate(services, start=1):
            prefix = f"[dim][{position}/{total}][/] [cyan]\\[{service}][/]"
            if cfg.is_multi_host(service):
                outcome.extend(await _up_multi_host_service(cfg, service, prefix, raw=raw))
            else:
                outcome.append(await _up_single_service(cfg, service, prefix, raw=raw))
    except OperationInterruptedError:
        raise KeyboardInterrupt from None

    return outcome
|
||||
|
||||
|
||||
async def check_host_compatibility(
    cfg: Config,
    service: str,
) -> dict[str, tuple[int, int, list[str]]]:
    """Check which hosts can run a service based on paths, networks, and devices.

    Returns dict of host_name -> (found_count, total_count, missing_items).
    """
    # Total requirement count across all three categories.
    requirement_total = (
        len(get_service_paths(cfg, service))
        + len(parse_external_networks(cfg, service))
        + len(parse_devices(cfg, service))
    )

    compatibility: dict[str, tuple[int, int, list[str]]] = {}
    for host_name in cfg.hosts:
        preflight = await check_service_requirements(cfg, service, host_name)
        missing = [
            *preflight.missing_paths,
            *preflight.missing_networks,
            *preflight.missing_devices,
        ]
        compatibility[host_name] = (
            requirement_total - len(missing),
            requirement_total,
            missing,
        )

    return compatibility
|
||||
|
||||
|
||||
async def stop_orphaned_services(cfg: Config) -> list[CommandResult]:
    """Stop orphaned services (in state but not in config).

    Runs docker compose down on each service on its tracked host(s).
    Only removes a service from state when every one of its tracked hosts
    stopped successfully (hosts missing from config count as failures).

    Returns list of CommandResults for each service@host.
    """
    orphaned = get_orphaned_services(cfg)
    if not orphaned:
        return []

    results: list[CommandResult] = []
    tasks: list[tuple[str, str, asyncio.Task[CommandResult]]] = []

    # Build list of (service, host, task) for all orphaned services.
    # create_task schedules each `down` immediately, so they run concurrently.
    for service, hosts in orphaned.items():
        host_list = hosts if isinstance(hosts, list) else [hosts]
        for host in host_list:
            # Skip hosts no longer in config
            if host not in cfg.hosts:
                console.print(
                    f" [yellow]![/] {service}@{host}: host no longer in config, skipping"
                )
                results.append(
                    CommandResult(
                        service=f"{service}@{host}",
                        exit_code=1,
                        success=False,
                        stderr="host no longer in config",
                    )
                )
                continue
            coro = run_compose_on_host(cfg, service, host, "down")
            tasks.append((service, host, asyncio.create_task(coro)))

    # Await the already-running down commands and report each outcome.
    for service, host, task in tasks:
        try:
            result = await task
            results.append(result)
            if result.success:
                console.print(f" [green]✓[/] {service}@{host}: stopped")
            else:
                console.print(f" [red]✗[/] {service}@{host}: {result.stderr or 'failed'}")
        except Exception as e:  # boundary: record the failure, keep going
            console.print(f" [red]✗[/] {service}@{host}: {e}")
            results.append(
                CommandResult(
                    service=f"{service}@{host}",
                    exit_code=1,
                    success=False,
                    stderr=str(e),
                )
            )

    # Remove from state only for services where ALL hosts succeeded.
    # (Dead `host_list` computation removed here: only the service name is
    # needed; results are matched by their "service@host" / "service" label.)
    for service in orphaned:
        all_succeeded = all(
            r.success
            for r in results
            if r.service.startswith(f"{service}@") or r.service == service
        )
        if all_succeeded:
            remove_service(cfg, service)

    return results
|
||||
21
src/compose_farm/paths.py
Normal file
21
src/compose_farm/paths.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Path utilities - lightweight module with no heavy dependencies."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def xdg_config_home() -> Path:
    """Get XDG config directory, respecting XDG_CONFIG_HOME env var."""
    # Note: the env value is used verbatim, even if it is an empty string.
    fallback = Path.home() / ".config"
    return Path(os.environ.get("XDG_CONFIG_HOME", fallback))
|
||||
|
||||
|
||||
def default_config_path() -> Path:
    """Get the default user config path."""
    base = xdg_config_home()
    return base / "compose-farm" / "compose-farm.yaml"
|
||||
|
||||
|
||||
def config_search_paths() -> list[Path]:
    """Get search paths for config files.

    The current working directory is tried first, then the user config dir.
    """
    cwd_candidate = Path("compose-farm.yaml")
    return [cwd_candidate, default_config_path()]
|
||||
@@ -1,244 +0,0 @@
|
||||
"""Command execution via SSH or locally."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import asyncssh
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .config import Config, Host
|
||||
|
||||
LOCAL_ADDRESSES = frozenset({"local", "localhost", "127.0.0.1", "::1"})
|
||||
|
||||
|
||||
@dataclass
|
||||
class CommandResult:
|
||||
"""Result of a command execution."""
|
||||
|
||||
service: str
|
||||
exit_code: int
|
||||
success: bool
|
||||
stdout: str = ""
|
||||
stderr: str = ""
|
||||
|
||||
|
||||
def _is_local(host: Host) -> bool:
|
||||
"""Check if host should run locally (no SSH)."""
|
||||
return host.address.lower() in LOCAL_ADDRESSES
|
||||
|
||||
|
||||
async def _run_local_command(
    command: str,
    service: str,
    *,
    stream: bool = True,
) -> CommandResult:
    """Run a command locally with streaming output.

    When ``stream`` is True, stdout/stderr lines are echoed live with a
    ``[service]`` prefix and the returned result carries empty stdout/stderr;
    when False, output is captured and returned on the result instead.
    """
    try:
        proc = await asyncio.create_subprocess_shell(
            command,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )

        if stream and proc.stdout and proc.stderr:

            async def read_stream(
                reader: asyncio.StreamReader,
                prefix: str,
                *,
                is_stderr: bool = False,
            ) -> None:
                # Mirror each line to our own stdout/stderr as it arrives.
                output = sys.stderr if is_stderr else sys.stdout
                while True:
                    line = await reader.readline()
                    if not line:
                        break
                    print(f"[{prefix}] {line.decode()}", end="", file=output, flush=True)

            # Drain both pipes concurrently so a full pipe buffer on one
            # stream cannot deadlock the child process.
            await asyncio.gather(
                read_stream(proc.stdout, service),
                read_stream(proc.stderr, service, is_stderr=True),
            )

        stdout_data = b""
        stderr_data = b""
        if not stream:
            # Non-streaming: capture output for the caller.
            stdout_data, stderr_data = await proc.communicate()
        else:
            await proc.wait()

        return CommandResult(
            service=service,
            exit_code=proc.returncode or 0,
            success=proc.returncode == 0,
            stdout=stdout_data.decode() if stdout_data else "",
            stderr=stderr_data.decode() if stderr_data else "",
        )
    except OSError as e:
        # Shell could not be spawned (or pipe error): report as exit code 1.
        print(f"[{service}] Local error: {e}", file=sys.stderr)
        return CommandResult(service=service, exit_code=1, success=False)
|
||||
|
||||
|
||||
async def _run_ssh_command(
    host: Host,
    command: str,
    service: str,
    *,
    stream: bool = True,
) -> CommandResult:
    """Run a command on a remote host via SSH with streaming output.

    Mirrors ``_run_local_command`` but over asyncssh.  NOTE(review):
    ``known_hosts=None`` disables host-key verification — confirm this is
    intentional for the deployment environment.
    """
    proc: asyncssh.SSHClientProcess[Any]
    try:
        async with (
            asyncssh.connect(
                host.address,
                port=host.port,
                username=host.user,
                known_hosts=None,
            ) as conn,
            conn.create_process(command) as proc,
        ):
            if stream:

                async def read_stream(
                    reader: Any,
                    prefix: str,
                    *,
                    is_stderr: bool = False,
                ) -> None:
                    # Mirror remote output locally, line by line, as it arrives.
                    output = sys.stderr if is_stderr else sys.stdout
                    async for line in reader:
                        print(f"[{prefix}] {line}", end="", file=output, flush=True)

                # Drain both channels concurrently.
                await asyncio.gather(
                    read_stream(proc.stdout, service),
                    read_stream(proc.stderr, service, is_stderr=True),
                )

            stdout_data = ""
            stderr_data = ""
            if not stream:
                # Non-streaming: capture both channels for the caller.
                stdout_data = await proc.stdout.read()
                stderr_data = await proc.stderr.read()

            await proc.wait()
            return CommandResult(
                service=service,
                exit_code=proc.exit_status or 0,
                success=proc.exit_status == 0,
                stdout=stdout_data,
                stderr=stderr_data,
            )
    except (OSError, asyncssh.Error) as e:
        # Connection/auth/transport failure: report as exit code 1.
        print(f"[{service}] SSH error: {e}", file=sys.stderr)
        return CommandResult(service=service, exit_code=1, success=False)
|
||||
|
||||
|
||||
async def run_command(
    host: Host,
    command: str,
    service: str,
    *,
    stream: bool = True,
) -> CommandResult:
    """Run a command on a host (locally or via SSH)."""
    if not _is_local(host):
        return await _run_ssh_command(host, command, service, stream=stream)
    return await _run_local_command(command, service, stream=stream)
|
||||
|
||||
|
||||
async def run_compose(
    config: Config,
    service: str,
    compose_cmd: str,
    *,
    stream: bool = True,
) -> CommandResult:
    """Run a docker compose command for a service on its configured host."""
    compose_path = config.get_compose_path(service)
    full_command = f"docker compose -f {compose_path} {compose_cmd}"
    return await run_command(config.get_host(service), full_command, service, stream=stream)
|
||||
|
||||
|
||||
async def run_compose_on_host(
    config: Config,
    service: str,
    host_name: str,
    compose_cmd: str,
    *,
    stream: bool = True,
) -> CommandResult:
    """Run a docker compose command for a service on a specific host.

    Used for migration - running 'down' on the old host before 'up' on new host.
    """
    compose_path = config.get_compose_path(service)
    full_command = f"docker compose -f {compose_path} {compose_cmd}"
    return await run_command(config.hosts[host_name], full_command, service, stream=stream)
|
||||
|
||||
|
||||
async def run_on_services(
    config: Config,
    services: list[str],
    compose_cmd: str,
    *,
    stream: bool = True,
) -> list[CommandResult]:
    """Run a docker compose command on multiple services in parallel."""
    return await asyncio.gather(
        *(run_compose(config, svc, compose_cmd, stream=stream) for svc in services)
    )
|
||||
|
||||
|
||||
async def run_sequential_commands(
    config: Config,
    service: str,
    commands: list[str],
    *,
    stream: bool = True,
) -> CommandResult:
    """Run multiple compose commands sequentially for a service.

    Stops at the first failing command and returns its result; returns a
    synthetic success result when every command succeeds.
    """
    for compose_cmd in commands:
        outcome = await run_compose(config, service, compose_cmd, stream=stream)
        if not outcome.success:
            return outcome
    return CommandResult(service=service, exit_code=0, success=True)
|
||||
|
||||
|
||||
async def run_sequential_on_services(
    config: Config,
    services: list[str],
    commands: list[str],
    *,
    stream: bool = True,
) -> list[CommandResult]:
    """Run sequential commands on multiple services in parallel."""
    return await asyncio.gather(
        *(
            run_sequential_commands(config, svc, commands, stream=stream)
            for svc in services
        )
    )
|
||||
|
||||
|
||||
async def check_service_running(
    config: Config,
    service: str,
    host_name: str,
) -> bool:
    """Check if a service has running containers on a specific host."""
    compose_path = config.get_compose_path(service)
    # Use ps --status running to check for running containers
    probe = f"docker compose -f {compose_path} ps --status running -q"
    result = await run_command(config.hosts[host_name], probe, service, stream=False)
    # Non-empty stdout means at least one container id was listed.
    return result.success and bool(result.stdout.strip())
|
||||
@@ -2,57 +2,165 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
import contextlib
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import yaml
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Generator
|
||||
|
||||
def _get_state_path() -> Path:
|
||||
"""Get the path to the state file."""
|
||||
state_dir = Path.home() / ".config" / "compose-farm"
|
||||
state_dir.mkdir(parents=True, exist_ok=True)
|
||||
return state_dir / "state.yaml"
|
||||
from .config import Config
|
||||
|
||||
|
||||
def load_state(config: Config) -> dict[str, str | list[str]]:
    """Load the current deployment state.

    Returns a dict mapping service names to host name(s).
    Multi-host services store a list of hosts.
    """
    state_path = config.get_state_path()
    if not state_path.exists():
        return {}

    with state_path.open() as f:
        raw: dict[str, Any] = yaml.safe_load(f) or {}

    deployed: dict[str, str | list[str]] = raw.get("deployed", {})
    return deployed
|
||||
|
||||
|
||||
def save_state(deployed: dict[str, str]) -> None:
|
||||
def _sorted_dict(d: dict[str, str | list[str]]) -> dict[str, str | list[str]]:
|
||||
"""Return a dictionary sorted by keys."""
|
||||
return dict(sorted(d.items(), key=lambda item: item[0]))
|
||||
|
||||
|
||||
def save_state(config: Config, deployed: dict[str, str | list[str]]) -> None:
    """Save the deployment state (services sorted by name for stable diffs)."""
    state_path = config.get_state_path()
    with state_path.open("w") as f:
        yaml.safe_dump({"deployed": _sorted_dict(deployed)}, f, sort_keys=False)
|
||||
|
||||
|
||||
def get_service_host(service: str) -> str | None:
|
||||
"""Get the host where a service is currently deployed."""
|
||||
state = load_state()
|
||||
return state.get(service)
|
||||
@contextlib.contextmanager
def _modify_state(config: Config) -> Generator[dict[str, str | list[str]], None, None]:
    """Context manager to load, modify, and save state.

    NOTE: the state is written back only when the body completes without
    raising; an exception inside the ``with`` block skips the save.
    """
    snapshot = load_state(config)
    yield snapshot
    save_state(config, snapshot)
|
||||
|
||||
|
||||
def set_service_host(service: str, host: str) -> None:
|
||||
def get_service_host(config: Config, service: str) -> str | None:
    """Get the host where a service is currently deployed.

    For multi-host services, returns the first host or None.
    """
    entry = load_state(config).get(service)
    if isinstance(entry, list):
        return entry[0] if entry else None
    return entry
|
||||
|
||||
|
||||
def set_service_host(config: Config, service: str, host: str) -> None:
    """Record that a service is deployed on a host."""
    with _modify_state(config) as state:
        state[service] = host
|
||||
|
||||
|
||||
def remove_service(service: str) -> None:
|
||||
def set_multi_host_service(config: Config, service: str, hosts: list[str]) -> None:
    """Record that a multi-host service is deployed on multiple hosts."""
    with _modify_state(config) as state:
        state[service] = hosts
|
||||
|
||||
|
||||
def remove_service(config: Config, service: str) -> None:
    """Remove a service from the state (after down)."""
    with _modify_state(config) as state:
        state.pop(service, None)
|
||||
|
||||
|
||||
def add_service_to_host(config: Config, service: str, host: str) -> None:
    """Add a specific host to a service's state.

    For multi-host services, adds the host to the list if not present.
    For single-host services, sets the host.
    """
    with _modify_state(config) as state:
        if not config.is_multi_host(service):
            # Single-host: just set it
            state[service] = host
            return

        # Multi-host: append to the tracked list, creating it if needed.
        existing = state.get(service)
        if not isinstance(existing, list):
            state[service] = [host]
        elif host not in existing:
            state[service] = [*existing, host]
|
||||
|
||||
|
||||
def remove_service_from_host(config: Config, service: str, host: str) -> None:
    """Remove a specific host from a service's state.

    For multi-host services, removes just that host from the list.
    For single-host services, removes the service entirely if host matches.
    """
    with _modify_state(config) as state:
        tracked = state.get(service)
        if tracked is None:
            return

        if isinstance(tracked, list):
            # Multi-host: drop this host, removing the service if none remain.
            kept = [entry for entry in tracked if entry != host]
            if kept:
                state[service] = kept
            else:
                state.pop(service, None)
        elif tracked == host:
            # Single-host: remove if matches
            state.pop(service, None)
|
||||
|
||||
|
||||
def get_services_needing_migration(config: Config) -> list[str]:
    """Get services where current host differs from configured host.

    Multi-host services are never considered for migration.
    """
    pending: list[str] = []
    for service in config.services:
        if config.is_multi_host(service):
            continue
        desired = config.get_hosts(service)[0]
        actual = get_service_host(config, service)
        if actual and actual != desired:
            pending.append(service)
    return pending
|
||||
|
||||
|
||||
def get_orphaned_services(config: Config) -> dict[str, str | list[str]]:
    """Get services that are in state but not in config.

    These are services that were previously deployed but have been
    removed from the config file (e.g., commented out).

    Returns a dict mapping service name to host(s) where it's deployed.
    """
    known = config.services
    tracked = load_state(config)
    return {svc: where for svc, where in tracked.items() if svc not in known}
|
||||
|
||||
|
||||
def get_services_not_in_state(config: Config) -> list[str]:
|
||||
"""Get services that are in config but not in state.
|
||||
|
||||
These are services that should be running but aren't tracked
|
||||
(e.g., newly added to config, or previously stopped as orphans).
|
||||
"""
|
||||
state = load_state(config)
|
||||
return [service for service in config.services if service not in state]
|
||||
|
||||
@@ -8,30 +8,25 @@ use host-published ports for cross-host reachability.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import yaml
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pathlib import Path
|
||||
from .compose import (
|
||||
PortMapping,
|
||||
get_ports_for_service,
|
||||
load_compose_services,
|
||||
normalize_labels,
|
||||
)
|
||||
from .executor import LOCAL_ADDRESSES
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .config import Config
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class PortMapping:
|
||||
"""Port mapping for a compose service."""
|
||||
|
||||
target: int
|
||||
published: int | None
|
||||
protocol: str | None = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class TraefikServiceSource:
|
||||
class _TraefikServiceSource:
|
||||
"""Source information to build an upstream for a Traefik service."""
|
||||
|
||||
traefik_service: str
|
||||
@@ -43,129 +38,9 @@ class TraefikServiceSource:
|
||||
scheme: str | None = None
|
||||
|
||||
|
||||
LIST_VALUE_KEYS = {"entrypoints", "middlewares"}
|
||||
SINGLE_PART = 1
|
||||
PUBLISHED_TARGET_PARTS = 2
|
||||
HOST_PUBLISHED_PARTS = 3
|
||||
MIN_ROUTER_PARTS = 3
|
||||
MIN_SERVICE_LABEL_PARTS = 6
|
||||
_VAR_PATTERN = re.compile(r"\$\{([A-Za-z_][A-Za-z0-9_]*)(?::-(.*?))?\}")
|
||||
|
||||
|
||||
def _load_env(compose_path: Path) -> dict[str, str]:
|
||||
"""Load environment variables for compose interpolation."""
|
||||
env: dict[str, str] = {}
|
||||
env_path = compose_path.parent / ".env"
|
||||
if env_path.exists():
|
||||
for line in env_path.read_text().splitlines():
|
||||
stripped = line.strip()
|
||||
if not stripped or stripped.startswith("#") or "=" not in stripped:
|
||||
continue
|
||||
key, value = stripped.split("=", 1)
|
||||
key = key.strip()
|
||||
value = value.strip()
|
||||
if (value.startswith('"') and value.endswith('"')) or (
|
||||
value.startswith("'") and value.endswith("'")
|
||||
):
|
||||
value = value[1:-1]
|
||||
env[key] = value
|
||||
env.update({k: v for k, v in os.environ.items() if isinstance(v, str)})
|
||||
return env
|
||||
|
||||
|
||||
def _interpolate(value: str, env: dict[str, str]) -> str:
|
||||
"""Perform a minimal `${VAR}`/`${VAR:-default}` interpolation."""
|
||||
|
||||
def replace(match: re.Match[str]) -> str:
|
||||
var = match.group(1)
|
||||
default = match.group(2)
|
||||
resolved = env.get(var)
|
||||
if resolved:
|
||||
return resolved
|
||||
return default or ""
|
||||
|
||||
return _VAR_PATTERN.sub(replace, value)
|
||||
|
||||
|
||||
def _normalize_labels(raw: Any, env: dict[str, str]) -> dict[str, str]:
|
||||
if raw is None:
|
||||
return {}
|
||||
if isinstance(raw, dict):
|
||||
return {
|
||||
_interpolate(str(k), env): _interpolate(str(v), env)
|
||||
for k, v in raw.items()
|
||||
if k is not None
|
||||
}
|
||||
if isinstance(raw, list):
|
||||
labels: dict[str, str] = {}
|
||||
for item in raw:
|
||||
if not isinstance(item, str) or "=" not in item:
|
||||
continue
|
||||
key_raw, value_raw = item.split("=", 1)
|
||||
key = _interpolate(key_raw.strip(), env)
|
||||
value = _interpolate(value_raw.strip(), env)
|
||||
labels[key] = value
|
||||
return labels
|
||||
return {}
|
||||
|
||||
|
||||
def _parse_ports(raw: Any, env: dict[str, str]) -> list[PortMapping]: # noqa: PLR0912
|
||||
if raw is None:
|
||||
return []
|
||||
mappings: list[PortMapping] = []
|
||||
|
||||
items = raw if isinstance(raw, list) else [raw]
|
||||
|
||||
for item in items:
|
||||
if isinstance(item, str):
|
||||
interpolated = _interpolate(item, env)
|
||||
port_spec, _, protocol = interpolated.partition("/")
|
||||
parts = port_spec.split(":")
|
||||
published: int | None = None
|
||||
target: int | None = None
|
||||
|
||||
if len(parts) == SINGLE_PART and parts[0].isdigit():
|
||||
target = int(parts[0])
|
||||
elif len(parts) == PUBLISHED_TARGET_PARTS and parts[0].isdigit() and parts[1].isdigit():
|
||||
published = int(parts[0])
|
||||
target = int(parts[1])
|
||||
elif len(parts) == HOST_PUBLISHED_PARTS and parts[-2].isdigit() and parts[-1].isdigit():
|
||||
published = int(parts[-2])
|
||||
target = int(parts[-1])
|
||||
|
||||
if target is not None:
|
||||
mappings.append(
|
||||
PortMapping(target=target, published=published, protocol=protocol or None)
|
||||
)
|
||||
elif isinstance(item, dict):
|
||||
target_raw = item.get("target")
|
||||
if isinstance(target_raw, str):
|
||||
target_raw = _interpolate(target_raw, env)
|
||||
if target_raw is None:
|
||||
continue
|
||||
try:
|
||||
target_val = int(str(target_raw))
|
||||
except (TypeError, ValueError):
|
||||
continue
|
||||
|
||||
published_raw = item.get("published")
|
||||
if isinstance(published_raw, str):
|
||||
published_raw = _interpolate(published_raw, env)
|
||||
published_val: int | None
|
||||
try:
|
||||
published_val = int(str(published_raw)) if published_raw is not None else None
|
||||
except (TypeError, ValueError):
|
||||
published_val = None
|
||||
protocol_val = item.get("protocol")
|
||||
mappings.append(
|
||||
PortMapping(
|
||||
target=target_val,
|
||||
published=published_val,
|
||||
protocol=str(protocol_val) if protocol_val else None,
|
||||
)
|
||||
)
|
||||
|
||||
return mappings
|
||||
_LIST_VALUE_KEYS = {"entrypoints", "middlewares"}
|
||||
_MIN_ROUTER_PARTS = 3
|
||||
_MIN_SERVICE_LABEL_PARTS = 6
|
||||
|
||||
|
||||
def _parse_value(key: str, raw_value: str) -> Any:
|
||||
@@ -176,7 +51,7 @@ def _parse_value(key: str, raw_value: str) -> Any:
|
||||
if value.isdigit():
|
||||
return int(value)
|
||||
last_segment = key.rsplit(".", 1)[-1]
|
||||
if last_segment in LIST_VALUE_KEYS:
|
||||
if last_segment in _LIST_VALUE_KEYS:
|
||||
parts = [v.strip() for v in value.split(",")] if "," in value else [value]
|
||||
return [part for part in parts if part]
|
||||
return value
|
||||
@@ -227,7 +102,7 @@ def _insert(root: dict[str, Any], key_path: list[str], value: Any) -> None: # n
|
||||
current = container_list[list_index]
|
||||
|
||||
|
||||
def _resolve_published_port(source: TraefikServiceSource) -> tuple[int | None, str | None]:
|
||||
def _resolve_published_port(source: _TraefikServiceSource) -> tuple[int | None, str | None]:
|
||||
"""Resolve host-published port for a Traefik service.
|
||||
|
||||
Returns (published_port, warning_message).
|
||||
@@ -263,23 +138,9 @@ def _resolve_published_port(source: TraefikServiceSource) -> tuple[int | None, s
|
||||
)
|
||||
|
||||
|
||||
def _load_stack(config: Config, stack: str) -> tuple[dict[str, Any], dict[str, str], str]:
|
||||
compose_path = config.get_compose_path(stack)
|
||||
if not compose_path.exists():
|
||||
message = f"[{stack}] Compose file not found: {compose_path}"
|
||||
raise FileNotFoundError(message)
|
||||
|
||||
env = _load_env(compose_path)
|
||||
compose_data = yaml.safe_load(compose_path.read_text()) or {}
|
||||
raw_services = compose_data.get("services", {})
|
||||
if not isinstance(raw_services, dict):
|
||||
return {}, env, config.get_host(stack).address
|
||||
return raw_services, env, config.get_host(stack).address
|
||||
|
||||
|
||||
def _finalize_http_services(
|
||||
dynamic: dict[str, Any],
|
||||
sources: dict[str, TraefikServiceSource],
|
||||
sources: dict[str, _TraefikServiceSource],
|
||||
warnings: list[str],
|
||||
) -> None:
|
||||
for traefik_service, source in sources.items():
|
||||
@@ -289,8 +150,8 @@ def _finalize_http_services(
|
||||
if published_port is None:
|
||||
warnings.append(
|
||||
f"[{source.stack}/{source.compose_service}] "
|
||||
f"No host-published port found for Traefik service '{traefik_service}'. "
|
||||
"Traefik will require L3 reachability to container IPs."
|
||||
f"No published port found for Traefik service '{traefik_service}'. "
|
||||
"Add a ports: mapping (e.g., '8080:8080') for cross-host routing."
|
||||
)
|
||||
continue
|
||||
|
||||
@@ -350,7 +211,7 @@ def _process_router_label(
|
||||
if not key_without_prefix.startswith("http.routers."):
|
||||
return
|
||||
router_parts = key_without_prefix.split(".")
|
||||
if len(router_parts) < MIN_ROUTER_PARTS:
|
||||
if len(router_parts) < _MIN_ROUTER_PARTS:
|
||||
return
|
||||
router_name = router_parts[2]
|
||||
router_remainder = router_parts[3:]
|
||||
@@ -368,12 +229,12 @@ def _process_service_label(
|
||||
host_address: str,
|
||||
ports: list[PortMapping],
|
||||
service_names: set[str],
|
||||
sources: dict[str, TraefikServiceSource],
|
||||
sources: dict[str, _TraefikServiceSource],
|
||||
) -> None:
|
||||
if not key_without_prefix.startswith("http.services."):
|
||||
return
|
||||
parts = key_without_prefix.split(".")
|
||||
if len(parts) < MIN_SERVICE_LABEL_PARTS:
|
||||
if len(parts) < _MIN_SERVICE_LABEL_PARTS:
|
||||
return
|
||||
traefik_service = parts[2]
|
||||
service_names.add(traefik_service)
|
||||
@@ -381,7 +242,7 @@ def _process_service_label(
|
||||
|
||||
source = sources.get(traefik_service)
|
||||
if source is None:
|
||||
source = TraefikServiceSource(
|
||||
source = _TraefikServiceSource(
|
||||
traefik_service=traefik_service,
|
||||
stack=stack,
|
||||
compose_service=compose_service,
|
||||
@@ -402,20 +263,21 @@ def _process_service_labels(
|
||||
stack: str,
|
||||
compose_service: str,
|
||||
definition: dict[str, Any],
|
||||
all_services: dict[str, Any],
|
||||
host_address: str,
|
||||
env: dict[str, str],
|
||||
dynamic: dict[str, Any],
|
||||
sources: dict[str, TraefikServiceSource],
|
||||
sources: dict[str, _TraefikServiceSource],
|
||||
warnings: list[str],
|
||||
) -> None:
|
||||
labels = _normalize_labels(definition.get("labels"), env)
|
||||
labels = normalize_labels(definition.get("labels"), env)
|
||||
if not labels:
|
||||
return
|
||||
enable_raw = labels.get("traefik.enable")
|
||||
if enable_raw is not None and _parse_value("enable", enable_raw) is False:
|
||||
return
|
||||
|
||||
ports = _parse_ports(definition.get("ports"), env)
|
||||
ports = get_ports_for_service(definition, all_services, env)
|
||||
routers: dict[str, bool] = {}
|
||||
service_names: set[str] = set()
|
||||
|
||||
@@ -450,17 +312,41 @@ def _process_service_labels(
|
||||
def generate_traefik_config(
|
||||
config: Config,
|
||||
services: list[str],
|
||||
*,
|
||||
check_all: bool = False,
|
||||
) -> tuple[dict[str, Any], list[str]]:
|
||||
"""Generate Traefik dynamic config from compose labels.
|
||||
|
||||
Args:
|
||||
config: The compose-farm config.
|
||||
services: List of service names to process.
|
||||
check_all: If True, check all services for warnings (ignore host filtering).
|
||||
Used by the check command to validate all traefik labels.
|
||||
|
||||
Returns (config_dict, warnings).
|
||||
|
||||
"""
|
||||
dynamic: dict[str, Any] = {}
|
||||
warnings: list[str] = []
|
||||
sources: dict[str, TraefikServiceSource] = {}
|
||||
sources: dict[str, _TraefikServiceSource] = {}
|
||||
|
||||
# Determine Traefik's host from service assignment
|
||||
traefik_host = None
|
||||
if config.traefik_service and not check_all:
|
||||
traefik_host = config.services.get(config.traefik_service)
|
||||
|
||||
for stack in services:
|
||||
raw_services, env, host_address = _load_stack(config, stack)
|
||||
raw_services, env, host_address = load_compose_services(config, stack)
|
||||
stack_host = config.services.get(stack)
|
||||
|
||||
# Skip services on Traefik's host - docker provider handles them directly
|
||||
# (unless check_all is True, for validation purposes)
|
||||
if not check_all:
|
||||
if host_address.lower() in LOCAL_ADDRESSES:
|
||||
continue
|
||||
if traefik_host and stack_host == traefik_host:
|
||||
continue
|
||||
|
||||
for compose_service, definition in raw_services.items():
|
||||
if not isinstance(definition, dict):
|
||||
continue
|
||||
@@ -468,6 +354,7 @@ def generate_traefik_config(
|
||||
stack,
|
||||
compose_service,
|
||||
definition,
|
||||
raw_services,
|
||||
host_address,
|
||||
env,
|
||||
dynamic,
|
||||
@@ -477,3 +364,22 @@ def generate_traefik_config(
|
||||
|
||||
_finalize_http_services(dynamic, sources, warnings)
|
||||
return dynamic, warnings
|
||||
|
||||
|
||||
_TRAEFIK_CONFIG_HEADER = """\
|
||||
# Auto-generated by compose-farm
|
||||
# https://github.com/basnijholt/compose-farm
|
||||
#
|
||||
# This file routes traffic to services running on hosts other than Traefik's host.
|
||||
# Services on Traefik's host use the Docker provider directly.
|
||||
#
|
||||
# Regenerate with: compose-farm traefik-file --all -o <this-file>
|
||||
# Or configure traefik_file in compose-farm.yaml for automatic updates.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
def render_traefik_config(dynamic: dict[str, Any]) -> str:
|
||||
"""Render Traefik dynamic config as YAML with a header comment."""
|
||||
body = yaml.safe_dump(dynamic, sort_keys=False)
|
||||
return _TRAEFIK_CONFIG_HEADER + body
|
||||
|
||||
7
src/compose_farm/web/__init__.py
Normal file
7
src/compose_farm/web/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
"""Compose Farm Web UI."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from compose_farm.web.app import create_app
|
||||
|
||||
__all__ = ["create_app"]
|
||||
49
src/compose_farm/web/app.py
Normal file
49
src/compose_farm/web/app.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""FastAPI application setup."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from compose_farm.web.deps import STATIC_DIR, get_config
|
||||
from compose_farm.web.routes import actions, api, pages
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import AsyncGenerator
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(_app: FastAPI) -> AsyncGenerator[None, None]:
|
||||
"""Application lifespan handler."""
|
||||
# Startup: pre-load config
|
||||
get_config()
|
||||
yield
|
||||
# Shutdown: nothing to clean up
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
|
||||
"""Create and configure the FastAPI application."""
|
||||
app = FastAPI(
|
||||
title="Compose Farm",
|
||||
description="Web UI for managing Docker Compose services across multiple hosts",
|
||||
lifespan=lifespan,
|
||||
)
|
||||
|
||||
# Mount static files
|
||||
app.mount("/static", StaticFiles(directory=str(STATIC_DIR)), name="static")
|
||||
|
||||
app.include_router(pages.router)
|
||||
app.include_router(api.router, prefix="/api")
|
||||
app.include_router(actions.router, prefix="/api")
|
||||
|
||||
# WebSocket routes use Unix-only modules (fcntl, pty)
|
||||
if sys.platform != "win32":
|
||||
from compose_farm.web.ws import router as ws_router # noqa: PLC0415
|
||||
|
||||
app.include_router(ws_router)
|
||||
|
||||
return app
|
||||
40
src/compose_farm/web/deps.py
Normal file
40
src/compose_farm/web/deps.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""Shared dependencies for web modules.
|
||||
|
||||
This module contains shared config and template accessors to avoid circular imports
|
||||
between app.py and route modules.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from fastapi.templating import Jinja2Templates
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from compose_farm.config import Config
|
||||
|
||||
# Paths
|
||||
WEB_DIR = Path(__file__).parent
|
||||
TEMPLATES_DIR = WEB_DIR / "templates"
|
||||
STATIC_DIR = WEB_DIR / "static"
|
||||
|
||||
|
||||
@lru_cache
|
||||
def get_config() -> Config:
|
||||
"""Load config once per process (cached)."""
|
||||
from compose_farm.config import load_config # noqa: PLC0415
|
||||
|
||||
return load_config()
|
||||
|
||||
|
||||
def reload_config() -> Config:
|
||||
"""Clear config cache and reload from disk."""
|
||||
get_config.cache_clear()
|
||||
return get_config()
|
||||
|
||||
|
||||
def get_templates() -> Jinja2Templates:
|
||||
"""Get Jinja2 templates instance."""
|
||||
return Jinja2Templates(directory=str(TEMPLATES_DIR))
|
||||
5
src/compose_farm/web/routes/__init__.py
Normal file
5
src/compose_farm/web/routes/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Web routes."""
|
||||
|
||||
from compose_farm.web.routes import actions, api, pages
|
||||
|
||||
__all__ = ["actions", "api", "pages"]
|
||||
95
src/compose_farm/web/routes/actions.py
Normal file
95
src/compose_farm/web/routes/actions.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""Action routes for service operations."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import uuid
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from fastapi import APIRouter, HTTPException
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable, Coroutine
|
||||
|
||||
from compose_farm.web.deps import get_config
|
||||
from compose_farm.web.streaming import run_cli_streaming, run_compose_streaming, tasks
|
||||
|
||||
router = APIRouter(tags=["actions"])
|
||||
|
||||
# Store task references to prevent garbage collection
|
||||
_background_tasks: set[asyncio.Task[None]] = set()
|
||||
|
||||
|
||||
def _start_task(coro_factory: Callable[[str], Coroutine[Any, Any, None]]) -> str:
|
||||
"""Create a task, register it, and return the task_id."""
|
||||
task_id = str(uuid.uuid4())
|
||||
tasks[task_id] = {"status": "running", "output": []}
|
||||
|
||||
task: asyncio.Task[None] = asyncio.create_task(coro_factory(task_id))
|
||||
_background_tasks.add(task)
|
||||
task.add_done_callback(_background_tasks.discard)
|
||||
|
||||
return task_id
|
||||
|
||||
|
||||
async def _run_service_action(name: str, command: str) -> dict[str, Any]:
|
||||
"""Run a compose command for a service."""
|
||||
config = get_config()
|
||||
|
||||
if name not in config.services:
|
||||
raise HTTPException(status_code=404, detail=f"Service '{name}' not found")
|
||||
|
||||
task_id = _start_task(lambda tid: run_compose_streaming(config, name, command, tid))
|
||||
return {"task_id": task_id, "service": name, "command": command}
|
||||
|
||||
|
||||
@router.post("/service/{name}/up")
|
||||
async def up_service(name: str) -> dict[str, Any]:
|
||||
"""Start a service."""
|
||||
return await _run_service_action(name, "up")
|
||||
|
||||
|
||||
@router.post("/service/{name}/down")
|
||||
async def down_service(name: str) -> dict[str, Any]:
|
||||
"""Stop a service."""
|
||||
return await _run_service_action(name, "down")
|
||||
|
||||
|
||||
@router.post("/service/{name}/restart")
|
||||
async def restart_service(name: str) -> dict[str, Any]:
|
||||
"""Restart a service (down + up)."""
|
||||
return await _run_service_action(name, "restart")
|
||||
|
||||
|
||||
@router.post("/service/{name}/pull")
|
||||
async def pull_service(name: str) -> dict[str, Any]:
|
||||
"""Pull latest images for a service."""
|
||||
return await _run_service_action(name, "pull")
|
||||
|
||||
|
||||
@router.post("/service/{name}/update")
|
||||
async def update_service(name: str) -> dict[str, Any]:
|
||||
"""Update a service (pull + build + down + up)."""
|
||||
return await _run_service_action(name, "update")
|
||||
|
||||
|
||||
@router.post("/service/{name}/logs")
|
||||
async def logs_service(name: str) -> dict[str, Any]:
|
||||
"""Show logs for a service."""
|
||||
return await _run_service_action(name, "logs")
|
||||
|
||||
|
||||
@router.post("/apply")
|
||||
async def apply_all() -> dict[str, Any]:
|
||||
"""Run cf apply to reconcile all services."""
|
||||
config = get_config()
|
||||
task_id = _start_task(lambda tid: run_cli_streaming(config, ["apply"], tid))
|
||||
return {"task_id": task_id, "command": "apply"}
|
||||
|
||||
|
||||
@router.post("/refresh")
|
||||
async def refresh_state() -> dict[str, Any]:
|
||||
"""Refresh state from running services."""
|
||||
config = get_config()
|
||||
task_id = _start_task(lambda tid: run_cli_streaming(config, ["refresh"], tid))
|
||||
return {"task_id": task_id, "command": "refresh"}
|
||||
212
src/compose_farm/web/routes/api.py
Normal file
212
src/compose_farm/web/routes/api.py
Normal file
@@ -0,0 +1,212 @@
|
||||
"""JSON API routes."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import json
|
||||
from typing import TYPE_CHECKING, Annotated, Any
|
||||
|
||||
import yaml
|
||||
from fastapi import APIRouter, Body, HTTPException
|
||||
from fastapi.responses import HTMLResponse
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pathlib import Path
|
||||
|
||||
from compose_farm.executor import run_compose_on_host
|
||||
from compose_farm.state import load_state
|
||||
from compose_farm.web.deps import get_config, get_templates, reload_config
|
||||
|
||||
router = APIRouter(tags=["api"])
|
||||
|
||||
|
||||
def _validate_yaml(content: str) -> None:
|
||||
"""Validate YAML content, raise HTTPException on error."""
|
||||
try:
|
||||
yaml.safe_load(content)
|
||||
except yaml.YAMLError as e:
|
||||
raise HTTPException(status_code=400, detail=f"Invalid YAML: {e}") from e
|
||||
|
||||
|
||||
def _get_service_compose_path(name: str) -> Path:
|
||||
"""Get compose path for service, raising HTTPException if not found."""
|
||||
config = get_config()
|
||||
|
||||
if name not in config.services:
|
||||
raise HTTPException(status_code=404, detail=f"Service '{name}' not found")
|
||||
|
||||
compose_path = config.get_compose_path(name)
|
||||
if not compose_path:
|
||||
raise HTTPException(status_code=404, detail="Compose file not found")
|
||||
|
||||
return compose_path
|
||||
|
||||
|
||||
def _get_compose_services(config: Any, service: str, hosts: list[str]) -> list[dict[str, Any]]:
|
||||
"""Get container info from compose file (fast, local read).
|
||||
|
||||
Returns one entry per container per host for multi-host services.
|
||||
"""
|
||||
compose_path = config.get_compose_path(service)
|
||||
if not compose_path or not compose_path.exists():
|
||||
return []
|
||||
|
||||
compose_data = yaml.safe_load(compose_path.read_text()) or {}
|
||||
raw_services = compose_data.get("services", {})
|
||||
if not isinstance(raw_services, dict):
|
||||
return []
|
||||
|
||||
# Project name is the directory name (docker compose default)
|
||||
project_name = compose_path.parent.name
|
||||
|
||||
containers = []
|
||||
for host in hosts:
|
||||
for svc_name, svc_def in raw_services.items():
|
||||
# Use container_name if set, otherwise default to {project}-{service}-1
|
||||
if isinstance(svc_def, dict) and svc_def.get("container_name"):
|
||||
container_name = svc_def["container_name"]
|
||||
else:
|
||||
container_name = f"{project_name}-{svc_name}-1"
|
||||
containers.append(
|
||||
{
|
||||
"Name": container_name,
|
||||
"Service": svc_name,
|
||||
"Host": host,
|
||||
"State": "unknown", # Status requires Docker query
|
||||
}
|
||||
)
|
||||
return containers
|
||||
|
||||
|
||||
async def _get_container_states(
|
||||
config: Any, service: str, containers: list[dict[str, Any]]
|
||||
) -> list[dict[str, Any]]:
|
||||
"""Query Docker for actual container states on a single host."""
|
||||
if not containers:
|
||||
return containers
|
||||
|
||||
# All containers should be on the same host
|
||||
host_name = containers[0]["Host"]
|
||||
|
||||
result = await run_compose_on_host(config, service, host_name, "ps --format json", stream=False)
|
||||
if not result.success:
|
||||
return containers
|
||||
|
||||
# Build state map
|
||||
state_map: dict[str, str] = {}
|
||||
for line in result.stdout.strip().split("\n"):
|
||||
if line.strip():
|
||||
with contextlib.suppress(json.JSONDecodeError):
|
||||
data = json.loads(line)
|
||||
state_map[data.get("Name", "")] = data.get("State", "unknown")
|
||||
|
||||
# Update container states
|
||||
for c in containers:
|
||||
if c["Name"] in state_map:
|
||||
c["State"] = state_map[c["Name"]]
|
||||
|
||||
return containers
|
||||
|
||||
|
||||
def _render_containers(
|
||||
service: str, host: str, containers: list[dict[str, Any]], *, show_header: bool = False
|
||||
) -> str:
|
||||
"""Render containers HTML using Jinja template."""
|
||||
templates = get_templates()
|
||||
template = templates.env.get_template("partials/containers.html")
|
||||
module = template.make_module()
|
||||
result: str = module.host_containers(service, host, containers, show_header=show_header)
|
||||
return result
|
||||
|
||||
|
||||
@router.get("/service/{name}/containers", response_class=HTMLResponse)
|
||||
async def get_containers(name: str, host: str | None = None) -> HTMLResponse:
|
||||
"""Get containers for a service as HTML buttons.
|
||||
|
||||
If host is specified, queries Docker for that host's status.
|
||||
Otherwise returns all hosts with loading spinners that auto-fetch.
|
||||
"""
|
||||
config = get_config()
|
||||
|
||||
if name not in config.services:
|
||||
raise HTTPException(status_code=404, detail=f"Service '{name}' not found")
|
||||
|
||||
# Get hosts where service is running from state
|
||||
state = load_state(config)
|
||||
current_hosts = state.get(name)
|
||||
if not current_hosts:
|
||||
return HTMLResponse('<span class="text-base-content/60">Service not running</span>')
|
||||
|
||||
all_hosts = current_hosts if isinstance(current_hosts, list) else [current_hosts]
|
||||
|
||||
# If host specified, return just that host's containers with status
|
||||
if host:
|
||||
if host not in all_hosts:
|
||||
return HTMLResponse(f'<span class="text-error">Host {host} not found</span>')
|
||||
|
||||
containers = _get_compose_services(config, name, [host])
|
||||
containers = await _get_container_states(config, name, containers)
|
||||
return HTMLResponse(_render_containers(name, host, containers))
|
||||
|
||||
# Initial load: return all hosts with loading spinners, each fetches its own status
|
||||
html_parts = []
|
||||
is_multi_host = len(all_hosts) > 1
|
||||
|
||||
for h in all_hosts:
|
||||
host_id = f"containers-{name}-{h}".replace(".", "-")
|
||||
containers = _get_compose_services(config, name, [h])
|
||||
|
||||
if is_multi_host:
|
||||
html_parts.append(f'<div class="font-semibold text-sm mt-3 mb-1">{h}</div>')
|
||||
|
||||
# Container for this host that auto-fetches its own status
|
||||
html_parts.append(f"""
|
||||
<div id="{host_id}"
|
||||
hx-get="/api/service/{name}/containers?host={h}"
|
||||
hx-trigger="load"
|
||||
hx-target="this"
|
||||
hx-select="unset"
|
||||
hx-swap="innerHTML">
|
||||
{_render_containers(name, h, containers)}
|
||||
</div>
|
||||
""")
|
||||
|
||||
return HTMLResponse("".join(html_parts))
|
||||
|
||||
|
||||
@router.put("/service/{name}/compose")
|
||||
async def save_compose(
|
||||
name: str, content: Annotated[str, Body(media_type="text/plain")]
|
||||
) -> dict[str, Any]:
|
||||
"""Save compose file content."""
|
||||
compose_path = _get_service_compose_path(name)
|
||||
_validate_yaml(content)
|
||||
compose_path.write_text(content)
|
||||
return {"success": True, "message": "Compose file saved"}
|
||||
|
||||
|
||||
@router.put("/service/{name}/env")
|
||||
async def save_env(
|
||||
name: str, content: Annotated[str, Body(media_type="text/plain")]
|
||||
) -> dict[str, Any]:
|
||||
"""Save .env file content."""
|
||||
env_path = _get_service_compose_path(name).parent / ".env"
|
||||
env_path.write_text(content)
|
||||
return {"success": True, "message": ".env file saved"}
|
||||
|
||||
|
||||
@router.put("/config")
|
||||
async def save_config(
|
||||
content: Annotated[str, Body(media_type="text/plain")],
|
||||
) -> dict[str, Any]:
|
||||
"""Save compose-farm.yaml config file."""
|
||||
config = get_config()
|
||||
|
||||
if not config.config_path:
|
||||
raise HTTPException(status_code=404, detail="Config path not set")
|
||||
|
||||
_validate_yaml(content)
|
||||
config.config_path.write_text(content)
|
||||
reload_config()
|
||||
|
||||
return {"success": True, "message": "Config saved"}
|
||||
207
src/compose_farm/web/routes/pages.py
Normal file
207
src/compose_farm/web/routes/pages.py
Normal file
@@ -0,0 +1,207 @@
|
||||
"""HTML page routes."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import yaml
|
||||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import HTMLResponse
|
||||
|
||||
from compose_farm.state import (
|
||||
get_orphaned_services,
|
||||
get_service_host,
|
||||
get_services_needing_migration,
|
||||
get_services_not_in_state,
|
||||
load_state,
|
||||
)
|
||||
from compose_farm.web.deps import get_config, get_templates
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/", response_class=HTMLResponse)
|
||||
async def index(request: Request) -> HTMLResponse:
|
||||
"""Dashboard page - combined view of all cluster info."""
|
||||
config = get_config()
|
||||
templates = get_templates()
|
||||
|
||||
# Get state
|
||||
deployed = load_state(config)
|
||||
|
||||
# Stats
|
||||
running_count = len(deployed)
|
||||
stopped_count = len(config.services) - running_count
|
||||
|
||||
# Pending operations
|
||||
orphaned = get_orphaned_services(config)
|
||||
migrations = get_services_needing_migration(config)
|
||||
not_started = get_services_not_in_state(config)
|
||||
|
||||
# Group services by host
|
||||
services_by_host: dict[str, list[str]] = {}
|
||||
for svc, host in deployed.items():
|
||||
if isinstance(host, list):
|
||||
for h in host:
|
||||
services_by_host.setdefault(h, []).append(svc)
|
||||
else:
|
||||
services_by_host.setdefault(host, []).append(svc)
|
||||
|
||||
# Config file content
|
||||
config_content = ""
|
||||
if config.config_path and config.config_path.exists():
|
||||
config_content = config.config_path.read_text()
|
||||
|
||||
# State file content
|
||||
state_content = yaml.dump({"deployed": deployed}, default_flow_style=False, sort_keys=False)
|
||||
|
||||
return templates.TemplateResponse(
|
||||
"index.html",
|
||||
{
|
||||
"request": request,
|
||||
# Config data
|
||||
"hosts": config.hosts,
|
||||
"services": config.services,
|
||||
"config_content": config_content,
|
||||
# State data
|
||||
"state_content": state_content,
|
||||
# Stats
|
||||
"running_count": running_count,
|
||||
"stopped_count": stopped_count,
|
||||
# Pending operations
|
||||
"orphaned": orphaned,
|
||||
"migrations": migrations,
|
||||
"not_started": not_started,
|
||||
# Services by host
|
||||
"services_by_host": services_by_host,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@router.get("/service/{name}", response_class=HTMLResponse)
|
||||
async def service_detail(request: Request, name: str) -> HTMLResponse:
|
||||
"""Service detail page."""
|
||||
config = get_config()
|
||||
templates = get_templates()
|
||||
|
||||
# Get compose file content
|
||||
compose_path = config.get_compose_path(name)
|
||||
compose_content = ""
|
||||
if compose_path and compose_path.exists():
|
||||
compose_content = compose_path.read_text()
|
||||
|
||||
# Get .env file content
|
||||
env_content = ""
|
||||
env_path = None
|
||||
if compose_path:
|
||||
env_path = compose_path.parent / ".env"
|
||||
if env_path.exists():
|
||||
env_content = env_path.read_text()
|
||||
|
||||
# Get host info
|
||||
hosts = config.get_hosts(name)
|
||||
|
||||
# Get state
|
||||
current_host = get_service_host(config, name)
|
||||
|
||||
return templates.TemplateResponse(
|
||||
"service.html",
|
||||
{
|
||||
"request": request,
|
||||
"name": name,
|
||||
"hosts": hosts,
|
||||
"current_host": current_host,
|
||||
"compose_content": compose_content,
|
||||
"compose_path": str(compose_path) if compose_path else None,
|
||||
"env_content": env_content,
|
||||
"env_path": str(env_path) if env_path else None,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@router.get("/partials/sidebar", response_class=HTMLResponse)
async def sidebar_partial(request: Request) -> HTMLResponse:
    """Render the sidebar service-list fragment (htmx partial)."""
    config = get_config()
    templates = get_templates()

    context = {
        "request": request,
        "services": sorted(config.services.keys()),
        "state": load_state(config),
    }
    return templates.TemplateResponse("partials/sidebar.html", context)
|
||||
|
||||
|
||||
@router.get("/partials/stats", response_class=HTMLResponse)
async def stats_partial(request: Request) -> HTMLResponse:
    """Render the stats-cards fragment (htmx partial)."""
    config = get_config()
    templates = get_templates()

    # A service counts as "running" when it appears in the deployment state.
    deployed = load_state(config)
    running = len(deployed)

    context = {
        "request": request,
        "hosts": config.hosts,
        "services": config.services,
        "running_count": running,
        "stopped_count": len(config.services) - running,
    }
    return templates.TemplateResponse("partials/stats.html", context)
|
||||
|
||||
|
||||
@router.get("/partials/pending", response_class=HTMLResponse)
async def pending_partial(request: Request, expanded: bool = True) -> HTMLResponse:
    """Render the pending-operations fragment (htmx partial).

    ``expanded`` preserves the collapse state across htmx refreshes.
    """
    config = get_config()
    templates = get_templates()

    context = {
        "request": request,
        "orphaned": get_orphaned_services(config),
        "migrations": get_services_needing_migration(config),
        "not_started": get_services_not_in_state(config),
        "expanded": expanded,
    }
    return templates.TemplateResponse("partials/pending.html", context)
|
||||
|
||||
|
||||
@router.get("/partials/services-by-host", response_class=HTMLResponse)
async def services_by_host_partial(request: Request, expanded: bool = True) -> HTMLResponse:
    """Render the services-grouped-by-host fragment (htmx partial).

    ``expanded`` preserves the collapse state across htmx refreshes.
    """
    config = get_config()
    templates = get_templates()

    deployed = load_state(config)

    # Invert the service -> host(s) mapping into host -> [services].
    # A state entry may be a single host name or a list of host names.
    services_by_host: dict[str, list[str]] = {}
    for svc, host_entry in deployed.items():
        host_names = host_entry if isinstance(host_entry, list) else [host_entry]
        for host_name in host_names:
            services_by_host.setdefault(host_name, []).append(svc)

    context = {
        "request": request,
        "hosts": config.hosts,
        "services_by_host": services_by_host,
        "expanded": expanded,
    }
    return templates.TemplateResponse("partials/services_by_host.html", context)
|
||||
25
src/compose_farm/web/static/app.css
Normal file
25
src/compose_farm/web/static/app.css
Normal file
@@ -0,0 +1,25 @@
|
||||
/* Editors (Monaco) - wrapper makes it resizable */
|
||||
.editor-wrapper {
|
||||
resize: vertical;
|
||||
overflow: hidden;
|
||||
min-height: 150px;
|
||||
}
|
||||
|
||||
.editor-wrapper .yaml-editor,
|
||||
.editor-wrapper .env-editor,
|
||||
.editor-wrapper .yaml-viewer {
|
||||
height: 100%;
|
||||
border: 1px solid oklch(var(--bc) / 0.2);
|
||||
border-radius: 0.5rem;
|
||||
}
|
||||
|
||||
.editor-wrapper.yaml-wrapper { height: 400px; }
|
||||
.editor-wrapper.env-wrapper { height: 250px; }
|
||||
.editor-wrapper.viewer-wrapper { height: 300px; }
|
||||
|
||||
/* Terminal - no custom CSS needed, using h-full class in HTML */
|
||||
|
||||
/* Prevent save button resize when text changes */
|
||||
#save-btn, #save-config-btn {
|
||||
min-width: 5rem;
|
||||
}
|
||||
437
src/compose_farm/web/static/app.js
Normal file
437
src/compose_farm/web/static/app.js
Normal file
@@ -0,0 +1,437 @@
|
||||
/**
|
||||
* Compose Farm Web UI JavaScript
|
||||
*/
|
||||
|
||||
// ANSI escape sequences used when writing status lines into xterm.js
const ANSI = {
    RED: '\x1b[31m',
    GREEN: '\x1b[32m',
    DIM: '\x1b[2m',
    RESET: '\x1b[0m',
    CRLF: '\r\n'
};

// Registries: live terminals keyed by task id, Monaco editors keyed by element id.
const terminals = {};
const editors = {};

// Monaco is fetched lazily from the CDN; these flags prevent duplicate loads.
let monacoLoaded = false;
let monacoLoading = false;

// Terminal color theme (dark mode matching PicoCSS)
const TERMINAL_THEME = {
    background: '#1a1a2e',
    foreground: '#e4e4e7',
    cursor: '#e4e4e7',
    cursorAccent: '#1a1a2e',
    black: '#18181b',
    red: '#ef4444',
    green: '#22c55e',
    yellow: '#eab308',
    blue: '#3b82f6',
    magenta: '#a855f7',
    cyan: '#06b6d4',
    white: '#e4e4e7',
    brightBlack: '#52525b',
    brightRed: '#f87171',
    brightGreen: '#4ade80',
    brightYellow: '#facc15',
    brightBlue: '#60a5fa',
    brightMagenta: '#c084fc',
    brightCyan: '#22d3ee',
    brightWhite: '#fafafa'
};
|
||||
|
||||
/**
 * Create an xterm.js terminal with a fit addon and automatic refitting.
 * @param {HTMLElement} container - Element the terminal is mounted into
 * @param {object} extraOptions - Extra Terminal constructor options
 * @param {function} onResize - Optional callback invoked with (cols, rows) after each refit
 * @returns {{term: Terminal, fitAddon: FitAddon}}
 */
function createTerminal(container, extraOptions = {}, onResize = null) {
    container.innerHTML = '';

    const term = new Terminal({
        convertEol: true,
        theme: TERMINAL_THEME,
        fontSize: 13,
        fontFamily: 'Monaco, Menlo, "Ubuntu Mono", monospace',
        scrollback: 5000,
        ...extraOptions
    });

    const fitAddon = new FitAddon.FitAddon();
    term.loadAddon(fitAddon);
    term.open(container);
    fitAddon.fit();

    // Refit on window resize and whenever the container itself changes size.
    // NOTE(review): neither the window listener nor the ResizeObserver is
    // ever detached, so each terminal created on a long-lived page leaks
    // one of each — confirm whether terminals are recreated often enough
    // to matter.
    const refit = () => {
        fitAddon.fit();
        if (onResize) onResize(term.cols, term.rows);
    };
    window.addEventListener('resize', refit);
    new ResizeObserver(refit).observe(container);

    return { term, fitAddon };
}
|
||||
|
||||
/**
 * Open a WebSocket to this origin, matching ws/wss to the page protocol.
 * @param {string} path - WebSocket path
 * @returns {WebSocket}
 */
function createWebSocket(path) {
    const scheme = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
    return new WebSocket(`${scheme}//${window.location.host}${path}`);
}
|
||||
|
||||
/**
 * Mount a read-only streaming terminal and connect it to a task's
 * output WebSocket. Registers the terminal in the global registry.
 */
function initTerminal(elementId, taskId) {
    const container = document.getElementById(elementId);
    if (!container) {
        console.error('Terminal container not found:', elementId);
        return;
    }

    const { term, fitAddon } = createTerminal(container);
    const ws = createWebSocket(`/ws/terminal/${taskId}`);

    Object.assign(ws, {
        onopen: () => {
            term.write(`${ANSI.DIM}[Connected]${ANSI.RESET}${ANSI.CRLF}`);
            setTerminalLoading(true);
        },
        onmessage: (event) => term.write(event.data),
        // Stream finished (or dropped): hide the spinner.
        onclose: () => setTerminalLoading(false),
        onerror: (error) => {
            term.write(`${ANSI.RED}[WebSocket Error]${ANSI.RESET}${ANSI.CRLF}`);
            console.error('WebSocket error:', error);
            setTerminalLoading(false);
        }
    });

    terminals[taskId] = { term, ws, fitAddon };
    return { term, ws };
}

window.initTerminal = initTerminal;
|
||||
|
||||
/**
 * Open an interactive shell terminal into a container over a WebSocket.
 * Only one exec session exists at a time; starting a new one tears
 * down the previous terminal and socket first.
 */
let execTerminal = null;
let execWs = null;

function initExecTerminal(service, container, host) {
    const containerEl = document.getElementById('exec-terminal-container');
    const terminalEl = document.getElementById('exec-terminal');
    if (!containerEl || !terminalEl) {
        console.error('Exec terminal elements not found');
        return;
    }

    containerEl.classList.remove('hidden');

    // Tear down any previous session.
    if (execWs) { execWs.close(); execWs = null; }
    if (execTerminal) { execTerminal.dispose(); execTerminal = null; }

    // The socket is created before the terminal so the resize callback
    // below can forward sizes as soon as fitting happens.
    execWs = createWebSocket(`/ws/exec/${service}/${container}/${host}`);

    // Tell the remote PTY about our geometry.
    const sendSize = (cols, rows) => {
        if (execWs && execWs.readyState === WebSocket.OPEN) {
            execWs.send(JSON.stringify({ type: 'resize', cols, rows }));
        }
    };

    const { term } = createTerminal(terminalEl, { cursorBlink: true }, sendSize);
    execTerminal = term;

    execWs.onopen = () => { sendSize(term.cols, term.rows); term.focus(); };
    execWs.onmessage = (event) => term.write(event.data);
    execWs.onclose = () => term.write(`${ANSI.CRLF}${ANSI.DIM}[Connection closed]${ANSI.RESET}${ANSI.CRLF}`);
    execWs.onerror = (error) => {
        term.write(`${ANSI.RED}[WebSocket Error]${ANSI.RESET}${ANSI.CRLF}`);
        console.error('Exec WebSocket error:', error);
    };

    // Keystrokes go straight to the remote shell.
    term.onData((data) => {
        if (execWs && execWs.readyState === WebSocket.OPEN) {
            execWs.send(data);
        }
    });
}

window.initExecTerminal = initExecTerminal;
|
||||
|
||||
/**
 * Refresh every dashboard partial, preserving each section's collapse state.
 */
function refreshDashboard() {
    const isExpanded = (id) => document.getElementById(id)?.checked ?? true;

    const partials = [
        ['/partials/sidebar', '#sidebar nav', 'innerHTML'],
        ['/partials/stats', '#stats-cards', 'outerHTML'],
        [`/partials/pending?expanded=${isExpanded('pending-collapse')}`, '#pending-operations', 'outerHTML'],
        [`/partials/services-by-host?expanded=${isExpanded('services-by-host-collapse')}`, '#services-by-host', 'outerHTML']
    ];
    for (const [url, target, swap] of partials) {
        htmx.ajax('GET', url, { target, swap });
    }
}
|
||||
|
||||
/**
 * Load the Monaco editor from the CDN exactly once, then invoke callback.
 * Callers arriving while the first load is in flight poll until it finishes.
 */
function loadMonaco(callback) {
    if (monacoLoaded) {
        callback();
        return;
    }

    if (monacoLoading) {
        // Another caller already kicked off the load; wait for it.
        const poll = setInterval(() => {
            if (monacoLoaded) {
                clearInterval(poll);
                callback();
            }
        }, 100);
        return;
    }

    monacoLoading = true;

    // Inject the Monaco AMD loader, then pull in the editor module.
    const script = document.createElement('script');
    script.src = 'https://cdn.jsdelivr.net/npm/monaco-editor@0.52.2/min/vs/loader.js';
    script.onload = () => {
        require.config({ paths: { vs: 'https://cdn.jsdelivr.net/npm/monaco-editor@0.52.2/min/vs' }});
        require(['vs/editor/editor.main'], () => {
            monacoLoaded = true;
            monacoLoading = false;
            callback();
        });
    };
    document.head.appendChild(script);
}
|
||||
|
||||
/**
 * Create a Monaco editor instance.
 * @param {HTMLElement} container - Mount point
 * @param {string} content - Initial text
 * @param {string} language - Monaco language id (yaml, plaintext, ...)
 * @param {boolean} readonly - Disallow edits when true
 * @returns {object} Monaco editor instance
 */
function createEditor(container, content, language, readonly = false) {
    const editor = monaco.editor.create(container, {
        value: content,
        language: language,
        theme: 'vs-dark',
        minimap: { enabled: false },
        automaticLayout: true,
        scrollBeyondLastLine: false,
        fontSize: 14,
        lineNumbers: 'on',
        wordWrap: 'on',
        ...(readonly ? { readOnly: true, domReadOnly: true } : {})
    });

    // Cmd+S / Ctrl+S inside an editable editor saves everything.
    if (!readonly) {
        editor.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyS, saveAllEditors);
    }

    return editor;
}
|
||||
|
||||
/**
 * (Re)create all Monaco editors present on the current page.
 */
function initMonacoEditors() {
    // Tear down editors left over from a previous page before rebuilding.
    for (const key of Object.keys(editors)) {
        const ed = editors[key];
        if (ed && ed.dispose) ed.dispose();
        delete editors[key];
    }

    const editorConfigs = [
        { id: 'compose-editor', language: 'yaml', readonly: false },
        { id: 'env-editor', language: 'plaintext', readonly: false },
        { id: 'config-editor', language: 'yaml', readonly: false },
        { id: 'state-viewer', language: 'yaml', readonly: true }
    ];

    // Don't pull Monaco from the CDN on pages without any editor element.
    if (!editorConfigs.some(({ id }) => document.getElementById(id))) return;

    loadMonaco(() => {
        for (const { id, language, readonly } of editorConfigs) {
            const el = document.getElementById(id);
            if (!el) continue;

            editors[id] = createEditor(el, el.dataset.content || '', language, readonly);
            if (!readonly) {
                // Editable editors remember where to PUT their content.
                editors[id].saveUrl = el.dataset.saveUrl;
            }
        }
    });
}
|
||||
|
||||
/**
 * Persist the contents of every editable Monaco editor via its save URL,
 * then surface the result on the save button (or an alert on failure).
 */
async function saveAllEditors() {
    const saveBtn = document.getElementById('save-btn') || document.getElementById('save-config-btn');
    const results = [];

    for (const [id, editor] of Object.entries(editors)) {
        if (!editor || !editor.saveUrl) continue;  // read-only viewers have no saveUrl

        try {
            const response = await fetch(editor.saveUrl, {
                method: 'PUT',
                headers: { 'Content-Type': 'text/plain' },
                body: editor.getValue()
            });
            const data = await response.json();
            results.push(data.success
                ? { id, success: true }
                : { id, success: false, error: data.detail || 'Unknown error' });
        } catch (e) {
            results.push({ id, success: false, error: e.message });
        }
    }

    const errors = results.filter(r => !r.success);
    if (errors.length > 0) {
        alert('Errors saving:\n' + errors.map(e => `${e.id}: ${e.error}`).join('\n'));
    } else if (saveBtn && results.length > 0) {
        // Brief "Saved!" flash, then restore the button label.
        saveBtn.textContent = 'Saved!';
        setTimeout(() => saveBtn.textContent = saveBtn.id === 'save-config-btn' ? 'Save Config' : 'Save All', 2000);

        refreshDashboard();
    }
}
|
||||
|
||||
/**
 * Hook the save button (service page or dashboard variant) to saveAllEditors.
 */
function initSaveButton() {
    const saveBtn = document.getElementById('save-btn') || document.getElementById('save-config-btn');
    if (saveBtn) saveBtn.onclick = saveAllEditors;
}
|
||||
|
||||
/**
 * Global Cmd+S / Ctrl+S handler. Saves only when no Monaco editor has
 * focus — focused editors already handle the shortcut via addCommand.
 */
function initKeyboardShortcuts() {
    document.addEventListener('keydown', (e) => {
        if (!(e.metaKey || e.ctrlKey) || e.key !== 's') return;
        if (Object.keys(editors).length === 0) return;

        const anyFocused = Object.values(editors)
            .some(ed => ed && ed.hasTextFocus && ed.hasTextFocus());
        if (!anyFocused) {
            e.preventDefault();
            saveAllEditors();
        }
    });
}
|
||||
|
||||
/**
 * Set up per-page components (editors + save button).
 */
function initPage() {
    initMonacoEditors();
    initSaveButton();
}

// Full page load: wire up everything once.
document.addEventListener('DOMContentLoaded', () => {
    initPage();
    initKeyboardShortcuts();
});

// htmx boosted navigation replaces #main-content; re-init page components.
document.body.addEventListener('htmx:afterSwap', (evt) => {
    if (evt.detail.target.id === 'main-content') {
        initPage();
    }
});
|
||||
|
||||
/**
 * Open the terminal collapse section and scroll it into view.
 */
function expandTerminal() {
    const toggle = document.getElementById('terminal-toggle');
    if (toggle) toggle.checked = true;

    document.getElementById('terminal-collapse')
        ?.scrollIntoView({ behavior: 'smooth', block: 'start' });
}
|
||||
|
||||
/**
 * Show (loading=true) or hide (loading=false) the terminal spinner.
 */
function setTerminalLoading(loading) {
    document.getElementById('terminal-spinner')
        ?.classList.toggle('hidden', !loading);
}
|
||||
|
||||
// Action endpoints reply with JSON {task_id: ...}; when one does, reveal
// the terminal and start streaming that task's output into it.
document.body.addEventListener('htmx:afterRequest', (evt) => {
    if (!evt.detail.successful || !evt.detail.xhr) return;

    const text = evt.detail.xhr.responseText;
    // Most responses are HTML partials; only attempt JSON when it looks like it.
    if (!text || !text.trim().startsWith('{')) return;

    let response;
    try {
        response = JSON.parse(text);
    } catch (e) {
        return; // not valid JSON after all — ignore
    }
    if (!response.task_id) return;

    // Expand terminal and scroll to it.
    expandTerminal();

    // xterm.js comes from a CDN; retry until it's available (up to ~2s).
    const tryInit = (attempts) => {
        if (typeof Terminal !== 'undefined' && typeof FitAddon !== 'undefined') {
            initTerminal('terminal-output', response.task_id);
        } else if (attempts > 0) {
            setTimeout(() => tryInit(attempts - 1), 100);
        } else {
            console.error('xterm.js failed to load');
        }
    };
    tryInit(20);
});
|
||||
114
src/compose_farm/web/streaming.py
Normal file
114
src/compose_farm/web/streaming.py
Normal file
@@ -0,0 +1,114 @@
|
||||
"""Streaming executor adapter for web UI."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from compose_farm.config import Config
|
||||
|
||||
# ANSI escape codes for terminal output
|
||||
RED = "\x1b[31m"
|
||||
GREEN = "\x1b[32m"
|
||||
DIM = "\x1b[2m"
|
||||
RESET = "\x1b[0m"
|
||||
CRLF = "\r\n"
|
||||
|
||||
|
||||
def _get_ssh_auth_sock() -> str | None:
|
||||
"""Get SSH_AUTH_SOCK, auto-detecting forwarded agent if needed."""
|
||||
sock = os.environ.get("SSH_AUTH_SOCK")
|
||||
if sock and Path(sock).is_socket():
|
||||
return sock
|
||||
|
||||
# Try to find a forwarded SSH agent socket
|
||||
agent_dir = Path.home() / ".ssh" / "agent"
|
||||
if agent_dir.is_dir():
|
||||
sockets = sorted(
|
||||
agent_dir.glob("s.*.sshd.*"), key=lambda p: p.stat().st_mtime, reverse=True
|
||||
)
|
||||
for s in sockets:
|
||||
if s.is_socket():
|
||||
return str(s)
|
||||
return None
|
||||
|
||||
|
||||
# In-memory task registry: task_id -> {"output": [...], "status": ...}
tasks: dict[str, dict[str, Any]] = {}


async def stream_to_task(task_id: str, message: str) -> None:
    """Append ``message`` to the task's output buffer; no-op for unknown tasks."""
    task = tasks.get(task_id)
    if task is not None:
        task["output"].append(message)
|
||||
|
||||
|
||||
async def run_cli_streaming(
    config: Config,
    args: list[str],
    task_id: str,
) -> None:
    """Run a cf CLI command as subprocess and stream output to task buffer.

    This reuses all CLI logic including Rich formatting, progress bars, etc.
    The subprocess gets a pseudo-TTY via FORCE_COLOR so Rich outputs ANSI codes.

    Args:
        config: Loaded configuration; its ``config_path`` is forwarded so the
            subprocess reads the same config file.
        args: CLI arguments, e.g. ``["up", "myservice"]``.
        task_id: Key into the in-memory ``tasks`` registry receiving output.
    """

    def _set_status(status: str) -> None:
        # Guard the registry lookup: the task may have been evicted, and an
        # unguarded tasks[task_id] here would raise KeyError — inside the
        # except handler below that would mask the original error.
        if task_id in tasks:
            tasks[task_id]["status"] = status

    try:
        # Build command - config option goes after the subcommand
        cmd = ["cf", *args, f"--config={config.config_path}"]

        # Echo the command being executed (without the --config flag)
        cmd_display = " ".join(["cf", *args])
        await stream_to_task(task_id, f"{DIM}$ {cmd_display}{RESET}{CRLF}")

        # Force color output even though there's no real TTY.
        # COLUMNS makes Rich/Typer wrap output at a sane width.
        env = {"FORCE_COLOR": "1", "TERM": "xterm-256color", "COLUMNS": "120"}

        # Ensure SSH agent is available (auto-detect if needed)
        ssh_sock = _get_ssh_auth_sock()
        if ssh_sock:
            env["SSH_AUTH_SOCK"] = ssh_sock

        process = await asyncio.create_subprocess_exec(
            *cmd,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.STDOUT,
            env={**os.environ, **env},
        )

        # Stream output line by line
        if process.stdout:
            async for line in process.stdout:
                text = line.decode("utf-8", errors="replace")
                # Convert \n to \r\n for xterm.js
                if text.endswith("\n") and not text.endswith("\r\n"):
                    text = text[:-1] + "\r\n"
                await stream_to_task(task_id, text)

        exit_code = await process.wait()
        _set_status("completed" if exit_code == 0 else "failed")

    except Exception as e:  # noqa: BLE001 - surface any failure in the web terminal
        await stream_to_task(task_id, f"{RED}Error: {e}{RESET}{CRLF}")
        _set_status("failed")
|
||||
|
||||
|
||||
async def run_compose_streaming(
    config: Config,
    service: str,
    command: str,
    task_id: str,
) -> None:
    """Run a compose command (up/down/pull/restart) via the CLI subprocess.

    ``command`` may carry extra flags, e.g. ``"up -d"``; the service name
    is inserted between the subcommand and those flags.
    """
    parts = command.split()
    subcommand = parts[0]  # up, down, pull, restart
    extra_args = parts[1:]  # -d, etc.
    await run_cli_streaming(config, [subcommand, service, *extra_args], task_id)
|
||||
64
src/compose_farm/web/templates/base.html
Normal file
64
src/compose_farm/web/templates/base.html
Normal file
@@ -0,0 +1,64 @@
|
||||
{% from "partials/icons.html" import github, hamburger %}
|
||||
<!DOCTYPE html>
|
||||
<html lang="en" data-theme="dark">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>{% block title %}Compose Farm{% endblock %}</title>
|
||||
|
||||
<!-- daisyUI + Tailwind -->
|
||||
<link href="https://cdn.jsdelivr.net/npm/daisyui@5" rel="stylesheet" type="text/css" />
|
||||
<script src="https://cdn.jsdelivr.net/npm/@tailwindcss/browser@4"></script>
|
||||
|
||||
<!-- xterm.js -->
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@xterm/xterm@5.5.0/css/xterm.css">
|
||||
|
||||
<!-- Custom styles -->
|
||||
<link rel="stylesheet" href="/static/app.css">
|
||||
</head>
|
||||
<body class="min-h-screen bg-base-200">
|
||||
<div class="drawer lg:drawer-open">
|
||||
<input id="drawer-toggle" type="checkbox" class="drawer-toggle" />
|
||||
|
||||
<!-- Main content -->
|
||||
<div class="drawer-content flex flex-col">
|
||||
<!-- Mobile navbar with hamburger -->
|
||||
<header class="navbar bg-base-100 border-b border-base-300 lg:hidden">
|
||||
<label for="drawer-toggle" class="btn btn-ghost btn-square">
|
||||
{{ hamburger() }}
|
||||
</label>
|
||||
<span class="font-semibold">Compose Farm</span>
|
||||
</header>
|
||||
|
||||
<main id="main-content" class="flex-1 p-6 overflow-y-auto" hx-boost="true" hx-target="#main-content" hx-select="#main-content" hx-swap="outerHTML">
|
||||
{% block content %}{% endblock %}
|
||||
</main>
|
||||
</div>
|
||||
|
||||
<!-- Sidebar -->
|
||||
<div class="drawer-side">
|
||||
<label for="drawer-toggle" class="drawer-overlay" aria-label="close sidebar"></label>
|
||||
<aside id="sidebar" class="w-64 bg-base-100 border-r border-base-300 flex flex-col min-h-screen">
|
||||
<header class="p-4 border-b border-base-300">
|
||||
<h2 class="text-lg font-semibold flex items-center gap-2">
|
||||
Compose Farm
|
||||
<a href="https://github.com/basnijholt/compose-farm" target="_blank" title="GitHub" class="opacity-50 hover:opacity-100 transition-opacity">
|
||||
{{ github() }}
|
||||
</a>
|
||||
</h2>
|
||||
</header>
|
||||
<nav class="flex-1 overflow-y-auto p-2" hx-get="/partials/sidebar" hx-trigger="load" hx-swap="innerHTML">
|
||||
<span class="loading loading-spinner loading-sm"></span> Loading...
|
||||
</nav>
|
||||
</aside>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Scripts - HTMX first -->
|
||||
<script src="https://unpkg.com/htmx.org@2.0.4"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/@xterm/xterm@5.5.0/lib/xterm.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/@xterm/addon-fit@0.10.0/lib/addon-fit.js"></script>
|
||||
<script src="/static/app.js"></script>
|
||||
{% block scripts %}{% endblock %}
|
||||
</body>
|
||||
</html>
|
||||
65
src/compose_farm/web/templates/index.html
Normal file
65
src/compose_farm/web/templates/index.html
Normal file
@@ -0,0 +1,65 @@
|
||||
{% extends "base.html" %}
|
||||
{% from "partials/components.html" import page_header, collapse, stat_card, table, action_btn %}
|
||||
{% block title %}Dashboard - Compose Farm{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="max-w-5xl">
|
||||
{{ page_header("Compose Farm", "Cluster overview and management") }}
|
||||
|
||||
<!-- Stats Cards -->
|
||||
{% include "partials/stats.html" %}
|
||||
|
||||
<!-- Global Actions -->
|
||||
<div class="flex flex-wrap gap-2 mb-6">
|
||||
{{ action_btn("Apply", "/api/apply", "primary", "Make reality match config") }}
|
||||
{{ action_btn("Refresh", "/api/refresh", "outline", "Update state from reality") }}
|
||||
<button id="save-config-btn" class="btn btn-outline">Save Config</button>
|
||||
</div>
|
||||
|
||||
{% include "partials/terminal.html" %}
|
||||
|
||||
<!-- Config Editor -->
|
||||
{% call collapse("Edit Config", badge="compose-farm.yaml") %}
|
||||
<div class="editor-wrapper yaml-wrapper">
|
||||
<div id="config-editor" class="yaml-editor" data-content="{{ config_content | e }}" data-save-url="/api/config"></div>
|
||||
</div>
|
||||
{% endcall %}
|
||||
|
||||
<!-- Pending Operations -->
|
||||
{% include "partials/pending.html" %}
|
||||
|
||||
<!-- Services by Host -->
|
||||
{% include "partials/services_by_host.html" %}
|
||||
|
||||
<!-- Hosts Configuration -->
|
||||
{% call collapse("Hosts (" ~ (hosts | length) ~ ")") %}
|
||||
{% call table() %}
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Address</th>
|
||||
<th>User</th>
|
||||
<th>Port</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for name, host in hosts.items() %}
|
||||
<tr class="hover:bg-base-300">
|
||||
<td class="font-semibold">{{ name }}</td>
|
||||
<td><code class="text-sm">{{ host.address }}</code></td>
|
||||
<td><code class="text-sm">{{ host.user }}</code></td>
|
||||
<td><code class="text-sm">{{ host.port }}</code></td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
{% endcall %}
|
||||
{% endcall %}
|
||||
|
||||
<!-- State Viewer -->
|
||||
{% call collapse("Raw State", badge="compose-farm-state.yaml") %}
|
||||
<div class="editor-wrapper viewer-wrapper">
|
||||
<div id="state-viewer" class="yaml-viewer" data-content="{{ state_content | e }}"></div>
|
||||
</div>
|
||||
{% endcall %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
52
src/compose_farm/web/templates/partials/components.html
Normal file
52
src/compose_farm/web/templates/partials/components.html
Normal file
@@ -0,0 +1,52 @@
|
||||
{# Page header with title and optional subtitle (supports HTML) #}
|
||||
{% macro page_header(title, subtitle=None) %}
|
||||
<div class="mb-6">
|
||||
<h1 class="text-3xl font-bold">{{ title }}</h1>
|
||||
{% if subtitle %}
|
||||
<p class="text-base-content/60 text-lg">{{ subtitle | safe }}</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{# Collapsible section #}
|
||||
{% macro collapse(title, id=None, checked=False, badge=None) %}
|
||||
<div class="collapse collapse-arrow bg-base-100 shadow mb-4">
|
||||
<input type="checkbox" {% if id %}id="{{ id }}"{% endif %} {% if checked %}checked{% endif %} />
|
||||
<div class="collapse-title font-medium">
|
||||
{{ title }}
|
||||
{% if badge %}<code class="text-xs ml-2 opacity-60">{{ badge }}</code>{% endif %}
|
||||
</div>
|
||||
<div class="collapse-content">
|
||||
{{ caller() }}
|
||||
</div>
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{# Action button with htmx #}
|
||||
{% macro action_btn(label, url, style="outline", title=None) %}
|
||||
<button hx-post="{{ url }}"
|
||||
hx-swap="none"
|
||||
class="btn btn-{{ style }}"
|
||||
{% if title %}title="{{ title }}"{% endif %}>
|
||||
{{ label }}
|
||||
</button>
|
||||
{% endmacro %}
|
||||
|
||||
{# Stat card for dashboard #}
|
||||
{% macro stat_card(label, value, color=None) %}
|
||||
<div class="card bg-base-100 shadow">
|
||||
<div class="card-body items-center text-center">
|
||||
<h2 class="card-title text-base-content/60 text-sm">{{ label }}</h2>
|
||||
<p class="text-4xl font-bold {% if color %}text-{{ color }}{% endif %}">{{ value }}</p>
|
||||
</div>
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{# Data table wrapper #}
|
||||
{% macro table() %}
|
||||
<div class="overflow-x-auto">
|
||||
<table class="table table-zebra">
|
||||
{{ caller() }}
|
||||
</table>
|
||||
</div>
|
||||
{% endmacro %}
|
||||
26
src/compose_farm/web/templates/partials/containers.html
Normal file
26
src/compose_farm/web/templates/partials/containers.html
Normal file
@@ -0,0 +1,26 @@
|
||||
{# Container list for a service on a single host #}
|
||||
{% macro container_row(service, container, host) %}
|
||||
<div class="flex items-center gap-2 mb-2">
|
||||
{% if container.State == "running" %}
|
||||
<span class="badge badge-success">running</span>
|
||||
{% elif container.State == "unknown" %}
|
||||
<span class="badge badge-ghost"><span class="loading loading-spinner loading-xs"></span></span>
|
||||
{% else %}
|
||||
<span class="badge badge-warning">{{ container.State }}</span>
|
||||
{% endif %}
|
||||
<code class="text-sm flex-1">{{ container.Name }}</code>
|
||||
<button class="btn btn-sm btn-outline"
|
||||
onclick="initExecTerminal('{{ service }}', '{{ container.Name }}', '{{ host }}')">
|
||||
Shell
|
||||
</button>
|
||||
</div>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro host_containers(service, host, containers, show_header=False) %}
|
||||
{% if show_header %}
|
||||
<div class="font-semibold text-sm mt-3 mb-1">{{ host }}</div>
|
||||
{% endif %}
|
||||
{% for container in containers %}
|
||||
{{ container_row(service, container, host) }}
|
||||
{% endfor %}
|
||||
{% endmacro %}
|
||||
9
src/compose_farm/web/templates/partials/icons.html
Normal file
9
src/compose_farm/web/templates/partials/icons.html
Normal file
@@ -0,0 +1,9 @@
|
||||
{% macro github(size=16) %}
|
||||
<svg height="{{ size }}" width="{{ size }}" viewBox="0 0 16 16" fill="currentColor"><path d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z"/></svg>
|
||||
{% endmacro %}
|
||||
|
||||
{% macro hamburger() %}
|
||||
<svg xmlns="http://www.w3.org/2000/svg" class="h-5 w-5" fill="none" viewBox="0 0 24 24" stroke="currentColor">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 6h16M4 12h16M4 18h16" />
|
||||
</svg>
|
||||
{% endmacro %}
|
||||
38
src/compose_farm/web/templates/partials/pending.html
Normal file
38
src/compose_farm/web/templates/partials/pending.html
Normal file
@@ -0,0 +1,38 @@
|
||||
{% from "partials/components.html" import collapse %}
|
||||
<div id="pending-operations">
|
||||
{% if orphaned or migrations or not_started %}
|
||||
{% call collapse("Pending Operations", id="pending-collapse", checked=expanded|default(true)) %}
|
||||
{% if orphaned %}
|
||||
<h4 class="font-semibold mt-2 mb-1">Orphaned Services (will be stopped)</h4>
|
||||
<ul class="list-disc list-inside mb-4">
|
||||
{% for svc, host in orphaned.items() %}
|
||||
<li><a href="/service/{{ svc }}" class="badge badge-warning hover:badge-primary">{{ svc }}</a> on {{ host }}</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
{% if migrations %}
|
||||
<h4 class="font-semibold mt-2 mb-1">Services Needing Migration</h4>
|
||||
<ul class="list-disc list-inside mb-4">
|
||||
{% for svc in migrations %}
|
||||
<li><a href="/service/{{ svc }}" class="badge badge-info hover:badge-primary">{{ svc }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
|
||||
{% if not_started %}
|
||||
<h4 class="font-semibold mt-2 mb-1">Services Not Started</h4>
|
||||
<ul class="menu menu-horizontal bg-base-200 rounded-box mb-2">
|
||||
{% for svc in not_started | sort %}
|
||||
<li><a href="/service/{{ svc }}">{{ svc }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% endif %}
|
||||
{% endcall %}
|
||||
{% else %}
|
||||
<div role="alert" class="alert alert-success mb-4">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" class="stroke-current shrink-0 h-6 w-6" fill="none" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" /></svg>
|
||||
<span>All services are in sync with configuration.</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
@@ -0,0 +1,20 @@
|
||||
{% from "partials/components.html" import collapse %}
|
||||
<div id="services-by-host">
|
||||
{% call collapse("Services by Host", id="services-by-host-collapse", checked=expanded|default(true)) %}
|
||||
{% for host_name, host_services in services_by_host.items() | sort %}
|
||||
<h4 class="font-semibold mt-3 mb-1">
|
||||
{{ host_name }}
|
||||
{% if host_name in hosts %}
|
||||
<code class="text-xs ml-2 opacity-60">{{ hosts[host_name].address }}</code>
|
||||
{% endif %}
|
||||
</h4>
|
||||
<ul class="menu menu-horizontal bg-base-200 rounded-box mb-2">
|
||||
{% for svc in host_services | sort %}
|
||||
<li><a href="/service/{{ svc }}">{{ svc }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{% else %}
|
||||
<p class="text-base-content/60 italic">No services currently running.</p>
|
||||
{% endfor %}
|
||||
{% endcall %}
|
||||
</div>
|
||||
27
src/compose_farm/web/templates/partials/sidebar.html
Normal file
27
src/compose_farm/web/templates/partials/sidebar.html
Normal file
@@ -0,0 +1,27 @@
|
||||
<!-- Dashboard Link -->
|
||||
<div class="mb-4">
|
||||
<ul class="menu" hx-boost="true" hx-target="#main-content" hx-select="#main-content" hx-swap="outerHTML">
|
||||
<li><a href="/" class="font-semibold"><span class="font-mono opacity-60">~</span> Dashboard</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<!-- Services Section -->
|
||||
<div class="mb-4">
|
||||
<h4 class="text-xs uppercase tracking-wide text-base-content/60 px-3 py-1">
|
||||
Services <span class="text-xs opacity-50" title="Green = tracked in state file">(state)</span>
|
||||
</h4>
|
||||
<ul class="menu" hx-boost="true" hx-target="#main-content" hx-select="#main-content" hx-swap="outerHTML">
|
||||
{% for service in services %}
|
||||
<li>
|
||||
<a href="/service/{{ service }}" class="flex items-center gap-2">
|
||||
{% if service in state %}
|
||||
<span class="status status-success" title="In state file"></span>
|
||||
{% else %}
|
||||
<span class="status status-neutral" title="Not in state file"></span>
|
||||
{% endif %}
|
||||
{{ service }}
|
||||
</a>
|
||||
</li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
</div>
|
||||
7
src/compose_farm/web/templates/partials/stats.html
Normal file
7
src/compose_farm/web/templates/partials/stats.html
Normal file
@@ -0,0 +1,7 @@
|
||||
{% from "partials/components.html" import stat_card %}
|
||||
<div id="stats-cards" class="grid grid-cols-2 md:grid-cols-4 gap-4 mb-6">
|
||||
{{ stat_card("Hosts", hosts | length) }}
|
||||
{{ stat_card("Services", services | length) }}
|
||||
{{ stat_card("Running", running_count, "success") }}
|
||||
{{ stat_card("Stopped", stopped_count) }}
|
||||
</div>
|
||||
13
src/compose_farm/web/templates/partials/terminal.html
Normal file
13
src/compose_farm/web/templates/partials/terminal.html
Normal file
@@ -0,0 +1,13 @@
|
||||
<!-- Shared Terminal Component: collapsible panel with a spinner (shown while a
     task streams) and a dark container that the page's JS fills with terminal
     output (presumably an xterm.js instance mounted into #terminal-output —
     confirm against the page scripts). resize-y lets the user drag it taller. -->
<div class="collapse collapse-arrow bg-base-100 shadow mb-4" id="terminal-collapse">
<input type="checkbox" id="terminal-toggle" />
<div class="collapse-title font-medium flex items-center gap-2">
Terminal Output
<span id="terminal-spinner" class="loading loading-spinner loading-sm hidden"></span>
</div>
<div class="collapse-content">
<div id="terminal-container" class="bg-[#1a1a2e] rounded-lg h-[300px] border border-white/10 resize-y overflow-hidden">
<div id="terminal-output" class="h-full"></div>
</div>
</div>
</div>
|
||||
66
src/compose_farm/web/templates/service.html
Normal file
66
src/compose_farm/web/templates/service.html
Normal file
@@ -0,0 +1,66 @@
|
||||
{% extends "base.html" %}
|
||||
{% from "partials/components.html" import collapse, action_btn %}
|
||||
{% block title %}{{ name }} - Compose Farm{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="max-w-5xl">
|
||||
<div class="mb-6">
|
||||
<h1 class="text-3xl font-bold">{{ name }}</h1>
|
||||
<div class="flex flex-wrap items-center gap-2 mt-2">
|
||||
{% if current_host %}
|
||||
<span class="badge badge-success">Running on {{ current_host }}</span>
|
||||
{% else %}
|
||||
<span class="badge badge-neutral">Not running</span>
|
||||
{% endif %}
|
||||
<span class="badge badge-outline">{{ hosts | join(', ') }}</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Action Buttons -->
|
||||
<div class="flex flex-wrap gap-2 mb-6">
|
||||
<!-- Lifecycle -->
|
||||
{{ action_btn("Up", "/api/service/" ~ name ~ "/up", "primary", "Start service (docker compose up -d)") }}
|
||||
{{ action_btn("Down", "/api/service/" ~ name ~ "/down", "outline", "Stop service (docker compose down)") }}
|
||||
{{ action_btn("Restart", "/api/service/" ~ name ~ "/restart", "secondary", "Restart service (down + up)") }}
|
||||
{{ action_btn("Update", "/api/service/" ~ name ~ "/update", "accent", "Update to latest (pull + build + down + up)") }}
|
||||
|
||||
<div class="divider divider-horizontal mx-0"></div>
|
||||
|
||||
<!-- Other -->
|
||||
{{ action_btn("Pull", "/api/service/" ~ name ~ "/pull", "outline", "Pull latest images (no restart)") }}
|
||||
{{ action_btn("Logs", "/api/service/" ~ name ~ "/logs", "outline", "Show recent logs") }}
|
||||
<button id="save-btn" class="btn btn-outline">Save All</button>
|
||||
</div>
|
||||
|
||||
{% call collapse("Compose File", badge=compose_path) %}
|
||||
<div class="editor-wrapper yaml-wrapper">
|
||||
<div id="compose-editor" class="yaml-editor" data-content="{{ compose_content | e }}" data-save-url="/api/service/{{ name }}/compose"></div>
|
||||
</div>
|
||||
{% endcall %}
|
||||
|
||||
{% call collapse(".env File", badge=env_path) %}
|
||||
<div class="editor-wrapper env-wrapper">
|
||||
<div id="env-editor" class="env-editor" data-content="{{ env_content | e }}" data-save-url="/api/service/{{ name }}/env"></div>
|
||||
</div>
|
||||
{% endcall %}
|
||||
|
||||
{% include "partials/terminal.html" %}
|
||||
|
||||
<!-- Exec Terminal -->
|
||||
{% if current_host %}
|
||||
{% call collapse("Container Shell", id="exec-collapse", checked=True) %}
|
||||
<div id="containers-list" class="mb-4"
|
||||
hx-get="/api/service/{{ name }}/containers"
|
||||
hx-trigger="load"
|
||||
hx-target="this"
|
||||
hx-select="unset"
|
||||
hx-swap="innerHTML">
|
||||
<span class="loading loading-spinner loading-sm"></span> Loading containers...
|
||||
</div>
|
||||
<div id="exec-terminal-container" class="bg-[#1a1a2e] rounded-lg h-[400px] border border-white/10 hidden">
|
||||
<div id="exec-terminal" class="h-full"></div>
|
||||
</div>
|
||||
{% endcall %}
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
236
src/compose_farm/web/ws.py
Normal file
236
src/compose_farm/web/ws.py
Normal file
@@ -0,0 +1,236 @@
|
||||
"""WebSocket handler for terminal streaming."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
import contextlib
import fcntl
import json
import os
import pty
import shlex
import signal
import struct
import termios
from typing import TYPE_CHECKING, Any

import asyncssh
from fastapi import APIRouter, WebSocket, WebSocketDisconnect

from compose_farm.executor import is_local
from compose_farm.web.deps import get_config
from compose_farm.web.streaming import CRLF, DIM, GREEN, RED, RESET, tasks
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from compose_farm.config import Host
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def _parse_resize(msg: str) -> tuple[int, int] | None:
|
||||
"""Parse a resize message, return (cols, rows) or None if not a resize."""
|
||||
try:
|
||||
data = json.loads(msg)
|
||||
if data.get("type") == "resize":
|
||||
return int(data["cols"]), int(data["rows"])
|
||||
except (json.JSONDecodeError, KeyError, TypeError, ValueError):
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
def _resize_pty(
|
||||
fd: int, cols: int, rows: int, proc: asyncio.subprocess.Process | None = None
|
||||
) -> None:
|
||||
"""Resize a local PTY and send SIGWINCH to the process."""
|
||||
winsize = struct.pack("HHHH", rows, cols, 0, 0)
|
||||
fcntl.ioctl(fd, termios.TIOCSWINSZ, winsize)
|
||||
# Explicitly send SIGWINCH so docker exec forwards it to the container
|
||||
if proc and proc.pid:
|
||||
os.kill(proc.pid, signal.SIGWINCH)
|
||||
|
||||
|
||||
async def _bridge_websocket_to_fd(
    websocket: WebSocket,
    master_fd: int,
    proc: asyncio.subprocess.Process,
) -> None:
    """Bridge a WebSocket to a local PTY file descriptor.

    Pumps PTY output to the client and client input back into the PTY until
    the subprocess exits or the socket drops. JSON resize control messages
    (see ``_parse_resize``) adjust the PTY size instead of being written as
    input. On exit the master fd is closed and a still-running process is
    terminated.
    """
    # get_running_loop() is the correct call inside a coroutine;
    # get_event_loop() is deprecated in this context since Python 3.10.
    loop = asyncio.get_running_loop()

    async def read_output() -> None:
        # The caller sets master_fd non-blocking, so os.read can raise
        # BlockingIOError when no data is available yet.
        while proc.returncode is None:
            try:
                data = await loop.run_in_executor(None, lambda: os.read(master_fd, 4096))
            except BlockingIOError:
                await asyncio.sleep(0.01)
                continue
            except OSError:
                break  # PTY closed underneath us (process exited)
            if not data:
                break  # EOF
            await websocket.send_text(data.decode("utf-8", errors="replace"))

    read_task = asyncio.create_task(read_output())

    try:
        while proc.returncode is None:
            try:
                # Short timeout so the loop re-checks proc.returncode regularly.
                msg = await asyncio.wait_for(websocket.receive_text(), timeout=0.1)
            except TimeoutError:
                continue
            if size := _parse_resize(msg):
                _resize_pty(master_fd, *size, proc)
            else:
                os.write(master_fd, msg.encode())
    finally:
        read_task.cancel()
        os.close(master_fd)
        if proc.returncode is None:
            proc.terminate()
|
||||
|
||||
|
||||
async def _bridge_websocket_to_ssh(
    websocket: WebSocket,
    proc: Any,  # asyncssh.SSHClientProcess
) -> None:
    """Bridge WebSocket to an SSH process with PTY.

    Streams the SSH process's stdout to the client and forwards client input
    to its stdin until the process exits or the socket drops. JSON resize
    control messages (see ``_parse_resize``) become remote terminal-size
    changes rather than input. The reader task is cancelled and the process
    terminated on exit.
    """
    assert proc.stdout is not None
    assert proc.stdin is not None

    async def read_stdout() -> None:
        # Forward remote output to the browser until EOF or process exit.
        while proc.returncode is None:
            data = await proc.stdout.read(4096)
            if not data:
                break
            # asyncssh can yield str or bytes depending on the session mode.
            text = data if isinstance(data, str) else data.decode()
            await websocket.send_text(text)

    read_task = asyncio.create_task(read_stdout())

    try:
        while proc.returncode is None:
            try:
                # Short timeout so the loop re-checks proc.returncode regularly.
                msg = await asyncio.wait_for(websocket.receive_text(), timeout=0.1)
            except TimeoutError:
                continue
            if size := _parse_resize(msg):
                # Resize request: adjust the remote PTY instead of writing input.
                proc.change_terminal_size(*size)
            else:
                proc.stdin.write(msg)
    finally:
        read_task.cancel()
        proc.terminate()
|
||||
|
||||
|
||||
async def _run_local_exec(websocket: WebSocket, exec_cmd: str) -> None:
    """Run docker exec locally with PTY.

    Allocates a PTY pair, attaches *exec_cmd*'s stdio to the slave end, and
    bridges the WebSocket to the master end so the client gets a real
    terminal (colors, line editing, resize).
    """
    master_fd, slave_fd = pty.openpty()

    proc = await asyncio.create_subprocess_shell(
        exec_cmd,
        stdin=slave_fd,
        stdout=slave_fd,
        stderr=slave_fd,
        close_fds=True,
    )
    # The child holds its own copy of the slave end; close ours so we see EOF
    # on master_fd when the process exits.
    os.close(slave_fd)

    # Set non-blocking so reads from the master end never stall the event loop.
    flags = fcntl.fcntl(master_fd, fcntl.F_GETFL)
    fcntl.fcntl(master_fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)

    # _bridge_websocket_to_fd takes ownership of master_fd and closes it.
    await _bridge_websocket_to_fd(websocket, master_fd, proc)
|
||||
|
||||
|
||||
async def _run_remote_exec(websocket: WebSocket, host: Host, exec_cmd: str) -> None:
    """Run docker exec on remote host via SSH with PTY.

    Opens an SSH connection to *host*, starts *exec_cmd* in a PTY-backed
    process, and bridges the WebSocket to it for the session's lifetime.
    """
    # NOTE(review): known_hosts=None disables SSH host-key verification —
    # presumably acceptable for a trusted LAN; confirm before wider exposure.
    async with asyncssh.connect(
        host.address,
        port=host.port,
        username=host.user,
        known_hosts=None,
    ) as conn:
        # Request a PTY with a conventional 80x24 default; the client sends a
        # resize message immediately after connecting.
        proc: asyncssh.SSHClientProcess[Any] = await conn.create_process(
            exec_cmd,
            term_type="xterm-256color",
            term_size=(80, 24),
        )
        async with proc:
            await _bridge_websocket_to_ssh(websocket, proc)
|
||||
|
||||
|
||||
async def _run_exec_session(
    websocket: WebSocket,
    container: str,
    host_name: str,
) -> None:
    """Run an interactive ``docker exec`` session over the WebSocket.

    Looks up *host_name* in the loaded config and dispatches to a local PTY
    or a remote SSH session. The command prefers bash inside the container,
    falling back to sh.
    """
    config = get_config()
    host = config.hosts.get(host_name)
    if not host:
        await websocket.send_text(f"{RED}Host '{host_name}' not found{RESET}{CRLF}")
        return

    # *container* arrives from the WebSocket URL path (untrusted input) and is
    # interpolated into a shell command line — quote it to prevent injection.
    quoted_container = shlex.quote(container)
    exec_cmd = (
        f"docker exec -it {quoted_container} "
        "/bin/sh -c 'command -v bash >/dev/null && exec bash || exec sh'"
    )

    if is_local(host):
        await _run_local_exec(websocket, exec_cmd)
    else:
        await _run_remote_exec(websocket, host, exec_cmd)
|
||||
|
||||
|
||||
@router.websocket("/ws/exec/{service}/{container}/{host}")
async def exec_websocket(
    websocket: WebSocket,
    service: str,  # noqa: ARG001 — part of the URL scheme, unused here
    container: str,
    host: str,
) -> None:
    """WebSocket endpoint for interactive container exec.

    Accepts the socket, runs a docker-exec session against *container* on
    *host*, and reports errors or disconnection to the client before closing.
    """
    await websocket.accept()

    try:
        await websocket.send_text(f"{DIM}[Connecting to {container} on {host}...]{RESET}{CRLF}")
        await _run_exec_session(websocket, container, host)
        await websocket.send_text(f"{CRLF}{DIM}[Disconnected]{RESET}{CRLF}")
    except WebSocketDisconnect:
        # Client went away mid-session; the finally-close below is enough.
        pass
    except Exception as e:  # top-level boundary: surface any failure to the client
        with contextlib.suppress(Exception):
            await websocket.send_text(f"{RED}Error: {e}{RESET}{CRLF}")
    finally:
        # The socket may already be closed by the client; ignore that.
        with contextlib.suppress(Exception):
            await websocket.close()
|
||||
|
||||
|
||||
@router.websocket("/ws/terminal/{task_id}")
async def terminal_websocket(websocket: WebSocket, task_id: str) -> None:
    """WebSocket endpoint for terminal streaming.

    Replays the buffered output of background task *task_id*, then polls for
    new chunks until the task reports completion or failure. The task record
    is dropped when streaming ends or the client disconnects, so each task's
    output is delivered at most once.
    """
    await websocket.accept()

    if task_id not in tasks:
        await websocket.send_text(f"{RED}Error: Task not found{RESET}{CRLF}")
        await websocket.close(code=4004)  # application-defined "not found" close code
        return

    task = tasks[task_id]
    sent_count = 0  # index of the next output chunk to deliver

    try:
        while True:
            # Send any new output
            while sent_count < len(task["output"]):
                await websocket.send_text(task["output"][sent_count])
                sent_count += 1

            if task["status"] in ("completed", "failed"):
                status = "[Done]" if task["status"] == "completed" else "[Failed]"
                color = GREEN if task["status"] == "completed" else RED
                await websocket.send_text(f"{CRLF}{color}{status}{RESET}{CRLF}")
                await websocket.close()
                break

            # Task still running: poll again shortly for fresh output.
            await asyncio.sleep(0.05)
    except WebSocketDisconnect:
        pass
    finally:
        # One-shot stream: remove the task record to free its output buffer.
        tasks.pop(task_id, None)
|
||||
426
tests/test_cli_lifecycle.py
Normal file
426
tests/test_cli_lifecycle.py
Normal file
@@ -0,0 +1,426 @@
|
||||
"""Tests for CLI lifecycle commands (apply, down --orphaned)."""
|
||||
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
import typer
|
||||
|
||||
from compose_farm.cli.lifecycle import apply, down
|
||||
from compose_farm.config import Config, Host
|
||||
from compose_farm.executor import CommandResult
|
||||
|
||||
|
||||
def _make_config(tmp_path: Path, services: dict[str, str] | None = None) -> Config:
    """Create a minimal config for testing.

    Builds a compose directory with an empty docker-compose.yml per service
    and an empty config file; defaults to two services on two hosts.
    """
    compose_dir = tmp_path / "compose"
    compose_dir.mkdir()

    svc_dict = services or {"svc1": "host1", "svc2": "host2"}
    for svc in svc_dict:
        svc_dir = compose_dir / svc
        svc_dir.mkdir()
        (svc_dir / "docker-compose.yml").write_text("services: {}\n")

    config_path = tmp_path / "compose-farm.yaml"
    config_path.write_text("")

    # Both hosts resolve to localhost; the tests patch out actual execution.
    return Config(
        compose_dir=compose_dir,
        hosts={"host1": Host(address="localhost"), "host2": Host(address="localhost")},
        services=svc_dict,
        config_path=config_path,
    )
|
||||
|
||||
|
||||
def _make_result(service: str, success: bool = True) -> CommandResult:
    """Create a command result.

    Exit code mirrors *success* (0 on success, 1 on failure); output streams
    are left empty because these tests only inspect status fields.
    """
    return CommandResult(
        service=service,
        exit_code=0 if success else 1,
        success=success,
        stdout="",
        stderr="",
    )
|
||||
|
||||
|
||||
class TestApplyCommand:
|
||||
"""Tests for the apply command."""
|
||||
|
||||
def test_apply_nothing_to_do(self, tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
|
||||
"""When no migrations, orphans, or missing services, prints success message."""
|
||||
cfg = _make_config(tmp_path)
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.lifecycle.get_orphaned_services", return_value={}),
|
||||
patch("compose_farm.cli.lifecycle.get_services_needing_migration", return_value=[]),
|
||||
patch("compose_farm.cli.lifecycle.get_services_not_in_state", return_value=[]),
|
||||
):
|
||||
apply(dry_run=False, no_orphans=False, full=False, config=None)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert "Nothing to apply" in captured.out
|
||||
|
||||
def test_apply_dry_run_shows_preview(
|
||||
self, tmp_path: Path, capsys: pytest.CaptureFixture[str]
|
||||
) -> None:
|
||||
"""Dry run shows what would be done without executing."""
|
||||
cfg = _make_config(tmp_path)
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_orphaned_services",
|
||||
return_value={"old-svc": "host1"},
|
||||
),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_services_needing_migration",
|
||||
return_value=["svc1"],
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.get_services_not_in_state", return_value=[]),
|
||||
patch("compose_farm.cli.lifecycle.get_service_host", return_value="host1"),
|
||||
patch("compose_farm.cli.lifecycle.stop_orphaned_services") as mock_stop,
|
||||
patch("compose_farm.cli.lifecycle.up_services") as mock_up,
|
||||
):
|
||||
apply(dry_run=True, no_orphans=False, full=False, config=None)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert "Services to migrate" in captured.out
|
||||
assert "svc1" in captured.out
|
||||
assert "Orphaned services to stop" in captured.out
|
||||
assert "old-svc" in captured.out
|
||||
assert "dry-run" in captured.out
|
||||
|
||||
# Should not have called the actual operations
|
||||
mock_stop.assert_not_called()
|
||||
mock_up.assert_not_called()
|
||||
|
||||
def test_apply_executes_migrations(self, tmp_path: Path) -> None:
|
||||
"""Apply runs migrations when services need migration."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_results = [_make_result("svc1")]
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.lifecycle.get_orphaned_services", return_value={}),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_services_needing_migration",
|
||||
return_value=["svc1"],
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.get_services_not_in_state", return_value=[]),
|
||||
patch("compose_farm.cli.lifecycle.get_service_host", return_value="host1"),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.run_async",
|
||||
return_value=mock_results,
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.up_services") as mock_up,
|
||||
patch("compose_farm.cli.lifecycle.maybe_regenerate_traefik"),
|
||||
patch("compose_farm.cli.lifecycle.report_results"),
|
||||
):
|
||||
apply(dry_run=False, no_orphans=False, full=False, config=None)
|
||||
|
||||
mock_up.assert_called_once()
|
||||
call_args = mock_up.call_args
|
||||
assert call_args[0][1] == ["svc1"] # services list
|
||||
|
||||
def test_apply_executes_orphan_cleanup(self, tmp_path: Path) -> None:
|
||||
"""Apply stops orphaned services."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_results = [_make_result("old-svc@host1")]
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_orphaned_services",
|
||||
return_value={"old-svc": "host1"},
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.get_services_needing_migration", return_value=[]),
|
||||
patch("compose_farm.cli.lifecycle.get_services_not_in_state", return_value=[]),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.run_async",
|
||||
return_value=mock_results,
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.stop_orphaned_services") as mock_stop,
|
||||
patch("compose_farm.cli.lifecycle.report_results"),
|
||||
):
|
||||
apply(dry_run=False, no_orphans=False, full=False, config=None)
|
||||
|
||||
mock_stop.assert_called_once_with(cfg)
|
||||
|
||||
def test_apply_no_orphans_skips_orphan_cleanup(
|
||||
self, tmp_path: Path, capsys: pytest.CaptureFixture[str]
|
||||
) -> None:
|
||||
"""--no-orphans flag skips orphan cleanup."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_results = [_make_result("svc1")]
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_orphaned_services",
|
||||
return_value={"old-svc": "host1"},
|
||||
),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_services_needing_migration",
|
||||
return_value=["svc1"],
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.get_services_not_in_state", return_value=[]),
|
||||
patch("compose_farm.cli.lifecycle.get_service_host", return_value="host1"),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.run_async",
|
||||
return_value=mock_results,
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.up_services") as mock_up,
|
||||
patch("compose_farm.cli.lifecycle.stop_orphaned_services") as mock_stop,
|
||||
patch("compose_farm.cli.lifecycle.maybe_regenerate_traefik"),
|
||||
patch("compose_farm.cli.lifecycle.report_results"),
|
||||
):
|
||||
apply(dry_run=False, no_orphans=True, full=False, config=None)
|
||||
|
||||
# Should run migrations but not orphan cleanup
|
||||
mock_up.assert_called_once()
|
||||
mock_stop.assert_not_called()
|
||||
|
||||
# Orphans should not appear in output
|
||||
captured = capsys.readouterr()
|
||||
assert "old-svc" not in captured.out
|
||||
|
||||
def test_apply_no_orphans_nothing_to_do(
|
||||
self, tmp_path: Path, capsys: pytest.CaptureFixture[str]
|
||||
) -> None:
|
||||
"""--no-orphans with only orphans means nothing to do."""
|
||||
cfg = _make_config(tmp_path)
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_orphaned_services",
|
||||
return_value={"old-svc": "host1"},
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.get_services_needing_migration", return_value=[]),
|
||||
patch("compose_farm.cli.lifecycle.get_services_not_in_state", return_value=[]),
|
||||
):
|
||||
apply(dry_run=False, no_orphans=True, full=False, config=None)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert "Nothing to apply" in captured.out
|
||||
|
||||
def test_apply_starts_missing_services(self, tmp_path: Path) -> None:
|
||||
"""Apply starts services that are in config but not in state."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_results = [_make_result("svc1")]
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.lifecycle.get_orphaned_services", return_value={}),
|
||||
patch("compose_farm.cli.lifecycle.get_services_needing_migration", return_value=[]),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_services_not_in_state",
|
||||
return_value=["svc1"],
|
||||
),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.run_async",
|
||||
return_value=mock_results,
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.up_services") as mock_up,
|
||||
patch("compose_farm.cli.lifecycle.maybe_regenerate_traefik"),
|
||||
patch("compose_farm.cli.lifecycle.report_results"),
|
||||
):
|
||||
apply(dry_run=False, no_orphans=False, full=False, config=None)
|
||||
|
||||
mock_up.assert_called_once()
|
||||
call_args = mock_up.call_args
|
||||
assert call_args[0][1] == ["svc1"]
|
||||
|
||||
def test_apply_dry_run_shows_missing_services(
|
||||
self, tmp_path: Path, capsys: pytest.CaptureFixture[str]
|
||||
) -> None:
|
||||
"""Dry run shows services that would be started."""
|
||||
cfg = _make_config(tmp_path)
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.lifecycle.get_orphaned_services", return_value={}),
|
||||
patch("compose_farm.cli.lifecycle.get_services_needing_migration", return_value=[]),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_services_not_in_state",
|
||||
return_value=["svc1"],
|
||||
),
|
||||
):
|
||||
apply(dry_run=True, no_orphans=False, full=False, config=None)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert "Services to start" in captured.out
|
||||
assert "svc1" in captured.out
|
||||
assert "dry-run" in captured.out
|
||||
|
||||
def test_apply_full_refreshes_all_services(self, tmp_path: Path) -> None:
|
||||
"""--full runs up on all services to pick up config changes."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_results = [_make_result("svc1"), _make_result("svc2")]
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.lifecycle.get_orphaned_services", return_value={}),
|
||||
patch("compose_farm.cli.lifecycle.get_services_needing_migration", return_value=[]),
|
||||
patch("compose_farm.cli.lifecycle.get_services_not_in_state", return_value=[]),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.run_async",
|
||||
return_value=mock_results,
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.up_services") as mock_up,
|
||||
patch("compose_farm.cli.lifecycle.maybe_regenerate_traefik"),
|
||||
patch("compose_farm.cli.lifecycle.report_results"),
|
||||
):
|
||||
apply(dry_run=False, no_orphans=False, full=True, config=None)
|
||||
|
||||
mock_up.assert_called_once()
|
||||
call_args = mock_up.call_args
|
||||
# Should refresh all services in config
|
||||
assert set(call_args[0][1]) == {"svc1", "svc2"}
|
||||
|
||||
def test_apply_full_dry_run_shows_refresh(
|
||||
self, tmp_path: Path, capsys: pytest.CaptureFixture[str]
|
||||
) -> None:
|
||||
"""--full --dry-run shows services that would be refreshed."""
|
||||
cfg = _make_config(tmp_path)
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.lifecycle.get_orphaned_services", return_value={}),
|
||||
patch("compose_farm.cli.lifecycle.get_services_needing_migration", return_value=[]),
|
||||
patch("compose_farm.cli.lifecycle.get_services_not_in_state", return_value=[]),
|
||||
):
|
||||
apply(dry_run=True, no_orphans=False, full=True, config=None)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert "Services to refresh" in captured.out
|
||||
assert "svc1" in captured.out
|
||||
assert "svc2" in captured.out
|
||||
assert "dry-run" in captured.out
|
||||
|
||||
def test_apply_full_excludes_already_handled_services(self, tmp_path: Path) -> None:
|
||||
"""--full doesn't double-process services that are migrating or starting."""
|
||||
cfg = _make_config(tmp_path, {"svc1": "host1", "svc2": "host2", "svc3": "host1"})
|
||||
mock_results = [_make_result("svc1"), _make_result("svc3")]
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.lifecycle.get_orphaned_services", return_value={}),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_services_needing_migration",
|
||||
return_value=["svc1"],
|
||||
),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_services_not_in_state",
|
||||
return_value=["svc2"],
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.get_service_host", return_value="host2"),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.run_async",
|
||||
return_value=mock_results,
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.up_services") as mock_up,
|
||||
patch("compose_farm.cli.lifecycle.maybe_regenerate_traefik"),
|
||||
patch("compose_farm.cli.lifecycle.report_results"),
|
||||
):
|
||||
apply(dry_run=False, no_orphans=False, full=True, config=None)
|
||||
|
||||
# up_services should be called 3 times: migrate, start, refresh
|
||||
assert mock_up.call_count == 3
|
||||
# Get the third call (refresh) and check it only has svc3
|
||||
refresh_call = mock_up.call_args_list[2]
|
||||
assert refresh_call[0][1] == ["svc3"]
|
||||
|
||||
|
||||
class TestDownOrphaned:
|
||||
"""Tests for down --orphaned flag."""
|
||||
|
||||
def test_down_orphaned_no_orphans(
|
||||
self, tmp_path: Path, capsys: pytest.CaptureFixture[str]
|
||||
) -> None:
|
||||
"""When no orphans exist, prints success message."""
|
||||
cfg = _make_config(tmp_path)
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.lifecycle.get_orphaned_services", return_value={}),
|
||||
):
|
||||
down(
|
||||
services=None,
|
||||
all_services=False,
|
||||
orphaned=True,
|
||||
host=None,
|
||||
config=None,
|
||||
)
|
||||
|
||||
captured = capsys.readouterr()
|
||||
assert "No orphaned services to stop" in captured.out
|
||||
|
||||
def test_down_orphaned_stops_services(self, tmp_path: Path) -> None:
|
||||
"""--orphaned stops orphaned services."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_results = [_make_result("old-svc@host1")]
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.lifecycle.load_config_or_exit", return_value=cfg),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.get_orphaned_services",
|
||||
return_value={"old-svc": "host1"},
|
||||
),
|
||||
patch(
|
||||
"compose_farm.cli.lifecycle.run_async",
|
||||
return_value=mock_results,
|
||||
),
|
||||
patch("compose_farm.cli.lifecycle.stop_orphaned_services") as mock_stop,
|
||||
patch("compose_farm.cli.lifecycle.report_results"),
|
||||
):
|
||||
down(
|
||||
services=None,
|
||||
all_services=False,
|
||||
orphaned=True,
|
||||
host=None,
|
||||
config=None,
|
||||
)
|
||||
|
||||
mock_stop.assert_called_once_with(cfg)
|
||||
|
||||
def test_down_orphaned_with_services_errors(self) -> None:
|
||||
"""--orphaned cannot be combined with service arguments."""
|
||||
with pytest.raises(typer.Exit) as exc_info:
|
||||
down(
|
||||
services=["svc1"],
|
||||
all_services=False,
|
||||
orphaned=True,
|
||||
host=None,
|
||||
config=None,
|
||||
)
|
||||
|
||||
assert exc_info.value.exit_code == 1
|
||||
|
||||
def test_down_orphaned_with_all_errors(self) -> None:
|
||||
"""--orphaned cannot be combined with --all."""
|
||||
with pytest.raises(typer.Exit) as exc_info:
|
||||
down(
|
||||
services=None,
|
||||
all_services=True,
|
||||
orphaned=True,
|
||||
host=None,
|
||||
config=None,
|
||||
)
|
||||
|
||||
assert exc_info.value.exit_code == 1
|
||||
|
||||
def test_down_orphaned_with_host_errors(self) -> None:
|
||||
"""--orphaned cannot be combined with --host."""
|
||||
with pytest.raises(typer.Exit) as exc_info:
|
||||
down(
|
||||
services=None,
|
||||
all_services=False,
|
||||
orphaned=True,
|
||||
host="host1",
|
||||
config=None,
|
||||
)
|
||||
|
||||
assert exc_info.value.exit_code == 1
|
||||
207
tests/test_cli_logs.py
Normal file
207
tests/test_cli_logs.py
Normal file
@@ -0,0 +1,207 @@
|
||||
"""Tests for CLI logs command."""
|
||||
|
||||
from collections.abc import Coroutine
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
import typer
|
||||
|
||||
from compose_farm.cli.monitoring import logs
|
||||
from compose_farm.config import Config, Host
|
||||
from compose_farm.executor import CommandResult
|
||||
|
||||
|
||||
def _make_config(tmp_path: Path) -> Config:
|
||||
"""Create a minimal config for testing."""
|
||||
compose_dir = tmp_path / "compose"
|
||||
compose_dir.mkdir()
|
||||
for svc in ("svc1", "svc2", "svc3"):
|
||||
svc_dir = compose_dir / svc
|
||||
svc_dir.mkdir()
|
||||
(svc_dir / "docker-compose.yml").write_text("services: {}\n")
|
||||
|
||||
return Config(
|
||||
compose_dir=compose_dir,
|
||||
hosts={"local": Host(address="localhost"), "remote": Host(address="192.168.1.10")},
|
||||
services={"svc1": "local", "svc2": "local", "svc3": "remote"},
|
||||
)
|
||||
|
||||
|
||||
def _make_result(service: str) -> CommandResult:
|
||||
"""Create a successful command result."""
|
||||
return CommandResult(service=service, exit_code=0, success=True, stdout="", stderr="")
|
||||
|
||||
|
||||
def _mock_run_async_factory(
|
||||
services: list[str],
|
||||
) -> tuple[Any, list[CommandResult]]:
|
||||
"""Create a mock run_async that returns results for given services."""
|
||||
results = [_make_result(s) for s in services]
|
||||
|
||||
def mock_run_async(_coro: Coroutine[Any, Any, Any]) -> list[CommandResult]:
|
||||
return results
|
||||
|
||||
return mock_run_async, results
|
||||
|
||||
|
||||
class TestLogsContextualDefault:
|
||||
"""Tests for logs --tail contextual default behavior."""
|
||||
|
||||
def test_logs_all_services_defaults_to_20(self, tmp_path: Path) -> None:
|
||||
"""When --all is specified, default tail should be 20."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_run_async, _ = _mock_run_async_factory(["svc1", "svc2", "svc3"])
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.monitoring.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.common.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.monitoring.run_async", side_effect=mock_run_async),
|
||||
patch("compose_farm.cli.monitoring.run_on_services") as mock_run,
|
||||
):
|
||||
mock_run.return_value = None
|
||||
|
||||
logs(services=None, all_services=True, host=None, follow=False, tail=None, config=None)
|
||||
|
||||
mock_run.assert_called_once()
|
||||
call_args = mock_run.call_args
|
||||
assert call_args[0][2] == "logs --tail 20"
|
||||
|
||||
def test_logs_single_service_defaults_to_100(self, tmp_path: Path) -> None:
|
||||
"""When specific services are specified, default tail should be 100."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_run_async, _ = _mock_run_async_factory(["svc1"])
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.monitoring.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.common.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.monitoring.run_async", side_effect=mock_run_async),
|
||||
patch("compose_farm.cli.monitoring.run_on_services") as mock_run,
|
||||
):
|
||||
logs(
|
||||
services=["svc1"],
|
||||
all_services=False,
|
||||
host=None,
|
||||
follow=False,
|
||||
tail=None,
|
||||
config=None,
|
||||
)
|
||||
|
||||
mock_run.assert_called_once()
|
||||
call_args = mock_run.call_args
|
||||
assert call_args[0][2] == "logs --tail 100"
|
||||
|
||||
def test_logs_explicit_tail_overrides_default(self, tmp_path: Path) -> None:
|
||||
"""When --tail is explicitly provided, it should override the default."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_run_async, _ = _mock_run_async_factory(["svc1", "svc2", "svc3"])
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.monitoring.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.common.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.monitoring.run_async", side_effect=mock_run_async),
|
||||
patch("compose_farm.cli.monitoring.run_on_services") as mock_run,
|
||||
):
|
||||
logs(
|
||||
services=None,
|
||||
all_services=True,
|
||||
host=None,
|
||||
follow=False,
|
||||
tail=50,
|
||||
config=None,
|
||||
)
|
||||
|
||||
mock_run.assert_called_once()
|
||||
call_args = mock_run.call_args
|
||||
assert call_args[0][2] == "logs --tail 50"
|
||||
|
||||
def test_logs_follow_appends_flag(self, tmp_path: Path) -> None:
|
||||
"""When --follow is specified, -f should be appended to command."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_run_async, _ = _mock_run_async_factory(["svc1"])
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.monitoring.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.common.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.monitoring.run_async", side_effect=mock_run_async),
|
||||
patch("compose_farm.cli.monitoring.run_on_services") as mock_run,
|
||||
):
|
||||
logs(
|
||||
services=["svc1"],
|
||||
all_services=False,
|
||||
host=None,
|
||||
follow=True,
|
||||
tail=None,
|
||||
config=None,
|
||||
)
|
||||
|
||||
mock_run.assert_called_once()
|
||||
call_args = mock_run.call_args
|
||||
assert call_args[0][2] == "logs --tail 100 -f"
|
||||
|
||||
|
||||
class TestLogsHostFilter:
|
||||
"""Tests for logs --host filter behavior."""
|
||||
|
||||
def test_logs_host_filter_selects_services_on_host(self, tmp_path: Path) -> None:
|
||||
"""When --host is specified, only services on that host are included."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_run_async, _ = _mock_run_async_factory(["svc1", "svc2"])
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.monitoring.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.monitoring.run_async", side_effect=mock_run_async),
|
||||
patch("compose_farm.cli.monitoring.run_on_services") as mock_run,
|
||||
):
|
||||
logs(
|
||||
services=None,
|
||||
all_services=False,
|
||||
host="local",
|
||||
follow=False,
|
||||
tail=None,
|
||||
config=None,
|
||||
)
|
||||
|
||||
mock_run.assert_called_once()
|
||||
call_args = mock_run.call_args
|
||||
# svc1 and svc2 are on "local", svc3 is on "remote"
|
||||
assert set(call_args[0][1]) == {"svc1", "svc2"}
|
||||
|
||||
def test_logs_host_filter_defaults_to_20_lines(self, tmp_path: Path) -> None:
|
||||
"""When --host is specified, default tail should be 20 (multiple services)."""
|
||||
cfg = _make_config(tmp_path)
|
||||
mock_run_async, _ = _mock_run_async_factory(["svc1", "svc2"])
|
||||
|
||||
with (
|
||||
patch("compose_farm.cli.monitoring.load_config_or_exit", return_value=cfg),
|
||||
patch("compose_farm.cli.monitoring.run_async", side_effect=mock_run_async),
|
||||
patch("compose_farm.cli.monitoring.run_on_services") as mock_run,
|
||||
):
|
||||
logs(
|
||||
services=None,
|
||||
all_services=False,
|
||||
host="local",
|
||||
follow=False,
|
||||
tail=None,
|
||||
config=None,
|
||||
)
|
||||
|
||||
mock_run.assert_called_once()
|
||||
call_args = mock_run.call_args
|
||||
assert call_args[0][2] == "logs --tail 20"
|
||||
|
||||
def test_logs_all_and_host_mutually_exclusive(self) -> None:
|
||||
"""Using --all and --host together should error."""
|
||||
# No config mock needed - error is raised before config is loaded
|
||||
with pytest.raises(typer.Exit) as exc_info:
|
||||
logs(
|
||||
services=None,
|
||||
all_services=True,
|
||||
host="local",
|
||||
follow=False,
|
||||
tail=None,
|
||||
config=None,
|
||||
)
|
||||
|
||||
assert exc_info.value.exit_code == 1
|
||||
58
tests/test_cli_startup.py
Normal file
58
tests/test_cli_startup.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""Test CLI startup performance."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
# Thresholds in seconds, per OS
|
||||
if sys.platform == "win32":
|
||||
CLI_STARTUP_THRESHOLD = 2.0
|
||||
elif sys.platform == "darwin":
|
||||
CLI_STARTUP_THRESHOLD = 0.35
|
||||
else: # Linux
|
||||
CLI_STARTUP_THRESHOLD = 0.25
|
||||
|
||||
|
||||
def test_cli_startup_time() -> None:
|
||||
"""Verify CLI startup time stays within acceptable bounds.
|
||||
|
||||
This test ensures we don't accidentally introduce slow imports
|
||||
that degrade the user experience.
|
||||
"""
|
||||
cf_path = shutil.which("cf")
|
||||
assert cf_path is not None, "cf command not found in PATH"
|
||||
|
||||
# Run up to 6 times, return early if we hit the threshold
|
||||
times: list[float] = []
|
||||
for _ in range(6):
|
||||
start = time.perf_counter()
|
||||
result = subprocess.run(
|
||||
[cf_path, "--help"],
|
||||
check=False,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
)
|
||||
elapsed = time.perf_counter() - start
|
||||
times.append(elapsed)
|
||||
|
||||
# Verify the command succeeded
|
||||
assert result.returncode == 0, f"CLI failed: {result.stderr}"
|
||||
|
||||
# Pass early if under threshold
|
||||
if elapsed < CLI_STARTUP_THRESHOLD:
|
||||
print(f"\nCLI startup: {elapsed:.3f}s (threshold: {CLI_STARTUP_THRESHOLD}s)")
|
||||
return
|
||||
|
||||
# All attempts exceeded threshold
|
||||
best_time = min(times)
|
||||
msg = (
|
||||
f"\nCLI startup times: {[f'{t:.3f}s' for t in times]}\n"
|
||||
f"Best: {best_time:.3f}s, Threshold: {CLI_STARTUP_THRESHOLD}s"
|
||||
)
|
||||
print(msg)
|
||||
|
||||
err_msg = f"CLI startup too slow!\n{msg}\nCheck for slow imports."
|
||||
raise AssertionError(err_msg)
|
||||
@@ -128,6 +128,8 @@ class TestLoadConfig:
|
||||
|
||||
def test_load_config_not_found(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
monkeypatch.chdir(tmp_path)
|
||||
monkeypatch.delenv("CF_CONFIG", raising=False)
|
||||
monkeypatch.setenv("XDG_CONFIG_HOME", str(tmp_path / "empty_config"))
|
||||
with pytest.raises(FileNotFoundError, match="Config file not found"):
|
||||
load_config()
|
||||
|
||||
|
||||
230
tests/test_config_cmd.py
Normal file
230
tests/test_config_cmd.py
Normal file
@@ -0,0 +1,230 @@
|
||||
"""Tests for config command module."""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import pytest
|
||||
import yaml
|
||||
from typer.testing import CliRunner
|
||||
|
||||
from compose_farm.cli import app
|
||||
from compose_farm.cli.config import (
|
||||
_generate_template,
|
||||
_get_config_file,
|
||||
_get_editor,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def runner() -> CliRunner:
|
||||
return CliRunner()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def valid_config_data() -> dict[str, Any]:
|
||||
return {
|
||||
"compose_dir": "/opt/compose",
|
||||
"hosts": {"server1": "192.168.1.10"},
|
||||
"services": {"nginx": "server1"},
|
||||
}
|
||||
|
||||
|
||||
class TestGetEditor:
|
||||
"""Tests for _get_editor function."""
|
||||
|
||||
def test_uses_editor_env(self, monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
monkeypatch.setenv("EDITOR", "code")
|
||||
monkeypatch.delenv("VISUAL", raising=False)
|
||||
assert _get_editor() == "code"
|
||||
|
||||
def test_uses_visual_env(self, monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
monkeypatch.delenv("EDITOR", raising=False)
|
||||
monkeypatch.setenv("VISUAL", "subl")
|
||||
assert _get_editor() == "subl"
|
||||
|
||||
def test_editor_takes_precedence(self, monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
monkeypatch.setenv("EDITOR", "vim")
|
||||
monkeypatch.setenv("VISUAL", "code")
|
||||
assert _get_editor() == "vim"
|
||||
|
||||
|
||||
class TestGetConfigFile:
|
||||
"""Tests for _get_config_file function."""
|
||||
|
||||
def test_explicit_path(self, tmp_path: Path) -> None:
|
||||
config_file = tmp_path / "my-config.yaml"
|
||||
config_file.touch()
|
||||
result = _get_config_file(config_file)
|
||||
assert result == config_file.resolve()
|
||||
|
||||
def test_cf_config_env(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
config_file = tmp_path / "env-config.yaml"
|
||||
config_file.touch()
|
||||
monkeypatch.setenv("CF_CONFIG", str(config_file))
|
||||
result = _get_config_file(None)
|
||||
assert result == config_file.resolve()
|
||||
|
||||
def test_returns_none_when_not_found(
|
||||
self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
|
||||
) -> None:
|
||||
monkeypatch.chdir(tmp_path)
|
||||
monkeypatch.delenv("CF_CONFIG", raising=False)
|
||||
# Set XDG_CONFIG_HOME to a nonexistent path - config_search_paths() will
|
||||
# now return paths that don't exist
|
||||
monkeypatch.setenv("XDG_CONFIG_HOME", str(tmp_path / "nonexistent"))
|
||||
result = _get_config_file(None)
|
||||
assert result is None
|
||||
|
||||
|
||||
class TestGenerateTemplate:
|
||||
"""Tests for _generate_template function."""
|
||||
|
||||
def test_generates_valid_yaml(self) -> None:
|
||||
template = _generate_template()
|
||||
# Should be valid YAML
|
||||
data = yaml.safe_load(template)
|
||||
assert "compose_dir" in data
|
||||
assert "hosts" in data
|
||||
assert "services" in data
|
||||
|
||||
def test_has_documentation_comments(self) -> None:
|
||||
template = _generate_template()
|
||||
assert "# Compose Farm configuration" in template
|
||||
assert "hosts:" in template
|
||||
assert "services:" in template
|
||||
|
||||
|
||||
class TestConfigInit:
|
||||
"""Tests for cf config init command."""
|
||||
|
||||
def test_init_creates_file(
|
||||
self, runner: CliRunner, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
|
||||
) -> None:
|
||||
monkeypatch.delenv("CF_CONFIG", raising=False)
|
||||
config_file = tmp_path / "new-config.yaml"
|
||||
result = runner.invoke(app, ["config", "init", "-p", str(config_file)])
|
||||
assert result.exit_code == 0
|
||||
assert config_file.exists()
|
||||
assert "Config file created" in result.stdout
|
||||
|
||||
def test_init_force_overwrites(
|
||||
self, runner: CliRunner, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
|
||||
) -> None:
|
||||
monkeypatch.delenv("CF_CONFIG", raising=False)
|
||||
config_file = tmp_path / "existing.yaml"
|
||||
config_file.write_text("old content")
|
||||
result = runner.invoke(app, ["config", "init", "-p", str(config_file), "-f"])
|
||||
assert result.exit_code == 0
|
||||
content = config_file.read_text()
|
||||
assert "old content" not in content
|
||||
assert "compose_dir" in content
|
||||
|
||||
def test_init_prompts_on_existing(
|
||||
self, runner: CliRunner, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
|
||||
) -> None:
|
||||
monkeypatch.delenv("CF_CONFIG", raising=False)
|
||||
config_file = tmp_path / "existing.yaml"
|
||||
config_file.write_text("old content")
|
||||
result = runner.invoke(app, ["config", "init", "-p", str(config_file)], input="n\n")
|
||||
assert result.exit_code == 0
|
||||
assert "Aborted" in result.stdout
|
||||
assert config_file.read_text() == "old content"
|
||||
|
||||
|
||||
class TestConfigPath:
|
||||
"""Tests for cf config path command."""
|
||||
|
||||
def test_path_shows_config(
|
||||
self,
|
||||
runner: CliRunner,
|
||||
tmp_path: Path,
|
||||
valid_config_data: dict[str, Any],
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
monkeypatch.chdir(tmp_path)
|
||||
monkeypatch.delenv("CF_CONFIG", raising=False)
|
||||
config_file = tmp_path / "compose-farm.yaml"
|
||||
config_file.write_text(yaml.dump(valid_config_data))
|
||||
result = runner.invoke(app, ["config", "path"])
|
||||
assert result.exit_code == 0
|
||||
assert str(config_file) in result.stdout
|
||||
|
||||
def test_path_with_explicit_path(self, runner: CliRunner, tmp_path: Path) -> None:
|
||||
# When explicitly provided, path is returned even if file doesn't exist
|
||||
nonexistent = tmp_path / "nonexistent.yaml"
|
||||
result = runner.invoke(app, ["config", "path", "-p", str(nonexistent)])
|
||||
assert result.exit_code == 0
|
||||
assert str(nonexistent) in result.stdout
|
||||
|
||||
|
||||
class TestConfigShow:
|
||||
"""Tests for cf config show command."""
|
||||
|
||||
def test_show_displays_content(
|
||||
self,
|
||||
runner: CliRunner,
|
||||
tmp_path: Path,
|
||||
valid_config_data: dict[str, Any],
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
monkeypatch.chdir(tmp_path)
|
||||
monkeypatch.delenv("CF_CONFIG", raising=False)
|
||||
config_file = tmp_path / "compose-farm.yaml"
|
||||
config_file.write_text(yaml.dump(valid_config_data))
|
||||
result = runner.invoke(app, ["config", "show"])
|
||||
assert result.exit_code == 0
|
||||
assert "Config file:" in result.stdout
|
||||
|
||||
def test_show_raw_output(
|
||||
self,
|
||||
runner: CliRunner,
|
||||
tmp_path: Path,
|
||||
valid_config_data: dict[str, Any],
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
monkeypatch.chdir(tmp_path)
|
||||
monkeypatch.delenv("CF_CONFIG", raising=False)
|
||||
config_file = tmp_path / "compose-farm.yaml"
|
||||
content = yaml.dump(valid_config_data)
|
||||
config_file.write_text(content)
|
||||
result = runner.invoke(app, ["config", "show", "-r"])
|
||||
assert result.exit_code == 0
|
||||
assert content in result.stdout
|
||||
|
||||
|
||||
class TestConfigValidate:
|
||||
"""Tests for cf config validate command."""
|
||||
|
||||
def test_validate_valid_config(
|
||||
self,
|
||||
runner: CliRunner,
|
||||
tmp_path: Path,
|
||||
valid_config_data: dict[str, Any],
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
monkeypatch.chdir(tmp_path)
|
||||
monkeypatch.delenv("CF_CONFIG", raising=False)
|
||||
config_file = tmp_path / "compose-farm.yaml"
|
||||
config_file.write_text(yaml.dump(valid_config_data))
|
||||
result = runner.invoke(app, ["config", "validate"])
|
||||
assert result.exit_code == 0
|
||||
assert "Valid config" in result.stdout
|
||||
assert "Hosts: 1" in result.stdout
|
||||
assert "Services: 1" in result.stdout
|
||||
|
||||
def test_validate_invalid_config(self, runner: CliRunner, tmp_path: Path) -> None:
|
||||
config_file = tmp_path / "invalid.yaml"
|
||||
config_file.write_text("invalid: [yaml: content")
|
||||
result = runner.invoke(app, ["config", "validate", "-p", str(config_file)])
|
||||
assert result.exit_code == 1
|
||||
# Error goes to stderr (captured in output when using CliRunner)
|
||||
output = result.stdout + (result.stderr or "")
|
||||
assert "Invalid config" in output or "✗" in output
|
||||
|
||||
def test_validate_missing_config(self, runner: CliRunner, tmp_path: Path) -> None:
|
||||
nonexistent = tmp_path / "nonexistent.yaml"
|
||||
result = runner.invoke(app, ["config", "validate", "-p", str(nonexistent)])
|
||||
assert result.exit_code == 1
|
||||
# Error goes to stderr
|
||||
output = result.stdout + (result.stderr or "")
|
||||
assert "Config file not found" in output or "not found" in output.lower()
|
||||
241
tests/test_executor.py
Normal file
241
tests/test_executor.py
Normal file
@@ -0,0 +1,241 @@
|
||||
"""Tests for executor module."""
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from compose_farm.config import Config, Host
|
||||
from compose_farm.executor import (
|
||||
CommandResult,
|
||||
_run_local_command,
|
||||
check_networks_exist,
|
||||
check_paths_exist,
|
||||
is_local,
|
||||
run_command,
|
||||
run_compose,
|
||||
run_on_services,
|
||||
)
|
||||
|
||||
# These tests run actual shell commands that only work on Linux
|
||||
linux_only = pytest.mark.skipif(sys.platform != "linux", reason="Linux-only shell commands")
|
||||
|
||||
|
||||
class TestIsLocal:
|
||||
"""Tests for is_local function."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"address",
|
||||
["local", "localhost", "127.0.0.1", "::1", "LOCAL", "LOCALHOST"],
|
||||
)
|
||||
def test_local_addresses(self, address: str) -> None:
|
||||
host = Host(address=address)
|
||||
assert is_local(host) is True
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"address",
|
||||
["192.168.1.10", "nas01.local", "10.0.0.1", "example.com"],
|
||||
)
|
||||
def test_remote_addresses(self, address: str) -> None:
|
||||
host = Host(address=address)
|
||||
assert is_local(host) is False
|
||||
|
||||
|
||||
class TestRunLocalCommand:
|
||||
"""Tests for local command execution."""
|
||||
|
||||
async def test_run_local_command_success(self) -> None:
|
||||
result = await _run_local_command("echo hello", "test-service")
|
||||
assert result.success is True
|
||||
assert result.exit_code == 0
|
||||
assert result.service == "test-service"
|
||||
|
||||
async def test_run_local_command_failure(self) -> None:
|
||||
result = await _run_local_command("exit 1", "test-service")
|
||||
assert result.success is False
|
||||
assert result.exit_code == 1
|
||||
|
||||
async def test_run_local_command_not_found(self) -> None:
|
||||
result = await _run_local_command("nonexistent_command_xyz", "test-service")
|
||||
assert result.success is False
|
||||
assert result.exit_code != 0
|
||||
|
||||
async def test_run_local_command_captures_output(self) -> None:
|
||||
result = await _run_local_command("echo hello", "test-service", stream=False)
|
||||
assert "hello" in result.stdout
|
||||
|
||||
|
||||
class TestRunCommand:
|
||||
"""Tests for run_command dispatcher."""
|
||||
|
||||
async def test_run_command_local(self) -> None:
|
||||
host = Host(address="localhost")
|
||||
result = await run_command(host, "echo test", "test-service")
|
||||
assert result.success is True
|
||||
|
||||
async def test_run_command_result_structure(self) -> None:
|
||||
host = Host(address="local")
|
||||
result = await run_command(host, "true", "my-service")
|
||||
assert isinstance(result, CommandResult)
|
||||
assert result.service == "my-service"
|
||||
assert result.exit_code == 0
|
||||
assert result.success is True
|
||||
|
||||
|
||||
class TestRunCompose:
|
||||
"""Tests for compose command execution."""
|
||||
|
||||
async def test_run_compose_builds_correct_command(self, tmp_path: Path) -> None:
|
||||
# Create a minimal compose file
|
||||
compose_dir = tmp_path / "compose"
|
||||
service_dir = compose_dir / "test-service"
|
||||
service_dir.mkdir(parents=True)
|
||||
compose_file = service_dir / "docker-compose.yml"
|
||||
compose_file.write_text("services: {}")
|
||||
|
||||
config = Config(
|
||||
compose_dir=compose_dir,
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={"test-service": "local"},
|
||||
)
|
||||
|
||||
# This will fail because docker compose isn't running,
|
||||
# but we can verify the command structure works
|
||||
result = await run_compose(config, "test-service", "config", stream=False)
|
||||
# Command may fail due to no docker, but structure is correct
|
||||
assert result.service == "test-service"
|
||||
|
||||
|
||||
class TestRunOnServices:
|
||||
"""Tests for parallel service execution."""
|
||||
|
||||
async def test_run_on_services_parallel(self) -> None:
|
||||
config = Config(
|
||||
compose_dir=Path("/tmp"),
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={"svc1": "local", "svc2": "local"},
|
||||
)
|
||||
|
||||
# Use a simple command that will work without docker
|
||||
# We'll test the parallelism structure
|
||||
results = await run_on_services(config, ["svc1", "svc2"], "version", stream=False)
|
||||
assert len(results) == 2
|
||||
assert results[0].service == "svc1"
|
||||
assert results[1].service == "svc2"
|
||||
|
||||
|
||||
@linux_only
|
||||
class TestCheckPathsExist:
|
||||
"""Tests for check_paths_exist function (uses 'test -e' shell command)."""
|
||||
|
||||
async def test_check_existing_paths(self, tmp_path: Path) -> None:
|
||||
"""Check paths that exist."""
|
||||
config = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={},
|
||||
)
|
||||
# Create test paths
|
||||
(tmp_path / "dir1").mkdir()
|
||||
(tmp_path / "file1").touch()
|
||||
|
||||
result = await check_paths_exist(
|
||||
config, "local", [str(tmp_path / "dir1"), str(tmp_path / "file1")]
|
||||
)
|
||||
|
||||
assert result[str(tmp_path / "dir1")] is True
|
||||
assert result[str(tmp_path / "file1")] is True
|
||||
|
||||
async def test_check_missing_paths(self, tmp_path: Path) -> None:
|
||||
"""Check paths that don't exist."""
|
||||
config = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={},
|
||||
)
|
||||
|
||||
result = await check_paths_exist(
|
||||
config, "local", [str(tmp_path / "missing1"), str(tmp_path / "missing2")]
|
||||
)
|
||||
|
||||
assert result[str(tmp_path / "missing1")] is False
|
||||
assert result[str(tmp_path / "missing2")] is False
|
||||
|
||||
async def test_check_mixed_paths(self, tmp_path: Path) -> None:
|
||||
"""Check mix of existing and missing paths."""
|
||||
config = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={},
|
||||
)
|
||||
(tmp_path / "exists").mkdir()
|
||||
|
||||
result = await check_paths_exist(
|
||||
config, "local", [str(tmp_path / "exists"), str(tmp_path / "missing")]
|
||||
)
|
||||
|
||||
assert result[str(tmp_path / "exists")] is True
|
||||
assert result[str(tmp_path / "missing")] is False
|
||||
|
||||
async def test_check_empty_paths(self, tmp_path: Path) -> None:
|
||||
"""Empty path list returns empty dict."""
|
||||
config = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={},
|
||||
)
|
||||
|
||||
result = await check_paths_exist(config, "local", [])
|
||||
assert result == {}
|
||||
|
||||
|
||||
@linux_only
|
||||
class TestCheckNetworksExist:
|
||||
"""Tests for check_networks_exist function (requires Docker)."""
|
||||
|
||||
async def test_check_bridge_network_exists(self, tmp_path: Path) -> None:
|
||||
"""The 'bridge' network always exists on Docker hosts."""
|
||||
config = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={},
|
||||
)
|
||||
|
||||
result = await check_networks_exist(config, "local", ["bridge"])
|
||||
assert result["bridge"] is True
|
||||
|
||||
async def test_check_nonexistent_network(self, tmp_path: Path) -> None:
|
||||
"""Check a network that doesn't exist."""
|
||||
config = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={},
|
||||
)
|
||||
|
||||
result = await check_networks_exist(config, "local", ["nonexistent_network_xyz_123"])
|
||||
assert result["nonexistent_network_xyz_123"] is False
|
||||
|
||||
async def test_check_mixed_networks(self, tmp_path: Path) -> None:
|
||||
"""Check mix of existing and non-existing networks."""
|
||||
config = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={},
|
||||
)
|
||||
|
||||
result = await check_networks_exist(
|
||||
config, "local", ["bridge", "nonexistent_network_xyz_123"]
|
||||
)
|
||||
assert result["bridge"] is True
|
||||
assert result["nonexistent_network_xyz_123"] is False
|
||||
|
||||
async def test_check_empty_networks(self, tmp_path: Path) -> None:
|
||||
"""Empty network list returns empty dict."""
|
||||
config = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={},
|
||||
)
|
||||
|
||||
result = await check_networks_exist(config, "local", [])
|
||||
assert result == {}
|
||||
@@ -8,8 +8,15 @@ from pathlib import Path
|
||||
import pytest
|
||||
|
||||
from compose_farm.config import Config, Host
|
||||
from compose_farm.logs import _parse_images_output, snapshot_services
|
||||
from compose_farm.ssh import CommandResult
|
||||
from compose_farm.executor import CommandResult
|
||||
from compose_farm.logs import (
|
||||
_parse_images_output,
|
||||
collect_service_entries,
|
||||
isoformat,
|
||||
load_existing_entries,
|
||||
merge_entries,
|
||||
write_toml,
|
||||
)
|
||||
|
||||
|
||||
def test_parse_images_output_handles_list_and_lines() -> None:
|
||||
@@ -55,26 +62,29 @@ async def test_snapshot_preserves_first_seen(tmp_path: Path) -> None:
|
||||
|
||||
log_path = tmp_path / "dockerfarm-log.toml"
|
||||
|
||||
# First snapshot
|
||||
first_time = datetime(2025, 1, 1, tzinfo=UTC)
|
||||
await snapshot_services(
|
||||
config,
|
||||
["svc"],
|
||||
log_path=log_path,
|
||||
now=first_time,
|
||||
run_compose_fn=fake_run_compose,
|
||||
first_entries = await collect_service_entries(
|
||||
config, "svc", now=first_time, run_compose_fn=fake_run_compose
|
||||
)
|
||||
first_iso = isoformat(first_time)
|
||||
merged = merge_entries([], first_entries, now_iso=first_iso)
|
||||
meta = {"generated_at": first_iso, "compose_dir": str(config.compose_dir)}
|
||||
write_toml(log_path, meta=meta, entries=merged)
|
||||
|
||||
after_first = tomllib.loads(log_path.read_text())
|
||||
first_seen = after_first["entries"][0]["first_seen"]
|
||||
|
||||
# Second snapshot
|
||||
second_time = datetime(2025, 2, 1, tzinfo=UTC)
|
||||
await snapshot_services(
|
||||
config,
|
||||
["svc"],
|
||||
log_path=log_path,
|
||||
now=second_time,
|
||||
run_compose_fn=fake_run_compose,
|
||||
second_entries = await collect_service_entries(
|
||||
config, "svc", now=second_time, run_compose_fn=fake_run_compose
|
||||
)
|
||||
second_iso = isoformat(second_time)
|
||||
existing = load_existing_entries(log_path)
|
||||
merged = merge_entries(existing, second_entries, now_iso=second_iso)
|
||||
meta = {"generated_at": second_iso, "compose_dir": str(config.compose_dir)}
|
||||
write_toml(log_path, meta=meta, entries=merged)
|
||||
|
||||
after_second = tomllib.loads(log_path.read_text())
|
||||
entry = after_second["entries"][0]
|
||||
|
||||
111
tests/test_operations.py
Normal file
111
tests/test_operations.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""Tests for operations module."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import inspect
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from compose_farm.cli import lifecycle
|
||||
from compose_farm.config import Config, Host
|
||||
from compose_farm.executor import CommandResult
|
||||
from compose_farm.operations import _migrate_service
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def basic_config(tmp_path: Path) -> Config:
|
||||
"""Create a basic test config."""
|
||||
compose_dir = tmp_path / "compose"
|
||||
service_dir = compose_dir / "test-service"
|
||||
service_dir.mkdir(parents=True)
|
||||
(service_dir / "docker-compose.yml").write_text("services: {}")
|
||||
return Config(
|
||||
compose_dir=compose_dir,
|
||||
hosts={
|
||||
"host1": Host(address="localhost"),
|
||||
"host2": Host(address="localhost"),
|
||||
},
|
||||
services={"test-service": "host2"},
|
||||
)
|
||||
|
||||
|
||||
class TestMigrationCommands:
|
||||
"""Tests for migration command sequence."""
|
||||
|
||||
@pytest.fixture
|
||||
def config(self, tmp_path: Path) -> Config:
|
||||
"""Create a test config."""
|
||||
compose_dir = tmp_path / "compose"
|
||||
service_dir = compose_dir / "test-service"
|
||||
service_dir.mkdir(parents=True)
|
||||
(service_dir / "docker-compose.yml").write_text("services: {}")
|
||||
return Config(
|
||||
compose_dir=compose_dir,
|
||||
hosts={
|
||||
"host1": Host(address="localhost"),
|
||||
"host2": Host(address="localhost"),
|
||||
},
|
||||
services={"test-service": "host2"},
|
||||
)
|
||||
|
||||
async def test_migration_uses_pull_ignore_buildable(self, config: Config) -> None:
|
||||
"""Migration should use 'pull --ignore-buildable' to skip buildable images."""
|
||||
commands_called: list[str] = []
|
||||
|
||||
async def mock_run_compose_step(
|
||||
cfg: Config,
|
||||
service: str,
|
||||
command: str,
|
||||
*,
|
||||
raw: bool,
|
||||
host: str | None = None,
|
||||
) -> CommandResult:
|
||||
commands_called.append(command)
|
||||
return CommandResult(
|
||||
service=service,
|
||||
exit_code=0,
|
||||
success=True,
|
||||
)
|
||||
|
||||
with patch(
|
||||
"compose_farm.operations._run_compose_step",
|
||||
side_effect=mock_run_compose_step,
|
||||
):
|
||||
await _migrate_service(
|
||||
config,
|
||||
"test-service",
|
||||
current_host="host1",
|
||||
target_host="host2",
|
||||
prefix="[test]",
|
||||
raw=False,
|
||||
)
|
||||
|
||||
# Migration should call pull with --ignore-buildable, then build, then down
|
||||
assert "pull --ignore-buildable" in commands_called
|
||||
assert "build" in commands_called
|
||||
assert "down" in commands_called
|
||||
# pull should come before build
|
||||
pull_idx = commands_called.index("pull --ignore-buildable")
|
||||
build_idx = commands_called.index("build")
|
||||
assert pull_idx < build_idx
|
||||
|
||||
|
||||
class TestUpdateCommandSequence:
|
||||
"""Tests for update command sequence."""
|
||||
|
||||
def test_update_command_sequence_includes_build(self) -> None:
|
||||
"""Update command should use pull --ignore-buildable and build."""
|
||||
# This is a static check of the command sequence in lifecycle.py
|
||||
# The actual command sequence is defined in the update function
|
||||
|
||||
source = inspect.getsource(lifecycle.update)
|
||||
|
||||
# Verify the command sequence includes pull --ignore-buildable
|
||||
assert "pull --ignore-buildable" in source
|
||||
# Verify build is included
|
||||
assert '"build"' in source or "'build'" in source
|
||||
# Verify the sequence is pull, build, down, up
|
||||
assert "down" in source
|
||||
assert "up -d" in source
|
||||
@@ -1,16 +1,15 @@
|
||||
"""Tests for sync command and related functions."""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from compose_farm import cli as cli_module
|
||||
from compose_farm import ssh as ssh_module
|
||||
from compose_farm import executor as executor_module
|
||||
from compose_farm import state as state_module
|
||||
from compose_farm.cli import management as cli_management_module
|
||||
from compose_farm.config import Config, Host
|
||||
from compose_farm.ssh import CommandResult, check_service_running
|
||||
from compose_farm.executor import CommandResult, check_service_running
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -58,7 +57,7 @@ class TestCheckServiceRunning:
|
||||
@pytest.mark.asyncio
|
||||
async def test_service_running(self, mock_config: Config) -> None:
|
||||
"""Returns True when service has running containers."""
|
||||
with patch.object(ssh_module, "run_command", new_callable=AsyncMock) as mock_run:
|
||||
with patch.object(executor_module, "run_command", new_callable=AsyncMock) as mock_run:
|
||||
mock_run.return_value = CommandResult(
|
||||
service="plex",
|
||||
exit_code=0,
|
||||
@@ -71,7 +70,7 @@ class TestCheckServiceRunning:
|
||||
@pytest.mark.asyncio
|
||||
async def test_service_not_running(self, mock_config: Config) -> None:
|
||||
"""Returns False when service has no running containers."""
|
||||
with patch.object(ssh_module, "run_command", new_callable=AsyncMock) as mock_run:
|
||||
with patch.object(executor_module, "run_command", new_callable=AsyncMock) as mock_run:
|
||||
mock_run.return_value = CommandResult(
|
||||
service="plex",
|
||||
exit_code=0,
|
||||
@@ -84,7 +83,7 @@ class TestCheckServiceRunning:
|
||||
@pytest.mark.asyncio
|
||||
async def test_command_failed(self, mock_config: Config) -> None:
|
||||
"""Returns False when command fails."""
|
||||
with patch.object(ssh_module, "run_command", new_callable=AsyncMock) as mock_run:
|
||||
with patch.object(executor_module, "run_command", new_callable=AsyncMock) as mock_run:
|
||||
mock_run.return_value = CommandResult(
|
||||
service="plex",
|
||||
exit_code=1,
|
||||
@@ -94,48 +93,12 @@ class TestCheckServiceRunning:
|
||||
assert result is False
|
||||
|
||||
|
||||
class TestDiscoverRunningServices:
|
||||
"""Tests for _discover_running_services function."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_discovers_on_assigned_host(self, mock_config: Config) -> None:
|
||||
"""Discovers service running on its assigned host."""
|
||||
with patch.object(
|
||||
cli_module, "check_service_running", new_callable=AsyncMock
|
||||
) as mock_check:
|
||||
# plex running on nas01, jellyfin not running, sonarr on nas02
|
||||
async def check_side_effect(_cfg: Any, service: str, host: str) -> bool:
|
||||
return (service == "plex" and host == "nas01") or (
|
||||
service == "sonarr" and host == "nas02"
|
||||
)
|
||||
|
||||
mock_check.side_effect = check_side_effect
|
||||
|
||||
result = await cli_module._discover_running_services(mock_config)
|
||||
assert result == {"plex": "nas01", "sonarr": "nas02"}
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_discovers_on_different_host(self, mock_config: Config) -> None:
|
||||
"""Discovers service running on non-assigned host (after migration)."""
|
||||
with patch.object(
|
||||
cli_module, "check_service_running", new_callable=AsyncMock
|
||||
) as mock_check:
|
||||
# plex migrated to nas02
|
||||
async def check_side_effect(_cfg: Any, service: str, host: str) -> bool:
|
||||
return service == "plex" and host == "nas02"
|
||||
|
||||
mock_check.side_effect = check_side_effect
|
||||
|
||||
result = await cli_module._discover_running_services(mock_config)
|
||||
assert result == {"plex": "nas02"}
|
||||
|
||||
|
||||
class TestReportSyncChanges:
|
||||
"""Tests for _report_sync_changes function."""
|
||||
|
||||
def test_reports_added(self, capsys: pytest.CaptureFixture[str]) -> None:
|
||||
"""Reports newly discovered services."""
|
||||
cli_module._report_sync_changes(
|
||||
cli_management_module._report_sync_changes(
|
||||
added=["plex", "jellyfin"],
|
||||
removed=[],
|
||||
changed=[],
|
||||
@@ -149,7 +112,7 @@ class TestReportSyncChanges:
|
||||
|
||||
def test_reports_removed(self, capsys: pytest.CaptureFixture[str]) -> None:
|
||||
"""Reports services that are no longer running."""
|
||||
cli_module._report_sync_changes(
|
||||
cli_management_module._report_sync_changes(
|
||||
added=[],
|
||||
removed=["sonarr"],
|
||||
changed=[],
|
||||
@@ -162,7 +125,7 @@ class TestReportSyncChanges:
|
||||
|
||||
def test_reports_changed(self, capsys: pytest.CaptureFixture[str]) -> None:
|
||||
"""Reports services that moved to a different host."""
|
||||
cli_module._report_sync_changes(
|
||||
cli_management_module._report_sync_changes(
|
||||
added=[],
|
||||
removed=[],
|
||||
changed=[("plex", "nas01", "nas02")],
|
||||
@@ -171,4 +134,4 @@ class TestReportSyncChanges:
|
||||
)
|
||||
captured = capsys.readouterr()
|
||||
assert "Services on different hosts (1)" in captured.out
|
||||
assert "~ plex: nas01 -> nas02" in captured.out
|
||||
assert "~ plex: nas01 → nas02" in captured.out
|
||||
@@ -1,118 +0,0 @@
|
||||
"""Tests for ssh module."""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from compose_farm.config import Config, Host
|
||||
from compose_farm.ssh import (
|
||||
CommandResult,
|
||||
_is_local,
|
||||
_run_local_command,
|
||||
run_command,
|
||||
run_compose,
|
||||
run_on_services,
|
||||
)
|
||||
|
||||
|
||||
class TestIsLocal:
|
||||
"""Tests for _is_local function."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"address",
|
||||
["local", "localhost", "127.0.0.1", "::1", "LOCAL", "LOCALHOST"],
|
||||
)
|
||||
def test_local_addresses(self, address: str) -> None:
|
||||
host = Host(address=address)
|
||||
assert _is_local(host) is True
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"address",
|
||||
["192.168.1.10", "nas01.local", "10.0.0.1", "example.com"],
|
||||
)
|
||||
def test_remote_addresses(self, address: str) -> None:
|
||||
host = Host(address=address)
|
||||
assert _is_local(host) is False
|
||||
|
||||
|
||||
class TestRunLocalCommand:
|
||||
"""Tests for local command execution."""
|
||||
|
||||
async def test_run_local_command_success(self) -> None:
|
||||
result = await _run_local_command("echo hello", "test-service")
|
||||
assert result.success is True
|
||||
assert result.exit_code == 0
|
||||
assert result.service == "test-service"
|
||||
|
||||
async def test_run_local_command_failure(self) -> None:
|
||||
result = await _run_local_command("exit 1", "test-service")
|
||||
assert result.success is False
|
||||
assert result.exit_code == 1
|
||||
|
||||
async def test_run_local_command_not_found(self) -> None:
|
||||
result = await _run_local_command("nonexistent_command_xyz", "test-service")
|
||||
assert result.success is False
|
||||
assert result.exit_code != 0
|
||||
|
||||
async def test_run_local_command_captures_output(self) -> None:
|
||||
result = await _run_local_command("echo hello", "test-service", stream=False)
|
||||
assert "hello" in result.stdout
|
||||
|
||||
|
||||
class TestRunCommand:
|
||||
"""Tests for run_command dispatcher."""
|
||||
|
||||
async def test_run_command_local(self) -> None:
|
||||
host = Host(address="localhost")
|
||||
result = await run_command(host, "echo test", "test-service")
|
||||
assert result.success is True
|
||||
|
||||
async def test_run_command_result_structure(self) -> None:
|
||||
host = Host(address="local")
|
||||
result = await run_command(host, "true", "my-service")
|
||||
assert isinstance(result, CommandResult)
|
||||
assert result.service == "my-service"
|
||||
assert result.exit_code == 0
|
||||
assert result.success is True
|
||||
|
||||
|
||||
class TestRunCompose:
|
||||
"""Tests for compose command execution."""
|
||||
|
||||
async def test_run_compose_builds_correct_command(self, tmp_path: Path) -> None:
|
||||
# Create a minimal compose file
|
||||
compose_dir = tmp_path / "compose"
|
||||
service_dir = compose_dir / "test-service"
|
||||
service_dir.mkdir(parents=True)
|
||||
compose_file = service_dir / "docker-compose.yml"
|
||||
compose_file.write_text("services: {}")
|
||||
|
||||
config = Config(
|
||||
compose_dir=compose_dir,
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={"test-service": "local"},
|
||||
)
|
||||
|
||||
# This will fail because docker compose isn't running,
|
||||
# but we can verify the command structure works
|
||||
result = await run_compose(config, "test-service", "config", stream=False)
|
||||
# Command may fail due to no docker, but structure is correct
|
||||
assert result.service == "test-service"
|
||||
|
||||
|
||||
class TestRunOnServices:
|
||||
"""Tests for parallel service execution."""
|
||||
|
||||
async def test_run_on_services_parallel(self) -> None:
|
||||
config = Config(
|
||||
compose_dir=Path("/tmp"),
|
||||
hosts={"local": Host(address="localhost")},
|
||||
services={"svc1": "local", "svc2": "local"},
|
||||
)
|
||||
|
||||
# Use a simple command that will work without docker
|
||||
# We'll test the parallelism structure
|
||||
results = await run_on_services(config, ["svc1", "svc2"], "version", stream=False)
|
||||
assert len(results) == 2
|
||||
assert results[0].service == "svc1"
|
||||
assert results[1].service == "svc2"
|
||||
@@ -4,9 +4,11 @@ from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from compose_farm import state as state_module
|
||||
from compose_farm.config import Config, Host
|
||||
from compose_farm.state import (
|
||||
get_orphaned_services,
|
||||
get_service_host,
|
||||
get_services_not_in_state,
|
||||
load_state,
|
||||
remove_service,
|
||||
save_state,
|
||||
@@ -15,52 +17,51 @@ from compose_farm.state import (
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def state_dir(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> Path:
|
||||
"""Create a temporary state directory and patch _get_state_path."""
|
||||
state_path = tmp_path / ".config" / "compose-farm"
|
||||
state_path.mkdir(parents=True)
|
||||
|
||||
def mock_get_state_path() -> Path:
|
||||
return state_path / "state.yaml"
|
||||
|
||||
monkeypatch.setattr(state_module, "_get_state_path", mock_get_state_path)
|
||||
return state_path
|
||||
def config(tmp_path: Path) -> Config:
|
||||
"""Create a config with a temporary config path for state storage."""
|
||||
config_path = tmp_path / "compose-farm.yaml"
|
||||
config_path.write_text("") # Create empty file
|
||||
return Config(
|
||||
compose_dir=tmp_path / "compose",
|
||||
hosts={"nas01": Host(address="192.168.1.10")},
|
||||
services={"plex": "nas01"},
|
||||
config_path=config_path,
|
||||
)
|
||||
|
||||
|
||||
class TestLoadState:
|
||||
"""Tests for load_state function."""
|
||||
|
||||
def test_load_state_empty(self, state_dir: Path) -> None:
|
||||
def test_load_state_empty(self, config: Config) -> None:
|
||||
"""Returns empty dict when state file doesn't exist."""
|
||||
_ = state_dir # Fixture activates the mock
|
||||
result = load_state()
|
||||
result = load_state(config)
|
||||
assert result == {}
|
||||
|
||||
def test_load_state_with_data(self, state_dir: Path) -> None:
|
||||
def test_load_state_with_data(self, config: Config) -> None:
|
||||
"""Loads existing state from file."""
|
||||
state_file = state_dir / "state.yaml"
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n jellyfin: nas02\n")
|
||||
|
||||
result = load_state()
|
||||
result = load_state(config)
|
||||
assert result == {"plex": "nas01", "jellyfin": "nas02"}
|
||||
|
||||
def test_load_state_empty_file(self, state_dir: Path) -> None:
|
||||
def test_load_state_empty_file(self, config: Config) -> None:
|
||||
"""Returns empty dict for empty file."""
|
||||
state_file = state_dir / "state.yaml"
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("")
|
||||
|
||||
result = load_state()
|
||||
result = load_state(config)
|
||||
assert result == {}
|
||||
|
||||
|
||||
class TestSaveState:
|
||||
"""Tests for save_state function."""
|
||||
|
||||
def test_save_state(self, state_dir: Path) -> None:
|
||||
def test_save_state(self, config: Config) -> None:
|
||||
"""Saves state to file."""
|
||||
save_state({"plex": "nas01", "jellyfin": "nas02"})
|
||||
save_state(config, {"plex": "nas01", "jellyfin": "nas02"})
|
||||
|
||||
state_file = state_dir / "state.yaml"
|
||||
state_file = config.get_state_path()
|
||||
assert state_file.exists()
|
||||
content = state_file.read_text()
|
||||
assert "plex: nas01" in content
|
||||
@@ -70,65 +71,171 @@ class TestSaveState:
|
||||
class TestGetServiceHost:
|
||||
"""Tests for get_service_host function."""
|
||||
|
||||
def test_get_existing_service(self, state_dir: Path) -> None:
|
||||
def test_get_existing_service(self, config: Config) -> None:
|
||||
"""Returns host for existing service."""
|
||||
state_file = state_dir / "state.yaml"
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n")
|
||||
|
||||
host = get_service_host("plex")
|
||||
host = get_service_host(config, "plex")
|
||||
assert host == "nas01"
|
||||
|
||||
def test_get_nonexistent_service(self, state_dir: Path) -> None:
|
||||
def test_get_nonexistent_service(self, config: Config) -> None:
|
||||
"""Returns None for service not in state."""
|
||||
state_file = state_dir / "state.yaml"
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n")
|
||||
|
||||
host = get_service_host("unknown")
|
||||
host = get_service_host(config, "unknown")
|
||||
assert host is None
|
||||
|
||||
|
||||
class TestSetServiceHost:
|
||||
"""Tests for set_service_host function."""
|
||||
|
||||
def test_set_new_service(self, state_dir: Path) -> None:
|
||||
def test_set_new_service(self, config: Config) -> None:
|
||||
"""Adds new service to state."""
|
||||
_ = state_dir # Fixture activates the mock
|
||||
set_service_host("plex", "nas01")
|
||||
set_service_host(config, "plex", "nas01")
|
||||
|
||||
result = load_state()
|
||||
result = load_state(config)
|
||||
assert result["plex"] == "nas01"
|
||||
|
||||
def test_update_existing_service(self, state_dir: Path) -> None:
|
||||
def test_update_existing_service(self, config: Config) -> None:
|
||||
"""Updates host for existing service."""
|
||||
state_file = state_dir / "state.yaml"
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n")
|
||||
|
||||
set_service_host("plex", "nas02")
|
||||
set_service_host(config, "plex", "nas02")
|
||||
|
||||
result = load_state()
|
||||
result = load_state(config)
|
||||
assert result["plex"] == "nas02"
|
||||
|
||||
|
||||
class TestRemoveService:
|
||||
"""Tests for remove_service function."""
|
||||
|
||||
def test_remove_existing_service(self, state_dir: Path) -> None:
|
||||
def test_remove_existing_service(self, config: Config) -> None:
|
||||
"""Removes service from state."""
|
||||
state_file = state_dir / "state.yaml"
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n jellyfin: nas02\n")
|
||||
|
||||
remove_service("plex")
|
||||
remove_service(config, "plex")
|
||||
|
||||
result = load_state()
|
||||
result = load_state(config)
|
||||
assert "plex" not in result
|
||||
assert result["jellyfin"] == "nas02"
|
||||
|
||||
def test_remove_nonexistent_service(self, state_dir: Path) -> None:
|
||||
def test_remove_nonexistent_service(self, config: Config) -> None:
|
||||
"""Removing nonexistent service doesn't error."""
|
||||
state_file = state_dir / "state.yaml"
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n")
|
||||
|
||||
remove_service("unknown") # Should not raise
|
||||
remove_service(config, "unknown") # Should not raise
|
||||
|
||||
result = load_state()
|
||||
result = load_state(config)
|
||||
assert result["plex"] == "nas01"
|
||||
|
||||
|
||||
class TestGetOrphanedServices:
|
||||
"""Tests for get_orphaned_services function."""
|
||||
|
||||
def test_no_orphans(self, config: Config) -> None:
|
||||
"""Returns empty dict when all services in state are in config."""
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n")
|
||||
|
||||
result = get_orphaned_services(config)
|
||||
assert result == {}
|
||||
|
||||
def test_finds_orphaned_service(self, config: Config) -> None:
|
||||
"""Returns services in state but not in config."""
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n jellyfin: nas02\n")
|
||||
|
||||
result = get_orphaned_services(config)
|
||||
# plex is in config, jellyfin is not
|
||||
assert result == {"jellyfin": "nas02"}
|
||||
|
||||
def test_finds_orphaned_multi_host_service(self, config: Config) -> None:
|
||||
"""Returns multi-host orphaned services with host list."""
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n dozzle:\n - nas01\n - nas02\n")
|
||||
|
||||
result = get_orphaned_services(config)
|
||||
assert result == {"dozzle": ["nas01", "nas02"]}
|
||||
|
||||
def test_empty_state(self, config: Config) -> None:
|
||||
"""Returns empty dict when state is empty."""
|
||||
result = get_orphaned_services(config)
|
||||
assert result == {}
|
||||
|
||||
def test_all_orphaned(self, tmp_path: Path) -> None:
|
||||
"""Returns all services when none are in config."""
|
||||
config_path = tmp_path / "compose-farm.yaml"
|
||||
config_path.write_text("")
|
||||
cfg = Config(
|
||||
compose_dir=tmp_path / "compose",
|
||||
hosts={"nas01": Host(address="192.168.1.10")},
|
||||
services={}, # No services in config
|
||||
config_path=config_path,
|
||||
)
|
||||
state_file = cfg.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n jellyfin: nas02\n")
|
||||
|
||||
result = get_orphaned_services(cfg)
|
||||
assert result == {"plex": "nas01", "jellyfin": "nas02"}
|
||||
|
||||
|
||||
class TestGetServicesNotInState:
|
||||
"""Tests for get_services_not_in_state function."""
|
||||
|
||||
def test_all_in_state(self, config: Config) -> None:
|
||||
"""Returns empty list when all services are in state."""
|
||||
state_file = config.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n")
|
||||
|
||||
result = get_services_not_in_state(config)
|
||||
assert result == []
|
||||
|
||||
def test_finds_missing_service(self, tmp_path: Path) -> None:
|
||||
"""Returns services in config but not in state."""
|
||||
config_path = tmp_path / "compose-farm.yaml"
|
||||
config_path.write_text("")
|
||||
cfg = Config(
|
||||
compose_dir=tmp_path / "compose",
|
||||
hosts={"nas01": Host(address="192.168.1.10")},
|
||||
services={"plex": "nas01", "jellyfin": "nas01"},
|
||||
config_path=config_path,
|
||||
)
|
||||
state_file = cfg.get_state_path()
|
||||
state_file.write_text("deployed:\n plex: nas01\n")
|
||||
|
||||
result = get_services_not_in_state(cfg)
|
||||
assert result == ["jellyfin"]
|
||||
|
||||
def test_empty_state(self, tmp_path: Path) -> None:
|
||||
"""Returns all services when state is empty."""
|
||||
config_path = tmp_path / "compose-farm.yaml"
|
||||
config_path.write_text("")
|
||||
cfg = Config(
|
||||
compose_dir=tmp_path / "compose",
|
||||
hosts={"nas01": Host(address="192.168.1.10")},
|
||||
services={"plex": "nas01", "jellyfin": "nas01"},
|
||||
config_path=config_path,
|
||||
)
|
||||
|
||||
result = get_services_not_in_state(cfg)
|
||||
assert set(result) == {"plex", "jellyfin"}
|
||||
|
||||
def test_empty_config(self, config: Config) -> None:
|
||||
"""Returns empty list when config has no services."""
|
||||
# config fixture has plex: nas01, but we need empty config
|
||||
config_path = config.config_path
|
||||
config_path.write_text("")
|
||||
cfg = Config(
|
||||
compose_dir=config.compose_dir,
|
||||
hosts={"nas01": Host(address="192.168.1.10")},
|
||||
services={},
|
||||
config_path=config_path,
|
||||
)
|
||||
|
||||
result = get_services_not_in_state(cfg)
|
||||
assert result == []
|
||||
|
||||
@@ -4,6 +4,7 @@ from pathlib import Path
|
||||
|
||||
import yaml
|
||||
|
||||
from compose_farm.compose import parse_external_networks
|
||||
from compose_farm.config import Config, Host
|
||||
from compose_farm.traefik import generate_traefik_config
|
||||
|
||||
@@ -76,7 +77,7 @@ def test_generate_traefik_config_without_published_port_warns(tmp_path: Path) ->
|
||||
dynamic, warnings = generate_traefik_config(cfg, ["app"])
|
||||
|
||||
assert dynamic["http"]["routers"]["app"]["rule"] == "Host(`app.lab.mydomain.org`)"
|
||||
assert any("No host-published port found" in warning for warning in warnings)
|
||||
assert any("No published port found" in warning for warning in warnings)
|
||||
|
||||
|
||||
def test_generate_interpolates_env_and_infers_router_service(tmp_path: Path) -> None:
|
||||
@@ -193,3 +194,145 @@ def test_generate_skips_services_with_enable_false(tmp_path: Path) -> None:
|
||||
|
||||
assert dynamic == {}
|
||||
assert warnings == []
|
||||
|
||||
|
||||
def test_generate_follows_network_mode_service_for_ports(tmp_path: Path) -> None:
|
||||
"""Services using network_mode: service:X should use ports from service X."""
|
||||
cfg = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"nas01": Host(address="192.168.1.10")},
|
||||
services={"vpn-stack": "nas01"},
|
||||
)
|
||||
compose_path = tmp_path / "vpn-stack" / "docker-compose.yml"
|
||||
_write_compose(
|
||||
compose_path,
|
||||
{
|
||||
"services": {
|
||||
"vpn": {
|
||||
"image": "gluetun",
|
||||
"ports": ["5080:5080", "9696:9696"],
|
||||
},
|
||||
"qbittorrent": {
|
||||
"image": "qbittorrent",
|
||||
"network_mode": "service:vpn",
|
||||
"labels": [
|
||||
"traefik.enable=true",
|
||||
"traefik.http.routers.torrent.rule=Host(`torrent.example.com`)",
|
||||
"traefik.http.services.torrent.loadbalancer.server.port=5080",
|
||||
],
|
||||
},
|
||||
"prowlarr": {
|
||||
"image": "prowlarr",
|
||||
"network_mode": "service:vpn",
|
||||
"labels": [
|
||||
"traefik.enable=true",
|
||||
"traefik.http.routers.prowlarr.rule=Host(`prowlarr.example.com`)",
|
||||
"traefik.http.services.prowlarr.loadbalancer.server.port=9696",
|
||||
],
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
dynamic, warnings = generate_traefik_config(cfg, ["vpn-stack"])
|
||||
|
||||
assert warnings == []
|
||||
# Both services should get their ports from the vpn service
|
||||
torrent_servers = dynamic["http"]["services"]["torrent"]["loadbalancer"]["servers"]
|
||||
assert torrent_servers == [{"url": "http://192.168.1.10:5080"}]
|
||||
prowlarr_servers = dynamic["http"]["services"]["prowlarr"]["loadbalancer"]["servers"]
|
||||
assert prowlarr_servers == [{"url": "http://192.168.1.10:9696"}]
|
||||
|
||||
|
||||
def test_parse_external_networks_single(tmp_path: Path) -> None:
|
||||
"""Extract a single external network from compose file."""
|
||||
cfg = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"host1": Host(address="192.168.1.10")},
|
||||
services={"app": "host1"},
|
||||
)
|
||||
compose_path = tmp_path / "app" / "compose.yaml"
|
||||
_write_compose(
|
||||
compose_path,
|
||||
{
|
||||
"services": {"app": {"image": "nginx"}},
|
||||
"networks": {"mynetwork": {"external": True}},
|
||||
},
|
||||
)
|
||||
|
||||
networks = parse_external_networks(cfg, "app")
|
||||
assert networks == ["mynetwork"]
|
||||
|
||||
|
||||
def test_parse_external_networks_multiple(tmp_path: Path) -> None:
|
||||
"""Extract multiple external networks from compose file."""
|
||||
cfg = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"host1": Host(address="192.168.1.10")},
|
||||
services={"app": "host1"},
|
||||
)
|
||||
compose_path = tmp_path / "app" / "compose.yaml"
|
||||
_write_compose(
|
||||
compose_path,
|
||||
{
|
||||
"services": {"app": {"image": "nginx"}},
|
||||
"networks": {
|
||||
"frontend": {"external": True},
|
||||
"backend": {"external": True},
|
||||
"internal": {"driver": "bridge"}, # not external
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
networks = parse_external_networks(cfg, "app")
|
||||
assert set(networks) == {"frontend", "backend"}
|
||||
|
||||
|
||||
def test_parse_external_networks_none(tmp_path: Path) -> None:
|
||||
"""No external networks returns empty list."""
|
||||
cfg = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"host1": Host(address="192.168.1.10")},
|
||||
services={"app": "host1"},
|
||||
)
|
||||
compose_path = tmp_path / "app" / "compose.yaml"
|
||||
_write_compose(
|
||||
compose_path,
|
||||
{
|
||||
"services": {"app": {"image": "nginx"}},
|
||||
"networks": {"internal": {"driver": "bridge"}},
|
||||
},
|
||||
)
|
||||
|
||||
networks = parse_external_networks(cfg, "app")
|
||||
assert networks == []
|
||||
|
||||
|
||||
def test_parse_external_networks_no_networks_section(tmp_path: Path) -> None:
|
||||
"""No networks section returns empty list."""
|
||||
cfg = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"host1": Host(address="192.168.1.10")},
|
||||
services={"app": "host1"},
|
||||
)
|
||||
compose_path = tmp_path / "app" / "compose.yaml"
|
||||
_write_compose(
|
||||
compose_path,
|
||||
{"services": {"app": {"image": "nginx"}}},
|
||||
)
|
||||
|
||||
networks = parse_external_networks(cfg, "app")
|
||||
assert networks == []
|
||||
|
||||
|
||||
def test_parse_external_networks_missing_compose(tmp_path: Path) -> None:
|
||||
"""Missing compose file returns empty list."""
|
||||
cfg = Config(
|
||||
compose_dir=tmp_path,
|
||||
hosts={"host1": Host(address="192.168.1.10")},
|
||||
services={"app": "host1"},
|
||||
)
|
||||
# Don't create compose file
|
||||
|
||||
networks = parse_external_networks(cfg, "app")
|
||||
assert networks == []
|
||||
|
||||
1
tests/web/__init__.py
Normal file
1
tests/web/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Web UI tests."""
|
||||
99
tests/web/conftest.py
Normal file
99
tests/web/conftest.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""Fixtures for web UI tests."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pytest
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from compose_farm.config import Config
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def compose_dir(tmp_path: Path) -> Path:
|
||||
"""Create a temporary compose directory with sample services."""
|
||||
compose_path = tmp_path / "compose"
|
||||
compose_path.mkdir()
|
||||
|
||||
# Create a sample service
|
||||
plex_dir = compose_path / "plex"
|
||||
plex_dir.mkdir()
|
||||
(plex_dir / "compose.yaml").write_text("""
|
||||
services:
|
||||
plex:
|
||||
image: plexinc/pms-docker
|
||||
container_name: plex
|
||||
ports:
|
||||
- "32400:32400"
|
||||
""")
|
||||
(plex_dir / ".env").write_text("PLEX_CLAIM=claim-xxx\n")
|
||||
|
||||
# Create another service
|
||||
sonarr_dir = compose_path / "sonarr"
|
||||
sonarr_dir.mkdir()
|
||||
(sonarr_dir / "compose.yaml").write_text("""
|
||||
services:
|
||||
sonarr:
|
||||
image: linuxserver/sonarr
|
||||
""")
|
||||
|
||||
return compose_path
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def config_file(tmp_path: Path, compose_dir: Path) -> Path:
|
||||
"""Create a temporary config file and state file."""
|
||||
config_path = tmp_path / "compose-farm.yaml"
|
||||
config_path.write_text(f"""
|
||||
compose_dir: {compose_dir}
|
||||
|
||||
hosts:
|
||||
server-1:
|
||||
address: 192.168.1.10
|
||||
user: docker
|
||||
server-2:
|
||||
address: 192.168.1.11
|
||||
|
||||
services:
|
||||
plex: server-1
|
||||
sonarr: server-2
|
||||
""")
|
||||
|
||||
# State file must be alongside config file
|
||||
state_path = tmp_path / "compose-farm-state.yaml"
|
||||
state_path.write_text("""
|
||||
deployed:
|
||||
plex: server-1
|
||||
""")
|
||||
|
||||
return config_path
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_config(
|
||||
config_file: Path, monkeypatch: pytest.MonkeyPatch
|
||||
) -> Generator[Config, None, None]:
|
||||
"""Patch get_config to return a test config."""
|
||||
from compose_farm.config import load_config
|
||||
from compose_farm.web import deps as web_deps
|
||||
from compose_farm.web.routes import api as web_api
|
||||
|
||||
config = load_config(config_file)
|
||||
|
||||
# Save original and clear cache before patching
|
||||
original_get_config = web_deps.get_config
|
||||
original_get_config.cache_clear()
|
||||
|
||||
# Patch in all modules that import get_config
|
||||
monkeypatch.setattr(web_deps, "get_config", lambda: config)
|
||||
monkeypatch.setattr(web_api, "get_config", lambda: config)
|
||||
|
||||
yield config
|
||||
|
||||
# monkeypatch auto-restores, then clear cache
|
||||
# (cache_clear happens after monkeypatch cleanup via addfinalizier)
|
||||
monkeypatch.undo()
|
||||
original_get_config.cache_clear()
|
||||
106
tests/web/test_helpers.py
Normal file
106
tests/web/test_helpers.py
Normal file
@@ -0,0 +1,106 @@
|
||||
"""Tests for web API helper functions."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pytest
|
||||
from fastapi import HTTPException
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from compose_farm.config import Config
|
||||
|
||||
|
||||
class TestValidateYaml:
    """Tests for the ``_validate_yaml`` helper."""

    def test_valid_yaml(self) -> None:
        from compose_farm.web.routes.api import _validate_yaml

        # Valid documents (including the empty one) must pass silently.
        for document in ("key: value", "list:\n - item1\n - item2", ""):
            _validate_yaml(document)

    def test_invalid_yaml(self) -> None:
        from compose_farm.web.routes.api import _validate_yaml

        with pytest.raises(HTTPException) as excinfo:
            _validate_yaml("key: [unclosed")

        err = excinfo.value
        assert err.status_code == 400
        assert "Invalid YAML" in err.detail
||||
class TestGetServiceComposePath:
    """Tests for the ``_get_service_compose_path`` helper."""

    def test_service_found(self, mock_config: Config) -> None:
        from compose_farm.web.routes.api import _get_service_compose_path

        compose_path = _get_service_compose_path("plex")

        # A known service resolves to <compose_dir>/plex/compose.yaml.
        assert isinstance(compose_path, Path)
        assert compose_path.name == "compose.yaml"
        assert compose_path.parent.name == "plex"

    def test_service_not_found(self, mock_config: Config) -> None:
        from compose_farm.web.routes.api import _get_service_compose_path

        with pytest.raises(HTTPException) as excinfo:
            _get_service_compose_path("nonexistent")

        err = excinfo.value
        assert err.status_code == 404
        assert "not found" in err.detail
||||
class TestRenderContainers:
    """Tests for container template rendering."""

    def test_render_running_container(self, mock_config: Config) -> None:
        from compose_farm.web.routes.api import _render_containers

        rendered = _render_containers(
            "plex", "server-1", [{"Name": "plex", "State": "running"}]
        )

        # A running container gets the success badge and an exec-terminal hook.
        for fragment in ("badge-success", "plex", "initExecTerminal"):
            assert fragment in rendered

    def test_render_unknown_state(self, mock_config: Config) -> None:
        from compose_farm.web.routes.api import _render_containers

        rendered = _render_containers(
            "plex", "server-1", [{"Name": "plex", "State": "unknown"}]
        )

        # An "unknown" state renders as a loading spinner placeholder.
        assert "loading-spinner" in rendered

    def test_render_other_state(self, mock_config: Config) -> None:
        from compose_farm.web.routes.api import _render_containers

        rendered = _render_containers(
            "plex", "server-1", [{"Name": "plex", "State": "exited"}]
        )

        # Any other state falls back to a warning badge showing that state.
        assert "badge-warning" in rendered
        assert "exited" in rendered

    def test_render_with_header(self, mock_config: Config) -> None:
        from compose_farm.web.routes.api import _render_containers

        rendered = _render_containers(
            "plex",
            "server-1",
            [{"Name": "plex", "State": "running"}],
            show_header=True,
        )

        # The header variant includes the host name, styled bold.
        assert "server-1" in rendered
        assert "font-semibold" in rendered

    def test_render_multiple_containers(self, mock_config: Config) -> None:
        from compose_farm.web.routes.api import _render_containers

        rendered = _render_containers(
            "app",
            "server-1",
            [
                {"Name": "app-web-1", "State": "running"},
                {"Name": "app-db-1", "State": "running"},
            ],
        )

        # Every container in the list appears in the rendered output.
        assert "app-web-1" in rendered
        assert "app-db-1" in rendered
452
uv.lock
generated
452
uv.lock
generated
@@ -2,6 +2,15 @@ version = 1
|
||||
revision = 3
|
||||
requires-python = ">=3.11"
|
||||
|
||||
[[package]]
|
||||
name = "annotated-doc"
|
||||
version = "0.0.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "annotated-types"
|
||||
version = "0.7.0"
|
||||
@@ -11,6 +20,19 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anyio"
|
||||
version = "4.12.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "idna" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "asyncssh"
|
||||
version = "2.21.1"
|
||||
@@ -24,6 +46,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/89/4a9a61bc120ca68bce92b0ea176ddc0e550e58c60ab820603bd5246e7261/asyncssh-2.21.1-py3-none-any.whl", hash = "sha256:f218f9f303c78df6627d0646835e04039a156d15e174ad63c058d62de61e1968", size = 375529, upload-time = "2025-09-28T16:36:17.68Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2025.11.12"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cffi"
|
||||
version = "2.0.0"
|
||||
@@ -131,11 +162,23 @@ dependencies = [
|
||||
{ name = "asyncssh" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "rich" },
|
||||
{ name = "typer" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
web = [
|
||||
{ name = "fastapi" },
|
||||
{ name = "jinja2" },
|
||||
{ name = "uvicorn", extra = ["standard"] },
|
||||
{ name = "websockets" },
|
||||
]
|
||||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "fastapi" },
|
||||
{ name = "httpx" },
|
||||
{ name = "jinja2" },
|
||||
{ name = "markdown-code-runner" },
|
||||
{ name = "mypy" },
|
||||
{ name = "pre-commit" },
|
||||
@@ -144,18 +187,29 @@ dev = [
|
||||
{ name = "pytest-cov" },
|
||||
{ name = "ruff" },
|
||||
{ name = "types-pyyaml" },
|
||||
{ name = "uvicorn", extra = ["standard"] },
|
||||
{ name = "websockets" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [
|
||||
{ name = "asyncssh", specifier = ">=2.14.0" },
|
||||
{ name = "fastapi", marker = "extra == 'web'", specifier = ">=0.109.0" },
|
||||
{ name = "jinja2", marker = "extra == 'web'", specifier = ">=3.1.0" },
|
||||
{ name = "pydantic", specifier = ">=2.0.0" },
|
||||
{ name = "pyyaml", specifier = ">=6.0" },
|
||||
{ name = "rich", specifier = ">=13.0.0" },
|
||||
{ name = "typer", specifier = ">=0.9.0" },
|
||||
{ name = "uvicorn", extras = ["standard"], marker = "extra == 'web'", specifier = ">=0.27.0" },
|
||||
{ name = "websockets", marker = "extra == 'web'", specifier = ">=12.0" },
|
||||
]
|
||||
provides-extras = ["web"]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "fastapi", specifier = ">=0.109.0" },
|
||||
{ name = "httpx", specifier = ">=0.28.0" },
|
||||
{ name = "jinja2", specifier = ">=3.1.0" },
|
||||
{ name = "markdown-code-runner", specifier = ">=0.7.0" },
|
||||
{ name = "mypy", specifier = ">=1.19.0" },
|
||||
{ name = "pre-commit", specifier = ">=4.5.0" },
|
||||
@@ -164,6 +218,8 @@ dev = [
|
||||
{ name = "pytest-cov", specifier = ">=6.0.0" },
|
||||
{ name = "ruff", specifier = ">=0.14.8" },
|
||||
{ name = "types-pyyaml", specifier = ">=6.0.12.20250915" },
|
||||
{ name = "uvicorn", extras = ["standard"], specifier = ">=0.27.0" },
|
||||
{ name = "websockets", specifier = ">=12.0" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -329,6 +385,21 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.124.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "annotated-doc" },
|
||||
{ name = "pydantic" },
|
||||
{ name = "starlette" },
|
||||
{ name = "typing-extensions" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/cd/21/ade3ff6745a82ea8ad88552b4139d27941549e4f19125879f848ac8f3c3d/fastapi-0.124.4.tar.gz", hash = "sha256:0e9422e8d6b797515f33f500309f6e1c98ee4e85563ba0f2debb282df6343763", size = 378460, upload-time = "2025-12-12T15:00:43.891Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/57/aa70121b5008f44031be645a61a7c4abc24e0e888ad3fc8fda916f4d188e/fastapi-0.124.4-py3-none-any.whl", hash = "sha256:6d1e703698443ccb89e50abe4893f3c84d9d6689c0cf1ca4fad6d3c15cf69f15", size = 113281, upload-time = "2025-12-12T15:00:42.44Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "filelock"
|
||||
version = "3.20.0"
|
||||
@@ -338,6 +409,79 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "h11"
|
||||
version = "0.16.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpcore"
|
||||
version = "1.0.9"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httptools"
|
||||
version = "0.7.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/7f/403e5d787dc4942316e515e949b0c8a013d84078a915910e9f391ba9b3ed/httptools-0.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:38e0c83a2ea9746ebbd643bdfb521b9aa4a91703e2cd705c20443405d2fd16a5", size = 206280, upload-time = "2025-10-10T03:54:39.274Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/0d/7f3fd28e2ce311ccc998c388dd1c53b18120fda3b70ebb022b135dc9839b/httptools-0.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f25bbaf1235e27704f1a7b86cd3304eabc04f569c828101d94a0e605ef7205a5", size = 110004, upload-time = "2025-10-10T03:54:40.403Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/a6/b3965e1e146ef5762870bbe76117876ceba51a201e18cc31f5703e454596/httptools-0.7.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c15f37ef679ab9ecc06bfc4e6e8628c32a8e4b305459de7cf6785acd57e4d03", size = 517655, upload-time = "2025-10-10T03:54:41.347Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/7d/71fee6f1844e6fa378f2eddde6c3e41ce3a1fb4b2d81118dd544e3441ec0/httptools-0.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7fe6e96090df46b36ccfaf746f03034e5ab723162bc51b0a4cf58305324036f2", size = 511440, upload-time = "2025-10-10T03:54:42.452Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/a5/079d216712a4f3ffa24af4a0381b108aa9c45b7a5cc6eb141f81726b1823/httptools-0.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f72fdbae2dbc6e68b8239defb48e6a5937b12218e6ffc2c7846cc37befa84362", size = 495186, upload-time = "2025-10-10T03:54:43.937Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/9e/025ad7b65278745dee3bd0ebf9314934c4592560878308a6121f7f812084/httptools-0.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e99c7b90a29fd82fea9ef57943d501a16f3404d7b9ee81799d41639bdaae412c", size = 499192, upload-time = "2025-10-10T03:54:45.003Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/de/40a8f202b987d43afc4d54689600ff03ce65680ede2f31df348d7f368b8f/httptools-0.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:3e14f530fefa7499334a79b0cf7e7cd2992870eb893526fb097d51b4f2d0f321", size = 86694, upload-time = "2025-10-10T03:54:45.923Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/8f/c77b1fcbfd262d422f12da02feb0d218fa228d52485b77b953832105bb90/httptools-0.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6babce6cfa2a99545c60bfef8bee0cc0545413cb0018f617c8059a30ad985de3", size = 202889, upload-time = "2025-10-10T03:54:47.089Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/1a/22887f53602feaa066354867bc49a68fc295c2293433177ee90870a7d517/httptools-0.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:601b7628de7504077dd3dcb3791c6b8694bbd967148a6d1f01806509254fb1ca", size = 108180, upload-time = "2025-10-10T03:54:48.052Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/6a/6aaa91937f0010d288d3d124ca2946d48d60c3a5ee7ca62afe870e3ea011/httptools-0.7.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:04c6c0e6c5fb0739c5b8a9eb046d298650a0ff38cf42537fc372b28dc7e4472c", size = 478596, upload-time = "2025-10-10T03:54:48.919Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/70/023d7ce117993107be88d2cbca566a7c1323ccbaf0af7eabf2064fe356f6/httptools-0.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69d4f9705c405ae3ee83d6a12283dc9feba8cc6aaec671b412917e644ab4fa66", size = 473268, upload-time = "2025-10-10T03:54:49.993Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/4d/9dd616c38da088e3f436e9a616e1d0cc66544b8cdac405cc4e81c8679fc7/httptools-0.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:44c8f4347d4b31269c8a9205d8a5ee2df5322b09bbbd30f8f862185bb6b05346", size = 455517, upload-time = "2025-10-10T03:54:51.066Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/3a/a6c595c310b7df958e739aae88724e24f9246a514d909547778d776799be/httptools-0.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:465275d76db4d554918aba40bf1cbebe324670f3dfc979eaffaa5d108e2ed650", size = 458337, upload-time = "2025-10-10T03:54:52.196Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/82/88e8d6d2c51edc1cc391b6e044c6c435b6aebe97b1abc33db1b0b24cd582/httptools-0.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:322d00c2068d125bd570f7bf78b2d367dad02b919d8581d7476d8b75b294e3e6", size = 85743, upload-time = "2025-10-10T03:54:53.448Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/34/50/9d095fcbb6de2d523e027a2f304d4551855c2f46e0b82befd718b8b20056/httptools-0.7.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c08fe65728b8d70b6923ce31e3956f859d5e1e8548e6f22ec520a962c6757270", size = 203619, upload-time = "2025-10-10T03:54:54.321Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/f0/89720dc5139ae54b03f861b5e2c55a37dba9a5da7d51e1e824a1f343627f/httptools-0.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7aea2e3c3953521c3c51106ee11487a910d45586e351202474d45472db7d72d3", size = 108714, upload-time = "2025-10-10T03:54:55.163Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/cb/eea88506f191fb552c11787c23f9a405f4c7b0c5799bf73f2249cd4f5228/httptools-0.7.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0e68b8582f4ea9166be62926077a3334064d422cf08ab87d8b74664f8e9058e1", size = 472909, upload-time = "2025-10-10T03:54:56.056Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/4a/a548bdfae6369c0d078bab5769f7b66f17f1bfaa6fa28f81d6be6959066b/httptools-0.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df091cf961a3be783d6aebae963cc9b71e00d57fa6f149025075217bc6a55a7b", size = 470831, upload-time = "2025-10-10T03:54:57.219Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4d/31/14df99e1c43bd132eec921c2e7e11cda7852f65619bc0fc5bdc2d0cb126c/httptools-0.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f084813239e1eb403ddacd06a30de3d3e09a9b76e7894dcda2b22f8a726e9c60", size = 452631, upload-time = "2025-10-10T03:54:58.219Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/d2/b7e131f7be8d854d48cb6d048113c30f9a46dca0c9a8b08fcb3fcd588cdc/httptools-0.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7347714368fb2b335e9063bc2b96f2f87a9ceffcd9758ac295f8bbcd3ffbc0ca", size = 452910, upload-time = "2025-10-10T03:54:59.366Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/53/cf/878f3b91e4e6e011eff6d1fa9ca39f7eb17d19c9d7971b04873734112f30/httptools-0.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:cfabda2a5bb85aa2a904ce06d974a3f30fb36cc63d7feaddec05d2050acede96", size = 88205, upload-time = "2025-10-10T03:55:00.389Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httpx"
|
||||
version = "0.28.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "certifi" },
|
||||
{ name = "httpcore" },
|
||||
{ name = "idna" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "identify"
|
||||
version = "2.6.15"
|
||||
@@ -347,6 +491,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.11"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "2.3.0"
|
||||
@@ -356,6 +509,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jinja2"
|
||||
version = "3.1.6"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "markupsafe" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "librt"
|
||||
version = "0.7.3"
|
||||
@@ -440,6 +605,80 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "markupsafe"
|
||||
version = "3.0.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mdurl"
|
||||
version = "0.1.2"
|
||||
@@ -731,6 +970,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "python-dotenv"
|
||||
version = "1.2.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0.3"
|
||||
@@ -834,6 +1082,19 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "starlette"
|
||||
version = "0.50.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tomli"
|
||||
version = "2.3.0"
|
||||
@@ -928,6 +1189,68 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uvicorn"
|
||||
version = "0.38.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
{ name = "h11" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef4688ca63bdb2fdf113ca0a3be33f94488f2cadb690b0cf/uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d", size = 80605, upload-time = "2025-10-18T13:46:44.63Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" },
|
||||
]
|
||||
|
||||
[package.optional-dependencies]
|
||||
standard = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
{ name = "httptools" },
|
||||
{ name = "python-dotenv" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" },
|
||||
{ name = "watchfiles" },
|
||||
{ name = "websockets" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uvloop"
|
||||
version = "0.22.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/ff/7f72e8170be527b4977b033239a83a68d5c881cc4775fca255c677f7ac5d/uvloop-0.22.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fe94b4564e865d968414598eea1a6de60adba0c040ba4ed05ac1300de402cd42", size = 1359936, upload-time = "2025-10-16T22:16:29.436Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/c6/e5d433f88fd54d81ef4be58b2b7b0cea13c442454a1db703a1eea0db1a59/uvloop-0.22.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:51eb9bd88391483410daad430813d982010f9c9c89512321f5b60e2cddbdddd6", size = 752769, upload-time = "2025-10-16T22:16:30.493Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/68/a6ac446820273e71aa762fa21cdcc09861edd3536ff47c5cd3b7afb10eeb/uvloop-0.22.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:700e674a166ca5778255e0e1dc4e9d79ab2acc57b9171b79e65feba7184b3370", size = 4317413, upload-time = "2025-10-16T22:16:31.644Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/6f/e62b4dfc7ad6518e7eff2516f680d02a0f6eb62c0c212e152ca708a0085e/uvloop-0.22.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b5b1ac819a3f946d3b2ee07f09149578ae76066d70b44df3fa990add49a82e4", size = 4426307, upload-time = "2025-10-16T22:16:32.917Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/60/97362554ac21e20e81bcef1150cb2a7e4ffdaf8ea1e5b2e8bf7a053caa18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e047cc068570bac9866237739607d1313b9253c3051ad84738cbb095be0537b2", size = 4131970, upload-time = "2025-10-16T22:16:34.015Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/39/6b3f7d234ba3964c428a6e40006340f53ba37993f46ed6e111c6e9141d18/uvloop-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:512fec6815e2dd45161054592441ef76c830eddaad55c8aa30952e6fe1ed07c0", size = 4296343, upload-time = "2025-10-16T22:16:35.149Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/8c/182a2a593195bfd39842ea68ebc084e20c850806117213f5a299dfc513d9/uvloop-0.22.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:561577354eb94200d75aca23fbde86ee11be36b00e52a4eaf8f50fb0c86b7705", size = 1358611, upload-time = "2025-10-16T22:16:36.833Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/14/e301ee96a6dc95224b6f1162cd3312f6d1217be3907b79173b06785f2fe7/uvloop-0.22.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cdf5192ab3e674ca26da2eada35b288d2fa49fdd0f357a19f0e7c4e7d5077c8", size = 751811, upload-time = "2025-10-16T22:16:38.275Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/02/654426ce265ac19e2980bfd9ea6590ca96a56f10c76e63801a2df01c0486/uvloop-0.22.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e2ea3d6190a2968f4a14a23019d3b16870dd2190cd69c8180f7c632d21de68d", size = 4288562, upload-time = "2025-10-16T22:16:39.375Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/c0/0be24758891ef825f2065cd5db8741aaddabe3e248ee6acc5e8a80f04005/uvloop-0.22.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0530a5fbad9c9e4ee3f2b33b148c6a64d47bbad8000ea63704fa8260f4cf728e", size = 4366890, upload-time = "2025-10-16T22:16:40.547Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/53/8369e5219a5855869bcee5f4d317f6da0e2c669aecf0ef7d371e3d084449/uvloop-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bc5ef13bbc10b5335792360623cc378d52d7e62c2de64660616478c32cd0598e", size = 4119472, upload-time = "2025-10-16T22:16:41.694Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/ba/d69adbe699b768f6b29a5eec7b47dd610bd17a69de51b251126a801369ea/uvloop-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1f38ec5e3f18c8a10ded09742f7fb8de0108796eb673f30ce7762ce1b8550cad", size = 4239051, upload-time = "2025-10-16T22:16:43.224Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/cd/b62bdeaa429758aee8de8b00ac0dd26593a9de93d302bff3d21439e9791d/uvloop-0.22.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3879b88423ec7e97cd4eba2a443aa26ed4e59b45e6b76aabf13fe2f27023a142", size = 1362067, upload-time = "2025-10-16T22:16:44.503Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/f8/a132124dfda0777e489ca86732e85e69afcd1ff7686647000050ba670689/uvloop-0.22.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4baa86acedf1d62115c1dc6ad1e17134476688f08c6efd8a2ab076e815665c74", size = 752423, upload-time = "2025-10-16T22:16:45.968Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/94/94af78c156f88da4b3a733773ad5ba0b164393e357cc4bd0ab2e2677a7d6/uvloop-0.22.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:297c27d8003520596236bdb2335e6b3f649480bd09e00d1e3a99144b691d2a35", size = 4272437, upload-time = "2025-10-16T22:16:47.451Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/35/60249e9fd07b32c665192cec7af29e06c7cd96fa1d08b84f012a56a0b38e/uvloop-0.22.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c1955d5a1dd43198244d47664a5858082a3239766a839b2102a269aaff7a4e25", size = 4292101, upload-time = "2025-10-16T22:16:49.318Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/62/67d382dfcb25d0a98ce73c11ed1a6fba5037a1a1d533dcbb7cab033a2636/uvloop-0.22.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b31dc2fccbd42adc73bc4e7cdbae4fc5086cf378979e53ca5d0301838c5682c6", size = 4114158, upload-time = "2025-10-16T22:16:50.517Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f0/7a/f1171b4a882a5d13c8b7576f348acfe6074d72eaf52cccef752f748d4a9f/uvloop-0.22.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:93f617675b2d03af4e72a5333ef89450dfaa5321303ede6e67ba9c9d26878079", size = 4177360, upload-time = "2025-10-16T22:16:52.646Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/7b/b01414f31546caf0919da80ad57cbfe24c56b151d12af68cee1b04922ca8/uvloop-0.22.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:37554f70528f60cad66945b885eb01f1bb514f132d92b6eeed1c90fd54ed6289", size = 1454790, upload-time = "2025-10-16T22:16:54.355Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/31/0bb232318dd838cad3fa8fb0c68c8b40e1145b32025581975e18b11fab40/uvloop-0.22.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:b76324e2dc033a0b2f435f33eb88ff9913c156ef78e153fb210e03c13da746b3", size = 796783, upload-time = "2025-10-16T22:16:55.906Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/38/c9b09f3271a7a723a5de69f8e237ab8e7803183131bc57c890db0b6bb872/uvloop-0.22.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:badb4d8e58ee08dad957002027830d5c3b06aea446a6a3744483c2b3b745345c", size = 4647548, upload-time = "2025-10-16T22:16:57.008Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c1/37/945b4ca0ac27e3dc4952642d4c900edd030b3da6c9634875af6e13ae80e5/uvloop-0.22.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b91328c72635f6f9e0282e4a57da7470c7350ab1c9f48546c0f2866205349d21", size = 4467065, upload-time = "2025-10-16T22:16:58.206Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/cc/48d232f33d60e2e2e0b42f4e73455b146b76ebe216487e862700457fbf3c/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:daf620c2995d193449393d6c62131b3fbd40a63bf7b307a1527856ace637fe88", size = 4328384, upload-time = "2025-10-16T22:16:59.36Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "virtualenv"
|
||||
version = "20.35.4"
|
||||
@@ -941,3 +1264,132 @@ sdist = { url = "https://files.pythonhosted.org/packages/20/28/e6f1a6f655d620846
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/79/0c/c05523fa3181fdf0c9c52a6ba91a23fbf3246cc095f26f6516f9c60e6771/virtualenv-20.35.4-py3-none-any.whl", hash = "sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b", size = 6005095, upload-time = "2025-10-29T06:57:37.598Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "watchfiles"
|
||||
version = "1.1.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "websockets"
|
||||
version = "15.0.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" },
|
||||
]
|
||||
|
||||
Reference in New Issue
Block a user