mirror of
https://github.com/basnijholt/compose-farm.git
synced 2026-02-03 14:13:26 +00:00
feat(web): add Glances integration for host resource stats (#124)
This commit is contained in:
2
.gitignore
vendored
2
.gitignore
vendored
@@ -37,6 +37,7 @@ ENV/
|
|||||||
.coverage
|
.coverage
|
||||||
.pytest_cache/
|
.pytest_cache/
|
||||||
htmlcov/
|
htmlcov/
|
||||||
|
.code/
|
||||||
|
|
||||||
# Local config (don't commit real configs)
|
# Local config (don't commit real configs)
|
||||||
compose-farm.yaml
|
compose-farm.yaml
|
||||||
@@ -45,3 +46,4 @@ coverage.xml
|
|||||||
.env
|
.env
|
||||||
homepage/
|
homepage/
|
||||||
site/
|
site/
|
||||||
|
.playwright-mcp/
|
||||||
|
|||||||
47
README.md
47
README.md
@@ -54,6 +54,7 @@ A minimal CLI tool to run Docker Compose commands across multiple hosts via SSH.
|
|||||||
- [CLI `--help` Output](#cli---help-output)
|
- [CLI `--help` Output](#cli---help-output)
|
||||||
- [Auto-Migration](#auto-migration)
|
- [Auto-Migration](#auto-migration)
|
||||||
- [Traefik Multihost Ingress (File Provider)](#traefik-multihost-ingress-file-provider)
|
- [Traefik Multihost Ingress (File Provider)](#traefik-multihost-ingress-file-provider)
|
||||||
|
- [Host Resource Monitoring (Glances)](#host-resource-monitoring-glances)
|
||||||
- [Comparison with Alternatives](#comparison-with-alternatives)
|
- [Comparison with Alternatives](#comparison-with-alternatives)
|
||||||
- [License](#license)
|
- [License](#license)
|
||||||
|
|
||||||
@@ -1325,6 +1326,52 @@ Update your Traefik config to use directory watching instead of a single file:
|
|||||||
- --providers.file.watch=true
|
- --providers.file.watch=true
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Host Resource Monitoring (Glances)
|
||||||
|
|
||||||
|
The web UI can display real-time CPU, memory, and load stats for all configured hosts. This uses [Glances](https://nicolargo.github.io/glances/), a cross-platform system monitoring tool with a REST API.
|
||||||
|
|
||||||
|
**Setup**
|
||||||
|
|
||||||
|
1. Deploy a Glances stack that runs on all hosts:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# glances/compose.yaml
|
||||||
|
name: glances
|
||||||
|
services:
|
||||||
|
glances:
|
||||||
|
image: nicolargo/glances:latest
|
||||||
|
container_name: glances
|
||||||
|
restart: unless-stopped
|
||||||
|
pid: host
|
||||||
|
ports:
|
||||||
|
- "61208:61208"
|
||||||
|
environment:
|
||||||
|
- GLANCES_OPT=-w # Enable web server mode
|
||||||
|
volumes:
|
||||||
|
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Add it to your config as a multi-host stack:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# compose-farm.yaml
|
||||||
|
stacks:
|
||||||
|
glances: all # Runs on every host
|
||||||
|
|
||||||
|
glances_stack: glances # Enables resource stats in web UI
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Deploy: `cf up glances`
|
||||||
|
|
||||||
|
The web UI dashboard will now show a "Host Resources" section with live stats from all hosts. Hosts where Glances is unreachable show an error indicator.
|
||||||
|
|
||||||
|
**Live Stats Page**
|
||||||
|
|
||||||
|
With Glances configured, a Live Stats page (`/live-stats`) shows all running containers across all hosts:
|
||||||
|
|
||||||
|
- **Columns**: Stack, Service, Host, Image, Status, Uptime, CPU, Memory, Net I/O
|
||||||
|
- **Features**: Sorting, filtering, live updates (no SSH required—uses Glances REST API)
|
||||||
|
|
||||||
## Comparison with Alternatives
|
## Comparison with Alternatives
|
||||||
|
|
||||||
There are many ways to run containers on multiple hosts. Here is where Compose Farm sits:
|
There are many ways to run containers on multiple hosts. Here is where Compose Farm sits:
|
||||||
|
|||||||
3
docs/assets/web-live_stats.gif
Normal file
3
docs/assets/web-live_stats.gif
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
version https://git-lfs.github.com/spec/v1
|
||||||
|
oid sha256:4135888689a10c5ae2904825d98f2a6d215c174a4bd823e25761f619590f04ff
|
||||||
|
size 3990104
|
||||||
3
docs/assets/web-live_stats.webm
Normal file
3
docs/assets/web-live_stats.webm
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
version https://git-lfs.github.com/spec/v1
|
||||||
|
oid sha256:87739cd6f6576a81100392d8d1e59d3e776fecc8f0721a31332df89e7fc8593d
|
||||||
|
size 5814274
|
||||||
@@ -21,24 +21,37 @@ import uvicorn
|
|||||||
|
|
||||||
from compose_farm.config import Config as CFConfig
|
from compose_farm.config import Config as CFConfig
|
||||||
from compose_farm.config import load_config
|
from compose_farm.config import load_config
|
||||||
|
from compose_farm.executor import (
|
||||||
|
get_container_compose_labels as _original_get_compose_labels,
|
||||||
|
)
|
||||||
|
from compose_farm.glances import ContainerStats
|
||||||
|
from compose_farm.glances import fetch_container_stats as _original_fetch_container_stats
|
||||||
from compose_farm.state import load_state as _original_load_state
|
from compose_farm.state import load_state as _original_load_state
|
||||||
from compose_farm.web.app import create_app
|
|
||||||
from compose_farm.web.cdn import CDN_ASSETS, ensure_vendor_cache
|
from compose_farm.web.cdn import CDN_ASSETS, ensure_vendor_cache
|
||||||
|
|
||||||
|
# NOTE: Do NOT import create_app here - it must be imported AFTER patches are applied
|
||||||
|
# to ensure the patched get_config is used by all route modules
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from collections.abc import Generator
|
from collections.abc import Generator
|
||||||
|
|
||||||
from playwright.sync_api import BrowserContext, Page, Route
|
from playwright.sync_api import BrowserContext, Page, Route
|
||||||
|
|
||||||
# Stacks to exclude from demo recordings (exact match)
|
# Substrings to exclude from demo recordings (case-insensitive)
|
||||||
DEMO_EXCLUDE_STACKS = {"arr"}
|
DEMO_EXCLUDE_PATTERNS = {"arr", "vpn", "tash"}
|
||||||
|
|
||||||
|
|
||||||
|
def _should_exclude(name: str) -> bool:
|
||||||
|
"""Check if a stack/container name should be excluded from demo."""
|
||||||
|
name_lower = name.lower()
|
||||||
|
return any(pattern in name_lower for pattern in DEMO_EXCLUDE_PATTERNS)
|
||||||
|
|
||||||
|
|
||||||
def _get_filtered_config() -> CFConfig:
|
def _get_filtered_config() -> CFConfig:
|
||||||
"""Load config but filter out excluded stacks."""
|
"""Load config but filter out excluded stacks."""
|
||||||
config = load_config()
|
config = load_config()
|
||||||
filtered_stacks = {
|
filtered_stacks = {
|
||||||
name: host for name, host in config.stacks.items() if name not in DEMO_EXCLUDE_STACKS
|
name: host for name, host in config.stacks.items() if not _should_exclude(name)
|
||||||
}
|
}
|
||||||
return CFConfig(
|
return CFConfig(
|
||||||
compose_dir=config.compose_dir,
|
compose_dir=config.compose_dir,
|
||||||
@@ -46,6 +59,7 @@ def _get_filtered_config() -> CFConfig:
|
|||||||
stacks=filtered_stacks,
|
stacks=filtered_stacks,
|
||||||
traefik_file=config.traefik_file,
|
traefik_file=config.traefik_file,
|
||||||
traefik_stack=config.traefik_stack,
|
traefik_stack=config.traefik_stack,
|
||||||
|
glances_stack=config.glances_stack,
|
||||||
config_path=config.config_path,
|
config_path=config.config_path,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -53,7 +67,37 @@ def _get_filtered_config() -> CFConfig:
|
|||||||
def _get_filtered_state(config: CFConfig) -> dict[str, str | list[str]]:
|
def _get_filtered_state(config: CFConfig) -> dict[str, str | list[str]]:
|
||||||
"""Load state but filter out excluded stacks."""
|
"""Load state but filter out excluded stacks."""
|
||||||
state = _original_load_state(config)
|
state = _original_load_state(config)
|
||||||
return {name: host for name, host in state.items() if name not in DEMO_EXCLUDE_STACKS}
|
return {name: host for name, host in state.items() if not _should_exclude(name)}
|
||||||
|
|
||||||
|
|
||||||
|
async def _filtered_fetch_container_stats(
|
||||||
|
host_name: str,
|
||||||
|
host_address: str,
|
||||||
|
port: int = 61208,
|
||||||
|
request_timeout: float = 10.0,
|
||||||
|
) -> tuple[list[ContainerStats] | None, str | None]:
|
||||||
|
"""Fetch container stats but filter out excluded containers."""
|
||||||
|
containers, error = await _original_fetch_container_stats(
|
||||||
|
host_name, host_address, port, request_timeout
|
||||||
|
)
|
||||||
|
if containers:
|
||||||
|
# Filter by container name (stack is empty at this point)
|
||||||
|
containers = [c for c in containers if not _should_exclude(c.name)]
|
||||||
|
return containers, error
|
||||||
|
|
||||||
|
|
||||||
|
async def _filtered_get_compose_labels(
|
||||||
|
config: CFConfig,
|
||||||
|
host_name: str,
|
||||||
|
) -> dict[str, tuple[str, str]]:
|
||||||
|
"""Get compose labels but filter out excluded stacks."""
|
||||||
|
labels = await _original_get_compose_labels(config, host_name)
|
||||||
|
# Filter out containers whose stack (project) name should be excluded
|
||||||
|
return {
|
||||||
|
name: (stack, service)
|
||||||
|
for name, (stack, service) in labels.items()
|
||||||
|
if not _should_exclude(stack)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
@@ -84,19 +128,23 @@ def server_url() -> Generator[str, None, None]:
|
|||||||
|
|
||||||
# Patch at source module level so all callers get filtered versions
|
# Patch at source module level so all callers get filtered versions
|
||||||
patches = [
|
patches = [
|
||||||
# Patch load_state at source - all functions calling it get filtered state
|
# Patch load_config at source - get_config() calls this internally
|
||||||
|
patch("compose_farm.config.load_config", _get_filtered_config),
|
||||||
|
# Patch load_state at source and where imported
|
||||||
patch("compose_farm.state.load_state", _get_filtered_state),
|
patch("compose_farm.state.load_state", _get_filtered_state),
|
||||||
# Patch get_config where imported
|
patch("compose_farm.web.routes.pages.load_state", _get_filtered_state),
|
||||||
patch("compose_farm.web.routes.pages.get_config", _get_filtered_config),
|
# Patch container fetch to filter out excluded containers (Live Stats page)
|
||||||
patch("compose_farm.web.routes.api.get_config", _get_filtered_config),
|
patch("compose_farm.glances.fetch_container_stats", _filtered_fetch_container_stats),
|
||||||
patch("compose_farm.web.routes.actions.get_config", _get_filtered_config),
|
# Patch compose labels to filter out excluded stacks
|
||||||
patch("compose_farm.web.app.get_config", _get_filtered_config),
|
patch("compose_farm.executor.get_container_compose_labels", _filtered_get_compose_labels),
|
||||||
patch("compose_farm.web.ws.get_config", _get_filtered_config),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
for p in patches:
|
for p in patches:
|
||||||
p.start()
|
p.start()
|
||||||
|
|
||||||
|
# Import create_app AFTER patches are started so route modules see patched get_config
|
||||||
|
from compose_farm.web.app import create_app # noqa: PLC0415
|
||||||
|
|
||||||
with socket.socket() as s:
|
with socket.socket() as s:
|
||||||
s.bind(("127.0.0.1", 0))
|
s.bind(("127.0.0.1", 0))
|
||||||
port = s.getsockname()[1]
|
port = s.getsockname()[1]
|
||||||
@@ -160,6 +208,7 @@ def recording_context(
|
|||||||
if url.startswith(url_prefix):
|
if url.startswith(url_prefix):
|
||||||
route.fulfill(status=200, content_type=content_type, body=filepath.read_bytes())
|
route.fulfill(status=200, content_type=content_type, body=filepath.read_bytes())
|
||||||
return
|
return
|
||||||
|
print(f"UNCACHED CDN request: {url}")
|
||||||
route.abort("failed")
|
route.abort("failed")
|
||||||
|
|
||||||
context.route(re.compile(r"https://(cdn\.jsdelivr\.net|unpkg\.com)/.*"), handle_cdn)
|
context.route(re.compile(r"https://(cdn\.jsdelivr\.net|unpkg\.com)/.*"), handle_cdn)
|
||||||
@@ -176,6 +225,35 @@ def recording_page(recording_context: BrowserContext) -> Generator[Page, None, N
|
|||||||
page.close()
|
page.close()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def wide_recording_context(
|
||||||
|
browser: Any, # pytest-playwright's browser fixture
|
||||||
|
recording_output_dir: Path,
|
||||||
|
) -> Generator[BrowserContext, None, None]:
|
||||||
|
"""Browser context with wider viewport for demos needing more horizontal space.
|
||||||
|
|
||||||
|
NOTE: This fixture does NOT use CDN interception (unlike recording_context).
|
||||||
|
CDN interception was causing inline scripts from containers.html to be
|
||||||
|
removed from the DOM, likely due to Tailwind's browser plugin behavior.
|
||||||
|
"""
|
||||||
|
context = browser.new_context(
|
||||||
|
viewport={"width": 1920, "height": 1080},
|
||||||
|
record_video_dir=str(recording_output_dir),
|
||||||
|
record_video_size={"width": 1920, "height": 1080},
|
||||||
|
)
|
||||||
|
|
||||||
|
yield context
|
||||||
|
context.close()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def wide_recording_page(wide_recording_context: BrowserContext) -> Generator[Page, None, None]:
|
||||||
|
"""Page with wider viewport for demos needing more horizontal space."""
|
||||||
|
page = wide_recording_context.new_page()
|
||||||
|
yield page
|
||||||
|
page.close()
|
||||||
|
|
||||||
|
|
||||||
# Demo helper functions
|
# Demo helper functions
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
85
docs/demos/web/demo_live_stats.py
Normal file
85
docs/demos/web/demo_live_stats.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
"""Demo: Live Stats page.
|
||||||
|
|
||||||
|
Records a ~20 second demo showing:
|
||||||
|
- Navigating to Live Stats via command palette
|
||||||
|
- Container table with real-time stats
|
||||||
|
- Filtering containers
|
||||||
|
- Sorting by different columns
|
||||||
|
- Auto-refresh countdown
|
||||||
|
|
||||||
|
Run: pytest docs/demos/web/demo_live_stats.py -v --no-cov
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from conftest import (
|
||||||
|
open_command_palette,
|
||||||
|
pause,
|
||||||
|
slow_type,
|
||||||
|
wait_for_sidebar,
|
||||||
|
)
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from playwright.sync_api import Page
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.browser # type: ignore[misc]
|
||||||
|
def test_demo_live_stats(wide_recording_page: Page, server_url: str) -> None:
|
||||||
|
"""Record Live Stats page demo."""
|
||||||
|
page = wide_recording_page
|
||||||
|
|
||||||
|
# Start on dashboard
|
||||||
|
page.goto(server_url)
|
||||||
|
wait_for_sidebar(page)
|
||||||
|
pause(page, 1000)
|
||||||
|
|
||||||
|
# Navigate to Live Stats via command palette
|
||||||
|
open_command_palette(page)
|
||||||
|
pause(page, 400)
|
||||||
|
slow_type(page, "#cmd-input", "live", delay=100)
|
||||||
|
pause(page, 500)
|
||||||
|
page.keyboard.press("Enter")
|
||||||
|
page.wait_for_url("**/live-stats", timeout=5000)
|
||||||
|
|
||||||
|
# Wait for containers to load (may take ~10s on first load due to SSH)
|
||||||
|
page.wait_for_selector("#container-rows tr:not(:has(.loading))", timeout=30000)
|
||||||
|
pause(page, 2000) # Let viewer see the full table with timer
|
||||||
|
|
||||||
|
# Demonstrate filtering
|
||||||
|
slow_type(page, "#filter-input", "grocy", delay=100)
|
||||||
|
pause(page, 1500) # Show filtered results
|
||||||
|
|
||||||
|
# Clear filter
|
||||||
|
page.fill("#filter-input", "")
|
||||||
|
pause(page, 1000)
|
||||||
|
|
||||||
|
# Sort by memory (click header)
|
||||||
|
page.click("th:has-text('Mem')")
|
||||||
|
pause(page, 1500)
|
||||||
|
|
||||||
|
# Sort by CPU
|
||||||
|
page.click("th:has-text('CPU')")
|
||||||
|
pause(page, 1500)
|
||||||
|
|
||||||
|
# Sort by host
|
||||||
|
page.click("th:has-text('Host')")
|
||||||
|
pause(page, 1500)
|
||||||
|
|
||||||
|
# Watch auto-refresh timer count down
|
||||||
|
pause(page, 3500) # Wait for refresh to happen
|
||||||
|
|
||||||
|
# Hover on action menu to show pause behavior
|
||||||
|
action_btn = page.locator('button[onclick^="openActionMenu"]').first
|
||||||
|
action_btn.scroll_into_view_if_needed()
|
||||||
|
action_btn.hover()
|
||||||
|
pause(page, 2000) # Show paused state (timer shows ⏸) and action menu
|
||||||
|
|
||||||
|
# Move away to close menu and resume refresh
|
||||||
|
page.locator("h2").first.hover() # Move to header
|
||||||
|
pause(page, 3500) # Watch countdown resume and refresh happen
|
||||||
|
|
||||||
|
# Final pause
|
||||||
|
pause(page, 1000)
|
||||||
@@ -37,6 +37,7 @@ DEMOS = [
|
|||||||
"workflow",
|
"workflow",
|
||||||
"console",
|
"console",
|
||||||
"shell",
|
"shell",
|
||||||
|
"live_stats",
|
||||||
]
|
]
|
||||||
|
|
||||||
# High-quality ffmpeg settings for VP8 encoding
|
# High-quality ffmpeg settings for VP8 encoding
|
||||||
|
|||||||
@@ -51,10 +51,32 @@ Press `Ctrl+K` (or `Cmd+K` on macOS) to open the command palette. Use fuzzy sear
|
|||||||
### Dashboard (`/`)
|
### Dashboard (`/`)
|
||||||
|
|
||||||
- Stack overview with status indicators
|
- Stack overview with status indicators
|
||||||
- Host statistics
|
- Host statistics (CPU, memory, disk, load via Glances)
|
||||||
- Pending operations (migrations, orphaned stacks)
|
- Pending operations (migrations, orphaned stacks)
|
||||||
- Quick actions via command palette
|
- Quick actions via command palette
|
||||||
|
|
||||||
|
### Live Stats (`/live-stats`)
|
||||||
|
|
||||||
|
Real-time container monitoring across all hosts, powered by [Glances](https://nicolargo.github.io/glances/).
|
||||||
|
|
||||||
|
- **Live metrics**: CPU, memory, network I/O for every container
|
||||||
|
- **Auto-refresh**: Updates every 3 seconds (pauses when dropdown menus are open)
|
||||||
|
- **Filtering**: Type to filter containers by name, stack, host, or image
|
||||||
|
- **Sorting**: Click column headers to sort by any metric
|
||||||
|
- **Update detection**: Shows when container images have updates available
|
||||||
|
|
||||||
|
<video autoplay loop muted playsinline>
|
||||||
|
<source src="/assets/web-live_stats.webm" type="video/webm">
|
||||||
|
</video>
|
||||||
|
|
||||||
|
#### Requirements
|
||||||
|
|
||||||
|
Live Stats requires Glances to be deployed on all hosts:
|
||||||
|
|
||||||
|
1. Add `glances_stack: glances` to your `compose-farm.yaml`
|
||||||
|
2. Deploy a Glances stack that runs on all hosts (see [example](https://github.com/basnijholt/compose-farm/tree/main/examples/glances))
|
||||||
|
3. Glances must expose its REST API on port 61208
|
||||||
|
|
||||||
### Stack Detail (`/stack/{name}`)
|
### Stack Detail (`/stack/{name}`)
|
||||||
|
|
||||||
- Compose file editor (Monaco)
|
- Compose file editor (Monaco)
|
||||||
|
|||||||
@@ -53,6 +53,7 @@ web = [
|
|||||||
"fastapi[standard]>=0.109.0",
|
"fastapi[standard]>=0.109.0",
|
||||||
"jinja2>=3.1.0",
|
"jinja2>=3.1.0",
|
||||||
"websockets>=12.0",
|
"websockets>=12.0",
|
||||||
|
"humanize>=4.0.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.urls]
|
[project.urls]
|
||||||
|
|||||||
@@ -31,6 +31,9 @@ class Config(BaseModel, extra="forbid"):
|
|||||||
stacks: dict[str, str | list[str]] # stack_name -> host_name or list of hosts
|
stacks: dict[str, str | list[str]] # stack_name -> host_name or list of hosts
|
||||||
traefik_file: Path | None = None # Auto-regenerate traefik config after up/down
|
traefik_file: Path | None = None # Auto-regenerate traefik config after up/down
|
||||||
traefik_stack: str | None = None # Stack name for Traefik (skip its host in file-provider)
|
traefik_stack: str | None = None # Stack name for Traefik (skip its host in file-provider)
|
||||||
|
glances_stack: str | None = (
|
||||||
|
None # Stack name for Glances (enables host resource stats in web UI)
|
||||||
|
)
|
||||||
config_path: Path = Path() # Set by load_config()
|
config_path: Path = Path() # Set by load_config()
|
||||||
|
|
||||||
def get_state_path(self) -> Path:
|
def get_state_path(self) -> Path:
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ from __future__ import annotations
|
|||||||
import asyncio
|
import asyncio
|
||||||
import socket
|
import socket
|
||||||
import subprocess
|
import subprocess
|
||||||
|
import time
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from functools import lru_cache
|
from functools import lru_cache
|
||||||
from typing import TYPE_CHECKING, Any
|
from typing import TYPE_CHECKING, Any
|
||||||
@@ -23,6 +24,38 @@ LOCAL_ADDRESSES = frozenset({"local", "localhost", "127.0.0.1", "::1"})
|
|||||||
_DEFAULT_SSH_PORT = 22
|
_DEFAULT_SSH_PORT = 22
|
||||||
|
|
||||||
|
|
||||||
|
class TTLCache:
|
||||||
|
"""Simple TTL cache for async function results."""
|
||||||
|
|
||||||
|
def __init__(self, ttl_seconds: float = 30.0) -> None:
|
||||||
|
"""Initialize cache with default TTL in seconds."""
|
||||||
|
# Cache stores: key -> (timestamp, value, item_ttl)
|
||||||
|
self._cache: dict[str, tuple[float, Any, float]] = {}
|
||||||
|
self._default_ttl = ttl_seconds
|
||||||
|
|
||||||
|
def get(self, key: str) -> Any | None:
|
||||||
|
"""Get value if exists and not expired."""
|
||||||
|
if key in self._cache:
|
||||||
|
timestamp, value, item_ttl = self._cache[key]
|
||||||
|
if time.monotonic() - timestamp < item_ttl:
|
||||||
|
return value
|
||||||
|
del self._cache[key]
|
||||||
|
return None
|
||||||
|
|
||||||
|
def set(self, key: str, value: Any, ttl_seconds: float | None = None) -> None:
|
||||||
|
"""Set value with current timestamp and optional custom TTL."""
|
||||||
|
ttl = ttl_seconds if ttl_seconds is not None else self._default_ttl
|
||||||
|
self._cache[key] = (time.monotonic(), value, ttl)
|
||||||
|
|
||||||
|
def clear(self) -> None:
|
||||||
|
"""Clear all cached values."""
|
||||||
|
self._cache.clear()
|
||||||
|
|
||||||
|
|
||||||
|
# Cache compose labels per host for 30 seconds
|
||||||
|
_compose_labels_cache = TTLCache(ttl_seconds=30.0)
|
||||||
|
|
||||||
|
|
||||||
def _print_compose_command(
|
def _print_compose_command(
|
||||||
host_name: str,
|
host_name: str,
|
||||||
compose_dir: str,
|
compose_dir: str,
|
||||||
@@ -158,6 +191,7 @@ def ssh_connect_kwargs(host: Host) -> dict[str, Any]:
|
|||||||
"port": host.port,
|
"port": host.port,
|
||||||
"username": host.user,
|
"username": host.user,
|
||||||
"known_hosts": None,
|
"known_hosts": None,
|
||||||
|
"gss_auth": False, # Disable GSSAPI - causes multi-second delays
|
||||||
}
|
}
|
||||||
# Add key file fallback (prioritized over agent if present)
|
# Add key file fallback (prioritized over agent if present)
|
||||||
key_path = get_key_path()
|
key_path = get_key_path()
|
||||||
@@ -523,6 +557,50 @@ async def get_running_stacks_on_host(
|
|||||||
return {line.strip() for line in result.stdout.splitlines() if line.strip()}
|
return {line.strip() for line in result.stdout.splitlines() if line.strip()}
|
||||||
|
|
||||||
|
|
||||||
|
async def get_container_compose_labels(
|
||||||
|
config: Config,
|
||||||
|
host_name: str,
|
||||||
|
) -> dict[str, tuple[str, str]]:
|
||||||
|
"""Get compose labels for all containers on a host.
|
||||||
|
|
||||||
|
Returns dict of container_name -> (project, service).
|
||||||
|
Includes all containers (-a flag) since Glances shows stopped containers too.
|
||||||
|
Falls back to empty dict on timeout/error (5s timeout).
|
||||||
|
Results are cached for 30 seconds to reduce SSH overhead.
|
||||||
|
"""
|
||||||
|
# Check cache first
|
||||||
|
cached: dict[str, tuple[str, str]] | None = _compose_labels_cache.get(host_name)
|
||||||
|
if cached is not None:
|
||||||
|
return cached
|
||||||
|
|
||||||
|
host = config.hosts[host_name]
|
||||||
|
cmd = (
|
||||||
|
"docker ps -a --format "
|
||||||
|
'\'{{.Names}}\t{{.Label "com.docker.compose.project"}}\t'
|
||||||
|
'{{.Label "com.docker.compose.service"}}\''
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
async with asyncio.timeout(5.0):
|
||||||
|
result = await run_command(host, cmd, stack=host_name, stream=False, prefix="")
|
||||||
|
except TimeoutError:
|
||||||
|
return {}
|
||||||
|
except Exception:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
labels: dict[str, tuple[str, str]] = {}
|
||||||
|
if result.success:
|
||||||
|
for line in result.stdout.splitlines():
|
||||||
|
parts = line.strip().split("\t")
|
||||||
|
if len(parts) >= 3: # noqa: PLR2004
|
||||||
|
name, project, service = parts[0], parts[1], parts[2]
|
||||||
|
labels[name] = (project or "", service or "")
|
||||||
|
|
||||||
|
# Cache the result
|
||||||
|
_compose_labels_cache.set(host_name, labels)
|
||||||
|
return labels
|
||||||
|
|
||||||
|
|
||||||
async def _batch_check_existence(
|
async def _batch_check_existence(
|
||||||
config: Config,
|
config: Config,
|
||||||
host_name: str,
|
host_name: str,
|
||||||
|
|||||||
236
src/compose_farm/glances.py
Normal file
236
src/compose_farm/glances.py
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
"""Glances API client for host resource monitoring."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import TYPE_CHECKING, Any
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .config import Config
|
||||||
|
|
||||||
|
# Default Glances REST API port
|
||||||
|
DEFAULT_GLANCES_PORT = 61208
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class HostStats:
|
||||||
|
"""Resource statistics for a host."""
|
||||||
|
|
||||||
|
host: str
|
||||||
|
cpu_percent: float
|
||||||
|
mem_percent: float
|
||||||
|
swap_percent: float
|
||||||
|
load: float
|
||||||
|
disk_percent: float
|
||||||
|
net_rx_rate: float = 0.0 # bytes/sec
|
||||||
|
net_tx_rate: float = 0.0 # bytes/sec
|
||||||
|
error: str | None = None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_error(cls, host: str, error: str) -> HostStats:
|
||||||
|
"""Create a HostStats with an error."""
|
||||||
|
return cls(
|
||||||
|
host=host,
|
||||||
|
cpu_percent=0,
|
||||||
|
mem_percent=0,
|
||||||
|
swap_percent=0,
|
||||||
|
load=0,
|
||||||
|
disk_percent=0,
|
||||||
|
net_rx_rate=0,
|
||||||
|
net_tx_rate=0,
|
||||||
|
error=error,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_host_stats(
|
||||||
|
host_name: str,
|
||||||
|
host_address: str,
|
||||||
|
port: int = DEFAULT_GLANCES_PORT,
|
||||||
|
request_timeout: float = 10.0,
|
||||||
|
) -> HostStats:
|
||||||
|
"""Fetch stats from a single host's Glances API."""
|
||||||
|
import httpx # noqa: PLC0415
|
||||||
|
|
||||||
|
base_url = f"http://{host_address}:{port}/api/4"
|
||||||
|
|
||||||
|
try:
|
||||||
|
async with httpx.AsyncClient(timeout=request_timeout) as client:
|
||||||
|
# Fetch quicklook stats (CPU, mem, load)
|
||||||
|
response = await client.get(f"{base_url}/quicklook")
|
||||||
|
if not response.is_success:
|
||||||
|
return HostStats.from_error(host_name, f"HTTP {response.status_code}")
|
||||||
|
data = response.json()
|
||||||
|
|
||||||
|
# Fetch filesystem stats for disk usage (root fs or max across all)
|
||||||
|
disk_percent = 0.0
|
||||||
|
try:
|
||||||
|
fs_response = await client.get(f"{base_url}/fs")
|
||||||
|
if fs_response.is_success:
|
||||||
|
fs_data = fs_response.json()
|
||||||
|
root = next((fs for fs in fs_data if fs.get("mnt_point") == "/"), None)
|
||||||
|
disk_percent = (
|
||||||
|
root.get("percent", 0)
|
||||||
|
if root
|
||||||
|
else max((fs.get("percent", 0) for fs in fs_data), default=0)
|
||||||
|
)
|
||||||
|
except httpx.HTTPError:
|
||||||
|
pass # Disk stats are optional
|
||||||
|
|
||||||
|
# Fetch network stats for rate (sum across non-loopback interfaces)
|
||||||
|
net_rx_rate, net_tx_rate = 0.0, 0.0
|
||||||
|
try:
|
||||||
|
net_response = await client.get(f"{base_url}/network")
|
||||||
|
if net_response.is_success:
|
||||||
|
for iface in net_response.json():
|
||||||
|
if not iface.get("interface_name", "").startswith("lo"):
|
||||||
|
net_rx_rate += iface.get("bytes_recv_rate_per_sec") or 0
|
||||||
|
net_tx_rate += iface.get("bytes_sent_rate_per_sec") or 0
|
||||||
|
except httpx.HTTPError:
|
||||||
|
pass # Network stats are optional
|
||||||
|
|
||||||
|
return HostStats(
|
||||||
|
host=host_name,
|
||||||
|
cpu_percent=data.get("cpu", 0),
|
||||||
|
mem_percent=data.get("mem", 0),
|
||||||
|
swap_percent=data.get("swap", 0),
|
||||||
|
load=data.get("load", 0),
|
||||||
|
disk_percent=disk_percent,
|
||||||
|
net_rx_rate=net_rx_rate,
|
||||||
|
net_tx_rate=net_tx_rate,
|
||||||
|
)
|
||||||
|
except httpx.TimeoutException:
|
||||||
|
return HostStats.from_error(host_name, "timeout")
|
||||||
|
except httpx.HTTPError as e:
|
||||||
|
return HostStats.from_error(host_name, str(e))
|
||||||
|
except Exception as e:
|
||||||
|
return HostStats.from_error(host_name, str(e))
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_all_host_stats(
|
||||||
|
config: Config,
|
||||||
|
port: int = DEFAULT_GLANCES_PORT,
|
||||||
|
) -> dict[str, HostStats]:
|
||||||
|
"""Fetch stats from all hosts in parallel."""
|
||||||
|
tasks = [fetch_host_stats(name, host.address, port) for name, host in config.hosts.items()]
|
||||||
|
results = await asyncio.gather(*tasks)
|
||||||
|
return {stats.host: stats for stats in results}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ContainerStats:
|
||||||
|
"""Container statistics from Glances."""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
host: str
|
||||||
|
status: str
|
||||||
|
image: str
|
||||||
|
cpu_percent: float
|
||||||
|
memory_usage: int # bytes
|
||||||
|
memory_limit: int # bytes
|
||||||
|
memory_percent: float
|
||||||
|
network_rx: int # cumulative bytes received
|
||||||
|
network_tx: int # cumulative bytes sent
|
||||||
|
uptime: str
|
||||||
|
ports: str
|
||||||
|
engine: str # docker, podman, etc.
|
||||||
|
stack: str = "" # compose project name (from docker labels)
|
||||||
|
service: str = "" # compose service name (from docker labels)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_container(data: dict[str, Any], host_name: str) -> ContainerStats:
|
||||||
|
"""Parse container data from Glances API response."""
|
||||||
|
# Image can be a list or string
|
||||||
|
image = data.get("image", ["unknown"])
|
||||||
|
if isinstance(image, list):
|
||||||
|
image = image[0] if image else "unknown"
|
||||||
|
|
||||||
|
# Calculate memory percent
|
||||||
|
mem_usage = data.get("memory_usage", 0) or 0
|
||||||
|
mem_limit = data.get("memory_limit", 1) or 1 # Avoid division by zero
|
||||||
|
mem_percent = (mem_usage / mem_limit) * 100 if mem_limit > 0 else 0
|
||||||
|
|
||||||
|
# Network stats
|
||||||
|
network = data.get("network", {}) or {}
|
||||||
|
network_rx = network.get("cumulative_rx", 0) or 0
|
||||||
|
network_tx = network.get("cumulative_tx", 0) or 0
|
||||||
|
|
||||||
|
return ContainerStats(
|
||||||
|
name=data.get("name", "unknown"),
|
||||||
|
host=host_name,
|
||||||
|
status=data.get("status", "unknown"),
|
||||||
|
image=image,
|
||||||
|
cpu_percent=data.get("cpu_percent", 0) or 0,
|
||||||
|
memory_usage=mem_usage,
|
||||||
|
memory_limit=mem_limit,
|
||||||
|
memory_percent=mem_percent,
|
||||||
|
network_rx=network_rx,
|
||||||
|
network_tx=network_tx,
|
||||||
|
uptime=data.get("uptime", ""),
|
||||||
|
ports=data.get("ports", "") or "",
|
||||||
|
engine=data.get("engine", "docker"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_container_stats(
    host_name: str,
    host_address: str,
    port: int = DEFAULT_GLANCES_PORT,
    request_timeout: float = 10.0,
) -> tuple[list[ContainerStats] | None, str | None]:
    """Fetch container stats from a single host's Glances API.

    Returns:
        (containers, error_message)
        - Success: ([...], None)
        - Failure: (None, "error message")

    """
    import httpx  # noqa: PLC0415

    endpoint = f"http://{host_address}:{port}/api/4/containers"

    try:
        async with httpx.AsyncClient(timeout=request_timeout) as client:
            response = await client.get(endpoint)
            if not response.is_success:
                return None, f"HTTP {response.status_code}: {response.reason_phrase}"
            parsed = [_parse_container(item, host_name) for item in response.json()]
            return parsed, None
    except httpx.ConnectError:
        return None, "Connection refused (Glances offline?)"
    except httpx.TimeoutException:
        return None, "Connection timed out"
    except Exception as e:
        # Any other transport/parse failure is surfaced as a plain message.
        return None, str(e)
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_all_container_stats(
    config: Config,
    port: int = DEFAULT_GLANCES_PORT,
) -> list[ContainerStats]:
    """Fetch container stats from all hosts in parallel, enriched with compose labels."""
    from .executor import get_container_compose_labels  # noqa: PLC0415

    async def fetch_host_data(host_name: str, host_address: str) -> list[ContainerStats]:
        # Glances stats and compose labels are independent; fetch them concurrently.
        (containers, _), labels = await asyncio.gather(
            fetch_container_stats(host_name, host_address, port),
            get_container_compose_labels(config, host_name),
        )
        if containers is None:
            # Skip failed hosts in aggregate view
            return []
        # Enrich containers with compose labels (mutate in place)
        for container in containers:
            container.stack, container.service = labels.get(container.name, ("", ""))
        return containers

    per_host = await asyncio.gather(
        *(fetch_host_data(name, host.address) for name, host in config.hosts.items())
    )
    # Flatten the per-host lists into one combined list.
    return [c for host_list in per_host for c in host_list]
|
||||||
220
src/compose_farm/registry.py
Normal file
220
src/compose_farm/registry.py
Normal file
@@ -0,0 +1,220 @@
|
|||||||
|
"""Container registry API client for tag discovery."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
# Image reference pattern: [registry/][namespace/]name[:tag][@digest]
IMAGE_PATTERN = re.compile(
    r"^(?:(?P<registry>[^/]+\.[^/]+)/)?(?:(?P<namespace>[^/:@]+)/)?(?P<name>[^/:@]+)(?::(?P<tag>[^@]+))?(?:@(?P<digest>.+))?$"
)

# Docker Hub aliases
DOCKER_HUB_ALIASES = frozenset(
    {"docker.io", "index.docker.io", "registry.hub.docker.com", "registry-1.docker.io"}
)

# Token endpoints per registry: (url, extra_params)
TOKEN_ENDPOINTS: dict[str, tuple[str, dict[str, str]]] = {
    "docker.io": ("https://auth.docker.io/token", {"service": "registry.docker.io"}),
    "ghcr.io": ("https://ghcr.io/token", {}),
}

# Registry URL overrides (Docker Hub uses a different host for API)
REGISTRY_URLS: dict[str, str] = {
    "docker.io": "https://registry-1.docker.io",
}

HTTP_OK = 200

MANIFEST_ACCEPT = (
    "application/vnd.docker.distribution.manifest.v2+json, "
    "application/vnd.oci.image.manifest.v1+json, "
    "application/vnd.oci.image.index.v1+json"
)


@dataclass(frozen=True)
class ImageRef:
    """Parsed container image reference."""

    registry: str
    namespace: str
    name: str
    tag: str
    digest: str | None = None

    @property
    def full_name(self) -> str:
        """Full image name with namespace."""
        if not self.namespace:
            return self.name
        return f"{self.namespace}/{self.name}"

    @property
    def display_name(self) -> str:
        """Display name (omits docker.io/library for official images)."""
        if self.registry not in DOCKER_HUB_ALIASES:
            return f"{self.registry}/{self.full_name}"
        # Docker Hub official images ("library/<name>") show as the bare name.
        return self.name if self.namespace == "library" else self.full_name

    @classmethod
    def parse(cls, image: str) -> ImageRef:
        """Parse image string into components."""
        match = IMAGE_PATTERN.match(image)
        if match is None:
            # Unparseable reference: strip any tag/digest suffix and assume
            # an official Docker Hub image tagged "latest".
            bare = image.split(":")[0].split("@")[0]
            return cls("docker.io", "library", bare, "latest")

        parts = match.groupdict()
        registry = parts.get("registry") or "docker.io"
        namespace = parts.get("namespace") or ""
        # Docker Hub official images have implicit "library" namespace
        if not namespace and registry in DOCKER_HUB_ALIASES:
            namespace = "library"

        return cls(
            registry,
            namespace,
            parts.get("name") or image,
            parts.get("tag") or "latest",
            parts.get("digest"),
        )
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class TagCheckResult:
    """Result of checking tags for an image."""

    # Parsed reference the check was performed for.
    image: ImageRef
    # Digest of the current tag; "" when it could not be resolved.
    current_digest: str
    # Version tags newer than the current one (empty when up to date or unknown).
    available_updates: list[str] = field(default_factory=list)
    # Human-readable failure description; None on success.
    error: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class RegistryClient:
    """Unified OCI Distribution API client.

    One instance talks to a single registry (Docker Hub, GHCR, or any other
    v2-compatible registry) and caches pull tokens per repository.
    """

    def __init__(self, registry: str) -> None:
        """Initialize for a specific registry."""
        self.registry = registry.lower()
        # Normalize Docker Hub aliases so token/URL lookups use a single key.
        if self.registry in DOCKER_HUB_ALIASES:
            self.registry = "docker.io"

        # Docker Hub's API is served from registry-1.docker.io; every other
        # registry is assumed to serve the v2 API at https://<registry>.
        self.registry_url = REGISTRY_URLS.get(self.registry, f"https://{self.registry}")
        # Pull-token cache keyed by repository full name ("namespace/name").
        self._token_cache: dict[str, str] = {}

    async def _get_token(self, image: ImageRef, client: httpx.AsyncClient) -> str | None:
        """Get auth token for the registry (cached per image).

        Returns None for registries without a known token endpoint (treated
        as "no auth needed") or when the token request fails.
        """
        cache_key = image.full_name
        if cache_key in self._token_cache:
            return self._token_cache[cache_key]

        endpoint = TOKEN_ENDPOINTS.get(self.registry)
        if not endpoint:
            return None  # No auth needed or unknown registry

        # Request an anonymous pull-scoped token for this one repository.
        url, extra_params = endpoint
        params = {"scope": f"repository:{image.full_name}:pull", **extra_params}
        resp = await client.get(url, params=params)

        if resp.status_code == HTTP_OK:
            token: str | None = resp.json().get("token")
            if token:
                self._token_cache[cache_key] = token
            return token
        return None

    async def get_tags(self, image: ImageRef, client: httpx.AsyncClient) -> list[str]:
        """Fetch available tags for an image.

        Returns [] on any non-200 response. NOTE(review): only a single
        /tags/list response is read — registries that paginate this endpoint
        via the Link header would yield a partial list; confirm whether
        pagination matters for the registries in use.
        """
        headers = {}
        token = await self._get_token(image, client)
        if token:
            headers["Authorization"] = f"Bearer {token}"

        url = f"{self.registry_url}/v2/{image.full_name}/tags/list"
        resp = await client.get(url, headers=headers)

        if resp.status_code != HTTP_OK:
            return []
        tags: list[str] = resp.json().get("tags", [])
        return tags

    async def get_digest(self, image: ImageRef, tag: str, client: httpx.AsyncClient) -> str | None:
        """Get digest for a specific tag.

        Uses a HEAD request so only headers are transferred; the digest is
        taken from the Docker-Content-Digest response header. Returns None on
        any non-200 response or when the header is absent.
        """
        headers = {"Accept": MANIFEST_ACCEPT}
        token = await self._get_token(image, client)
        if token:
            headers["Authorization"] = f"Bearer {token}"

        url = f"{self.registry_url}/v2/{image.full_name}/manifests/{tag}"
        resp = await client.head(url, headers=headers)

        if resp.status_code == HTTP_OK:
            digest: str | None = resp.headers.get("docker-content-digest")
            return digest
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_version(tag: str) -> tuple[int, ...] | None:
|
||||||
|
"""Parse version string into comparable tuple."""
|
||||||
|
tag = tag.lstrip("vV")
|
||||||
|
parts = tag.split(".")
|
||||||
|
try:
|
||||||
|
return tuple(int(p) for p in parts)
|
||||||
|
except ValueError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _find_updates(current_tag: str, tags: list[str]) -> list[str]:
|
||||||
|
"""Find tags newer than current based on version comparison."""
|
||||||
|
current_version = _parse_version(current_tag)
|
||||||
|
if current_version is None:
|
||||||
|
return []
|
||||||
|
|
||||||
|
updates = []
|
||||||
|
for tag in tags:
|
||||||
|
tag_version = _parse_version(tag)
|
||||||
|
if tag_version and tag_version > current_version:
|
||||||
|
updates.append(tag)
|
||||||
|
|
||||||
|
updates.sort(key=lambda t: _parse_version(t) or (), reverse=True)
|
||||||
|
return updates
|
||||||
|
|
||||||
|
|
||||||
|
async def check_image_updates(
    image_str: str,
    client: httpx.AsyncClient,
) -> TagCheckResult:
    """Check if newer versions are available for an image.

    Args:
        image_str: Image string like "nginx:1.25" or "ghcr.io/user/repo:tag"
        client: httpx async client

    Returns:
        TagCheckResult with available updates

    """
    ref = ImageRef.parse(image_str)
    registry_client = RegistryClient(ref.registry)

    try:
        # Tag listing first, then resolve the digest of the running tag.
        available = _find_updates(ref.tag, await registry_client.get_tags(ref, client))
        digest = await registry_client.get_digest(ref, ref.tag, client) or ""
    except Exception as e:
        # Registry failures (network, auth, parsing) are reported, not raised.
        return TagCheckResult(image=ref, current_digest="", error=str(e))

    return TagCheckResult(
        image=ref,
        current_digest=digest,
        available_updates=available,
    )
|
||||||
@@ -6,15 +6,16 @@ import asyncio
|
|||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
from contextlib import asynccontextmanager, suppress
|
from contextlib import asynccontextmanager, suppress
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING, Any, cast
|
||||||
|
|
||||||
from fastapi import FastAPI
|
from fastapi import FastAPI
|
||||||
|
from fastapi.middleware.gzip import GZipMiddleware
|
||||||
from fastapi.staticfiles import StaticFiles
|
from fastapi.staticfiles import StaticFiles
|
||||||
from pydantic import ValidationError
|
from pydantic import ValidationError
|
||||||
from rich.logging import RichHandler
|
from rich.logging import RichHandler
|
||||||
|
|
||||||
from compose_farm.web.deps import STATIC_DIR, get_config
|
from compose_farm.web.deps import STATIC_DIR, get_config
|
||||||
from compose_farm.web.routes import actions, api, pages
|
from compose_farm.web.routes import actions, api, containers, pages
|
||||||
from compose_farm.web.streaming import TASK_TTL_SECONDS, cleanup_stale_tasks
|
from compose_farm.web.streaming import TASK_TTL_SECONDS, cleanup_stale_tasks
|
||||||
|
|
||||||
# Configure logging with Rich handler for compose_farm.web modules
|
# Configure logging with Rich handler for compose_farm.web modules
|
||||||
@@ -64,10 +65,14 @@ def create_app() -> FastAPI:
|
|||||||
lifespan=lifespan,
|
lifespan=lifespan,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Enable Gzip compression for faster transfers over slow networks
|
||||||
|
app.add_middleware(cast("Any", GZipMiddleware), minimum_size=1000)
|
||||||
|
|
||||||
# Mount static files
|
# Mount static files
|
||||||
app.mount("/static", StaticFiles(directory=str(STATIC_DIR)), name="static")
|
app.mount("/static", StaticFiles(directory=str(STATIC_DIR)), name="static")
|
||||||
|
|
||||||
app.include_router(pages.router)
|
app.include_router(pages.router)
|
||||||
|
app.include_router(containers.router)
|
||||||
app.include_router(api.router, prefix="/api")
|
app.include_router(api.router, prefix="/api")
|
||||||
app.include_router(actions.router, prefix="/api")
|
app.include_router(actions.router, prefix="/api")
|
||||||
|
|
||||||
|
|||||||
@@ -39,6 +39,14 @@ CDN_ASSETS: dict[str, tuple[str, str]] = {
|
|||||||
"xterm-fit.js",
|
"xterm-fit.js",
|
||||||
"application/javascript",
|
"application/javascript",
|
||||||
),
|
),
|
||||||
|
"https://unpkg.com/idiomorph/dist/idiomorph.min.js": (
|
||||||
|
"idiomorph.js",
|
||||||
|
"application/javascript",
|
||||||
|
),
|
||||||
|
"https://unpkg.com/idiomorph/dist/idiomorph-ext.min.js": (
|
||||||
|
"idiomorph-ext.js",
|
||||||
|
"application/javascript",
|
||||||
|
),
|
||||||
# Monaco editor - dynamically loaded by app.js
|
# Monaco editor - dynamically loaded by app.js
|
||||||
"https://cdn.jsdelivr.net/npm/monaco-editor@0.52.2/min/vs/loader.js": (
|
"https://cdn.jsdelivr.net/npm/monaco-editor@0.52.2/min/vs/loader.js": (
|
||||||
"monaco-loader.js",
|
"monaco-loader.js",
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
"""Web routes."""
|
"""Web routes."""
|
||||||
|
|
||||||
from compose_farm.web.routes import actions, api, pages
|
from compose_farm.web.routes import actions, api, containers, pages
|
||||||
|
|
||||||
__all__ = ["actions", "api", "pages"]
|
__all__ = ["actions", "api", "containers", "pages"]
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ from fastapi.responses import HTMLResponse
|
|||||||
|
|
||||||
from compose_farm.compose import get_container_name
|
from compose_farm.compose import get_container_name
|
||||||
from compose_farm.executor import is_local, run_compose_on_host, ssh_connect_kwargs
|
from compose_farm.executor import is_local, run_compose_on_host, ssh_connect_kwargs
|
||||||
|
from compose_farm.glances import fetch_all_host_stats
|
||||||
from compose_farm.paths import backup_dir, find_config_path
|
from compose_farm.paths import backup_dir, find_config_path
|
||||||
from compose_farm.state import load_state
|
from compose_farm.state import load_state
|
||||||
from compose_farm.web.deps import get_config, get_templates
|
from compose_farm.web.deps import get_config, get_templates
|
||||||
@@ -385,3 +386,19 @@ async def write_console_file(
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.exception("Failed to write file %s to host %s", path, host)
|
logger.exception("Failed to write file %s to host %s", path, host)
|
||||||
raise HTTPException(status_code=500, detail=str(e)) from e
|
raise HTTPException(status_code=500, detail=str(e)) from e
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/glances", response_class=HTMLResponse)
async def get_glances_stats() -> HTMLResponse:
    """Get resource stats from Glances for all hosts.

    Returns an empty body when no Glances stack is configured, so the
    surrounding page simply renders nothing for this partial.
    """
    config = get_config()

    if not config.glances_stack:
        return HTMLResponse("")  # Glances not configured

    stats = await fetch_all_host_stats(config)

    # Render the partial template directly with just the stats context.
    templates = get_templates()
    template = templates.env.get_template("partials/glances.html")
    html = template.render(stats=stats)
    return HTMLResponse(html)
|
||||||
|
|||||||
370
src/compose_farm/web/routes/containers.py
Normal file
370
src/compose_farm/web/routes/containers.py
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
"""Container dashboard routes using Glances API."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import html
|
||||||
|
import re
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
import humanize
|
||||||
|
from fastapi import APIRouter, Request
|
||||||
|
from fastapi.responses import HTMLResponse, JSONResponse
|
||||||
|
|
||||||
|
from compose_farm.executor import TTLCache
|
||||||
|
from compose_farm.glances import ContainerStats, fetch_all_container_stats
|
||||||
|
from compose_farm.registry import DOCKER_HUB_ALIASES, ImageRef
|
||||||
|
from compose_farm.web.deps import get_config, get_templates
|
||||||
|
|
||||||
|
router = APIRouter(tags=["containers"])
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from compose_farm.registry import TagCheckResult
|
||||||
|
|
||||||
|
# Cache registry update checks for 5 minutes (300 seconds)
# Registry calls are slow and often rate-limited
_update_check_cache = TTLCache(ttl_seconds=300.0)

# Minimum parts needed to infer stack/service from container name
MIN_NAME_PARTS = 2

# HTML for "no update info" dash (placeholder until a check result exists)
_DASH_HTML = '<span class="text-xs opacity-50">-</span>'
|
||||||
|
|
||||||
|
|
||||||
|
def _format_bytes(bytes_val: int) -> str:
    """Format bytes to a human readable string (binary units, one decimal place)."""
    return humanize.naturalsize(bytes_val, binary=True, format="%.1f")
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_image(image: str) -> tuple[str, str]:
|
||||||
|
"""Parse image string into (name, tag)."""
|
||||||
|
# Handle registry prefix (e.g., ghcr.io/user/repo:tag)
|
||||||
|
if ":" in image:
|
||||||
|
# Find last colon that's not part of port
|
||||||
|
parts = image.rsplit(":", 1)
|
||||||
|
if "/" in parts[-1]:
|
||||||
|
# The "tag" contains a slash, so it's probably a port
|
||||||
|
return image, "latest"
|
||||||
|
return parts[0], parts[1]
|
||||||
|
return image, "latest"
|
||||||
|
|
||||||
|
|
||||||
|
def _infer_stack_service(name: str) -> tuple[str, str]:
    """Fallback: infer stack and service from container name.

    Used when compose labels are not available.
    Docker Compose naming conventions:
    - Default: {project}_{service}_{instance} or {project}-{service}-{instance}
    - Custom: {container_name} from compose file
    """
    # Older compose versions join with underscores: project_service_instance.
    underscore_parts = name.split("_")
    if len(underscore_parts) >= MIN_NAME_PARTS:
        return underscore_parts[0], underscore_parts[1]

    # Newer compose versions join with hyphens; the trailing part is the
    # instance number, so the service name is everything in between.
    hyphen_parts = name.split("-")
    if len(hyphen_parts) > MIN_NAME_PARTS:
        return hyphen_parts[0], "-".join(hyphen_parts[1:-1])
    if len(hyphen_parts) == MIN_NAME_PARTS:
        return hyphen_parts[0], hyphen_parts[1]

    # No separator at all: reuse the name for both stack and service.
    return name, name
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/live-stats", response_class=HTMLResponse)
async def containers_page(request: Request) -> HTMLResponse:
    """Container dashboard page."""
    config = get_config()
    templates = get_templates()

    # The dashboard degrades gracefully when no Glances stack is configured.
    enabled = config.glances_stack is not None
    context = {
        "request": request,
        "glances_enabled": enabled,
        "hosts": sorted(config.hosts.keys()) if enabled else [],
    }
    return templates.TemplateResponse("containers.html", context)
|
||||||
|
|
||||||
|
|
||||||
|
_STATUS_CLASSES = {
|
||||||
|
"running": "badge badge-success badge-sm",
|
||||||
|
"exited": "badge badge-error badge-sm",
|
||||||
|
"paused": "badge badge-warning badge-sm",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _status_class(status: str) -> str:
|
||||||
|
"""Get CSS class for status badge."""
|
||||||
|
return _STATUS_CLASSES.get(status.lower(), "badge badge-ghost badge-sm")
|
||||||
|
|
||||||
|
|
||||||
|
def _progress_class(percent: float) -> str:
|
||||||
|
"""Get CSS class for progress bar color."""
|
||||||
|
if percent > 80: # noqa: PLR2004
|
||||||
|
return "bg-error"
|
||||||
|
if percent > 50: # noqa: PLR2004
|
||||||
|
return "bg-warning"
|
||||||
|
return "bg-success"
|
||||||
|
|
||||||
|
|
||||||
|
def _render_update_cell(image: str, tag: str) -> str:
    """Render update check cell with client-side batch updates."""
    # Serve the cached badge when one exists; otherwise a placeholder dash
    # that client-side JS replaces after the batch update check completes.
    cached = _update_check_cache.get(f"{image}:{tag}")
    content = _DASH_HTML if cached is None else cached
    image_attr = quote(image, safe="")
    tag_attr = quote(tag, safe="")
    return (
        f"""<td class="update-cell" data-image="{image_attr}" data-tag="{tag_attr}">"""
        f"{content}</td>"
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _image_web_url(image: str) -> str:
    """Return a human-friendly registry URL for an image (without tag).

    Docker Hub images link to hub.docker.com (official images under ``/_/``,
    user images under ``/r/``); any other registry links to the registry host
    itself. Every branch produces a URL, so the return type is ``str`` —
    callers that still check for falsiness remain correct.
    """
    ref = ImageRef.parse(image)
    if ref.registry in DOCKER_HUB_ALIASES:
        if ref.namespace == "library":
            return f"https://hub.docker.com/_/{ref.name}"
        return f"https://hub.docker.com/r/{ref.namespace}/{ref.name}"
    return f"https://{ref.registry}/{ref.full_name}"
|
||||||
|
|
||||||
|
|
||||||
|
def _render_row(c: ContainerStats, idx: int | str) -> str:
    """Render a single container as an HTML table row.

    Args:
        c: Stats for one container (already enriched with stack/service
           labels where available).
        idx: Row number to show, or a placeholder string ("-") when the
           final numbering is applied client-side.

    Each data cell carries a ``data-sort`` attribute so the client can sort
    the table without another round-trip.
    """
    image_name, tag = _parse_image(c.image)
    # Fall back to name-based inference for stack/service; compute the
    # inference at most once instead of once per missing field.
    if c.stack and c.service:
        stack, service = c.stack, c.service
    else:
        inferred = _infer_stack_service(c.name)
        stack = c.stack or inferred[0]
        service = c.service or inferred[1]

    cpu = c.cpu_percent
    mem = c.memory_percent
    cpu_class = _progress_class(cpu)
    mem_class = _progress_class(mem)

    # Highlight rows with high resource usage
    high_cpu = cpu > 80  # noqa: PLR2004
    high_mem = mem > 90  # noqa: PLR2004
    row_class = "high-usage" if (high_cpu or high_mem) else ""

    uptime_sec = _parse_uptime_seconds(c.uptime)
    actions = _render_actions(stack)
    update_cell = _render_update_cell(image_name, tag)
    image_label = f"{image_name}:{tag}"
    image_url = _image_web_url(image_name)
    if image_url:
        image_html = (
            f'<a href="{image_url}" target="_blank" rel="noopener noreferrer" '
            f'class="link link-hover">'
            f'<code class="text-xs bg-base-200 px-1 rounded">{image_label}</code></a>'
        )
    else:
        image_html = f'<code class="text-xs bg-base-200 px-1 rounded">{image_label}</code>'
    # Render as single line to avoid whitespace nodes in DOM
    row_id = f"c-{c.host}-{c.name}"
    class_attr = f' class="{row_class}"' if row_class else ""
    return (
        f'<tr id="{row_id}" data-host="{c.host}"{class_attr}><td class="text-xs opacity-50">{idx}</td>'
        f'<td data-sort="{stack.lower()}"><a href="/stack/{stack}" class="link link-hover link-primary" hx-boost="true">{stack}</a></td>'
        f'<td data-sort="{service.lower()}" class="text-xs opacity-70">{service}</td>'
        f"<td>{actions}</td>"
        f'<td data-sort="{c.host.lower()}"><span class="badge badge-outline badge-xs">{c.host}</span></td>'
        f'<td data-sort="{c.image.lower()}">{image_html}</td>'
        f"{update_cell}"
        f'<td data-sort="{c.status.lower()}"><span class="{_status_class(c.status)}">{c.status}</span></td>'
        f'<td data-sort="{uptime_sec}" class="text-xs text-right font-mono">{c.uptime or "-"}</td>'
        f'<td data-sort="{cpu}" class="text-right font-mono"><div class="flex flex-col items-end gap-0.5"><div class="w-12 h-2 bg-base-300 rounded-full overflow-hidden"><div class="h-full {cpu_class}" style="width: {min(cpu, 100)}%"></div></div><span class="text-xs">{cpu:.0f}%</span></div></td>'
        f'<td data-sort="{c.memory_usage}" class="text-right font-mono"><div class="flex flex-col items-end gap-0.5"><div class="w-12 h-2 bg-base-300 rounded-full overflow-hidden"><div class="h-full {mem_class}" style="width: {min(mem, 100)}%"></div></div><span class="text-xs">{_format_bytes(c.memory_usage)}</span></div></td>'
        f'<td data-sort="{c.network_rx + c.network_tx}" class="text-xs text-right font-mono">↓{_format_bytes(c.network_rx)} ↑{_format_bytes(c.network_tx)}</td>'
        "</tr>"
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _render_actions(stack: str) -> str:
    """Render actions dropdown for a container row.

    The button delegates to the client-side ``openActionMenu`` handler; the
    SVG references the shared ``#icon-menu`` sprite.
    """
    return f"""<button class="btn btn-circle btn-ghost btn-xs" onclick="openActionMenu(event, '{stack}')" aria-label="Actions for {stack}">
<svg class="h-4 w-4"><use href="#icon-menu" /></svg>
</button>"""
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_uptime_seconds(uptime: str) -> int:
|
||||||
|
"""Parse uptime string to seconds for sorting."""
|
||||||
|
if not uptime:
|
||||||
|
return 0
|
||||||
|
uptime = uptime.lower().strip()
|
||||||
|
# Handle "a/an" as 1
|
||||||
|
uptime = uptime.replace("an ", "1 ").replace("a ", "1 ")
|
||||||
|
|
||||||
|
total = 0
|
||||||
|
multipliers = {
|
||||||
|
"second": 1,
|
||||||
|
"minute": 60,
|
||||||
|
"hour": 3600,
|
||||||
|
"day": 86400,
|
||||||
|
"week": 604800,
|
||||||
|
"month": 2592000,
|
||||||
|
"year": 31536000,
|
||||||
|
}
|
||||||
|
for match in re.finditer(r"(\d+)\s*(\w+)", uptime):
|
||||||
|
num = int(match.group(1))
|
||||||
|
unit = match.group(2).rstrip("s") # Remove plural 's'
|
||||||
|
total += num * multipliers.get(unit, 0)
|
||||||
|
return total
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/containers/rows", response_class=HTMLResponse)
async def get_containers_rows() -> HTMLResponse:
    """Get container table rows as HTML for HTMX.

    Each cell has data-sort attribute for instant client-side sorting.
    """
    config = get_config()

    # Without a configured Glances stack there is nothing to query.
    if not config.glances_stack:
        return HTMLResponse(
            '<tr><td colspan="12" class="text-center text-error">Glances not configured</td></tr>'
        )

    containers = await fetch_all_container_stats(config)
    if not containers:
        return HTMLResponse(
            '<tr><td colspan="12" class="text-center py-4 opacity-60">No containers found</td></tr>'
        )

    rendered = (_render_row(c, i + 1) for i, c in enumerate(containers))
    return HTMLResponse("\n".join(rendered))
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/api/containers/rows/{host_name}", response_class=HTMLResponse)
async def get_containers_rows_by_host(host_name: str) -> HTMLResponse:
    """Get container rows for a specific host.

    Returns immediately with Glances data. Stack/service are inferred from
    container names for instant display (no SSH wait).

    Returns an empty body for unknown hosts or hosts with no containers, and
    a single error row when the Glances fetch fails.
    """
    import logging  # noqa: PLC0415
    import time  # noqa: PLC0415

    from compose_farm.executor import get_container_compose_labels  # noqa: PLC0415
    from compose_farm.glances import fetch_container_stats  # noqa: PLC0415

    logger = logging.getLogger(__name__)
    config = get_config()

    if host_name not in config.hosts:
        return HTMLResponse("")

    host = config.hosts[host_name]

    # Time the Glances fetch separately from rendering for the log line below.
    t0 = time.monotonic()
    containers, error = await fetch_container_stats(host_name, host.address)
    t1 = time.monotonic()
    fetch_ms = (t1 - t0) * 1000

    if containers is None:
        logger.error(
            "Failed to fetch stats for %s in %.1fms: %s",
            host_name,
            fetch_ms,
            error,
        )
        return HTMLResponse(
            f'<tr class="text-error"><td colspan="12" class="text-center py-2">Error: {error}</td></tr>'
        )

    if not containers:
        return HTMLResponse("")  # No rows for this host

    # Prefer real compose labels; fall back to name-based inference when
    # either half of the label pair is missing.
    labels = await get_container_compose_labels(config, host_name)
    for c in containers:
        stack, service = labels.get(c.name, ("", ""))
        if not stack or not service:
            stack, service = _infer_stack_service(c.name)
        c.stack, c.service = stack, service

    # Only show containers from stacks in config (filters out orphaned/unknown stacks)
    containers = [c for c in containers if not c.stack or c.stack in config.stacks]

    # Use placeholder index (will be renumbered by JS after all hosts load)
    rows = "\n".join(_render_row(c, "-") for c in containers)
    t2 = time.monotonic()
    render_ms = (t2 - t1) * 1000

    logger.info(
        "Loaded %d rows for %s in %.1fms (fetch) + %.1fms (render)",
        len(containers),
        host_name,
        fetch_ms,
        render_ms,
    )
    return HTMLResponse(rows)
|
||||||
|
|
||||||
|
|
||||||
|
def _render_update_badge(result: TagCheckResult) -> str:
|
||||||
|
if result.error:
|
||||||
|
return _DASH_HTML
|
||||||
|
if result.available_updates:
|
||||||
|
updates = result.available_updates
|
||||||
|
count = len(updates)
|
||||||
|
title = f"Newer: {', '.join(updates[:3])}" + ("..." if count > 3 else "") # noqa: PLR2004
|
||||||
|
tip = html.escape(title, quote=True)
|
||||||
|
return (
|
||||||
|
f'<span class="tooltip" data-tip="{tip}">'
|
||||||
|
f'<span class="badge badge-warning badge-xs cursor-help">{count} new</span>'
|
||||||
|
"</span>"
|
||||||
|
)
|
||||||
|
return '<span class="tooltip" data-tip="Up to date"><span class="text-success text-xs">✓</span></span>'
|
||||||
|
|
||||||
|
|
||||||
|
@router.post("/api/containers/check-updates", response_class=JSONResponse)
async def check_container_updates_batch(request: Request) -> JSONResponse:
    """Batch update checks for a list of images.

    Payload: {"items": [{"image": "...", "tag": "..."}, ...]}
    Returns: {"results": [{"image": "...", "tag": "...", "html": "..."}, ...]}

    Rendered badges are cached in ``_update_check_cache`` (keyed by
    "image:tag") so repeated polls don't hammer the registries; failures are
    cached for a shorter 60s window so they retry sooner.
    """
    import httpx  # noqa: PLC0415

    payload = await request.json()
    # Tolerate a malformed body: anything that isn't a dict yields no items.
    items = payload.get("items", []) if isinstance(payload, dict) else []
    if not items:
        return JSONResponse({"results": []})

    results = []

    from compose_farm.registry import check_image_updates  # noqa: PLC0415

    # One shared HTTP client for the whole batch.
    async with httpx.AsyncClient(timeout=10.0) as client:
        for item in items:
            image = item.get("image", "")
            tag = item.get("tag", "")
            full_image = f"{image}:{tag}"
            if not image or not tag:
                # Incomplete entry: respond with the neutral dash badge.
                results.append({"image": image, "tag": tag, "html": _DASH_HTML})
                continue

            # NOTE: Tag-based checks cannot detect digest changes for moving tags
            # like "latest". A future improvement could compare remote vs local
            # digests using dockerfarm-log.toml (from `cf refresh`) or a per-host
            # digest lookup.

            cached_html: str | None = _update_check_cache.get(full_image)
            if cached_html is not None:
                results.append({"image": image, "tag": tag, "html": cached_html})
                continue

            try:
                result = await check_image_updates(full_image, client)
                # Local name shadows the stdlib `html` module imported at file
                # scope; harmless here since html.escape isn't used below.
                html = _render_update_badge(result)
                _update_check_cache.set(full_image, html)
            except Exception:
                # Cache the failure briefly (60s) so the next poll retries soon.
                _update_check_cache.set(full_image, _DASH_HTML, ttl_seconds=60.0)
                html = _DASH_HTML

            results.append({"image": image, "tag": tag, "html": html})

    return JSONResponse({"results": results})
|
||||||
@@ -9,7 +9,6 @@
|
|||||||
// ANSI escape codes for terminal output
|
// ANSI escape codes for terminal output
|
||||||
const ANSI = {
|
const ANSI = {
|
||||||
RED: '\x1b[31m',
|
RED: '\x1b[31m',
|
||||||
GREEN: '\x1b[32m',
|
|
||||||
DIM: '\x1b[2m',
|
DIM: '\x1b[2m',
|
||||||
RESET: '\x1b[0m',
|
RESET: '\x1b[0m',
|
||||||
CRLF: '\r\n'
|
CRLF: '\r\n'
|
||||||
@@ -122,7 +121,6 @@ function whenXtermReady(callback, maxAttempts = 20) {
|
|||||||
};
|
};
|
||||||
tryInit(maxAttempts);
|
tryInit(maxAttempts);
|
||||||
}
|
}
|
||||||
window.whenXtermReady = whenXtermReady;
|
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// TERMINAL
|
// TERMINAL
|
||||||
@@ -209,8 +207,6 @@ function initTerminal(elementId, taskId) {
|
|||||||
return { term, ws };
|
return { term, ws };
|
||||||
}
|
}
|
||||||
|
|
||||||
window.initTerminal = initTerminal;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Initialize an interactive exec terminal
|
* Initialize an interactive exec terminal
|
||||||
*/
|
*/
|
||||||
@@ -432,7 +428,7 @@ function initMonacoEditors() {
|
|||||||
* Save all editors
|
* Save all editors
|
||||||
*/
|
*/
|
||||||
async function saveAllEditors() {
|
async function saveAllEditors() {
|
||||||
const saveBtn = document.getElementById('save-btn') || document.getElementById('save-config-btn');
|
const saveBtn = getSaveButton();
|
||||||
const results = [];
|
const results = [];
|
||||||
|
|
||||||
for (const [id, editor] of Object.entries(editors)) {
|
for (const [id, editor] of Object.entries(editors)) {
|
||||||
@@ -468,12 +464,16 @@ async function saveAllEditors() {
|
|||||||
* Initialize save button handler
|
* Initialize save button handler
|
||||||
*/
|
*/
|
||||||
function initSaveButton() {
|
function initSaveButton() {
|
||||||
const saveBtn = document.getElementById('save-btn') || document.getElementById('save-config-btn');
|
const saveBtn = getSaveButton();
|
||||||
if (!saveBtn) return;
|
if (!saveBtn) return;
|
||||||
|
|
||||||
saveBtn.onclick = saveAllEditors;
|
saveBtn.onclick = saveAllEditors;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Locate the save button, whichever page variant is present (or null). */
function getSaveButton() {
    for (const id of ['save-btn', 'save-config-btn']) {
        const btn = document.getElementById(id);
        if (btn) return btn;
    }
    return null;
}
|
||||||
|
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
// UI HELPERS
|
// UI HELPERS
|
||||||
// ============================================================================
|
// ============================================================================
|
||||||
@@ -607,6 +607,7 @@ function playFabIntro() {
|
|||||||
cmd('action', 'Update All', 'Update all stacks', dashboardAction('update-all'), icons.refresh_cw),
|
cmd('action', 'Update All', 'Update all stacks', dashboardAction('update-all'), icons.refresh_cw),
|
||||||
cmd('app', 'Theme', 'Change color theme', openThemePicker, icons.palette),
|
cmd('app', 'Theme', 'Change color theme', openThemePicker, icons.palette),
|
||||||
cmd('app', 'Dashboard', 'Go to dashboard', nav('/'), icons.home),
|
cmd('app', 'Dashboard', 'Go to dashboard', nav('/'), icons.home),
|
||||||
|
cmd('app', 'Live Stats', 'View all containers across hosts', nav('/live-stats'), icons.box),
|
||||||
cmd('app', 'Console', 'Go to console', nav('/console'), icons.terminal),
|
cmd('app', 'Console', 'Go to console', nav('/console'), icons.terminal),
|
||||||
cmd('app', 'Edit Config', 'Edit compose-farm.yaml', nav('/console#editor'), icons.file_code),
|
cmd('app', 'Edit Config', 'Edit compose-farm.yaml', nav('/console#editor'), icons.file_code),
|
||||||
cmd('app', 'Docs', 'Open documentation', openExternal('https://compose-farm.nijho.lt/'), icons.book_open),
|
cmd('app', 'Docs', 'Open documentation', openExternal('https://compose-farm.nijho.lt/'), icons.book_open),
|
||||||
@@ -743,11 +744,6 @@ function playFabIntro() {
|
|||||||
input.focus();
|
input.focus();
|
||||||
}
|
}
|
||||||
|
|
||||||
function close() {
|
|
||||||
dialog.close();
|
|
||||||
restoreTheme();
|
|
||||||
}
|
|
||||||
|
|
||||||
function exec() {
|
function exec() {
|
||||||
const cmd = filtered[selected];
|
const cmd = filtered[selected];
|
||||||
if (cmd) {
|
if (cmd) {
|
||||||
@@ -869,6 +865,119 @@ function initPage() {
|
|||||||
initMonacoEditors();
|
initMonacoEditors();
|
||||||
initSaveButton();
|
initSaveButton();
|
||||||
updateShortcutKeys();
|
updateShortcutKeys();
|
||||||
|
initLiveStats();
|
||||||
|
initSharedActionMenu();
|
||||||
|
maybeRunStackAction();
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Navigate to a stack page, optionally auto-triggering an action there. */
function navigateToStack(stack, action = null) {
    let target = `/stack/${stack}`;
    if (action) {
        target += `?action=${action}`;
    }
    window.location.href = target;
}
|
||||||
|
|
||||||
|
/**
 * Initialize the shared per-stack action menu for container rows.
 *
 * A single floating menu element (#shared-action-menu) is repositioned next
 * to whichever row button is hovered/clicked; the target stack name travels
 * via menuEl.dataset.stack. Idempotent: a data-bound flag prevents double
 * event binding across SPA re-inits.
 */
function initSharedActionMenu() {
    const menuEl = document.getElementById('shared-action-menu');
    if (!menuEl) return;
    if (menuEl.dataset.bound === '1') return;
    menuEl.dataset.bound = '1';

    // Pending delayed-close timer; cancelled when the pointer re-enters.
    let hoverTimeout = null;

    function showMenuForButton(btn, stack) {
        menuEl.dataset.stack = stack;

        // Position menu relative to button. The menu is unhidden but kept
        // invisible first so its size can be measured before placement.
        const rect = btn.getBoundingClientRect();
        menuEl.classList.remove('hidden');
        menuEl.style.visibility = 'hidden';
        const menuRect = menuEl.getBoundingClientRect();

        // Right-align the menu under the button, in document coordinates.
        const left = rect.right - menuRect.width + window.scrollX;
        const top = rect.bottom + window.scrollY;

        menuEl.style.top = `${top}px`;
        menuEl.style.left = `${left}px`;
        menuEl.style.visibility = '';

        // Pause live-stats auto-refresh while the menu is open.
        if (typeof liveStats !== 'undefined') liveStats.dropdownOpen = true;
    }

    function closeMenu() {
        menuEl.classList.add('hidden');
        if (typeof liveStats !== 'undefined') liveStats.dropdownOpen = false;
        menuEl.dataset.stack = '';
    }

    // Close after a short grace period so the pointer can travel from the
    // button into the menu without the menu disappearing.
    function scheduleClose() {
        if (hoverTimeout) clearTimeout(hoverTimeout);
        hoverTimeout = setTimeout(closeMenu, 100);
    }

    function cancelClose() {
        if (hoverTimeout) {
            clearTimeout(hoverTimeout);
            hoverTimeout = null;
        }
    }

    // Button hover: show menu (event delegation on tbody; capture phase is
    // required because mouseenter does not bubble).
    const tbody = document.getElementById('container-rows');
    if (tbody) {
        tbody.addEventListener('mouseenter', (e) => {
            const btn = e.target.closest('button[onclick^="openActionMenu"]');
            if (!btn) return;

            // Extract stack from onclick attribute
            const match = btn.getAttribute('onclick')?.match(/openActionMenu\(event,\s*'([^']+)'\)/);
            if (!match) return;

            cancelClose();
            showMenuForButton(btn, match[1]);
        }, true);

        tbody.addEventListener('mouseleave', (e) => {
            const btn = e.target.closest('button[onclick^="openActionMenu"]');
            if (btn) scheduleClose();
        }, true);
    }

    // Keep menu open while hovering over it
    menuEl.addEventListener('mouseenter', cancelClose);
    menuEl.addEventListener('mouseleave', scheduleClose);

    // Click action in menu
    menuEl.addEventListener('click', (e) => {
        const link = e.target.closest('a[data-action]');
        const stack = menuEl.dataset.stack;
        if (!link || !stack) return;

        e.preventDefault();
        navigateToStack(stack, link.dataset.action);
        closeMenu();
    });

    // Also support click on button (for touch/accessibility)
    window.openActionMenu = function(event, stack) {
        event.stopPropagation();
        showMenuForButton(event.currentTarget, stack);
    };

    // Close on outside click (but not when clicking a trigger button, which
    // handles its own open/close via openActionMenu).
    document.body.addEventListener('click', (e) => {
        if (!menuEl.classList.contains('hidden') &&
            !menuEl.contains(e.target) &&
            !e.target.closest('button[onclick^="openActionMenu"]')) {
            closeMenu();
        }
    });

    // Close on Escape
    document.body.addEventListener('keydown', (e) => {
        if (e.key === 'Escape') closeMenu();
    });
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -889,6 +998,30 @@ function tryReconnectToTask(path) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
 * On a stack page, if the URL carries `?action=...`, click the matching
 * HTMX action button once and strip the param so a reload won't re-run it.
 */
function maybeRunStackAction() {
    const stackName = document.querySelector('[data-stack-name]')?.dataset?.stackName;
    if (!stackName) return;

    const params = new URLSearchParams(window.location.search);
    const action = params.get('action');
    if (!action) return;

    const actionBtn = document.querySelector(`button[hx-post="/api/stack/${stackName}/${action}"]`);
    if (!actionBtn) return;

    // Rewrite the URL without the action param before triggering.
    params.delete('action');
    const query = params.toString();
    const cleanUrl = query ? `${window.location.pathname}?${query}` : window.location.pathname;
    history.replaceState({}, '', cleanUrl);

    if (window.htmx) {
        htmx.trigger(actionBtn, 'click');
    } else {
        actionBtn.click();
    }
}
|
||||||
|
|
||||||
// Initialize on page load
|
// Initialize on page load
|
||||||
document.addEventListener('DOMContentLoaded', function() {
|
document.addEventListener('DOMContentLoaded', function() {
|
||||||
initPage();
|
initPage();
|
||||||
@@ -930,3 +1063,443 @@ document.body.addEventListener('htmx:afterRequest', function(evt) {
|
|||||||
// Not valid JSON, ignore
|
// Not valid JSON, ignore
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// LIVE STATS PAGE
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
// State persists across SPA navigation (intervals must be cleared on re-init)
let liveStats = {
    sortCol: 9,                   // active sort column index (9 = cpu, per NUMERIC_COLS)
    sortAsc: false,               // sort direction; false = descending
    lastUpdate: 0,                // ms timestamp of the last refresh kick-off
    dropdownOpen: false,          // pause auto-refresh while a dropdown/menu is open
    scrolling: false,             // pause auto-refresh while the user is scrolling
    scrollTimer: null,            // debounce timer that clears `scrolling`
    loadingHosts: new Set(),      // hosts whose row fetch is currently in flight
    eventsBound: false,           // global listeners bound once per page lifetime
    intervals: [],                // interval ids, cleared on re-init
    updateCheckTimes: new Map(),  // per-host ms timestamp of last update check
    autoRefresh: true             // toggled via the refresh-timer button
};

const REFRESH_INTERVAL = 5000;    // ms between automatic row refreshes
const UPDATE_CHECK_TTL = 120000;  // ms between registry update checks per host
const NUMERIC_COLS = new Set([8, 9, 10, 11]); // uptime, cpu, mem, net
|
||||||
|
|
||||||
|
/**
 * Apply the text and host filters to the container table and refresh the
 * visible-row counter. Placeholder rows (loading/empty/error) are skipped.
 */
function filterTable() {
    const needle = document.getElementById('filter-input')?.value.toLowerCase() || '';
    const wantedHost = document.getElementById('host-filter')?.value || '';

    let shown = 0;
    let dataRows = 0;
    for (const row of document.querySelectorAll('#container-rows tr')) {
        // Placeholder rows span all columns via colspan; leave them alone.
        if (row.cells[0]?.colSpan > 1) continue;
        dataRows++;

        const textOk = !needle || row.textContent.toLowerCase().includes(needle);
        const hostOk = !wantedHost || row.dataset.host === wantedHost;
        const visible = textOk && hostOk;
        row.style.display = visible ? '' : 'none';
        if (visible) shown++;
    }

    const countEl = document.getElementById('container-count');
    if (countEl) {
        if (dataRows === 0) {
            countEl.textContent = '';
        } else if (needle || wantedHost) {
            countEl.textContent = `${shown} of ${dataRows} containers`;
        } else {
            countEl.textContent = `${dataRows} containers`;
        }
    }
}
window.filterTable = filterTable;
|
||||||
|
|
||||||
|
/** Sort by column `col`; clicking the already-active column flips direction. */
function sortTable(col) {
    const sameColumn = liveStats.sortCol === col;
    liveStats.sortAsc = sameColumn ? !liveStats.sortAsc : false;
    if (!sameColumn) {
        liveStats.sortCol = col;
    }
    updateSortIndicators();
    doSort();
}
window.sortTable = sortTable;
|
||||||
|
|
||||||
|
/** Refresh the header arrows so only the active sort column shows ↑/↓. */
function updateSortIndicators() {
    const headers = document.querySelectorAll('thead th');
    headers.forEach((th, index) => {
        const indicator = th.querySelector('.sort-indicator');
        if (!indicator) return;
        const active = index === liveStats.sortCol;
        indicator.textContent = active ? (liveStats.sortAsc ? '↑' : '↓') : '';
        indicator.style.opacity = active ? '1' : '0.3';
    });
}
|
||||||
|
|
||||||
|
/**
 * Sort the container table rows in place according to liveStats.sortCol /
 * sortAsc, renumbering the index column afterwards. Placeholder rows
 * (loading/empty/error) always sink to the bottom.
 */
function doSort() {
    const tbody = document.getElementById('container-rows');
    if (!tbody) return;

    const rows = Array.from(tbody.querySelectorAll('tr'));
    if (rows.length === 0) return;
    if (rows.length === 1 && rows[0].cells[0]?.colSpan > 1) return; // Empty state row

    const isNumeric = NUMERIC_COLS.has(liveStats.sortCol);
    rows.sort((a, b) => {
        // Pin placeholders/empty rows to the bottom
        const aLoading = a.classList.contains('loading-row') || a.classList.contains('host-empty') || a.cells[0]?.colSpan > 1;
        const bLoading = b.classList.contains('loading-row') || b.classList.contains('host-empty') || b.cells[0]?.colSpan > 1;
        if (aLoading && !bLoading) return 1;
        if (!aLoading && bLoading) return -1;
        if (aLoading && bLoading) return 0;

        // Cells carry a pre-computed sort key in data-sort; numeric columns
        // rely on implicit string→number coercion via subtraction.
        const aVal = a.cells[liveStats.sortCol]?.dataset?.sort ?? '';
        const bVal = b.cells[liveStats.sortCol]?.dataset?.sort ?? '';
        const cmp = isNumeric ? aVal - bVal : aVal.localeCompare(bVal);
        return liveStats.sortAsc ? cmp : -cmp;
    });

    // Renumber the first column and re-attach rows in sorted order; a
    // fragment batches the DOM moves into a single reflow.
    let index = 1;
    const fragment = document.createDocumentFragment();
    rows.forEach((row) => {
        if (row.cells.length > 1) {
            row.cells[0].textContent = index++;
        }
        fragment.appendChild(row);
    });
    tbody.appendChild(fragment);
}
|
||||||
|
|
||||||
|
/** Whether any host's row fetch is still in flight. */
function isLoading() {
    return liveStats.loadingHosts.size !== 0;
}
|
||||||
|
|
||||||
|
/** Read the comma-separated host list off the table body's data-hosts attribute. */
function getLiveStatsHosts() {
    const tbody = document.getElementById('container-rows');
    if (!tbody) return [];

    const hosts = [];
    for (const part of (tbody.dataset.hosts || '').split(',')) {
        const host = part.trim();
        if (host) hosts.push(host);
    }
    return hosts;
}
|
||||||
|
|
||||||
|
/** Build a single full-width placeholder row (loading/empty/error) for a host. */
function buildHostRow(host, message, className) {
    const cell =
        `<td colspan="12" class="text-center py-2">` +
        `<span class="text-sm opacity-60">${message}</span>` +
        `</td>`;
    return `<tr class="${className}" data-host="${host}">${cell}</tr>`;
}
|
||||||
|
|
||||||
|
/**
 * Fetch update-badge HTML for every image shown on `host` and patch it into
 * the matching table cells. Throttled per host via UPDATE_CHECK_TTL; never
 * throws (errors are logged and the host's timestamp is left untouched so
 * the next pass retries).
 */
async function checkUpdatesForHost(host) {
    // Update checks always run - they only update small cells, not disruptive
    const last = liveStats.updateCheckTimes.get(host) || 0;
    if (Date.now() - last < UPDATE_CHECK_TTL) return;

    const cells = Array.from(
        document.querySelectorAll(`tr[data-host="${host}"] td.update-cell[data-image][data-tag]`)
    );
    if (cells.length === 0) return;

    // Deduplicate image:tag pairs so the batch endpoint checks each once.
    const items = [];
    const seen = new Set();
    cells.forEach(cell => {
        // data-image/data-tag are URI-encoded in the template; decode here.
        const image = decodeURIComponent(cell.dataset.image || '');
        const tag = decodeURIComponent(cell.dataset.tag || '');
        const key = `${image}:${tag}`;
        if (!image || seen.has(key)) return;
        seen.add(key);
        items.push({ image, tag });
    });

    if (items.length === 0) return;

    try {
        const response = await fetch('/api/containers/check-updates', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ items })
        });
        if (!response.ok) return;
        const data = await response.json();
        const results = Array.isArray(data?.results) ? data.results : [];
        const htmlMap = new Map();
        results.forEach(result => {
            const key = `${result.image}:${result.tag}`;
            htmlMap.set(key, result.html);
        });

        // Write each cell only when its badge actually changed, to avoid
        // needless DOM churn.
        cells.forEach(cell => {
            const image = decodeURIComponent(cell.dataset.image || '');
            const tag = decodeURIComponent(cell.dataset.tag || '');
            const key = `${image}:${tag}`;
            const html = htmlMap.get(key);
            if (html && cell.innerHTML !== html) {
                cell.innerHTML = html;
            }
        });

        liveStats.updateCheckTimes.set(host, Date.now());
    } catch (e) {
        console.error('Update check failed:', e);
    }
}
|
||||||
|
|
||||||
|
/**
 * Merge freshly-fetched row HTML for `host` into the container table.
 *
 * Rows with ids are morphed in place (Idiomorph when available, otherwise
 * replaceWith) so focus/animation state survives refreshes; rows present in
 * the DOM but absent from the response are removed. An empty response
 * produces a "No containers" placeholder unless real rows already exist.
 */
function replaceHostRows(host, html) {
    const tbody = document.getElementById('container-rows');
    if (!tbody) return;

    // Remove loading indicator for this host if present
    const loadingRow = tbody.querySelector(`tr.loading-row[data-host="${host}"]`);
    if (loadingRow) loadingRow.remove();

    // Parse the response into detached <tr> elements via a <template>.
    const template = document.createElement('template');
    template.innerHTML = html.trim();
    let newRows = Array.from(template.content.children).filter(el => el.tagName === 'TR');

    if (newRows.length === 0) {
        // Only show empty message if we don't have any rows for this host
        const existing = tbody.querySelector(`tr[data-host="${host}"]:not(.loading-row)`);
        if (!existing) {
            template.innerHTML = buildHostRow(host, `No containers on ${host}`, 'host-empty');
            newRows = Array.from(template.content.children);
        }
    }

    // Track which IDs we've seen in this update
    const newIds = new Set();

    newRows.forEach(newRow => {
        const id = newRow.id;
        if (id) newIds.add(id);

        if (id) {
            const existing = document.getElementById(id);
            if (existing) {
                // Morph in place if Idiomorph is available, otherwise replace
                if (typeof Idiomorph !== 'undefined') {
                    Idiomorph.morph(existing, newRow);
                } else {
                    existing.replaceWith(newRow);
                }

                // Re-process HTMX if needed (though inner content usually carries attributes)
                const morphedRow = document.getElementById(id);
                if (window.htmx) htmx.process(morphedRow);

                // Trigger refresh animation
                if (morphedRow) {
                    morphedRow.classList.add('row-updated');
                    setTimeout(() => morphedRow.classList.remove('row-updated'), 500);
                }
            } else {
                // New row - append (will be sorted later)
                tbody.appendChild(newRow);
                if (window.htmx) htmx.process(newRow);
                // Animate new rows too
                newRow.classList.add('row-updated');
                setTimeout(() => newRow.classList.remove('row-updated'), 500);
            }
        } else {
            // Fallback for rows without ID (like error/empty messages)
            // Just append them, cleaning up previous generic rows handled below
            tbody.appendChild(newRow);
        }
    });

    // Remove orphaned rows for this host (rows that exist in DOM but not in new response).
    // Orphan detection relies on ids: data rows carry ids, while placeholder
    // (error/empty) rows do not, so the two cases are handled separately.
    const currentHostRows = Array.from(tbody.querySelectorAll(`tr[data-host="${host}"]`));
    currentHostRows.forEach(row => {
        // An id-bearing row not present in this response is stale.
        if (row.id && !newIds.has(row.id)) {
            row.remove();
        }
        // Also remove old empty/error messages if we now have real data rows.
        if (!row.id && newRows.length > 0 && newRows[0].id) {
            row.remove();
        }
    });

    liveStats.loadingHosts.delete(host);
    checkUpdatesForHost(host);
    scheduleRowUpdate();
}
|
||||||
|
|
||||||
|
/**
 * Fetch fresh row HTML for one host and merge it into the table.
 * Marks the host as loading for the duration; on failure, degrades through
 * two error-display fallbacks rather than throwing.
 */
async function loadHostRows(host) {
    liveStats.loadingHosts.add(host);
    try {
        const response = await fetch(`/api/containers/rows/${encodeURIComponent(host)}`);
        // A non-OK response is treated as "no rows" rather than an error.
        const html = response.ok ? await response.text() : '';
        replaceHostRows(host, html);
    } catch (e) {
        console.error(`Failed to load ${host}:`, e);
        const msg = e.message || String(e);
        // Fallback to simpler error display if replaceHostRows fails (e.g. Idiomorph missing)
        try {
            replaceHostRows(host, buildHostRow(host, `Error: ${msg}`, 'text-error'));
        } catch (err2) {
            // Last resort: find row and force innerHTML
            const tbody = document.getElementById('container-rows');
            const row = tbody?.querySelector(`tr[data-host="${host}"]`);
            if (row) row.innerHTML = `<td colspan="12" class="text-center text-error">Error: ${msg}</td>`;
        }
    } finally {
        // Always clear the loading flag so auto-refresh isn't wedged.
        liveStats.loadingHosts.delete(host);
    }
}
|
||||||
|
|
||||||
|
/** Kick off a row refresh for every configured host. */
function refreshLiveStats() {
    // Don't yank rows out from under an open menu or mid-scroll.
    if (liveStats.dropdownOpen || liveStats.scrolling) return;

    const hosts = getLiveStatsHosts();
    if (!hosts.length) return;

    liveStats.lastUpdate = Date.now();
    for (const host of hosts) {
        loadHostRows(host);
    }
}
window.refreshLiveStats = refreshLiveStats;
|
||||||
|
|
||||||
|
/** Flip auto-refresh on/off and restyle the timer button to match. */
function toggleAutoRefresh() {
    const enabled = !liveStats.autoRefresh;
    liveStats.autoRefresh = enabled;

    const timerBtn = document.getElementById('refresh-timer');
    if (timerBtn) {
        timerBtn.classList.toggle('btn-error', !enabled);
        timerBtn.classList.toggle('btn-outline', enabled);
    }

    if (enabled) {
        // Re-enabling: refresh right away instead of waiting for the interval.
        refreshLiveStats();
    } else {
        // Disabling: still run update checks so the badges stay current.
        for (const host of getLiveStatsHosts()) {
            checkUpdatesForHost(host);
        }
    }
}
window.toggleAutoRefresh = toggleAutoRefresh;
|
||||||
|
|
||||||
|
/**
 * Initialize the Live Stats page: reset per-page state, bind global
 * pause-detection listeners once per page lifetime, and start the refresh
 * and countdown-display intervals. Safe to call on every SPA navigation —
 * previous intervals are cleared and event binding is guarded by
 * liveStats.eventsBound.
 */
function initLiveStats() {
    // Bail when the page has no live-stats UI.
    if (!document.getElementById('refresh-timer')) return;

    // Clear previous intervals (important for SPA navigation)
    liveStats.intervals.forEach(clearInterval);
    liveStats.intervals = [];
    liveStats.lastUpdate = Date.now();
    liveStats.dropdownOpen = false;
    liveStats.scrolling = false;
    if (liveStats.scrollTimer) clearTimeout(liveStats.scrollTimer);
    liveStats.scrollTimer = null;
    liveStats.loadingHosts.clear();
    liveStats.updateCheckTimes = new Map();
    liveStats.autoRefresh = true;

    if (!liveStats.eventsBound) {
        liveStats.eventsBound = true;

        // Dropdown pauses refresh
        document.body.addEventListener('click', e => {
            liveStats.dropdownOpen = !!e.target.closest('.dropdown');
        });
        document.body.addEventListener('focusin', e => {
            if (e.target.closest('.dropdown')) liveStats.dropdownOpen = true;
        });
        document.body.addEventListener('focusout', () => {
            // Delay so focus has settled on its new element before we check.
            setTimeout(() => {
                liveStats.dropdownOpen = !!document.activeElement?.closest('.dropdown');
            }, 150);
        });
        document.body.addEventListener('keydown', e => {
            if (e.key === 'Escape') liveStats.dropdownOpen = false;
        });

        // Pause refresh while scrolling (helps on slow mobile browsers)
        window.addEventListener('scroll', () => {
            liveStats.scrolling = true;
            if (liveStats.scrollTimer) clearTimeout(liveStats.scrollTimer);
            liveStats.scrollTimer = setTimeout(() => {
                liveStats.scrolling = false;
            }, 200);
        }, { passive: true });
    }

    // Auto-refresh every 5 seconds (skip if disabled, loading, or dropdown open)
    liveStats.intervals.push(setInterval(() => {
        if (!liveStats.autoRefresh) return;
        if (liveStats.dropdownOpen || liveStats.scrolling || isLoading()) return;
        refreshLiveStats();
    }, REFRESH_INTERVAL));

    // Timer display (updates every 100ms)
    liveStats.intervals.push(setInterval(() => {
        const timer = document.getElementById('refresh-timer');
        if (!timer) {
            // The page was navigated away; self-destruct all intervals.
            liveStats.intervals.forEach(clearInterval);
            return;
        }

        const loading = isLoading();
        const paused = liveStats.dropdownOpen || liveStats.scrolling;
        const elapsed = Date.now() - liveStats.lastUpdate;
        window.refreshPaused = paused || loading || !liveStats.autoRefresh;

        // Update refresh timer button
        let text;
        if (!liveStats.autoRefresh) {
            text = 'OFF';
        } else if (paused) {
            text = '❚❚';
        } else {
            const remaining = Math.max(0, REFRESH_INTERVAL - elapsed);
            text = loading ? '↻ …' : `↻ ${Math.ceil(remaining / 1000)}s`;
        }
        // Only touch the DOM when the label actually changed.
        if (timer.textContent !== text) {
            timer.textContent = text;
        }

        // Update "last updated" display
        const lastUpdatedEl = document.getElementById('last-updated');
        if (lastUpdatedEl) {
            const secs = Math.floor(elapsed / 1000);
            const updatedText = secs < 5 ? 'Updated just now' : `Updated ${secs}s ago`;
            if (lastUpdatedEl.textContent !== updatedText) {
                lastUpdatedEl.textContent = updatedText;
            }
        }
    }, 100));

    updateSortIndicators();
    refreshLiveStats();
}
|
||||||
|
|
||||||
|
/**
 * Re-apply sorting and filtering after rows change.
 * Runs synchronously (despite the name) so freshly-swapped rows never
 * flash unsorted/unfiltered before the next frame.
 */
function scheduleRowUpdate() {
    // Sort and filter immediately to prevent flicker
    doSort();
    filterTable();
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// STACKS BY HOST FILTER
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
/** Filter the "stacks by host" listing by search text and selected host. */
function sbhFilter() {
    const searchText = (document.getElementById('sbh-filter')?.value || '').toLowerCase();
    const selectedHost = document.getElementById('sbh-host-select')?.value || '';

    for (const group of document.querySelectorAll('.sbh-group')) {
        // A host mismatch hides the whole group outright.
        if (selectedHost && group.dataset.h !== selectedHost) {
            group.hidden = true;
            continue;
        }

        let matches = 0;
        for (const item of group.querySelectorAll('li[data-s]')) {
            const keep = !searchText || item.dataset.s.includes(searchText);
            item.hidden = !keep;
            if (keep) matches++;
        }
        // Hide groups whose every stack was filtered out.
        group.hidden = matches === 0;
    }
}
window.sbhFilter = sbhFilter;
|
||||||
|
|||||||
@@ -26,6 +26,23 @@
|
|||||||
</script>
|
</script>
|
||||||
</head>
|
</head>
|
||||||
<body class="min-h-screen bg-base-200">
|
<body class="min-h-screen bg-base-200">
|
||||||
|
<svg style="display: none">
|
||||||
|
<symbol id="icon-menu" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<circle cx="12" cy="5" r="1" /><circle cx="12" cy="12" r="1" /><circle cx="12" cy="19" r="1" />
|
||||||
|
</symbol>
|
||||||
|
<symbol id="icon-restart" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<path d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||||
|
</symbol>
|
||||||
|
<symbol id="icon-pull" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<path d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4" />
|
||||||
|
</symbol>
|
||||||
|
<symbol id="icon-update" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<path d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-8l-4-4m0 0L8 8m4-4v12" />
|
||||||
|
</symbol>
|
||||||
|
<symbol id="icon-logs" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<path d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z" />
|
||||||
|
</symbol>
|
||||||
|
</svg>
|
||||||
<div class="drawer lg:drawer-open">
|
<div class="drawer lg:drawer-open">
|
||||||
<input id="drawer-toggle" type="checkbox" class="drawer-toggle" />
|
<input id="drawer-toggle" type="checkbox" class="drawer-toggle" />
|
||||||
|
|
||||||
@@ -80,6 +97,8 @@
|
|||||||
|
|
||||||
<!-- Scripts - HTMX first -->
|
<!-- Scripts - HTMX first -->
|
||||||
<script src="https://unpkg.com/htmx.org@2.0.4" data-vendor="htmx.js"></script>
|
<script src="https://unpkg.com/htmx.org@2.0.4" data-vendor="htmx.js"></script>
|
||||||
|
<script src="https://unpkg.com/idiomorph/dist/idiomorph.min.js"></script>
|
||||||
|
<script src="https://unpkg.com/idiomorph/dist/idiomorph-ext.min.js"></script>
|
||||||
<script src="https://cdn.jsdelivr.net/npm/@xterm/xterm@5.5.0/lib/xterm.js" data-vendor="xterm.js"></script>
|
<script src="https://cdn.jsdelivr.net/npm/@xterm/xterm@5.5.0/lib/xterm.js" data-vendor="xterm.js"></script>
|
||||||
<script src="https://cdn.jsdelivr.net/npm/@xterm/addon-fit@0.10.0/lib/addon-fit.js" data-vendor="xterm-fit.js"></script>
|
<script src="https://cdn.jsdelivr.net/npm/@xterm/addon-fit@0.10.0/lib/addon-fit.js" data-vendor="xterm-fit.js"></script>
|
||||||
<script src="/static/app.js"></script>
|
<script src="/static/app.js"></script>
|
||||||
|
|||||||
97
src/compose_farm/web/templates/containers.html
Normal file
97
src/compose_farm/web/templates/containers.html
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
{% from "partials/components.html" import page_header %}
|
||||||
|
{% from "partials/icons.html" import refresh_cw %}
|
||||||
|
{% block title %}Live Stats - Compose Farm{% endblock %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
<div class="max-w-7xl">
|
||||||
|
{{ page_header("Live Stats", "All running containers across hosts") }}
|
||||||
|
|
||||||
|
{% if not glances_enabled %}
|
||||||
|
<div class="alert alert-warning mb-6">
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" class="stroke-current shrink-0 h-6 w-6" fill="none" viewBox="0 0 24 24"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" /></svg>
|
||||||
|
<div>
|
||||||
|
<h3 class="font-bold">Glances not configured</h3>
|
||||||
|
<div class="text-xs">Add <code class="bg-base-300 px-1 rounded">glances_stack: glances</code> to your config and deploy Glances on all hosts.</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
{% else %}
|
||||||
|
|
||||||
|
<!-- Action Bar -->
|
||||||
|
<div class="flex flex-wrap items-center gap-4 mb-6">
|
||||||
|
<div class="tooltip" data-tip="Refresh now">
|
||||||
|
<button class="btn btn-outline btn-sm" type="button" onclick="refreshLiveStats()">
|
||||||
|
{{ refresh_cw() }} Refresh
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div class="tooltip" data-tip="Click to toggle auto-refresh">
|
||||||
|
<button class="btn btn-outline btn-sm font-mono w-20 justify-center"
|
||||||
|
id="refresh-timer" onclick="toggleAutoRefresh()">↻</button>
|
||||||
|
</div>
|
||||||
|
<input type="text" id="filter-input" placeholder="Filter containers..."
|
||||||
|
class="input input-bordered input-sm w-64" onkeyup="filterTable()">
|
||||||
|
<select id="host-filter" class="select select-bordered select-sm" onchange="filterTable()">
|
||||||
|
<option value="">All hosts</option>
|
||||||
|
{% for host in hosts %}<option value="{{ host }}">{{ host }}</option>{% endfor %}
|
||||||
|
</select>
|
||||||
|
<span id="container-count" class="text-sm text-base-content/60"></span>
|
||||||
|
<span id="last-updated" class="text-sm text-base-content/40 ml-auto"></span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<!-- Container Table -->
|
||||||
|
<div class="card bg-base-100 shadow overflow-x-auto">
|
||||||
|
<table class="table table-zebra table-sm w-full">
|
||||||
|
<thead class="sticky top-0 bg-base-200">
|
||||||
|
<tr>
|
||||||
|
<th class="w-8">#</th>
|
||||||
|
<th class="cursor-pointer" onclick="sortTable(1)">Stack<span class="sort-indicator"></span></th>
|
||||||
|
<th class="cursor-pointer" onclick="sortTable(2)">Service<span class="sort-indicator"></span></th>
|
||||||
|
<th></th>
|
||||||
|
<th class="cursor-pointer" onclick="sortTable(4)">Host<span class="sort-indicator"></span></th>
|
||||||
|
<th class="cursor-pointer" onclick="sortTable(5)">Image<span class="sort-indicator"></span></th>
|
||||||
|
<th class="w-16">Update</th>
|
||||||
|
<th class="cursor-pointer" onclick="sortTable(7)">Status<span class="sort-indicator"></span></th>
|
||||||
|
<th class="cursor-pointer text-right" onclick="sortTable(8)">Uptime<span class="sort-indicator"></span></th>
|
||||||
|
<th class="cursor-pointer text-right" onclick="sortTable(9)">CPU<span class="sort-indicator"></span></th>
|
||||||
|
<th class="cursor-pointer text-right" onclick="sortTable(10)">Mem<span class="sort-indicator"></span></th>
|
||||||
|
<th class="cursor-pointer text-right" onclick="sortTable(11)">Net I/O<span class="sort-indicator"></span></th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody id="container-rows" data-hosts="{{ hosts | join(',') }}">
|
||||||
|
{% for host in hosts %}
|
||||||
|
<tr class="loading-row" data-host="{{ host }}">
|
||||||
|
<td colspan="12" class="text-center py-2">
|
||||||
|
<span class="loading loading-spinner loading-xs"></span>
|
||||||
|
<span class="text-sm opacity-60">Loading {{ host }}...</span>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<!-- Shared Action Menu -->
|
||||||
|
<ul id="shared-action-menu" class="menu menu-sm bg-base-200 rounded-box shadow-lg w-36 absolute z-50 p-2 hidden">
|
||||||
|
<li><a data-action="restart"><svg class="h-4 w-4"><use href="#icon-restart" /></svg>Restart</a></li>
|
||||||
|
<li><a data-action="pull"><svg class="h-4 w-4"><use href="#icon-pull" /></svg>Pull</a></li>
|
||||||
|
<li><a data-action="update"><svg class="h-4 w-4"><use href="#icon-update" /></svg>Update</a></li>
|
||||||
|
<li><a data-action="logs"><svg class="h-4 w-4"><use href="#icon-logs" /></svg>Logs</a></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
{% endblock %}
|
||||||
|
|
||||||
|
{% block scripts %}
|
||||||
|
{% if glances_enabled %}
|
||||||
|
<style>
|
||||||
|
.sort-indicator { display: inline-block; width: 1em; text-align: center; opacity: 0.5; }
|
||||||
|
.high-usage { background-color: oklch(var(--er) / 0.15) !important; }
|
||||||
|
/* Refresh animation */
|
||||||
|
@keyframes row-pulse {
|
||||||
|
0% { background-color: oklch(var(--p) / 0.2); }
|
||||||
|
100% { background-color: transparent; }
|
||||||
|
}
|
||||||
|
.row-updated { animation: row-pulse 0.5s ease-out; }
|
||||||
|
</style>
|
||||||
|
{% endif %}
|
||||||
|
{% endblock %}
|
||||||
@@ -53,6 +53,13 @@
|
|||||||
{% include "partials/stacks_by_host.html" %}
|
{% include "partials/stacks_by_host.html" %}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Host Resources (Glances) -->
|
||||||
|
<div id="glances-stats"
|
||||||
|
hx-get="/api/glances"
|
||||||
|
hx-trigger="load, cf:refresh from:body, every 30s"
|
||||||
|
hx-swap="innerHTML">
|
||||||
|
</div>
|
||||||
|
|
||||||
<!-- Hosts Configuration -->
|
<!-- Hosts Configuration -->
|
||||||
{% call collapse("Hosts (" ~ (hosts | length) ~ ")", icon=server()) %}
|
{% call collapse("Hosts (" ~ (hosts | length) ~ ")", icon=server()) %}
|
||||||
{% call table() %}
|
{% call table() %}
|
||||||
|
|||||||
66
src/compose_farm/web/templates/partials/glances.html
Normal file
66
src/compose_farm/web/templates/partials/glances.html
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
{# Glances resource stats display #}
|
||||||
|
{% from "partials/icons.html" import cpu, memory_stick, gauge, server, activity, hard_drive, arrow_down_up, refresh_cw %}
|
||||||
|
|
||||||
|
{% macro progress_bar(percent, color="primary") %}
|
||||||
|
<div class="flex items-center gap-2 min-w-32">
|
||||||
|
<progress class="progress progress-{{ color }} flex-1" value="{{ percent }}" max="100"></progress>
|
||||||
|
<span class="text-xs w-10 text-right">{{ "%.1f"|format(percent) }}%</span>
|
||||||
|
</div>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro format_rate(bytes_per_sec) %}
|
||||||
|
{%- if bytes_per_sec >= 1048576 -%}
|
||||||
|
{{ "%.1f"|format(bytes_per_sec / 1048576) }} MB/s
|
||||||
|
{%- elif bytes_per_sec >= 1024 -%}
|
||||||
|
{{ "%.1f"|format(bytes_per_sec / 1024) }} KB/s
|
||||||
|
{%- else -%}
|
||||||
|
{{ "%.0f"|format(bytes_per_sec) }} B/s
|
||||||
|
{%- endif -%}
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro host_row(host_stats) %}
|
||||||
|
<tr>
|
||||||
|
<td class="font-medium">{{ server(14) }} {{ host_stats.host }}</td>
|
||||||
|
{% if host_stats.error %}
|
||||||
|
<td colspan="5" class="text-error text-xs">{{ host_stats.error }}</td>
|
||||||
|
{% else %}
|
||||||
|
<td>{{ progress_bar(host_stats.cpu_percent, "info") }}</td>
|
||||||
|
<td>{{ progress_bar(host_stats.mem_percent, "success") }}</td>
|
||||||
|
<td>{{ progress_bar(host_stats.disk_percent, "warning") }}</td>
|
||||||
|
<td class="text-xs font-mono">↓{{ format_rate(host_stats.net_rx_rate) }} ↑{{ format_rate(host_stats.net_tx_rate) }}</td>
|
||||||
|
<td class="text-sm">{{ "%.1f"|format(host_stats.load) }}</td>
|
||||||
|
{% endif %}
|
||||||
|
</tr>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
<div class="card bg-base-100 shadow mt-4 mb-4">
|
||||||
|
<div class="card-body p-4">
|
||||||
|
<div class="flex items-center justify-between">
|
||||||
|
<h2 class="card-title text-base gap-2">{{ activity(18) }} Host Resources</h2>
|
||||||
|
<button class="btn btn-ghost btn-xs opacity-50 hover:opacity-100"
|
||||||
|
hx-get="/api/glances" hx-target="#glances-stats" hx-swap="innerHTML"
|
||||||
|
title="Refresh">
|
||||||
|
{{ refresh_cw(14) }}
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div class="overflow-x-auto">
|
||||||
|
<table class="table table-sm">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Host</th>
|
||||||
|
<th>{{ cpu(14) }} CPU</th>
|
||||||
|
<th>{{ memory_stick(14) }} Memory</th>
|
||||||
|
<th>{{ hard_drive(14) }} Disk</th>
|
||||||
|
<th>{{ arrow_down_up(14) }} Net</th>
|
||||||
|
<th>{{ gauge(14) }} Load</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
{% for host_name, host_stats in stats.items() %}
|
||||||
|
{{ host_row(host_stats) }}
|
||||||
|
{% endfor %}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
@@ -176,3 +176,46 @@
|
|||||||
<path d="M15 3h6v6"/><path d="M10 14 21 3"/><path d="M18 13v6a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h6"/>
|
<path d="M15 3h6v6"/><path d="M10 14 21 3"/><path d="M18 13v6a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h6"/>
|
||||||
</svg>
|
</svg>
|
||||||
{% endmacro %}
|
{% endmacro %}
|
||||||
|
|
||||||
|
{# Resource monitoring icons #}
|
||||||
|
{% macro cpu(size=16) %}
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="{{ size }}" height="{{ size }}" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<rect width="16" height="16" x="4" y="4" rx="2"/><rect width="6" height="6" x="9" y="9" rx="1"/><path d="M15 2v2"/><path d="M15 20v2"/><path d="M2 15h2"/><path d="M2 9h2"/><path d="M20 15h2"/><path d="M20 9h2"/><path d="M9 2v2"/><path d="M9 20v2"/>
|
||||||
|
</svg>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro memory_stick(size=16) %}
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="{{ size }}" height="{{ size }}" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<path d="M6 19v-3"/><path d="M10 19v-3"/><path d="M14 19v-3"/><path d="M18 19v-3"/><path d="M8 11V9"/><path d="M16 11V9"/><path d="M12 11V9"/><path d="M2 15h20"/><path d="M2 7a2 2 0 0 1 2-2h16a2 2 0 0 1 2 2v1.1a2 2 0 0 0 0 3.837V17a2 2 0 0 1-2 2H4a2 2 0 0 1-2-2v-5.1a2 2 0 0 0 0-3.837z"/>
|
||||||
|
</svg>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro gauge(size=16) %}
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="{{ size }}" height="{{ size }}" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<path d="m12 14 4-4"/><path d="M3.34 19a10 10 0 1 1 17.32 0"/>
|
||||||
|
</svg>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro activity(size=16) %}
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="{{ size }}" height="{{ size }}" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<path d="M22 12h-2.48a2 2 0 0 0-1.93 1.46l-2.35 8.36a.25.25 0 0 1-.48 0L9.24 2.18a.25.25 0 0 0-.48 0l-2.35 8.36A2 2 0 0 1 4.49 12H2"/>
|
||||||
|
</svg>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro arrow_down_up(size=16) %}
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="{{ size }}" height="{{ size }}" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<path d="m3 16 4 4 4-4"/><path d="M7 20V4"/><path d="m21 8-4-4-4 4"/><path d="M17 4v16"/>
|
||||||
|
</svg>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro hard_drive(size=16) %}
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="{{ size }}" height="{{ size }}" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<line x1="22" x2="2" y1="12" y2="12"/><path d="M5.45 5.11 2 12v6a2 2 0 0 0 2 2h16a2 2 0 0 0 2-2v-6l-3.45-6.89A2 2 0 0 0 16.76 4H7.24a2 2 0 0 0-1.79 1.11z"/><line x1="6" x2="6.01" y1="16" y2="16"/><line x1="10" x2="10.01" y1="16" y2="16"/>
|
||||||
|
</svg>
|
||||||
|
{% endmacro %}
|
||||||
|
|
||||||
|
{% macro box(size=16) %}
|
||||||
|
<svg xmlns="http://www.w3.org/2000/svg" width="{{ size }}" height="{{ size }}" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<path d="M21 8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16Z"/><path d="m3.3 7 8.7 5 8.7-5"/><path d="M12 22V12"/>
|
||||||
|
</svg>
|
||||||
|
{% endmacro %}
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
|
|||||||
{% from "partials/icons.html" import home, search, terminal %}
|
{% from "partials/icons.html" import home, search, terminal, box %}
|
||||||
<!-- Navigation Links -->
|
<!-- Navigation Links -->
|
||||||
<div class="mb-4">
|
<div class="mb-4">
|
||||||
<ul class="menu" hx-boost="true" hx-target="#main-content" hx-select="#main-content" hx-swap="outerHTML">
|
<ul class="menu" hx-boost="true" hx-target="#main-content" hx-select="#main-content" hx-swap="outerHTML">
|
||||||
<li><a href="/" class="font-semibold">{{ home() }} Dashboard</a></li>
|
<li><a href="/" class="font-semibold">{{ home() }} Dashboard</a></li>
|
||||||
|
<li><a href="/live-stats" class="font-semibold">{{ box() }} Live Stats</a></li>
|
||||||
<li><a href="/console" class="font-semibold">{{ terminal() }} Console</a></li>
|
<li><a href="/console" class="font-semibold">{{ terminal() }} Console</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -20,20 +20,4 @@
|
|||||||
{% else %}
|
{% else %}
|
||||||
<p class="text-base-content/60 italic">No stacks currently running.</p>
|
<p class="text-base-content/60 italic">No stacks currently running.</p>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
<script>
|
|
||||||
function sbhFilter() {
|
|
||||||
const q = (document.getElementById('sbh-filter')?.value || '').toLowerCase();
|
|
||||||
const h = document.getElementById('sbh-host-select')?.value || '';
|
|
||||||
document.querySelectorAll('.sbh-group').forEach(g => {
|
|
||||||
if (h && g.dataset.h !== h) { g.hidden = true; return; }
|
|
||||||
let n = 0;
|
|
||||||
g.querySelectorAll('li[data-s]').forEach(li => {
|
|
||||||
const show = !q || li.dataset.s.includes(q);
|
|
||||||
li.hidden = !show;
|
|
||||||
if (show) n++;
|
|
||||||
});
|
|
||||||
g.hidden = !n;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
{% endcall %}
|
{% endcall %}
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
{% block title %}{{ name }} - Compose Farm{% endblock %}
|
{% block title %}{{ name }} - Compose Farm{% endblock %}
|
||||||
|
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<div class="max-w-5xl" data-services="{{ services | join(',') }}" data-containers='{{ containers | tojson }}' data-website-urls='{{ website_urls | tojson }}'>
|
<div class="max-w-5xl" data-stack-name="{{ name }}" data-services="{{ services | join(',') }}" data-containers='{{ containers | tojson }}' data-website-urls='{{ website_urls | tojson }}'>
|
||||||
<div class="mb-6">
|
<div class="mb-6">
|
||||||
<h1 class="text-3xl font-bold rainbow-hover">{{ name }}</h1>
|
<h1 class="text-3xl font-bold rainbow-hover">{{ name }}</h1>
|
||||||
<div class="flex flex-wrap items-center gap-2 mt-2">
|
<div class="flex flex-wrap items-center gap-2 mt-2">
|
||||||
|
|||||||
269
tests/test_containers.py
Normal file
269
tests/test_containers.py
Normal file
@@ -0,0 +1,269 @@
|
|||||||
|
"""Tests for Containers page routes."""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from fastapi.testclient import TestClient
|
||||||
|
|
||||||
|
from compose_farm.config import Config, Host
|
||||||
|
from compose_farm.glances import ContainerStats
|
||||||
|
from compose_farm.web.app import create_app
|
||||||
|
from compose_farm.web.routes.containers import (
|
||||||
|
_format_bytes,
|
||||||
|
_infer_stack_service,
|
||||||
|
_parse_image,
|
||||||
|
_parse_uptime_seconds,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Byte size constants for tests
|
||||||
|
KB = 1024
|
||||||
|
MB = KB * 1024
|
||||||
|
GB = MB * 1024
|
||||||
|
|
||||||
|
|
||||||
|
class TestFormatBytes:
|
||||||
|
"""Tests for _format_bytes function (uses humanize library)."""
|
||||||
|
|
||||||
|
def test_bytes(self) -> None:
|
||||||
|
assert _format_bytes(500) == "500 Bytes"
|
||||||
|
assert _format_bytes(0) == "0 Bytes"
|
||||||
|
|
||||||
|
def test_kilobytes(self) -> None:
|
||||||
|
assert _format_bytes(KB) == "1.0 KiB"
|
||||||
|
assert _format_bytes(KB * 5) == "5.0 KiB"
|
||||||
|
assert _format_bytes(KB + 512) == "1.5 KiB"
|
||||||
|
|
||||||
|
def test_megabytes(self) -> None:
|
||||||
|
assert _format_bytes(MB) == "1.0 MiB"
|
||||||
|
assert _format_bytes(MB * 100) == "100.0 MiB"
|
||||||
|
assert _format_bytes(MB * 512) == "512.0 MiB"
|
||||||
|
|
||||||
|
def test_gigabytes(self) -> None:
|
||||||
|
assert _format_bytes(GB) == "1.0 GiB"
|
||||||
|
assert _format_bytes(GB * 2) == "2.0 GiB"
|
||||||
|
|
||||||
|
|
||||||
|
class TestParseImage:
|
||||||
|
"""Tests for _parse_image function."""
|
||||||
|
|
||||||
|
def test_simple_image_with_tag(self) -> None:
|
||||||
|
assert _parse_image("nginx:latest") == ("nginx", "latest")
|
||||||
|
assert _parse_image("redis:7") == ("redis", "7")
|
||||||
|
|
||||||
|
def test_image_without_tag(self) -> None:
|
||||||
|
assert _parse_image("nginx") == ("nginx", "latest")
|
||||||
|
|
||||||
|
def test_registry_image(self) -> None:
|
||||||
|
assert _parse_image("ghcr.io/user/repo:v1.0") == ("ghcr.io/user/repo", "v1.0")
|
||||||
|
assert _parse_image("docker.io/library/nginx:alpine") == (
|
||||||
|
"docker.io/library/nginx",
|
||||||
|
"alpine",
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_image_with_port_in_registry(self) -> None:
|
||||||
|
# Registry with port should not be confused with tag
|
||||||
|
assert _parse_image("localhost:5000/myimage") == ("localhost:5000/myimage", "latest")
|
||||||
|
|
||||||
|
|
||||||
|
class TestParseUptimeSeconds:
|
||||||
|
"""Tests for _parse_uptime_seconds function."""
|
||||||
|
|
||||||
|
def test_seconds(self) -> None:
|
||||||
|
assert _parse_uptime_seconds("17 seconds") == 17
|
||||||
|
assert _parse_uptime_seconds("1 second") == 1
|
||||||
|
|
||||||
|
def test_minutes(self) -> None:
|
||||||
|
assert _parse_uptime_seconds("5 minutes") == 300
|
||||||
|
assert _parse_uptime_seconds("1 minute") == 60
|
||||||
|
|
||||||
|
def test_hours(self) -> None:
|
||||||
|
assert _parse_uptime_seconds("2 hours") == 7200
|
||||||
|
assert _parse_uptime_seconds("an hour") == 3600
|
||||||
|
assert _parse_uptime_seconds("1 hour") == 3600
|
||||||
|
|
||||||
|
def test_days(self) -> None:
|
||||||
|
assert _parse_uptime_seconds("3 days") == 259200
|
||||||
|
assert _parse_uptime_seconds("a day") == 86400
|
||||||
|
|
||||||
|
def test_empty(self) -> None:
|
||||||
|
assert _parse_uptime_seconds("") == 0
|
||||||
|
assert _parse_uptime_seconds("-") == 0
|
||||||
|
|
||||||
|
|
||||||
|
class TestInferStackService:
|
||||||
|
"""Tests for _infer_stack_service function."""
|
||||||
|
|
||||||
|
def test_underscore_separator(self) -> None:
|
||||||
|
assert _infer_stack_service("mystack_web_1") == ("mystack", "web")
|
||||||
|
assert _infer_stack_service("app_db_1") == ("app", "db")
|
||||||
|
|
||||||
|
def test_hyphen_separator(self) -> None:
|
||||||
|
assert _infer_stack_service("mystack-web-1") == ("mystack", "web")
|
||||||
|
assert _infer_stack_service("compose-farm-api-1") == ("compose", "farm-api")
|
||||||
|
|
||||||
|
def test_simple_name(self) -> None:
|
||||||
|
# No separator - use name for both
|
||||||
|
assert _infer_stack_service("nginx") == ("nginx", "nginx")
|
||||||
|
assert _infer_stack_service("traefik") == ("traefik", "traefik")
|
||||||
|
|
||||||
|
def test_single_part_with_separator(self) -> None:
|
||||||
|
# Edge case: separator with empty second part
|
||||||
|
assert _infer_stack_service("single_") == ("single", "")
|
||||||
|
|
||||||
|
|
||||||
|
class TestContainersPage:
|
||||||
|
"""Tests for containers page endpoint."""
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def client(self) -> TestClient:
|
||||||
|
app = create_app()
|
||||||
|
return TestClient(app)
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_config(self) -> Config:
|
||||||
|
return Config(
|
||||||
|
compose_dir=Path("/opt/compose"),
|
||||||
|
hosts={
|
||||||
|
"nas": Host(address="192.168.1.6"),
|
||||||
|
"nuc": Host(address="192.168.1.2"),
|
||||||
|
},
|
||||||
|
stacks={"test": "nas"},
|
||||||
|
glances_stack="glances",
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_containers_page_without_glances(self, client: TestClient) -> None:
|
||||||
|
"""Test containers page shows warning when Glances not configured."""
|
||||||
|
with patch("compose_farm.web.routes.containers.get_config") as mock:
|
||||||
|
mock.return_value = Config(
|
||||||
|
compose_dir=Path("/opt/compose"),
|
||||||
|
hosts={"nas": Host(address="192.168.1.6")},
|
||||||
|
stacks={"test": "nas"},
|
||||||
|
glances_stack=None,
|
||||||
|
)
|
||||||
|
response = client.get("/live-stats")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert "Glances not configured" in response.text
|
||||||
|
|
||||||
|
def test_containers_page_with_glances(self, client: TestClient, mock_config: Config) -> None:
|
||||||
|
"""Test containers page loads when Glances is configured."""
|
||||||
|
with patch("compose_farm.web.routes.containers.get_config") as mock:
|
||||||
|
mock.return_value = mock_config
|
||||||
|
response = client.get("/live-stats")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert "Live Stats" in response.text
|
||||||
|
assert "container-rows" in response.text
|
||||||
|
|
||||||
|
|
||||||
|
class TestContainersRowsAPI:
|
||||||
|
"""Tests for containers rows HTML endpoint."""
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def client(self) -> TestClient:
|
||||||
|
app = create_app()
|
||||||
|
return TestClient(app)
|
||||||
|
|
||||||
|
def test_rows_without_glances(self, client: TestClient) -> None:
|
||||||
|
"""Test rows endpoint returns error when Glances not configured."""
|
||||||
|
with patch("compose_farm.web.routes.containers.get_config") as mock:
|
||||||
|
mock.return_value = Config(
|
||||||
|
compose_dir=Path("/opt/compose"),
|
||||||
|
hosts={"nas": Host(address="192.168.1.6")},
|
||||||
|
stacks={"test": "nas"},
|
||||||
|
glances_stack=None,
|
||||||
|
)
|
||||||
|
response = client.get("/api/containers/rows")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert "Glances not configured" in response.text
|
||||||
|
|
||||||
|
def test_rows_returns_html(self, client: TestClient) -> None:
|
||||||
|
"""Test rows endpoint returns HTML table rows."""
|
||||||
|
mock_containers = [
|
||||||
|
ContainerStats(
|
||||||
|
name="nginx",
|
||||||
|
host="nas",
|
||||||
|
status="running",
|
||||||
|
image="nginx:latest",
|
||||||
|
cpu_percent=5.5,
|
||||||
|
memory_usage=104857600,
|
||||||
|
memory_limit=1073741824,
|
||||||
|
memory_percent=9.77,
|
||||||
|
network_rx=1000,
|
||||||
|
network_tx=500,
|
||||||
|
uptime="2 hours",
|
||||||
|
ports="80->80/tcp",
|
||||||
|
engine="docker",
|
||||||
|
stack="web",
|
||||||
|
service="nginx",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("compose_farm.web.routes.containers.get_config") as mock_config,
|
||||||
|
patch(
|
||||||
|
"compose_farm.web.routes.containers.fetch_all_container_stats",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
) as mock_fetch,
|
||||||
|
):
|
||||||
|
mock_config.return_value = Config(
|
||||||
|
compose_dir=Path("/opt/compose"),
|
||||||
|
hosts={"nas": Host(address="192.168.1.6")},
|
||||||
|
stacks={"test": "nas"},
|
||||||
|
glances_stack="glances",
|
||||||
|
)
|
||||||
|
mock_fetch.return_value = mock_containers
|
||||||
|
|
||||||
|
response = client.get("/api/containers/rows")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert "<tr " in response.text # <tr id="..."> has attributes
|
||||||
|
assert "nginx" in response.text
|
||||||
|
assert "running" in response.text
|
||||||
|
|
||||||
|
def test_rows_have_data_sort_attributes(self, client: TestClient) -> None:
|
||||||
|
"""Test rows have data-sort attributes for client-side sorting."""
|
||||||
|
mock_containers = [
|
||||||
|
ContainerStats(
|
||||||
|
name="alpha",
|
||||||
|
host="nas",
|
||||||
|
status="running",
|
||||||
|
image="nginx:latest",
|
||||||
|
cpu_percent=10.0,
|
||||||
|
memory_usage=100,
|
||||||
|
memory_limit=1000,
|
||||||
|
memory_percent=10.0,
|
||||||
|
network_rx=100,
|
||||||
|
network_tx=100,
|
||||||
|
uptime="1 hour",
|
||||||
|
ports="",
|
||||||
|
engine="docker",
|
||||||
|
stack="alpha",
|
||||||
|
service="web",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
with (
|
||||||
|
patch("compose_farm.web.routes.containers.get_config") as mock_config,
|
||||||
|
patch(
|
||||||
|
"compose_farm.web.routes.containers.fetch_all_container_stats",
|
||||||
|
new_callable=AsyncMock,
|
||||||
|
) as mock_fetch,
|
||||||
|
):
|
||||||
|
mock_config.return_value = Config(
|
||||||
|
compose_dir=Path("/opt/compose"),
|
||||||
|
hosts={"nas": Host(address="192.168.1.6")},
|
||||||
|
stacks={"test": "nas"},
|
||||||
|
glances_stack="glances",
|
||||||
|
)
|
||||||
|
mock_fetch.return_value = mock_containers
|
||||||
|
|
||||||
|
response = client.get("/api/containers/rows")
|
||||||
|
assert response.status_code == 200
|
||||||
|
# Check that cells have data-sort attributes
|
||||||
|
assert 'data-sort="alpha"' in response.text # stack
|
||||||
|
assert 'data-sort="web"' in response.text # service
|
||||||
|
assert 'data-sort="3600"' in response.text # uptime (1 hour = 3600s)
|
||||||
|
assert 'data-sort="10' in response.text # cpu
|
||||||
349
tests/test_glances.py
Normal file
349
tests/test_glances.py
Normal file
@@ -0,0 +1,349 @@
|
|||||||
|
"""Tests for Glances integration."""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from compose_farm.config import Config, Host
|
||||||
|
from compose_farm.glances import (
|
||||||
|
DEFAULT_GLANCES_PORT,
|
||||||
|
ContainerStats,
|
||||||
|
HostStats,
|
||||||
|
fetch_all_container_stats,
|
||||||
|
fetch_all_host_stats,
|
||||||
|
fetch_container_stats,
|
||||||
|
fetch_host_stats,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestHostStats:
|
||||||
|
"""Tests for HostStats dataclass."""
|
||||||
|
|
||||||
|
def test_host_stats_creation(self) -> None:
|
||||||
|
stats = HostStats(
|
||||||
|
host="nas",
|
||||||
|
cpu_percent=25.5,
|
||||||
|
mem_percent=50.0,
|
||||||
|
swap_percent=10.0,
|
||||||
|
load=2.5,
|
||||||
|
disk_percent=75.0,
|
||||||
|
)
|
||||||
|
assert stats.host == "nas"
|
||||||
|
assert stats.cpu_percent == 25.5
|
||||||
|
assert stats.mem_percent == 50.0
|
||||||
|
assert stats.disk_percent == 75.0
|
||||||
|
assert stats.error is None
|
||||||
|
|
||||||
|
def test_host_stats_from_error(self) -> None:
|
||||||
|
stats = HostStats.from_error("nas", "Connection refused")
|
||||||
|
assert stats.host == "nas"
|
||||||
|
assert stats.cpu_percent == 0
|
||||||
|
assert stats.mem_percent == 0
|
||||||
|
assert stats.error == "Connection refused"
|
||||||
|
|
||||||
|
|
||||||
|
class TestFetchHostStats:
|
||||||
|
"""Tests for fetch_host_stats function."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_fetch_host_stats_success(self) -> None:
|
||||||
|
quicklook_response = httpx.Response(
|
||||||
|
200,
|
||||||
|
json={
|
||||||
|
"cpu": 25.5,
|
||||||
|
"mem": 50.0,
|
||||||
|
"swap": 5.0,
|
||||||
|
"load": 2.5,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
fs_response = httpx.Response(
|
||||||
|
200,
|
||||||
|
json=[
|
||||||
|
{"mnt_point": "/", "percent": 65.0},
|
||||||
|
{"mnt_point": "/mnt/data", "percent": 80.0},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
async def mock_get(url: str) -> httpx.Response:
|
||||||
|
if "quicklook" in url:
|
||||||
|
return quicklook_response
|
||||||
|
return fs_response
|
||||||
|
|
||||||
|
with patch("httpx.AsyncClient") as mock_client:
|
||||||
|
mock_client.return_value.__aenter__ = AsyncMock(return_value=mock_client.return_value)
|
||||||
|
mock_client.return_value.__aexit__ = AsyncMock(return_value=None)
|
||||||
|
mock_client.return_value.get = AsyncMock(side_effect=mock_get)
|
||||||
|
|
||||||
|
stats = await fetch_host_stats("nas", "192.168.1.6")
|
||||||
|
|
||||||
|
assert stats.host == "nas"
|
||||||
|
assert stats.cpu_percent == 25.5
|
||||||
|
assert stats.mem_percent == 50.0
|
||||||
|
assert stats.swap_percent == 5.0
|
||||||
|
assert stats.load == 2.5
|
||||||
|
assert stats.disk_percent == 65.0 # Root filesystem
|
||||||
|
assert stats.error is None
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_fetch_host_stats_http_error(self) -> None:
|
||||||
|
mock_response = httpx.Response(500)
|
||||||
|
|
||||||
|
with patch("httpx.AsyncClient") as mock_client:
|
||||||
|
mock_client.return_value.__aenter__ = AsyncMock(return_value=mock_client.return_value)
|
||||||
|
mock_client.return_value.__aexit__ = AsyncMock(return_value=None)
|
||||||
|
mock_client.return_value.get = AsyncMock(return_value=mock_response)
|
||||||
|
|
||||||
|
stats = await fetch_host_stats("nas", "192.168.1.6")
|
||||||
|
|
||||||
|
assert stats.host == "nas"
|
||||||
|
assert stats.error == "HTTP 500"
|
||||||
|
assert stats.cpu_percent == 0
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_fetch_host_stats_timeout(self) -> None:
|
||||||
|
with patch("httpx.AsyncClient") as mock_client:
|
||||||
|
mock_client.return_value.__aenter__ = AsyncMock(return_value=mock_client.return_value)
|
||||||
|
mock_client.return_value.__aexit__ = AsyncMock(return_value=None)
|
||||||
|
mock_client.return_value.get = AsyncMock(side_effect=httpx.TimeoutException("timeout"))
|
||||||
|
|
||||||
|
stats = await fetch_host_stats("nas", "192.168.1.6")
|
||||||
|
|
||||||
|
assert stats.host == "nas"
|
||||||
|
assert stats.error == "timeout"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_fetch_host_stats_connection_error(self) -> None:
|
||||||
|
with patch("httpx.AsyncClient") as mock_client:
|
||||||
|
mock_client.return_value.__aenter__ = AsyncMock(return_value=mock_client.return_value)
|
||||||
|
mock_client.return_value.__aexit__ = AsyncMock(return_value=None)
|
||||||
|
mock_client.return_value.get = AsyncMock(
|
||||||
|
side_effect=httpx.ConnectError("Connection refused")
|
||||||
|
)
|
||||||
|
|
||||||
|
stats = await fetch_host_stats("nas", "192.168.1.6")
|
||||||
|
|
||||||
|
assert stats.host == "nas"
|
||||||
|
assert stats.error is not None
|
||||||
|
assert "Connection refused" in stats.error
|
||||||
|
|
||||||
|
|
||||||
|
class TestFetchAllHostStats:
|
||||||
|
"""Tests for fetch_all_host_stats function."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_fetch_all_host_stats(self) -> None:
|
||||||
|
config = Config(
|
||||||
|
compose_dir=Path("/opt/compose"),
|
||||||
|
hosts={
|
||||||
|
"nas": Host(address="192.168.1.6"),
|
||||||
|
"nuc": Host(address="192.168.1.2"),
|
||||||
|
},
|
||||||
|
stacks={"test": "nas"},
|
||||||
|
)
|
||||||
|
|
||||||
|
quicklook_response = httpx.Response(
|
||||||
|
200,
|
||||||
|
json={
|
||||||
|
"cpu": 25.5,
|
||||||
|
"mem": 50.0,
|
||||||
|
"swap": 5.0,
|
||||||
|
"load": 2.5,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
fs_response = httpx.Response(
|
||||||
|
200,
|
||||||
|
json=[{"mnt_point": "/", "percent": 70.0}],
|
||||||
|
)
|
||||||
|
|
||||||
|
async def mock_get(url: str) -> httpx.Response:
|
||||||
|
if "quicklook" in url:
|
||||||
|
return quicklook_response
|
||||||
|
return fs_response
|
||||||
|
|
||||||
|
with patch("httpx.AsyncClient") as mock_client:
|
||||||
|
mock_client.return_value.__aenter__ = AsyncMock(return_value=mock_client.return_value)
|
||||||
|
mock_client.return_value.__aexit__ = AsyncMock(return_value=None)
|
||||||
|
mock_client.return_value.get = AsyncMock(side_effect=mock_get)
|
||||||
|
|
||||||
|
stats = await fetch_all_host_stats(config)
|
||||||
|
|
||||||
|
assert "nas" in stats
|
||||||
|
assert "nuc" in stats
|
||||||
|
assert stats["nas"].cpu_percent == 25.5
|
||||||
|
assert stats["nuc"].cpu_percent == 25.5
|
||||||
|
assert stats["nas"].disk_percent == 70.0
|
||||||
|
|
||||||
|
|
||||||
|
class TestDefaultPort:
|
||||||
|
"""Tests for default Glances port constant."""
|
||||||
|
|
||||||
|
def test_default_port(self) -> None:
|
||||||
|
assert DEFAULT_GLANCES_PORT == 61208
|
||||||
|
|
||||||
|
|
||||||
|
class TestContainerStats:
|
||||||
|
"""Tests for ContainerStats dataclass."""
|
||||||
|
|
||||||
|
def test_container_stats_creation(self) -> None:
|
||||||
|
stats = ContainerStats(
|
||||||
|
name="nginx",
|
||||||
|
host="nas",
|
||||||
|
status="running",
|
||||||
|
image="nginx:latest",
|
||||||
|
cpu_percent=5.5,
|
||||||
|
memory_usage=104857600, # 100MB
|
||||||
|
memory_limit=1073741824, # 1GB
|
||||||
|
memory_percent=9.77,
|
||||||
|
network_rx=1000000,
|
||||||
|
network_tx=500000,
|
||||||
|
uptime="2 hours",
|
||||||
|
ports="80->80/tcp",
|
||||||
|
engine="docker",
|
||||||
|
)
|
||||||
|
assert stats.name == "nginx"
|
||||||
|
assert stats.host == "nas"
|
||||||
|
assert stats.cpu_percent == 5.5
|
||||||
|
|
||||||
|
|
||||||
|
class TestFetchContainerStats:
|
||||||
|
"""Tests for fetch_container_stats function."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_fetch_container_stats_success(self) -> None:
|
||||||
|
mock_response = httpx.Response(
|
||||||
|
200,
|
||||||
|
json=[
|
||||||
|
{
|
||||||
|
"name": "nginx",
|
||||||
|
"status": "running",
|
||||||
|
"image": ["nginx:latest"],
|
||||||
|
"cpu_percent": 5.5,
|
||||||
|
"memory_usage": 104857600,
|
||||||
|
"memory_limit": 1073741824,
|
||||||
|
"network": {"cumulative_rx": 1000, "cumulative_tx": 500},
|
||||||
|
"uptime": "2 hours",
|
||||||
|
"ports": "80->80/tcp",
|
||||||
|
"engine": "docker",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "redis",
|
||||||
|
"status": "running",
|
||||||
|
"image": ["redis:7"],
|
||||||
|
"cpu_percent": 1.2,
|
||||||
|
"memory_usage": 52428800,
|
||||||
|
"memory_limit": 1073741824,
|
||||||
|
"network": {},
|
||||||
|
"uptime": "3 hours",
|
||||||
|
"ports": "",
|
||||||
|
"engine": "docker",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
with patch("httpx.AsyncClient") as mock_client:
|
||||||
|
mock_client.return_value.__aenter__ = AsyncMock(return_value=mock_client.return_value)
|
||||||
|
mock_client.return_value.__aexit__ = AsyncMock(return_value=None)
|
||||||
|
mock_client.return_value.get = AsyncMock(return_value=mock_response)
|
||||||
|
|
||||||
|
containers, error = await fetch_container_stats("nas", "192.168.1.6")
|
||||||
|
|
||||||
|
assert error is None
|
||||||
|
assert containers is not None
|
||||||
|
assert len(containers) == 2
|
||||||
|
assert containers[0].name == "nginx"
|
||||||
|
assert containers[0].host == "nas"
|
||||||
|
assert containers[0].cpu_percent == 5.5
|
||||||
|
assert containers[1].name == "redis"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_fetch_container_stats_empty_on_error(self) -> None:
|
||||||
|
with patch("httpx.AsyncClient") as mock_client:
|
||||||
|
mock_client.return_value.__aenter__ = AsyncMock(return_value=mock_client.return_value)
|
||||||
|
mock_client.return_value.__aexit__ = AsyncMock(return_value=None)
|
||||||
|
mock_client.return_value.get = AsyncMock(side_effect=httpx.TimeoutException("timeout"))
|
||||||
|
|
||||||
|
containers, error = await fetch_container_stats("nas", "192.168.1.6")
|
||||||
|
|
||||||
|
assert containers is None
|
||||||
|
assert error == "Connection timed out"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_fetch_container_stats_handles_string_image(self) -> None:
|
||||||
|
"""Test that image field works as string (not just list)."""
|
||||||
|
mock_response = httpx.Response(
|
||||||
|
200,
|
||||||
|
json=[
|
||||||
|
{
|
||||||
|
"name": "test",
|
||||||
|
"status": "running",
|
||||||
|
"image": "myimage:v1", # String instead of list
|
||||||
|
"cpu_percent": 0,
|
||||||
|
"memory_usage": 0,
|
||||||
|
"memory_limit": 1,
|
||||||
|
"network": {},
|
||||||
|
"uptime": "",
|
||||||
|
"ports": "",
|
||||||
|
"engine": "docker",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
with patch("httpx.AsyncClient") as mock_client:
|
||||||
|
mock_client.return_value.__aenter__ = AsyncMock(return_value=mock_client.return_value)
|
||||||
|
mock_client.return_value.__aexit__ = AsyncMock(return_value=None)
|
||||||
|
mock_client.return_value.get = AsyncMock(return_value=mock_response)
|
||||||
|
|
||||||
|
containers, error = await fetch_container_stats("nas", "192.168.1.6")
|
||||||
|
|
||||||
|
assert error is None
|
||||||
|
assert containers is not None
|
||||||
|
assert len(containers) == 1
|
||||||
|
assert containers[0].image == "myimage:v1"
|
||||||
|
|
||||||
|
|
||||||
|
class TestFetchAllContainerStats:
|
||||||
|
"""Tests for fetch_all_container_stats function."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_fetch_all_container_stats(self) -> None:
|
||||||
|
config = Config(
|
||||||
|
compose_dir=Path("/opt/compose"),
|
||||||
|
hosts={
|
||||||
|
"nas": Host(address="192.168.1.6"),
|
||||||
|
"nuc": Host(address="192.168.1.2"),
|
||||||
|
},
|
||||||
|
stacks={"test": "nas"},
|
||||||
|
)
|
||||||
|
|
||||||
|
mock_response = httpx.Response(
|
||||||
|
200,
|
||||||
|
json=[
|
||||||
|
{
|
||||||
|
"name": "nginx",
|
||||||
|
"status": "running",
|
||||||
|
"image": ["nginx:latest"],
|
||||||
|
"cpu_percent": 5.5,
|
||||||
|
"memory_usage": 104857600,
|
||||||
|
"memory_limit": 1073741824,
|
||||||
|
"network": {},
|
||||||
|
"uptime": "2 hours",
|
||||||
|
"ports": "",
|
||||||
|
"engine": "docker",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
with patch("httpx.AsyncClient") as mock_client:
|
||||||
|
mock_client.return_value.__aenter__ = AsyncMock(return_value=mock_client.return_value)
|
||||||
|
mock_client.return_value.__aexit__ = AsyncMock(return_value=None)
|
||||||
|
mock_client.return_value.get = AsyncMock(return_value=mock_response)
|
||||||
|
|
||||||
|
containers = await fetch_all_container_stats(config)
|
||||||
|
|
||||||
|
# 2 hosts x 1 container each = 2 containers
|
||||||
|
assert len(containers) == 2
|
||||||
|
hosts = {c.host for c in containers}
|
||||||
|
assert "nas" in hosts
|
||||||
|
assert "nuc" in hosts
|
||||||
182
tests/test_registry.py
Normal file
182
tests/test_registry.py
Normal file
@@ -0,0 +1,182 @@
|
|||||||
|
"""Tests for registry module."""
|
||||||
|
|
||||||
|
from compose_farm.registry import (
|
||||||
|
DOCKER_HUB_ALIASES,
|
||||||
|
ImageRef,
|
||||||
|
RegistryClient,
|
||||||
|
TagCheckResult,
|
||||||
|
_find_updates,
|
||||||
|
_parse_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestImageRef:
|
||||||
|
"""Tests for ImageRef parsing."""
|
||||||
|
|
||||||
|
def test_parse_simple_image(self) -> None:
|
||||||
|
"""Test parsing simple image name."""
|
||||||
|
ref = ImageRef.parse("nginx")
|
||||||
|
assert ref.registry == "docker.io"
|
||||||
|
assert ref.namespace == "library"
|
||||||
|
assert ref.name == "nginx"
|
||||||
|
assert ref.tag == "latest"
|
||||||
|
|
||||||
|
def test_parse_image_with_tag(self) -> None:
|
||||||
|
"""Test parsing image with tag."""
|
||||||
|
ref = ImageRef.parse("nginx:1.25")
|
||||||
|
assert ref.registry == "docker.io"
|
||||||
|
assert ref.namespace == "library"
|
||||||
|
assert ref.name == "nginx"
|
||||||
|
assert ref.tag == "1.25"
|
||||||
|
|
||||||
|
def test_parse_image_with_namespace(self) -> None:
|
||||||
|
"""Test parsing image with namespace."""
|
||||||
|
ref = ImageRef.parse("linuxserver/jellyfin:latest")
|
||||||
|
assert ref.registry == "docker.io"
|
||||||
|
assert ref.namespace == "linuxserver"
|
||||||
|
assert ref.name == "jellyfin"
|
||||||
|
assert ref.tag == "latest"
|
||||||
|
|
||||||
|
def test_parse_ghcr_image(self) -> None:
|
||||||
|
"""Test parsing GitHub Container Registry image."""
|
||||||
|
ref = ImageRef.parse("ghcr.io/user/repo:v1.0.0")
|
||||||
|
assert ref.registry == "ghcr.io"
|
||||||
|
assert ref.namespace == "user"
|
||||||
|
assert ref.name == "repo"
|
||||||
|
assert ref.tag == "v1.0.0"
|
||||||
|
|
||||||
|
def test_parse_image_with_digest(self) -> None:
|
||||||
|
"""Test parsing image with digest."""
|
||||||
|
ref = ImageRef.parse("nginx:latest@sha256:abc123")
|
||||||
|
assert ref.registry == "docker.io"
|
||||||
|
assert ref.name == "nginx"
|
||||||
|
assert ref.tag == "latest"
|
||||||
|
assert ref.digest == "sha256:abc123"
|
||||||
|
|
||||||
|
def test_full_name_with_namespace(self) -> None:
|
||||||
|
"""Test full_name property with namespace."""
|
||||||
|
ref = ImageRef.parse("linuxserver/jellyfin")
|
||||||
|
assert ref.full_name == "linuxserver/jellyfin"
|
||||||
|
|
||||||
|
def test_full_name_without_namespace(self) -> None:
|
||||||
|
"""Test full_name property for official images."""
|
||||||
|
ref = ImageRef.parse("nginx")
|
||||||
|
assert ref.full_name == "library/nginx"
|
||||||
|
|
||||||
|
def test_display_name_official_image(self) -> None:
|
||||||
|
"""Test display_name for official Docker Hub images."""
|
||||||
|
ref = ImageRef.parse("nginx:latest")
|
||||||
|
assert ref.display_name == "nginx"
|
||||||
|
|
||||||
|
def test_display_name_hub_with_namespace(self) -> None:
|
||||||
|
"""Test display_name for Docker Hub images with namespace."""
|
||||||
|
ref = ImageRef.parse("linuxserver/jellyfin")
|
||||||
|
assert ref.display_name == "linuxserver/jellyfin"
|
||||||
|
|
||||||
|
def test_display_name_other_registry(self) -> None:
|
||||||
|
"""Test display_name for other registries."""
|
||||||
|
ref = ImageRef.parse("ghcr.io/user/repo")
|
||||||
|
assert ref.display_name == "ghcr.io/user/repo"
|
||||||
|
|
||||||
|
|
||||||
|
class TestParseVersion:
|
||||||
|
"""Tests for version parsing."""
|
||||||
|
|
||||||
|
def test_parse_semver(self) -> None:
|
||||||
|
"""Test parsing semantic version."""
|
||||||
|
assert _parse_version("1.2.3") == (1, 2, 3)
|
||||||
|
|
||||||
|
def test_parse_version_with_v_prefix(self) -> None:
|
||||||
|
"""Test parsing version with v prefix."""
|
||||||
|
assert _parse_version("v1.2.3") == (1, 2, 3)
|
||||||
|
assert _parse_version("V1.2.3") == (1, 2, 3)
|
||||||
|
|
||||||
|
def test_parse_two_part_version(self) -> None:
|
||||||
|
"""Test parsing two-part version."""
|
||||||
|
assert _parse_version("1.25") == (1, 25)
|
||||||
|
|
||||||
|
def test_parse_single_number(self) -> None:
|
||||||
|
"""Test parsing single number version."""
|
||||||
|
assert _parse_version("7") == (7,)
|
||||||
|
|
||||||
|
def test_parse_invalid_version(self) -> None:
|
||||||
|
"""Test parsing non-version tags."""
|
||||||
|
assert _parse_version("latest") is None
|
||||||
|
assert _parse_version("stable") is None
|
||||||
|
assert _parse_version("alpine") is None
|
||||||
|
|
||||||
|
|
||||||
|
class TestFindUpdates:
|
||||||
|
"""Tests for finding available updates."""
|
||||||
|
|
||||||
|
def test_find_updates_with_newer_versions(self) -> None:
|
||||||
|
"""Test finding newer versions."""
|
||||||
|
current = "1.0.0"
|
||||||
|
tags = ["0.9.0", "1.0.0", "1.1.0", "2.0.0"]
|
||||||
|
updates = _find_updates(current, tags)
|
||||||
|
assert updates == ["2.0.0", "1.1.0"]
|
||||||
|
|
||||||
|
def test_find_updates_no_newer(self) -> None:
|
||||||
|
"""Test when already on latest."""
|
||||||
|
current = "2.0.0"
|
||||||
|
tags = ["1.0.0", "1.5.0", "2.0.0"]
|
||||||
|
updates = _find_updates(current, tags)
|
||||||
|
assert updates == []
|
||||||
|
|
||||||
|
def test_find_updates_non_version_tag(self) -> None:
|
||||||
|
"""Test with non-version current tag."""
|
||||||
|
current = "latest"
|
||||||
|
tags = ["1.0.0", "2.0.0"]
|
||||||
|
updates = _find_updates(current, tags)
|
||||||
|
# Can't determine updates for non-version tags
|
||||||
|
assert updates == []
|
||||||
|
|
||||||
|
|
||||||
|
class TestRegistryClient:
|
||||||
|
"""Tests for unified registry client."""
|
||||||
|
|
||||||
|
def test_docker_hub_normalization(self) -> None:
|
||||||
|
"""Test Docker Hub aliases are normalized."""
|
||||||
|
for alias in DOCKER_HUB_ALIASES:
|
||||||
|
client = RegistryClient(alias)
|
||||||
|
assert client.registry == "docker.io"
|
||||||
|
assert client.registry_url == "https://registry-1.docker.io"
|
||||||
|
|
||||||
|
def test_ghcr_client(self) -> None:
|
||||||
|
"""Test GitHub Container Registry client."""
|
||||||
|
client = RegistryClient("ghcr.io")
|
||||||
|
assert client.registry == "ghcr.io"
|
||||||
|
assert client.registry_url == "https://ghcr.io"
|
||||||
|
|
||||||
|
def test_generic_registry(self) -> None:
|
||||||
|
"""Test generic registry client."""
|
||||||
|
client = RegistryClient("quay.io")
|
||||||
|
assert client.registry == "quay.io"
|
||||||
|
assert client.registry_url == "https://quay.io"
|
||||||
|
|
||||||
|
|
||||||
|
class TestTagCheckResult:
|
||||||
|
"""Tests for TagCheckResult."""
|
||||||
|
|
||||||
|
def test_create_result(self) -> None:
|
||||||
|
"""Test creating a result."""
|
||||||
|
ref = ImageRef.parse("nginx:1.25")
|
||||||
|
result = TagCheckResult(
|
||||||
|
image=ref,
|
||||||
|
current_digest="sha256:abc",
|
||||||
|
available_updates=["1.26", "1.27"],
|
||||||
|
)
|
||||||
|
assert result.image.name == "nginx"
|
||||||
|
assert result.available_updates == ["1.26", "1.27"]
|
||||||
|
assert result.error is None
|
||||||
|
|
||||||
|
def test_result_with_error(self) -> None:
|
||||||
|
"""Test result with error."""
|
||||||
|
ref = ImageRef.parse("nginx")
|
||||||
|
result = TagCheckResult(
|
||||||
|
image=ref,
|
||||||
|
current_digest="",
|
||||||
|
error="Connection refused",
|
||||||
|
)
|
||||||
|
assert result.error == "Connection refused"
|
||||||
|
assert result.available_updates == []
|
||||||
@@ -134,6 +134,13 @@ def test_config(tmp_path_factory: pytest.TempPathFactory) -> Path:
|
|||||||
else:
|
else:
|
||||||
(svc / "compose.yaml").write_text(f"services:\n {name}:\n image: test/{name}\n")
|
(svc / "compose.yaml").write_text(f"services:\n {name}:\n image: test/{name}\n")
|
||||||
|
|
||||||
|
# Create glances stack (required for containers page)
|
||||||
|
glances_dir = compose_dir / "glances"
|
||||||
|
glances_dir.mkdir()
|
||||||
|
(glances_dir / "compose.yaml").write_text(
|
||||||
|
"services:\n glances:\n image: nicolargo/glances\n"
|
||||||
|
)
|
||||||
|
|
||||||
# Create config with multiple hosts
|
# Create config with multiple hosts
|
||||||
config = tmp / "compose-farm.yaml"
|
config = tmp / "compose-farm.yaml"
|
||||||
config.write_text(f"""
|
config.write_text(f"""
|
||||||
@@ -151,6 +158,8 @@ stacks:
|
|||||||
nextcloud: server-2
|
nextcloud: server-2
|
||||||
jellyfin: server-2
|
jellyfin: server-2
|
||||||
redis: server-1
|
redis: server-1
|
||||||
|
glances: all
|
||||||
|
glances_stack: glances
|
||||||
""")
|
""")
|
||||||
|
|
||||||
# Create state (plex and nextcloud running, grafana and jellyfin not started)
|
# Create state (plex and nextcloud running, grafana and jellyfin not started)
|
||||||
@@ -245,7 +254,7 @@ class TestHTMXSidebarLoading:
|
|||||||
|
|
||||||
# Verify actual stacks from test config appear
|
# Verify actual stacks from test config appear
|
||||||
stacks = page.locator("#sidebar-stacks li")
|
stacks = page.locator("#sidebar-stacks li")
|
||||||
assert stacks.count() == 5 # plex, grafana, nextcloud, jellyfin, redis
|
assert stacks.count() == 6 # plex, grafana, nextcloud, jellyfin, redis, glances
|
||||||
|
|
||||||
# Check specific stacks are present
|
# Check specific stacks are present
|
||||||
content = page.locator("#sidebar-stacks").inner_text()
|
content = page.locator("#sidebar-stacks").inner_text()
|
||||||
@@ -348,7 +357,7 @@ class TestDashboardContent:
|
|||||||
|
|
||||||
# From test config: 2 hosts, 5 stacks, 2 running (plex, nextcloud)
|
# From test config: 2 hosts, 5 stacks, 2 running (plex, nextcloud)
|
||||||
assert "2" in stats # hosts count
|
assert "2" in stats # hosts count
|
||||||
assert "5" in stats # stacks count
|
assert "6" in stats # stacks count
|
||||||
|
|
||||||
def test_pending_shows_not_started_stacks(self, page: Page, server_url: str) -> None:
|
def test_pending_shows_not_started_stacks(self, page: Page, server_url: str) -> None:
|
||||||
"""Pending operations shows grafana and jellyfin as not started."""
|
"""Pending operations shows grafana and jellyfin as not started."""
|
||||||
@@ -476,9 +485,9 @@ class TestSidebarFilter:
|
|||||||
page.goto(server_url)
|
page.goto(server_url)
|
||||||
page.wait_for_selector("#sidebar-stacks", timeout=TIMEOUT)
|
page.wait_for_selector("#sidebar-stacks", timeout=TIMEOUT)
|
||||||
|
|
||||||
# Initially all 4 stacks visible
|
# Initially all 6 stacks visible
|
||||||
visible_items = page.locator("#sidebar-stacks li:not([hidden])")
|
visible_items = page.locator("#sidebar-stacks li:not([hidden])")
|
||||||
assert visible_items.count() == 5
|
assert visible_items.count() == 6
|
||||||
|
|
||||||
# Type in filter to match only "plex"
|
# Type in filter to match only "plex"
|
||||||
self._filter_sidebar(page, "plex")
|
self._filter_sidebar(page, "plex")
|
||||||
@@ -493,9 +502,9 @@ class TestSidebarFilter:
|
|||||||
page.goto(server_url)
|
page.goto(server_url)
|
||||||
page.wait_for_selector("#sidebar-stacks", timeout=TIMEOUT)
|
page.wait_for_selector("#sidebar-stacks", timeout=TIMEOUT)
|
||||||
|
|
||||||
# Initial count should be (5)
|
# Initial count should be (6)
|
||||||
count_badge = page.locator("#sidebar-count")
|
count_badge = page.locator("#sidebar-count")
|
||||||
assert "(5)" in count_badge.inner_text()
|
assert "(6)" in count_badge.inner_text()
|
||||||
|
|
||||||
# Filter to show only stacks containing "x" (plex, nextcloud)
|
# Filter to show only stacks containing "x" (plex, nextcloud)
|
||||||
self._filter_sidebar(page, "x")
|
self._filter_sidebar(page, "x")
|
||||||
@@ -524,13 +533,14 @@ class TestSidebarFilter:
|
|||||||
# Select server-1 from dropdown
|
# Select server-1 from dropdown
|
||||||
page.locator("#sidebar-host-select").select_option("server-1")
|
page.locator("#sidebar-host-select").select_option("server-1")
|
||||||
|
|
||||||
# Only plex, grafana, and redis (server-1 stacks) should be visible
|
# plex, grafana, redis (server-1), and glances (all) should be visible
|
||||||
visible = page.locator("#sidebar-stacks li:not([hidden])")
|
visible = page.locator("#sidebar-stacks li:not([hidden])")
|
||||||
assert visible.count() == 3
|
assert visible.count() == 4
|
||||||
|
|
||||||
content = visible.all_inner_texts()
|
content = visible.all_inner_texts()
|
||||||
assert any("plex" in s for s in content)
|
assert any("plex" in s for s in content)
|
||||||
assert any("grafana" in s for s in content)
|
assert any("grafana" in s for s in content)
|
||||||
|
assert any("glances" in s for s in content)
|
||||||
assert not any("nextcloud" in s for s in content)
|
assert not any("nextcloud" in s for s in content)
|
||||||
assert not any("jellyfin" in s for s in content)
|
assert not any("jellyfin" in s for s in content)
|
||||||
|
|
||||||
@@ -562,7 +572,7 @@ class TestSidebarFilter:
|
|||||||
self._filter_sidebar(page, "")
|
self._filter_sidebar(page, "")
|
||||||
|
|
||||||
# All stacks visible again
|
# All stacks visible again
|
||||||
assert page.locator("#sidebar-stacks li:not([hidden])").count() == 5
|
assert page.locator("#sidebar-stacks li:not([hidden])").count() == 6
|
||||||
|
|
||||||
|
|
||||||
class TestCommandPalette:
|
class TestCommandPalette:
|
||||||
@@ -884,7 +894,7 @@ class TestContentStability:
|
|||||||
|
|
||||||
# Remember sidebar state
|
# Remember sidebar state
|
||||||
initial_count = page.locator("#sidebar-stacks li").count()
|
initial_count = page.locator("#sidebar-stacks li").count()
|
||||||
assert initial_count == 5
|
assert initial_count == 6
|
||||||
|
|
||||||
# Navigate away
|
# Navigate away
|
||||||
page.locator("#sidebar-stacks a", has_text="plex").click()
|
page.locator("#sidebar-stacks a", has_text="plex").click()
|
||||||
@@ -2329,3 +2339,227 @@ class TestTerminalNavigationIsolation:
|
|||||||
# Terminal should still be collapsed (no task to reconnect to)
|
# Terminal should still be collapsed (no task to reconnect to)
|
||||||
terminal_toggle = page.locator("#terminal-toggle")
|
terminal_toggle = page.locator("#terminal-toggle")
|
||||||
assert not terminal_toggle.is_checked(), "Terminal should remain collapsed after navigation"
|
assert not terminal_toggle.is_checked(), "Terminal should remain collapsed after navigation"
|
||||||
|
|
||||||
|
|
||||||
|
class TestContainersPagePause:
|
||||||
|
"""Test containers page auto-refresh pause mechanism.
|
||||||
|
|
||||||
|
The containers page auto-refreshes every 3 seconds. When a user opens
|
||||||
|
an action dropdown, refresh should pause to prevent the dropdown from
|
||||||
|
closing unexpectedly.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Mock HTML for container rows with action dropdowns
|
||||||
|
MOCK_ROWS_HTML = """
|
||||||
|
<tr>
|
||||||
|
<td>1</td>
|
||||||
|
<td data-sort="plex"><a href="/stack/plex" class="link">plex</a></td>
|
||||||
|
<td data-sort="server">server</td>
|
||||||
|
<td><div class="dropdown dropdown-end">
|
||||||
|
<label tabindex="0" class="btn btn-circle btn-ghost btn-xs"><svg class="h-4 w-4"></svg></label>
|
||||||
|
<ul tabindex="0" class="dropdown-content menu menu-sm bg-base-200 rounded-box shadow-lg w-36 z-50 p-2">
|
||||||
|
<li><a hx-post="/api/stack/plex/restart">Restart</a></li>
|
||||||
|
</ul>
|
||||||
|
</div></td>
|
||||||
|
<td data-sort="nas"><span class="badge">nas</span></td>
|
||||||
|
<td data-sort="nginx:latest"><code>nginx:latest</code></td>
|
||||||
|
<td data-sort="running"><span class="badge badge-success">running</span></td>
|
||||||
|
<td data-sort="3600">1 hour</td>
|
||||||
|
<td data-sort="5"><progress class="progress" value="5" max="100"></progress><span>5%</span></td>
|
||||||
|
<td data-sort="104857600"><progress class="progress" value="10" max="100"></progress><span>100MB</span></td>
|
||||||
|
<td data-sort="1000">↓1KB ↑1KB</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>2</td>
|
||||||
|
<td data-sort="redis"><a href="/stack/redis" class="link">redis</a></td>
|
||||||
|
<td data-sort="redis">redis</td>
|
||||||
|
<td><div class="dropdown dropdown-end">
|
||||||
|
<label tabindex="0" class="btn btn-circle btn-ghost btn-xs"><svg class="h-4 w-4"></svg></label>
|
||||||
|
<ul tabindex="0" class="dropdown-content menu menu-sm bg-base-200 rounded-box shadow-lg w-36 z-50 p-2">
|
||||||
|
<li><a hx-post="/api/stack/redis/restart">Restart</a></li>
|
||||||
|
</ul>
|
||||||
|
</div></td>
|
||||||
|
<td data-sort="nas"><span class="badge">nas</span></td>
|
||||||
|
<td data-sort="redis:7"><code>redis:7</code></td>
|
||||||
|
<td data-sort="running"><span class="badge badge-success">running</span></td>
|
||||||
|
<td data-sort="7200">2 hours</td>
|
||||||
|
<td data-sort="1"><progress class="progress" value="1" max="100"></progress><span>1%</span></td>
|
||||||
|
<td data-sort="52428800"><progress class="progress" value="5" max="100"></progress><span>50MB</span></td>
|
||||||
|
<td data-sort="500">↓500B ↑500B</td>
|
||||||
|
</tr>
|
||||||
|
"""
|
||||||
|
|
||||||
|
def test_dropdown_pauses_refresh(self, page: Page, server_url: str) -> None:
|
||||||
|
"""Opening action dropdown pauses auto-refresh.
|
||||||
|
|
||||||
|
Bug: focusin event triggers pause, but focusout fires shortly after
|
||||||
|
when focus moves within the dropdown, causing refresh to resume
|
||||||
|
while dropdown is still visually open.
|
||||||
|
"""
|
||||||
|
# Mock container rows and update checks
|
||||||
|
page.route(
|
||||||
|
"**/api/containers/rows/*",
|
||||||
|
lambda route: route.fulfill(
|
||||||
|
status=200,
|
||||||
|
content_type="text/html",
|
||||||
|
body=self.MOCK_ROWS_HTML,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
page.route(
|
||||||
|
"**/api/containers/check-updates",
|
||||||
|
lambda route: route.fulfill(
|
||||||
|
status=200,
|
||||||
|
content_type="application/json",
|
||||||
|
body='{"results": []}',
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
page.goto(f"{server_url}/live-stats")
|
||||||
|
|
||||||
|
# Wait for container rows to load
|
||||||
|
page.wait_for_function(
|
||||||
|
"document.querySelectorAll('#container-rows tr:not(.loading-row)').length > 0",
|
||||||
|
timeout=TIMEOUT,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Wait for timer to start
|
||||||
|
page.wait_for_function(
|
||||||
|
"document.getElementById('refresh-timer')?.textContent?.includes('↻')",
|
||||||
|
timeout=TIMEOUT,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Click on a dropdown to open it
|
||||||
|
dropdown_label = page.locator(".dropdown label").first
|
||||||
|
dropdown_label.click()
|
||||||
|
|
||||||
|
# Wait a moment for focusin to trigger
|
||||||
|
page.wait_for_timeout(200)
|
||||||
|
|
||||||
|
# Verify pause is engaged
|
||||||
|
timer_text = page.locator("#refresh-timer").inner_text()
|
||||||
|
|
||||||
|
assert timer_text == "❚❚", (
|
||||||
|
f"Refresh should be paused after clicking dropdown. timer='{timer_text}'"
|
||||||
|
)
|
||||||
|
assert "❚❚" in timer_text, f"Timer should show pause icon, got '{timer_text}'"
|
||||||
|
|
||||||
|
def test_refresh_stays_paused_while_dropdown_open(self, page: Page, server_url: str) -> None:
|
||||||
|
"""Refresh remains paused for duration dropdown is open (>5s refresh interval).
|
||||||
|
|
||||||
|
This is the critical test for the pause bug: refresh should stay paused
|
||||||
|
for longer than the 3-second refresh interval while dropdown is open.
|
||||||
|
"""
|
||||||
|
# Mock container rows and update checks
|
||||||
|
page.route(
|
||||||
|
"**/api/containers/rows/*",
|
||||||
|
lambda route: route.fulfill(
|
||||||
|
status=200,
|
||||||
|
content_type="text/html",
|
||||||
|
body=self.MOCK_ROWS_HTML,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
page.route(
|
||||||
|
"**/api/containers/check-updates",
|
||||||
|
lambda route: route.fulfill(
|
||||||
|
status=200,
|
||||||
|
content_type="application/json",
|
||||||
|
body='{"results": []}',
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
page.goto(f"{server_url}/live-stats")
|
||||||
|
|
||||||
|
# Wait for container rows to load
|
||||||
|
page.wait_for_function(
|
||||||
|
"document.querySelectorAll('#container-rows tr:not(.loading-row)').length > 0",
|
||||||
|
timeout=TIMEOUT,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Wait for timer to start
|
||||||
|
page.wait_for_function(
|
||||||
|
"document.getElementById('refresh-timer')?.textContent?.includes('↻')",
|
||||||
|
timeout=TIMEOUT,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Record a marker in the first row to detect if refresh happened
|
||||||
|
page.evaluate("""
|
||||||
|
const firstRow = document.querySelector('#container-rows tr');
|
||||||
|
if (firstRow) firstRow.dataset.testMarker = 'original';
|
||||||
|
""")
|
||||||
|
|
||||||
|
# Click dropdown to pause
|
||||||
|
dropdown_label = page.locator(".dropdown label").first
|
||||||
|
dropdown_label.click()
|
||||||
|
page.wait_for_timeout(200)
|
||||||
|
|
||||||
|
# Confirm paused
|
||||||
|
assert page.locator("#refresh-timer").inner_text() == "❚❚"
|
||||||
|
|
||||||
|
# Wait longer than the 5-second refresh interval
|
||||||
|
page.wait_for_timeout(6000)
|
||||||
|
|
||||||
|
# Check if still paused
|
||||||
|
timer_text = page.locator("#refresh-timer").inner_text()
|
||||||
|
|
||||||
|
# Check if the row was replaced (marker would be gone)
|
||||||
|
marker = page.evaluate("""
|
||||||
|
document.querySelector('#container-rows tr')?.dataset?.testMarker
|
||||||
|
""")
|
||||||
|
|
||||||
|
assert timer_text == "❚❚", f"Refresh should still be paused after 6s. timer='{timer_text}'"
|
||||||
|
assert marker == "original", (
|
||||||
|
"Table was refreshed while dropdown was open - pause mechanism failed"
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_refresh_resumes_after_dropdown_closes(self, page: Page, server_url: str) -> None:
|
||||||
|
"""Refresh resumes after dropdown is closed."""
|
||||||
|
# Mock container rows and update checks
|
||||||
|
page.route(
|
||||||
|
"**/api/containers/rows/*",
|
||||||
|
lambda route: route.fulfill(
|
||||||
|
status=200,
|
||||||
|
content_type="text/html",
|
||||||
|
body=self.MOCK_ROWS_HTML,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
page.route(
|
||||||
|
"**/api/containers/check-updates",
|
||||||
|
lambda route: route.fulfill(
|
||||||
|
status=200,
|
||||||
|
content_type="application/json",
|
||||||
|
body='{"results": []}',
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
page.goto(f"{server_url}/live-stats")
|
||||||
|
|
||||||
|
# Wait for container rows to load
|
||||||
|
page.wait_for_function(
|
||||||
|
"document.querySelectorAll('#container-rows tr:not(.loading-row)').length > 0",
|
||||||
|
timeout=TIMEOUT,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Wait for timer to start
|
||||||
|
page.wait_for_function(
|
||||||
|
"document.getElementById('refresh-timer')?.textContent?.includes('↻')",
|
||||||
|
timeout=TIMEOUT,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Click dropdown to pause
|
||||||
|
dropdown_label = page.locator(".dropdown label").first
|
||||||
|
dropdown_label.click()
|
||||||
|
page.wait_for_timeout(200)
|
||||||
|
|
||||||
|
assert page.locator("#refresh-timer").inner_text() == "❚❚"
|
||||||
|
|
||||||
|
# Close dropdown by pressing Escape or clicking elsewhere
|
||||||
|
page.keyboard.press("Escape")
|
||||||
|
page.wait_for_timeout(300) # Wait for focusout timeout (150ms) + buffer
|
||||||
|
|
||||||
|
# Verify refresh resumed
|
||||||
|
timer_text = page.locator("#refresh-timer").inner_text()
|
||||||
|
|
||||||
|
assert timer_text != "❚❚", (
|
||||||
|
f"Refresh should resume after closing dropdown. timer='{timer_text}'"
|
||||||
|
)
|
||||||
|
assert "↻" in timer_text, f"Timer should show countdown, got '{timer_text}'"
|
||||||
|
|||||||
11
uv.lock
generated
11
uv.lock
generated
@@ -242,6 +242,7 @@ dependencies = [
|
|||||||
[package.optional-dependencies]
|
[package.optional-dependencies]
|
||||||
web = [
|
web = [
|
||||||
{ name = "fastapi", extra = ["standard"] },
|
{ name = "fastapi", extra = ["standard"] },
|
||||||
|
{ name = "humanize" },
|
||||||
{ name = "jinja2" },
|
{ name = "jinja2" },
|
||||||
{ name = "websockets" },
|
{ name = "websockets" },
|
||||||
]
|
]
|
||||||
@@ -270,6 +271,7 @@ dev = [
|
|||||||
requires-dist = [
|
requires-dist = [
|
||||||
{ name = "asyncssh", specifier = ">=2.14.0" },
|
{ name = "asyncssh", specifier = ">=2.14.0" },
|
||||||
{ name = "fastapi", extras = ["standard"], marker = "extra == 'web'", specifier = ">=0.109.0" },
|
{ name = "fastapi", extras = ["standard"], marker = "extra == 'web'", specifier = ">=0.109.0" },
|
||||||
|
{ name = "humanize", marker = "extra == 'web'", specifier = ">=4.0.0" },
|
||||||
{ name = "jinja2", marker = "extra == 'web'", specifier = ">=3.1.0" },
|
{ name = "jinja2", marker = "extra == 'web'", specifier = ">=3.1.0" },
|
||||||
{ name = "pydantic", specifier = ">=2.0.0" },
|
{ name = "pydantic", specifier = ">=2.0.0" },
|
||||||
{ name = "pyyaml", specifier = ">=6.0" },
|
{ name = "pyyaml", specifier = ">=6.0" },
|
||||||
@@ -781,6 +783,15 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
|
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "humanize"
|
||||||
|
version = "4.15.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/ba/66/a3921783d54be8a6870ac4ccffcd15c4dc0dd7fcce51c6d63b8c63935276/humanize-4.15.0.tar.gz", hash = "sha256:1dd098483eb1c7ee8e32eb2e99ad1910baefa4b75c3aff3a82f4d78688993b10", size = 83599, upload-time = "2025-12-20T20:16:13.19Z" }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c5/7b/bca5613a0c3b542420cf92bd5e5fb8ebd5435ce1011a091f66bb7693285e/humanize-4.15.0-py3-none-any.whl", hash = "sha256:b1186eb9f5a9749cd9cb8565aee77919dd7c8d076161cf44d70e59e3301e1769", size = 132203, upload-time = "2025-12-20T20:16:11.67Z" },
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "identify"
|
name = "identify"
|
||||||
version = "2.6.15"
|
version = "2.6.15"
|
||||||
|
|||||||
Reference in New Issue
Block a user