diff --git a/.prompts/duplication-audit.md b/.prompts/duplication-audit.md
new file mode 100644
index 0000000..f916bee
--- /dev/null
+++ b/.prompts/duplication-audit.md
@@ -0,0 +1,79 @@
+# Duplication audit and generalization prompt
+
+You are a coding agent working inside a repository. Your job is to find duplicated
+functionality (not just identical code) and propose a minimal, safe generalization.
+Keep it simple and avoid adding features.
+
+## First steps
+
+- Read project-specific instructions (AGENTS.md, CONTRIBUTING.md, or similar) and follow them.
+- If instructions mention tooling or style (e.g., preferred search tools), use those.
+- Ask a brief clarifying question if the request is ambiguous (for example: report-only vs. refactor).
+
+## Objective
+
+Identify and consolidate duplicated functionality across the codebase. Duplication includes:
+- Multiple functions that parse or validate the same data in slightly different ways
+- Repeated file reads or config parsing
+- Similar command building or subprocess execution paths
+- Near-identical error handling or logging patterns
+- Repeated data transforms that can become a shared helper
+
+The goal is to propose a general, reusable abstraction that reduces duplication while
+preserving behavior. Keep changes minimal and easy to review.
+
+## Search strategy
+
+1) Map the hot paths
+- Scan entry points (CLI, web handlers, tasks, jobs) to see what they do repeatedly.
+- Look for cross-module patterns: same steps, different files.
+
+2) Find duplicate operations
+- Use fast search tools (prefer `rg`) to find repeated keywords and patterns.
+- Check for repeated YAML/JSON parsing, env interpolation, file IO, command building,
+  data validation, or response formatting.
+
+3) Validate duplication is real
+- Confirm the functional intent matches (not just similar code).
+- Note any subtle differences that must be preserved.
+
+4) Propose a minimal generalization
+- Suggest a shared helper, utility, or wrapper.
+- Avoid over-engineering. If only two call sites exist, keep the helper small.
+- Prefer pure functions, and keep IO centralized if the codebase already does so.
+
+## Deliverables
+
+Provide a concise report with:
+
+1) Findings
+- List duplicated behaviors with file references and a short description of the
+  shared functionality.
+- Explain why these are functionally the same (or nearly the same).
+
+2) Proposed generalizations
+- For each duplication, propose a shared helper and where it should live.
+- Outline any behavior differences that need to be parameterized.
+
+3) Impact and risk
+- Note any behavior risks, test needs, or migration steps.
+
+If the user asked you to implement changes:
+- Make only the minimal edits needed to dedupe behavior.
+- Keep the public API stable unless explicitly requested.
+- Add small comments only when the logic is non-obvious.
+- Summarize what changed and why.
+
+## Output format
+
+- Start with a short summary of the top 1-3 duplications.
+- Then provide a list of findings, ordered by impact.
+- Include a small proposed refactor plan (step-by-step, no more than 5 steps).
+- End with any questions or assumptions.
+
+## Guardrails
+
+- Do not add new features or change behavior beyond deduplication.
+- Avoid deep refactors without explicit request.
+- Preserve existing style conventions and import rules.
+- If a duplication is better left alone (e.g., clarity, single usage), say so.
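As a purely illustrative sketch of what the prompt above means by "duplicated functionality (not just identical code)" — none of the functions below exist in this repository, they are hypothetical — two call sites may parse the same YAML with slightly different guards, and the proposed generalization is a single shared helper both can call:

```python
import yaml


# Hypothetical near-duplicates: same functional intent (parse YAML into a
# mapping), with slightly different guards at each call site.
def read_settings(text: str) -> dict:
    data = yaml.safe_load(text)
    return data if isinstance(data, dict) else {}


def read_settings_or_empty(text: str) -> dict:
    return yaml.safe_load(text) or {}


# Proposed generalization: one helper that preserves both behaviors
# (empty dict for None, empty, or non-mapping input).
def parse_mapping(text: str) -> dict:
    data = yaml.safe_load(text) or {}
    return data if isinstance(data, dict) else {}
```

The compose_farm changes below are a real instance of exactly this pattern: repeated compose-file loading and `services` extraction consolidated into shared helpers.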
diff --git a/src/compose_farm/compose.py b/src/compose_farm/compose.py
index bea2648..a404667 100644
--- a/src/compose_farm/compose.py
+++ b/src/compose_farm/compose.py
@@ -59,6 +59,31 @@ def _load_env(compose_path: Path) -> dict[str, str]:
     return env
 
 
+def parse_compose_data(content: str) -> dict[str, Any]:
+    """Parse compose YAML content into a dict."""
+    compose_data = yaml.safe_load(content) or {}
+    return compose_data if isinstance(compose_data, dict) else {}
+
+
+def load_compose_data(compose_path: Path) -> dict[str, Any]:
+    """Load compose YAML from a file path."""
+    return parse_compose_data(compose_path.read_text())
+
+
+def load_compose_data_for_stack(config: Config, stack: str) -> tuple[Path, dict[str, Any]]:
+    """Load compose YAML for a stack, returning (path, data)."""
+    compose_path = config.get_compose_path(stack)
+    if not compose_path.exists():
+        return compose_path, {}
+    return compose_path, load_compose_data(compose_path)
+
+
+def extract_services(compose_data: dict[str, Any]) -> dict[str, Any]:
+    """Extract services mapping from compose data."""
+    raw_services = compose_data.get("services", {})
+    return raw_services if isinstance(raw_services, dict) else {}
+
+
 def _interpolate(value: str, env: dict[str, str]) -> str:
     """Perform ${VAR} and ${VAR:-default} interpolation."""
 
@@ -185,16 +210,15 @@ def parse_host_volumes(config: Config, stack: str) -> list[str]:
     Returns a list of absolute host paths used as volume mounts.
     Skips named volumes and resolves relative paths.
     """
-    compose_path = config.get_compose_path(stack)
+    compose_path, compose_data = load_compose_data_for_stack(config, stack)
     if not compose_path.exists():
         return []
 
-    env = _load_env(compose_path)
-    compose_data = yaml.safe_load(compose_path.read_text()) or {}
-    raw_services = compose_data.get("services", {})
-    if not isinstance(raw_services, dict):
+    raw_services = extract_services(compose_data)
+    if not raw_services:
         return []
 
+    env = _load_env(compose_path)
     paths: list[str] = []
     compose_dir = compose_path.parent
 
@@ -221,16 +245,15 @@ def parse_devices(config: Config, stack: str) -> list[str]:
 
     Returns a list of host device paths (e.g., /dev/dri, /dev/dri/renderD128).
     """
-    compose_path = config.get_compose_path(stack)
+    compose_path, compose_data = load_compose_data_for_stack(config, stack)
     if not compose_path.exists():
         return []
 
-    env = _load_env(compose_path)
-    compose_data = yaml.safe_load(compose_path.read_text()) or {}
-    raw_services = compose_data.get("services", {})
-    if not isinstance(raw_services, dict):
+    raw_services = extract_services(compose_data)
+    if not raw_services:
         return []
 
+    env = _load_env(compose_path)
     devices: list[str] = []
     for definition in raw_services.values():
         if not isinstance(definition, dict):
@@ -260,11 +283,10 @@ def parse_external_networks(config: Config, stack: str) -> list[str]:
 
     Returns a list of network names marked as external: true.
     """
-    compose_path = config.get_compose_path(stack)
+    compose_path, compose_data = load_compose_data_for_stack(config, stack)
     if not compose_path.exists():
         return []
 
-    compose_data = yaml.safe_load(compose_path.read_text()) or {}
     networks = compose_data.get("networks", {})
     if not isinstance(networks, dict):
         return []
@@ -285,15 +307,14 @@ def load_compose_services(
 
     Returns (services_dict, env_dict, host_address).
""" - compose_path = config.get_compose_path(stack) + compose_path, compose_data = load_compose_data_for_stack(config, stack) if not compose_path.exists(): message = f"[{stack}] Compose file not found: {compose_path}" raise FileNotFoundError(message) env = _load_env(compose_path) - compose_data = yaml.safe_load(compose_path.read_text()) or {} - raw_services = compose_data.get("services", {}) - if not isinstance(raw_services, dict): + raw_services = extract_services(compose_data) + if not raw_services: return {}, env, config.get_host(stack).address return raw_services, env, config.get_host(stack).address diff --git a/src/compose_farm/web/routes/api.py b/src/compose_farm/web/routes/api.py index b590d4a..a9afc19 100644 --- a/src/compose_farm/web/routes/api.py +++ b/src/compose_farm/web/routes/api.py @@ -19,7 +19,7 @@ import yaml from fastapi import APIRouter, Body, HTTPException, Query from fastapi.responses import HTMLResponse -from compose_farm.compose import get_container_name +from compose_farm.compose import extract_services, get_container_name, load_compose_data_for_stack from compose_farm.executor import is_local, run_compose_on_host, ssh_connect_kwargs from compose_farm.glances import fetch_all_host_stats from compose_farm.paths import backup_dir, find_config_path @@ -107,13 +107,11 @@ def _get_compose_services(config: Any, stack: str, hosts: list[str]) -> list[dic Returns one entry per container per host for multi-host stacks. """ - compose_path = config.get_compose_path(stack) - if not compose_path or not compose_path.exists(): + compose_path, compose_data = load_compose_data_for_stack(config, stack) + if not compose_path.exists(): return [] - - compose_data = yaml.safe_load(compose_path.read_text()) or {} - raw_services = compose_data.get("services", {}) - if not isinstance(raw_services, dict): + raw_services = extract_services(compose_data) + if not raw_services: return [] # Project name is the directory name (docker compose default) diff --git a/src/compose_farm/web/routes/pages.py b/src/compose_farm/web/routes/pages.py index b2cf20a..71d7ddc 100644 --- a/src/compose_farm/web/routes/pages.py +++ b/src/compose_farm/web/routes/pages.py @@ -7,7 +7,7 @@ from fastapi import APIRouter, Request from fastapi.responses import HTMLResponse from pydantic import ValidationError -from compose_farm.compose import get_container_name +from compose_farm.compose import extract_services, get_container_name, parse_compose_data from compose_farm.paths import find_config_path from compose_farm.state import ( get_orphaned_stacks, @@ -166,9 +166,9 @@ async def stack_detail(request: Request, name: str) -> HTMLResponse: containers: dict[str, dict[str, str]] = {} shell_host = current_host[0] if isinstance(current_host, list) else current_host if compose_content: - compose_data = yaml.safe_load(compose_content) or {} - raw_services = compose_data.get("services", {}) - if isinstance(raw_services, dict): + compose_data = parse_compose_data(compose_content) + raw_services = extract_services(compose_data) + if raw_services: services = list(raw_services.keys()) # Build container info for shell access (only if stack is running) if shell_host: