technitium_zone_exporter/src/helpers.py
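"""Helper routines for the Technitium zone exporter.

Writes zone exports into the local git repository, commits and optionally
pushes them, and decides whether a filesystem trigger maps to a single zone
or to a full export of all zones.
"""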
import logging
import os
import subprocess
from pathlib import Path
from datetime import datetime, UTC

from config import *
from git import run_git_cmd, ensure_git_repo
from technitium import list_zones, export_zone


def write_zone_export(zone_name: str, content: str) -> Path:
    """Write an exported zone to the git repo directory and return the file path."""
    dest_dir = Path(GIT_REPO_DIR)
    dest_dir.mkdir(parents=True, exist_ok=True)
    # Zone names should never contain "/", but sanitize defensively.
    safe_name = zone_name.replace("/", "_")
    out_path = dest_dir / f"db.{safe_name}"
    logging.info(f"Writing export for zone {zone_name} -> {out_path}")
    with open(out_path, "w", encoding="utf-8") as f:
        f.write(content)
    return out_path


def commit_and_push(changed_files, trigger_path):
    """Stage, commit, and optionally push the exported zone files."""
    # Stage all changes in the repository working tree.
    try:
        run_git_cmd(["add", "-A"])
    except subprocess.CalledProcessError as e:
        logging.exception(f"git add failed: {e}")
        return

    # git diff --cached --quiet exits 0 when nothing is staged, non-zero otherwise.
    try:
        subprocess.run(["git", "-C", GIT_REPO_DIR, "diff", "--cached", "--quiet"], check=True)
        logging.info("No changes to commit (nothing staged).")
        return
    except subprocess.CalledProcessError:
        # Non-zero exit code means there are staged changes to commit.
        pass

    changed_list_text = "\n".join(str(p) for p in changed_files)
    ts = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%SZ")
    commit_msg = (
        f"Technitium zone export: {ts}\n\n"
        f"Trigger: {trigger_path}\n\n"
        f"Changed files:\n{changed_list_text}\n"
    )

    # Set the commit author identity from the configuration.
    env = os.environ.copy()
    env["GIT_AUTHOR_NAME"] = GIT_AUTHOR_NAME
    env["GIT_AUTHOR_EMAIL"] = GIT_AUTHOR_EMAIL

    try:
        # Pass env so the author identity actually takes effect; this assumes
        # run_git_cmd forwards keyword arguments on to subprocess.run.
        run_git_cmd(["commit", "-m", commit_msg], check=True, env=env)
        logging.info("Committed changes to git.")
    except subprocess.CalledProcessError as e:
        logging.exception(f"git commit failed: {e}")
        return

    if GIT_PUSH:
        try:
            run_git_cmd(["push"], check=True)
            logging.info("Pushed commit to remote.")
        except subprocess.CalledProcessError as e:
            logging.exception(f"git push failed: {e}")


def extract_domain_from_path(path: str) -> str | None:
    """Try to pull a domain name out of a changed file's name."""
    name = Path(path).name
    # Drop only a trailing ".zone" extension; rstrip() would strip a character
    # set rather than a suffix.
    name_no_ext = name.removesuffix(".zone")
    match = DOMAIN_FRAGMENT_RE.search(name_no_ext)
    if match:
        return match.group(0)
    return None
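
# Illustrative behaviour, assuming DOMAIN_FRAGMENT_RE (defined in config) matches
# dotted domain names; the example paths below are hypothetical:
#   extract_domain_from_path("/zones/example.com.zone")   -> "example.com"
#   extract_domain_from_path("/zones/unrelated-file.txt") -> None (nothing matches)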


def export_single_zone(trigger_path: str) -> list[Path]:
    """Export the zone matching the trigger path, falling back to a full export."""
    logging.info(f"Starting export of single zone for trigger path {trigger_path}")
    ensure_git_repo()
    domain = extract_domain_from_path(trigger_path)
    try:
        zones = list_zones()
    except Exception as e:
        logging.exception(f"Failed to list zones from API; falling back to full export: {e}")
        return export_all_zones(trigger_path)

    if domain is None:
        logging.info(f"No domain found for trigger path {trigger_path}; falling back to full export")
        return export_all_zones(trigger_path)

    for zone in zones:
        zone_name = zone.get("name")
        if zone_name == domain:
            logging.info(f"Single matching zone found: {zone_name}")
            try:
                content = export_zone(zone)
                out = write_zone_export(zone_name, content)
                commit_and_push([out], trigger_path)
                return [out]
            except Exception as e:
                logging.exception(f"Failed to export zone {zone_name}; falling back to full export: {e}")
                return export_all_zones(trigger_path)

    logging.info(f"No matching zone found for {domain}; falling back to full export")
    return export_all_zones(trigger_path)


def export_all_zones(trigger_path: str = "filesystem-change") -> list[Path]:
    """Export every zone the API reports and commit whatever was written."""
    logging.info(f"Starting export of all zones (trigger={trigger_path})")
    ensure_git_repo()
    try:
        zones = list_zones()
    except Exception as e:
        logging.exception(f"Failed to list zones from API: {e}")
        return []

    written_files = []
    for z in zones:
        # Each zone is a dict; the "name" key carries the zone's domain name
        # (adapt if your API result shape differs).
        zone_name = z.get("name")
        try:
            content = export_zone(z)
            out = write_zone_export(zone_name, content)
            written_files.append(out)
        except Exception as e:
            logging.exception(f"Failed to export zone {zone_name}: {e}")

    if written_files:
        commit_and_push(written_files, trigger_path)
    else:
        logging.info("No zone files were written; skipping commit.")
    return written_files
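

# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal way to exercise these helpers: run a one-off full export from the
# command line. This __main__ guard is an assumption added for illustration;
# in the real service the exports are presumably triggered by a filesystem watcher.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    export_all_zones(trigger_path="manual-run")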