Initial commit
1
server/app/__init__.py
Normal file
@@ -0,0 +1 @@
"""Server application package."""
44
server/app/config.py
Normal file
@@ -0,0 +1,44 @@
from __future__ import annotations

import os

from pydantic import BaseSettings


def _ensure_dir(path: str) -> str:
    os.makedirs(path, exist_ok=True)
    return path


class Settings(BaseSettings):
    api_username: str = "backup-admin"
    api_password: str = "SuperSecret123"
    token_lifetime_minutes: int = 120
    database_url: str = "sqlite:///./data/backup.db"
    backup_base: str = "/srv/backup"
    history_retention: int = 20
    log_dir: str = "./logs"
    ssh_timeout_seconds: int = 1800
    ssh_pass_command: str = "sshpass"
    ssh_extra_args: str = "-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
    rsync_extra_args: str = "--info=progress2 --partial"
    compress: bool = True

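    # Any field above can be overridden via the env file configured below
    # (illustrative values, not recommended defaults):
    #   api_password=change-me
    #   backup_base=/mnt/backups
    #   history_retention=30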
    class Config:
        env_file = os.path.join(os.path.dirname(__file__), "..", ".env")
        env_file_encoding = "utf-8"

    @property
    def backup_current(self) -> str:
        return _ensure_dir(os.path.join(self.backup_base, "current"))

    @property
    def backup_history(self) -> str:
        return _ensure_dir(os.path.join(self.backup_base, ".history"))

    @property
    def runtime_logs(self) -> str:
        return _ensure_dir(self.log_dir)


settings = Settings()
18
server/app/database.py
Normal file
@@ -0,0 +1,18 @@
from __future__ import annotations

from sqlmodel import SQLModel, create_engine, Session

from .config import settings

engine = create_engine(
    settings.database_url,
    # SQLite connections are single-thread by default; the background job
    # runner shares this engine with request handlers, so relax the check.
    connect_args={"check_same_thread": False} if settings.database_url.startswith("sqlite") else {},
)


def init_db() -> None:
    SQLModel.metadata.create_all(engine)

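# Callers own the session lifecycle: each call site closes the session
# explicitly (see the try/finally blocks in main.py and job_runner.py).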
def get_session() -> Session:
    return Session(engine)
203
server/app/job_runner.py
Normal file
@@ -0,0 +1,203 @@
from __future__ import annotations

import os
import queue
import shlex
import shutil
import subprocess
import threading
from datetime import datetime
from typing import Iterable, NamedTuple

from sqlmodel import select

from .config import settings
from .database import get_session
from .models import Job, JobStatus, RunHistory


class JobPayload(NamedTuple):
    job_id: int
    ssh_password: str


def _sanitize_segment(value: str) -> str:
    sanitized = "".join(ch if ch.isalnum() or ch in "-_" else "_" for ch in value)
    return sanitized[:64]


def _split_args(raw: str) -> list[str]:
    raw = raw.strip()
    if not raw:
        return []
    return shlex.split(raw)


def _ensure_dirs(*paths: str) -> None:
    for path in paths:
        os.makedirs(path, exist_ok=True)


def _write_log(path: str, lines: Iterable[str]) -> None:
    with open(path, "a", encoding="utf-8", errors="ignore") as fp:
        for line in lines:
            fp.write(line)
            if not line.endswith("\n"):
                fp.write("\n")


def _tail(log_path: str, max_lines: int = 20) -> list[str]:
    try:
        with open(log_path, "r", encoding="utf-8", errors="ignore") as fp:
            return fp.readlines()[-max_lines:]
    except FileNotFoundError:
        return []

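# Retention: keep the newest `keep` runs per profile; older runs have their
# history directories removed and their RunHistory rows deleted.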
def _cleanup_history(profile_name: str, session, keep: int = 20) -> None:
    if keep <= 0:
        return
    sanitized = _sanitize_segment(profile_name)
    history_root = os.path.join(settings.backup_history, sanitized)
    session.flush()
    records = (
        session.exec(
            select(RunHistory)
            .where(RunHistory.profile_name == profile_name)
            .order_by(RunHistory.created_at.desc())
        )
        .all()
    )
    stale = records[keep:]
    for record in stale:
        path = os.path.join(history_root, record.run_id)
        shutil.rmtree(path, ignore_errors=True)
        session.delete(record)
    if stale:
        session.commit()


class JobRunner:
    def __init__(self) -> None:
        self.queue: "queue.Queue[JobPayload]" = queue.Queue()
        self.worker = threading.Thread(target=self._worker, daemon=True)
        self.worker.start()

    def enqueue(self, payload: JobPayload) -> None:
        self.queue.put(payload)

    def _worker(self) -> None:
        while True:
            payload = self.queue.get()
            try:
                self._run(payload)
            finally:
                self.queue.task_done()

    def _run(self, payload: JobPayload) -> None:
        session = get_session()
        job = session.get(Job, payload.job_id)
        if not job:
            session.close()
            return
        log_path = os.path.join(settings.runtime_logs, f"job_{job.id}.log")
        try:
            job.log_path = log_path
            job.status = JobStatus.RUNNING
            job.progress = 0
            job.updated_at = datetime.utcnow()
            session.add(job)
            session.commit()

            history_root = os.path.join(settings.backup_history, _sanitize_segment(job.profile_name), job.run_id)
            _ensure_dirs(history_root)
            target_base = os.path.join(settings.backup_current, _sanitize_segment(job.profile_name))
            _ensure_dirs(target_base)

            folders = [folder for folder in job.folders.split("||") if folder]
            total = max(len(folders), 1)
            completed = 0
            summary_lines: list[str] = []

            for folder in folders:
                dest_folder = os.path.join(target_base, _sanitize_segment(folder))
                _ensure_dirs(dest_folder)
                args = [
                    settings.ssh_pass_command,
                    "-p",
                    payload.ssh_password,
                    "rsync",
                    "-a",
                    "--backup",
                    f"--backup-dir={history_root}",
                ]
                args += _split_args(settings.rsync_extra_args)
                if settings.compress:
                    args.append("-z")
                args += [
                    "-e",
                    f"ssh {settings.ssh_extra_args}",
                    f"{job.ssh_username}@{job.client_host}:{folder}",
                    dest_folder,
                ]
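                # Assembled command shape, per folder (password elided):
                #   sshpass -p *** rsync -a --backup --backup-dir=<history_root>
                #     <rsync_extra_args> [-z] -e "ssh <ssh_extra_args>"
                #     <ssh_username>@<client_host>:<folder> <dest_folder>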

                process = subprocess.Popen(
                    args,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                    text=True,
                    bufsize=1,
                )

                if process.stdout:
                    for line in process.stdout:
                        _write_log(log_path, [line])
                        summary_lines.append(line.strip())

                process.wait()
                if process.returncode != 0:
                    summary = f"Folder {folder} failed (rsync exit {process.returncode})."
                    job.summary = summary
                    job.status = JobStatus.FAILED
                    job.updated_at = datetime.utcnow()
                    session.add(job)
                    session.commit()
                    return

                completed += 1
                job.progress = int((completed / total) * 100)
                job.updated_at = datetime.utcnow()
                session.add(job)
                session.commit()

            job.status = JobStatus.COMPLETED
            job.progress = 100
            job.summary = "; ".join(summary_lines[-5:])
            job.updated_at = datetime.utcnow()
            session.add(job)
            session.commit()

            run_entry = RunHistory(profile_name=job.profile_name, run_id=job.run_id)
            session.add(run_entry)
            session.commit()

            _cleanup_history(job.profile_name, session, keep=settings.history_retention)
        finally:
            session.close()

    def tail_log(self, job: Job, lines: int = 20) -> list[str]:
        if not job.log_path:
            return []
        return [line.strip() for line in _tail(job.log_path, lines)]

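# Lazy module-level singleton: the first call constructs the JobRunner (and
# its daemon worker thread); later calls hit the NameError guard and reuse it.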
def ensure_runner() -> JobRunner:
    global _runner
    try:
        return _runner
    except NameError:
        _runner = JobRunner()
        return _runner


runner = ensure_runner()
207
server/app/main.py
Normal file
@@ -0,0 +1,207 @@
from __future__ import annotations

from datetime import datetime, timedelta
from typing import List

from fastapi import Depends, FastAPI, HTTPException, status
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from sqlmodel import select

from .config import settings
from .database import get_session, init_db
from .job_runner import JobPayload, runner
from .models import Job, JobStatus, Profile, Token, RunHistory
from .schemas import (
    AuthToken,
    BackupLog,
    BackupStartRequest,
    HealthResponse,
    JobStatusResponse,
    ProfileCreate,
    ProfileRead,
    TokenType,
)

app = FastAPI(title="Backup Orchestrator", version="0.1.0")
oauth_scheme = OAuth2PasswordBearer(tokenUrl="/auth/login")


def _collect_folders(text: str) -> List[str]:
    return [entry.strip() for entry in text.split("||") if entry.strip()]


def _profile_to_response(profile: Profile) -> ProfileRead:
    return ProfileRead(
        id=profile.id,  # type: ignore[arg-type]
        name=profile.name,
        folders=_collect_folders(profile.folders),
        description=profile.description,
        schedule_enabled=profile.schedule_enabled,
        created_at=profile.created_at,
        updated_at=profile.updated_at,
    )


def _get_current_user(token: str = Depends(oauth_scheme)):  # noqa: C901
    session = get_session()
    try:
        record = session.exec(select(Token).where(Token.token == token)).first()
        if not record or record.expires_at < datetime.utcnow():
            raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid or expired token")
        return record.user
    finally:
        session.close()


@app.on_event("startup")
def on_startup() -> None:
    init_db()


@app.post("/auth/login", response_model=AuthToken)
def login(form_data: OAuth2PasswordRequestForm = Depends()) -> AuthToken:
    if form_data.username != settings.api_username or form_data.password != settings.api_password:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid credentials")
    session = get_session()
    expires_at = datetime.utcnow() + timedelta(minutes=settings.token_lifetime_minutes)
    # Capture the token string before commit/close: commit expires the
    # instance, and reading attributes on a detached instance would fail.
    token_str = _generate_token()
    session.add(Token(token=token_str, user=form_data.username, expires_at=expires_at))
    session.commit()
    session.close()
    return AuthToken(access_token=token_str, token_type=TokenType.bearer, expires_at=expires_at)

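# Example login request (assuming a local dev server on port 8000; the form
# encoding follows OAuth2PasswordRequestForm):
#   curl -X POST http://localhost:8000/auth/login \
#        -d "username=backup-admin" -d "password=SuperSecret123"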
def _generate_token(length: int = 36) -> str:
    from secrets import token_urlsafe

    return token_urlsafe(length)


@app.get("/profiles", response_model=List[ProfileRead])
def list_profiles(_: str = Depends(_get_current_user)) -> List[ProfileRead]:
    session = get_session()
    try:
        results = session.exec(select(Profile).order_by(Profile.created_at)).all()
        return [_profile_to_response(profile) for profile in results]
    finally:
        session.close()


@app.post("/profiles", response_model=ProfileRead, status_code=status.HTTP_201_CREATED)
def create_profile(data: ProfileCreate, _: str = Depends(_get_current_user)) -> ProfileRead:
    session = get_session()
    try:
        profile = Profile(
            name=data.name,
            folders="||".join(data.folders),
            description=data.description,
            schedule_enabled=data.schedule_enabled,
        )
        session.add(profile)
        session.commit()
        session.refresh(profile)
        return _profile_to_response(profile)
    finally:
        session.close()


@app.put("/profiles/{profile_id}", response_model=ProfileRead)
def update_profile(profile_id: int, data: ProfileCreate, _: str = Depends(_get_current_user)) -> ProfileRead:
    session = get_session()
    try:
        profile = session.get(Profile, profile_id)
        if not profile:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Profile not found")
        profile.name = data.name
        profile.folders = "||".join(data.folders)
        profile.description = data.description
        profile.schedule_enabled = data.schedule_enabled
        profile.updated_at = datetime.utcnow()
        session.add(profile)
        session.commit()
        session.refresh(profile)
        return _profile_to_response(profile)
    finally:
        session.close()


@app.delete("/profiles/{profile_id}", status_code=status.HTTP_204_NO_CONTENT)
def delete_profile(profile_id: int, _: str = Depends(_get_current_user)) -> None:
    session = get_session()
    try:
        profile = session.get(Profile, profile_id)
        if not profile:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Profile not found")
        session.delete(profile)
        session.commit()
    finally:
        session.close()


@app.post("/backup/start")
def start_backup(request: BackupStartRequest, _: str = Depends(_get_current_user)) -> dict:
    session = get_session()
    try:
        profile = session.get(Profile, request.profile_id)
        if not profile:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Profile not found")
        folders = request.folders or _collect_folders(profile.folders)
        if not folders:
            raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="No folders provided")
        job = Job(
            profile_name=profile.name,
            client_host=request.client_host,
            folders="||".join(folders),
            ssh_username=request.ssh_username,
            run_id=_generate_run_id(),
        )
        session.add(job)
        session.commit()
        session.refresh(job)
        runner.enqueue(JobPayload(job_id=job.id, ssh_password=request.ssh_password))
        return {"job_id": job.id}
    finally:
        session.close()

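# Example request body for POST /backup/start (illustrative values; requires a
# bearer token from /auth/login):
#   {"profile_id": 1, "client_host": "192.0.2.10",
#    "ssh_username": "backup", "ssh_password": "...", "folders": ["/etc"]}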
def _generate_run_id() -> str:
    from uuid import uuid4

    return uuid4().hex


@app.get("/backup/status/{job_id}", response_model=JobStatusResponse)
def job_status(job_id: int, _: str = Depends(_get_current_user)) -> JobStatusResponse:
    session = get_session()
    try:
        job = session.get(Job, job_id)
        if not job:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Job not found")
        return JobStatusResponse(
            job_id=job.id,  # type: ignore[arg-type]
            status=job.status.value,
            progress=job.progress,
            summary=job.summary,
            last_log_lines=runner.tail_log(job),
            created_at=job.created_at,
            updated_at=job.updated_at,
        )
    finally:
        session.close()


@app.get("/backup/log/{job_id}", response_model=BackupLog)
def job_log(job_id: int, _: str = Depends(_get_current_user)) -> BackupLog:
    session = get_session()
    try:
        job = session.get(Job, job_id)
        if not job:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Job not found")
        return BackupLog(job_id=job.id, lines=runner.tail_log(job, lines=200))
    finally:
        session.close()


@app.get("/health", response_model=HealthResponse)
def health() -> HealthResponse:
    return HealthResponse(status="ok", version=app.version)
52
server/app/models.py
Normal file
@@ -0,0 +1,52 @@
from __future__ import annotations

from datetime import datetime
from enum import Enum
from typing import Optional

from sqlmodel import SQLModel, Field


class JobStatus(str, Enum):
    QUEUED = "QUEUED"
    RUNNING = "RUNNING"
    COMPLETED = "COMPLETED"
    FAILED = "FAILED"


class Job(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    profile_name: str
    client_host: str
    folders: str  # "||"-separated list; split via _collect_folders in main.py
    ssh_username: str
    run_id: str
    # Python-side default (not just a column default) so new Job instances
    # created in main.py start as QUEUED before the first commit.
    status: JobStatus = Field(default=JobStatus.QUEUED)
    progress: int = 0
    summary: Optional[str] = None
    log_path: Optional[str] = None
    created_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow)


class Profile(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    name: str
    folders: str
    schedule_enabled: bool = Field(default=False)
    description: Optional[str] = None
    created_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow)


class Token(SQLModel, table=True):
    token: str = Field(primary_key=True)
    user: str
    expires_at: datetime


class RunHistory(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    profile_name: str
    run_id: str
    created_at: datetime = Field(default_factory=datetime.utcnow)
62
server/app/schemas.py
Normal file
@@ -0,0 +1,62 @@
from __future__ import annotations

from datetime import datetime
from enum import Enum
from typing import List, Optional

from pydantic import BaseModel, Field, constr


class TokenType(str, Enum):
    bearer = "bearer"


class AuthToken(BaseModel):
    access_token: str
    token_type: TokenType
    expires_at: datetime


class ProfileBase(BaseModel):
    name: constr(min_length=1)
    folders: List[str] = Field(..., min_items=1)
    description: Optional[str] = None
    schedule_enabled: bool = False


class ProfileCreate(ProfileBase):
    pass


class ProfileRead(ProfileBase):
    id: int
    created_at: datetime
    updated_at: datetime


class BackupStartRequest(BaseModel):
    profile_id: int
    client_host: str
    ssh_username: str
    ssh_password: str
    folders: Optional[List[str]] = None


class JobStatusResponse(BaseModel):
    job_id: int
    status: str
    progress: int
    summary: Optional[str]
    last_log_lines: List[str] = []
    created_at: datetime
    updated_at: datetime


class BackupLog(BaseModel):
    job_id: int
    lines: List[str]


class HealthResponse(BaseModel):
    status: str
    version: str