feat: implement full backend + frontend server detail, settings, and create server pages
Backend: - Complete FastAPI backend with 42+ REST endpoints (auth, servers, config, players, bans, missions, mods, games, system) - Game adapter architecture with Arma 3 as first-class adapter - WebSocket real-time events for status, metrics, logs, players - Background thread system (process monitor, metrics, log tail, RCon poller) - Fernet encryption for sensitive config fields at rest - JWT auth with admin/viewer roles, bcrypt password hashing - SQLite with WAL mode, parameterized queries, migration system - APScheduler cleanup jobs for logs, metrics, events Frontend: - Server Detail page with 7 tabs (overview, config, players, bans, missions, mods, logs) - Settings page with password change and admin user management - Create Server wizard (4-step; known bug: silent validation failure) - New hooks: useServerDetail, useAuth, useGames - New components: ServerHeader, ConfigEditor, PlayerTable, BanTable, MissionList, ModList, LogViewer, PasswordChange, UserManager - WebSocket onEvent callback for real-time log accumulation - 120 unit tests passing (Vitest + React Testing Library) Docs: - Added .gitignore, CLAUDE.md, README.md - Updated FRONTEND.md, ARCHITECTURE.md with current implementation state - Added .env.example for backend configuration Known issues: - Create Server form: "Next" buttons don't validate before advancing, causing silent submit failure when fields are invalid - Config sub-tabs need UX redesign for non-technical users
This commit is contained in:
1
backend/core/dal/__init__.py
Normal file
1
backend/core/dal/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Data Access Layer repositories."""
|
||||
52
backend/core/dal/ban_repository.py
Normal file
52
backend/core/dal/ban_repository.py
Normal file
@@ -0,0 +1,52 @@
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from core.dal.base_repository import BaseRepository
|
||||
|
||||
|
||||
class BanRepository(BaseRepository):
    """Repository for the `bans` table."""

    def get_all(self, server_id: int, active_only: bool = True) -> list[dict]:
        """Return bans for a server, newest first; only active ones by default."""
        sql = "SELECT * FROM bans WHERE server_id = :sid"
        if active_only:
            sql += " AND is_active = 1"
        sql += " ORDER BY banned_at DESC"
        return self._fetchall(sql, {"sid": server_id})

    def create(
        self,
        server_id: int,
        guid: str | None,
        name: str | None,
        reason: str | None,
        banned_by: str,
        expires_at: str | None = None,
        game_data: dict | None = None,
    ) -> int:
        """Insert a ban row and return its new id.

        `game_data` is serialized to JSON (empty object when omitted).
        """
        params = {
            "sid": server_id,
            "guid": guid,
            "name": name,
            "reason": reason,
            "by": banned_by,
            "exp": expires_at,
            "gd": json.dumps(game_data or {}),
        }
        return self._lastrowid(
            """
            INSERT INTO bans (server_id, guid, name, reason, banned_by, expires_at, game_data)
            VALUES (:sid, :guid, :name, :reason, :by, :exp, :gd)
            """,
            params,
        )

    def deactivate(self, ban_id: int) -> None:
        """Soft-delete: mark the ban inactive instead of removing the row."""
        self._execute("UPDATE bans SET is_active = 0 WHERE id = :id", {"id": ban_id})

    def get_by_id(self, ban_id: int) -> dict | None:
        """Return one ban row as a dict, or None when the id is unknown."""
        return self._fetchone("SELECT * FROM bans WHERE id = :id", {"id": ban_id})
|
||||
27
backend/core/dal/base_repository.py
Normal file
27
backend/core/dal/base_repository.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""Base repository with common DB helpers."""
|
||||
from __future__ import annotations
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.engine import Connection
|
||||
|
||||
|
||||
class BaseRepository:
    """Shared SQL helpers for all DAL repositories.

    Wraps a SQLAlchemy Connection; every query goes through `text()` with
    named bind parameters, so no values are ever interpolated into SQL.
    """

    def __init__(self, db: Connection):
        self._db = db

    def _execute(self, query: str, params: dict | None = None):
        """Execute a statement and return the raw SQLAlchemy result."""
        return self._db.execute(text(query), params or {})

    def _fetchone(self, query: str, params: dict | None = None) -> dict | None:
        """Return the first row as a plain dict, or None when there are no rows."""
        row = self._execute(query, params).fetchone()
        return dict(row._mapping) if row is not None else None

    def _fetchall(self, query: str, params: dict | None = None) -> list[dict]:
        """Return every row as a plain dict."""
        return [dict(r._mapping) for r in self._execute(query, params).fetchall()]

    def _lastrowid(self, query: str, params: dict | None = None) -> int:
        """Execute an INSERT and return the id of the inserted row."""
        return self._execute(query, params).lastrowid
|
||||
163
backend/core/dal/config_repository.py
Normal file
163
backend/core/dal/config_repository.py
Normal file
@@ -0,0 +1,163 @@
|
||||
"""
|
||||
Manages the game_configs table.
|
||||
Handles Fernet encryption/decryption of sensitive fields transparently.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from core.dal.base_repository import BaseRepository
|
||||
from core.utils.crypto import decrypt, encrypt, is_encrypted
|
||||
|
||||
|
||||
class ConfigRepository(BaseRepository):
    """Repository for the `game_configs` table.

    Sensitive fields are Fernet-encrypted before storage and decrypted
    transparently on read.
    """

    def _encrypt_sensitive(
        self, config: dict, sensitive_fields: list[str]
    ) -> dict:
        """Return a new dict with the listed fields encrypted.

        Empty/None values and already-encrypted values are left untouched,
        so the operation is idempotent.
        """
        result = dict(config)
        for field in sensitive_fields:
            value = result.get(field)
            if value and not is_encrypted(str(value)):
                result[field] = encrypt(str(value))
        return result

    def _decrypt_sensitive(
        self, config: dict, sensitive_fields: list[str]
    ) -> dict:
        """Return a new dict with the listed fields decrypted.

        Fix: skip empty/None values, mirroring `_encrypt_sensitive` — the
        previous version called is_encrypted(str(None)) (i.e. on "None")
        for null fields.
        """
        result = dict(config)
        for field in sensitive_fields:
            value = result.get(field)
            if value and is_encrypted(str(value)):
                result[field] = decrypt(str(value))
        return result

    def get_section(
        self,
        server_id: int,
        section: str,
        sensitive_fields: list[str] | None = None,
    ) -> dict | None:
        """Get one config section, or None if it does not exist.

        Decrypts `sensitive_fields` when given, and attaches a `_meta` key
        carrying config_version/schema_version for optimistic locking.
        """
        row = self._fetchone(
            "SELECT * FROM game_configs WHERE server_id = :sid AND section = :sec",
            {"sid": server_id, "sec": section},
        )
        if row is None:
            return None
        config = json.loads(row["config_json"])
        if sensitive_fields:
            config = self._decrypt_sensitive(config, sensitive_fields)
        config["_meta"] = {
            "config_version": row["config_version"],
            "schema_version": row["schema_version"],
        }
        return config

    def get_all_sections(
        self,
        server_id: int,
        sensitive_fields_by_section: dict[str, list[str]] | None = None,
    ) -> dict[str, dict]:
        """Get all config sections for a server, keyed by section name.

        Each section is decrypted per its entry in
        `sensitive_fields_by_section` and gets the same `_meta` block as
        `get_section`.
        """
        rows = self._fetchall(
            "SELECT * FROM game_configs WHERE server_id = :sid ORDER BY section",
            {"sid": server_id},
        )
        result: dict[str, dict] = {}
        for row in rows:
            config = json.loads(row["config_json"])
            sf = (sensitive_fields_by_section or {}).get(row["section"], [])
            if sf:
                config = self._decrypt_sensitive(config, sf)
            config["_meta"] = {
                "config_version": row["config_version"],
                "schema_version": row["schema_version"],
            }
            result[row["section"]] = config
        return result

    def upsert_section(
        self,
        server_id: int,
        game_type: str,
        section: str,
        config_data: dict,
        schema_version: str,
        sensitive_fields: list[str] | None = None,
        expected_config_version: int | None = None,
    ) -> int:
        """Insert or update a config section; returns the new config_version.

        If `expected_config_version` is provided it is compared against the
        stored version (optimistic locking); a mismatch raises
        ValueError("CONFIG_VERSION_CONFLICT:<current>") which the caller
        maps to HTTP 409.
        """
        now = datetime.now(timezone.utc).isoformat()

        # `_meta` is attached by the getters for the client's benefit only —
        # never persist it back into config_json.
        data_to_store = {k: v for k, v in config_data.items() if k != "_meta"}

        if sensitive_fields:
            data_to_store = self._encrypt_sensitive(data_to_store, sensitive_fields)

        existing = self._fetchone(
            "SELECT id, config_version FROM game_configs WHERE server_id = :sid AND section = :sec",
            {"sid": server_id, "sec": section},
        )

        if existing is None:
            # First write for this (server, section): version starts at 1.
            self._execute(
                """
                INSERT INTO game_configs
                (server_id, game_type, section, config_json, config_version, schema_version, updated_at)
                VALUES (:sid, :gt, :sec, :json, 1, :sv, :now)
                """,
                {
                    "sid": server_id, "gt": game_type, "sec": section,
                    "json": json.dumps(data_to_store), "sv": schema_version, "now": now,
                },
            )
            return 1

        current_version = existing["config_version"]
        if expected_config_version is not None and expected_config_version != current_version:
            raise ValueError(f"CONFIG_VERSION_CONFLICT:{current_version}")
        new_version = current_version + 1
        self._execute(
            """
            UPDATE game_configs
            SET config_json = :json, config_version = :cv,
                schema_version = :sv, updated_at = :now
            WHERE server_id = :sid AND section = :sec
            """,
            {
                "json": json.dumps(data_to_store),
                "cv": new_version,
                "sv": schema_version,
                "now": now,
                "sid": server_id,
                "sec": section,
            },
        )
        return new_version

    def delete_sections(self, server_id: int) -> None:
        """Remove every config section belonging to a server."""
        self._execute(
            "DELETE FROM game_configs WHERE server_id = :sid",
            {"sid": server_id},
        )

    def get_raw_sections(self, server_id: int) -> dict[str, dict]:
        """Get all sections without decryption — for config file generation."""
        rows = self._fetchall(
            "SELECT section, config_json FROM game_configs WHERE server_id = :sid",
            {"sid": server_id},
        )
        return {row["section"]: json.loads(row["config_json"]) for row in rows}
|
||||
62
backend/core/dal/event_repository.py
Normal file
62
backend/core/dal/event_repository.py
Normal file
@@ -0,0 +1,62 @@
|
||||
import json
|
||||
from core.dal.base_repository import BaseRepository
|
||||
|
||||
|
||||
class EventRepository(BaseRepository):
    """Repository for the `server_events` audit table."""

    def insert(
        self,
        server_id: int,
        event_type: str,
        actor: str = "system",
        detail: dict | None = None,
    ) -> None:
        """Record one event; `detail` is stored as JSON (NULL when absent/empty)."""
        payload = {
            "sid": server_id,
            "et": event_type,
            "actor": actor,
            "detail": json.dumps(detail) if detail else None,
        }
        self._execute(
            """
            INSERT INTO server_events (server_id, event_type, actor, detail)
            VALUES (:sid, :et, :actor, :detail)
            """,
            payload,
        )

    def get_events(
        self,
        server_id: int,
        limit: int = 50,
        offset: int = 0,
        event_type: str | None = None,
    ) -> list[dict]:
        """Return a page of events for a server, newest first.

        When `event_type` is truthy, only events of that type are returned.
        """
        params: dict = {"sid": server_id, "limit": limit, "offset": offset}
        if not event_type:
            return self._fetchall(
                """
                SELECT * FROM server_events WHERE server_id = :sid
                ORDER BY created_at DESC LIMIT :limit OFFSET :offset
                """,
                params,
            )
        params["et"] = event_type
        return self._fetchall(
            """
            SELECT * FROM server_events
            WHERE server_id = :sid AND event_type = :et
            ORDER BY created_at DESC LIMIT :limit OFFSET :offset
            """,
            params,
        )

    def get_recent_all_servers(self, limit: int = 20) -> list[dict]:
        """Most recent events across every server (dashboard feed)."""
        return self._fetchall(
            "SELECT * FROM server_events ORDER BY created_at DESC LIMIT :limit",
            {"limit": limit},
        )

    def cleanup_old(self, retention_days: int) -> None:
        """Delete events older than `retention_days` via SQLite date arithmetic."""
        self._execute(
            "DELETE FROM server_events WHERE created_at < datetime('now', :delta)",
            {"delta": f"-{retention_days} days"},
        )
|
||||
61
backend/core/dal/log_repository.py
Normal file
61
backend/core/dal/log_repository.py
Normal file
@@ -0,0 +1,61 @@
|
||||
from core.dal.base_repository import BaseRepository
|
||||
|
||||
|
||||
class LogRepository(BaseRepository):
    """Repository for the `logs` table (captured server log lines)."""

    def insert(self, server_id: int, entry: dict) -> None:
        """Store one log line; `entry` keys: timestamp, level, message."""
        params = {
            "sid": server_id,
            "ts": entry.get("timestamp", ""),
            "level": entry.get("level", "info"),
            "msg": entry.get("message", ""),
        }
        self._execute(
            """
            INSERT INTO logs (server_id, timestamp, level, message)
            VALUES (:sid, :ts, :level, :msg)
            """,
            params,
        )

    def query(
        self,
        server_id: int,
        limit: int = 200,
        offset: int = 0,
        level: str | None = None,
        since: str | None = None,
        search: str | None = None,
    ) -> tuple[int, list[dict]]:
        """Return (total_matching, page_of_rows), newest first.

        Optional filters: exact `level`, `timestamp >= since`, and a
        substring `search` over the message (LIKE with % wildcards).
        """
        filters = ["server_id = :sid"]
        params: dict = {"sid": server_id, "limit": limit, "offset": offset}
        if level:
            filters.append("level = :level")
            params["level"] = level
        if since:
            filters.append("timestamp >= :since")
            params["since"] = since
        if search:
            filters.append("message LIKE :search")
            params["search"] = f"%{search}%"

        where = " AND ".join(filters)
        # Two queries share one WHERE clause: a COUNT for pagination, then the page.
        count_row = self._fetchone(f"SELECT COUNT(*) as cnt FROM logs WHERE {where}", params)
        total = count_row["cnt"] if count_row else 0
        page = self._fetchall(
            f"SELECT * FROM logs WHERE {where} ORDER BY timestamp DESC LIMIT :limit OFFSET :offset",
            params,
        )
        return total, page

    def clear(self, server_id: int) -> int:
        """Delete all log rows for a server; returns the number removed."""
        return self._execute(
            "DELETE FROM logs WHERE server_id = :sid", {"sid": server_id}
        ).rowcount

    def cleanup_old(self, retention_days: int) -> None:
        """Purge rows older than `retention_days`."""
        self._execute(
            "DELETE FROM logs WHERE created_at < datetime('now', :delta)",
            {"delta": f"-{retention_days} days"},
        )
|
||||
53
backend/core/dal/metrics_repository.py
Normal file
53
backend/core/dal/metrics_repository.py
Normal file
@@ -0,0 +1,53 @@
|
||||
from core.dal.base_repository import BaseRepository
|
||||
|
||||
|
||||
class MetricsRepository(BaseRepository):
    """Repository for the `metrics` time-series table."""

    def insert(
        self, server_id: int, cpu_percent: float, ram_mb: float = 0.0, player_count: int = 0
    ) -> None:
        """Append one metrics sample for a server."""
        self._execute(
            """
            INSERT INTO metrics (server_id, cpu_percent, ram_mb, player_count)
            VALUES (:sid, :cpu, :ram, :pc)
            """,
            {"sid": server_id, "cpu": cpu_percent, "ram": ram_mb, "pc": player_count},
        )

    def query(
        self,
        server_id: int,
        from_ts: str | None = None,
        to_ts: str | None = None,
    ) -> list[dict]:
        """Samples for a server in chronological order, optionally bounded
        by an inclusive [from_ts, to_ts] timestamp window."""
        filters = ["server_id = :sid"]
        params: dict = {"sid": server_id}
        if from_ts:
            filters.append("timestamp >= :from_ts")
            params["from_ts"] = from_ts
        if to_ts:
            filters.append("timestamp <= :to_ts")
            params["to_ts"] = to_ts
        return self._fetchall(
            f"SELECT * FROM metrics WHERE {' AND '.join(filters)} ORDER BY timestamp ASC",
            params,
        )

    def get_latest(self, server_id: int) -> dict | None:
        """Most recent sample for a server, or None when there is none."""
        return self._fetchone(
            "SELECT * FROM metrics WHERE server_id = :sid ORDER BY timestamp DESC LIMIT 1",
            {"sid": server_id},
        )

    def cleanup_old(self, retention_days: int = 1, server_id: int | None = None) -> None:
        """Delete samples older than `retention_days`, optionally for one server only."""
        params: dict = {"delta": f"-{retention_days} days"}
        scope = ""
        if server_id is not None:
            scope = "server_id = :sid AND "
            params["sid"] = server_id
        self._execute(
            f"DELETE FROM metrics WHERE {scope}timestamp < datetime('now', :delta)",
            params,
        )
|
||||
70
backend/core/dal/player_repository.py
Normal file
70
backend/core/dal/player_repository.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from core.dal.base_repository import BaseRepository
|
||||
|
||||
|
||||
class PlayerRepository(BaseRepository):
    """Repository for current players and the player_history table."""

    def get_all(self, server_id: int) -> list[dict]:
        """All currently-tracked players for a server, ordered by slot."""
        return self._fetchall(
            "SELECT * FROM players WHERE server_id = :sid ORDER BY slot_id",
            {"sid": server_id},
        )

    def count(self, server_id: int) -> int:
        """Number of currently-tracked players for a server."""
        row = self._fetchone(
            "SELECT COUNT(*) as cnt FROM players WHERE server_id = :sid",
            {"sid": server_id},
        )
        return row["cnt"] if row else 0

    def upsert(self, server_id: int, player: dict) -> None:
        """Insert or refresh one player, keyed on (server_id, slot_id).

        NOTE(review): the conflict branch refreshes name/guid/ping/game_data
        but not `ip` — presumably the IP is fixed for a session; confirm.
        """
        now = datetime.now(timezone.utc).isoformat()
        params = {
            "sid": server_id,
            "slot": str(player.get("slot_id", "")),
            "name": player.get("name", ""),
            "guid": player.get("guid"),
            "ip": player.get("ip"),
            "ping": player.get("ping"),
            "gd": json.dumps(player.get("game_data", {})),
            "now": now,
        }
        self._execute(
            """
            INSERT INTO players (server_id, slot_id, name, guid, ip, ping, game_data, joined_at, updated_at)
            VALUES (:sid, :slot, :name, :guid, :ip, :ping, :gd, :now, :now)
            ON CONFLICT(server_id, slot_id) DO UPDATE SET
                name = excluded.name,
                guid = excluded.guid,
                ping = excluded.ping,
                game_data = excluded.game_data,
                updated_at = excluded.updated_at
            """,
            params,
        )

    def clear(self, server_id: int) -> None:
        """Drop every current-player row for a server."""
        self._execute("DELETE FROM players WHERE server_id = :sid", {"sid": server_id})

    def get_history(
        self,
        server_id: int,
        limit: int = 50,
        offset: int = 0,
        search: str | None = None,
    ) -> tuple[int, list[dict]]:
        """Return (total, page) of player_history rows, most recently left first.

        `search` does a substring match on the player name.
        """
        filters = ["server_id = :sid"]
        params: dict = {"sid": server_id, "limit": limit, "offset": offset}
        if search:
            filters.append("name LIKE :search")
            params["search"] = f"%{search}%"
        where = " AND ".join(filters)
        count_row = self._fetchone(
            f"SELECT COUNT(*) as cnt FROM player_history WHERE {where}", params
        )
        total = count_row["cnt"] if count_row else 0
        page = self._fetchall(
            f"SELECT * FROM player_history WHERE {where} ORDER BY left_at DESC LIMIT :limit OFFSET :offset",
            params,
        )
        return total, page
|
||||
111
backend/core/dal/server_repository.py
Normal file
111
backend/core/dal/server_repository.py
Normal file
@@ -0,0 +1,111 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from core.dal.base_repository import BaseRepository
|
||||
|
||||
|
||||
class ServerRepository(BaseRepository):
    """Repository for the `servers` table."""

    def get_all(self, game_type: str | None = None) -> list[dict]:
        """All servers ordered by name, optionally filtered by game type."""
        if game_type:
            return self._fetchall(
                "SELECT * FROM servers WHERE game_type = :gt ORDER BY name",
                {"gt": game_type},
            )
        return self._fetchall("SELECT * FROM servers ORDER BY name")

    def get_by_id(self, server_id: int) -> dict | None:
        """One server row as a dict, or None when the id is unknown."""
        return self._fetchone("SELECT * FROM servers WHERE id = :id", {"id": server_id})

    def create(
        self,
        name: str,
        game_type: str,
        exe_path: str,
        game_port: int,
        rcon_port: int | None = None,
        description: str | None = None,
        auto_restart: bool = False,
        max_restarts: int = 3,
    ) -> int:
        """Insert a server row and return its new id."""
        return self._lastrowid(
            """
            INSERT INTO servers
            (name, description, game_type, exe_path, game_port, rcon_port,
             auto_restart, max_restarts)
            VALUES
            (:name, :desc, :game_type, :exe, :gp, :rp, :ar, :mr)
            """,
            {
                "name": name,
                "desc": description,
                "game_type": game_type,
                "exe": exe_path,
                "gp": game_port,
                "rp": rcon_port,
                # SQLite has no native bool; store as 0/1.
                "ar": int(auto_restart),
                "mr": max_restarts,
            },
        )

    def update(self, server_id: int, **fields) -> None:
        """Partially update server columns; always refreshes updated_at.

        Column names arrive via **fields and are interpolated into the SQL
        text (values stay bound parameters), so each key is validated as a
        plain Python identifier first — this rules out SQL injection through
        dynamic field names.
        """
        if not fields:
            return
        for key in fields:
            if not key.isidentifier():
                raise ValueError(f"invalid column name: {key!r}")
        fields["updated_at"] = datetime.now(timezone.utc).isoformat()
        fields["id"] = server_id
        set_clause = ", ".join(f"{k} = :{k}" for k in fields if k != "id")
        self._execute(f"UPDATE servers SET {set_clause} WHERE id = :id", fields)

    def update_status(
        self,
        server_id: int,
        status: str,
        pid: int | None = None,
        started_at: str | None = None,
        stopped_at: str | None = None,
    ) -> None:
        """Set the lifecycle status of a server.

        Note: pid/started_at/stopped_at are always written, so omitting them
        clears the stored values to NULL.
        """
        now = datetime.now(timezone.utc).isoformat()
        self._execute(
            """
            UPDATE servers
            SET status = :status, pid = :pid, started_at = :sa,
                stopped_at = :sta, updated_at = :now
            WHERE id = :id
            """,
            {
                "status": status,
                "pid": pid,
                "sa": started_at,
                "sta": stopped_at,
                "now": now,
                "id": server_id,
            },
        )

    def delete(self, server_id: int) -> None:
        """Delete a server row."""
        self._execute("DELETE FROM servers WHERE id = :id", {"id": server_id})

    def get_running(self) -> list[dict]:
        """Servers currently in a live state ('running' or 'starting')."""
        return self._fetchall(
            "SELECT * FROM servers WHERE status IN ('running', 'starting')"
        )

    def increment_restart_count(self, server_id: int) -> None:
        """Bump restart_count and stamp last_restart_at (auto-restart bookkeeping)."""
        now = datetime.now(timezone.utc).isoformat()
        self._execute(
            """
            UPDATE servers
            SET restart_count = restart_count + 1,
                last_restart_at = :now,
                updated_at = :now
            WHERE id = :id
            """,
            {"now": now, "id": server_id},
        )

    def reset_restart_count(self, server_id: int) -> None:
        """Reset restart_count to zero."""
        self._execute(
            "UPDATE servers SET restart_count = 0 WHERE id = :id",
            {"id": server_id},
        )
|
||||
Reference in New Issue
Block a user