"""GET/POST /api/faces/* — face recognition endpoints."""
|
|
from __future__ import annotations
|
|
|
|
import logging
|
|
|
|
import numpy as np
|
|
from fastapi import APIRouter, Depends, HTTPException, Query
|
|
from fastapi.responses import Response
|
|
from pydantic import BaseModel
|
|
|
|
from face_service import _SIMILARITY_THRESHOLD
|
|
from web.auth import require_admin
|
|
|
|
router = APIRouter()
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
def _auto_link_for_person(person_id: int, face_db_module, ungrouped: list[dict]) -> int:
    """Link ungrouped unidentified detections whose embedding matches *person_id*.

    Cosine similarity is computed against all of the person's reference
    embeddings; a detection is linked when its best score reaches
    ``_SIMILARITY_THRESHOLD``. Returns the number of detections linked.
    """
    references = face_db_module.get_person_embeddings(person_id)
    if not references or not ungrouped:
        return 0
    # Row-normalize the reference matrix once up front; the epsilon guards
    # against a zero-norm embedding causing a division by zero.
    matrix = np.stack(references)
    matrix = matrix / (np.linalg.norm(matrix, axis=1, keepdims=True) + 1e-8)
    linked = 0
    for candidate in ungrouped:
        emb = candidate["embedding"]
        unit = emb / (np.linalg.norm(emb) + 1e-8)
        best_score = float(np.max(matrix @ unit))
        if best_score >= _SIMILARITY_THRESHOLD:
            face_db_module.link_detection_to_person(candidate["id"], person_id)
            linked += 1
    return linked
|
|
|
|
|
|
@router.get("/persons")
async def list_persons(_: dict = Depends(require_admin)):
    """Return every known person as a JSON object with a ``persons`` array."""
    import face_db

    persons = face_db.list_persons()
    return {"persons": persons}
|
|
|
|
|
|
@router.get("/persons/check")
async def check_person_name(name: str, _: dict = Depends(require_admin)):
    """Report whether *name* is already taken by a person (case-insensitive)."""
    import face_db

    taken = face_db.person_name_exists(name)
    return {"exists": taken}
|
|
|
|
|
|
@router.get("/persons/search")
async def search_persons(
    q: str = "",
    limit: int = Query(10, ge=1, le=50),
    _: dict = Depends(require_admin),
):
    """Search persons whose name or any alias contains *q* (case-insensitive).

    A blank query matches everyone; at most *limit* entries are returned.
    """
    import face_db

    persons = face_db.list_persons()
    needle = q.strip().lower()

    def matches(person: dict) -> bool:
        # A hit on either the canonical name or any alias counts.
        if needle in person["name"].lower():
            return True
        return any(needle in entry["alias"].lower() for entry in person["aliases"])

    if needle:
        persons = [p for p in persons if matches(p)]
    return {"persons": persons[:limit]}
|
|
|
|
|
|
@router.get("/persons/{person_id}")
async def get_person(person_id: int, _: dict = Depends(require_admin)):
    """Fetch one person (including aliases); 404 when the id is unknown."""
    import face_db

    record = face_db.get_person(person_id)
    if record is None:
        raise HTTPException(404, f"Person {person_id} not found")
    return record
|
|
|
|
|
|
@router.get("/crop/{detection_id}")
async def get_face_crop(detection_id: int, _: dict = Depends(require_admin)):
    """Return a JPEG face crop for the given detection id.

    The blocking image work runs on the face service's executor so it never
    blocks the event loop. Raises 503 when the face service is unavailable
    and 404 when no crop exists for *detection_id*.
    """
    import asyncio
    from face_service import get_face_service

    svc = get_face_service()
    if not svc.available:
        raise HTTPException(503, "Face service not available")
    # get_running_loop() is the correct call inside a coroutine;
    # get_event_loop() is deprecated in this context since Python 3.10.
    loop = asyncio.get_running_loop()
    crop = await loop.run_in_executor(svc._executor, svc.get_face_crop, detection_id)
    if crop is None:
        raise HTTPException(404, "Face crop not found")
    return Response(content=crop, media_type="image/jpeg")
|
|
|
|
|
|
class _IdentifyItem(BaseModel):
    """One detection→name assignment in a POST /identify batch."""

    # Id of the face detection being identified.
    detection_id: int
    # Person name to assign; stripped and length-checked by the endpoint.
    name: str
    # Must be True to link to a person whose name already exists (else 409).
    use_existing: bool = False
|
|
|
|
|
|
class _IdentifyRequest(BaseModel):
    """Body of POST /identify: a batch of identifications, processed in order."""

    identifications: list[_IdentifyItem]
|
|
|
|
|
|
@router.get("/detections/unidentified")
async def list_unidentified_detections(
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    _: dict = Depends(require_admin),
):
    """Page through unidentified face detections found in input images."""
    import face_db

    page, total = face_db.get_unidentified_input_detections(limit=limit, offset=offset)
    return {"detections": page, "total": total}
|
|
|
|
|
|
class _AliasRequest(BaseModel):
    """Body of POST /persons/{person_id}/aliases."""

    # Alias text; stripped and length-checked (max 100 chars) by the endpoint.
    alias: str
|
|
|
|
|
|
@router.post("/persons/{person_id}/aliases")
async def add_person_alias(
    person_id: int,
    body: _AliasRequest,
    _: dict = Depends(require_admin),
):
    """Add an alias to a person.

    Raises 400 for an empty or over-long alias, 404 when the person does not
    exist, and 409 when ``face_db.add_alias`` rejects a duplicate.
    """
    import face_db

    alias = body.alias.strip()
    if not alias:
        raise HTTPException(400, "Alias cannot be empty")
    if len(alias) > 100:
        raise HTTPException(400, "Alias too long (max 100 chars)")
    # Consistent with rename_person/delete_person: reject unknown persons
    # explicitly instead of relying on face_db.add_alias's internal behavior.
    if face_db.get_person(person_id) is None:
        raise HTTPException(404, f"Person {person_id} not found")
    try:
        # Second tuple element (created flag) is unused; avoid shadowing the
        # `_` dependency parameter by naming it.
        alias_id, _created = face_db.add_alias(person_id, alias)
    except ValueError as e:
        raise HTTPException(409, str(e)) from e
    return {"id": alias_id, "alias": alias}
|
|
|
|
|
|
@router.delete("/persons/{person_id}/aliases/{alias_id}")
async def remove_person_alias(
    person_id: int,
    alias_id: int,
    _: dict = Depends(require_admin),
):
    """Delete an alias by id.

    NOTE(review): ``person_id`` is part of the route but is never checked
    against the alias's owner — confirm ``face_db.remove_alias`` is safe to
    call with an arbitrary alias id.
    """
    import face_db

    face_db.remove_alias(alias_id)
    return {"ok": True}
|
|
|
|
|
|
class _RenameRequest(BaseModel):
    """Body of PATCH /persons/{person_id}."""

    # New canonical name; stripped and length-checked (max 100 chars) by the endpoint.
    name: str
|
|
|
|
|
|
@router.patch("/persons/{person_id}")
async def rename_person(
    person_id: int,
    body: _RenameRequest,
    _: dict = Depends(require_admin),
):
    """Rename a person.

    400 for an empty or over-long name, 404 for an unknown person,
    409 when the new name collides with an existing person.
    """
    import face_db

    new_name = body.name.strip()
    if not new_name:
        raise HTTPException(400, "Name cannot be empty")
    if len(new_name) > 100:
        raise HTTPException(400, "Name too long (max 100 chars)")
    if face_db.get_person(person_id) is None:
        raise HTTPException(404, f"Person {person_id} not found")
    try:
        face_db.rename_person(person_id, new_name)
    except ValueError as err:
        # face_db signals a name collision with ValueError → surface as 409.
        raise HTTPException(409, str(err)) from err
    return {"ok": True, "person_id": person_id, "name": new_name}
|
|
|
|
|
|
@router.delete("/persons/{person_id}")
async def delete_person(person_id: int, _: dict = Depends(require_admin)):
    """Delete a person and unidentify all their detections; 404 if unknown."""
    import face_db

    existing = face_db.get_person(person_id)
    if existing is None:
        raise HTTPException(404, f"Person {person_id} not found")
    face_db.delete_person(person_id)
    return {"ok": True}
|
|
|
|
|
|
@router.get("/persons/{person_id}/detections")
async def get_person_detections(
    person_id: int,
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    _: dict = Depends(require_admin),
):
    """Page through the detections linked to one person."""
    import face_db

    page, total = face_db.get_detections_for_person(person_id, limit=limit, offset=offset)
    return {"detections": page, "total": total}
|
|
|
|
|
|
class _MergePersonRequest(BaseModel):
    """Body of POST /persons/{person_id}/merge."""

    # Person absorbed into the path's person_id (the survivor keeps their id).
    other_person_id: int
|
|
|
|
|
|
@router.post("/persons/{person_id}/merge")
async def merge_persons(
    person_id: int,
    body: _MergePersonRequest,
    _: dict = Depends(require_admin),
):
    """Merge another person into this one (survivor keeps their id).

    400 when both ids are equal; 404 when either person is missing.
    """
    import face_db

    absorbed_id = body.other_person_id
    if person_id == absorbed_id:
        raise HTTPException(400, "Cannot merge person into themselves")
    # Survivor is validated first, matching the original check order.
    for pid in (person_id, absorbed_id):
        if face_db.get_person(pid) is None:
            raise HTTPException(404, f"Person {pid} not found")
    face_db.merge_persons(person_id, absorbed_id)
    return {"ok": True, "survivor_id": person_id, "absorbed_id": absorbed_id}
|
|
|
|
|
|
class _ReassignRequest(BaseModel):
    """Body of POST /detections/{detection_id}/reassign."""

    # None → unidentify the detection; a string → link to that person.
    person_name: str | None = None
    # Must be True to link to a person whose name already exists (else 409).
    use_existing: bool = False
|
|
|
|
|
|
@router.post("/detections/{detection_id}/reassign")
async def reassign_detection(
    detection_id: int,
    body: _ReassignRequest,
    _: dict = Depends(require_admin),
):
    """
    Reassign or unidentify a detection.

    - person_name=null → unidentify (set person_id=NULL)
    - person_name=str → link to that person (create if needed, or use existing)
    """
    import face_db

    if face_db.get_detection(detection_id) is None:
        raise HTTPException(404, f"Detection {detection_id} not found")

    # A null name means "forget the identification".
    if body.person_name is None:
        face_db.unidentify_detection(detection_id)
        return {"detection_id": detection_id, "person_id": None, "unidentified": True}

    name = body.person_name.strip()
    if not name:
        raise HTTPException(400, "Name cannot be empty")
    if len(name) > 100:
        raise HTTPException(400, "Name too long (max 100 chars)")
    if face_db.person_name_exists(name) and not body.use_existing:
        raise HTTPException(
            409, f"A person named '{name}' already exists. Set use_existing=true to link."
        )
    person_id, is_new = face_db.get_or_create_person(name)
    face_db.link_detection_to_person(detection_id, person_id)
    return {"detection_id": detection_id, "person_id": person_id, "is_new": is_new}
|
|
|
|
|
|
class _ClusterRequest(BaseModel):
    """Body of POST /groups/compute."""

    # Clustering similarity threshold; the endpoint enforces the 0.3–0.7 range.
    threshold: float = 0.45
|
|
|
|
|
|
class _MergeGroupsRequest(BaseModel):
    """Body of POST /groups/merge."""

    # Surviving group.
    keep_group_id: int
    # Group absorbed into keep_group_id; must differ from it.
    discard_group_id: int
|
|
|
|
|
|
class _IdentifyGroupRequest(BaseModel):
    """Body of POST /groups/{group_id}/identify."""

    # Person name for every detection in the group; stripped and length-checked.
    name: str
    # Must be True to link to a person whose name already exists (else 409).
    use_existing: bool = False
|
|
|
|
|
|
@router.get("/groups")
async def list_face_groups(_: dict = Depends(require_admin)):
    """List face groups with ≥ 2 unidentified detections."""
    import face_db

    found = face_db.get_groups_with_detections()
    return {"groups": found, "total": len(found)}
|
|
|
|
|
|
@router.get("/groups/{group_id}/detections")
async def get_face_group_detections(group_id: int, _: dict = Depends(require_admin)):
    """Return the unidentified detections of one group (fetched on expand)."""
    import face_db

    members = face_db.get_group_detections(group_id)
    return {"detections": members}
|
|
|
|
|
|
@router.post("/groups/compute")
async def compute_face_groups(body: _ClusterRequest, _: dict = Depends(require_admin)):
    """Run a full re-cluster of all unidentified faces.

    422 when the threshold is outside [0.3, 0.7]; 503 when the face service
    is unavailable.
    """
    from face_service import get_face_service

    threshold = body.threshold
    if not 0.3 <= threshold <= 0.7:
        raise HTTPException(422, "threshold must be between 0.3 and 0.7")
    service = get_face_service()
    if not service.available:
        raise HTTPException(503, "Face service not available")
    groups = await service.cluster_unidentified_faces(threshold)
    return {
        "groups_created": len(groups),
        "total_detections_clustered": sum(len(g) for g in groups),
        "threshold": threshold,
    }
|
|
|
|
|
|
@router.post("/groups/merge")
async def merge_face_groups(body: _MergeGroupsRequest, _: dict = Depends(require_admin)):
    """Merge two face groups; the keep group survives."""
    import face_db

    keep, discard = body.keep_group_id, body.discard_group_id
    if keep == discard:
        raise HTTPException(400, "keep_group_id and discard_group_id must differ")
    # Both ids must refer to currently-listed groups.
    known_ids = {g["id"] for g in face_db.get_groups_with_detections()}
    for gid in (keep, discard):
        if gid not in known_ids:
            raise HTTPException(404, f"Group {gid} not found")
    face_db.merge_groups(keep, discard)
    return {"ok": True, "surviving_group_id": keep}
|
|
|
|
|
|
@router.post("/groups/{group_id}/identify")
async def identify_face_group(
    group_id: int,
    body: _IdentifyGroupRequest,
    _: dict = Depends(require_admin),
):
    """
    Identify all detections in a group as one person.

    After identification, auto-links any similar ungrouped unidentified detections.
    Works for both input and output source types.
    """
    import face_db

    name = body.name.strip()
    if not name:
        raise HTTPException(400, "Name cannot be empty")
    if len(name) > 100:
        raise HTTPException(400, "Name too long (max 100 chars)")

    detections = face_db.get_group_detections(group_id)
    if not detections:
        raise HTTPException(404, f"Group {group_id} not found or has no unidentified detections")

    if face_db.person_name_exists(name) and not body.use_existing:
        raise HTTPException(
            409, f"A person named '{name}' already exists. Set use_existing=true to link."
        )

    person_id, is_new = face_db.get_or_create_person(name)
    # Link every detection in the group to the (possibly new) person.
    for detection in detections:
        face_db.link_detection_to_person(detection["id"], person_id)
    identified_count = len(detections)

    # The group is fully identified, so it no longer needs to exist.
    face_db.delete_group(group_id)

    # Post-identify: auto-link ungrouped unidentified detections similar to this person.
    ungrouped = face_db.get_ungrouped_unidentified_embeddings()
    auto_linked = _auto_link_for_person(person_id, face_db, ungrouped)

    return {
        "person_id": person_id,
        "person_name": name,
        "is_new": is_new,
        "identified_count": identified_count,
        "auto_linked_count": auto_linked,
    }
|
|
|
|
|
|
@router.delete("/groups/{group_id}/detections/{detection_id}")
async def remove_group_detection(
    group_id: int,
    detection_id: int,
    _: dict = Depends(require_admin),
):
    """Remove a single detection from its group. Cleans up singleton groups."""
    import face_db

    detection = face_db.get_detection(detection_id)
    in_group = detection is not None and detection.get("group_id") == group_id
    if not in_group:
        raise HTTPException(404, "Detection not found in this group")
    face_db.remove_detection_from_group(detection_id)
    # Drop any group left with a single member after the removal.
    face_db.delete_singleton_groups()
    return {"ok": True}
|
|
|
|
|
|
@router.post("/rescan/outputs")
async def rescan_output_embeddings(_: dict = Depends(require_admin)):
    """Re-detect faces in stored output images to rebuild NULL embeddings."""
    import face_db
    from face_service import get_face_service

    service = get_face_service()
    if not service.available:
        raise HTTPException(503, "Face service not available")
    pending = face_db.get_null_embedding_output_source_ids()
    total_updated = 0
    for source_id in pending:
        total_updated += await service.rescan_output_embedding(source_id)
    return {"processed": len(pending), "updated": total_updated}
|
|
|
|
|
|
@router.post("/identify")
async def identify_faces(body: _IdentifyRequest, _: dict = Depends(require_admin)):
    """
    Identify one or more face detections by name.

    - If the name is new → creates a new person and links the detection.
    - If the name exists and ``use_existing=true`` → links to the existing person.
    - If the name exists and ``use_existing=false`` → HTTP 409.
    - Only detections with ``source_type='input'`` may be identified via the web UI.
    """
    import face_db

    results = []
    for item in body.identifications:
        name = item.name.strip()
        if not name:
            raise HTTPException(400, "Name cannot be empty")
        if len(name) > 100:
            raise HTTPException(400, "Name too long (max 100 chars)")

        detection = face_db.get_detection(item.detection_id)
        if detection is None:
            raise HTTPException(404, f"Detection {item.detection_id} not found")
        if detection["source_type"] != "input":
            raise HTTPException(403, "Only input-image detections may be identified via web UI")

        if face_db.person_name_exists(name) and not item.use_existing:
            raise HTTPException(
                409,
                f"A person named '{name}' already exists. Set use_existing=true to link.",
            )

        person_id, is_new = face_db.get_or_create_person(name)
        face_db.link_detection_to_person(item.detection_id, person_id)
        results.append({
            "detection_id": item.detection_id,
            "person_id": person_id,
            "person_name": name,
            "is_new": is_new,
        })

    # Auto-link similar ungrouped faces for each person identified in this batch
    ungrouped = face_db.get_ungrouped_unidentified_embeddings()
    person_ids = {entry["person_id"] for entry in results}
    auto_linked = sum(_auto_link_for_person(pid, face_db, ungrouped) for pid in person_ids)
    return {"identifications": results, "auto_linked_count": auto_linked}
|