Enable on-demand INCI retrieval in /routines/suggest through Gemini function calling so detailed ingredient data is fetched only when needed. Persist and normalize tool_trace data in AI logs to make function-call behavior directly inspectable via /ai-logs endpoints.
83 lines · 2.5 KiB · Python
import json
from typing import Any, Optional
from uuid import UUID

from fastapi import APIRouter, Depends, HTTPException, Query
from sqlmodel import Session, SQLModel, col, select

from db import get_session
from innercontext.models.ai_log import AICallLog
|
|
|
|
router = APIRouter()
|
|
|
|
|
|
def _normalize_tool_trace(value: object) -> dict[str, Any] | None:
|
|
if value is None:
|
|
return None
|
|
if isinstance(value, dict):
|
|
return {str(k): v for k, v in value.items()}
|
|
if isinstance(value, str):
|
|
try:
|
|
parsed = json.loads(value)
|
|
except json.JSONDecodeError:
|
|
return None
|
|
if isinstance(parsed, dict):
|
|
return {str(k): v for k, v in parsed.items()}
|
|
return None
|
|
return None
|
|
|
|
|
|
class AICallLogPublic(SQLModel):
    """List-friendly view: omits large text fields."""

    id: UUID
    # NOTE(review): typed `object` rather than a datetime, so pydantic passes
    # the value through unvalidated — presumably AICallLog.created_at is a
    # datetime; confirm against the model before tightening this annotation.
    created_at: object
    endpoint: str
    model: str
    prompt_tokens: Optional[int] = None
    completion_tokens: Optional[int] = None
    total_tokens: Optional[int] = None
    duration_ms: Optional[int] = None
    # Normalized function-call trace (via _normalize_tool_trace); None when the
    # stored value is absent or not a JSON object.
    tool_trace: Optional[dict[str, Any]] = None
    success: bool
    error_detail: Optional[str] = None
|
|
|
|
|
|
@router.get("", response_model=list[AICallLogPublic])
def list_ai_logs(
    endpoint: Optional[str] = None,
    success: Optional[bool] = None,
    # Fix: reject non-positive limits (previously any int reached SQL LIMIT
    # unvalidated); valid callers are unaffected since the default stays 50.
    limit: int = Query(default=50, ge=1),
    session: Session = Depends(get_session),
):
    """Return recent AI call logs, newest first.

    Args:
        endpoint: If given, only logs recorded for this endpoint.
        success: If given, only logs with a matching success flag.
        limit: Maximum number of rows returned; must be >= 1 (default 50).
        session: Database session (injected).

    Returns:
        AICallLogPublic views with tool_trace normalized to a plain dict
        (or None) regardless of how it was stored.
    """
    # Build filters first, then order/limit — clearer than appending WHERE
    # clauses after LIMIT, even though SQLAlchemy composes either order.
    stmt = select(AICallLog)
    if endpoint is not None:
        stmt = stmt.where(AICallLog.endpoint == endpoint)
    if success is not None:
        stmt = stmt.where(AICallLog.success == success)
    stmt = stmt.order_by(col(AICallLog.created_at).desc()).limit(limit)

    logs = session.exec(stmt).all()
    return [
        AICallLogPublic(
            id=log.id,
            created_at=log.created_at,
            endpoint=log.endpoint,
            model=log.model,
            prompt_tokens=log.prompt_tokens,
            completion_tokens=log.completion_tokens,
            total_tokens=log.total_tokens,
            duration_ms=log.duration_ms,
            # Stored traces may be JSON strings; normalize for the response.
            tool_trace=_normalize_tool_trace(getattr(log, "tool_trace", None)),
            success=log.success,
            error_detail=log.error_detail,
        )
        for log in logs
    ]
|
|
|
|
|
|
@router.get("/{log_id}", response_model=AICallLog)
def get_ai_log(log_id: UUID, session: Session = Depends(get_session)):
    """Return a single AI call log by id, with tool_trace normalized.

    Args:
        log_id: Primary key of the log to fetch.
        session: Database session (injected).

    Raises:
        HTTPException: 404 if no log with ``log_id`` exists.
    """
    log = session.get(AICallLog, log_id)
    if log is None:
        raise HTTPException(status_code=404, detail="Log not found")
    # Fix: detach the row before mutating it. Assigning the normalized trace
    # to a session-attached instance marks it dirty, so a flush/commit in the
    # session's lifecycle could silently write the normalized value back to
    # the database. Expunging makes the mutation response-only.
    session.expunge(log)
    log.tool_trace = _normalize_tool_trace(getattr(log, "tool_trace", None))
    return log
|