# Add POST /products/parse-text endpoint that accepts raw product text, calls
# Gemini (google-genai) with a structured extraction prompt, and returns a
# partial ProductParseResponse. Frontend gains a collapsible "AI pre-fill"
# card at the top of ProductForm that merges the LLM response into all form
# fields reactively.
# - Backend: ProductParseRequest/Response schemas, system prompt with enum
#   constraints, temperature=0.0 for deterministic extraction, effect_profile
#   always returned in full
# - Frontend: parseProductText() in api.ts; controlled $state bindings for all
#   text/number/checkbox inputs; applyAiResult() merges response
# Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>

# PEP 621 project metadata.
[project]
name = "innercontext"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
# Runtime dependencies (PEP 508 specifiers), one per line, sorted.
dependencies = [
    "fastapi>=0.132.0",
    "google-genai>=1.65.0",
    "psycopg>=3.3.3",
    "python-dotenv>=1.2.1",
    "sqlmodel>=0.0.37",
    "uvicorn[standard]>=0.34.0",
]

# PEP 735 dependency groups — dev-only tooling, not installed with the package.
[dependency-groups]
dev = [
    "black>=26.1.0",
    "httpx>=0.28.1",
    "isort>=8.0.0",
    "pytest>=9.0.2",
    "ruff>=0.15.2",
    "ty>=0.0.18",
]

[tool.pytest.ini_options]
testpaths = ["tests"]
# Allow bare `import app`-style imports from the project root during tests.
pythonpath = ["."]
# Verbose test names; short tracebacks keep failure output compact.
addopts = "-v --tb=short"

# Keep isort's import formatting compatible with black.
[tool.isort]
profile = "black"