from typing import List, Optional, Tuple, Any
|
|
from datetime import datetime
|
|
|
|
from fastapi import APIRouter, Depends, Query, HTTPException, Path, Response, UploadFile, File
|
|
from sqlalchemy import text, select, insert
|
|
from sqlalchemy.ext.asyncio import AsyncSession
|
|
from pydantic import BaseModel
|
|
|
|
from .database import get_session
|
|
from .models import Inventory
|
|
|
|
router = APIRouter()
|
|
|
|
# ---------------------------------------------------------------------------
# Inventory import endpoint
|
@router.post("/inventory/import")
async def import_inventory_csv(
    file: UploadFile = File(...),
    session: AsyncSession = Depends(get_session),
):
    """Replace the entire inventory table with contents from an uploaded CSV.

    The CSV must use the delimiter ``;`` and column headers:
    ``char;storage;item;quantity``.  Each row's quantity is split into
    multiple slot rows so that no row exceeds the item's stack size.

    Raises:
        HTTPException 400: file is not UTF-8, a required column is missing,
            or a quantity is not an integer.
        HTTPException 500: the database replacement failed (rolled back).
    """
    import csv
    import io

    # Read file bytes and decode
    contents = await file.read()
    try:
        text_data = contents.decode("utf-8")
    except UnicodeDecodeError:
        raise HTTPException(status_code=400, detail="CSV must be UTF-8 encoded")

    reader = csv.DictReader(io.StringIO(text_data), delimiter=";", quotechar='"')
    raw_rows = []
    for r in reader:
        # BUG FIX: previously only the "quantity" lookup was guarded, so a CSV
        # missing the char/storage/item headers raised an unhandled KeyError
        # (HTTP 500).  A row shorter than the header also yields None values
        # from DictReader, raising AttributeError on .strip().  Treat all of
        # these as a 400 schema error.
        try:
            qty = int(r["quantity"].strip()) if r["quantity"].strip() else 0
            raw_rows.append(
                {
                    "character_name": r["char"].strip(),
                    "storage_type": r["storage"].strip(),
                    "item_name": r["item"].strip(),
                    "quantity": qty,
                }
            )
        except (KeyError, AttributeError, ValueError):
            raise HTTPException(status_code=400, detail="Invalid CSV schema or quantity value")

    # ---------------------------------------------------------------------
    # Resolve stack sizes and split quantities across inventory slots
    # ---------------------------------------------------------------------
    item_names = {r["item_name"] for r in raw_rows}
    stack_sizes: dict[str, int] = {}
    if item_names:
        # Discover item tables that have a stack_size column ( *_items )
        tbl_res = await session.execute(
            text(
                """
                SELECT table_name
                FROM information_schema.columns
                WHERE table_schema = 'public'
                  AND column_name = 'stack_size'
                  AND table_name LIKE '%_items'
                """
            )
        )
        item_tables = [row[0] for row in tbl_res.fetchall()]

        # Query each table for name -> stack_size entries we need.
        # NOTE: the table name is interpolated into the SQL, but it can only
        # come from information_schema above, never from user input.
        for t in item_tables:
            q = text(f"SELECT name, stack_size FROM {t} WHERE name = ANY(:names)")
            res = await session.execute(q, {"names": list(item_names)})
            for name, stack in res.fetchall():
                # Prefer the first positive value encountered; fall back to 1.
                if name not in stack_sizes or not stack_sizes[name]:
                    stack_sizes[name] = stack if stack and stack > 0 else 1

    def _stack_for(item_name: str) -> int:
        # Unknown items behave as unstackable (stack size 1).
        return stack_sizes.get(item_name, 1) or 1

    # Resolve item_ids via all_items once
    id_rows = await session.execute(
        text("SELECT id,name FROM all_items WHERE name = ANY(:names)"),
        {"names": list(item_names)},
    )
    id_map = {name: item_id for item_id, name in id_rows.fetchall()}

    rows: list[dict] = []
    for r in raw_rows:
        qty = r["quantity"]
        if qty <= 0:
            # Zero/negative quantities produce no slot rows.
            continue
        stack = _stack_for(r["item_name"])
        # Split into multiple slot-rows respecting stack size
        while qty > 0:
            take = min(stack, qty)
            slot_row = r.copy()
            slot_row["quantity"] = take
            slot_row["item_id"] = id_map.get(r["item_name"])  # may be None
            rows.append(slot_row)
            qty -= take

    # Replace table contents inside a transaction
    try:
        await session.execute(text("TRUNCATE TABLE inventory;"))
        if rows:
            await session.execute(insert(Inventory), rows)
        await session.commit()
    except Exception as e:
        await session.rollback()
        raise HTTPException(status_code=500, detail=f"Failed to import CSV: {e}")

    return {"imported": len(rows)}
|
|
|
|
# Whitelist mapping a craft key to its recipes table name.
# NOTE(review): not referenced anywhere in this chunk; presumably consumed by
# crafting-recipe endpoints elsewhere in the project — confirm before removing.
ALLOWED_CRAFTS = {
    "woodworking": "recipes_woodworking",
    # Future crafts can be added here, e.g. "smithing": "recipes_smithing"
}
|
|
|
|
|
|
|
|
class MetadataResponse(BaseModel):
    """Response body for GET /metadata: filter values available to clients."""

    # Distinct non-empty storage_type values present in the inventory table.
    storage_types: List[str]
    # Distinct type descriptions from all_items, with the NOTHING/UNKNOWN
    # placeholders collapsed into the synthetic MANNEQUIN category.
    type_descriptions: List[str]
|
|
|
|
|
|
@router.get("/metadata", response_model=MetadataResponse)
async def metadata(session: AsyncSession = Depends(get_session)):
    """Return distinct storage types and type descriptions."""
    storage_rows = await session.execute(
        text("SELECT DISTINCT storage_type FROM inventory ORDER BY storage_type")
    )
    storage_types = [value for (value,) in storage_rows.fetchall() if value]

    desc_rows = await session.execute(
        text("SELECT DISTINCT type_description FROM all_items ORDER BY type_description")
    )
    found_types = {value for (value,) in desc_rows.fetchall() if value}

    # Collapse the NOTHING/UNKNOWN placeholder categories into the synthetic
    # MANNEQUIN category: drop them, and add MANNEQUIN iff either was present.
    placeholders = {'NOTHING', 'UNKNOWN'}
    cleaned = found_types - placeholders
    if found_types & placeholders:
        cleaned.add('MANNEQUIN')

    return MetadataResponse(
        storage_types=storage_types,
        type_descriptions=sorted(cleaned),
    )
|
|
|
|
|
|
class InventoryItem(BaseModel):
    """One inventory slot row joined with its all_items metadata.

    description/icon_id/type_description/stack_size come from a LEFT JOIN
    against all_items and are None when the item has no entry there.
    """

    id: int
    item_name: str
    quantity: int
    storage_type: str
    description: Optional[str]
    icon_id: Optional[str]
    type_description: Optional[str]
    stack_size: Optional[int]
    last_updated: Optional[datetime]

    class Config:
        # Allow validation from objects with attributes (e.g. result rows).
        from_attributes = True
|
|
|
|
|
|
@router.get("/inventory/{character}", response_model=List[InventoryItem])
async def inventory(
    character: str,
    storage_type: Optional[str] = Query(None),
    session: AsyncSession = Depends(get_session),
):
    """Return items for a character, optionally filtered by storage_type."""
    sql = """
    SELECT i.*, ai.description, ai.icon_id, ai.type_description, ai.stack_size
    FROM inventory i
    LEFT JOIN all_items ai ON ai.name = i.item_name
    WHERE i.character_name = :char
    """
    bind_params: dict = {"char": character}
    # Narrow to a single storage type only when the caller asked for one.
    if storage_type:
        sql += " AND i.storage_type = :storage"
        bind_params["storage"] = storage_type

    result = await session.execute(text(sql), bind_params)
    return [
        InventoryItem(
            id=row.id,
            item_name=row.item_name,
            quantity=row.quantity,
            storage_type=row.storage_type,
            description=row.description,
            icon_id=row.icon_id,
            type_description=row.type_description,
            stack_size=row.stack_size,
            last_updated=row.last_updated,
        )
        for row in result.fetchall()
    ]
|
|
|
|
|
|
class ItemSummary(BaseModel):
    """Compact item row returned by the paginated /items listing."""

    id: int
    name: str
    icon_id: Optional[str]
    type_description: Optional[str]
    # Currently always None: the /items query selects NULL AS jobs_description.
    jobs_description: Optional[List[str]]
|
|
|
|
|
|
@router.get("/items", response_model=List[ItemSummary])
async def items(
    response: Response,
    type: Optional[str] = Query(None, alias="type"),
    search: Optional[str] = Query(None, description="Substring search on item name"),
    page: int = Query(1, ge=1),
    page_size: int = Query(40, ge=1, le=100),
    session: AsyncSession = Depends(get_session),
):
    """Return items from all_items view with pagination."""
    params: dict[str, Any] = {
        "limit": page_size,
        "offset": (page - 1) * page_size,
    }
    filters: list[str] = ["a.name != '.'"]

    if type:
        if type == "MANNEQUIN":
            # MANNEQUIN is the synthetic bucket that also absorbs the
            # NOTHING/UNKNOWN placeholder categories.
            filters.append("a.type_description IN ('MANNEQUIN', 'NOTHING', 'UNKNOWN')")
        else:
            filters.append("a.type_description = :type")
            params["type"] = type

    if search:
        filters.append("a.name ILIKE :search")
        params["search"] = f"%{search}%"

    where_sql = f"WHERE {' AND '.join(filters)}" if filters else ""

    # Removed dependency on armor_items to avoid errors if table is absent
    join_sql = "FROM all_items a"

    # Total count for pagination (exclude limit/offset)
    count_params = {k: v for k, v in params.items() if k not in ("limit", "offset")}
    count_res = await session.execute(
        text(f"SELECT COUNT(*) {join_sql} {where_sql}"), count_params
    )
    total = count_res.scalar() or 0
    response.headers["X-Total-Count"] = str(total)

    # Main query
    listing = await session.execute(
        text(
            f"SELECT a.id, a.name, a.icon_id, a.type_description, NULL AS jobs_description "
            f"{join_sql} {where_sql} ORDER BY a.id LIMIT :limit OFFSET :offset"
        ),
        params,
    )
    summaries: list[ItemSummary] = []
    for row in listing.fetchall():
        summaries.append(
            ItemSummary(
                id=row.id,
                name=row.name,
                icon_id=row.icon_id,
                type_description=row.type_description,
                jobs_description=row.jobs_description,
            )
        )
    return summaries
|
|
|
|
|
|
|
|
|
|
|
|
class ItemDetail(BaseModel):
    """Full item payload for the detail endpoints, with inline icon data."""

    id: int
    name: str
    description: Optional[str]
    icon_id: Optional[str]
    type_description: Optional[str]
    # Base64-encoded icon image, or None when no icon is stored / decodable.
    icon_b64: Optional[str] = None
|
|
|
|
|
|
@router.get("/icon/{icon_id}")
async def get_icon(icon_id: str, session: AsyncSession = Depends(get_session)):
    """Serve a stored icon image as raw bytes with an image/* media type.

    Raises:
        HTTPException 404: no item_icons row exists for ``icon_id``.
    """
    q = text("SELECT image_data, image_format, image_encoding FROM item_icons WHERE id = :id LIMIT 1")
    res = await session.execute(q, {"id": icon_id})
    row = res.fetchone()
    if not row:
        raise HTTPException(status_code=404, detail="Icon not found")
    import base64
    # The stored payload is either base64 text or raw bytes, per image_encoding.
    if row.image_encoding == "base64":
        data_bytes = base64.b64decode(row.image_data)
    else:
        data_bytes = row.image_data
    # image_format may be bare ("png") or a full MIME type ("image/png");
    # normalize either form to "image/<subtype>".
    media_type = f"image/{row.image_format.split('/')[-1]}" if row.image_format else "image/png"
    # FIX: removed the redundant local `from fastapi.responses import Response`;
    # Response is already imported at module level from fastapi and is the same
    # Starlette class, so the re-import only shadowed it.
    return Response(content=data_bytes, media_type=media_type)
|
|
|
|
|
|
@router.get("/items/by-name/{item_name}", response_model=ItemDetail)
async def item_detail_by_name(item_name: str, session: AsyncSession = Depends(get_session)):
    """Fetch one item by exact name, with its icon normalized to base64."""
    stmt = text("""
    SELECT a.*, ic.image_data, ic.image_encoding
    FROM all_items a
    LEFT JOIN item_icons ic ON ic.id = a.icon_id
    WHERE a.name = :n
    LIMIT 1
    """)
    row = (await session.execute(stmt, {"n": item_name})).fetchone()
    if not row:
        raise HTTPException(status_code=404, detail="Item not found")

    import base64

    # Normalize the stored icon payload to a base64 string (or None).
    icon_b64: str | None = None
    payload = row.image_data
    if payload is not None:
        if row.image_encoding == "base64":
            # Already base64 text in the database; pass through untouched.
            icon_b64 = payload
        else:
            # Raw bytes: encode here; swallow any encoding failure as "no icon".
            try:
                icon_b64 = base64.b64encode(payload).decode()
            except Exception:
                icon_b64 = None

    return ItemDetail(
        id=row.id,
        name=row.name,
        description=row.description,
        icon_id=row.icon_id,
        type_description=row.type_description,
        icon_b64=icon_b64,
    )
|
|
|
|
|
|
@router.get("/items/{item_id}", response_model=ItemDetail)
async def item_detail(item_id: int = Path(..., ge=1), session: AsyncSession = Depends(get_session)):
    """Retrieve item metadata and icon by numeric ID."""
    query = text(
        """
    SELECT a.*, ic.image_data, ic.image_encoding
    FROM all_items a
    LEFT JOIN item_icons ic ON ic.id = a.icon_id
    WHERE a.id = :id
    LIMIT 1
    """
    )
    found = (await session.execute(query, {"id": item_id})).fetchone()
    if not found:
        raise HTTPException(status_code=404, detail="Item not found")

    import base64

    def _to_b64(raw, encoding):
        # The database stores either base64 text (pass through) or raw
        # bytes (encode here); any encoding failure is treated as "no icon".
        if raw is None:
            return None
        if encoding == "base64":
            return raw
        try:
            return base64.b64encode(raw).decode()
        except Exception:
            return None

    return ItemDetail(
        id=found.id,
        name=found.name,
        description=found.description,
        icon_id=found.icon_id,
        type_description=found.type_description,
        icon_b64=_to_b64(found.image_data, found.image_encoding),
    )
|