Initial commit
15
backend/Dockerfile
Normal file
@@ -0,0 +1,15 @@
FROM python:3.11-slim

WORKDIR /app

# Install dependencies
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code
COPY app ./app

# Environment variables will be supplied via docker-compose (.env)
ENV PYTHONPATH=/app

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
35
backend/app/database.py
Normal file
@@ -0,0 +1,35 @@
import os
from pathlib import Path

from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
from sqlalchemy.orm import declarative_base
from dotenv import load_dotenv

# Try loading db.conf from the project root into environment variables
PROJECT_ROOT = Path(__file__).resolve().parents[2]
DB_CONF = PROJECT_ROOT / "db.conf"
if DB_CONF.exists():
    load_dotenv(DB_CONF)
else:
    load_dotenv()

DB_HOST = os.getenv("PSQL_HOST", "localhost")
DB_PORT = os.getenv("PSQL_PORT", "5432")
DB_USER = os.getenv("PSQL_USER", "postgres")
DB_PASSWORD = os.getenv("PSQL_PASSWORD", "")
DB_NAME = os.getenv("PSQL_DBNAME", "ffxi_items")

DATABASE_URL = (
    f"postgresql+asyncpg://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
)

engine = create_async_engine(DATABASE_URL, echo=False, pool_size=10, max_overflow=20)
AsyncSessionLocal: async_sessionmaker[AsyncSession] = async_sessionmaker(
    bind=engine, expire_on_commit=False
)

Base = declarative_base()

async def get_session() -> AsyncSession:
    async with AsyncSessionLocal() as session:
        yield session
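For reference, a minimal smoke-test sketch for this module (not part of the commit): it assumes a reachable Postgres instance configured via the PSQL_* variables above, and a hypothetical script name check_db.py run from the backend/ directory.

# check_db.py -- hypothetical connectivity check for app.database (illustrative only)
import asyncio

from sqlalchemy import text

from app.database import AsyncSessionLocal, engine


async def main() -> None:
    # Open a session the same way get_session() does and run a trivial query.
    async with AsyncSessionLocal() as session:
        value = await session.scalar(text("SELECT 1"))
        print("database reachable:", value == 1)
    # Dispose of the connection pool so the event loop can exit cleanly.
    await engine.dispose()


if __name__ == "__main__":
    asyncio.run(main())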
77
backend/app/main.py
Normal file
@@ -0,0 +1,77 @@
from fastapi import FastAPI

from .router import router
from .recipes_router import router as recipes_router
from .database import engine
from sqlalchemy import text

app = FastAPI(title="FFXI Item Browser API")

# Ensure all_items view exists on startup
@app.on_event("startup")
async def ensure_view():
    """Recreate the `all_items` view on each startup to ensure schema consistency.
    The view merges all *_items tables and exposes columns: id, name, description, icon_id, type_description.
    Some source tables may lack `description` or `icon_id`; NULL is substituted in those cases.
    """
    async with engine.begin() as conn:
        # Drop existing view if present
        await conn.execute(text("DROP VIEW IF EXISTS all_items"))

        # Discover item tables
        table_rows = await conn.execute(
            text(
                "SELECT tablename FROM pg_tables WHERE schemaname='public'"
                " AND tablename LIKE '%_items' AND tablename NOT IN ('inventory')"
            )
        )
        tables = [r[0] for r in table_rows]
        if not tables:
            return

        selects = []
        for t in tables:
            desc_exists = await conn.scalar(
                text(
                    """
                    SELECT EXISTS (
                        SELECT 1 FROM information_schema.columns
                        WHERE table_name=:table AND column_name='description'
                    )
                    """),
                {"table": t},
            )
            icon_exists = await conn.scalar(
                text(
                    """
                    SELECT EXISTS (
                        SELECT 1 FROM information_schema.columns
                        WHERE table_name=:table AND column_name='icon_id'
                    )
                    """),
                {"table": t},
            )
            desc_col = "description" if desc_exists else "NULL"
            icon_col = "icon_id" if icon_exists else "NULL"
            selects.append(
                f"SELECT id, name, {desc_col} AS description, {icon_col} AS icon_id, type_description FROM {t}"
            )

        union_sql = " UNION ALL ".join(selects)
        await conn.execute(text(f"CREATE VIEW all_items AS {union_sql}"))
        # engine.begin() commits automatically when the block exits

# Mount API routes
app.include_router(router, prefix="/api")
app.include_router(recipes_router, prefix="/api")


@app.get("/health")
async def health():
    return {"status": "ok"}


if __name__ == "__main__":
    import uvicorn

    uvicorn.run("app.main:app", host="0.0.0.0", port=8000, reload=True)
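To make the startup hook above concrete: for two hypothetical source tables, one with both optional columns and one missing icon_id, the generated statement would look roughly like the sketch below (the table names are examples, not actual schema).

# Illustrative only: approximate shape of the SQL built by ensure_view()
# for hypothetical tables "weapon_items" (all columns present) and
# "usable_items" (no icon_id column, so NULL is substituted).
example_view_sql = (
    "CREATE VIEW all_items AS "
    "SELECT id, name, description AS description, icon_id AS icon_id, type_description FROM weapon_items"
    " UNION ALL "
    "SELECT id, name, description AS description, NULL AS icon_id, type_description FROM usable_items"
)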
39
backend/app/models.py
Normal file
@@ -0,0 +1,39 @@
"""SQLAlchemy models (minimal to get the API running).
For full production use we should reflect or generate models from the DB, but
this subset is enough to power the metadata + inventory endpoints.
"""
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Inventory(Base):
    __tablename__ = "inventory"

    id = Column(Integer, primary_key=True)
    character_name = Column(String)
    storage_type = Column(String)
    item_name = Column(String)
    quantity = Column(Integer)
    last_updated = Column(DateTime, default=datetime.utcnow)


class Spell(Base):
    """Spell table with job level columns (selected jobs only)."""

    __tablename__ = "spells"

    name = Column(String, primary_key=True)
    run = Column(Integer, nullable=True)
    whm = Column(Integer, nullable=True)
    blm = Column(Integer, nullable=True)
    rdm = Column(Integer, nullable=True)
    pld = Column(Integer, nullable=True)
    drk = Column(Integer, nullable=True)
    brd = Column(Integer, nullable=True)
    nin = Column(Integer, nullable=True)
    smn = Column(Integer, nullable=True)
    cor = Column(Integer, nullable=True)
    sch = Column(Integer, nullable=True)
    geo = Column(Integer, nullable=True)
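The module docstring above mentions reflecting models from the database as the production-grade alternative; a minimal sketch of that approach using SQLAlchemy's automap against the project's async engine is shown below. It assumes a reachable database and tables with primary keys, and is not part of the commit.

# Hypothetical reflection sketch (illustrative only, run from backend/).
import asyncio

from sqlalchemy.ext.automap import automap_base

from app.database import engine

AutomapBase = automap_base()


async def reflect_models() -> None:
    async with engine.connect() as conn:
        # Reflection is a synchronous operation, so bridge it with run_sync().
        await conn.run_sync(lambda sync_conn: AutomapBase.prepare(autoload_with=sync_conn))
    # Mapped classes are now available, e.g. AutomapBase.classes.inventory
    print(sorted(AutomapBase.classes.keys()))


if __name__ == "__main__":
    asyncio.run(reflect_models())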
177
backend/app/recipes_router.py
Normal file
@@ -0,0 +1,177 @@
"""Router exposing crafting recipe endpoints.

This is separated from `router.py` to keep modules focused.
"""
from __future__ import annotations

from typing import List, Optional, Tuple, Any

from fastapi import APIRouter, Depends, HTTPException, Path, Query
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from pydantic import BaseModel

from .database import get_session

# Map craft names -> table names in Postgres
ALLOWED_CRAFTS = {
    "woodworking": "recipes_woodworking",
    "smithing": "recipes_smithing",
    "alchemy": "recipes_alchemy",
    "bonecraft": "recipes_bonecraft",
    "goldsmithing": "recipes_goldsmithing",
    "clothcraft": "recipes_clothcraft",
    "leathercraft": "recipes_leathercraft",
    "cooking": "recipes_cooking",
}

router = APIRouter(tags=["recipes"])

class RecipeUsageSummary(BaseModel):
    craft: str
    id: int
    name: str
    level: int

class ItemRecipeUsage(BaseModel):
    crafted: list[RecipeUsageSummary] = []
    ingredient: list[RecipeUsageSummary] = []


class RecipeDetail(BaseModel):
    id: int
    name: str
    level: int
    category: str
    crystal: str
    key_item: Optional[str] = None
    ingredients: List[Tuple[str, int]]
    hq_yields: List[Optional[Tuple[str, int]]]
    subcrafts: List[Tuple[str, int]]

    class Config:
        from_attributes = True


def _craft_table(craft: str) -> str:
    craft_lower = craft.lower()
    if craft_lower not in ALLOWED_CRAFTS:
        raise HTTPException(status_code=404, detail="Unknown craft")
    return ALLOWED_CRAFTS[craft_lower]


@router.get("/recipes/{craft}", response_model=List[RecipeDetail])
async def list_recipes(
    craft: str = Path(..., description="Craft name, e.g. woodworking"),
    session: AsyncSession = Depends(get_session),
):
    """Return the full list of recipes for a craft."""
    table = _craft_table(craft)
    q = text(f"SELECT * FROM {table} ORDER BY level, name")
    result = await session.execute(q)
    rows = result.fetchall()
    print("[DEBUG] list_recipes", table, len(rows))
    details: List[RecipeDetail] = []
    for r in rows:
        details.append(
            RecipeDetail(
                id=r.id,
                name=r.name,
                level=r.level,
                category=r.category,
                crystal=r.crystal,
                key_item=r.key_item,
                ingredients=_to_list_tuples(r.ingredients),
                hq_yields=[tuple(h) if h else None for h in r.hq_yields] if r.hq_yields else [],
                subcrafts=_to_list_tuples(r.subcrafts),
            )
        )
    return details


def _to_list_tuples(value: Any) -> List[Tuple[str, int]]:
    """Convert a JSON/array value from Postgres to List[Tuple[str, int]]."""
    if not value:
        return []
    # asyncpg already converts jsonb to Python lists/dicts
    formatted: List[Tuple[str, int]] = []
    for item in value:
        # Accept [name, qty] or {"name": n, "qty": q}
        if isinstance(item, (list, tuple)) and len(item) == 2:
            name, qty = item
        elif isinstance(item, dict):
            name = item.get("0") or item.get("name") or item.get("item")
            qty = item.get("1") or item.get("qty") or item.get("quantity")
        else:
            # Fallback: treat as string name, qty=1
            name, qty = str(item), 1
        if name is None or qty is None:
            continue
        try:
            qty_int = int(qty)
        except Exception:
            qty_int = 1
        formatted.append((str(name), qty_int))
    return formatted


@router.get("/recipes/usage/{item_name}", response_model=ItemRecipeUsage)
async def item_recipe_usage(item_name: str, session: AsyncSession = Depends(get_session)):
    """Return lists of recipes that craft `item_name` or use it as an ingredient (excluding crystals)."""
    if item_name.lower().endswith(" crystal"):
        return ItemRecipeUsage()

    crafted: list[RecipeUsageSummary] = []
    ingredient: list[RecipeUsageSummary] = []

    for craft, table in ALLOWED_CRAFTS.items():
        # Crafted results
        q1 = text(f"SELECT id, name, level FROM {table} WHERE name = :n LIMIT 50")
        res1 = await session.execute(q1, {"n": item_name})
        crafted.extend(
            [RecipeUsageSummary(craft=craft, id=r.id, name=r.name, level=r.level) for r in res1.fetchall()]
        )

        # As ingredient (simple text match in JSON/array column)
        q2 = text(
            f"SELECT id, name, level FROM {table} WHERE ingredients::text ILIKE :pat LIMIT 50"
        )
        res2 = await session.execute(q2, {"pat": f"%{item_name}%"})
        for r in res2.fetchall():
            if not any(c.id == r.id and c.craft == craft for c in crafted) and not any(
                i.id == r.id and i.craft == craft for i in ingredient
            ):
                ingredient.append(
                    RecipeUsageSummary(craft=craft, id=r.id, name=r.name, level=r.level)
                )

    return ItemRecipeUsage(crafted=crafted, ingredient=ingredient)


@router.get("/recipes/{craft}/{recipe_id}", response_model=RecipeDetail)
async def recipe_detail(
    craft: str = Path(..., description="Craft name"),
    recipe_id: int = Path(..., description="Recipe ID"),
    session: AsyncSession = Depends(get_session),
):
    """Return the full recipe record."""
    table = _craft_table(craft)
    q = text(f"SELECT * FROM {table} WHERE id = :id LIMIT 1")
    result = await session.execute(q, {"id": recipe_id})
    row = result.fetchone()
    if not row:
        raise HTTPException(status_code=404, detail="Recipe not found")
    print("[DEBUG] recipe_detail", craft, recipe_id)

    return RecipeDetail(
        id=row.id,
        name=row.name,
        level=row.level,
        category=row.category,
        crystal=row.crystal,
        key_item=row.key_item,
        ingredients=_to_list_tuples(row.ingredients),
        hq_yields=[tuple(h) if h else None for h in row.hq_yields] if row.hq_yields else [],
        subcrafts=_to_list_tuples(row.subcrafts),
    )
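A small illustration of the payload shapes _to_list_tuples() accepts, assuming the jsonb columns store either [name, qty] pairs or name/qty objects; the item names are example values only, and the import assumes the snippet is run from the backend/ directory.

# Illustrative inputs as asyncpg would deliver them from a jsonb column.
from app.recipes_router import _to_list_tuples

pairs = [["Lauan Log", 2], ["Wind Crystal", 1]]      # [name, qty] form
objects = [{"name": "Lauan Log", "qty": 2}]          # {"name": ..., "qty": ...} form
bare = ["Lauan Log"]                                 # bare string falls back to qty=1

assert _to_list_tuples(pairs) == [("Lauan Log", 2), ("Wind Crystal", 1)]
assert _to_list_tuples(objects) == [("Lauan Log", 2)]
assert _to_list_tuples(bare) == [("Lauan Log", 1)]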
205
backend/app/router.py
Normal file
@@ -0,0 +1,205 @@
from typing import List, Optional, Tuple, Any
from datetime import datetime

from fastapi import APIRouter, Depends, Query, HTTPException, Path, Response
from sqlalchemy import text, select
from sqlalchemy.ext.asyncio import AsyncSession
from pydantic import BaseModel

from .database import get_session
from .models import Inventory

router = APIRouter()

# ---------------------------------------------------------------------------
# Crafting Recipes endpoints

ALLOWED_CRAFTS = {
    "woodworking": "recipes_woodworking",
    # Future crafts can be added here, e.g. "smithing": "recipes_smithing"
}


class MetadataResponse(BaseModel):
    storage_types: List[str]
    type_descriptions: List[str]


@router.get("/metadata", response_model=MetadataResponse)
async def metadata(session: AsyncSession = Depends(get_session)):
    """Return distinct storage types and type descriptions."""
    storage_q = await session.execute(text("SELECT DISTINCT storage_type FROM inventory ORDER BY storage_type"))
    storage_types = [row[0] for row in storage_q.fetchall() if row[0]]

    type_q = await session.execute(text("SELECT DISTINCT type_description FROM all_items ORDER BY type_description"))
    original_types = {row[0] for row in type_q.fetchall() if row[0]}

    processed_types = set(original_types)
    has_nothing_or_unknown = 'NOTHING' in processed_types or 'UNKNOWN' in processed_types

    if 'NOTHING' in processed_types:
        processed_types.remove('NOTHING')
    if 'UNKNOWN' in processed_types:
        processed_types.remove('UNKNOWN')

    if has_nothing_or_unknown:
        processed_types.add('MANNEQUIN')

    type_descriptions = sorted(list(processed_types))

    return MetadataResponse(storage_types=storage_types, type_descriptions=type_descriptions)


class InventoryItem(BaseModel):
    id: int
    item_name: str
    quantity: int
    storage_type: str
    description: Optional[str]
    icon_id: Optional[str]
    type_description: Optional[str]
    last_updated: Optional[datetime]

    class Config:
        from_attributes = True


@router.get("/inventory/{character}", response_model=List[InventoryItem])
async def inventory(
    character: str,
    storage_type: Optional[str] = Query(None),
    session: AsyncSession = Depends(get_session),
):
    """Return items for a character, optionally filtered by storage_type."""
    base_sql = """
        SELECT i.*, ai.description, ai.icon_id, ai.type_description
        FROM inventory i
        LEFT JOIN all_items ai ON ai.name = i.item_name
        WHERE i.character_name = :char
    """
    params = {"char": character}
    if storage_type:
        base_sql += " AND i.storage_type = :storage"
        params["storage"] = storage_type
    q = text(base_sql)
    result = await session.execute(q, params)
    rows = result.fetchall()
    return [InventoryItem(
        id=r.id,
        item_name=r.item_name,
        quantity=r.quantity,
        storage_type=r.storage_type,
        description=r.description,
        icon_id=r.icon_id,
        type_description=r.type_description,
        last_updated=r.last_updated,
    ) for r in rows]


class ItemSummary(BaseModel):
    id: int
    name: str
    icon_id: Optional[str]
    type_description: Optional[str]


@router.get("/items", response_model=List[ItemSummary])
async def items(
    response: Response,
    type: Optional[str] = Query(None, alias="type"),
    search: Optional[str] = Query(None, description="Substring search on item name"),
    page: int = Query(1, ge=1),
    page_size: int = Query(40, ge=1, le=100),
    session: AsyncSession = Depends(get_session),
):
    """Return items from all_items view with pagination."""
    offset = (page - 1) * page_size
    params = {"limit": page_size, "offset": offset}
    where_clauses = ["name != '.'"]

    if type:
        if type == 'MANNEQUIN':
            where_clauses.append("type_description IN ('MANNEQUIN', 'NOTHING', 'UNKNOWN')")
        else:
            where_clauses.append("type_description = :type")
            params["type"] = type

    if search:
        where_clauses.append("name ILIKE :search")
        params["search"] = f"%{search}%"

    where_sql = f"WHERE {' AND '.join(where_clauses)}" if where_clauses else ""

    # Calculate total count for pagination
    count_q = text(f"SELECT COUNT(*) FROM all_items {where_sql}")
    # Use only relevant params for count query (exclude limit/offset)
    count_params = {k: v for k, v in params.items() if k not in ("limit", "offset")}
    total_res = await session.execute(count_q, count_params)
    total_count = total_res.scalar() or 0
    response.headers["X-Total-Count"] = str(total_count)

    q = text(
        f"SELECT id, name, icon_id, type_description FROM all_items {where_sql} ORDER BY id LIMIT :limit OFFSET :offset"
    )
    result = await session.execute(q, params)
    rows = result.fetchall()
    return [ItemSummary(id=r.id, name=r.name, icon_id=r.icon_id, type_description=r.type_description) for r in rows]


class ItemDetail(BaseModel):
    id: int
    name: str
    description: Optional[str]
    icon_id: Optional[str]
    type_description: Optional[str]


@router.get("/icon/{icon_id}")
async def get_icon(icon_id: str, session: AsyncSession = Depends(get_session)):
    q = text("SELECT image_data, image_format, image_encoding FROM item_icons WHERE id = :id LIMIT 1")
    res = await session.execute(q, {"id": icon_id})
    row = res.fetchone()
    if not row:
        raise HTTPException(status_code=404, detail="Icon not found")
    import base64
    if row.image_encoding == "base64":
        data_bytes = base64.b64decode(row.image_data)
    else:
        data_bytes = row.image_data
    media_type = f"image/{row.image_format.split('/')[-1]}" if row.image_format else "image/png"
    from fastapi.responses import Response
    return Response(content=data_bytes, media_type=media_type)


@router.get("/items/by-name/{item_name}", response_model=ItemDetail)
async def item_detail_by_name(item_name: str, session: AsyncSession = Depends(get_session)):
    q = text("SELECT * FROM all_items WHERE name = :n LIMIT 1")
    row = (await session.execute(q, {"n": item_name})).fetchone()
    if not row:
        raise HTTPException(status_code=404, detail="Item not found")
    return ItemDetail(
        id=row.id,
        name=row.name,
        description=row.description,
        icon_id=row.icon_id,
        type_description=row.type_description,
    )


@router.get("/items/{item_id}", response_model=ItemDetail)
async def item_detail(item_id: int, session: AsyncSession = Depends(get_session)):
    """Fetch full item record from all_items view."""
    q = text("SELECT * FROM all_items WHERE id = :id LIMIT 1")
    result = await session.execute(q, {"id": item_id})
    row = result.fetchone()
    if not row:
        raise HTTPException(status_code=404, detail="Item not found")

    return ItemDetail(
        id=row.id,
        name=row.name,
        description=row.description,
        icon_id=row.icon_id,
        type_description=row.type_description,
    )
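For reference, a client-side sketch of the /api/items pagination contract defined above: it assumes the API is running locally on port 8000 and that httpx is installed (it is not listed in requirements.txt); the filter values are examples only.

# Illustrative client call exercising pagination and the X-Total-Count header.
import httpx

resp = httpx.get(
    "http://localhost:8000/api/items",
    params={"search": "knife", "page": 1, "page_size": 40},
)
resp.raise_for_status()

total = int(resp.headers["X-Total-Count"])   # total rows matching the filters
items = resp.json()                          # at most page_size summaries
pages = -(-total // 40)                      # ceiling division for page count
print(f"page 1 of {pages}: {len(items)} items")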
6
backend/requirements.txt
Normal file
@@ -0,0 +1,6 @@
fastapi==0.110.0
uvicorn[standard]==0.29.0
SQLAlchemy[asyncio]==2.0.27
asyncpg==0.29.0
pydantic==2.7.1
python-dotenv==1.0.1