storyteller/main.py
Aodhan Collins 41975ecfe2 Phase 2 complete
2025-10-12 02:18:56 +01:00


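"""Storyteller RPG backend.

FastAPI service providing REST and WebSocket endpoints for game sessions,
character profiles, private/public messaging between characters and a human
storyteller, and LLM-assisted suggestions via OpenAI or OpenRouter.
"""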
from fastapi import FastAPI, HTTPException, WebSocket, WebSocketDisconnect
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel, Field
from typing import Dict, List, Optional
import uuid
import os
from dotenv import load_dotenv
from openai import AsyncOpenAI
import asyncio
import re
from datetime import datetime
import httpx
# Load environment variables
load_dotenv()
# Initialize FastAPI
app = FastAPI(title="Storyteller RPG API")
# CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Initialize LLM credentials
openai_api_key = os.getenv("OPENAI_API_KEY")
openrouter_api_key = os.getenv("OPENROUTER_API_KEY")
# Only construct the OpenAI client when a key is present; AsyncOpenAI() raises at
# construction time if no API key is configured.
client = AsyncOpenAI(api_key=openai_api_key) if openai_api_key else None
if not openai_api_key and not openrouter_api_key:
    print("Warning: Neither OPENAI_API_KEY nor OPENROUTER_API_KEY set. AI features will not work.")
# Models
class Message(BaseModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    sender: str  # "character" or "storyteller"
    content: str
    timestamp: str = Field(default_factory=lambda: datetime.now().isoformat())
    visibility: str = "private"  # "public", "private", "mixed"
    public_content: Optional[str] = None  # For mixed messages - visible to all
    private_content: Optional[str] = None  # For mixed messages - only storyteller sees

class CharacterProfile(BaseModel):
    """Character profile with race, class, gender, and personality traits"""
    gender: str = "Male"  # Male, Female, Non-binary, Custom
    race: str = "Human"  # Human, Elf, Dwarf, Orc, Halfling
    character_class: str = "Warrior"  # Warrior, Wizard, Cleric, Archer, Rogue
    personality_type: str = "Friendly"  # Friendly, Serious, Doubtful, Measured
    background: str = ""  # Custom background story
    avatar_data: Optional[str] = None  # base64 encoded avatar image

class Character(BaseModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    name: str
    description: str
    personality: str = ""  # Additional personality traits (legacy field)
    profile: Optional[CharacterProfile] = None  # Structured profile
    llm_model: str = "gpt-3.5-turbo"  # LLM model for this character
    conversation_history: List[Message] = []  # Private conversation with storyteller
    pending_response: bool = False  # Waiting for storyteller response

class StorytellerResponse(BaseModel):
    character_id: str
    content: str

class GameSession(BaseModel):
    id: str = Field(default_factory=lambda: str(uuid.uuid4()))
    name: str
    characters: Dict[str, Character] = {}
    current_scene: str = ""
    scene_history: List[str] = []  # All scenes narrated
    public_messages: List[Message] = []  # Public messages visible to all characters
# Character Profile Prompt Templates
RACE_PROMPTS = {
    "Human": "You are a human character, versatile and adaptable to any situation. You have a balanced approach to problem-solving.",
    "Elf": "You are an elf, graceful and wise with centuries of experience. You have keen senses and a deep connection to nature and magic.",
    "Dwarf": "You are a dwarf, stout and honorable with deep knowledge of stone and metal. You are loyal, practical, and value tradition.",
    "Orc": "You are an orc, powerful and direct with a strong sense of honor and combat prowess. You value strength and straightforward action.",
    "Halfling": "You are a halfling, small but brave with natural luck and a cheerful disposition. You are resourceful and enjoy the simple pleasures of life."
}

CLASS_PROMPTS = {
    "Warrior": "You excel in physical combat and tactics, preferring direct action and protecting your allies. You are brave and decisive in battle.",
    "Wizard": "You are a master of arcane arts, solving problems with magic and knowledge. You are intellectual, curious, and often seek understanding before action.",
    "Cleric": "You channel divine power to heal and protect, guided by faith and compassion. You support your allies and seek to help those in need.",
    "Archer": "You are a skilled marksman, preferring distance and precision in combat. You are patient, observant, and value accuracy over brute force.",
    "Rogue": "You rely on stealth and cunning, using tricks and skills to overcome obstacles. You are clever, adaptable, and often find unconventional solutions."
}

PERSONALITY_PROMPTS = {
    "Friendly": "You are friendly and approachable, always looking for the good in others. You prefer cooperation and building positive relationships.",
    "Serious": "You are serious and focused, prioritizing efficiency and practical solutions. You are disciplined and value getting things done.",
    "Doubtful": "You are cautious and skeptical, questioning motives and analyzing situations carefully. You prefer to be prepared for potential threats.",
    "Measured": "You are measured and thoughtful, weighing options carefully before acting. You seek balance and consider multiple perspectives."
}
def build_character_system_prompt(character: Character) -> str:
    """Build system prompt from character profile"""
    if not character.profile:
        # Legacy character without profile
        base_prompt = f"You are {character.name}. {character.description}"
        if character.personality:
            base_prompt += f" {character.personality}"
        return base_prompt
    # Build prompt from profile
    profile = character.profile
    race_trait = RACE_PROMPTS.get(profile.race, "")
    class_trait = CLASS_PROMPTS.get(profile.character_class, "")
    personality_trait = PERSONALITY_PROMPTS.get(profile.personality_type, "")
    prompt_parts = [
        f"You are {character.name}, a {profile.gender.lower()} {profile.race} {profile.character_class}.",
        character.description,
        race_trait,
        class_trait,
        personality_trait,
    ]
    if profile.background:
        prompt_parts.append(f"Background: {profile.background}")
    if character.personality:  # Legacy personality field
        prompt_parts.append(character.personality)
    return " ".join(filter(None, prompt_parts))
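# Illustrative example (not executed): for a character named "Willow" whose profile is
# gender="Female", race="Elf", character_class="Archer", personality_type="Measured",
# build_character_system_prompt() returns roughly:
#   "You are Willow, a female Elf Archer. <description> You are an elf, graceful and wise
#    with centuries of experience. ... You are a skilled marksman, ... You are measured and
#    thoughtful, ..." followed by any background and legacy personality text.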
# In-memory storage (replace with database in production)
sessions: Dict[str, GameSession] = {}
# WebSocket connection manager
class ConnectionManager:
    def __init__(self):
        self.active_connections: Dict[str, WebSocket] = {}  # key: "session_character" or "session_storyteller"

    async def connect(self, websocket: WebSocket, client_id: str):
        await websocket.accept()
        self.active_connections[client_id] = websocket

    def disconnect(self, client_id: str):
        if client_id in self.active_connections:
            del self.active_connections[client_id]

    async def send_to_client(self, client_id: str, message: dict):
        if client_id in self.active_connections:
            await self.active_connections[client_id].send_json(message)
manager = ConnectionManager()
# API Endpoints
@app.post("/sessions/")
async def create_session(name: str):
    session = GameSession(name=name)
    sessions[session.id] = session
    return session

@app.get("/sessions/{session_id}")
async def get_session(session_id: str):
    if session_id not in sessions:
        raise HTTPException(status_code=404, detail="Session not found")
    return sessions[session_id]
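# Illustrative usage sketch (assumes a server on http://localhost:8000 and the httpx
# package; not executed as part of this module). Note that `name` is a query parameter
# because create_session() declares a bare `str` argument rather than a body model:
#
#   import httpx
#   created = httpx.post("http://localhost:8000/sessions/", params={"name": "My Campaign"}).json()
#   fetched = httpx.get(f"http://localhost:8000/sessions/{created['id']}").json()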
class CreateCharacterRequest(BaseModel):
    name: str
    description: str
    personality: str = ""  # Legacy field
    llm_model: str = "gpt-3.5-turbo"
    profile: Optional[CharacterProfile] = None
@app.post("/sessions/{session_id}/characters/")
async def add_character(
    session_id: str,
    request: CreateCharacterRequest
):
    if session_id not in sessions:
        raise HTTPException(status_code=404, detail="Session not found")
    character = Character(
        name=request.name,
        description=request.description,
        personality=request.personality,
        profile=request.profile,
        llm_model=request.llm_model
    )
    session = sessions[session_id]
    session.characters[character.id] = character
    # Notify storyteller of new character
    storyteller_key = f"{session_id}_storyteller"
    if storyteller_key in manager.active_connections:
        await manager.send_to_client(storyteller_key, {
            "type": "character_joined",
            "character": {
                "id": character.id,
                "name": character.name,
                "description": character.description,
                "llm_model": character.llm_model,
                "profile": character.profile.model_dump() if character.profile else None
            }
        })
    return character
# Legacy endpoint for backward compatibility
@app.post("/sessions/{session_id}/characters/legacy/")
async def add_character_legacy(
    session_id: str,
    name: str,
    description: str,
    personality: str = "",
    llm_model: str = "gpt-3.5-turbo"
):
    request = CreateCharacterRequest(
        name=name,
        description=description,
        personality=personality,
        llm_model=llm_model
    )
    return await add_character(session_id, request)
# Export character to JSON
@app.get("/sessions/{session_id}/characters/{character_id}/export")
async def export_character(session_id: str, character_id: str):
    """Export character profile to JSON"""
    if session_id not in sessions:
        raise HTTPException(status_code=404, detail="Session not found")
    session = sessions[session_id]
    if character_id not in session.characters:
        raise HTTPException(status_code=404, detail="Character not found")
    character = session.characters[character_id]
    export_data = {
        "version": "1.0",
        "character": character.model_dump(),
        "created_at": datetime.now().isoformat(),
        "export_type": "storyteller_rpg_character"
    }
    return export_data
# Import character from JSON
class ImportCharacterRequest(BaseModel):
    character_data: dict

@app.post("/sessions/{session_id}/characters/import")
async def import_character(session_id: str, request: ImportCharacterRequest):
    """Import character from exported JSON"""
    if session_id not in sessions:
        raise HTTPException(status_code=404, detail="Session not found")
    try:
        # Validate and extract character data
        char_data = request.character_data
        if "character" in char_data:
            char_data = char_data["character"]
        # Create character from imported data
        character = Character(**char_data)
        # Generate new ID to avoid conflicts
        character.id = str(uuid.uuid4())
        # Clear conversation history
        character.conversation_history = []
        character.pending_response = False
        session = sessions[session_id]
        session.characters[character.id] = character
        # Notify storyteller
        storyteller_key = f"{session_id}_storyteller"
        if storyteller_key in manager.active_connections:
            await manager.send_to_client(storyteller_key, {
                "type": "character_joined",
                "character": {
                    "id": character.id,
                    "name": character.name,
                    "description": character.description,
                    "llm_model": character.llm_model,
                    "profile": character.profile.model_dump() if character.profile else None
                }
            })
        return character
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Invalid character data: {str(e)}")
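# Illustrative export/import round trip (assumes httpx and a running server; `base`,
# `sid`, `other_sid`, and `cid` are placeholder values):
#
#   exported = httpx.get(f"{base}/sessions/{sid}/characters/{cid}/export").json()
#   copied = httpx.post(f"{base}/sessions/{other_sid}/characters/import",
#                       json={"character_data": exported}).json()
#
# The import endpoint accepts either the full export envelope or just its "character"
# payload, assigns a fresh id, and clears the conversation history.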
# Get profile options
@app.get("/profile/options")
async def get_profile_options():
    """Get available profile options for character creation"""
    return {
        "genders": ["Male", "Female", "Non-binary", "Custom"],
        "races": list(RACE_PROMPTS.keys()),
        "classes": list(CLASS_PROMPTS.keys()),
        "personality_types": list(PERSONALITY_PROMPTS.keys()),
        "race_descriptions": {
            "Human": "Versatile and adaptable",
            "Elf": "Graceful, wise, with keen senses",
            "Dwarf": "Stout, loyal, master craftsmen",
            "Orc": "Powerful, direct, honorable",
            "Halfling": "Small, brave, lucky"
        },
        "class_descriptions": {
            "Warrior": "Physical combat and tactics",
            "Wizard": "Arcane magic and knowledge",
            "Cleric": "Divine power and healing",
            "Archer": "Ranged combat and precision",
            "Rogue": "Stealth, cunning, and skills"
        },
        "personality_descriptions": {
            "Friendly": "Optimistic and cooperative",
            "Serious": "Focused and pragmatic",
            "Doubtful": "Cautious and analytical",
            "Measured": "Balanced and thoughtful"
        }
    }
# WebSocket endpoint for character interactions (character view)
@app.websocket("/ws/character/{session_id}/{character_id}")
async def character_websocket(websocket: WebSocket, session_id: str, character_id: str):
    if session_id not in sessions or character_id not in sessions[session_id].characters:
        await websocket.close(code=1008, reason="Session or character not found")
        return
    client_key = f"{session_id}_{character_id}"
    await manager.connect(websocket, client_key)
    try:
        # Send conversation history and public messages
        session = sessions[session_id]
        character = session.characters[character_id]
        await websocket.send_json({
            "type": "history",
            "messages": [msg.model_dump() for msg in character.conversation_history],
            "public_messages": [msg.model_dump() for msg in session.public_messages]
        })
        while True:
            data = await websocket.receive_json()
            if data.get("type") == "message":
                # Character sends message (can be public, private, or mixed)
                visibility = data.get("visibility", "private")
                message = Message(
                    sender="character",
                    content=data["content"],
                    visibility=visibility,
                    public_content=data.get("public_content"),
                    private_content=data.get("private_content")
                )
                # Add to appropriate feed(s)
                if visibility == "public":
                    session.public_messages.append(message)
                    # Broadcast to all characters
                    for char_id in session.characters:
                        char_key = f"{session_id}_{char_id}"
                        if char_key in manager.active_connections:
                            await manager.send_to_client(char_key, {
                                "type": "public_message",
                                "character_name": character.name,
                                "message": message.model_dump()
                            })
                elif visibility == "mixed":
                    session.public_messages.append(message)
                    # Broadcast public part to all characters
                    for char_id in session.characters:
                        char_key = f"{session_id}_{char_id}"
                        if char_key in manager.active_connections:
                            await manager.send_to_client(char_key, {
                                "type": "public_message",
                                "character_name": character.name,
                                "message": message.model_dump()
                            })
                    # Add to character's private conversation
                    character.conversation_history.append(message)
                    character.pending_response = True
                else:  # private
                    character.conversation_history.append(message)
                    character.pending_response = True
                # Forward to storyteller
                storyteller_key = f"{session_id}_storyteller"
                if storyteller_key in manager.active_connections:
                    await manager.send_to_client(storyteller_key, {
                        "type": "character_message",
                        "character_id": character_id,
                        "character_name": character.name,
                        "message": message.model_dump()
                    })
    except WebSocketDisconnect:
        manager.disconnect(client_key)
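# Illustrative character-side client sketch (assumes the `websockets` package and a
# running server; the session and character IDs below come from the demo session):
#
#   import asyncio, json, websockets
#
#   async def play(session_id: str, character_id: str):
#       uri = f"ws://localhost:8000/ws/character/{session_id}/{character_id}"
#       async with websockets.connect(uri) as ws:
#           print(json.loads(await ws.recv()))  # initial "history" payload
#           await ws.send(json.dumps({
#               "type": "message",
#               "content": "I kick open the tavern door.",
#               "visibility": "public",
#           }))
#           print(json.loads(await ws.recv()))  # e.g. the "public_message" broadcast
#
#   asyncio.run(play("demo-session-001", "char-bargin-001"))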
# WebSocket endpoint for storyteller
@app.websocket("/ws/storyteller/{session_id}")
async def storyteller_websocket(websocket: WebSocket, session_id: str):
    if session_id not in sessions:
        await websocket.close(code=1008, reason="Session not found")
        return
    client_key = f"{session_id}_storyteller"
    await manager.connect(websocket, client_key)
    try:
        # Send all characters and their conversation states
        session = sessions[session_id]
        await websocket.send_json({
            "type": "session_state",
            "characters": {
                char_id: {
                    "id": char.id,
                    "name": char.name,
                    "description": char.description,
                    "personality": char.personality,
                    "conversation_history": [msg.model_dump() for msg in char.conversation_history],
                    "pending_response": char.pending_response
                }
                for char_id, char in session.characters.items()
            },
            "current_scene": session.current_scene,
            "public_messages": [msg.model_dump() for msg in session.public_messages]
        })
        while True:
            data = await websocket.receive_json()
            if data.get("type") == "respond_to_character":
                # Storyteller responds to a specific character
                character_id = data["character_id"]
                content = data["content"]
                if character_id in session.characters:
                    character = session.characters[character_id]
                    message = Message(sender="storyteller", content=content)
                    character.conversation_history.append(message)
                    character.pending_response = False
                    # Send to character
                    char_key = f"{session_id}_{character_id}"
                    if char_key in manager.active_connections:
                        await manager.send_to_client(char_key, {
                            "type": "storyteller_response",
                            "message": message.model_dump()
                        })
            elif data.get("type") == "narrate_scene":
                # Broadcast scene to all characters
                scene = data["content"]
                session.current_scene = scene
                session.scene_history.append(scene)
                # Send to all connected characters
                for char_id in session.characters:
                    char_key = f"{session_id}_{char_id}"
                    if char_key in manager.active_connections:
                        await manager.send_to_client(char_key, {
                            "type": "scene_narration",
                            "content": scene
                        })
    except WebSocketDisconnect:
        manager.disconnect(client_key)
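# Illustrative storyteller-side payloads (shapes only, matching the handler above):
#
#   {"type": "respond_to_character", "character_id": "<character id>", "content": "A shadow stirs..."}
#   {"type": "narrate_scene", "content": "Rain hammers the tavern roof..."}
#
# The first is appended to that character's private history and pushed to their socket
# as "storyteller_response"; the second updates current_scene, extends scene_history,
# and is broadcast to every connected character as "scene_narration".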
# AI-assisted response generation using character's specific LLM
async def call_llm(model: str, messages: List[dict], temperature: float = 0.8, max_tokens: int = 200) -> str:
    """Call LLM via OpenRouter or OpenAI depending on model"""
    # OpenAI models
    if model.startswith("gpt-") or model.startswith("o1-"):
        if client is None:
            return "OpenAI API key not set."
        try:
            response = await client.chat.completions.create(
                model=model,
                messages=messages,
                temperature=temperature,
                max_tokens=max_tokens
            )
            return response.choices[0].message.content
        except Exception as e:
            return f"OpenAI error: {str(e)}"
    # OpenRouter models (Claude, Llama, Gemini, etc.)
    else:
        if not openrouter_api_key:
            return "OpenRouter API key not set."
        try:
            async with httpx.AsyncClient() as http_client:
                response = await http_client.post(
                    "https://openrouter.ai/api/v1/chat/completions",
                    headers={
                        "Authorization": f"Bearer {openrouter_api_key}",
                        "HTTP-Referer": "http://localhost:3000",
                        "X-Title": "Storyteller RPG"
                    },
                    json={
                        "model": model,
                        "messages": messages,
                        "temperature": temperature,
                        "max_tokens": max_tokens
                    },
                    timeout=30.0
                )
                response.raise_for_status()
                data = response.json()
                return data["choices"][0]["message"]["content"]
        except Exception as e:
            return f"OpenRouter error: {str(e)}"
@app.post("/sessions/{session_id}/generate_suggestion")
async def generate_suggestion(session_id: str, character_id: str, context: str = ""):
    """Generate AI suggestion for storyteller response to a character using the character's LLM"""
    if session_id not in sessions:
        raise HTTPException(status_code=404, detail="Session not found")
    session = sessions[session_id]
    if character_id not in session.characters:
        raise HTTPException(status_code=404, detail="Character not found")
    character = session.characters[character_id]
    # Prepare context for AI suggestion using character profile
    system_prompt = build_character_system_prompt(character)
    if session.current_scene:
        system_prompt += f" Current scene: {session.current_scene}"
    messages = [
        {
            "role": "system",
            "content": system_prompt
        }
    ]
    # Add recent conversation history
    for msg in character.conversation_history[-6:]:
        role = "assistant" if msg.sender == "character" else "user"
        messages.append({"role": role, "content": msg.content})
    if context:
        messages.append({"role": "user", "content": f"Additional context: {context}"})
    try:
        suggestion = await call_llm(character.llm_model, messages, temperature=0.8, max_tokens=200)
        return {"suggestion": suggestion, "model_used": character.llm_model}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error generating suggestion: {str(e)}")
# Generate context-aware response with multiple characters
class ContextualResponseRequest(BaseModel):
    character_ids: List[str]  # List of character IDs to include in context
    response_type: str = "scene"  # "scene" (broadcast) or "individual" (per character)
    model: str = "gpt-4o"
    additional_context: Optional[str] = None
@app.post("/sessions/{session_id}/generate_contextual_response")
async def generate_contextual_response(
    session_id: str,
    request: ContextualResponseRequest
):
    """Generate a storyteller response using context from multiple characters"""
    if session_id not in sessions:
        raise HTTPException(status_code=404, detail="Session not found")
    session = sessions[session_id]
    # Validate all character IDs exist
    for char_id in request.character_ids:
        if char_id not in session.characters:
            raise HTTPException(status_code=404, detail=f"Character {char_id} not found")
    # Build context from all selected characters
    context_parts = []
    context_parts.append("You are the storyteller/game master in an RPG session. Here's what the characters have done:")
    context_parts.append("")
    # Add current scene if available
    if session.current_scene:
        context_parts.append(f"Current Scene: {session.current_scene}")
        context_parts.append("")
    # Add public messages for context
    if session.public_messages:
        context_parts.append("Recent public actions:")
        for msg in session.public_messages[-5:]:
            context_parts.append(f"- {msg.content}")
        context_parts.append("")
    # Add each character's recent messages
    for char_id in request.character_ids:
        character = session.characters[char_id]
        context_parts.append(f"Character: {character.name}")
        context_parts.append(f"Description: {character.description}")
        if character.personality:
            context_parts.append(f"Personality: {character.personality}")
        # Add recent conversation
        if character.conversation_history:
            context_parts.append("Recent messages:")
            for msg in character.conversation_history[-3:]:
                sender_label = character.name if msg.sender == "character" else "You (Storyteller)"
                context_parts.append(f" {sender_label}: {msg.content}")
        else:
            context_parts.append("(No messages yet)")
        context_parts.append("")
    # Add additional context if provided
    if request.additional_context:
        context_parts.append(f"Additional context: {request.additional_context}")
        context_parts.append("")
    # Build the prompt based on response type
    if request.response_type == "scene":
        context_parts.append("Generate a scene description that addresses the actions and situations of all these characters. The scene should be vivid and incorporate what each character has done or asked about.")
    else:
        context_parts.append("Generate individual responses for each character, taking into account all their actions and the context of what's happening.")
        context_parts.append("")
        context_parts.append("IMPORTANT: Format your response EXACTLY as follows, with each character's response on a separate line:")
        context_parts.append("")
        for char_id in request.character_ids:
            char_name = session.characters[char_id].name
            context_parts.append(f"[{char_name}] Your response for {char_name} here (2-3 sentences)")
        context_parts.append("")
        context_parts.append("Use EXACTLY this format with square brackets and character names. Do not add any other text before or after.")
    full_context = "\n".join(context_parts)
    # Call LLM with the context
    system_prompt = "You are a creative and engaging RPG storyteller/game master."
    if request.response_type == "individual":
        system_prompt += " When asked to format responses with [CharacterName] brackets, you MUST follow that exact format precisely. Use square brackets around each character's name, followed by their response text."
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": full_context}
    ]
    try:
        response = await call_llm(request.model, messages, temperature=0.8, max_tokens=500)
        # If individual responses, parse and send to each character
        if request.response_type == "individual":
            # Parse the response to extract individual parts
            # Create a map of character names to IDs
            name_to_id = {session.characters[char_id].name: char_id for char_id in request.character_ids}
            # Parse responses in format: "[CharName] response text"
            sent_responses = {}
            for char_name, char_id in name_to_id.items():
                # The pattern captures everything after [CharName] until the next [AnotherName] or end of string
                pattern = rf'\[{re.escape(char_name)}\]\s*(.*?)(?=\[[\w\s]+\]|\Z)'
                match = re.search(pattern, response, re.DOTALL | re.IGNORECASE)
                if match:
                    individual_response = match.group(1).strip()
                    # Clean up any trailing newlines or extra whitespace
                    individual_response = ' '.join(individual_response.split())
                    if individual_response:  # Only send if we got actual content
                        # Append to the character's private conversation history
                        character = session.characters[char_id]
                        storyteller_message = Message(
                            sender="storyteller",
                            content=individual_response,
                            visibility="private"
                        )
                        character.conversation_history.append(storyteller_message)
                        character.pending_response = False
                        sent_responses[char_name] = individual_response
                        # Notify via WebSocket if connected
                        char_key = f"{session_id}_{char_id}"
                        if char_key in manager.active_connections:
                            try:
                                await manager.send_to_client(char_key, {
                                    "type": "new_message",
                                    "message": storyteller_message.model_dump()
                                })
                            except Exception:
                                # Delivery failures should not abort the whole response
                                pass
            return {
                "response": response,
                "model_used": request.model,
                "characters_included": [
                    {
                        "id": char_id,
                        "name": session.characters[char_id].name
                    }
                    for char_id in request.character_ids
                ],
                "response_type": request.response_type,
                "individual_responses_sent": sent_responses,
                "success": len(sent_responses) > 0
            }
        else:
            # Scene description - just return the response
            return {
                "response": response,
                "model_used": request.model,
                "characters_included": [
                    {
                        "id": char_id,
                        "name": session.characters[char_id].name
                    }
                    for char_id in request.character_ids
                ],
                "response_type": request.response_type
            }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error generating response: {str(e)}")
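# Illustrative request body for POST /sessions/{session_id}/generate_contextual_response
# (the character IDs below come from the demo session; any valid IDs work):
#
#   {
#       "character_ids": ["char-bargin-001", "char-willow-002"],
#       "response_type": "individual",
#       "model": "gpt-4o",
#       "additional_context": "Both characters are now inside the tavern."
#   }
#
# With response_type="individual" the LLM output is parsed for "[CharacterName] ..."
# segments and each match is delivered to that character's private conversation.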
# Get available LLM models
@app.get("/models")
async def get_available_models():
    """Get list of available LLM models"""
    models = {
        "openai": [],
        "openrouter": []
    }
    if openai_api_key:
        models["openai"] = [
            {"id": "gpt-4o", "name": "GPT-4o (Latest)", "provider": "OpenAI"},
            {"id": "gpt-4-turbo", "name": "GPT-4 Turbo", "provider": "OpenAI"},
            {"id": "gpt-4", "name": "GPT-4", "provider": "OpenAI"},
            {"id": "gpt-3.5-turbo", "name": "GPT-3.5 Turbo (Fast & Cheap)", "provider": "OpenAI"},
        ]
    if openrouter_api_key:
        models["openrouter"] = [
            {"id": "anthropic/claude-3.5-sonnet", "name": "Claude 3.5 Sonnet", "provider": "Anthropic"},
            {"id": "anthropic/claude-3-opus", "name": "Claude 3 Opus", "provider": "Anthropic"},
            {"id": "anthropic/claude-3-haiku", "name": "Claude 3 Haiku (Fast)", "provider": "Anthropic"},
            {"id": "google/gemini-pro-1.5", "name": "Gemini Pro 1.5", "provider": "Google"},
            {"id": "meta-llama/llama-3.1-70b-instruct", "name": "Llama 3.1 70B", "provider": "Meta"},
            {"id": "meta-llama/llama-3.1-8b-instruct", "name": "Llama 3.1 8B (Fast)", "provider": "Meta"},
            {"id": "mistralai/mistral-large", "name": "Mistral Large", "provider": "Mistral"},
            {"id": "cohere/command-r-plus", "name": "Command R+", "provider": "Cohere"},
        ]
    return models
# Get all pending character messages
@app.get("/sessions/{session_id}/pending_messages")
async def get_pending_messages(session_id: str):
    if session_id not in sessions:
        raise HTTPException(status_code=404, detail="Session not found")
    session = sessions[session_id]
    pending = {}
    for char_id, char in session.characters.items():
        if char.pending_response:
            last_message = char.conversation_history[-1] if char.conversation_history else None
            if last_message and last_message.sender == "character":
                pending[char_id] = {
                    "character_name": char.name,
                    "message": last_message.model_dump()
                }
    return pending
# Get character conversation history (for storyteller)
@app.get("/sessions/{session_id}/characters/{character_id}/conversation")
async def get_character_conversation(session_id: str, character_id: str):
    if session_id not in sessions:
        raise HTTPException(status_code=404, detail="Session not found")
    session = sessions[session_id]
    if character_id not in session.characters:
        raise HTTPException(status_code=404, detail="Character not found")
    character = session.characters[character_id]
    return {
        "character": {
            "id": character.id,
            "name": character.name,
            "description": character.description,
            "personality": character.personality
        },
        "conversation": [msg.model_dump() for msg in character.conversation_history],
        "pending_response": character.pending_response
    }
# Create a default test session on startup
def create_demo_session():
    """Create a pre-configured demo session for testing"""
    demo_session_id = "demo-session-001"
    # Create session
    demo_session = GameSession(
        id=demo_session_id,
        name="The Cursed Tavern",
        current_scene="You stand outside the weathered doors of the Rusty Flagon tavern. Strange whispers echo from within, and the windows flicker with an eerie green light. The townspeople warned you about this place, but the reward for investigating is too good to pass up.",
        scene_history=["You arrive at the remote village of Millhaven at dusk, seeking adventure and fortune."]
    )
    # Create Character 1: Bargin the Dwarf
    bargin = Character(
        id="char-bargin-001",
        name="Bargin Ironforge",
        description="A stout dwarf warrior with a braided red beard and battle-scarred armor. Carries a massive war axe named 'Grudgekeeper'.",
        personality="Brave but reckless. Loves a good fight and a strong ale. Quick to anger but fiercely loyal to companions.",
        llm_model="gpt-3.5-turbo",
        conversation_history=[],
        pending_response=False
    )
    # Create Character 2: Willow the Elf
    willow = Character(
        id="char-willow-002",
        name="Willow Moonwhisper",
        description="An elven ranger with silver hair and piercing green eyes. Moves silently through shadows, bow always at the ready.",
        personality="Cautious and observant. Prefers to scout ahead and avoid unnecessary conflict. Has an affinity for nature and animals.",
        llm_model="gpt-3.5-turbo",
        conversation_history=[],
        pending_response=False
    )
    # Add initial conversation for context
    initial_storyteller_msg = Message(
        sender="storyteller",
        content="Welcome to the Cursed Tavern adventure! You've been hired by the village elder to investigate strange happenings at the old tavern. Locals report seeing ghostly figures and hearing unearthly screams. Your mission: discover what's causing the disturbances and put an end to it. What would you like to do?"
    )
    bargin.conversation_history.append(initial_storyteller_msg)
    willow.conversation_history.append(initial_storyteller_msg)
    # Add characters to session
    demo_session.characters[bargin.id] = bargin
    demo_session.characters[willow.id] = willow
    # Store session
    sessions[demo_session_id] = demo_session
    print(f"\n{'='*60}")
    print("🎲 DEMO SESSION CREATED!")
    print(f"{'='*60}")
    print(f"Session ID: {demo_session_id}")
    print(f"Session Name: {demo_session.name}")
    print("\nCharacters:")
    print(f" 1. {bargin.name} (ID: {bargin.id})")
    print(f"    {bargin.description}")
    print(f"\n 2. {willow.name} (ID: {willow.id})")
    print(f"    {willow.description}")
    print(f"\nScenario: {demo_session.name}")
    print(f"Scene: {demo_session.current_scene[:100]}...")
    print(f"\n{'='*60}")
    print(f"To join as Storyteller: Use session ID '{demo_session_id}'")
    print(f"To join as Bargin: Use session ID '{demo_session_id}' + character ID '{bargin.id}'")
    print(f"To join as Willow: Use session ID '{demo_session_id}' + character ID '{willow.id}'")
    print(f"{'='*60}\n")
    return demo_session_id
if __name__ == "__main__":
    import uvicorn

    # Create demo session on startup
    create_demo_session()
    uvicorn.run(app, host="0.0.0.0", port=8000)