text-adventure-llm/config.py
Aodhan Collins 912b205699 Initial commit.
Basic Docker deployment with local LLM integration and simple game state.
2025-08-17 19:31:33 +01:00


#!/usr/bin/env python3
"""
Configuration settings for the text-based LLM interaction system.
"""
class Config:
    """Configuration class for the LLM interaction system."""

    def __init__(self):
        """Initialize configuration settings."""
        # LM Studio server settings
        self.LM_STUDIO_HOST = "10.0.0.200"
        self.LM_STUDIO_PORT = 1234
        self.API_BASE_URL = f"http://{self.LM_STUDIO_HOST}:{self.LM_STUDIO_PORT}/v1"
        self.CHAT_COMPLETIONS_ENDPOINT = f"{self.API_BASE_URL}/chat/completions"

        # Default model settings
        self.DEFAULT_MODEL = "default_model"  # Will be updated based on available models
        self.TEMPERATURE = 0.7
        self.MAX_TOKENS = -1  # -1 means no limit

        # Request settings
        self.REQUEST_TIMEOUT = 30  # seconds

    def get_api_url(self):
        """Get the base API URL for LM Studio."""
        return self.API_BASE_URL

    def get_chat_completions_url(self):
        """Get the chat completions endpoint URL."""
        return self.CHAT_COMPLETIONS_ENDPOINT
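
For context, a minimal sketch of how this Config might be consumed by a client module, assuming the project uses the requests library to talk to LM Studio's OpenAI-compatible API. The module name, ask_model function, and payload shape below are illustrative, not taken from the repository.

#!/usr/bin/env python3
"""Illustrative client sketch; the function and payload here are hypothetical."""
import requests

from config import Config


def ask_model(prompt: str) -> str:
    """Send a single chat prompt to the LM Studio server described by Config."""
    cfg = Config()
    payload = {
        "model": cfg.DEFAULT_MODEL,
        "messages": [{"role": "user", "content": prompt}],
        "temperature": cfg.TEMPERATURE,
        "max_tokens": cfg.MAX_TOKENS,  # -1 asks LM Studio not to cap the completion length
    }
    response = requests.post(
        cfg.get_chat_completions_url(),
        json=payload,
        timeout=cfg.REQUEST_TIMEOUT,
    )
    response.raise_for_status()
    # OpenAI-compatible response shape: take the first choice's message content
    return response.json()["choices"][0]["message"]["content"]


if __name__ == "__main__":
    print(ask_model("Describe the starting room of a text adventure."))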