Initial commit.
Basic docker deployment with Local LLM integration and simple game state.
This commit is contained in:
32
config.py
Normal file
32
config.py
Normal file
@@ -0,0 +1,32 @@
#!/usr/bin/env python3
"""
Configuration settings for the text-based LLM interaction system.
"""
class Config:
    """Configuration for the LLM interaction system.

    Holds connection settings for an LM Studio server plus default model
    and request parameters.  The server host and port may be overridden
    via the ``LM_STUDIO_HOST`` / ``LM_STUDIO_PORT`` environment variables
    (convenient for docker deployments); the original hard-coded values
    remain the defaults, so existing behavior is unchanged when the
    variables are unset.
    """

    def __init__(self):
        """Initialize configuration settings, honoring env-var overrides."""
        import os  # function-scope import: file has no top-level import block

        # LM Studio server settings (env vars override the defaults)
        self.LM_STUDIO_HOST = os.environ.get("LM_STUDIO_HOST", "10.0.0.200")
        self.LM_STUDIO_PORT = int(os.environ.get("LM_STUDIO_PORT", "1234"))
        self.API_BASE_URL = f"http://{self.LM_STUDIO_HOST}:{self.LM_STUDIO_PORT}/v1"
        self.CHAT_COMPLETIONS_ENDPOINT = f"{self.API_BASE_URL}/chat/completions"

        # Default model settings
        self.DEFAULT_MODEL = "default_model"  # Will be updated based on available models
        self.TEMPERATURE = 0.7
        self.MAX_TOKENS = -1  # -1 means no limit

        # Request settings
        self.REQUEST_TIMEOUT = 30  # seconds

    def get_api_url(self):
        """Get the base API URL for LM Studio."""
        return self.API_BASE_URL

    def get_chat_completions_url(self):
        """Get the chat completions endpoint URL."""
        return self.CHAT_COMPLETIONS_ENDPOINT
Reference in New Issue
Block a user