Initial commit.

Basic Docker deployment with local LLM integration and a simple game state.
Aodhan Collins
2025-08-17 19:31:33 +01:00
commit 912b205699
30 changed files with 2476 additions and 0 deletions

test_llm_connection.py Normal file

@@ -0,0 +1,38 @@
#!/usr/bin/env python3
"""
Test script for connecting to LM Studio.
"""

from config import Config
from llm_client import LLMClient


def test_connection():
    """Test the connection to LM Studio."""
    print("Testing connection to LM Studio...")
    print("-" * 30)

    # Create config and client
    config = Config()
    llm_client = LLMClient(config)

    # Display connection details
    print(f"Host: {config.LM_STUDIO_HOST}")
    print(f"Port: {config.LM_STUDIO_PORT}")
    print(f"API URL: {config.get_api_url()}")
    print(f"Chat Completions URL: {config.get_chat_completions_url()}")
    print()

    # Test connection
    try:
        success = llm_client.test_connection()
        if success:
            print("✓ Connection to LM Studio successful!")
        else:
            print("✗ Failed to connect to LM Studio")
    except Exception as e:
        print(f"✗ Error testing connection: {e}")


if __name__ == "__main__":
    test_connection()
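
The script imports Config and LLMClient from the commit's config.py and llm_client.py, which are not shown in this diff. Below is a minimal sketch of the interfaces the test exercises, assuming LM Studio's OpenAI-compatible HTTP API on its default port 1234; the names, defaults, and endpoint used here are illustrative assumptions, not the committed implementation.

# Hypothetical sketch of the Config and LLMClient interfaces the test script
# relies on; the real config.py and llm_client.py in this commit may differ.
import os
import requests


class Config:
    """Connection settings for a local LM Studio instance (assumed defaults)."""

    def __init__(self):
        # Defaults assume LM Studio running locally on its default port.
        self.LM_STUDIO_HOST = os.getenv("LM_STUDIO_HOST", "localhost")
        self.LM_STUDIO_PORT = int(os.getenv("LM_STUDIO_PORT", "1234"))

    def get_api_url(self):
        return f"http://{self.LM_STUDIO_HOST}:{self.LM_STUDIO_PORT}/v1"

    def get_chat_completions_url(self):
        return f"{self.get_api_url()}/chat/completions"


class LLMClient:
    """Thin client around LM Studio's OpenAI-compatible HTTP API."""

    def __init__(self, config):
        self.config = config

    def test_connection(self):
        # Lightweight reachability check against the models listing endpoint.
        try:
            response = requests.get(f"{self.config.get_api_url()}/models", timeout=5)
            return response.status_code == 200
        except requests.RequestException:
            return False

Inside the Docker deployment mentioned in the commit message, LM_STUDIO_HOST would typically be overridden via an environment variable so the container can reach the host machine (for example host.docker.internal) rather than localhost.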