Initial commit.

Basic docker deployment with Local LLM integration and simple game state.
Author: Aodhan Collins
Date: 2025-08-17 19:31:33 +01:00
Commit: 912b205699
30 changed files with 2476 additions and 0 deletions
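Note: the "Local LLM integration" in this commit talks to LM Studio, which serves an OpenAI-compatible API on a local port. The actual config.py and llm_client.py are not shown in this excerpt, so the sketch below is only a guess at their shape: the class and method names (Config, LLMClient.test_connection, LLMClient.get_response) are inferred from how test_llm_exchange.py uses them, and the base URL and model name are placeholder assumptions.

# Hypothetical sketch of the config / llm_client modules this commit introduces.
# Names and defaults are inferred from test_llm_exchange.py, not from the real files.
import requests


class Config:
    """Assumed settings for a local LM Studio server (OpenAI-compatible API)."""
    base_url = "http://localhost:1234/v1"  # LM Studio's default local endpoint (assumption)
    model = "local-model"                  # placeholder model identifier
    timeout = 30


class LLMClient:
    def __init__(self, config: Config):
        self.config = config

    def test_connection(self) -> bool:
        """Return True if the LM Studio server answers on /models."""
        try:
            r = requests.get(f"{self.config.base_url}/models", timeout=self.config.timeout)
            return r.status_code == 200
        except requests.RequestException:
            return False

    def get_response(self, messages: list[dict]) -> str:
        """Send the conversation history to /chat/completions and return the reply text."""
        r = requests.post(
            f"{self.config.base_url}/chat/completions",
            json={"model": self.config.model, "messages": messages},
            timeout=self.config.timeout,
        )
        r.raise_for_status()
        return r.json()["choices"][0]["message"]["content"]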

test_llm_exchange.py (new file, 53 lines)

@@ -0,0 +1,53 @@
#!/usr/bin/env python3
"""
Test script for basic message exchange with LLM.
"""
from config import Config
from llm_client import LLMClient
from conversation import ConversationManager


def test_message_exchange():
    """Test basic message exchange with LLM."""
    print("Testing message exchange with LLM...")
    print("-" * 40)

    # Create components
    config = Config()
    llm_client = LLMClient(config)
    conversation_manager = ConversationManager()

    # Test connection first
    try:
        success = llm_client.test_connection()
        if not success:
            print("✗ Failed to connect to LM Studio")
            return
        else:
            print("✓ Connected to LM Studio")
    except Exception as e:
        print(f"✗ Error testing connection: {e}")
        return

    # Add a test message to conversation
    test_message = "Hello, are you there?"
    conversation_manager.add_user_message(test_message)
    print(f"Sending message: {test_message}")

    # Get response from LLM
    try:
        response = llm_client.get_response(conversation_manager.get_history())
        print(f"Received response: {response}")

        # Add response to conversation
        conversation_manager.add_assistant_message(response)
        print("\n✓ Message exchange successful!")
    except Exception as e:
        print(f"✗ Error in message exchange: {e}")


if __name__ == "__main__":
    test_message_exchange()
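The test also imports ConversationManager from conversation.py, which is not part of this excerpt. A minimal sketch of the interface the test relies on (add_user_message, add_assistant_message, get_history) is below, assuming the history is stored in the OpenAI chat-message format that an LM Studio-compatible client would expect; the real module may differ.

# Hypothetical sketch of conversation.py, mirroring only what test_llm_exchange.py calls.


class ConversationManager:
    """Keeps an ordered chat history in the OpenAI message format (assumption)."""

    def __init__(self):
        self._history: list[dict] = []

    def add_user_message(self, content: str) -> None:
        self._history.append({"role": "user", "content": content})

    def add_assistant_message(self, content: str) -> None:
        self._history.append({"role": "assistant", "content": content})

    def get_history(self) -> list[dict]:
        """Return a copy of the history in the shape LLMClient.get_response() expects."""
        return list(self._history)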