Initial MVP
This commit is contained in:
72
scripts/LLMService.gd
Normal file
72
scripts/LLMService.gd
Normal file
@@ -0,0 +1,72 @@
## Thin wrapper around HTTPRequest for calling the OpenRouter
## chat-completions API. Emits `response_received` with the parsed JSON
## payload on success, or `error_occurred` with a human-readable message
## on any failure.
class_name LLMService extends Node

## Fired when the LLM reply has been parsed into a Dictionary.
signal response_received(response_dict: Dictionary)
## Fired on any configuration, transport, HTTP, or parse failure.
signal error_occurred(message: String)

# Worker node that performs the async HTTP call; created in _ready().
var http_request: HTTPRequest
# Loaded from user://secrets.cfg ([auth] openrouter_key=...).
# Empty string means "not configured"; send_prompt refuses to run then.
var api_key: String = ""
# OpenRouter chat-completions endpoint.
var api_url: String = "https://openrouter.ai/api/v1/chat/completions"
# Model identifier passed through verbatim in the request body.
var model: String = "google/gemini-2.5-flash-preview-09-2025"
func _ready():
	## One-time setup: build the HTTP worker node, wire its completion
	## callback, attach it to the tree, then pull the API key from disk.
	http_request = HTTPRequest.new()
	# Connecting before add_child is fine; the signal fires only after
	# a request completes, long after the node has entered the tree.
	http_request.request_completed.connect(_on_request_completed)
	add_child(http_request)
	_load_api_key()
func _load_api_key():
	## Reads the OpenRouter API key from user://secrets.cfg, section [auth],
	## key "openrouter_key". Leaves api_key empty (and warns) when the file
	## is missing, unreadable, or the key is absent/blank.
	var config = ConfigFile.new()
	var err = config.load("user://secrets.cfg")
	if err == OK:
		api_key = config.get_value("auth", "openrouter_key", "")
		if api_key == "":
			# File exists but the key is missing/blank — surface that too,
			# otherwise the user only finds out on the first send_prompt().
			push_warning("secrets.cfg found but [auth] openrouter_key is missing or empty.")
	else:
		# push_warning instead of print: shows up in the editor's warning
		# channel and the debugger instead of being lost in stdout.
		push_warning("No secrets.cfg found. Please create one with [auth] openrouter_key=...")
func send_prompt(system_prompt: String, user_input: String):
	## Fires an async chat-completion request at OpenRouter.
	## Results arrive via the `response_received` / `error_occurred` signals
	## (see _on_request_completed); this call itself returns immediately.
	if api_key.is_empty():
		# Godot 4 typed signal emission instead of the legacy
		# string-based emit_signal() — checked at parse time.
		error_occurred.emit("API Key missing. Please check user://secrets.cfg")
		return

	# HTTPRequest.request() takes a PackedStringArray of raw header lines.
	var headers: PackedStringArray = [
		"Content-Type: application/json",
		"Authorization: Bearer " + api_key,
		"HTTP-Referer: https://github.com/vibecoding/storyteller",
		"X-Title: Storyteller"
	]

	var body := {
		"model": model,
		"messages": [
			{"role": "system", "content": system_prompt},
			{"role": "user", "content": user_input}
		],
		# Ask the provider to constrain the reply to a JSON object; the
		# completion handler still re-parses it defensively.
		"response_format": {"type": "json_object"}
	}

	var error = http_request.request(api_url, headers, HTTPClient.METHOD_POST, JSON.stringify(body))
	if error != OK:
		# Covers ERR_BUSY (a request is already in flight) among others.
		error_occurred.emit("HTTP Request failed: " + str(error))
func _on_request_completed(result, response_code, headers, body):
	## Callback for HTTPRequest.request_completed. Unwraps the OpenRouter
	## envelope, parses the model's JSON reply, and forwards it via
	## `response_received`; every failure path emits `error_occurred`.
	# Transport-level failure (DNS, TLS, timeout, ...): response_code is 0
	# and the body empty here, so report the result code instead of a
	# misleading "API Error: 0".
	if result != HTTPRequest.RESULT_SUCCESS:
		error_occurred.emit("HTTP transport error: " + str(result))
		return

	if response_code != 200:
		error_occurred.emit("API Error: " + str(response_code) + " " + body.get_string_from_utf8())
		return

	var json = JSON.new()
	if json.parse(body.get_string_from_utf8()) != OK:
		error_occurred.emit("JSON Parse Error")
		return

	var response = json.get_data()
	if "choices" in response and response["choices"].size() > 0:
		var content = response["choices"][0]["message"]["content"]
		# The model's reply is a *string* containing JSON (we requested
		# json_object format), so it must be parsed a second time.
		var content_json = JSON.new()
		if content_json.parse(content) == OK:
			response_received.emit(content_json.get_data())
		else:
			error_occurred.emit("LLM returned invalid JSON: " + content)
	else:
		error_occurred.emit("Invalid API response structure")
Reference in New Issue
Block a user