diff --git a/homelab-ai-bot/telegram_bot.py b/homelab-ai-bot/telegram_bot.py
index 85602b28..88ae6df5 100644
--- a/homelab-ai-bot/telegram_bot.py
+++ b/homelab-ai-bot/telegram_bot.py
@@ -409,7 +409,7 @@ async def handle_voice(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
     context.last_suggest_result = {"type": None}
     context.set_source_type("telegram_voice")
     handlers = context.get_tool_handlers(session_id=session_id)
-    answer = llm.ask_with_tools(text, handlers, session_id=session_id)
+    answer = await asyncio.to_thread(llm.ask_with_tools, text, handlers, session_id=session_id)
     if session_id:
         memory_client.log_message(session_id, "user", text)
         memory_client.log_message(session_id, "assistant", answer)
@@ -451,7 +451,7 @@ async def handle_photo(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
     context.last_suggest_result = {"type": None}
     context.set_source_type("telegram_photo")
     handlers = context.get_tool_handlers(session_id=session_id)
-    answer = llm.ask_with_image(image_base64, caption, handlers, session_id=session_id)
+    answer = await asyncio.to_thread(llm.ask_with_image, image_base64, caption, handlers, session_id=session_id)

     warning_text, warnings = _check_flight_plausibility(answer)
     if warning_text:
@@ -627,7 +627,7 @@ async def handle_document(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
     context.last_suggest_result = {"type": None}
     context.set_source_type("telegram_photo")
     handlers = context.get_tool_handlers(session_id=session_id)
-    answer = llm.ask_with_image(image_base64, caption, handlers, session_id=session_id)
+    answer = await asyncio.to_thread(llm.ask_with_image, image_base64, caption, handlers, session_id=session_id)

     warning_text, warnings = _check_flight_plausibility(answer)
     if warning_text:
@@ -664,7 +664,7 @@ async def handle_document(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
     context.last_suggest_result = {"type": None}
     context.set_source_type("telegram_pdf")
     handlers = context.get_tool_handlers(session_id=session_id)
-    answer = llm.ask_with_tools(full_prompt, handlers, session_id=session_id)
+    answer = await asyncio.to_thread(llm.ask_with_tools, full_prompt, handlers, session_id=session_id)

     warning_text, warnings = _check_flight_plausibility(answer)
     if warning_text:
@@ -735,7 +735,7 @@ async def handle_message(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
     context.last_suggest_result = {"type": None}
     context.set_source_type("telegram_text")
     handlers = context.get_tool_handlers(session_id=session_id)
-    answer = llm.ask_with_tools(text, handlers, session_id=session_id)
+    answer = await asyncio.to_thread(llm.ask_with_tools, text, handlers, session_id=session_id)
     if session_id:
         memory_client.log_message(session_id, "user", text)
         memory_client.log_message(session_id, "assistant", answer)