Modifies the orchestrator, adds a code_exec tool and openai_chat (chat to be added later)
orchestrator_functions.py
CHANGED
@@ -519,7 +519,7 @@ async def csv_chat(csv_url: str, query: str):
         )
         logger.info("LangChain-Gemini answer:", gemini_answer)

-        if
+        if gemini_answer is not None:
             return {"answer": jsonable_encoder(gemini_answer)}

         raise Exception("LangChain-Gemini response not usable, falling back to LangChain-Groq")
@@ -534,7 +534,7 @@ async def csv_chat(csv_url: str, query: str):
         )
         logger.info("LangChain-Groq answer:", lang_groq_answer)

-        if not
+        if lang_groq_answer is not None:
             return {"answer": jsonable_encoder(lang_groq_answer)}

         raise Exception("LangChain-Groq response not usable, falling back to raw Groq")
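For context, a minimal sketch of the fallback chain these hunks imply: csv_chat tries LangChain-Gemini first, returns its answer only when it is not None, and otherwise raises so the caller falls through to LangChain-Groq and finally raw Groq. The helper names (ask_langchain_gemini, ask_langchain_groq, ask_raw_groq) are hypothetical placeholders, not functions from this repository; only csv_chat, the answer variables, logger, and jsonable_encoder appear in the diff.

```python
import logging
from fastapi.encoders import jsonable_encoder

logger = logging.getLogger(__name__)

async def csv_chat(csv_url: str, query: str):
    """Sketch of the Gemini -> Groq -> raw Groq fallback implied by the hunks above."""
    try:
        gemini_answer = await ask_langchain_gemini(csv_url, query)  # assumed helper
        logger.info("LangChain-Gemini answer: %s", gemini_answer)
        if gemini_answer is not None:
            return {"answer": jsonable_encoder(gemini_answer)}
        raise Exception("LangChain-Gemini response not usable, falling back to LangChain-Groq")
    except Exception:
        logger.exception("LangChain-Gemini path failed")

    try:
        lang_groq_answer = await ask_langchain_groq(csv_url, query)  # assumed helper
        logger.info("LangChain-Groq answer: %s", lang_groq_answer)
        if lang_groq_answer is not None:
            return {"answer": jsonable_encoder(lang_groq_answer)}
        raise Exception("LangChain-Groq response not usable, falling back to raw Groq")
    except Exception:
        logger.exception("LangChain-Groq path failed")

    # Last resort: raw Groq (not shown in the hunks above).
    raw_groq_answer = await ask_raw_groq(csv_url, query)  # assumed helper
    return {"answer": jsonable_encoder(raw_groq_answer)}
```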