amanmaurya0704 commited on
Commit
20cac53
·
1 Parent(s): dd1d3b2
Files changed (29) hide show
  1. app.py +5 -0
  2. src/__pycache__/__init__.cpython-312.pyc +0 -0
  3. src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc +0 -0
  4. src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc +0 -0
  5. src/langgraphagenticai/LLMS/groqllm.py +19 -0
  6. src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc +0 -0
  7. src/langgraphagenticai/__pycache__/main.cpython-312.pyc +0 -0
  8. src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc +0 -0
  9. src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc +0 -0
  10. src/langgraphagenticai/graph/graph_builder.py +78 -0
  11. src/langgraphagenticai/main.py +67 -0
  12. src/langgraphagenticai/node/__pycache__/__init__.cpython-312.pyc +0 -0
  13. src/langgraphagenticai/node/__pycache__/basic_chatbot_node.cpython-312.pyc +0 -0
  14. src/langgraphagenticai/node/__pycache__/chatbot_wiyh_tools.cpython-312.pyc +0 -0
  15. src/langgraphagenticai/node/basic_chatbot_node.py +14 -0
  16. src/langgraphagenticai/node/chatbot_wiyh_tools.py +43 -0
  17. src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc +0 -0
  18. src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc +0 -0
  19. src/langgraphagenticai/state/state.py +11 -0
  20. src/langgraphagenticai/tools/__pycache__/__init__.cpython-312.pyc +0 -0
  21. src/langgraphagenticai/tools/__pycache__/searchtool.cpython-312.pyc +0 -0
  22. src/langgraphagenticai/tools/searchtool.py +15 -0
  23. src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc +0 -0
  24. src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc +0 -0
  25. src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc +0 -0
  26. src/langgraphagenticai/ui/streamlitui/__pycache__/uiconfigfile.cpython-312.pyc +0 -0
  27. src/langgraphagenticai/ui/streamlitui/display_result.py +42 -0
  28. src/langgraphagenticai/ui/streamlitui/loadui.py +61 -1
  29. src/langgraphagenticai/ui/streamlitui/uiconfigfile.py +2 -2
app.py CHANGED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
from src.langgraphagenticai.main import load_langgraph_agenticai_app


if __name__ == "__main__":
    # Entry point: launch the Streamlit-based LangGraph agentic AI app.
    load_langgraph_agenticai_app()
src/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (172 Bytes). View file
 
src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (196 Bytes). View file
 
src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc ADDED
Binary file (1.36 kB). View file
 
src/langgraphagenticai/LLMS/groqllm.py CHANGED
@@ -1 +1,20 @@
 
 
1
  from langchain_groq import ChatGroq
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import streamlit as st
3
  from langchain_groq import ChatGroq
4
+
5
class GroqLLM:
    """Factory that builds a ChatGroq model from the Streamlit UI selections."""

    def __init__(self, user_controls_input):
        # Dict of sidebar selections, e.g.
        # {'GROQ_API_KEY': ..., 'selected_groq_model': ...}.
        self.user_controls_input = user_controls_input

    def get_llm_model(self):
        """Build and return a ChatGroq model.

        Returns:
            ChatGroq instance, or None when no API key was provided (the
            caller in main.py checks ``if not model``).

        Raises:
            ValueError: wrapping any unexpected error during model creation.
        """
        try:
            groq_api_key = self.user_controls_input['GROQ_API_KEY']
            selected_groq_model = self.user_controls_input['selected_groq_model']
            # Use .get() so a missing environment variable does not raise
            # KeyError (os.environ["GROQ_API_KEY"] would).
            if groq_api_key == '' and os.environ.get("GROQ_API_KEY", '') == '':
                st.error("Please Enter the Groq API KEY")
                # Bail out instead of constructing a client with no key.
                return None

            llm = ChatGroq(api_key=groq_api_key, model=selected_groq_model)

        except Exception as e:
            raise ValueError(f"Error Occurred with Exception : {e}")
        return llm
src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (191 Bytes). View file
 
src/langgraphagenticai/__pycache__/main.cpython-312.pyc ADDED
Binary file (2.62 kB). View file
 
src/langgraphagenticai/graph/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (197 Bytes). View file
 
src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc ADDED
Binary file (3.84 kB). View file
 
src/langgraphagenticai/graph/graph_builder.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langgraph.graph import StateGraph, START,END, MessagesState
2
+ from langgraph.prebuilt import tools_condition,ToolNode
3
+ from langchain_core.prompts import ChatPromptTemplate
4
+ from src.langgraphagenticai.state.state import State
5
+ from src.langgraphagenticai.node.basic_chatbot_node import BasicChatbotNode
6
+ from src.langgraphagenticai.node.chatbot_wiyh_tools import ChatbotWithToolNode
7
+ from src.langgraphagenticai.tools.searchtool import get_tools,create_tool_node
8
+
9
+
10
+
11
+
12
class GraphBuilder:
    """Assembles and compiles LangGraph graphs for the supported use cases."""

    def __init__(self, model):
        # LLM shared by every chatbot node in the graph.
        self.llm = model
        self.graph_builder = StateGraph(State)

    def basic_chatbot_build_graph(self):
        """
        Builds a basic chatbot graph using LangGraph.

        Initializes a chatbot node using the `BasicChatbotNode` class and
        integrates it into the graph. The chatbot node is both the entry
        and exit point of the graph.
        """
        self.basic_chatbot_node = BasicChatbotNode(self.llm)
        self.graph_builder.add_node("chatbot", self.basic_chatbot_node.process)
        self.graph_builder.add_edge(START, "chatbot")
        self.graph_builder.add_edge("chatbot", END)

    def chatbot_with_tools_build_graph(self):
        """
        Builds an advanced chatbot graph with tool integration.

        Creates a chatbot node with tool-calling capability plus a tool
        node, and wires conditional and direct edges between them. The
        chatbot node is the entry point.
        """
        # Define the tools and the tool node.
        tools = get_tools()
        tool_node = create_tool_node(tools)

        # Chatbot node backed by the LLM with the tools bound to it.
        obj_chatbot_with_node = ChatbotWithToolNode(self.llm)
        chatbot_node = obj_chatbot_with_node.create_chatbot(tools)

        # Add nodes.
        self.graph_builder.add_node("chatbot", chatbot_node)
        self.graph_builder.add_node("tools", tool_node)

        # tools_condition routes to "tools" when the LLM requests a tool
        # call, otherwise to END; tool results loop back to the chatbot.
        self.graph_builder.add_edge(START, "chatbot")
        self.graph_builder.add_conditional_edges("chatbot", tools_condition)
        self.graph_builder.add_edge("tools", "chatbot")

    def setup_graph(self, usecase: str):
        """
        Sets up and compiles the graph for the selected use case.

        Raises:
            ValueError: if `usecase` is not a supported name. Previously an
                unknown use case silently compiled an empty graph, which
                failed later with a far less helpful error.
        """
        if usecase == "Basic Chatbot":
            self.basic_chatbot_build_graph()
        elif usecase == "Chatbot with Tool":
            self.chatbot_with_tools_build_graph()
        else:
            raise ValueError(f"Unknown use case: {usecase!r}")
        return self.graph_builder.compile()
src/langgraphagenticai/main.py CHANGED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import streamlit as st
3
+ import json
4
+ from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
5
+ from src.langgraphagenticai.LLMS.groqllm import GroqLLM
6
+ from src.langgraphagenticai.graph.graph_builder import GraphBuilder
7
+ from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
8
+
9
# MAIN Function START
def load_langgraph_agenticai_app():
    """
    Loads and runs the LangGraph AgenticAI application with Streamlit UI.

    Initializes the UI, reads the user's message, configures the Groq LLM,
    builds the graph for the selected use case, and renders the output.
    All failures are surfaced via `st.error` instead of raising, so the
    Streamlit app degrades gracefully rather than crashing with a traceback.
    """
    # Load UI and collect the sidebar selections.
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    if not user_input:
        st.error("Error: Failed to load user input from the UI.")
        return

    # Use the prefetched timeframe when the fetch button was clicked.
    # `.get` avoids an AttributeError if the flag was never initialized
    # in session state.
    if st.session_state.get("IsFetchButtonClicked", False):
        user_message = st.session_state.timeframe
    else:
        user_message = st.chat_input("Enter your message:")

    if user_message:
        try:
            # Configure LLM.
            obj_llm_config = GroqLLM(user_controls_input=user_input)
            model = obj_llm_config.get_llm_model()

            if not model:
                st.error("Error: LLM model could not be initialized.")
                return

            # Initialize and set up the graph based on the use case.
            usecase = user_input.get('selected_usecase')
            if not usecase:
                st.error("Error: No use case selected.")
                return

            # Graph builder.
            graph_builder = GraphBuilder(model)
            try:
                graph = graph_builder.setup_graph(usecase)
                DisplayResultStreamlit(usecase, graph, user_message).display_result_on_ui()
            except Exception as e:
                st.error(f"Error: Graph setup failed - {e}")
                return

        except Exception as e:
            # Report instead of re-raising: an uncaught exception would
            # crash the Streamlit app.
            st.error(f"Error Occurred with Exception : {e}")
src/langgraphagenticai/node/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (196 Bytes). View file
 
src/langgraphagenticai/node/__pycache__/basic_chatbot_node.cpython-312.pyc ADDED
Binary file (1.04 kB). View file
 
src/langgraphagenticai/node/__pycache__/chatbot_wiyh_tools.cpython-312.pyc ADDED
Binary file (1.99 kB). View file
 
src/langgraphagenticai/node/basic_chatbot_node.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from src.langgraphagenticai.state.state import State
2
+
3
class BasicChatbotNode:
    """
    Basic (tool-free) chatbot logic implementation.
    """

    def __init__(self, model):
        # Chat model used to answer the conversation history.
        self.llm = model

    def process(self, state: State) -> dict:
        """
        Processes the input state and generates a chatbot response.
        """
        reply = self.llm.invoke(state['messages'])
        return {"messages": reply}
src/langgraphagenticai/node/chatbot_wiyh_tools.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from src.langgraphagenticai.state.state import State
2
+
3
class ChatbotWithToolNode:
    """
    Chatbot logic enhanced with tool integration.
    """

    def __init__(self, model):
        # Base chat model; tools are bound later in create_chatbot().
        self.llm = model

    def process(self, state: State) -> dict:
        """
        Processes the input state and generates a response with tool integration.
        """
        messages = state["messages"]
        user_input = messages[-1] if messages else ""
        llm_response = self.llm.invoke([{"role": "user", "content": user_input}])

        # Simulate tool-specific logic
        tools_response = f"Tool integration for: '{user_input}'"

        return {"messages": [llm_response, tools_response]}

    def create_chatbot(self, tools):
        """
        Returns a chatbot node function.
        """
        llm_with_tools = self.llm.bind_tools(tools)

        def chatbot_node(state: State):
            """
            Chatbot logic for processing the input state and returning a response.
            """
            return {"messages": [llm_with_tools.invoke(state["messages"])]}

        return chatbot_node
src/langgraphagenticai/state/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (197 Bytes). View file
 
src/langgraphagenticai/state/__pycache__/state.cpython-312.pyc ADDED
Binary file (860 Bytes). View file
 
src/langgraphagenticai/state/state.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Annotated, Literal, Optional
2
+ from typing_extensions import TypedDict
3
+ from langgraph.graph.message import add_messages
4
+ from typing import TypedDict, Annotated, List
5
+ from langchain_core.messages import HumanMessage, AIMessage
6
+
7
class State(TypedDict):
    """
    Represents the structure of the state used in the graph.
    """
    # Conversation history. The `add_messages` reducer makes LangGraph
    # append new messages to this list on each update instead of
    # overwriting it.
    messages: Annotated[list, add_messages]
src/langgraphagenticai/tools/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (197 Bytes). View file
 
src/langgraphagenticai/tools/__pycache__/searchtool.cpython-312.pyc ADDED
Binary file (777 Bytes). View file
 
src/langgraphagenticai/tools/searchtool.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.tools.tavily_search import TavilySearchResults
2
+ from langgraph.prebuilt import ToolNode
3
+
4
def get_tools(max_results=2):
    """
    Return the list of tools to be used in the chatbot.

    Args:
        max_results: Maximum number of search results Tavily should return.
            Defaults to 2, matching the previously hard-coded value, so
            existing callers are unaffected.
    """
    tools = [TavilySearchResults(max_results=max_results)]
    return tools
10
+
11
def create_tool_node(tools):
    """
    Creates and returns a tool node for the graph.
    """
    node = ToolNode(tools=tools)
    return node
src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (194 Bytes). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc ADDED
Binary file (3.08 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc ADDED
Binary file (4.04 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/uiconfigfile.cpython-312.pyc ADDED
Binary file (1.95 kB). View file
 
src/langgraphagenticai/ui/streamlitui/display_result.py CHANGED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ from langchain_core.messages import HumanMessage,AIMessage,ToolMessage
3
+ import json
4
+
5
+
6
class DisplayResultStreamlit:
    """Renders graph execution results in the Streamlit chat UI."""

    def __init__(self, usecase, graph, user_message):
        self.usecase = usecase            # selected use case name
        self.graph = graph                # compiled LangGraph graph
        self.user_message = user_message  # latest user input

    def display_result_on_ui(self):
        """Run the graph for the selected use case and render the messages."""
        usecase = self.usecase
        graph = self.graph
        user_message = self.user_message
        if usecase == "Basic Chatbot":
            # Wrap the (role, content) tuple in a list: the state's
            # `add_messages` reducer expects a list of messages.
            for event in graph.stream({'messages': [("user", user_message)]}):
                for value in event.values():
                    with st.chat_message("user"):
                        st.write(user_message)
                    with st.chat_message("assistant"):
                        st.write(value["messages"].content)

        elif usecase == "Chatbot with Tool":
            # Prepare state and invoke the graph.
            initial_state = {"messages": [user_message]}
            res = graph.invoke(initial_state)
            for message in res['messages']:
                # isinstance is the subclass-safe, idiomatic type check
                # (type(x) == T breaks for message subclasses).
                if isinstance(message, HumanMessage):
                    with st.chat_message("user"):
                        st.write(message.content)
                elif isinstance(message, ToolMessage):
                    with st.chat_message("ai"):
                        st.write("Tool Call Start")
                        st.write(message.content)
                        st.write("Tool Call End")
                elif isinstance(message, AIMessage) and message.content:
                    with st.chat_message("assistant"):
                        st.write(message.content)
src/langgraphagenticai/ui/streamlitui/loadui.py CHANGED
@@ -7,4 +7,64 @@ from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
7
  class LoadStreamlitUI:
8
  def __init__(self):
9
  self.config = Config()
10
- self.user_control = {}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
class LoadStreamlitUI:
    """Builds the Streamlit page/sidebar and collects user configuration."""

    def __init__(self):
        # Reads options (LLMs, models, use cases) from the ini config file.
        self.config = Config()
        # Selections collected from the sidebar; returned by load_streamlit_ui().
        self.user_control = {}

    def initialize_session(self):
        # Initial workflow state stored once in st.session_state["state"].
        # NOTE(review): these SDLC-style keys (requirements, user_stories,
        # po_feedback, ...) are not used by the chatbot use cases visible
        # here — presumably for another workflow; confirm before removing.
        return {
            "current_step": "requirements",
            "requirements": "",
            "user_stories": "",
            "po_feedback": "",
            "generated_code": "",
            "review_feedback": "",
            "decision": None
        }



    def load_streamlit_ui(self):
        # st.set_page_config must be the first Streamlit call on the page.
        st.set_page_config(page_title= "🤖 " + self.config.get_page_title(), layout="wide")
        st.header("🤖 " + self.config.get_page_title())
        # NOTE(review): these flags are reset on every Streamlit rerun, so
        # IsFetchButtonClicked is always False when main.py reads it —
        # confirm this reset is intended.
        st.session_state.timeframe = ''
        st.session_state.IsFetchButtonClicked = False
        st.session_state.IsSDLC = False



        with st.sidebar:
            # Get options from config
            llm_options = self.config.get_llm_options()
            usecase_options = self.config.get_usecase_options()

            # LLM selection
            self.user_control["selected_llm"] = st.selectbox("Select LLM", llm_options)

            if self.user_control["selected_llm"] == 'Groq':
                # Model selection
                model_options = self.config.get_groq_model_options()
                self.user_control["selected_groq_model"] = st.selectbox("Select Model", model_options)
                # API key input (also mirrored into session state)
                self.user_control["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key",
                                                                                                     type="password")
                # Validate API key
                if not self.user_control["GROQ_API_KEY"]:
                    st.warning("⚠️ Please enter your GROQ API key to proceed. Don't have? refer : https://console.groq.com/keys ")


            # Use case selection
            self.user_control["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)

            if self.user_control["selected_usecase"] =="Chatbot with Tool":
                # API key input — exported to the environment because the
                # Tavily tool reads TAVILY_API_KEY from os.environ.
                os.environ["TAVILY_API_KEY"] = self.user_control["TAVILY_API_KEY"] = st.session_state["TAVILY_API_KEY"] = st.text_input("TAVILY API KEY",
                                                                                                                                        type="password")
                # Validate API key
                if not self.user_control["TAVILY_API_KEY"]:
                    st.warning("⚠️ Please enter your TAVILY_API_KEY key to proceed. Don't have? refer : https://app.tavily.com/home")

        # Create the workflow state once per browser session.
        if "state" not in st.session_state:
            st.session_state.state = self.initialize_session()



        return self.user_control
src/langgraphagenticai/ui/streamlitui/uiconfigfile.py CHANGED
@@ -1,9 +1,9 @@
1
  from configparser import ConfigParser
2
 
3
  class Config:
4
- def __init__(self, config_file='./src/langgraphagenticai/ui/streamlitui/uiconfigfile.ini'):
5
  self.config = ConfigParser()
6
- self.config.read(self.config_file)
7
 
8
  def get_llm_options(self):
9
  return self.config["DEFAULT"].get("LLM_OPTIONS", "").split(",")
 
1
  from configparser import ConfigParser
2
 
3
  class Config:
4
+ def __init__(self, config_file=r"C:\Users\amanm\Downloads\Machine Learning\Langgraph_Project\src\langgraphagenticai\ui\streamlitui\uiconfigfile.ini"):
5
  self.config = ConfigParser()
6
+ self.config.read(config_file)
7
 
8
  def get_llm_options(self):
9
  return self.config["DEFAULT"].get("LLM_OPTIONS", "").split(",")