From 7623e81a9f1d1e10f24562fce1c858d180283760 Mon Sep 17 00:00:00 2001 From: Maro <25640912+commitBlob@users.noreply.github.com> Date: Tue, 30 Sep 2025 12:40:18 +0100 Subject: [PATCH] feat: add LangGraph Agentic RAG implementation (#2) --- RAG/.gitignore | 119 +++++++++++++++++++++++ RAG/README.md | 115 ++++++++++++++++++++++ RAG/graph/__init__.py | 0 RAG/graph/chains/__init__.py | 0 RAG/graph/chains/answer_grader.py | 25 +++++ RAG/graph/chains/generation.py | 8 ++ RAG/graph/chains/hallucination_grader.py | 24 +++++ RAG/graph/chains/retrieval_grader.py | 25 +++++ RAG/graph/chains/router.py | 29 ++++++ RAG/graph/consts.py | 4 + RAG/graph/graph.py | 99 +++++++++++++++++++ RAG/graph/nodes/__init__.py | 6 ++ RAG/graph/nodes/generate.py | 21 ++++ RAG/graph/nodes/grade_documents.py | 42 ++++++++ RAG/graph/nodes/retrieve.py | 20 ++++ RAG/graph/nodes/web_search.py | 43 ++++++++ RAG/graph/state.py | 18 ++++ RAG/graph/tests/__init__.py | 0 RAG/graph/tests/test_chains.py | 76 +++++++++++++++ RAG/graph_output.png | Bin 0 -> 27419 bytes RAG/ingestion.py | 35 +++++++ RAG/main.py | 9 ++ 22 files changed, 718 insertions(+) create mode 100644 RAG/.gitignore create mode 100644 RAG/README.md create mode 100644 RAG/graph/__init__.py create mode 100644 RAG/graph/chains/__init__.py create mode 100644 RAG/graph/chains/answer_grader.py create mode 100644 RAG/graph/chains/generation.py create mode 100644 RAG/graph/chains/hallucination_grader.py create mode 100644 RAG/graph/chains/retrieval_grader.py create mode 100644 RAG/graph/chains/router.py create mode 100644 RAG/graph/consts.py create mode 100644 RAG/graph/graph.py create mode 100644 RAG/graph/nodes/__init__.py create mode 100644 RAG/graph/nodes/generate.py create mode 100644 RAG/graph/nodes/grade_documents.py create mode 100644 RAG/graph/nodes/retrieve.py create mode 100644 RAG/graph/nodes/web_search.py create mode 100644 RAG/graph/state.py create mode 100644 RAG/graph/tests/__init__.py create mode 100644 RAG/graph/tests/test_chains.py create mode 100644 RAG/graph_output.png create mode 100644 RAG/ingestion.py create mode 100644 RAG/main.py diff --git a/RAG/.gitignore b/RAG/.gitignore new file mode 100644 index 0000000..38abdb8 --- /dev/null +++ b/RAG/.gitignore @@ -0,0 +1,119 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class +.chroma/* +.chroma +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# VS Code +.vscode/ +*.code-workspace + +# PyCharm +.idea/ +*.iml + +# Jupyter +.ipynb_checkpoints/ + +# macOS +.DS_Store + +# Windows 
+Thumbs.db \ No newline at end of file diff --git a/RAG/README.md b/RAG/README.md new file mode 100644 index 0000000..866bb61 --- /dev/null +++ b/RAG/README.md @@ -0,0 +1,115 @@ +# Playground/RAG + +Part of the [playground](https://github.com/commitBlob/playground) repository collection. + +## LangGraph Agentic RAG + +An intelligent question-answering system built with LangGraph that combines vector store retrieval and web search capabilities with sophisticated answer validation. The system uses an agent-based approach to dynamically decide the best information source and verify the quality of responses. + +### Project Location + +This project is located in the `RAG` directory of the playground repository: +``` +playground/ +└── RAG/ # This project + ├── ingestion.py + ├── main.py + └── graph/ + └── ... +``` + +## Features + +- **Smart Routing**: Automatically routes questions to either the vector store or web search based on the question's content +- **Document Relevance Grading**: Evaluates retrieved documents for relevance to the question +- **Hallucination Detection**: Verifies that generated answers are grounded in the source documents +- **Answer Quality Assessment**: Ensures generated responses actually answer the user's question +- **Fallback Mechanisms**: Dynamically switches to web search when vector store results are insufficient +- **Flexible Architecture**: Built with LangGraph for clear state management and workflow control + +## Architecture + +The system uses a state-based graph architecture with several key components: + +1. **Router**: Determines whether to use the vector store or web search based on the question +2. **Retriever**: Fetches relevant documents from the Pinecone vector store +3. **Document Grader**: Evaluates document relevance +4. **Generator**: Creates answers based on retrieved documents +5. **Answer Grader**: Validates answers for hallucinations and relevance +6. **Web Search**: Provides additional information when needed using Tavily Search + +## Setup + +1. Clone the repository +2. Install dependencies: + ```bash + pip install langchain langchainhub langchain-community langchain-openai langchain-tavily langchain-pinecone langgraph beautifulsoup4 python-dotenv pytest + ``` + +3. Create a `.env` file with the following keys: + ``` + OPENAI_API_KEY=your_openai_key + PINECONE_API_KEY=your_pinecone_key + TAVILY_API_KEY=your_tavily_key + LANGSMITH_API_KEY=your_langsmith_key (optional) + LANGSMITH_TRACING=true (optional) + LANGSMITH_PROJECT_NAME=your_project_name (optional) + + ``` + +## Usage + +1. Run the ingestion script to populate the vector store: + ```bash + python ingestion.py + ``` + +2. Run the main application: + ```bash + python main.py + ``` + +Example query: +```python +from graph.graph import app + +result = app.invoke(input={"question": "What is agent memory?"}) +print(result) +``` + +## Project Structure + +``` +├── ingestion.py # Document ingestion and vector store setup +├── main.py # Main application entry point +├── graph/ +│ ├── chains/ # LangChain components +│ │ ├── answer_grader.py +│ │ ├── generation.py +│ │ ├── hallucination_grader.py +│ │ ├── retrieval_grader.py +│ │ └── router.py +│ ├── nodes/ # Graph nodes implementation +│ │ ├── generate.py +│ │ ├── grade_documents.py +│ │ ├── retrieve.py +│ │ └── web_search.py +│ ├── tests/ # Test cases +│ ├── graph.py # Main graph definition +│ └── state.py # State management +``` + +## Testing + +Run the test suite: +```bash +pytest . 
-s -v +``` + +## Flow Visualization + +The system generates a visual representation of the workflow graph in `graph_output.png`: + +![LangGraph Workflow](graph_output.png) + +This visualization shows the complete flow of the question-answering system, including routing decisions, document retrieval, grading steps, and generation paths. \ No newline at end of file diff --git a/RAG/graph/__init__.py b/RAG/graph/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/RAG/graph/chains/__init__.py b/RAG/graph/chains/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/RAG/graph/chains/answer_grader.py b/RAG/graph/chains/answer_grader.py new file mode 100644 index 0000000..96fd2a9 --- /dev/null +++ b/RAG/graph/chains/answer_grader.py @@ -0,0 +1,25 @@ +from langchain_core.prompts import ChatPromptTemplate +from langchain_core.runnables import RunnableSequence +from langchain_openai import ChatOpenAI +from pydantic import BaseModel, Field + + +class GradeAnswer(BaseModel): + binary_score: bool = Field( + description="Answer addresses the question, 'yes' or 'no'" + ) + +llm = ChatOpenAI(temperature=0) + +structured_llm_grader = llm.with_structured_output(GradeAnswer) + +system = """You are a grader assessing whether an answer addresses / resolves a question. \n + Give a binary score 'yes' or 'no'. 'Yes' means that the answer resolves the question.""" +answer_prompt = ChatPromptTemplate.from_messages( + [ + ("system", system), + ("human", "User question: \n\n {question} \n\n LLM generation: {generation}"), + ] +) + +answer_grader: RunnableSequence = answer_prompt | structured_llm_grader \ No newline at end of file diff --git a/RAG/graph/chains/generation.py b/RAG/graph/chains/generation.py new file mode 100644 index 0000000..d4720f1 --- /dev/null +++ b/RAG/graph/chains/generation.py @@ -0,0 +1,8 @@ +from langchain import hub +from langchain_core.output_parsers import StrOutputParser +from langchain_openai import ChatOpenAI + +llm = ChatOpenAI(temperature=0) +prompt = hub.pull("rlm/rag-prompt") + +generation_chain = prompt | llm | StrOutputParser() \ No newline at end of file diff --git a/RAG/graph/chains/hallucination_grader.py b/RAG/graph/chains/hallucination_grader.py new file mode 100644 index 0000000..d78f112 --- /dev/null +++ b/RAG/graph/chains/hallucination_grader.py @@ -0,0 +1,24 @@ +from langchain_core.prompts import ChatPromptTemplate +from langchain_core.runnables import RunnableSequence +from langchain_openai import ChatOpenAI +from pydantic import BaseModel, Field + +llm = ChatOpenAI(temperature=0) + +class GradeHallucinations(BaseModel): + """Binary score for hallucination present in the generated answer.""" + + binary_score: bool = Field(description="Answer is grounded in the facts, 'yes' or 'no'") + +structured_llm_grader = llm.with_structured_output(GradeHallucinations) + +system = """You are a grader assessing whether an LLM generation is grounded in / supported by a set of retrieved facts. \n + Give a binary score 'yes' or 'no'. 
'Yes' means that the answer is grounded in / supported by the set of facts.""" +hallucination_prompt = ChatPromptTemplate.from_messages( + [ + ("system", system), + ("human", "Set of facts: \n\n {documents} \n\n LLM generation: {generation}"), + ] +) + +hallucination_grader: RunnableSequence = hallucination_prompt | structured_llm_grader \ No newline at end of file diff --git a/RAG/graph/chains/retrieval_grader.py b/RAG/graph/chains/retrieval_grader.py new file mode 100644 index 0000000..7bfd851 --- /dev/null +++ b/RAG/graph/chains/retrieval_grader.py @@ -0,0 +1,25 @@ +from langchain_core.prompts import ChatPromptTemplate +from pydantic import BaseModel, Field +from langchain_openai import ChatOpenAI + +llm = ChatOpenAI(temperature=0) + +class GradeDocuments(BaseModel): + """Binary score for relevance check on retrieved documents.""" + + binary_score: str = Field(description="Documents are relevant to the question, 'yes' or 'no'") + +structured_llm_grader = llm.with_structured_output(GradeDocuments) + +system = """You are a grader assessing relevance of a retrieved document to a user question. \n + If the document contains keyword(s) or semantic meaning related to the question, grade it as relevant. \n + Give a binary 'yes' or 'no' score to indicate whether the document is relevant to the question.""" + +grade_prompt = ChatPromptTemplate.from_messages( + [ + ("system", system), + ("human", "Retrieved document: \n\n {document} \n\n User question: {question}"), + ] +) + +retrieval_grader = grade_prompt | structured_llm_grader \ No newline at end of file diff --git a/RAG/graph/chains/router.py b/RAG/graph/chains/router.py new file mode 100644 index 0000000..3730d00 --- /dev/null +++ b/RAG/graph/chains/router.py @@ -0,0 +1,29 @@ +from typing import Literal + +from langchain_core.prompts import ChatPromptTemplate +from langchain_openai import ChatOpenAI +from pydantic import BaseModel, Field + + +class RouteQuery(BaseModel): + """Route a user query to the most relevant datasource.""" + + datasource: Literal["vectorstore", "websearch"] = Field( + ..., + description="Given a user question, choose to route it to web search or a vectorstore.", + ) + +llm = ChatOpenAI(temperature=0) +structured_llm_router = llm.with_structured_output(RouteQuery) + +system = """You are an expert at routing a user question to a vectorstore or web search. +The vectorstore contains documents related to agents, prompt engineering, and adversarial attacks. +Use the vectorstore for questions on these topics. 
For all else, use web search.""" +route_prompt = ChatPromptTemplate.from_messages( + [ + ("system", system), + ("human", "{question}"), + ] +) + +question_router = route_prompt | structured_llm_router \ No newline at end of file diff --git a/RAG/graph/consts.py b/RAG/graph/consts.py new file mode 100644 index 0000000..47eb623 --- /dev/null +++ b/RAG/graph/consts.py @@ -0,0 +1,4 @@ +RETRIEVE = "retrieve" +GRADE_DOCUMENTS = "grade_documents" +GENERATE = "generate" +WEBSEARCH = "websearch" \ No newline at end of file diff --git a/RAG/graph/graph.py b/RAG/graph/graph.py new file mode 100644 index 0000000..f38d7e3 --- /dev/null +++ b/RAG/graph/graph.py @@ -0,0 +1,99 @@ +from dotenv import load_dotenv +from langgraph.graph import END, StateGraph + +from graph.chains.answer_grader import answer_grader +from graph.chains.hallucination_grader import hallucination_grader +from graph.chains.router import RouteQuery, question_router +from graph.consts import GENERATE, GRADE_DOCUMENTS, RETRIEVE, WEBSEARCH +from graph.nodes import generate, grade_documents, retrieve, web_search +from graph.state import GraphState + +load_dotenv() + +def decide_to_generate(state: GraphState) -> str: + print("assessing graded documents...") + + if state["web_search"]: + print( + "---DECISION: NOT ALL DOCUMENTS ARE RELEVANT TO QUESTION, INCLUDE WEB SEARCH---" + ) + return WEBSEARCH + else: + print("---DECISION: GENERATE---") + return GENERATE + +def grade_generation_grounded_in_documents_and_question(state: GraphState) -> str: + print("check for hallucinations in generation...") + question = state["question"] + documents = state["documents"] + generation = state["generation"] + + score = hallucination_grader.invoke( + {"documents": documents, "generation": generation} + ) + + if score.binary_score: + print("decision - generation is grounded in documents") + score = answer_grader.invoke( + {"generation": generation, "question": question} + ) + if score.binary_score: + print("decision - generation answers the question") + return "useful" + else: + print("decision - generation does not answer the question") + return "not useful" + else: + print("decision - generation is NOT grounded in documents") + return "not supported" + +def route_question(state: GraphState) -> str: + print("routing question...") + question = state["question"] + source: RouteQuery = question_router.invoke({"question": question}) + if source.datasource == WEBSEARCH: + print("decision - route question to web search") + return WEBSEARCH + elif source.datasource == "vectorstore": + print("decision - route question to rag") + return RETRIEVE + + + +workflow = StateGraph(GraphState) +workflow.add_node(RETRIEVE, retrieve) +workflow.add_node(GRADE_DOCUMENTS, grade_documents) +workflow.add_node(GENERATE, generate) +workflow.add_node(WEBSEARCH, web_search) + +workflow.set_conditional_entry_point( + route_question, + { + RETRIEVE: RETRIEVE, + WEBSEARCH: WEBSEARCH + } +) +workflow.add_edge(RETRIEVE, GRADE_DOCUMENTS) +workflow.add_conditional_edges( + GRADE_DOCUMENTS, + decide_to_generate, + { + WEBSEARCH: WEBSEARCH, + GENERATE: GENERATE, + } +) +workflow.add_conditional_edges( + GENERATE, + grade_generation_grounded_in_documents_and_question, + { + "useful": END, + "not useful": WEBSEARCH, + "not supported": GENERATE, + } +) +workflow.add_edge(WEBSEARCH, GENERATE) + +app = workflow.compile() + +app.get_graph().draw_mermaid_png(output_file_path="graph_output.png") \ No newline at end of file diff --git 
a/RAG/graph/nodes/__init__.py b/RAG/graph/nodes/__init__.py new file mode 100644 index 0000000..cc1cbfa --- /dev/null +++ b/RAG/graph/nodes/__init__.py @@ -0,0 +1,6 @@ +from graph.nodes.generate import generate +from graph.nodes.grade_documents import grade_documents +from graph.nodes.retrieve import retrieve +from graph.nodes.web_search import web_search + +__all__ = ["generate", "grade_documents", "retrieve", "web_search"] \ No newline at end of file diff --git a/RAG/graph/nodes/generate.py b/RAG/graph/nodes/generate.py new file mode 100644 index 0000000..a346dc7 --- /dev/null +++ b/RAG/graph/nodes/generate.py @@ -0,0 +1,21 @@ +from typing import Any, Dict + +from graph.chains.generation import generation_chain +from graph.state import GraphState + +def generate(state: GraphState) -> Dict[str, Any]: + """ + Generate a response based on the question and documents in the state. + + Args: + state (GraphState): The current state of the graph. + + Returns: + Dict[str, Any]: A dictionary containing the documents, the generated response, and the original question. + """ + print("Generating response...") + question = state["question"] + documents = state["documents"] + + generation = generation_chain.invoke({"context": documents, "question": question}) + return {"documents": documents, "generation": generation, "question": question} \ No newline at end of file diff --git a/RAG/graph/nodes/grade_documents.py b/RAG/graph/nodes/grade_documents.py new file mode 100644 index 0000000..4abbb1e --- /dev/null +++ b/RAG/graph/nodes/grade_documents.py @@ -0,0 +1,42 @@ +from typing import Any, Dict + +from graph.chains.retrieval_grader import retrieval_grader +from graph.state import GraphState + + +def grade_documents(state: GraphState) -> Dict[str, Any]: + """ + Determines whether the retrieved documents are relevant to the question. + If any document is not relevant, a flag is set to run web search. + + Args: + state (dict): The current graph state + + Returns: + state (dict): Filtered out irrelevant documents and updated web_search state + """ + + print("Grading documents...") + question = state["question"] + documents = state["documents"] + + filtered_docs = [] + web_search = False + + for document in documents: + score = retrieval_grader.invoke( + {"document": document.page_content, "question": question} + ) + + grade = score.binary_score + + if grade.lower() == "yes": + print("Document is relevant") + filtered_docs.append(document) + else: + print("Document is not relevant") + web_search = True + + return {"documents": filtered_docs, "web_search": web_search, "question": question} \ No newline at end of file diff --git a/RAG/graph/nodes/retrieve.py b/RAG/graph/nodes/retrieve.py new file mode 100644 index 0000000..8031adf --- /dev/null +++ b/RAG/graph/nodes/retrieve.py @@ -0,0 +1,20 @@ +from typing import Any, Dict + +from graph.state import GraphState +from ingestion import retriever + +def retrieve(state: GraphState) -> Dict[str, Any]: + """ + Retrieve documents based on the question in the state. + + Args: + state (GraphState): The current state of the graph. + + Returns: + Dict[str, Any]: A dictionary containing the retrieved documents and the original question. 
+ """ + print("Retrieving documents...") + question = state["question"] + documents = retriever.invoke(question) + + return {"documents": documents, "question": question} \ No newline at end of file diff --git a/RAG/graph/nodes/web_search.py b/RAG/graph/nodes/web_search.py new file mode 100644 index 0000000..536c71e --- /dev/null +++ b/RAG/graph/nodes/web_search.py @@ -0,0 +1,43 @@ +from typing import Any, Dict + +from dotenv import load_dotenv +from langchain.schema import Document +from langchain_tavily import TavilySearch + +from graph.state import GraphState + +load_dotenv() +web_search_tool = TavilySearch(max_results=3) + +def web_search(state: GraphState) -> Dict[str, Any]: + """ + Perform a web search based on the question in the state. + + Args: + state (GraphState): The current state of the graph. + + Returns: + Dict[str, Any]: A dictionary containing the search results and the original question. + """ + print("Performing web search...") + question = state["question"] + if "documents" in state: + documents = state["documents"] + else: + documents = None + + tavily_results = web_search_tool.invoke({"query": question})["results"] + joined_tavily_result = "\n".join( + [tavily_result["content"] for tavily_result in tavily_results] + ) + web_results = Document(page_content=joined_tavily_result) + + if documents is not None: + documents.append(web_results) + else: + documents = [web_results] + + return {"documents": documents, "question": question} + +if __name__ == "__main__": + web_search(state={"question": "agent memory", "documents": None}) \ No newline at end of file diff --git a/RAG/graph/state.py b/RAG/graph/state.py new file mode 100644 index 0000000..2c3dc43 --- /dev/null +++ b/RAG/graph/state.py @@ -0,0 +1,18 @@ +from typing import List, TypedDict + +class GraphState(TypedDict): + """ + Represents the state of our graph. 
+ + Attributes: + question: question + generation: LLM generation + web_search: whether to add search + documents: list of documents + """ + + question: str + generation: str + web_search: bool + documents: List[str] + \ No newline at end of file diff --git a/RAG/graph/tests/__init__.py b/RAG/graph/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/RAG/graph/tests/test_chains.py b/RAG/graph/tests/test_chains.py new file mode 100644 index 0000000..099e815 --- /dev/null +++ b/RAG/graph/tests/test_chains.py @@ -0,0 +1,76 @@ +from pprint import pprint + +from dotenv import load_dotenv + +load_dotenv() + +from graph.chains.generation import generation_chain +from graph.chains.hallucination_grader import (GradeHallucinations, + hallucination_grader) +from graph.chains.retrieval_grader import GradeDocuments, retrieval_grader +from graph.chains.router import RouteQuery, question_router +from ingestion import retriever + + +def test_retrieval_grader_answer_yes() -> None: + question = "agent memory" + documents = retriever.invoke(question) + doc_txt = documents[0].page_content + + res: GradeDocuments = retrieval_grader.invoke( + {"document": doc_txt, "question": question} + ) + + assert res.binary_score == "yes" + +def test_retrieval_grader_answer_no() -> None: + question = "agent memory" + documents = retriever.invoke(question) + doc_txt = documents[1].page_content + + res: GradeDocuments = retrieval_grader.invoke( + {"document": doc_txt, "question": "how to make pizza"} + ) + + assert res.binary_score == "no" + +def test_generation_chain() -> None: + question = "agent memory" + docs = retriever.invoke(question) + generation = generation_chain.invoke({"context": docs, "question": question}) + pprint(generation) + +def test_hallucination_grader_answer_yes() -> None: + question = "agent memory" + docs = retriever.invoke(question) + + generation = generation_chain.invoke({"context": docs, "question": question}) + res: GradeHallucinations = hallucination_grader.invoke( + {"documents": docs, "generation": generation} + ) + assert res.binary_score + +def test_hallucination_grader_answer_no() -> None: + question = "agent memory" + docs = retriever.invoke(question) + + res: GradeHallucinations = hallucination_grader.invoke( + { + "documents": docs, + "generation": "In order to make pizza we need to first start with the dough", + } + ) + assert not res.binary_score + +def test_router_to_vectorstore() -> None: + question = "agent memory" + + res: RouteQuery = question_router.invoke({"question": question}) + assert res.datasource == "vectorstore" + + +def test_router_to_websearch() -> None: + question = "how to make pizza" + + res: RouteQuery = question_router.invoke({"question": question}) + assert res.datasource == "websearch" diff --git a/RAG/graph_output.png b/RAG/graph_output.png new file mode 100644 index 0000000000000000000000000000000000000000..6477268e349dbda72f2161f2e02e1691180a126b GIT binary patch literal 27419 [binary PNG data omitted]
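The binary image data for `graph_output.png` is not reproduced above. A minimal sketch for regenerating the diagram locally, reusing the `draw_mermaid_png` call that `graph/graph.py` already makes when it is imported (the additional `draw_mermaid()` call is an assumption about LangGraph's drawing helpers, not part of this patch):

```python
# Sketch: regenerate the workflow diagram from the compiled graph.
# Assumes the layout from this patch (graph/graph.py exporting `app`) and a
# populated .env, since importing graph.graph builds the chains and nodes.
from graph.graph import app

# Writes the PNG referenced by the README's Flow Visualization section.
app.get_graph().draw_mermaid_png(output_file_path="graph_output.png")

# The Mermaid source can also be printed, e.g. for embedding in docs.
print(app.get_graph().draw_mermaid())
```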
diff --git a/RAG/ingestion.py b/RAG/ingestion.py new file mode 100644 index 0000000..e71a74c --- /dev/null +++ b/RAG/ingestion.py @@ -0,0 +1,35 @@ +from dotenv import load_dotenv +from langchain.text_splitter import RecursiveCharacterTextSplitter +from langchain_community.document_loaders import WebBaseLoader +from langchain_pinecone.vectorstores import PineconeVectorStore +from langchain_openai import OpenAIEmbeddings + +load_dotenv() + +urls = [ + "https://lilianweng.github.io/posts/2023-06-23-agent/", + "https://lilianweng.github.io/posts/2023-03-15-prompt-engineering/", + "https://lilianweng.github.io/posts/2023-10-25-adv-attack-llm/", +] + +# Retriever used by the graph nodes and tests; importing this module does not re-ingest. +retriever = PineconeVectorStore( + index_name="langgraph-agentic-rag", + embedding=OpenAIEmbeddings(), + text_key="text" +).as_retriever() + +if __name__ == "__main__": + # One-off ingestion: load the source posts, split them, and upsert into Pinecone. + docs = [WebBaseLoader(url).load() for url in urls] + docs_list = [item for sublist in docs for item in sublist] + + text_splitter = RecursiveCharacterTextSplitter.from_tiktoken_encoder( + chunk_size=250, chunk_overlap=0 + ) + doc_splits = text_splitter.split_documents(docs_list) + + PineconeVectorStore.from_documents( + documents=doc_splits, + embedding=OpenAIEmbeddings(), + index_name="langgraph-agentic-rag", + ) \ No newline at end of file diff --git a/RAG/main.py b/RAG/main.py new file mode 100644 index 0000000..6c90c11 --- /dev/null +++ b/RAG/main.py @@ -0,0 +1,9 @@ +from dotenv import load_dotenv + +load_dotenv() + +from graph.graph import app + +if __name__ == "__main__": + print("Hello, LangGraph!") + print(app.invoke(input={"question": "What is agent memory?"})) \ No newline at end of file
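As a complement to `main.py`, which only prints the final state, the following is a minimal sketch of streaming the compiled graph so the routing, grading, and generation hops described in the README become visible step by step; it assumes the same `.env` setup and uses LangGraph's `stream` API with `stream_mode="updates"`:

```python
# Sketch: watch each node's state update as the graph runs.
from dotenv import load_dotenv

load_dotenv()

from graph.graph import app

question = "What is agent memory?"

# With stream_mode="updates", each step maps a node name
# (retrieve, grade_documents, websearch, generate) to the partial
# state that node returned.
for step in app.stream({"question": question}, stream_mode="updates"):
    for node, update in step.items():
        print(f"--- {node} ---")
        if "generation" in update:
            print(update["generation"])
```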