Fix the retriever loaded from Pinecone
Browse files- app.py +9 -5
- chainlit.md +2 -0
- utils.py +2 -2
app.py
CHANGED
|
@@ -39,17 +39,16 @@ def rename(orig_author: str):
|
|
| 39 |
@cl.on_chat_start # marks a function that will be executed at the start of a user session
|
| 40 |
async def start_chat():
|
| 41 |
|
| 42 |
-
msg = cl.Message(content=f"
|
| 43 |
await msg.send()
|
| 44 |
|
| 45 |
# load documents from Arxiv
|
| 46 |
axloader = ArxivLoader()
|
| 47 |
axloader.main()
|
| 48 |
|
| 49 |
-
#
|
| 50 |
pi = PineconeIndexer()
|
| 51 |
pi.load_embedder()
|
| 52 |
-
pi.index_documents(axloader.documents)
|
| 53 |
retriever=pi.get_vectorstore().as_retriever()
|
| 54 |
print(pi.index.describe_index_stats())
|
| 55 |
|
|
@@ -59,7 +58,7 @@ async def start_chat():
|
|
| 59 |
temperature=0
|
| 60 |
)
|
| 61 |
|
| 62 |
-
msg
|
| 63 |
await msg.send()
|
| 64 |
|
| 65 |
cl.user_session.set("llm", llm)
|
|
@@ -86,4 +85,9 @@ async def main(message: cl.Message):
|
|
| 86 |
|
| 87 |
answer = retrieval_augmented_qa_chain.invoke({"question" : message.content})
|
| 88 |
|
| 89 |
-
await cl.Message(content=answer["response"].content).send()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 39 |
@cl.on_chat_start # marks a function that will be executed at the start of a user session
|
| 40 |
async def start_chat():
|
| 41 |
|
| 42 |
+
msg = cl.Message(content=f"Initializing the Application...")
|
| 43 |
await msg.send()
|
| 44 |
|
| 45 |
# load documents from Arxiv
|
| 46 |
axloader = ArxivLoader()
|
| 47 |
axloader.main()
|
| 48 |
|
| 49 |
+
# load embedder and the retriever
|
| 50 |
pi = PineconeIndexer()
|
| 51 |
pi.load_embedder()
|
|
|
|
| 52 |
retriever=pi.get_vectorstore().as_retriever()
|
| 53 |
print(pi.index.describe_index_stats())
|
| 54 |
|
|
|
|
| 58 |
temperature=0
|
| 59 |
)
|
| 60 |
|
| 61 |
+
msg = cl.Message(content=f"Application is ready !")
|
| 62 |
await msg.send()
|
| 63 |
|
| 64 |
cl.user_session.set("llm", llm)
|
|
|
|
| 85 |
|
| 86 |
answer = retrieval_augmented_qa_chain.invoke({"question" : message.content})
|
| 87 |
|
| 88 |
+
await cl.Message(content=answer["response"].content).send()
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
|
chainlit.md
CHANGED
|
@@ -1,3 +1,5 @@
|
|
| 1 |
# Pythonic RAQA with LangChain & Pinecone
|
| 2 |
|
| 3 |
This application leverages Chainlit, OpenAI, LangChain, Pinecone and Hugging Face to build a basic RAQA (Retrieval Augmented Question Answering) application based on a Pinecone index containing documents with arxiv papers about nuclear fission.
|
|
|
|
|
|
|
|
|
| 1 |
# Pythonic RAQA with LangChain & Pinecone
|
| 2 |
|
| 3 |
This application leverages Chainlit, OpenAI, LangChain, Pinecone and Hugging Face to build a basic RAQA (Retrieval Augmented Question Answering) application based on a Pinecone index containing documents with arxiv papers about nuclear fission.
|
| 4 |
+
|
| 5 |
+
Wait for the `Application is ready !` message before starting to use the app.
|
utils.py
CHANGED
|
@@ -104,10 +104,10 @@ class PineconeIndexer:
|
|
| 104 |
metric=metric,
|
| 105 |
dimension=n_dims
|
| 106 |
)
|
|
|
|
|
|
|
| 107 |
|
| 108 |
self.index = pinecone.Index(index_name)
|
| 109 |
-
self.arxiv_loader = ArxivLoader()
|
| 110 |
-
|
| 111 |
|
| 112 |
def load_embedder(self):
|
| 113 |
""""""
|
|
|
|
| 104 |
metric=metric,
|
| 105 |
dimension=n_dims
|
| 106 |
)
|
| 107 |
+
|
| 108 |
+
self.arxiv_loader = ArxivLoader()
|
| 109 |
|
| 110 |
self.index = pinecone.Index(index_name)
|
|
|
|
|
|
|
| 111 |
|
| 112 |
def load_embedder(self):
|
| 113 |
""""""
|