Compare commits

No commits in common. "testing1" and "master" have entirely different histories.

1 changed file with 4 additions and 12 deletions

@@ -13,7 +13,6 @@ from langchain.vectorstores import Chroma
 from langchain.memory import ConversationBufferMemory
 from langchain.chains import RetrievalQA, ConversationalRetrievalChain
 from langchain.retrievers.multi_query import MultiQueryRetriever
-from langchain.prompts import PromptTemplate
 from flask import Flask, request, abort
 from linebot.v3 import (
@@ -45,22 +44,15 @@ log_file = open(log_file_name, "w")
 log_file.write("")
 log_file.close()
 # User Sessions
 user_sessions = {}
 # init vectorstore embedding
-qa_prompt_template = """
-Use the following pieces of context to answer the question at the end.
-If you don't know the answer, just say that you don't know, don't try to make up an answer.
-always add this exact keyphrase "answer_not_found" to the end of the text if you don't know the answer.
-{context}
-Question: {question}
-Helpful Answer:"""
 os.environ["OPENAI_API_KEY"] = srv_secrets.openai_key
 llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
 vectorstore = Chroma(persist_directory=srv_secrets.chroma_db_dir, embedding_function=OpenAIEmbeddings())
-qa_prompt = PromptTemplate.from_template(qa_prompt_template)
 # Setup Logging
 logging.basicConfig()
@@ -117,7 +109,7 @@ def message_text(event):
 # User Session
 # create session if none exist
 if event.source.user_id not in user_sessions.keys():
-user_sessions[event.source.user_id] = ConversationalRetrievalChain.from_llm(llm,retriever=retriever,memory=ConversationBufferMemory(memory_key="chat_history", return_messages=True, combine_docs_chain_kwargs={"prompt": qa_prompt}))
+user_sessions[event.source.user_id] = ConversationalRetrievalChain.from_llm(llm,retriever=retriever,memory=ConversationBufferMemory(memory_key="chat_history", return_messages=True))
 # unique_docs = retriever_from_llm.get_relevant_documents(query=event.message.text)
 with ApiClient(configuration) as api_client:
@@ -133,7 +125,7 @@ def message_text(event):
 messages=[TextMessage(text=answer)]
 ))
-log_file.write(", bot_answer: " + answer + "\n")
+log_file.write("bot_answer: " + answer + "\n")
 log_file.close()
 # with ApiClient(configuration) as api_client:
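Note on the removed prompt wiring: in the old revision the custom QA prompt was passed as combine_docs_chain_kwargs inside ConversationBufferMemory, where ConversationalRetrievalChain never sees it; the new revision simply drops the prompt. If the custom prompt is wanted back, the legacy LangChain API expects it on ConversationalRetrievalChain.from_llm itself. Below is a minimal sketch under that assumption, not this repository's code: "./chroma_db" and the plain as_retriever() call are placeholders for srv_secrets.chroma_db_dir and the MultiQueryRetriever used in the actual file, and the prompt text is the one removed in this diff.

# Sketch: attaching the QA prompt to the chain's combine_docs step,
# not to the memory object (assumes legacy langchain 0.0.x as imported above).
import os
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain
from langchain.prompts import PromptTemplate

qa_prompt = PromptTemplate.from_template(
    """Use the following pieces of context to answer the question at the end.
If you don't know the answer, just say that you don't know, don't try to make up an answer.
always add this exact keyphrase "answer_not_found" to the end of the text if you don't know the answer.
{context}
Question: {question}
Helpful Answer:"""
)

os.environ.setdefault("OPENAI_API_KEY", "sk-placeholder")  # placeholder, repo uses srv_secrets.openai_key
llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0)
vectorstore = Chroma(persist_directory="./chroma_db",  # placeholder for srv_secrets.chroma_db_dir
                     embedding_function=OpenAIEmbeddings())
retriever = vectorstore.as_retriever()

# The prompt rides on the chain, not on ConversationBufferMemory.
chain = ConversationalRetrievalChain.from_llm(
    llm,
    retriever=retriever,
    memory=ConversationBufferMemory(memory_key="chat_history", return_messages=True),
    combine_docs_chain_kwargs={"prompt": qa_prompt},
)

# Usage, per session: answer = chain({"question": user_text})["answer"]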