# Source file: /home/ubuntu/codegamaai-test/efimarket_bot/src/retriver_chunk.py
# (the original first line was a file-manager UI header, not code)
from langchain.vectorstores import Qdrant
import qdrant_client
from src.constants import *
from langchain.embeddings import OpenAIEmbeddings
import os
# from pronouns import *
# Fail-fast no-op: assigning the key to itself changes nothing when it is set,
# but raises KeyError at import time if OPENAI_API_KEY is missing.
# NOTE(review): an explicit membership check with a clear error message would
# be friendlier — confirm this import-time failure is intentional.
os.environ["OPENAI_API_KEY"] = os.environ["OPENAI_API_KEY"]
# Shared embedding model; OpenAIEmbeddings reads OPENAI_API_KEY from the env.
embedding = OpenAIEmbeddings()
# Shared Qdrant client; `qdrant_url` comes from the star import of src.constants.
# NOTE(review): api_key=None implies an unsecured Qdrant endpoint — verify.
client = qdrant_client.QdrantClient(
qdrant_url,
api_key=None )
def vectore_retrival_algo(vectordb, query, n_doc, score, algo):
    """Fetch documents relevant to *query* from a vector store.

    When ``algo`` is ``"similarity_score_threshold"``, ``score`` is used as the
    minimum similarity threshold; for any other value an MMR search is run and
    ``score`` acts as the diversity weight (``lambda_mult``). ``n_doc`` caps
    the number of documents returned. The query is coerced to ``str`` first.
    """
    text = str(query)
    if algo == "similarity_score_threshold":
        retriever = vectordb.as_retriever(
            search_type="similarity_score_threshold",
            search_kwargs={"score_threshold": score, "k": n_doc},
        )
    else:
        retriever = vectordb.as_retriever(
            search_type="mmr",
            search_kwargs={"k": n_doc, "lambda_mult": score},
        )
    return retriever.get_relevant_documents(query=text)
def retirve_method_1(user_id, bot_id, history, query, n_history_questions, n_doc_retirve, lambda_mult, algo):
    """Retrieve documents relevant to *query* from the caller's Qdrant collection.

    The collection name is ``"<user_id>_<bot_id>"``. ``history`` and
    ``n_history_questions`` are currently unused (the history-expansion logic
    was disabled) but are kept for interface compatibility with callers.

    Returns a list of documents from ``vectore_retrival_algo``; on any failure
    returns ``[]`` (best-effort contract preserved from the original).
    """
    try:
        collection_id = f"{user_id}_{bot_id}"
        vectordb = Qdrant(client=client, embeddings=embedding, collection_name=collection_id)
        return vectore_retrival_algo(vectordb, query, n_doc_retirve, lambda_mult, algo)
    except Exception as exc:
        # Bug fix: the original bare `except:` also caught SystemExit and
        # KeyboardInterrupt and printed no detail. Narrow the catch and report
        # the actual error while keeping the best-effort [] return.
        print(f"retirve_method_1 exception: {exc}")
        return []
# NOTE: the trailing "Back to Directory" / "File Manager" lines were
# file-manager UI artifacts captured during extraction, not source code.