I have the following LangChain code that queries a Chroma vector store and extracts answers from the stored docs. How do I incorporate a prompt template to provide some context, such as the following:
sales_template = """You are customer services and you need to help people.
{context}
Question: {question}"""
SALES_PROMPT = PromptTemplate(
template=sales_template, input_variables=["context", "question"]
)
How do I incorporate the above into the below?
#Embedding Text Using Langchain
from langchain.embeddings import SentenceTransformerEmbeddings
embeddings = SentenceTransformerEmbeddings(model_name="all-MiniLM-L6-v2")
# Creating Vector Store with Chroma DB
from langchain.vectorstores import Chroma
#db = Chroma.from_documents(docs, embeddings)
db = Chroma(persist_directory="./chroma_db", embedding_function=embeddings)
# docs = db.similarity_search(query)
# print(docs[0].page_content)
#Using OpenAI Large Language Models (LLM) with Chroma DB
import os
os.environ["OPENAI_API_KEY"] = 'sk-12345678910'
from langchain.chat_models import ChatOpenAI
model_name = "gpt-3.5-turbo"
llm = ChatOpenAI(model_name=model_name)
#Extracting Answers from Documents
from langchain.chains.question_answering import load_qa_chain
chain = load_qa_chain(llm, chain_type="stuff", verbose=True)
query = "What does Neil do for work?"
matching_docs = db.similarity_search(query)
answer = chain.run(input_documents=matching_docs, question=query)
print(answer)
You can try it as below, passing the prompt through the chain_type_kwargs argument (using the db store from your code):
sales_template = """You are customer services and you need to help people.
{context}
Question: {question}"""
from langchain.llms import OpenAI
from langchain.chains import RetrievalQAWithSourcesChain
from langchain.prompts import PromptTemplate

chain = RetrievalQAWithSourcesChain.from_chain_type(
    llm=OpenAI(temperature=0),
    chain_type="stuff",
    retriever=db.as_retriever(search_type="similarity", search_kwargs={"k": 2}),
    chain_type_kwargs={
        "prompt": PromptTemplate(
            template=sales_template,
            input_variables=["context", "question"],
        ),
        # the with-sources "stuff" chain feeds the retrieved docs into a
        # "summaries" variable by default, so point it at "context" to match the template
        "document_variable_name": "context",
    },
)
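For reference, a minimal usage sketch (assuming the chain above and the Chroma db from your question): RetrievalQAWithSourcesChain takes a "question" key and returns "answer" and "sources". Note that the with-sources chain may also expect each stored document to carry a "source" entry in its metadata.

result = chain({"question": "What does Neil do for work?"})
print(result["answer"])
print(result["sources"])  # likely empty here, since the custom template never asks the model to cite sources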
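Alternatively, if you want to keep your existing load_qa_chain and similarity_search flow, the plain question-answering "stuff" chain already fills {context} with the matched docs and {question} with the query, so you can pass your prompt straight in. A minimal sketch, assuming the llm and db objects from your code:

from langchain.prompts import PromptTemplate
from langchain.chains.question_answering import load_qa_chain

sales_template = """You are customer services and you need to help people.
{context}
Question: {question}"""

SALES_PROMPT = PromptTemplate(
    template=sales_template, input_variables=["context", "question"]
)

# the "stuff" chain stuffs the matched docs into {context} and the query into {question}
chain = load_qa_chain(llm, chain_type="stuff", prompt=SALES_PROMPT, verbose=True)

query = "What does Neil do for work?"
matching_docs = db.similarity_search(query)
answer = chain.run(input_documents=matching_docs, question=query)
print(answer)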