Hi,
I am trying to do Q&A over documents with LlamaIndex, using a custom LLM that calls a local model server. I tried the code below, but I am getting an error. Could someone please help me fix it?
from pathlib import Path
from typing import Any

import requests
import torch

# Import paths below follow the pre-0.10 llama_index layout; adjust them for your version
from llama_index import ListIndex, ServiceContext
from llama_index.embeddings import HuggingFaceEmbedding
from llama_index.llms import CompletionResponse, CompletionResponseGen, CustomLLM, LLMMetadata
from llama_index.llms.base import llm_completion_callback
from llama_index.readers.file.docs_reader import DocxReader

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
context_window = 1024
num_output = 256
model_name = 'GPT2'

# Local GTE-Base model used for embeddings
embedding_model = '/home/Coding/Coding Stuff/AI Models/GTE-Base/'
embding = HuggingFaceEmbedding(model_name=embedding_model, device=device)
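Before building anything on top of it, I can check that the embedding model loads and embeds text on its own; get_text_embedding is the standard LlamaIndex embedding call, and the sample string is arbitrary:

vec = embding.get_text_embedding("hello world")
print(len(vec))  # embedding dimension, 768 for GTE-Base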
class Our_own_llm(CustomLLM):
    @property
    def metadata(self) -> LLMMetadata:
        '''Get LLM metadata.'''
        return LLMMetadata(context_window=context_window, num_output=num_output, model_name=model_name)

    @llm_completion_callback()
    def complete(self, prompt: str, max_length: int = 1024, temperature: float = 0.5, **kwargs: Any) -> CompletionResponse:
        '''Complete the prompt by calling the local model server.'''
        # CustomLLM expects the first argument to be the prompt text
        api_url = "http://127.0.0.1:5000/own_gpt"
        data = {"context": None, "question": prompt, "max_length": max_length, "temperature": temperature}
        response = requests.post(url=api_url, json=data)
        # The server is expected to return the generated text as the plain response body;
        # if it returns JSON, parse it with response.json() instead
        result = response.text
        return CompletionResponse(text=result)

    @llm_completion_callback()
    def stream_complete(self, prompt: str, **kwargs: Any) -> CompletionResponseGen:
        raise NotImplementedError()
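For context, complete() assumes a local server at http://127.0.0.1:5000/own_gpt that accepts a JSON body with context, question, max_length and temperature, and returns the answer as plain text. My real endpoint runs the GPT-2 model; a minimal Flask sketch of that contract, with the answer replaced by a placeholder echo, looks like this:

from flask import Flask, request

app = Flask(__name__)

@app.route("/own_gpt", methods=["POST"])
def own_gpt():
    payload = request.get_json()
    question = payload["question"]  # fields sent by Our_own_llm.complete()
    # ... run the GPT-2 model here, using payload["max_length"] and payload["temperature"] ...
    answer = "echo: " + question  # placeholder so the sketch runs end to end
    return answer  # plain-text body, read by the client via response.text

if __name__ == "__main__":
    app.run(port=5000)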
def context_service():
    '''Build a ServiceContext that uses the custom LLM and the local embedding model.'''
    llm = Our_own_llm()
    service_context = ServiceContext.from_defaults(llm=llm, context_window=context_window, num_output=num_output, embed_model=embding)
    return service_context
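To narrow down where the failure happens, the custom LLM can also be called directly before any index is involved; the prompt string here is arbitrary:

llm_check = Our_own_llm().complete("Say hello")
print(llm_check.text)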
file = '/home/laxmidhar/Coding Stuff/Data_House/interview questions.docx'
query = 'what is selection bias?'
service_context = context_service()
docx_document = DocxReader()
data = docx_document.load_data(file=Path(file))
# The embedding model is already set on the service context, so the stray
# embd_model kwarg is removed here (from_documents does not accept it)
index = ListIndex.from_documents(data, service_context=service_context)
query_engine = index.as_query_engine()
response = query_engine.query(query)
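Once it runs, printing the response shows the answer and how many chunks were used:

print(response)
print(len(response.source_nodes))

Also, as far as I understand, ListIndex's default query engine sends every node through the LLM and does not actually use the embedding model, so if I want embedding-based retrieval I probably need a VectorStoreIndex instead (also importable from the top-level llama_index package in pre-0.10 versions). I am not sure whether this is related to the error:

index = VectorStoreIndex.from_documents(data, service_context=service_context)
query_engine = index.as_query_engine()
response = query_engine.query(query)
print(response)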
The full error details are in the attached txt file.