Cell In[67], line 5
2 print(nodes[i].text)
      2 print(nodes[i].text)
      4 print("**GENERATED**")
----> 5 response = global_index.query(nodes[i].text, similarity_top_k=5, mode="embedding", service_context = service_context, text_qa_template=QA_PROMPT)
      6 display(Markdown(f"{response}"))
      8 print("**SOURCES**")
File ~/anaconda3/envs/424b/lib/python3.11/site-packages/llama_index/indices/base.py:244, in BaseGPTIndex.query(self, query_str, mode, query_transform, use_async, **query_kwargs)
230 query_config = QueryConfig(
231 index_struct_type=self._index_struct.get_type(),
232 query_mode=mode_enum,
233 query_kwargs=query_kwargs,
234 )
235 query_runner = QueryRunner(
236 index_struct=self._index_struct,
237 service_context=self._service_context,
(...)
242 use_async=use_async,
243 )
--> 244 return query_runner.query(query_str)
File ~/anaconda3/envs/424b/lib/python3.11/site-packages/llama_index/indices/query/query_runner.py:341, in QueryRunner.query(self, query_str_or_bundle, index_id, level)
323 """Run query.
...
683 rbody, rcode, resp.data, rheaders, stream_error=stream_error
684 )
685 return resp
InvalidRequestError: Invalid URL (POST /v1/completions)
# Call the OpenAI Edits endpoint (POST /v1/edits) directly.
# NOTE(review): "text-davinci-edit-001" is an *edit* model — routing it through
# the Completions endpoint is what produced the traceback above
# ("Invalid URL (POST /v1/completions)"); `openai.Edit.create` hits /v1/edits.
# NOTE(review): the Edits API and edit models are deprecated by OpenAI —
# confirm this endpoint is still available on the account before relying on it.
# Bug fixed: the response was previously discarded; capture it so the caller
# can read edit_response["choices"][0]["text"].
edit_response = openai.Edit.create(
    model="text-davinci-edit-001",
    input="",        # text to be edited (empty here — a probe call)
    instruction="",  # edit instruction (empty here — a probe call)
    temperature=0.7,
    top_p=1,
)
class CustomLLM:
    """Minimal LLM wrapper that routes prompts through the OpenAI Edits API.

    Intended as a drop-in ``_call``-style backend (LangChain-like interface):
    the prompt is forwarded as the edit *instruction* and the model's edited
    text is returned as a plain string.
    """

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* to the edit model and return the generated text.

        Args:
            prompt: The instruction/prompt to execute.
            stop: Accepted for interface compatibility; not supported by the
                Edits endpoint and therefore ignored here.

        Returns:
            The edited text from the first choice of the API response.
        """
        # NOTE(review): the Edits API ("text-davinci-edit-001") is deprecated
        # by OpenAI — verify availability before shipping.
        result = openai.Edit.create(
            model="text-davinci-edit-001",
            input="",            # no base text to edit; the instruction drives output
            instruction=prompt,  # bug fixed: the prompt was never passed through
            temperature=0.7,
            top_p=1,
        )
        # Edit responses carry the generated text under choices[0].text.
        return result["choices"][0]["text"]