create-llama
... When I execute the npx command, in the final part when I select the option to install dependencies, it fails because it can't find poetry... but when looking into the poetry docs, they don't recommend installing poetry globally... thus, this will also install the deps globally instead of in the create-llama project, am I right? index = VectorStoreIndex.from_documents(
documents, transformations=[text_splitter]
)
# Chat LLM shared by the engine below: a small model, short replies
# (max_tokens=200), and a persona injected via the system prompt.
llm = OpenAI(
    model="gpt-3.5-turbo",
    max_tokens=200,
    system_prompt="You are a dog",
)
class RAG:
    """Thin wrapper exposing a vector index as a conversational chat engine."""

    def __init__(self, index: VectorStoreIndex):
        """Wrap *index* in a condense-plus-context chat engine.

        NOTE(review): the attribute is named ``index`` but actually holds a
        chat engine; kept as-is because external callers may rely on it.
        """
        self.index = index.as_chat_engine(llm=llm, chat_mode="condense_plus_context")

    def chat(self, query: str):
        """Send *query* to the chat engine and return the reply text."""
        answer = self.index.chat(query)
        return answer.response
@r.post("/request")
async def chat_request(
    data: _ChatData,
    chat_engine: BaseChatEngine = Depends(get_chat_engine),
):
    """Handle a one-shot (non-streaming) chat request.

    Splits the payload into the latest user message plus prior history,
    runs the chat engine, and returns the assistant reply together with
    the source nodes that grounded it.
    """
    last_message_content, messages = await parse_chat_data(data)
    # Use the async variant so the blocking LLM round-trip does not stall
    # the event loop of this async endpoint.
    response = await chat_engine.achat(last_message_content, messages)
    return _Result(
        result=_Message(role=MessageRole.ASSISTANT, content=response.response),
        nodes=_SourceNodes.from_source_nodes(response.source_nodes),
    )
def run_agent(query: str) -> str:
    """Answer *query* with a Bedrock-backed ReAct agent and return the reply text.

    Reads the model id and AWS credentials from the environment
    (MODEL_ID, BEDROCK_AWS_ACCESS_KEY, BEDROCK_AWS_SECRET_KEY).
    """
    # TODO(review): the prompt template was elided here (`...` is Ellipsis);
    # `.format()` below requires an actual str template — restore it.
    prompt = ...
    # Newline-separated tool names injected into the prompt.
    tool_names_str = "\n".join(tool.metadata.name for tool in tools)
    qa_template = prompt.format(query=query, tools=tool_names_str)
    llm = Bedrock(
        model=os.getenv("MODEL_ID"),
        aws_access_key_id=os.getenv("BEDROCK_AWS_ACCESS_KEY"),
        aws_secret_access_key=os.getenv("BEDROCK_AWS_SECRET_KEY"),
        system_prompt=qa_template,
        temperature=0,
        region_name="us-east-1",
    )
    agent = ReActAgent.from_tools(tools, llm=llm, verbose=True, max_iterations=40)
    response = agent.chat(qa_template)
    # agent.chat returns an AgentChatResponse, not a str; coerce so the
    # declared return type (-> str) is actually honored.
    return str(response)
# Gradio UI theme: the default theme with an amber primary color.
theme = gr.themes.Default(primary_hue="amber")