Enter Query: do you offer Unity3D?
INFO:llama_index.token_counter.token_counter:> [retrieve] Total LLM token usage: 0 tokens
INFO:llama_index.token_counter.token_counter:> [retrieve] Total embedding token usage: 7 tokens
INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 1803 tokens
INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens
INFO:llama_index.token_counter.token_counter:> [get_response] Total LLM token usage: 1803 tokens
INFO:llama_index.token_counter.token_counter:> [get_response] Total embedding token usage: 0 tokens
Output: Yes, they offer resources skilled in Unity3D for game development.
import logging

# Remove the duplicate handlers that cause each INFO line to be printed twice.
logging.getLogger().handlers.clear()
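If you would rather keep the existing handlers and just drop the token-counter chatter, an alternative (not from the original code) is to raise the level of the llama_index logger whose name appears in the output above; a minimal sketch:

import logging

# Suppress INFO-level messages from llama_index (including the token counter)
# while leaving the root handlers in place; WARNING and above still print.
logging.getLogger("llama_index").setLevel(logging.WARNING)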
from langchain import OpenAI
from llama_index import SimpleDirectoryReader, LangchainEmbedding, GPTListIndex, GPTVectorStoreIndex, PromptHelper, StorageContext, load_index_from_storage
from llama_index import LLMPredictor, ServiceContext
import os
from dotenv import dotenv_values

os.environ['OPENAI_API_KEY'] = dotenv_values()['OPENAI_API_KEY']


def construct_index(directory_path):
    # set maximum input size
    max_input_size = 4096
    # set number of output tokens
    num_outputs = 256
    # set maximum chunk overlap
    max_chunk_overlap = 0.2
    # set chunk size limit
    chunk_size_limit = 600
    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)

    # define LLM
    llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.2, model_name="gpt-3.5-turbo", max_tokens=num_outputs))

    documents = SimpleDirectoryReader(directory_path).load_data()

    # pass the prompt helper in so its settings actually take effect
    service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
    index = GPTVectorStoreIndex.from_documents(documents, service_context=service_context)

    index.storage_context.persist('index.json')
    return index


def ask_bot(query, input_index='index.json'):
    storage_context = StorageContext.from_defaults(persist_dir=input_index)
    index = load_index_from_storage(storage_context)
    query_engine = index.as_query_engine(response_mode='compact')
    response = query_engine.query(query)
    response = str(response)
    print("Output: " + response)
    return response


# construct_index('knowledge/')
from llama_index import SimpleDirectoryReader, ListIndex, VectorStoreIndex, StorageContext, load_index_from_storage
from llama_index import ServiceContext
from llama_index.llms import OpenAI
import os
from dotenv import dotenv_values

os.environ['OPENAI_API_KEY'] = dotenv_values()['OPENAI_API_KEY']


def get_service_context():
    chunk_size = 600
    num_outputs = 256
    context_window = 4096

    # define LLM
    llm = OpenAI(temperature=0.2, model="gpt-3.5-turbo", max_tokens=num_outputs)

    service_context = ServiceContext.from_defaults(
        llm=llm,
        chunk_size=chunk_size,
        context_window=context_window,
    )
    return service_context


def construct_index(directory_path):
    service_context = get_service_context()
    documents = SimpleDirectoryReader(directory_path).load_data()
    index = VectorStoreIndex.from_documents(documents, service_context=service_context)
    index.storage_context.persist(persist_dir='./storage')
    return index


def ask_bot(query, input_index_dir='./storage'):
    service_context = get_service_context()
    storage_context = StorageContext.from_defaults(persist_dir=input_index_dir)
    index = load_index_from_storage(storage_context, service_context=service_context)
    query_engine = index.as_query_engine(
        response_mode='compact',
        similarity_top_k=3,  # bump up the top k from the default of 2 due to the smaller chunk size
    )
    response = query_engine.query(query)
    response = str(response)
    print("Output: " + response)
    return response


# construct_index('knowledge/')
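A minimal usage sketch of the migrated functions, assuming a knowledge/ directory like the one referenced in the commented-out call, and using the same question as in the log output at the top:

# Build the index once (persisted to ./storage), then query it.
construct_index('knowledge/')
ask_bot('do you offer Unity3D?')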
File "/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/site-packages/openai/util.py", line 186, in default_api_key raise openai.error.AuthenticationError( openai.error.AuthenticationError: No API key provided. You can set your API key in code using 'openai.api_key = <API-KEY>', or you can set the environment variable OPENAI_API_KEY=<API-KEY>). If your API key is stored in a file, you can point the openai module at it with 'openai.api_key_path = <PATH>'. You can generate API keys in the OpenAI web interface. See https://platform.openai.com/account/api-keys for details.
openai.api_key = "sk-..."
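Rather than hard-coding the key, the same dotenv pattern used in the scripts above also works here; a minimal sketch:

import os
import openai
from dotenv import dotenv_values

# Load the key from a local .env file and hand it to the openai module.
os.environ['OPENAI_API_KEY'] = dotenv_values()['OPENAI_API_KEY']
openai.api_key = os.environ['OPENAI_API_KEY']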