Do you see anything in here?
import json

import chromadb
# Imports assume the pre-0.10 llama_index package layout (the ServiceContext era)
from llama_index import ServiceContext, StorageContext, VectorStoreIndex
from llama_index.embeddings import MistralAIEmbedding
from llama_index.llms import Ollama
from llama_index.vector_stores import ChromaVectorStore


class Configuration:
    def __init__(self):
        self.initialize()

    def initialize(self):
        # Local LLM served over HTTP
        self.llm = Ollama(model="mixtral:8x7b-instruct-v0.1-q6_K", base_url="http://192.168.0.105:1234")
        # Hosted embedding model (MISTRAL_API_KEY is defined elsewhere in the script)
        self.embed_model = MistralAIEmbedding(model_name="mistral-embed",
                                              api_key=MISTRAL_API_KEY)
        # Persistent Chroma collection backing the vector store
        self.client = chromadb.PersistentClient(path="./dbs/vector_db")
        self.chroma_collection = self.client.get_or_create_collection(name="test")
        self.vector_store = ChromaVectorStore(chroma_collection=self.chroma_collection)
        self.storage_context = StorageContext.from_defaults(vector_store=self.vector_store)
        # Chunking and model settings used when building the index
        self.service_context = ServiceContext.from_defaults(llm=self.llm,
                                                            chunk_size=1024,
                                                            chunk_overlap=25,
                                                            embed_model=self.embed_model)
...<snip>...
def main():
    config = Configuration()
    document_list = []
    rows = extract_and_store_articles_info("./dbs/processed_data_test.db")
    for row in rows:
        # Each row is a (metadata_json, text) pair
        metadata, text = json.loads(row[0]), row[1]
        documents = load_data(metadata, text)
        document_list.append(documents)
    for document in document_list:
        print(document[0].metadata)
        try:
            # Embed and index this batch of documents into the Chroma-backed store
            VectorStoreIndex.from_documents(documents=document,
                                            service_context=config.service_context,
                                            storage_context=config.storage_context,
                                            show_progress=True)
        except ValueError as e:
            print(document, e)
            continue
Parsing nodes: 100%|[00:00<00:00, 150.73it/s]
Generating embeddings: 100%|
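
For reference, this is the kind of sanity check I would run afterwards to see whether anything was actually persisted into the Chroma collection; it is only a sketch that assumes chromadb's standard count()/peek() collection methods and is not part of the script above.

config = Configuration()
# How many embedded chunks ended up in the persistent collection
print(config.chroma_collection.count())
# Inspect one stored record and its metadata
print(config.chroma_collection.peek(limit=1))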