PGVectorStore(..., embed_dim=384)
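For context, the 384 here matches the embedding model used later in the thread; a minimal sketch of such a model (the exact checkpoint "BAAI/bge-small-en-v1.5" is an assumption, the later snippet's comment only says "bge-small" is 384-dimensional):

# Minimal sketch, assuming embed_model in the snippets below is bge-small;
# the specific checkpoint name is an assumption.
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")  # produces 384-dim vectors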
vector_store = PGVectorStore.from_params(
    database=default_db,
    host=pg_host,
    password=pg_password,
    port=pg_port,
    user=pg_user,
    table_name="test_chunk_512",
    embed_dim=384,  # embedding dimension for openai is 1536
)
index = VectorStoreIndex.from_documents(
    documents,
    storage_context=storage_context,
    #service_context=service_context,
    embed_dim=384,
    show_progress=True
)
index = VectorStoreIndex.from_documents(
    documents,
    storage_context=storage_context,
    embed_model=embed_model,
    show_progress=True
)
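Note that VectorStoreIndex.from_documents accepts arbitrary keyword arguments, so embed_dim=384 is effectively ignored there; the stored vector size comes from the embedding model, and when no embed_model is supplied LlamaIndex falls back to its default OpenAI embedding (1536 dimensions). A sketch of the equivalent global configuration via llama_index.core.Settings:

# Sketch: set the embedding model once via the global Settings object
# so from_documents does not fall back to the 1536-dim OpenAI default.
from llama_index.core import Settings, VectorStoreIndex

Settings.embed_model = embed_model  # the 384-dim model

index = VectorStoreIndex.from_documents(
    documents,
    storage_context=storage_context,
    show_progress=True,
)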
DataError: (psycopg2.errors.DataException) expected 1536 dimensions, not 384
index = VectorStoreIndex.from_documents(
    documents,
    storage_context=storage_context,
    #service_context=service_context,
    embed_dim=384,
    embed_model=embed_model,
    show_progress=True
)
vector_store = PGVectorStore.from_params(
    database=default_db,
    host=pg_host,
    password=pg_password,
    port=pg_port,
    user=pg_user,
    table_name="test_chunk_512",
    embed_dim=384,  # embedding dimension for bge-small is 384
)

# create index
storage_context = StorageContext.from_defaults(vector_store=vector_store)
index = VectorStoreIndex.from_documents(
    documents,
    storage_context=storage_context,
    #service_context=service_context,
    embed_dim=384,
    embed_model=embed_model,
    show_progress=True
)
DataException                             Traceback (most recent call last)
File /opt/conda/lib/python3.10/site-packages/sqlalchemy/engine/base.py:2120, in Connection._exec_insertmany_context(self, dialect, context)
   2119 else:
-> 2120     dialect.do_execute(
   2121         cursor,
   2122         sub_stmt,
   2123         sub_params,
   2124         context,
   2125     )
   2127 except BaseException as e:

File /opt/conda/lib/python3.10/site-packages/sqlalchemy/engine/default.py:919, in DefaultDialect.do_execute(self, cursor, statement, parameters, context)
    918 def do_execute(self, cursor, statement, parameters, context=None):
--> 919     cursor.execute(statement, parameters)

DataException: expected 1536 dimensions, not 384

The above exception was the direct cause of the following exception:

DataError                                 Traceback (most recent call last)
Cell In[26], line 1
----> 1 index = VectorStoreIndex.from_documents(
      2     documents,
      3     storage_context=storage_context,
      4     #service_context=service_context,
      5     embed_dim=384,
      6     embed_model=embed_model,
      7     show_progress=True
      8 )
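Since embed_model was already passed in the failing call, the 1536 most likely comes from the Postgres side: if the table was created on an earlier run with the default dimension, PGVectorStore keeps using the existing vector(1536) column rather than altering it, so the new embed_dim=384 never takes effect. A sketch for checking and resetting it, assuming the usual "data_" table-name prefix and "embedding" column name:

# Sketch for diagnosing the mismatch; the table name "data_test_chunk_512"
# and column name "embedding" are assumptions based on PGVectorStore's
# default naming.
import psycopg2

conn = psycopg2.connect(
    dbname=default_db, host=pg_host, port=pg_port, user=pg_user, password=pg_password
)
with conn, conn.cursor() as cur:
    # Inspect the declared vector dimension of the existing table.
    cur.execute(
        "SELECT format_type(atttypid, atttypmod) "
        "FROM pg_attribute "
        "WHERE attrelid = 'data_test_chunk_512'::regclass AND attname = 'embedding'"
    )
    print(cur.fetchone())  # e.g. ('vector(1536)',)

    # If it reports 1536, drop the table (or use a new table_name) so
    # PGVectorStore can recreate it with embed_dim=384.
    # cur.execute("DROP TABLE data_test_chunk_512")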
disk_offload
Loading the model fails with an error ending in "... use the disk_offload function instead." I'm not sure where to configure disk_offload.

llm = HuggingFaceLLM(
    model_name="meta-llama/Meta-Llama-3-8B",
    model_kwargs={
        "token": token_hf,
        "torch_dtype": torch.bfloat16,  # comment this line and uncomment below to use 4bit
        # "quantization_config": quantization_config
    },
    generate_kwargs={
        "do_sample": True,
        "temperature": 0.01,
        "top_p": 0.9,
    },
    tokenizer_name="meta-llama/Meta-Llama-3-8B",
    tokenizer_kwargs={"token": token_hf},
    stopping_ids=stopping_ids,
)
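That message is raised by accelerate when part of the model ends up mapped to disk without an offload directory being configured. One possible configuration (a sketch, not a verified fix) is to pass offload settings through model_kwargs, which HuggingFaceLLM forwards to AutoModelForCausalLM.from_pretrained; the "offload" folder name is arbitrary:

# Sketch: enable disk offload via kwargs forwarded to
# AutoModelForCausalLM.from_pretrained.
llm = HuggingFaceLLM(
    model_name="meta-llama/Meta-Llama-3-8B",
    device_map="auto",
    model_kwargs={
        "token": token_hf,
        "torch_dtype": torch.bfloat16,
        "offload_folder": "offload",    # directory for weights that don't fit in RAM/VRAM
        "offload_state_dict": True,     # offload the state dict to disk while loading
    },
    generate_kwargs={"do_sample": True, "temperature": 0.01, "top_p": 0.9},
    tokenizer_name="meta-llama/Meta-Llama-3-8B",
    tokenizer_kwargs={"token": token_hf},
    stopping_ids=stopping_ids,
)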