import {
  Document,
  IngestionPipeline,
  KeywordExtractor,
  OpenAIEmbedding,
  SimpleNodeParser,
} from "llamaindex";

// `client` (a node-postgres client) and `azureOpenAillm` are assumed to be
// configured elsewhere in this module.
export const llamaPipeline = async (content) => {
  // Embedding model that attaches a vector to each node at the end of the pipeline.
  const openAIEmbedding = new OpenAIEmbedding({
    apiKey: process.env.OPENAI_SECRET_KEY,
    model: "text-embedding-3-small",
  });
  try {
    // Chunking settings are read from the database; "ingest_settings" is a
    // placeholder table name for illustration -- adjust it to the real schema.
    const resFromAnalyze = await client.query(
      "SELECT overlap, chunk_size FROM ingest_settings LIMIT 1"
    );
    const { overlap = 200, chunk_size: chunkSize = 1024 } =
      resFromAnalyze.rows[0] ?? {};

    // Pipeline: split the document into nodes, extract keywords per node,
    // then embed each node.
    const pipeline = new IngestionPipeline({
      transformations: [
        // new RemoveSpecialCharacters(),
        new SimpleNodeParser({ chunkSize, chunkOverlap: overlap }),
        new KeywordExtractor({ llm: azureOpenAillm }),
        openAIEmbedding,
      ],
    });

    const nodes = await pipeline.run({
      documents: [new Document({ text: content })],
    });
    console.log(nodes);
    return nodes;
  } catch (err) {
    console.error(err);
    // Return an empty result rather than `undefined` so callers can iterate safely.
    return [];
  }
};
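
// Example usage: ingest a text file through the pipeline. A minimal sketch --
// the `ingestFile` helper and the file-reading step are illustrative
// assumptions, not part of the original pipeline.
export const ingestFile = async (path) => {
  const { readFile } = await import("node:fs/promises");
  const text = await readFile(path, "utf8");
  const nodes = await llamaPipeline(text);
  console.log(`Ingested ${nodes.length} nodes from ${path}`);
  return nodes;
};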