
I want to use Azure OpenAI but am being asked to provide an OPENAI_APIKEY?


Thank you!

That makes sense. I ended up finding out that I don’t have access to the Azure OpenAI embedding model in my Azure OpenAI subscription, so I had to change direction and use sentence-transformers/all-MiniLM-L6-v2 instead.
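
Since embedding now happens client-side, the `_generate_embedding` helper used in the ingestion code further down is just a thin wrapper around sentence-transformers. A minimal sketch of what it might look like (shown as a free function; everything beyond the model name is my assumption):

from sentence_transformers import SentenceTransformer

# Stand-in for the _generate_embedding helper used in ingest_document below.
# all-MiniLM-L6-v2 produces 384-dimensional vectors; chunks and queries must
# be encoded with this same model so the cosine distances are comparable.
_model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

def _generate_embedding(text: str) -> list[float]:
    # encode() returns a numpy array; the Weaviate client accepts a plain list
    return _model.encode(text).tolist()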

So I am creating the collection like this:

# requires: import weaviate
def _create_new_collection(self):
        """Create a new collection with proper configuration."""
        return self.client.collections.create(
            name=self.collection_name,
            vectorizer_config=weaviate.classes.config.Configure.Vectorizer.none(),
            vector_index_config=weaviate.classes.config.Configure.VectorIndex.hnsw(
                distance_metric=weaviate.classes.config.VectorDistances.COSINE,
                vector_cache_max_objects=1000000,
                max_connections=64,
                ef_construction=128,
                ef=100,
                dynamic_ef_min=100,
                dynamic_ef_max=500,
                dynamic_ef_factor=8,
                flat_search_cutoff=40000,
                cleanup_interval_seconds=300
            ),
            properties=[
                weaviate.classes.config.Property(
                    name="content",
                    data_type=weaviate.classes.config.DataType.TEXT,
                    description="The chunk content"
                    # note: Property has no `vectorize` argument in the v4 client,
                    # and with Vectorizer.none() vectors are supplied client-side anyway
                ),
                weaviate.classes.config.Property(
                    name="documentId",
                    data_type=weaviate.classes.config.DataType.TEXT,
                    description="Document identifier"
                ),
                weaviate.classes.config.Property(
                    name="chunkId",
                    data_type=weaviate.classes.config.DataType.INT,
                    description="Chunk number within document"
                ),
                weaviate.classes.config.Property(
                    name="source",
                    data_type=weaviate.classes.config.DataType.TEXT,
                    description="Document source"
                ),
                weaviate.classes.config.Property(
                    name="lastUpdated",
                    data_type=weaviate.classes.config.DataType.DATE,
                    description="Last update timestamp"
                ),
                weaviate.classes.config.Property(
                    name="contentHash",
                    data_type=weaviate.classes.config.DataType.TEXT,
                    description="Hash of document content"
                ),
                weaviate.classes.config.Property(
                    name="filePath",
                    data_type=weaviate.classes.config.DataType.TEXT,
                    description="Original file path"
                )
            ]
        )
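
(For completeness: these snippets assume `self.client` is an already-connected v4 Python client, something along these lines, with the connection details being my assumption:)

import weaviate

# Assumption: a local Weaviate instance on the default ports; swap in
# connect_to_weaviate_cloud(...) or connect_to_custom(...) as appropriate.
client = weaviate.connect_to_local()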

And ingesting the documents like this:

# requires: import hashlib, logging, weaviate; from datetime import datetime, timezone
def ingest_document(self, content: str, source: str, file_path: str = None) -> str:
        """Ingest a document into Weaviate."""
        try:
            doc_id = self._generate_doc_id(content, source)
            content_hash = hashlib.md5(content.encode()).hexdigest()
            
            # Get collection
            collection = self.client.collections.get(self.collection_name)
            
            # Delete existing chunks if document exists
            try:
                collection.data.delete_many(
                    # the v4 client expects a Filter object here,
                    # not the old v3-style dict filter
                    where=weaviate.classes.query.Filter.by_property("documentId").equal(doc_id)
                )
                logging.info(f"Deleted existing chunks for document {doc_id}")
            except Exception as e:
                if "not found" not in str(e).lower():
                    logging.warning(f"Error deleting existing chunks: {str(e)}")

            # Create new chunks
            chunks = self._chunk_document(content)
            current_time = datetime.now(timezone.utc).isoformat()
            
            # Prepare objects for batch import
            objects_to_create = []
            for i, chunk in enumerate(chunks):
                # Generate vector for the chunk
                vector = self._generate_embedding(chunk)
                
                properties = {
                    "content": chunk,
                    "documentId": doc_id,
                    "chunkId": i,
                    "source": source,
                    "lastUpdated": current_time,
                    "contentHash": content_hash
                }
                
                if file_path:
                    properties["filePath"] = file_path
                    
                # Create object with vector
                objects_to_create.append({
                    "properties": properties,
                    "vector": vector
                })

            # Import objects in batches
            batch_size = 100
            for i in range(0, len(objects_to_create), batch_size):
                batch = objects_to_create[i:i + batch_size]
                try:
                    # dynamic() manages its own server-side batch sizing; the outer
                    # 100-object loop just bounds how much is queued per context
                    with collection.batch.dynamic() as batch_writer:
                        for obj in batch:
                            batch_writer.add_object(
                                properties=obj["properties"],
                                vector=obj["vector"]
                            )
                    # per-object failures inside the batch context don't raise;
                    # surface them explicitly
                    if collection.batch.failed_objects:
                        logging.warning(f"{len(collection.batch.failed_objects)} objects failed for document {doc_id}")
                    logging.info(f"Successfully inserted batch of {len(batch)} chunks for document {doc_id}")
                except Exception as e:
                    logging.error(f"Error inserting batch: {str(e)}")
                    raise

            return doc_id

        except Exception as e:
            logging.error(f"Error ingesting document from {source}: {str(e)}")
            raise
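
The `_generate_doc_id` and `_chunk_document` helpers aren’t shown above; one plausible sketch of them (the hash choice and the fixed-size chunking are my assumptions):

import hashlib

def _generate_doc_id(content: str, source: str) -> str:
    # Assumption: the id is keyed on the source alone, so re-ingesting an
    # updated version of the same document matches (and replaces) its old
    # chunks via the delete_many call above; contentHash tracks content changes.
    return hashlib.sha1(source.encode()).hexdigest()

def _chunk_document(content: str, chunk_size: int = 1000, overlap: int = 200) -> list[str]:
    # Assumption: naive fixed-size character chunking with overlap.
    chunks = []
    step = chunk_size - overlap
    for start in range(0, len(content), step):
        chunk = content[start:start + chunk_size]
        if chunk.strip():
            chunks.append(chunk)
    return chunks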

This all works: I can connect to Weaviate, ingest the documents in batches, and send my prompt with the relevant chunks to Azure OpenAI.

That said, the answers I am getting back from Azure OpenAI are pretty average, and it doesn’t look like they draw on the knowledge from the internal documents I’ve passed in as relevant chunks, so that is what I need to figure out next.
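
In case it helps to narrow this down: with Vectorizer.none(), the query side also has to go through the same all-MiniLM-L6-v2 model before searching. A minimal retrieval sketch under that assumption (the collection name and query text are placeholders):

# Hypothetical retrieval step: embed the question with the SAME model used
# at ingest time, then search by vector.
collection = client.collections.get("MyCollection")  # placeholder name

query_vector = _generate_embedding("example question")
results = collection.query.near_vector(
    near_vector=query_vector,
    limit=5,
    return_properties=["content", "source", "documentId"],
)
for obj in results.objects:
    print(obj.properties["source"], obj.properties["content"][:80])

If the chunks that come back here look relevant, retrieval itself is probably fine, and the prompt construction on the Azure OpenAI side would be the next place to look.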

Cheers

