|
| 1 | +from codegate.codegate_logging import setup_logging |
| 2 | +from codegate.inference.inference_engine import LlamaCppInferenceEngine |
| 3 | +from weaviate.classes.config import DataType, Property |
| 4 | +from weaviate.classes.query import MetadataQuery |
| 5 | +import weaviate |
| 6 | + |
| 7 | + |
# Weaviate collection definitions consumed by StorageEngine.setup_schema().
# Every property on the "Package" collection is plain text.
schema_config = [
    {
        "name": "Package",
        "properties": [
            {"name": field, "data_type": DataType.TEXT}
            for field in ("name", "type", "status", "description")
        ],
    },
]
| 19 | + |
| 20 | + |
class StorageEngine:
    """Vector storage backed by an embedded Weaviate instance.

    Owns a Weaviate client, ensures the collections described by
    ``schema_config`` exist, and performs similarity search over the
    ``Package`` collection using locally computed embeddings.
    """

    def __init__(self, data_path='./weaviate_data'):
        """Create the client, connect, and ensure the schema exists.

        Args:
            data_path (str): Directory where embedded Weaviate persists data.
        """
        self.client = weaviate.WeaviateClient(
            embedded_options=weaviate.EmbeddedOptions(
                persistence_data_path=data_path
            ),
        )
        self.__logger = setup_logging()
        self.inference_engine = LlamaCppInferenceEngine()
        # GGUF embedding model used to vectorize search queries.
        self.model_path = "./models/all-minilm-L6-v2-q5_k_m.gguf"
        self.schema_config = schema_config
        self.connect()
        self.setup_schema()

    def connect(self):
        """Open the connection and verify the server is ready.

        Raises:
            RuntimeError: If the client reports it is not ready.
        """
        self.client.connect()
        if not self.client.is_ready():
            # RuntimeError is a subclass of Exception, so any existing
            # broad handlers still catch this.
            raise RuntimeError("Weaviate client is not ready.")
        self.__logger.info("Weaviate connection established and client is ready.")

    def setup_schema(self):
        """Create any collections from ``schema_config`` that do not exist yet."""
        for class_config in self.schema_config:
            if not self.client.collections.exists(class_config['name']):
                self.client.collections.create(
                    class_config['name'],
                    properties=class_config['properties'],
                )
            self.__logger.info(f"Weaviate schema for class {class_config['name']} setup complete.")

    async def search(self, query, limit=5, distance=0.1):
        """
        Search the 'Package' collection based on a query string.

        Args:
            query (str): The text query for which to search.
            limit (int): The number of results to return.
            distance (float): Maximum vector distance for a match.

        Returns:
            list: A list of matching results with their properties and distances.
        """
        # embed() is batched: it takes a list of texts and returns a list of
        # vectors, one per input text.
        query_vector = await self.inference_engine.embed(self.model_path, [query])

        # Perform the vector search; best-effort — failures are logged and
        # reported as "no results" rather than propagated to the caller.
        try:
            collection = self.client.collections.get("Package")
            response = collection.query.near_vector(
                # Fix: pass the single query's vector, not the whole batch
                # result, since near_vector expects one vector.
                query_vector[0],
                limit=limit,
                distance=distance,
                return_metadata=MetadataQuery(distance=True),
            )
            if not response:
                return []
            return response.objects

        except Exception as e:
            self.__logger.error(f"Error during search: {str(e)}")
            return []

    def close(self):
        """Shut down the Weaviate client connection."""
        self.client.close()
        self.__logger.info("Connection closed.")
0 commit comments