2024-01-18 21:39:12 +08:00
|
|
|
import base64
|
|
|
|
import json
|
2023-06-25 16:49:14 +08:00
|
|
|
import logging
|
2024-01-02 23:42:00 +08:00
|
|
|
from typing import List, Optional
|
2023-06-25 16:49:14 +08:00
|
|
|
|
2023-08-23 19:10:11 +08:00
|
|
|
import numpy as np
|
2024-01-02 23:42:00 +08:00
|
|
|
from core.model_manager import ModelInstance
|
2023-06-25 16:49:14 +08:00
|
|
|
from extensions.ext_database import db
|
2024-01-12 12:34:01 +08:00
|
|
|
from langchain.embeddings.base import Embeddings
|
2024-01-18 21:39:12 +08:00
|
|
|
|
|
|
|
from extensions.ext_redis import redis_client
|
2023-06-25 16:49:14 +08:00
|
|
|
from libs import helper
|
|
|
|
from models.dataset import Embedding
|
2024-01-12 12:34:01 +08:00
|
|
|
from sqlalchemy.exc import IntegrityError
|
2023-06-25 16:49:14 +08:00
|
|
|
|
2024-01-02 23:42:00 +08:00
|
|
|
# Module-level logger named after this module, per the standard logging convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
2023-06-25 16:49:14 +08:00
|
|
|
|
|
|
|
class CacheEmbedding(Embeddings):
    """Embeddings implementation that caches vectors in Redis.

    Wraps a ``ModelInstance``'s text-embedding capability: every embedded
    text is L2-normalized and stored in Redis (base64-encoded float64
    bytes) under a provider/model/text-hash key with a 1-hour TTL.
    Subsequent lookups for the same text are served from the cache, and
    each cache hit refreshes the TTL.
    """

    # TTL (seconds) applied on every cache write and refreshed on every hit.
    CACHE_TTL = 3600

    def __init__(self, model_instance: ModelInstance, user: Optional[str] = None) -> None:
        """Bind the embedding model instance and an optional end-user id.

        :param model_instance: model used for ``invoke_text_embedding`` calls.
        :param user: optional user identifier forwarded to the model provider.
        """
        self._model_instance = model_instance
        self._user = user

    def _cache_key(self, text: str) -> str:
        """Return the Redis key for *text*: ``{provider}_{model}_{text_hash}``."""
        text_hash = helper.generate_text_hash(text)
        return f'{self._model_instance.provider}_{self._model_instance.model}_{text_hash}'

    def _load_from_cache(self, cache_key: str) -> Optional[List[float]]:
        """Return the cached vector for *cache_key*, or None on a miss.

        A hit refreshes the key's TTL. The stored value is the base64 of the
        raw float64 buffer produced by ``numpy.ndarray.tobytes``.
        """
        cached = redis_client.get(cache_key)
        if not cached:
            return None
        redis_client.expire(cache_key, self.CACHE_TTL)
        return list(np.frombuffer(base64.b64decode(cached), dtype="float"))

    def _save_to_cache(self, cache_key: str, vector: List[float]) -> None:
        """Serialize *vector* (float64 bytes -> base64 str) and SETEX it."""
        vector_bytes = np.array(vector).tobytes()
        encoded_str = base64.b64encode(vector_bytes).decode("utf-8")
        redis_client.setex(cache_key, self.CACHE_TTL, encoded_str)

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed search docs, serving from the Redis cache where possible.

        Cache misses are embedded in a single batched model call, normalized,
        written back to the cache, and slotted into their original positions.

        :param texts: documents to embed.
        :return: one embedding per input text, in input order.
        :raises Exception: re-raises any failure of the embedding model call.
        """
        # Pre-size the result list so cached and freshly-embedded vectors
        # can be filled in by original index.
        text_embeddings: List[Optional[List[float]]] = [None] * len(texts)
        embedding_queue_indices: List[int] = []

        for i, text in enumerate(texts):
            cached_vector = self._load_from_cache(self._cache_key(text))
            if cached_vector is not None:
                text_embeddings[i] = cached_vector
            else:
                embedding_queue_indices.append(i)

        if embedding_queue_indices:
            try:
                # Batch all cache misses into one provider call.
                embedding_result = self._model_instance.invoke_text_embedding(
                    texts=[texts[i] for i in embedding_queue_indices],
                    user=self._user
                )
                embedding_results = embedding_result.embeddings
            except Exception as ex:
                # logger.exception records the traceback; the original
                # logger.error('...: ', ex) passed ex as an unused %-arg.
                logger.exception('Failed to embed documents')
                raise ex

            for batch_pos, original_index in enumerate(embedding_queue_indices):
                try:
                    cache_key = self._cache_key(texts[original_index])
                    vector = embedding_results[batch_pos]
                    # L2-normalize before caching so cached and fresh
                    # vectors are directly comparable by dot product.
                    normalized_embedding = (vector / np.linalg.norm(vector)).tolist()
                    text_embeddings[original_index] = normalized_embedding
                    self._save_to_cache(cache_key, normalized_embedding)
                except IntegrityError:
                    # NOTE(review): likely a leftover from a DB-backed cache
                    # (this branch only touches Redis) — kept for safety.
                    db.session.rollback()
                    continue
                except Exception:
                    # Cache write failures must not fail the embedding call.
                    logging.exception('Failed to add embedding to redis')
                    continue

        return text_embeddings

    def embed_query(self, text: str) -> List[float]:
        """Embed a single query text, serving from the Redis cache where possible.

        :param text: query text to embed.
        :return: the (L2-normalized when freshly computed) embedding vector.
        :raises Exception: propagates any failure of the embedding model call.
        """
        cache_key = self._cache_key(text)
        cached_vector = self._load_from_cache(cache_key)
        if cached_vector is not None:
            return cached_vector

        # No cache hit: embed and normalize. (The original wrapped this in a
        # try/except that only re-raised — a no-op, removed.)
        embedding_result = self._model_instance.invoke_text_embedding(
            texts=[text],
            user=self._user
        )
        embedding_results = embedding_result.embeddings[0]
        embedding_results = (embedding_results / np.linalg.norm(embedding_results)).tolist()

        try:
            self._save_to_cache(cache_key, embedding_results)
        except IntegrityError:
            # NOTE(review): leftover from a DB-backed cache — kept for safety.
            db.session.rollback()
        except Exception:
            # Best-effort cache write: log and still return the embedding.
            logging.exception('Failed to add embedding to redis')

        return embedding_results
|