import logging
import time
from typing import Optional

import click
from celery import shared_task

from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.tools.utils.web_reader_tool import get_image_upload_file_ids
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.dataset import Dataset, DocumentSegment
from models.model import UploadFile


@shared_task(queue="dataset")
def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_id: Optional[str]):
    """
    Clean document when document deleted.

    :param document_id: document id
    :param dataset_id: dataset id
    :param doc_form: doc form (index structure type) of the document
    :param file_id: upload file id

    Usage: clean_document_task.delay(document_id, dataset_id, doc_form, file_id)
    """
    logging.info(click.style("Start clean document when document deleted: {}".format(document_id), fg="green"))
    start_at = time.perf_counter()

    try:
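        # fetch the parent dataset first; the index cleanup below needs it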
        dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()

        if not dataset:
            raise Exception("Document has no dataset")

        segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
        # check if any segments exist for this document
        if segments:
            index_node_ids = [segment.index_node_id for segment in segments]
            index_processor = IndexProcessorFactory(doc_form).init_index_processor()
            index_processor.clean(dataset, index_node_ids)
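
            # delete images referenced in segment content: remove each stored
            # object and its UploadFile row; a missing row is skipped rather
            # than treated as an error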
            for segment in segments:
                image_upload_file_ids = get_image_upload_file_ids(segment.content)
                for upload_file_id in image_upload_file_ids:
                    image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
                    if image_file is None:
                        continue
                    try:
                        storage.delete(image_file.key)
                    except Exception:
                        logging.exception(
                            "Delete image_files failed when storage deleted, image_upload_file_id: {}".format(
                                upload_file_id
                            )
                        )
                    db.session.delete(image_file)
                db.session.delete(segment)
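
            # one commit persists the segment-row and image-row deletions together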
            db.session.commit()
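
        # finally, remove the document's own uploaded source file, if one exists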
        if file_id:
            file = db.session.query(UploadFile).filter(UploadFile.id == file_id).first()
            if file:
                try:
                    storage.delete(file.key)
                except Exception:
                    logging.exception("Delete file failed when document deleted, file_id: {}".format(file_id))
                db.session.delete(file)
                db.session.commit()
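
        # log wall-clock latency; perf_counter() is a monotonic, high-resolution timer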
        end_at = time.perf_counter()
        logging.info(
            click.style(
                "Cleaned document when document deleted: {} latency: {}".format(document_id, end_at - start_at),
                fg="green",
            )
        )
    except Exception:
        logging.exception("Clean document when document deleted failed, document_id: {}".format(document_id))
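

# Minimal dispatch sketch (illustrative only): the attribute names on `document`
# below are assumptions, not guaranteed model fields.
#
#     clean_document_task.delay(
#         document.id,
#         document.dataset_id,
#         document.doc_form,  # assumed attribute
#         upload_file_id,     # or None when the document has no source file
#     )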