disable_segments_from_index_task.py

import logging
import time

import click
from celery import shared_task  # type: ignore

from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import Dataset, DocumentSegment
from models.dataset import Document as DatasetDocument


@shared_task(queue="dataset")
def disable_segments_from_index_task(segment_ids: list, dataset_id: str, document_id: str):
    """
    Async disable segments from index

    :param segment_ids: IDs of the segments to remove from the index
    :param dataset_id: ID of the dataset the segments belong to
    :param document_id: ID of the document the segments belong to

    Usage: disable_segments_from_index_task.delay(segment_ids, dataset_id, document_id)
    """
    start_at = time.perf_counter()

    dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
    if not dataset:
        logging.info(click.style("Dataset {} not found, pass.".format(dataset_id), fg="cyan"))
        return

    dataset_document = db.session.query(DatasetDocument).filter(DatasetDocument.id == document_id).first()
    if not dataset_document:
        logging.info(click.style("Document {} not found, pass.".format(document_id), fg="cyan"))
        return

    if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != "completed":
        logging.info(click.style("Document {} status is invalid, pass.".format(document_id), fg="cyan"))
        return

    # pick the index processor that matches the document's doc_form
    index_processor = IndexProcessorFactory(dataset_document.doc_form).init_index_processor()

    # only touch segments that actually belong to this dataset and document
    segments = (
        db.session.query(DocumentSegment)
        .filter(
            DocumentSegment.id.in_(segment_ids),
            DocumentSegment.dataset_id == dataset_id,
            DocumentSegment.document_id == document_id,
        )
        .all()
    )

    if not segments:
        return

    try:
        index_node_ids = [segment.index_node_id for segment in segments]
        index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=False)

        end_at = time.perf_counter()
        logging.info(click.style("Segments removed from index latency: {}".format(end_at - start_at), fg="green"))
    except Exception:
        logging.exception("disable segments from index failed")
        # removal failed: revert the disable flags so the database stays
        # consistent with the index
        db.session.query(DocumentSegment).filter(
            DocumentSegment.id.in_(segment_ids),
            DocumentSegment.dataset_id == dataset_id,
            DocumentSegment.document_id == document_id,
        ).update(
            {
                "disabled_at": None,
                "disabled_by": None,
                "enabled": True,
            }
        )
        db.session.commit()
    finally:
        # always release the per-segment indexing locks in Redis
        for segment in segments:
            indexing_cache_key = "segment_{}_indexing".format(segment.id)
            redis_client.delete(indexing_cache_key)
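

# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of this module): how a caller might
# enqueue the task. Only the .delay(...) signature and the
# "segment_{}_indexing" cache key come from the task above; the surrounding
# service code is an assumption. `segments` is a hypothetical list of
# DocumentSegment rows the caller has already flagged as disabled and
# committed.
#
#   segment_ids = [segment.id for segment in segments]
#   for segment in segments:
#       # set the per-segment lock the task clears in its finally block;
#       # the 600-second TTL is an assumed safety timeout, not from the source
#       redis_client.setex("segment_{}_indexing".format(segment.id), 600, 1)
#   disable_segments_from_index_task.delay(segment_ids, dataset_id, document_id)
# ---------------------------------------------------------------------------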