clean_unused_messages_task.py

import datetime
import time

import click
from sqlalchemy import func
from werkzeug.exceptions import NotFound

import app
from configs import dify_config
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from models.dataset import Dataset, DatasetQuery, Document
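
# Scheduled cleanup: find datasets created more than CLEAN_DAY_SETTING days
# ago whose completed documents have all gone stale, remove their indexes,
# and disable their documents. Despite the "messages" in the task name, the
# cleanup below operates on datasets.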
@app.celery.task(queue="dataset")
def clean_unused_message_task():
    click.echo(click.style("Start clean unused messages.", fg="green"))
    clean_days = int(dify_config.CLEAN_DAY_SETTING)
    start_at = time.perf_counter()
    # Cutoff timestamp: CLEAN_DAY_SETTING days before now
    thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=clean_days)
    page = 1
    while True:
        try:
            # Subquery: per-dataset count of documents updated within the window
            document_subquery_new = (
                db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
                .filter(
                    Document.indexing_status == "completed",
                    Document.enabled == True,
                    Document.archived == False,
                    Document.updated_at > thirty_days_ago,
                )
                .group_by(Document.dataset_id)
                .subquery()
            )
            # Subquery: per-dataset count of documents last updated before the window
            document_subquery_old = (
                db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
                .filter(
                    Document.indexing_status == "completed",
                    Document.enabled == True,
                    Document.archived == False,
                    Document.updated_at < thirty_days_ago,
                )
                .group_by(Document.dataset_id)
                .subquery()
            )
            # Main query: datasets older than the window that have only stale documents
            datasets = (
                db.session.query(Dataset)
                .outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id)
                .outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id)
                .filter(
                    Dataset.created_at < thirty_days_ago,
                    func.coalesce(document_subquery_new.c.document_count, 0) == 0,
                    func.coalesce(document_subquery_old.c.document_count, 0) > 0,
                )
                .order_by(Dataset.created_at.desc())
                .paginate(page=page, per_page=50)
            )
        except NotFound:
            # paginate() raises NotFound when the requested page is out of range
            break
        if datasets.items is None or len(datasets.items) == 0:
            break
        page += 1
        for dataset in datasets:
            dataset_query = (
                db.session.query(DatasetQuery)
                .filter(DatasetQuery.created_at > thirty_days_ago, DatasetQuery.dataset_id == dataset.id)
                .all()
            )
            if not dataset_query:
                # No recent queries against this dataset: clean it up
                try:
                    # Remove the dataset's index
                    index_processor = IndexProcessorFactory(dataset.doc_form).init_index_processor()
                    index_processor.clean(dataset, None)
                    # Disable all of the dataset's documents
                    update_params = {Document.enabled: False}
                    Document.query.filter_by(dataset_id=dataset.id).update(update_params)
                    db.session.commit()
                    click.echo(click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green"))
                except Exception as e:
                    click.echo(
                        click.style("clean dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red")
                    )
    end_at = time.perf_counter()
    click.echo(click.style("Cleaned unused dataset from db success latency: {}".format(end_at - start_at), fg="green"))
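

if __name__ == "__main__":
    # A minimal local-testing sketch, not part of the original module: Celery
    # task objects are plain callables, so the cleanup can be invoked
    # synchronously. This assumes the surrounding app's database and
    # configuration are available in the current environment; in production
    # the task is dispatched to the "dataset" queue by a Celery worker.
    clean_unused_message_task()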