# add_segment_to_index_task.py
import datetime
import logging
import time

import click
from celery import shared_task
from langchain.schema import Document
from werkzeug.exceptions import NotFound

from core.index.index import IndexBuilder
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import DocumentSegment
  12. @shared_task
  13. def add_segment_to_index_task(segment_id: str):
  14. """
  15. Async Add segment to index
  16. :param segment_id:
  17. Usage: add_segment_to_index.delay(segment_id)
  18. """
  19. logging.info(click.style('Start add segment to index: {}'.format(segment_id), fg='green'))
  20. start_at = time.perf_counter()
  21. segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first()
  22. if not segment:
  23. raise NotFound('Segment not found')
  24. if segment.status != 'completed':
  25. return
  26. indexing_cache_key = 'segment_{}_indexing'.format(segment.id)
  27. try:
  28. document = Document(
  29. page_content=segment.content,
  30. metadata={
  31. "doc_id": segment.index_node_id,
  32. "doc_hash": segment.index_node_hash,
  33. "document_id": segment.document_id,
  34. "dataset_id": segment.dataset_id,
  35. }
  36. )
  37. dataset = segment.dataset
  38. if not dataset:
  39. logging.info(click.style('Segment {} has no dataset, pass.'.format(segment.id), fg='cyan'))
  40. return
  41. dataset_document = segment.document
  42. if not dataset_document:
  43. logging.info(click.style('Segment {} has no document, pass.'.format(segment.id), fg='cyan'))
  44. return
  45. if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != 'completed':
  46. logging.info(click.style('Segment {} document status is invalid, pass.'.format(segment.id), fg='cyan'))
  47. return
  48. # save vector index
  49. index = IndexBuilder.get_index(dataset, 'high_quality')
  50. if index:
  51. index.add_texts([document], duplicate_check=True)
  52. # save keyword index
  53. index = IndexBuilder.get_index(dataset, 'economy')
  54. if index:
  55. index.add_texts([document])
  56. end_at = time.perf_counter()
  57. logging.info(click.style('Segment added to index: {} latency: {}'.format(segment.id, end_at - start_at), fg='green'))
  58. except Exception as e:
  59. logging.exception("add segment to index failed")
  60. segment.enabled = False
  61. segment.disabled_at = datetime.datetime.utcnow()
  62. segment.status = 'error'
  63. segment.error = str(e)
  64. db.session.commit()
  65. finally:
  66. redis_client.delete(indexing_cache_key)