import datetime
import logging
import time
from typing import List, Optional

import click
from celery import shared_task
from langchain.schema import Document
from werkzeug.exceptions import NotFound

from core.index.index import IndexBuilder
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import DocumentSegment


@shared_task(queue='dataset')
def update_segment_index_task(segment_id: str, keywords: Optional[List[str]] = None):
    """
    Async update segment index
    :param segment_id: ID of the DocumentSegment to re-index
    :param keywords: optional keywords to store in the keyword index instead of
        extracting them from the segment content

    Usage: update_segment_index_task.delay(segment_id)
    """
    logging.info(click.style('Start update segment index: {}'.format(segment_id), fg='green'))
    start_at = time.perf_counter()

    segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first()
    if not segment:
        raise NotFound('Segment not found')

    if segment.status != 'updating':
        return

    indexing_cache_key = 'segment_{}_indexing'.format(segment.id)

    try:
        dataset = segment.dataset

        if not dataset:
            logging.info(click.style('Segment {} has no dataset, pass.'.format(segment.id), fg='cyan'))
            return

        dataset_document = segment.document

        if not dataset_document:
            logging.info(click.style('Segment {} has no document, pass.'.format(segment.id), fg='cyan'))
            return

        if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != 'completed':
            logging.info(click.style('Segment {} document status is invalid, pass.'.format(segment.id), fg='cyan'))
            return

        # update segment status to indexing
        update_params = {
            DocumentSegment.status: "indexing",
            DocumentSegment.indexing_at: datetime.datetime.utcnow()
        }
        DocumentSegment.query.filter_by(id=segment.id).update(update_params)
        db.session.commit()

        vector_index = IndexBuilder.get_index(dataset, 'high_quality')
        kw_index = IndexBuilder.get_index(dataset, 'economy')

        # delete the stale entry from the vector index
        if vector_index:
            vector_index.delete_by_ids([segment.index_node_id])

        # delete the stale entry from the keyword index
        if kw_index:
            kw_index.delete_by_ids([segment.index_node_id])

        # build the replacement document for the new index entries
        document = Document(
            page_content=segment.content,
            metadata={
                "doc_id": segment.index_node_id,
                "doc_hash": segment.index_node_hash,
                "document_id": segment.document_id,
                "dataset_id": segment.dataset_id,
            }
        )

        # save vector index
        if vector_index:
            vector_index.add_texts([document], duplicate_check=True)

        # save keyword index; prefer caller-supplied keywords when given
        if kw_index:
            if keywords:
                kw_index.create_segment_keywords(segment.index_node_id, keywords)
            else:
                kw_index.add_texts([document])

        # update segment to completed
        update_params = {
            DocumentSegment.status: "completed",
            DocumentSegment.completed_at: datetime.datetime.utcnow()
        }
        DocumentSegment.query.filter_by(id=segment.id).update(update_params)
        db.session.commit()

        end_at = time.perf_counter()
        logging.info(
            click.style('Segment update index: {} latency: {}'.format(segment.id, end_at - start_at), fg='green'))
    except Exception as e:
        logging.exception("update segment index failed")
        # mark the segment as failed and disable it so broken content is not served
        segment.enabled = False
        segment.disabled_at = datetime.datetime.utcnow()
        segment.status = 'error'
        segment.error = str(e)
        db.session.commit()
    finally:
        # always release the "indexing in progress" flag
        redis_client.delete(indexing_cache_key)
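
# Usage sketch (illustrative only): how a caller might enqueue this task after
# editing a segment. The import path and the caller-side Redis guard are
# assumptions about the surrounding application, not something this module
# defines.
#
#     from extensions.ext_database import db
#     from extensions.ext_redis import redis_client
#     from tasks.update_segment_index_task import update_segment_index_task  # path is an assumption
#
#     segment.status = 'updating'
#     db.session.commit()
#     # set the same cache key this task clears in its finally block, so other
#     # code can tell the segment is being re-indexed
#     redis_client.setnx('segment_{}_indexing'.format(segment.id), 1)
#     update_segment_index_task.delay(segment.id, keywords=['invoice', 'refund'])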