dify/api/tasks/update_segment_index_task.py
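
"""Celery task that rebuilds the vector and keyword indexes for a single
document segment after its content has been updated."""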

import datetime
import logging
import time
from typing import List, Optional

import click
from celery import shared_task
from langchain.schema import Document
from werkzeug.exceptions import NotFound

from core.index.index import IndexBuilder
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import DocumentSegment


@shared_task(queue='dataset')
def update_segment_index_task(segment_id: str, keywords: Optional[List[str]] = None):
"""
Async update segment index
2023-05-15 08:51:32 +08:00
:param segment_id:
:param keywords:
Usage: update_segment_index_task.delay(segment_id)
2023-05-15 08:51:32 +08:00
"""
    logging.info(click.style('Start update segment index: {}'.format(segment_id), fg='green'))
    start_at = time.perf_counter()

    segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first()
    if not segment:
        raise NotFound('Segment not found')

    # only proceed if the segment is still marked as updating
    if segment.status != 'updating':
        return

    # Redis flag marking this segment as indexing; cleared in the finally block
    indexing_cache_key = 'segment_{}_indexing'.format(segment.id)
    try:
        dataset = segment.dataset
        if not dataset:
            logging.info(click.style('Segment {} has no dataset, pass.'.format(segment.id), fg='cyan'))
            return

        dataset_document = segment.document
        if not dataset_document:
            logging.info(click.style('Segment {} has no document, pass.'.format(segment.id), fg='cyan'))
            return

        if not dataset_document.enabled or dataset_document.archived \
                or dataset_document.indexing_status != 'completed':
            logging.info(click.style('Segment {} document status is invalid, pass.'.format(segment.id), fg='cyan'))
            return
        # update segment status to indexing
        update_params = {
            DocumentSegment.status: "indexing",
            DocumentSegment.indexing_at: datetime.datetime.utcnow()
        }
        DocumentSegment.query.filter_by(id=segment.id).update(update_params)
        db.session.commit()

        vector_index = IndexBuilder.get_index(dataset, 'high_quality')
        kw_index = IndexBuilder.get_index(dataset, 'economy')

        # delete the stale entry from the vector index
        # (may be None for economy-mode datasets, hence the guard)
        if vector_index:
            vector_index.delete_by_ids([segment.index_node_id])

        # delete the stale entry from the keyword index
        kw_index.delete_by_ids([segment.index_node_id])

        # build the replacement document to index
        document = Document(
            page_content=segment.content,
            metadata={
                "doc_id": segment.index_node_id,
                "doc_hash": segment.index_node_hash,
                "document_id": segment.document_id,
                "dataset_id": segment.dataset_id,
            }
        )
        # save vector index, reusing the handle fetched above
        if vector_index:
            vector_index.add_texts([document], duplicate_check=True)

        # save keyword index, preferring caller-supplied keywords when given
        if keywords:
            kw_index.create_segment_keywords(segment.index_node_id, keywords)
        else:
            kw_index.add_texts([document])
        # update segment to completed
        update_params = {
            DocumentSegment.status: "completed",
            DocumentSegment.completed_at: datetime.datetime.utcnow()
        }
        DocumentSegment.query.filter_by(id=segment.id).update(update_params)
        db.session.commit()

        end_at = time.perf_counter()
        logging.info(
            click.style('Segment update index: {} latency: {}'.format(segment.id, end_at - start_at), fg='green'))
    except Exception as e:
        logging.exception("update segment index failed")
        # disable the segment and record the error so it is skipped at query time
        segment.enabled = False
        segment.disabled_at = datetime.datetime.utcnow()
        segment.status = 'error'
        segment.error = str(e)
        db.session.commit()
    finally:
        # always clear the indexing flag, even on failure
        redis_client.delete(indexing_cache_key)
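

# Illustrative caller-side sketch (an assumption, not shown in this module):
# the code that accepts a segment edit is expected to set the segment status
# to 'updating' and mark it as indexing in Redis before dispatching this task.
# The setex TTL and the keyword values below are made up for the example.
#
#     indexing_cache_key = 'segment_{}_indexing'.format(segment.id)
#     redis_client.setex(indexing_cache_key, 600, 1)
#     update_segment_index_task.delay(segment.id, keywords=['pricing', 'refund'])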