import datetime
import logging
import time

import click
from celery import shared_task
from core.index.index import IndexBuilder
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from langchain.schema import Document
from models.dataset import DocumentSegment
from werkzeug.exceptions import NotFound


@shared_task(queue='dataset')
def enable_segment_to_index_task(segment_id: str):
    """
    Asynchronously enable a document segment and add it to its dataset's vector and keyword indexes.

    :param segment_id: ID of the DocumentSegment to enable

    Usage: enable_segment_to_index_task.delay(segment_id)
    """
    logging.info(click.style('Start enable segment to index: {}'.format(segment_id), fg='green'))
    start_at = time.perf_counter()

    segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first()
    if not segment:
        raise NotFound('Segment not found')

    if segment.status != 'completed':
        raise NotFound('Segment is not completed, enable action is not allowed.')

    # Redis key that flags this segment as "indexing"; it is released in the finally block below.
    indexing_cache_key = 'segment_{}_indexing'.format(segment.id)

    try:
        document = Document(
            page_content=segment.content,
            metadata={
                "doc_id": segment.index_node_id,
                "doc_hash": segment.index_node_hash,
                "document_id": segment.document_id,
                "dataset_id": segment.dataset_id,
            }
        )

        dataset = segment.dataset

        if not dataset:
            logging.info(click.style('Segment {} has no dataset, pass.'.format(segment.id), fg='cyan'))
            return

        dataset_document = segment.document

        if not dataset_document:
            logging.info(click.style('Segment {} has no document, pass.'.format(segment.id), fg='cyan'))
            return

        if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != 'completed':
            logging.info(click.style('Segment {} document status is invalid, pass.'.format(segment.id), fg='cyan'))
            return

        # save vector index
        index = IndexBuilder.get_index(dataset, 'high_quality')
        if index:
            index.add_texts([document], duplicate_check=True)

        # save keyword index
        index = IndexBuilder.get_index(dataset, 'economy')
        if index:
            index.add_texts([document])

        end_at = time.perf_counter()
        logging.info(click.style('Segment enabled to index: {} latency: {}'.format(segment.id, end_at - start_at), fg='green'))
    except Exception as e:
        logging.exception("enable segment to index failed")
        # mark the segment as failed so the error is recorded instead of silently dropped
        segment.enabled = False
        segment.disabled_at = datetime.datetime.utcnow()
        segment.status = 'error'
        segment.error = str(e)
        db.session.commit()
    finally:
        # always release the "indexing" flag for this segment
        redis_client.delete(indexing_cache_key)
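# Example usage (a sketch, not necessarily how the production caller behaves): the
# dispatching code would typically mark the segment as "indexing" in Redis using the
# same key format as indexing_cache_key above, then enqueue this task on the
# 'dataset' queue. The 600-second TTL below is an arbitrary value for illustration.
#
#   indexing_cache_key = 'segment_{}_indexing'.format(segment.id)
#   redis_client.setex(indexing_cache_key, 600, 1)
#   enable_segment_to_index_task.delay(segment.id)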