dify/api/tasks/external_document_indexing_task.py

import datetime
import json
import logging
import time

import click
from celery import shared_task

from configs import dify_config
from core.indexing_runner import DocumentIsPausedException, IndexingRunner
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.dataset import Dataset, Document, ExternalApiTemplates
from models.model import UploadFile
from services.external_knowledge_service import ExternalDatasetService
from services.feature_service import FeatureService


@shared_task(queue='dataset')
def external_document_indexing_task(dataset_id: str, api_template_id: str, data_source: dict, process_parameter: dict):
"""
Async process document
:param dataset_id:
:param api_template_id:
:param data_source:
:param process_parameter:
Usage: external_document_indexing_task.delay(dataset_id, document_id)
"""
    start_at = time.perf_counter()

    dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
    if not dataset:
        logging.info(
            click.style('Processed external dataset: {} failed, dataset does not exist.'.format(dataset_id), fg='red'))
        return
    # get external api template
    api_template = db.session.query(ExternalApiTemplates).filter(
        ExternalApiTemplates.id == api_template_id,
        ExternalApiTemplates.tenant_id == dataset.tenant_id
    ).first()
    if not api_template:
        logging.info(
            click.style('Processed external dataset: {} failed, api template: {} does not exist.'.format(
                dataset_id, api_template_id), fg='red'))
        return

    files = {}
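    # For "upload_file" sources, pre-load each file's raw bytes from storage and key them by
    # file id as (filename, content, mime_type) tuples, the shape commonly used for multipart
    # uploads (presumably forwarded to the external API by ExternalDatasetService.process_external_api).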
    if data_source["type"] == "upload_file":
        upload_file_list = data_source["info_list"]['file_info_list']['file_ids']
        for file_id in upload_file_list:
            file = db.session.query(UploadFile).filter(
                UploadFile.tenant_id == dataset.tenant_id,
                UploadFile.id == file_id
            ).first()
            if file:
                files[file.id] = (file.name, storage.load_once(file.key), file.mime_type)

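    # api_template.settings is persisted as a JSON string, so it is decoded with json.loads
    # before being turned into a settings object for the request below.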
    try:
        settings = ExternalDatasetService.get_api_template_settings(json.loads(api_template.settings))
        # assemble headers
        headers = ExternalDatasetService.assembling_headers(settings.authorization, settings.headers)

        # do http request
        response = ExternalDatasetService.process_external_api(settings, headers, process_parameter, files)
        if response.status_code != 200:
            logging.info(
                click.style('Processed external dataset: {} failed, status code: {}'.format(
                    dataset.id, response.status_code), fg='red'))
            return

        end_at = time.perf_counter()
        logging.info(
            click.style('Processed external dataset: {} successfully, latency: {}'.format(
                dataset.id, end_at - start_at), fg='green'))
    except DocumentIsPausedException as ex:
        logging.info(click.style(str(ex), fg='yellow'))
    except Exception:
        logging.exception('Processed external dataset: {} failed'.format(dataset_id))
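
# A minimal dispatch sketch (illustrative names; the process_parameter schema is defined by the
# caller and forwarded unchanged to the external API):
#
#     external_document_indexing_task.delay(
#         dataset_id=dataset.id,
#         api_template_id=api_template.id,
#         data_source={
#             "type": "upload_file",
#             "info_list": {"file_info_list": {"file_ids": [upload_file.id]}},
#         },
#         process_parameter={},
#     )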