fix: update datetime usage to use UTC consistently across workflow and task modules
Signed-off-by: -LAN- <laipz8200@outlook.com>
commit c43f388586
parent e796937d02

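For context: Python 3.11 added UTC as a module-level alias in datetime,
so datetime.now(UTC) and datetime.now(timezone.utc) are equivalent. A
minimal before/after sketch of the pattern this commit applies
throughout (start_at is taken from the LoopNode hunk below):

    # Before: pre-3.11 spelling
    from datetime import datetime, timezone
    start_at = datetime.now(timezone.utc).replace(tzinfo=None)

    # After: Python 3.11+ alias; datetime.UTC is timezone.utc
    from datetime import UTC, datetime
    start_at = datetime.now(UTC).replace(tzinfo=None)
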
@@ -1,6 +1,6 @@
 import logging
 from collections.abc import Generator, Mapping, Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Any, cast
 
 from configs import dify_config

@@ -80,7 +80,7 @@ class LoopNode(BaseNode[LoopNodeData]):
             thread_pool_id=self.thread_pool_id,
         )
 
-        start_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        start_at = datetime.now(UTC).replace(tzinfo=None)
         condition_processor = ConditionProcessor()
 
         # Start Loop event

@@ -949,7 +949,7 @@ class DocumentService:
             ).first()
             if document:
                 document.dataset_process_rule_id = dataset_process_rule.id  # type: ignore
-                document.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 document.created_from = created_from
                 document.doc_form = knowledge_config.doc_form
                 document.doc_language = knowledge_config.doc_language

@@ -1916,7 +1916,7 @@ class SegmentService:
                 if cache_result is not None:
                     continue
                 segment.enabled = False
-                segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 segment.disabled_by = current_user.id
                 db.session.add(segment)
                 real_deal_segmment_ids.append(segment.id)

@@ -2008,7 +2008,7 @@ class SegmentService:
                         child_chunk.content = child_chunk_update_args.content
                         child_chunk.word_count = len(child_chunk.content)
                         child_chunk.updated_by = current_user.id
-                        child_chunk.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                        child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                         child_chunk.type = "customized"
                         update_child_chunks.append(child_chunk)
                     else:

@@ -2065,7 +2065,7 @@ class SegmentService:
                 child_chunk.content = content
                 child_chunk.word_count = len(content)
                 child_chunk.updated_by = current_user.id
-                child_chunk.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 child_chunk.type = "customized"
                 db.session.add(child_chunk)
                 VectorService.update_child_chunk_vector([], [child_chunk], [], dataset)

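Every assignment in these hunks follows the same convention: take the
current time in UTC, then strip tzinfo before storing it, presumably
because the models' DateTime columns hold naive UTC values. A sketch of
that pattern as a helper (naive_utc_now is illustrative only, not a
function introduced by this commit):

    import datetime

    def naive_utc_now() -> datetime.datetime:
        # Illustrative helper, not part of this commit: current UTC
        # time with tzinfo dropped, to match naive DateTime columns.
        return datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
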
@@ -51,7 +51,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
         if document:
             document.indexing_status = "error"
             document.error = str(e)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
         return

@@ -80,7 +80,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
                 db.session.commit()
 
             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             documents.append(document)
             db.session.add(document)
         db.session.commit()

@@ -97,7 +97,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
                 {
                     "error": str(e),
                     "status": "error",
-                    "disabled_at": datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                    "disabled_at": datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                     "enabled": False,
                 }
             )

@@ -48,7 +48,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
             if document:
                 document.indexing_status = "error"
                 document.error = str(e)
-                document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 db.session.add(document)
                 db.session.commit()
             redis_client.delete(retry_indexing_cache_key)

@@ -76,7 +76,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
                 db.session.commit()
 
             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
 

@@ -86,7 +86,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
         except Exception as ex:
             document.indexing_status = "error"
             document.error = str(ex)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
             logging.info(click.style(str(ex), fg="yellow"))

@@ -46,7 +46,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         if document:
             document.indexing_status = "error"
             document.error = str(e)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
         redis_client.delete(sync_indexing_cache_key)

@@ -72,7 +72,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         db.session.commit()
 
         document.indexing_status = "parsing"
-        document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.add(document)
         db.session.commit()
 

@@ -82,7 +82,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
     except Exception as ex:
         document.indexing_status = "error"
         document.error = str(ex)
-        document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+        document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
         db.session.add(document)
         db.session.commit()
         logging.info(click.style(str(ex), fg="yellow"))
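
One caveat: datetime.UTC only exists on Python 3.11 and later, so this
change implicitly assumes a 3.11+ interpreter. A guarded-import sketch
(an assumption for older environments, not something this commit does):

    try:
        from datetime import UTC
    except ImportError:  # Python < 3.11
        from datetime import timezone
        UTC = timezone.utc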