chore: bump ruff to 0.11.0 and fix linting violations (#15953)
Commit 9e782d4c1e (parent: 98a4b3e78b)
@@ -151,7 +151,7 @@ class BaseAppGenerator:
     def gen():
         for message in generator:
-            if isinstance(message, (Mapping, dict)):
+            if isinstance(message, Mapping | dict):
                 yield f"data: {json.dumps(message)}\n\n"
             else:
                 yield f"event: {message}\n\n"
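Note: this is ruff's pyupgrade rewrite (rule UP038, if I recall the code correctly). On Python 3.10+, isinstance() accepts a PEP 604 union instead of a tuple of types. A minimal standalone check (the sample value is hypothetical):

import json
from collections.abc import Mapping

message = {"event": "ping"}  # hypothetical sample value

# Old spelling flagged by the linter: tuple of types.
assert isinstance(message, (Mapping, dict))

# New spelling: PEP 604 union, accepted by isinstance() since Python 3.10.
assert isinstance(message, Mapping | dict)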
@@ -15,7 +15,6 @@ from ..enums import SystemVariableKey

 VariableValue = Union[str, int, float, dict, list, File]

-
 VARIABLE_PATTERN = re.compile(r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10})#\}\}")


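For context, VARIABLE_PATTERN captures selectors of the form {{#node.field#}}: a head segment of up to 50 word characters plus one to ten dotted parts. A quick illustration with a made-up template string:

import re

VARIABLE_PATTERN = re.compile(r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10})#\}\}")

# Hypothetical template text; each selector needs a head part plus dotted segments.
text = "Q: {{#sys.query#}} A: {{#1719983771496.text#}}"
print(VARIABLE_PATTERN.findall(text))  # ['sys.query', '1719983771496.text']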
@@ -131,7 +130,7 @@ class VariablePool(BaseModel):
         if attr not in {item.value for item in FileAttribute}:
             return None
         value = self.get(selector)
-        if not isinstance(value, (FileSegment, NoneSegment)):
+        if not isinstance(value, FileSegment | NoneSegment):
             return None
         if isinstance(value, FileSegment):
             attr = FileAttribute(attr)
@@ -1,6 +1,6 @@
 import logging
 from collections.abc import Generator, Mapping, Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Any, cast

 from configs import dify_config
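datetime.UTC was added in Python 3.11 as an alias for datetime.timezone.utc, which is what this pyupgrade fix (UP017, as I understand the rule) targets. A quick equivalence check:

from datetime import UTC, datetime, timezone

assert UTC is timezone.utc  # UTC is just an alias (Python 3.11+)
now = datetime.now(UTC)     # timezone-aware UTC timestamp
print(now.tzinfo)           # UTC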
@@ -80,7 +80,7 @@ class LoopNode(BaseNode[LoopNodeData]):
             thread_pool_id=self.thread_pool_id,
         )

-        start_at = datetime.now(timezone.utc).replace(tzinfo=None)
+        start_at = datetime.now(UTC).replace(tzinfo=None)
         condition_processor = ConditionProcessor()

         # Start Loop event
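The .replace(tzinfo=None) idiom recurs in every hunk below: it drops the tzinfo so the value is stored as a naive datetime carrying UTC wall-clock time, presumably to match naive database columns. A sketch of the before and after forms, which produce the same kind of value:

import datetime

old = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
new = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)

# Both are naive datetimes holding the current UTC wall-clock time.
assert old.tzinfo is None and new.tzinfo is None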
api/poetry.lock (generated): 572 lines changed; file diff suppressed because it is too large.
@@ -175,4 +175,4 @@ types-tqdm = "~4.67.0.20241221"
 optional = true
 [tool.poetry.group.lint.dependencies]
 dotenv-linter = "~0.5.0"
-ruff = "~0.9.9"
+ruff = "~0.11.0"
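After bumping the pin, re-running the linter reproduces the fixes in this commit. A sketch of driving that from Python for consistency with the rest of the examples (normally you would just run the ruff CLI in a shell; check --fix and format are standard ruff subcommands):

import subprocess

# Apply safe autofixes, e.g. the isinstance and datetime.UTC rewrites above.
subprocess.run(["ruff", "check", "--fix", "."], check=True)
subprocess.run(["ruff", "format", "."], check=True)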
@@ -949,7 +949,7 @@ class DocumentService:
             ).first()
             if document:
                 document.dataset_process_rule_id = dataset_process_rule.id  # type: ignore
-                document.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 document.created_from = created_from
                 document.doc_form = knowledge_config.doc_form
                 document.doc_language = knowledge_config.doc_language
@@ -1916,7 +1916,7 @@ class SegmentService:
                 if cache_result is not None:
                     continue
                 segment.enabled = False
-                segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                 segment.disabled_by = current_user.id
                 db.session.add(segment)
                 real_deal_segmment_ids.append(segment.id)
@@ -2008,7 +2008,7 @@ class SegmentService:
                     child_chunk.content = child_chunk_update_args.content
                     child_chunk.word_count = len(child_chunk.content)
                     child_chunk.updated_by = current_user.id
-                    child_chunk.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+                    child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
                     child_chunk.type = "customized"
                     update_child_chunks.append(child_chunk)
                 else:
@@ -2065,7 +2065,7 @@ class SegmentService:
             child_chunk.content = content
             child_chunk.word_count = len(content)
             child_chunk.updated_by = current_user.id
-            child_chunk.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             child_chunk.type = "customized"
             db.session.add(child_chunk)
             VectorService.update_child_chunk_vector([], [child_chunk], [], dataset)
@@ -51,7 +51,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
         if document:
             document.indexing_status = "error"
             document.error = str(e)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
         return
@@ -80,7 +80,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
             db.session.commit()

             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             documents.append(document)
             db.session.add(document)
         db.session.commit()
@@ -99,7 +99,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
             {
                 "error": str(e),
                 "status": "error",
-                "disabled_at": datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+                "disabled_at": datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
                 "enabled": False,
             }
         )
@@ -48,7 +48,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
         if document:
             document.indexing_status = "error"
             document.error = str(e)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
         redis_client.delete(retry_indexing_cache_key)
@@ -76,7 +76,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
             db.session.commit()

             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()

@@ -86,7 +86,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
         except Exception as ex:
             document.indexing_status = "error"
             document.error = str(ex)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
             logging.info(click.style(str(ex), fg="yellow"))
@@ -46,7 +46,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         if document:
             document.indexing_status = "error"
             document.error = str(e)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
         redis_client.delete(sync_indexing_cache_key)
@@ -72,7 +72,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
             db.session.commit()

             document.indexing_status = "parsing"
-            document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()

@@ -82,7 +82,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
         except Exception as ex:
             document.indexing_status = "error"
             document.error = str(ex)
-            document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+            document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
             db.session.add(document)
             db.session.commit()
             logging.info(click.style(str(ex), fg="yellow"))