fix parent-child retrieval count
parent 1bbef1f028
commit 2fac7785f3
@@ -770,9 +770,9 @@ class DatasetRetrieval:
         user_id: str,
         metadata_filtering_mode: str,
         metadata_model_config: ModelConfig,
-        metadata_filtering_conditions: MetadataFilteringCondition,
+        metadata_filtering_conditions: Optional[MetadataFilteringCondition],
         inputs: dict,
-    ) -> dict[str, list[str]]:
+    ) -> Optional[dict[str, list[str]]]:
         document_query = db.session.query(Document).filter(
             Document.dataset_id.in_(dataset_ids),
             Document.indexing_status == "completed",
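With metadata_filtering_conditions and the return value both Optional now, callers must distinguish "no filter was built" from "a filter was built but matched nothing". A minimal sketch of that guard with hypothetical names (restrict_to_filtered is not part of the diff):

    from typing import Optional

    def restrict_to_filtered(
        candidate_doc_ids: list[str],
        filtered: Optional[dict[str, list[str]]],
        dataset_id: str,
    ) -> list[str]:
        # Hypothetical caller: None means "no metadata filter was produced",
        # so retrieval keeps every candidate instead of receiving an empty map.
        if filtered is None:
            return candidate_doc_ids
        allowed = set(filtered.get(dataset_id, []))
        return [doc_id for doc_id in candidate_doc_ids if doc_id in allowed]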
@@ -899,7 +899,7 @@ class DatasetRetrieval:
            case "≥" | ">=":
                query = query.filter(Document.doc_metadata[metadata_name] >= value)
            case _:
                pass

    def _fetch_model_config(
        self, tenant_id: str, model: ModelConfig
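The wildcard case means unrecognized operators silently leave the query untouched. A standalone sketch of the same match-based dispatch on the operator string (compare is a made-up name, and the "≤"/"<=" arm is an assumption added only to show the pattern):

    from typing import Any

    def compare(op: str, left: Any, right: Any) -> bool:
        # Unknown operators fall through to the wildcard and filter nothing out.
        match op:
            case "≥" | ">=":
                return left >= right
            case "≤" | "<=":
                return left <= right
            case _:
                return True

    assert compare(">=", 3, 2)
    assert compare("unknown-op", 1, 2)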
@@ -30,7 +30,6 @@ from core.workflow.nodes.knowledge_retrieval.template_prompts import (
     METADATA_FILTER_USER_PROMPT_1,
     METADATA_FILTER_USER_PROMPT_3,
 )
-from core.workflow.nodes.list_operator.exc import InvalidConditionError
 from core.workflow.nodes.llm.entities import LLMNodeChatModelMessage, LLMNodeCompletionModelPromptTemplate
 from core.workflow.nodes.llm.node import LLMNode
 from core.workflow.nodes.question_classifier.template_prompts import QUESTION_CLASSIFIER_USER_PROMPT_2
@@ -203,7 +203,7 @@ class Dataset(db.Model): # type: ignore[name-defined]
     def doc_metadata(self):
         dataset_metadatas = db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id == self.id).all()

-        return [
+        doc_metadata = [
             {
                 "id": dataset_metadata.id,
                 "name": dataset_metadata.name,
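Binding the comprehension result to doc_metadata instead of returning it directly is what allows the next hunk to append built-in entries before the property returns. A minimal standalone sketch of that shape, with a hypothetical row type in place of the ORM model:

    from dataclasses import dataclass

    @dataclass
    class MetadataRow:
        # Hypothetical stand-in for a DatasetMetadata ORM row.
        id: str
        name: str
        type: str

    def to_doc_metadata(rows: list[MetadataRow]) -> list[dict]:
        doc_metadata = [
            {"id": row.id, "name": row.name, "type": row.type}
            for row in rows
        ]
        # Built-in entries can be appended here before returning.
        return doc_metadata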
@@ -211,6 +211,43 @@ class Dataset(db.Model): # type: ignore[name-defined]
             }
             for dataset_metadata in dataset_metadatas
         ]
+        if self.built_in_field_enabled:
+            doc_metadata.append(
+                {
+                    "id": "built-in",
+                    "name": BuiltInField.document_name,
+                    "type": "string",
+                }
+            )
+            doc_metadata.append(
+                {
+                    "id": "built-in",
+                    "name": BuiltInField.uploader,
+                    "type": "string",
+                }
+            )
+            doc_metadata.append(
+                {
+                    "id": "built-in",
+                    "name": BuiltInField.upload_date,
+                    "type": "date",
+                }
+            )
+            doc_metadata.append(
+                {
+                    "id": "built-in",
+                    "name": BuiltInField.last_update_date,
+                    "type": "date",
+                }
+            )
+            doc_metadata.append(
+                {
+                    "id": "built-in",
+                    "name": BuiltInField.source,
+                    "type": "string",
+                }
+            )
+        return doc_metadata

     @staticmethod
     def gen_collection_name_by_id(dataset_id: str) -> str:
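The five append calls share one shape, so the same built-in entries can be produced from a (field, type) table. A sketch only, assuming BuiltInField is a string enum whose member values mirror its names (the real model defines it elsewhere):

    from enum import Enum

    class BuiltInField(str, Enum):
        # Assumed stand-in for the model's enum; the values are guesses.
        document_name = "document_name"
        uploader = "uploader"
        upload_date = "upload_date"
        last_update_date = "last_update_date"
        source = "source"

    BUILT_IN_FIELD_TYPES = [
        (BuiltInField.document_name, "string"),
        (BuiltInField.uploader, "string"),
        (BuiltInField.upload_date, "date"),
        (BuiltInField.last_update_date, "date"),
        (BuiltInField.source, "string"),
    ]

    built_in_metadata = [
        {"id": "built-in", "name": field, "type": field_type}
        for field, field_type in BUILT_IN_FIELD_TYPES
    ]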
@@ -31,8 +31,7 @@ class MetadataService:
         return metadata

     @staticmethod
-    def \
-        update_metadata_name(dataset_id: str, metadata_id: str, name: str) -> DatasetMetadata:
+    def update_metadata_name(dataset_id: str, metadata_id: str, name: str) -> DatasetMetadata:
         lock_key = f"dataset_metadata_lock_{dataset_id}"
         try:
             MetadataService.knowledge_base_metadata_lock_check(dataset_id, None)
@@ -61,7 +60,7 @@ class MetadataService:
             logging.exception("Update metadata name failed")
         finally:
             redis_client.delete(lock_key)

     @staticmethod
     def delete_metadata(dataset_id: str, metadata_id: str):
         lock_key = f"dataset_metadata_lock_{dataset_id}"
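Both update_metadata_name and delete_metadata follow the same discipline: a per-dataset Redis key is taken before the change, failures are logged with the full traceback, and the key is always released in finally. A condensed sketch under assumptions (the nx/ex acquisition and the 30-second TTL are guesses; the service's real check lives in knowledge_base_metadata_lock_check):

    import logging
    import redis

    logger = logging.getLogger(__name__)
    redis_client = redis.Redis()

    def update_with_metadata_lock(dataset_id: str, update) -> None:
        # One Redis key per dataset guards concurrent metadata changes.
        lock_key = f"dataset_metadata_lock_{dataset_id}"
        if not redis_client.set(lock_key, 1, nx=True, ex=30):
            raise RuntimeError("dataset metadata is locked by another operation")
        try:
            update()
        except Exception:
            # Mirrors logging.exception("Update metadata name failed") above.
            logger.exception("Update metadata name failed")
        finally:
            redis_client.delete(lock_key)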
@@ -215,7 +214,9 @@ class MetadataService:
                     "id": item.get("id"),
                     "name": item.get("name"),
                     "type": item.get("type"),
-                    "count": DatasetMetadataBinding.query.filter_by(metadata_id=item.get("id"), dataset_id=dataset.id).count(),
+                    "count": DatasetMetadataBinding.query.filter_by(
+                        metadata_id=item.get("id"), dataset_id=dataset.id
+                    ).count(),
                 }
                 for item in dataset.doc_metadata or []
             ],
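This is the count the commit title refers to: each metadata field's usage count is read from DatasetMetadataBinding rows scoped to both the metadata id and the dataset. A standalone sketch of the same count in 2.0-style SQLAlchemy, with a minimal stand-in model that has only the columns the query uses:

    from sqlalchemy import String, func, select
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

    class Base(DeclarativeBase):
        pass

    # Minimal stand-in for the real model; only the two columns used by the count.
    class DatasetMetadataBinding(Base):
        __tablename__ = "dataset_metadata_bindings"
        id: Mapped[int] = mapped_column(primary_key=True)
        metadata_id: Mapped[str] = mapped_column(String)
        dataset_id: Mapped[str] = mapped_column(String)

    def binding_count(session: Session, metadata_id: str, dataset_id: str) -> int:
        # Same count as query.filter_by(...).count() in the diff, written as a select().
        stmt = (
            select(func.count())
            .select_from(DatasetMetadataBinding)
            .where(
                DatasetMetadataBinding.metadata_id == metadata_id,
                DatasetMetadataBinding.dataset_id == dataset_id,
            )
        )
        return session.execute(stmt).scalar_one()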