fix: refactor all 'or []' and 'or {}' logic to make code more clear (#10883)

Signed-off-by: yihong0618 <zouzou0208@gmail.com>
yihong 2024-11-21 10:34:43 +08:00 committed by GitHub
parent ec9f6220c9
commit 0067b16d1e
11 changed files with 20 additions and 26 deletions
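The change is mechanical but worth spelling out: where a value is now guaranteed to be a list or dict, the `or []` / `or {}` fallback at the call site is dropped; where None is still possible, an explicit conditional states the default instead of relying on truthiness. A minimal sketch of the two cases (illustrative names only, not code from this repository):

# Case 1: the producer already guarantees a list, so callers use it directly.
def get_runtime_parameters() -> list[str]:
    return []  # never None

parameters = get_runtime_parameters()      # no "or []" needed at the call site

# Case 2: None is still possible, so the default is written out explicitly once.
features = None
features = features if features else []    # rather than "features or []"
assert parameters == features == []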

View File

@@ -114,16 +114,9 @@ class BaseAgentRunner(AppRunner):
         # check if model supports stream tool call
         llm_model = cast(LargeLanguageModel, model_instance.model_type_instance)
         model_schema = llm_model.get_model_schema(model_instance.model, model_instance.credentials)
-        if model_schema and ModelFeature.STREAM_TOOL_CALL in (model_schema.features or []):
-            self.stream_tool_call = True
-        else:
-            self.stream_tool_call = False
-        # check if model supports vision
-        if model_schema and ModelFeature.VISION in (model_schema.features or []):
-            self.files = application_generate_entity.files
-        else:
-            self.files = []
+        features = model_schema.features if model_schema and model_schema.features else []
+        self.stream_tool_call = ModelFeature.STREAM_TOOL_CALL in features
+        self.files = application_generate_entity.files if ModelFeature.VISION in features else []
         self.query = None
         self._current_thoughts: list[PromptMessage] = []
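The two if/else blocks collapse into a single normalized `features` list, after which both checks become plain membership tests. A runnable sketch of the same shape, with simplified stand-ins for the model schema types (these are not the real classes):

# Sketch of the consolidation above, with simplified stand-in types.
from dataclasses import dataclass
from enum import Enum


class ModelFeature(Enum):
    STREAM_TOOL_CALL = "stream-tool-call"
    VISION = "vision"


@dataclass
class FakeSchema:
    features: list[ModelFeature] | None = None


def resolve(model_schema: FakeSchema | None, files: list[str]) -> tuple[bool, list[str]]:
    # One normalization covers both "no schema" and "features is None",
    # so the two feature checks stay simple membership tests.
    features = model_schema.features if model_schema and model_schema.features else []
    stream_tool_call = ModelFeature.STREAM_TOOL_CALL in features
    kept_files = files if ModelFeature.VISION in features else []
    return stream_tool_call, kept_files


assert resolve(None, ["img.png"]) == (False, [])
assert resolve(FakeSchema([ModelFeature.VISION]), ["img.png"]) == (False, ["img.png"])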
@@ -250,7 +243,7 @@ class BaseAgentRunner(AppRunner):
         update prompt message tool
         """
         # try to get tool runtime parameters
-        tool_runtime_parameters = tool.get_runtime_parameters() or []
+        tool_runtime_parameters = tool.get_runtime_parameters()
         for parameter in tool_runtime_parameters:
             if parameter.form != ToolParameter.ToolParameterForm.LLM:
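Several call sites in this commit stop writing `tool.get_runtime_parameters() or []`, which only stays safe if the method itself never returns None. A sketch of that contract with a hypothetical class (not the real `Tool`):

# Normalize once at the source, instead of "or []" at every caller.
class SketchTool:
    def __init__(self, runtime_parameters: list[str] | None = None):
        self._runtime_parameters = runtime_parameters

    def get_runtime_parameters(self) -> list[str]:
        # The contract: always a list, never None.
        return self._runtime_parameters or []


for parameter in SketchTool().get_runtime_parameters():
    print(parameter)  # safe even when nothing was configured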

View File

@@ -381,7 +381,7 @@ class WorkflowCycleManage:
                 id=workflow_run.id,
                 workflow_id=workflow_run.workflow_id,
                 sequence_number=workflow_run.sequence_number,
-                inputs=workflow_run.inputs_dict or {},
+                inputs=workflow_run.inputs_dict,
                 created_at=int(workflow_run.created_at.timestamp()),
             ),
         )
@@ -428,7 +428,7 @@ class WorkflowCycleManage:
                 created_by=created_by,
                 created_at=int(workflow_run.created_at.timestamp()),
                 finished_at=int(workflow_run.finished_at.timestamp()),
-                files=self._fetch_files_from_node_outputs(workflow_run.outputs_dict or {}),
+                files=self._fetch_files_from_node_outputs(workflow_run.outputs_dict),
             ),
         )
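Both call sites now rely on `inputs_dict` / `outputs_dict` already producing a dict. One way that guarantee can look on the model side, sketched with illustrative names rather than the real `WorkflowRun`:

# Sketch: the property deserializes to a dict and falls back to {} itself,
# so callers no longer repeat "or {}".
import json


class SketchWorkflowRun:
    def __init__(self, inputs: str | None = None, outputs: str | None = None):
        self.inputs = inputs
        self.outputs = outputs

    @property
    def inputs_dict(self) -> dict:
        return json.loads(self.inputs) if self.inputs else {}

    @property
    def outputs_dict(self) -> dict:
        return json.loads(self.outputs) if self.outputs else {}


run = SketchWorkflowRun(inputs='{"query": "hi"}')
assert run.inputs_dict == {"query": "hi"}
assert run.outputs_dict == {}  # no second "or {}" needed at the call site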

View File

@@ -691,8 +691,8 @@ class CohereLargeLanguageModel(LargeLanguageModel):
         base_model_schema = cast(AIModelEntity, base_model_schema)
         base_model_schema_features = base_model_schema.features or []
-        base_model_schema_model_properties = base_model_schema.model_properties or {}
-        base_model_schema_parameters_rules = base_model_schema.parameter_rules or []
+        base_model_schema_model_properties = base_model_schema.model_properties
+        base_model_schema_parameters_rules = base_model_schema.parameter_rules
         entity = AIModelEntity(
             model=model,
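Note that `features` keeps its `or []` while `model_properties` and `parameter_rules` lose theirs, which reads as: only `features` may be None on the schema entity. A simplified stand-in (not the real `AIModelEntity`) showing that shape:

# Sketch of a schema where two fields are always populated and one is optional.
from dataclasses import dataclass, field


@dataclass
class SketchModelSchema:
    features: list[str] | None = None                                # may be None -> keep "or []"
    model_properties: dict[str, str] = field(default_factory=dict)   # always a dict
    parameter_rules: list[str] = field(default_factory=list)         # always a list


schema = SketchModelSchema()
features = schema.features or []            # fallback still needed here
model_properties = schema.model_properties  # no "or {}" needed
parameter_rules = schema.parameter_rules    # no "or []" needed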

View File

@@ -1130,8 +1130,8 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel):
         base_model_schema = model_map[base_model]
         base_model_schema_features = base_model_schema.features or []
-        base_model_schema_model_properties = base_model_schema.model_properties or {}
-        base_model_schema_parameters_rules = base_model_schema.parameter_rules or []
+        base_model_schema_model_properties = base_model_schema.model_properties
+        base_model_schema_parameters_rules = base_model_schema.parameter_rules
         entity = AIModelEntity(
             model=model,

View File

@@ -37,13 +37,14 @@ class OpenLLMGenerateMessage:
 class OpenLLMGenerate:
     def generate(
         self,
+        *,
         server_url: str,
         model_name: str,
         stream: bool,
         model_parameters: dict[str, Any],
-        stop: list[str],
+        stop: list[str] | None = None,
         prompt_messages: list[OpenLLMGenerateMessage],
-        user: str,
+        user: str | None = None,
     ) -> Union[Generator[OpenLLMGenerateMessage, None, None], OpenLLMGenerateMessage]:
         if not server_url:
             raise InvalidAuthenticationError("Invalid server URL")
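The added `*,` makes every argument keyword-only, and `stop` / `user` gain None defaults so callers can simply omit them. A self-contained sketch of the calling convention (illustrative body, not the real OpenLLM client):

# Keyword-only arguments with optional stop/user.
from typing import Any


def generate(
    *,
    server_url: str,
    model_name: str,
    stream: bool,
    model_parameters: dict[str, Any],
    stop: list[str] | None = None,
    user: str | None = None,
) -> str:
    stop = stop or []  # normalize once inside; callers may leave it out
    return f"{model_name}@{server_url} stream={stream} stop={stop} user={user}"


# Positional calls now fail fast; callers must name every argument and can
# drop stop/user entirely.
print(generate(server_url="http://localhost:3333", model_name="m",
               stream=False, model_parameters={}))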

View File

@@ -261,7 +261,7 @@ class Tool(BaseModel, ABC):
         """
         parameters = self.parameters or []
         parameters = parameters.copy()
-        user_parameters = self.get_runtime_parameters() or []
+        user_parameters = self.get_runtime_parameters()
         user_parameters = user_parameters.copy()
         # override parameters

View File

@@ -55,7 +55,7 @@ class ToolEngine:
            # check if this tool has only one parameter
            parameters = [
                parameter
-                for parameter in tool.get_runtime_parameters() or []
+                for parameter in tool.get_runtime_parameters()
                if parameter.form == ToolParameter.ToolParameterForm.LLM
            ]
            if parameters and len(parameters) == 1:

View File

@@ -127,7 +127,7 @@ class ToolParameterConfigurationManager(BaseModel):
         # get tool parameters
         tool_parameters = self.tool_runtime.parameters or []
         # get tool runtime parameters
-        runtime_parameters = self.tool_runtime.get_runtime_parameters() or []
+        runtime_parameters = self.tool_runtime.get_runtime_parameters()
         # override parameters
         current_parameters = tool_parameters.copy()
         for runtime_parameter in runtime_parameters:
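The context lines hint at the merge that follows: runtime parameters override the statically declared ones by name. A plausible shape for that step, using plain dicts in place of `ToolParameter` objects:

# Override declared parameters with runtime parameters of the same name.
tool_parameters = [{"name": "query", "form": "llm"}, {"name": "limit", "form": "form"}]
runtime_parameters = [{"name": "limit", "form": "form", "default": 10}]

current_parameters = tool_parameters.copy()
for runtime_parameter in runtime_parameters:
    for i, parameter in enumerate(current_parameters):
        if parameter["name"] == runtime_parameter["name"]:
            current_parameters[i] = runtime_parameter

assert current_parameters[1]["default"] == 10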

View File

@@ -341,7 +341,7 @@ class AppService:
         if not app_model_config:
             return meta
-        agent_config = app_model_config.agent_mode_dict or {}
+        agent_config = app_model_config.agent_mode_dict
         # get all tools
         tools = agent_config.get("tools", [])

View File

@@ -242,7 +242,7 @@ class ToolTransformService:
         # get tool parameters
         parameters = tool.parameters or []
         # get tool runtime parameters
-        runtime_parameters = tool.get_runtime_parameters() or []
+        runtime_parameters = tool.get_runtime_parameters()
         # override parameters
         current_parameters = parameters.copy()
         for runtime_parameter in runtime_parameters:

View File

@@ -51,8 +51,8 @@ class WebsiteService:
         excludes = options.get("excludes").split(",") if options.get("excludes") else []
         params = {
             "crawlerOptions": {
-                "includes": includes or [],
-                "excludes": excludes or [],
+                "includes": includes,
+                "excludes": excludes,
                 "generateImgAltText": True,
                 "limit": options.get("limit", 1),
                 "returnOnlyUrls": False,