Merge branch 'main' into feat/plugins
commit ec6f4ee9df
@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
CURRENT_VERSION: str = Field(
description="Dify version",
default="0.14.1",
default="0.14.2",
)
COMMIT_SHA: str = Field(
@@ -76,7 +76,7 @@ class OAuthCallback(Resource):
try:
token = oauth_provider.get_access_token(code)
user_info = oauth_provider.get_user_info(token)
except requests.exceptions.HTTPError as e:
except requests.exceptions.RequestException as e:
logging.exception(f"An error occurred during the OAuth process with {provider}: {e.response.text}")
return {"error": "OAuth process failed"}, 400
@@ -421,7 +421,11 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel):
# text completion model
response = client.completions.create(
prompt=prompt_messages[0].content, model=model, stream=stream, **model_parameters, **extra_model_kwargs
prompt=prompt_messages[0].content,
model=model,
stream=stream,
**model_parameters,
**extra_model_kwargs,
)
if stream:
@@ -593,6 +597,8 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel):
model_parameters["response_format"] = {"type": "json_schema", "json_schema": schema}
else:
model_parameters["response_format"] = {"type": response_format}
elif "json_schema" in model_parameters:
del model_parameters["json_schema"]
extra_model_kwargs = {}
@@ -360,7 +360,7 @@ class TraceTask:
raise ValueError("Workflow run not found")
db.session.merge(workflow_run)
db.sessoin.refresh(workflow_run)
db.session.refresh(workflow_run)
workflow_id = workflow_run.workflow_id
tenant_id = workflow_run.tenant_id
@@ -1,32 +1,8 @@
from typing import Any
from core.file import FileTransferMethod, FileType
from core.tools.errors import ToolProviderCredentialValidationError
from core.tools.provider.builtin.vectorizer.tools.vectorizer import VectorizerTool
from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
from factories import file_factory
class VectorizerProvider(BuiltinToolProviderController):
def _validate_credentials(self, credentials: dict[str, Any]) -> None:
mapping = {
"transfer_method": FileTransferMethod.TOOL_FILE,
"type": FileType.IMAGE,
"id": "test_id",
"url": "https://cloud.dify.ai/logo/logo-site.png",
}
test_img = file_factory.build_from_mapping(
mapping=mapping,
tenant_id="__test_123",
)
try:
VectorizerTool().fork_tool_runtime(
runtime={
"credentials": credentials,
}
).invoke(
user_id="",
tool_parameters={"mode": "test", "image": test_img},
)
except Exception as e:
raise ToolProviderCredentialValidationError(str(e))
return
@@ -21,6 +21,7 @@ from .variables import (
ArrayNumberVariable,
ArrayObjectVariable,
ArrayStringVariable,
ArrayVariable,
FileVariable,
FloatVariable,
IntegerVariable,
@@ -43,6 +44,7 @@ __all__ = [
"ArraySegment",
"ArrayStringSegment",
"ArrayStringVariable",
"ArrayVariable",
"FileSegment",
"FileVariable",
"FloatSegment",
@@ -10,6 +10,7 @@ from .segments import (
ArrayFileSegment,
ArrayNumberSegment,
ArrayObjectSegment,
ArraySegment,
ArrayStringSegment,
FileSegment,
FloatSegment,
@@ -52,19 +53,23 @@ class ObjectVariable(ObjectSegment, Variable):
pass
class ArrayAnyVariable(ArrayAnySegment, Variable):
class ArrayVariable(ArraySegment, Variable):
pass
class ArrayStringVariable(ArrayStringSegment, Variable):
class ArrayAnyVariable(ArrayAnySegment, ArrayVariable):
pass
class ArrayNumberVariable(ArrayNumberSegment, Variable):
class ArrayStringVariable(ArrayStringSegment, ArrayVariable):
pass
class ArrayObjectVariable(ArrayObjectSegment, Variable):
class ArrayNumberVariable(ArrayNumberSegment, ArrayVariable):
pass
class ArrayObjectVariable(ArrayObjectSegment, ArrayVariable):
pass
@@ -60,7 +60,6 @@ class AnswerStreamProcessor(StreamProcessor):
del self.current_stream_chunk_generating_node_ids[event.route_node_state.node_id]
# remove unreachable nodes
self._remove_unreachable_nodes(event)
# generate stream outputs
@@ -1,3 +1,4 @@
import logging
from abc import ABC, abstractmethod
from collections.abc import Generator
@@ -5,6 +6,8 @@ from core.workflow.entities.variable_pool import VariablePool
from core.workflow.graph_engine.entities.event import GraphEngineEvent, NodeRunSucceededEvent
from core.workflow.graph_engine.entities.graph import Graph
logger = logging.getLogger(__name__)
class StreamProcessor(ABC):
def __init__(self, graph: Graph, variable_pool: VariablePool) -> None:
@@ -31,13 +34,22 @@ class StreamProcessor(ABC):
if run_result.edge_source_handle:
reachable_node_ids = []
unreachable_first_node_ids = []
if finished_node_id not in self.graph.edge_mapping:
logger.warning(f"node {finished_node_id} has no edge mapping")
return
for edge in self.graph.edge_mapping[finished_node_id]:
if (
edge.run_condition
and edge.run_condition.branch_identify
and run_result.edge_source_handle == edge.run_condition.branch_identify
):
reachable_node_ids.extend(self._fetch_node_ids_in_reachable_branch(edge.target_node_id))
# remove unreachable nodes
# FIXME: because of the code branch can combine directly, so for answer node
# we remove the node maybe shortcut the answer node, so comment this code for now
# there is not effect on the answer node and the workflow, when we have a better solution
# we can open this code. Issues: #11542 #9560 #10638 #10564
# reachable_node_ids.extend(self._fetch_node_ids_in_reachable_branch(edge.target_node_id))
continue
else:
unreachable_first_node_ids.append(edge.target_node_id)
@@ -35,4 +35,4 @@ class FailBranchSourceHandle(StrEnum):
CONTINUE_ON_ERROR_NODE_TYPE = [NodeType.LLM, NodeType.CODE, NodeType.TOOL, NodeType.HTTP_REQUEST]
RETRY_ON_ERROR_NODE_TYPE = [NodeType.LLM, NodeType.TOOL, NodeType.HTTP_REQUEST]
RETRY_ON_ERROR_NODE_TYPE = CONTINUE_ON_ERROR_NODE_TYPE
@@ -16,3 +16,7 @@ class InvalidHttpMethodError(HttpRequestNodeError):
class ResponseSizeError(HttpRequestNodeError):
"""Raised when the response size exceeds the allowed threshold."""
class RequestBodyError(HttpRequestNodeError):
"""Raised when the request body is invalid."""
@@ -23,6 +23,7 @@ from .exc import (
FileFetchError,
HttpRequestNodeError,
InvalidHttpMethodError,
RequestBodyError,
ResponseSizeError,
)
@@ -143,13 +144,19 @@ class Executor:
case "none":
self.content = ""
case "raw-text":
if len(data) != 1:
raise RequestBodyError("raw-text body type should have exactly one item")
self.content = self.variable_pool.convert_template(data[0].value).text
case "json":
if len(data) != 1:
raise RequestBodyError("json body type should have exactly one item")
json_string = self.variable_pool.convert_template(data[0].value).text
json_object = json.loads(json_string, strict=False)
self.json = json_object
# self.json = self._parse_object_contains_variables(json_object)
case "binary":
if len(data) != 1:
raise RequestBodyError("binary body type should have exactly one item")
file_selector = data[0].file
file_variable = self.variable_pool.get_file(file_selector)
if file_variable is None:
@@ -317,6 +324,8 @@ class Executor:
elif self.json:
body = json.dumps(self.json)
elif self.node_data.body.type == "raw-text":
if len(self.node_data.body.data) != 1:
raise RequestBodyError("raw-text body type should have exactly one item")
body = self.node_data.body.data[0].value
if body:
raw += f"Content-Length: {len(body)}\r\n"
@@ -20,7 +20,7 @@ from .entities import (
HttpRequestNodeTimeout,
Response,
)
from .exc import HttpRequestNodeError
from .exc import HttpRequestNodeError, RequestBodyError
HTTP_REQUEST_DEFAULT_TIMEOUT = HttpRequestNodeTimeout(
connect=dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT,
@@ -136,9 +136,13 @@ class HttpRequestNode(BaseNode[HttpRequestNodeData]):
data = node_data.body.data
match body_type:
case "binary":
if len(data) != 1:
raise RequestBodyError("invalid body data, should have only one item")
selector = data[0].file
selectors.append(VariableSelector(variable="#" + ".".join(selector) + "#", value_selector=selector))
case "json" | "raw-text":
if len(data) != 1:
raise RequestBodyError("invalid body data, should have only one item")
selectors += variable_template_parser.extract_selectors_from_template(data[0].key)
selectors += variable_template_parser.extract_selectors_from_template(data[0].value)
case "x-www-form-urlencoded":
@@ -9,7 +9,7 @@ from typing import TYPE_CHECKING, Any, Optional, cast
from flask import Flask, current_app
from configs import dify_config
from core.variables import IntegerVariable
from core.variables import ArrayVariable, IntegerVariable, NoneVariable
from core.workflow.entities.node_entities import (
NodeRunMetadataKey,
NodeRunResult,
@@ -75,12 +75,15 @@ class IterationNode(BaseNode[IterationNodeData]):
"""
Run the node.
"""
iterator_list_segment = self.graph_runtime_state.variable_pool.get(self.node_data.iterator_selector)
variable = self.graph_runtime_state.variable_pool.get(self.node_data.iterator_selector)
if not iterator_list_segment:
raise IteratorVariableNotFoundError(f"Iterator variable {self.node_data.iterator_selector} not found")
if not variable:
raise IteratorVariableNotFoundError(f"iterator variable {self.node_data.iterator_selector} not found")
if len(iterator_list_segment.value) == 0:
if not isinstance(variable, ArrayVariable) and not isinstance(variable, NoneVariable):
raise InvalidIteratorValueError(f"invalid iterator value: {variable}, please provide a list.")
if isinstance(variable, NoneVariable) or len(variable.value) == 0:
yield RunCompletedEvent(
run_result=NodeRunResult(
status=WorkflowNodeExecutionStatus.SUCCEEDED,
@@ -89,7 +92,7 @@ class IterationNode(BaseNode[IterationNodeData]):
)
return
iterator_list_value = iterator_list_segment.to_object()
iterator_list_value = variable.to_object()
if not isinstance(iterator_list_value, list):
raise InvalidIteratorValueError(f"Invalid iterator value: {iterator_list_value}, please provide a list.")
@@ -1,10 +1,8 @@
import json
import logging
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any, Optional, cast
from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity
from core.llm_generator.output_parser.errors import OutputParserError
from core.memory.token_buffer_memory import TokenBufferMemory
from core.model_manager import ModelInstance
from core.model_runtime.entities import LLMUsage, ModelPropertyKey, PromptMessageRole
@@ -99,27 +97,28 @@ class QuestionClassifierNode(LLMNode):
jinja2_variables=[],
)
# handle invoke result
generator = self._invoke_llm(
node_data_model=node_data.model,
model_instance=model_instance,
prompt_messages=prompt_messages,
stop=stop,
)
result_text = ""
usage = LLMUsage.empty_usage()
finish_reason = None
for event in generator:
if isinstance(event, ModelInvokeCompletedEvent):
result_text = event.text
usage = event.usage
finish_reason = event.finish_reason
break
category_name = node_data.classes[0].name
category_id = node_data.classes[0].id
try:
# handle invoke result
generator = self._invoke_llm(
node_data_model=node_data.model,
model_instance=model_instance,
prompt_messages=prompt_messages,
stop=stop,
)
for event in generator:
if isinstance(event, ModelInvokeCompletedEvent):
result_text = event.text
usage = event.usage
finish_reason = event.finish_reason
break
category_name = node_data.classes[0].name
category_id = node_data.classes[0].id
result_text_json = parse_and_check_json_markdown(result_text, [])
# result_text_json = json.loads(result_text.strip('```JSON\n'))
if "category_name" in result_text_json and "category_id" in result_text_json:
@@ -130,10 +129,6 @@ class QuestionClassifierNode(LLMNode):
if category_id_result in category_ids:
category_name = classes_map[category_id_result]
category_id = category_id_result
except OutputParserError:
logging.exception(f"Failed to parse result text: {result_text}")
try:
process_data = {
"model_mode": model_config.mode,
"prompts": PromptMessageUtil.prompt_messages_to_prompt_for_saving(
@@ -157,7 +152,7 @@ class QuestionClassifierNode(LLMNode):
},
llm_usage=usage,
)
except Exception as e:
except ValueError as e:
return NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
inputs=variables,
@@ -1,5 +1,6 @@
from collections.abc import Mapping, Sequence
from typing import Any
from uuid import UUID
from sqlalchemy import select
from sqlalchemy.orm import Session
@@ -231,6 +232,10 @@ class ToolNode(BaseNode[ToolNodeData]):
url = str(response.message)
transfer_method = FileTransferMethod.TOOL_FILE
tool_file_id = url.split("/")[-1].split(".")[0]
try:
UUID(tool_file_id)
except ValueError:
raise ToolFileError(f"cannot extract tool file id from url {url}")
with Session(db.engine) as session:
stmt = select(ToolFile).where(ToolFile.id == tool_file_id)
tool_file = session.scalar(stmt)
@@ -27,6 +27,7 @@ def init_app(app: DifyApp):
ignore_errors=[
HTTPException,
ValueError,
FileNotFoundError,
openai.APIStatusError,
InvokeRateLimitError,
parse_error.defaultErrorResponse,
@@ -67,7 +67,9 @@ class AwsS3Storage(BaseStorage):
yield from response["Body"].iter_chunks()
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
raise FileNotFoundError("File not found")
raise FileNotFoundError("file not found")
elif "reached max retries" in str(ex):
raise ValueError("please do not request the same file too frequently")
else:
raise
@@ -99,11 +99,6 @@ class Account(UserMixin, db.Model):
return db.session.query(Account).filter(Account.id == account_integrate.account_id).one_or_none()
return None
def get_integrates(self) -> list[db.Model]:
ai = db.Model
return db.session.query(ai).filter(ai.account_id == self.id).all()
# check current_user.current_tenant.current_role in ['admin', 'owner']
@property
def is_admin_or_owner(self):
return TenantAccountRole.is_privileged_role(self._current_tenant.current_role)
@@ -22,7 +22,7 @@ logger = logging.getLogger(__name__)
IMPORT_INFO_REDIS_KEY_PREFIX = "app_import_info:"
IMPORT_INFO_REDIS_EXPIRY = 180 # 3 minutes
CURRENT_DSL_VERSION = "0.1.4"
CURRENT_DSL_VERSION = "0.1.5"
class ImportMode(StrEnum):
@@ -1,6 +1,7 @@
import os
import requests
import httpx
from tenacity import retry, retry_if_not_exception_type, stop_before_delay, wait_fixed
from extensions.ext_database import db
from models.account import TenantAccountJoin, TenantAccountRole
@@ -39,11 +40,17 @@ class BillingService:
return cls._send_request("GET", "/invoices", params=params)
@classmethod
@retry(
wait=wait_fixed(2),
stop=stop_before_delay(10),
retry=retry_if_not_exception_type(httpx.RequestError),
reraise=True,
)
def _send_request(cls, method, endpoint, json=None, params=None):
headers = {"Content-Type": "application/json", "Billing-Api-Secret-Key": cls.secret_key}
url = f"{cls.base_url}{endpoint}"
response = requests.request(method, url, json=json, params=params, headers=headers)
response = httpx.request(method, url, json=json, params=params, headers=headers)
return response.json()
@@ -488,14 +488,12 @@ def test_run_branch(mock_close, mock_remove):
items = []
generator = graph_engine.run()
for item in generator:
# print(type(item), item)
items.append(item)
assert len(items) == 10
assert items[3].route_node_state.node_id == "if-else-1"
assert items[4].route_node_state.node_id == "if-else-1"
assert isinstance(items[5], NodeRunStreamChunkEvent)
assert items[5].chunk_content == "1 "
assert isinstance(items[6], NodeRunStreamChunkEvent)
assert items[6].chunk_content == "takato"
assert items[7].route_node_state.node_id == "answer-1"
@@ -2,7 +2,7 @@ version: '3'
services:
# API service
api:
image: langgenius/dify-api:0.14.1
image: langgenius/dify-api:0.14.2
restart: always
environment:
# Startup mode, 'api' starts the API server.
@@ -227,7 +227,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.14.1
image: langgenius/dify-api:0.14.2
restart: always
environment:
CONSOLE_WEB_URL: ''
@@ -397,7 +397,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.14.1
image: langgenius/dify-web:0.14.2
restart: always
environment:
# The base URL of console application api server, refers to the Console base URL of WEB service if console domain is
@@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:0.14.1
image: langgenius/dify-api:0.14.2
restart: always
environment:
# Use the shared environment variables.
@@ -25,7 +25,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.14.1
image: langgenius/dify-api:0.14.2
restart: always
environment:
# Use the shared environment variables.
@@ -47,7 +47,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.14.1
image: langgenius/dify-web:0.14.2
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -390,7 +390,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:0.14.1
image: langgenius/dify-api:0.14.2
restart: always
environment:
# Use the shared environment variables.
@@ -413,7 +413,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.14.1
image: langgenius/dify-api:0.14.2
restart: always
environment:
# Use the shared environment variables.
@@ -435,7 +435,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.14.1
image: langgenius/dify-web:0.14.2
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@@ -34,7 +34,7 @@ const MembersPage = () => {
}
const { locale } = useContext(I18n)
const { userProfile, currentWorkspace, isCurrentWorkspaceOwner, isCurrentWorkspaceManager } = useAppContext()
const { userProfile, currentWorkspace, isCurrentWorkspaceOwner, isCurrentWorkspaceManager, systemFeatures } = useAppContext()
const { data, mutate } = useSWR({ url: '/workspaces/current/members' }, fetchMembers)
const [inviteModalVisible, setInviteModalVisible] = useState(false)
const [invitationResults, setInvitationResults] = useState<InvitationResult[]>([])
@@ -122,6 +122,7 @@ const MembersPage = () => {
{
inviteModalVisible && (
<InviteModal
isEmailSetup={systemFeatures.is_email_setup}
onCancel={() => setInviteModalVisible(false)}
onSend={(invitationResults) => {
setInvitedModalVisible(true)
@@ -4,6 +4,7 @@ import { useContext } from 'use-context-selector'
import { XMarkIcon } from '@heroicons/react/24/outline'
import { useTranslation } from 'react-i18next'
import { ReactMultiEmail } from 'react-multi-email'
import { RiErrorWarningFill } from '@remixicon/react'
import RoleSelector from './role-selector'
import s from './index.module.css'
import cn from '@/utils/classnames'
@@ -17,11 +18,13 @@ import I18n from '@/context/i18n'
import 'react-multi-email/dist/style.css'
type IInviteModalProps = {
isEmailSetup: boolean
onCancel: () => void
onSend: (invitationResults: InvitationResult[]) => void
}
const InviteModal = ({
isEmailSetup,
onCancel,
onSend,
}: IInviteModalProps) => {
@@ -59,7 +62,23 @@ const InviteModal = ({
<div className='text-xl font-semibold text-gray-900'>{t('common.members.inviteTeamMember')}</div>
<XMarkIcon className='w-4 h-4 cursor-pointer' onClick={onCancel} />
</div>
<div className='mb-7 text-[13px] text-gray-500'>{t('common.members.inviteTeamMemberTip')}</div>
<div className='mb-3 text-[13px] text-gray-500'>{t('common.members.inviteTeamMemberTip')}</div>
{!isEmailSetup && (
<div className='grow basis-0 overflow-y-auto pb-4'>
<div className='relative mb-1 p-2 rounded-xl border border-components-panel-border shadow-xs'>
<div className='absolute top-0 left-0 w-full h-full rounded-xl opacity-40' style={{ background: 'linear-gradient(92deg, rgba(255, 171, 0, 0.25) 18.12%, rgba(255, 255, 255, 0.00) 167.31%)' }}></div>
<div className='relative flex items-start w-full h-full'>
<div className='shrink-0 mr-0.5 p-0.5'>
<RiErrorWarningFill className='w-5 h-5 text-text-warning' />
</div>
<div className='text-text-primary system-xs-medium'>
<span>{t('common.members.emailNotSetup')}</span>
</div>
</div>
</div>
</div>
)}
<div>
<div className='mb-2 text-sm font-medium text-gray-900'>{t('common.members.email')}</div>
<div className='mb-8 h-36 flex items-stretch'>
@@ -387,6 +387,9 @@ export const useWorkflowRun = () => {
if (nodeIndex !== -1) {
currIteration[nodeIndex] = {
...currIteration[nodeIndex],
...(currIteration[nodeIndex].retryDetail
? { retryDetail: currIteration[nodeIndex].retryDetail }
: {}),
...data,
} as any
}
@@ -626,6 +629,8 @@ export const useWorkflowRun = () => {
const {
workflowRunningData,
setWorkflowRunningData,
iterParallelLogMap,
setIterParallelLogMap,
} = workflowStore.getState()
const {
getNodes,
@@ -633,19 +638,65 @@ export const useWorkflowRun = () => {
} = store.getState()
const nodes = getNodes()
setWorkflowRunningData(produce(workflowRunningData!, (draft) => {
const tracing = draft.tracing!
const currentRetryNodeIndex = tracing.findIndex(trace => trace.node_id === data.node_id)
const currentNode = nodes.find(node => node.id === data.node_id)!
const nodeParent = nodes.find(node => node.id === currentNode.parentId)
if (nodeParent) {
if (!data.execution_metadata.parallel_mode_run_id) {
setWorkflowRunningData(produce(workflowRunningData!, (draft) => {
const tracing = draft.tracing!
const iteration = tracing.find(trace => trace.node_id === nodeParent.id)
if (currentRetryNodeIndex > -1) {
const currentRetryNode = tracing[currentRetryNodeIndex]
if (currentRetryNode.retryDetail)
draft.tracing![currentRetryNodeIndex].retryDetail!.push(data as NodeTracing)
if (iteration && iteration.details?.length) {
const currentNodeRetry = iteration.details[nodeParent.data._iterationIndex - 1]?.find(item => item.node_id === data.node_id)
else
draft.tracing![currentRetryNodeIndex].retryDetail = [data as NodeTracing]
if (currentNodeRetry) {
if (currentNodeRetry?.retryDetail)
currentNodeRetry?.retryDetail.push(data as NodeTracing)
else
currentNodeRetry.retryDetail = [data as NodeTracing]
}
}
}))
}
}))
else {
setWorkflowRunningData(produce(workflowRunningData!, (draft) => {
const tracing = draft.tracing!
const iteration = tracing.find(trace => trace.node_id === nodeParent.id)
if (iteration && iteration.details?.length) {
const iterRunID = data.execution_metadata?.parallel_mode_run_id
const currIteration = iterParallelLogMap.get(iteration.node_id)?.get(iterRunID)
const currentNodeRetry = currIteration?.find(item => item.node_id === data.node_id)
if (currentNodeRetry) {
if (currentNodeRetry?.retryDetail)
currentNodeRetry?.retryDetail.push(data as NodeTracing)
else
currentNodeRetry.retryDetail = [data as NodeTracing]
}
setIterParallelLogMap(iterParallelLogMap)
const iterLogMap = iterParallelLogMap.get(iteration.node_id)
if (iterLogMap)
iteration.details = Array.from(iterLogMap.values())
}
}))
}
}
else {
setWorkflowRunningData(produce(workflowRunningData!, (draft) => {
const tracing = draft.tracing!
const currentRetryNodeIndex = tracing.findIndex(trace => trace.node_id === data.node_id)
if (currentRetryNodeIndex > -1) {
const currentRetryNode = tracing[currentRetryNodeIndex]
if (currentRetryNode.retryDetail)
draft.tracing![currentRetryNodeIndex].retryDetail!.push(data as NodeTracing)
else
draft.tracing![currentRetryNodeIndex].retryDetail = [data as NodeTracing]
}
}))
}
const newNodes = produce(nodes, (draft) => {
const currentNode = draft.find(node => node.id === data.node_id)!
@@ -31,7 +31,10 @@ const RetryOnNode = ({
}, [data._runningStatus, showSelectedBorder])
const showDefault = !isRunning && !isSuccessful && !isException && !isFailed
if (!retry_config)
if (!retry_config?.retry_enabled)
return null
if (!showDefault && !data._retryIndex)
return null
return (
@@ -74,7 +77,7 @@ const RetryOnNode = ({
}
</div>
{
!showDefault && (
!showDefault && !!data._retryIndex && (
<div>
{data._retryIndex}/{data.retry_config?.max_retries}
</div>
@@ -78,11 +78,24 @@ const RunPanel: FC<RunProps> = ({ hideResult, activeTab = 'RESULT', runID, getRe
const groupMap = nodeGroupMap.get(iterationNode.node_id)!
if (!groupMap.has(runId))
if (!groupMap.has(runId)) {
groupMap.set(runId, [item])
}
else {
if (item.status === 'retry') {
const retryNode = groupMap.get(runId)!.find(node => node.node_id === item.node_id)
else
groupMap.get(runId)!.push(item)
if (retryNode) {
if (retryNode?.retryDetail)
retryNode.retryDetail.push(item)
else
retryNode.retryDetail = [item]
}
}
else {
groupMap.get(runId)!.push(item)
}
}
if (item.status === 'failed') {
iterationNode.status = 'failed'
@@ -94,10 +107,24 @@ const RunPanel: FC<RunProps> = ({ hideResult, activeTab = 'RESULT', runID, getRe
const updateSequentialModeGroup = (index: number, item: NodeTracing, iterationNode: NodeTracing) => {
const { details } = iterationNode
if (details) {
if (!details[index])
if (!details[index]) {
details[index] = [item]
else
details[index].push(item)
}
else {
if (item.status === 'retry') {
const retryNode = details[index].find(node => node.node_id === item.node_id)
if (retryNode) {
if (retryNode?.retryDetail)
retryNode.retryDetail.push(item)
else
retryNode.retryDetail = [item]
}
}
else {
details[index].push(item)
}
}
}
if (item.status === 'failed') {
@@ -11,6 +11,7 @@ import {
import { ArrowNarrowLeft } from '../../base/icons/src/vender/line/arrows'
import { NodeRunningStatus } from '../types'
import TracingPanel from './tracing-panel'
import RetryResultPanel from './retry-result-panel'
import { Iteration } from '@/app/components/base/icons/src/vender/workflow'
import cn from '@/utils/classnames'
import type { IterationDurationMap, NodeTracing } from '@/types/workflow'
@@ -41,8 +42,8 @@ const IterationResultPanel: FC<Props> = ({
}))
}, [])
const countIterDuration = (iteration: NodeTracing[], iterDurationMap: IterationDurationMap): string => {
const IterRunIndex = iteration[0].execution_metadata.iteration_index as number
const iterRunId = iteration[0].execution_metadata.parallel_mode_run_id
const IterRunIndex = iteration[0]?.execution_metadata?.iteration_index as number
const iterRunId = iteration[0]?.execution_metadata?.parallel_mode_run_id
const iterItem = iterDurationMap[iterRunId || IterRunIndex]
const duration = iterItem
return `${(duration && duration > 0.01) ? duration.toFixed(2) : 0.01}s`
@@ -74,6 +75,10 @@ const IterationResultPanel: FC<Props> = ({
</>
)
}
const [retryRunResult, setRetryRunResult] = useState<Record<string, NodeTracing[]> | undefined>()
const handleRetryDetail = (v: number, detail?: NodeTracing[]) => {
setRetryRunResult({ ...retryRunResult, [v]: detail })
}
const main = (
<>
@@ -116,15 +121,28 @@ const IterationResultPanel: FC<Props> = ({
{expandedIterations[index] && <div
className="grow h-px bg-divider-subtle"
></div>}
<div className={cn(
'overflow-hidden transition-all duration-200',
expandedIterations[index] ? 'max-h-[1000px] opacity-100' : 'max-h-0 opacity-0',
)}>
<TracingPanel
list={iteration}
className='bg-background-section-burn'
/>
</div>
{
!retryRunResult?.[index] && (
<div className={cn(
'overflow-hidden transition-all duration-200',
expandedIterations[index] ? 'max-h-[1000px] opacity-100' : 'max-h-0 opacity-0',
)}>
<TracingPanel
list={iteration}
className='bg-background-section-burn'
onShowRetryDetail={v => handleRetryDetail(index, v)}
/>
</div>
)
}
{
retryRunResult?.[index] && (
<RetryResultPanel
list={retryRunResult[index]}
onBack={() => handleRetryDetail(index, undefined)}
/>
)
}
</div>
))}
</div>
@@ -216,6 +216,11 @@ const NodePanel: FC<Props> = ({
{nodeInfo.error}
</StatusContainer>
)}
{nodeInfo.status === 'retry' && (
<StatusContainer status='failed'>
{nodeInfo.error}
</StatusContainer>
)}
</div>
{nodeInfo.inputs && (
<div className={cn('mb-1')}>
@@ -191,6 +191,7 @@ const translation = {
editorTip: 'Kann Apps erstellen & bearbeiten',
inviteTeamMember: 'Teammitglied hinzufügen',
inviteTeamMemberTip: 'Sie können direkt nach der Anmeldung auf Ihre Teamdaten zugreifen.',
emailNotSetup: 'E-Mail-Server ist nicht eingerichtet, daher können keine Einladungs-E-Mails versendet werden. Bitte informieren Sie die Benutzer über den Einladungslink, der nach der Einladung ausgestellt wird.',
email: 'E-Mail',
emailInvalid: 'Ungültiges E-Mail-Format',
emailPlaceholder: 'Bitte E-Mails eingeben',
@@ -205,6 +205,7 @@ const translation = {
datasetOperatorTip: 'Only can manage the knowledge base',
inviteTeamMember: 'Add team member',
inviteTeamMemberTip: 'They can access your team data directly after signing in.',
emailNotSetup: 'Email server is not set up, so invitation emails cannot be sent. Please notify users of the invitation link that will be issued after invitation instead.',
email: 'Email',
emailInvalid: 'Invalid Email Format',
emailPlaceholder: 'Please input emails',
@@ -199,6 +199,7 @@ const translation = {
datasetOperatorTip: 'Solo puede administrar la base de conocimiento',
inviteTeamMember: 'Agregar miembro del equipo',
inviteTeamMemberTip: 'Pueden acceder a tus datos del equipo directamente después de iniciar sesión.',
emailNotSetup: 'El servidor de correo no está configurado, por lo que no se pueden enviar correos de invitación. En su lugar, notifique a los usuarios el enlace de invitación que se emitirá después de la invitación.',
email: 'Correo electrónico',
emailInvalid: 'Formato de correo electrónico inválido',
emailPlaceholder: 'Por favor ingresa correos electrónicos',
@@ -199,6 +199,7 @@ const translation = {
datasetOperatorTip: 'فقط میتواند پایگاه دانش را مدیریت کند',
inviteTeamMember: 'افزودن عضو تیم',
inviteTeamMemberTip: 'آنها میتوانند پس از ورود به سیستم، مستقیماً به دادههای تیم شما دسترسی پیدا کنند.',
emailNotSetup: 'سرور ایمیل راهاندازی نشده است، بنابراین ایمیلهای دعوت نمیتوانند ارسال شوند. لطفاً کاربران را از لینک دعوت که پس از دعوت صادر خواهد شد مطلع کنید。',
email: 'ایمیل',
emailInvalid: 'فرمت ایمیل نامعتبر است',
emailPlaceholder: 'لطفاً ایمیلها را وارد کنید',
@@ -191,6 +191,7 @@ const translation = {
editorTip: 'Peut construire des applications, mais ne peut pas gérer les paramètres de l\'équipe',
inviteTeamMember: 'Ajouter un membre de l\'équipe',
inviteTeamMemberTip: 'Ils peuvent accéder directement à vos données d\'équipe après s\'être connectés.',
emailNotSetup: 'Le serveur de messagerie n\'est pas configuré, les e-mails d\'invitation ne peuvent donc pas être envoyés. Veuillez informer les utilisateurs du lien d\'invitation qui sera émis après l\'invitation.',
email: 'Courrier électronique',
emailInvalid: 'Format de courriel invalide',
emailPlaceholder: 'Veuillez entrer des emails',
@@ -204,6 +204,7 @@ const translation = {
inviteTeamMember: 'टीम सदस्य जोड़ें',
inviteTeamMemberTip:
'वे साइन इन करने के बाद सीधे आपकी टीम डेटा तक पहुंच सकते हैं।',
emailNotSetup: 'ईमेल सर्वर सेट नहीं है, इसलिए आमंत्रण ईमेल नहीं भेजे जा सकते। कृपया उपयोगकर्ताओं को आमंत्रण के बाद जारी किए जाने वाले आमंत्रण लिंक के बारे में सूचित करें。',
email: 'ईमेल',
emailInvalid: 'अवैध ईमेल प्रारूप',
emailPlaceholder: 'कृपया ईमेल दर्ज करें',
@@ -208,6 +208,7 @@ const translation = {
inviteTeamMember: 'Aggiungi membro del team',
inviteTeamMemberTip:
'Potranno accedere ai dati del tuo team direttamente dopo aver effettuato l\'accesso.',
emailNotSetup: 'Il server email non è configurato, quindi non è possibile inviare email di invito. Si prega di notificare agli utenti il link di invito che verrà emesso dopo l\'invito.',
email: 'Email',
emailInvalid: 'Formato Email non valido',
emailPlaceholder: 'Per favore inserisci le email',
@@ -199,6 +199,7 @@ const translation = {
datasetOperatorTip: 'ナレッジベースのみを管理できる',
inviteTeamMember: 'チームメンバーを招待する',
inviteTeamMemberTip: '彼らはサインイン後、直接あなた様のチームデータにアクセスできます。',
emailNotSetup: 'メールサーバーがセットアップされていないので、招待メールを送信することはできません。代わりに招待後に発行される招待リンクをユーザーに通知してください。',
email: 'メール',
emailInvalid: '無効なメール形式',
emailPlaceholder: 'メールを入力してください',
@@ -187,6 +187,7 @@ const translation = {
editorTip: '앱 빌드만 가능하고 팀 설정 관리 불가능',
inviteTeamMember: '팀 멤버 초대',
inviteTeamMemberTip: '로그인 후에 바로 팀 데이터에 액세스할 수 있습니다.',
emailNotSetup: '이메일 서버가 설정되지 않아 초대 이메일을 보낼 수 없습니다. 대신 초대 후 발급되는 초대 링크를 사용자에게 알려주세요.',
email: '이메일',
emailInvalid: '유효하지 않은 이메일 형식',
emailPlaceholder: '이메일 입력',
@@ -198,6 +198,7 @@ const translation = {
inviteTeamMember: 'Dodaj członka zespołu',
inviteTeamMemberTip:
'Mogą uzyskać bezpośredni dostęp do danych Twojego zespołu po zalogowaniu.',
emailNotSetup: 'Serwer poczty nie jest skonfigurowany, więc nie można wysyłać zaproszeń e-mail. Proszę powiadomić użytkowników o linku do zaproszenia, który zostanie wydany po zaproszeniu.',
email: 'Email',
emailInvalid: 'Nieprawidłowy format e-maila',
emailPlaceholder: 'Proszę podać adresy e-mail',
@@ -191,6 +191,7 @@ const translation = {
editorTip: 'Pode editar aplicativos, mas não pode gerenciar configurações da equipe',
inviteTeamMember: 'Adicionar membro da equipe',
inviteTeamMemberTip: 'Eles podem acessar os dados da sua equipe diretamente após fazer login.',
emailNotSetup: 'O servidor de e-mail não está configurado, então os e-mails de convite não podem ser enviados. Por favor, notifique os usuários sobre o link de convite que será emitido após o convite.',
email: 'E-mail',
emailInvalid: 'Formato de e-mail inválido',
emailPlaceholder: 'Por favor, insira e-mails',
@@ -191,6 +191,7 @@ const translation = {
editorTip: 'Poate construi aplicații, dar nu poate gestiona setările echipei',
inviteTeamMember: 'Adaugă membru în echipă',
inviteTeamMemberTip: 'Pot accesa direct datele echipei dvs. după autentificare.',
emailNotSetup: 'Serverul de e-mail nu este configurat, astfel încât e-mailurile de invitație nu pot fi trimise. Vă rugăm să notificați utilizatorii despre linkul de invitație care va fi emis după invitație.',
email: 'Email',
emailInvalid: 'Format de email invalid',
emailPlaceholder: 'Vă rugăm să introduceți emailuri',
@@ -199,6 +199,7 @@ const translation = {
datasetOperatorTip: 'Может управлять только базой знаний',
inviteTeamMember: 'Добавить участника команды',
inviteTeamMemberTip: 'Они могут получить доступ к данным вашей команды сразу после входа в систему.',
emailNotSetup: 'Почтовый сервер не настроен, поэтому приглашения по электронной почте не могут быть отправлены. Пожалуйста, уведомите пользователей о ссылке для приглашения, которая будет выдана после приглашения.',
email: 'Электронная почта',
emailInvalid: 'Неверный формат электронной почты',
emailPlaceholder: 'Пожалуйста, введите адреса электронной почты',
@@ -199,6 +199,7 @@ const translation = {
datasetOperatorTip: 'Lahko upravlja samo bazo znanja',
inviteTeamMember: 'Dodaj člana ekipe',
inviteTeamMemberTip: 'Do vaših podatkov bo lahko dostopal takoj po prijavi.',
emailNotSetup: 'E-poštni strežnik ni nastavljen, zato vabil po e-pošti ni mogoče poslati. Prosimo, obvestite uporabnike o povezavi za povabilo, ki bo izdana po povabilu.',
email: 'E-pošta',
emailInvalid: 'Neveljaven format e-pošte',
emailPlaceholder: 'Vnesite e-poštne naslove',
@@ -194,6 +194,7 @@ const translation = {
datasetOperatorTip: 'สามารถจัดการฐานความรู้ได้เท่านั้น',
inviteTeamMember: 'เพิ่มสมาชิกในทีม',
inviteTeamMemberTip: 'พวกเขาสามารถเข้าถึงข้อมูลทีมของคุณได้โดยตรงหลังจากลงชื่อเข้าใช้',
emailNotSetup: 'เซิร์ฟเวอร์อีเมลไม่ได้ตั้งค่าไว้ จึงไม่สามารถส่งอีเมลเชิญได้ กรุณาแจ้งผู้ใช้เกี่ยวกับลิงก์เชิญที่จะออกหลังจากการเชิญแทน',
email: 'อีเมล',
emailInvalid: 'รูปแบบอีเมลไม่ถูกต้อง',
emailPlaceholder: 'กรุณากรอกอีเมล',
@@ -199,6 +199,7 @@ const translation = {
datasetOperatorTip: 'Sadece bilgi tabanını yönetebilir',
inviteTeamMember: 'Takım Üyesi Ekle',
inviteTeamMemberTip: 'Giriş yaptıktan sonra takım verilerinize doğrudan erişebilirler.',
emailNotSetup: 'E-posta sunucusu kurulu değil, bu nedenle davet e-postaları gönderilemiyor. Lütfen kullanıcıları davetten sonra verilecek davet bağlantısı hakkında bilgilendirin.',
email: 'E-posta',
emailInvalid: 'Geçersiz E-posta Formatı',
emailPlaceholder: 'Lütfen e-postaları girin',
@@ -191,6 +191,7 @@ const translation = {
editorTip: 'Може створювати програми, але не може керувати налаштуваннями команди',
inviteTeamMember: 'Додати учасника команди',
inviteTeamMemberTip: 'Вони зможуть отримати доступ до даних вашої команди безпосередньо після входу.',
emailNotSetup: 'Поштовий сервер не налаштований, тому запрошення електронною поштою не можуть бути надіслані. Будь ласка, повідомте користувачів про посилання для запрошення, яке буде видано після запрошення.',
email: 'Електронна пошта',
emailInvalid: 'Недійсний формат електронної пошти',
emailPlaceholder: 'Будь ласка, введіть адресу електронної пошти',
@@ -191,6 +191,7 @@ const translation = {
editorTip: 'Có thể xây dựng ứng dụng, không thể quản lý cài đặt nhóm',
inviteTeamMember: 'Mời thành viên nhóm',
inviteTeamMemberTip: 'Sau khi đăng nhập, họ có thể truy cập trực tiếp vào dữ liệu nhóm của bạn.',
emailNotSetup: 'Máy chủ email chưa được thiết lập, vì vậy không thể gửi email mời. Vui lòng thông báo cho người dùng về liên kết mời sẽ được phát hành sau khi mời.',
email: 'Email',
emailInvalid: 'Định dạng Email không hợp lệ',
emailPlaceholder: 'Vui lòng nhập email',
@@ -203,6 +203,7 @@ const translation = {
datasetOperatorTip: '只能管理知识库',
inviteTeamMember: '添加团队成员',
inviteTeamMemberTip: '对方在登录后可以访问你的团队数据。',
emailNotSetup: '由于邮件服务器未设置,无法发送邀请邮件。请将邀请后生成的邀请链接通知用户。',
email: '邮箱',
emailInvalid: '邮箱格式无效',
emailPlaceholder: '输入邮箱',
@@ -191,6 +191,7 @@ const translation = {
editorTip: '能夠建立並編輯應用程式,不能管理團隊設定',
inviteTeamMember: '新增團隊成員',
inviteTeamMemberTip: '對方在登入後可以訪問你的團隊資料。',
emailNotSetup: '由於郵件伺服器未設置,無法發送邀請郵件。請將邀請後生成的邀請連結通知用戶。',
email: '郵箱',
emailInvalid: '郵箱格式無效',
emailPlaceholder: '輸入郵箱',
@@ -1,6 +1,6 @@
{
"name": "dify-web",
"version": "0.14.1",
"version": "0.14.2",
"private": true,
"engines": {
"node": ">=18.17.0"