diff --git a/.gitignore b/.gitignore
index 296aeee873..9195d741f8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -183,6 +183,7 @@ docker/nginx/conf.d/default.conf
 docker/nginx/ssl/*
 !docker/nginx/ssl/.gitkeep
 docker/middleware.env
+docker/docker-compose.override.yaml
 sdks/python-client/build
 sdks/python-client/dist
diff --git a/README.md b/README.md
index deb05fe07f..574fa1fa38 100644
--- a/README.md
+++ b/README.md
@@ -40,6 +40,7 @@
 
 README in English
+繁體中文文件
 简体中文版自述文件
 日本語のREADME
 README en Español
@@ -53,14 +54,14 @@ README in বাংলা
 
-
-Dify is an open-source LLM app development platform. Its intuitive interface combines agentic AI workflow, RAG pipeline, agent capabilities, model management, observability features and more, letting you quickly go from prototype to production.
+Dify is an open-source LLM app development platform. Its intuitive interface combines agentic AI workflow, RAG pipeline, agent capabilities, model management, observability features and more, letting you quickly go from prototype to production.
 
 ## Quick start
+
 > Before installing Dify, make sure your machine meets the following minimum system requirements:
->
->- CPU >= 2 Core
->- RAM >= 4 GiB
+>
+> - CPU >= 2 Core
+> - RAM >= 4 GiB
@@ -76,41 +77,40 @@ docker compose up -d
 
 After running, you can access the Dify dashboard in your browser at [http://localhost/install](http://localhost/install) and start the initialization process.
 
 #### Seeking help
+
 Please refer to our [FAQ](https://docs.dify.ai/getting-started/install-self-hosted/faqs) if you encounter problems setting up Dify. Reach out to [the community and us](#community--contact) if you are still having issues.
 
 > If you'd like to contribute to Dify or do additional development, refer to our [guide to deploying from source code](https://docs.dify.ai/getting-started/install-self-hosted/local-source-code)
 
 ## Key features
 
-**1. Workflow**:
- Build and test powerful AI workflows on a visual canvas, leveraging all the following features and beyond.
+**1. Workflow**:
+Build and test powerful AI workflows on a visual canvas, leveraging all the following features and beyond.
 
- https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa
+https://github.com/langgenius/dify/assets/13230914/356df23e-1604-483d-80a6-9517ece318aa
 
-
-
-**2. Comprehensive model support**:
- Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama3, and any OpenAI API-compatible models. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).
+**2. Comprehensive model support**:
+Seamless integration with hundreds of proprietary / open-source LLMs from dozens of inference providers and self-hosted solutions, covering GPT, Mistral, Llama3, and any OpenAI API-compatible models. A full list of supported model providers can be found [here](https://docs.dify.ai/getting-started/readme/model-providers).
 
 ![providers-v5](https://github.com/langgenius/dify/assets/13230914/5a17bdbe-097a-4100-8363-40255b70f6e3)
 
+**3. Prompt IDE**:
+Intuitive interface for crafting prompts, comparing model performance, and adding additional features such as text-to-speech to a chat-based app.
 
-**3. Prompt IDE**:
- Intuitive interface for crafting prompts, comparing model performance, and adding additional features such as text-to-speech to a chat-based app.
+**4. RAG Pipeline**:
+Extensive RAG capabilities that cover everything from document ingestion to retrieval, with out-of-box support for text extraction from PDFs, PPTs, and other common document formats.
 
-**4. RAG Pipeline**:
- Extensive RAG capabilities that cover everything from document ingestion to retrieval, with out-of-box support for text extraction from PDFs, PPTs, and other common document formats.
+**5. Agent capabilities**:
+You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion and WolframAlpha.
 
-**5. Agent capabilities**:
- You can define agents based on LLM Function Calling or ReAct, and add pre-built or custom tools for the agent. Dify provides 50+ built-in tools for AI agents, such as Google Search, DALL·E, Stable Diffusion and WolframAlpha.
+**6. LLMOps**:
+Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations.
 
-**6. LLMOps**:
- Monitor and analyze application logs and performance over time. You could continuously improve prompts, datasets, and models based on production data and annotations.
-
-**7. Backend-as-a-Service**:
- All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.
+**7. Backend-as-a-Service**:
+All of Dify's offerings come with corresponding APIs, so you could effortlessly integrate Dify into your own business logic.
 
 ## Feature Comparison
+
@@ -180,24 +180,22 @@ Please refer to our [FAQ](https://docs.dify.ai/getting-started/install-self-host
 
 ## Using Dify
 
 - **Cloud**
-We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan.
+  We host a [Dify Cloud](https://dify.ai) service for anyone to try with zero setup. It provides all the capabilities of the self-deployed version, and includes 200 free GPT-4 calls in the sandbox plan.
 
 - **Self-hosting Dify Community Edition**
-Quickly get Dify running in your environment with this [starter guide](#quick-start).
-Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
+  Quickly get Dify running in your environment with this [starter guide](#quick-start).
+  Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
 
 - **Dify for enterprise / organizations**
-We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs.
+  We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs.
 
 > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one-click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
 
-
 ## Staying ahead
 
 Star Dify on GitHub and be instantly notified of new releases.
 
 ![star-us](https://github.com/langgenius/dify/assets/13230914/b823edc1-6388-4e25-ad45-2f6b187adbb4)
 
-
 ## Advanced Setup
 
 If you need to customize the configuration, please refer to the comments in our [.env.example](docker/.env.example) file and update the corresponding values in your `.env` file. Additionally, you might need to make adjustments to the `docker-compose.yaml` file itself, such as changing image versions, port mappings, or volume mounts, based on your specific deployment environment and requirements. After making any changes, please re-run `docker-compose up -d`. You can find the full list of available environment variables [here](https://docs.dify.ai/getting-started/install-self-hosted/environments).
 
@@ -213,32 +211,34 @@ If you'd like to configure a highly-available setup, there are community-contrib
 
 Deploy Dify to Cloud Platform with a single click using [terraform](https://www.terraform.io/)
 
 ##### Azure Global
+
 - [Azure Terraform by @nikawang](https://github.com/nikawang/dify-azure-terraform)
 
 ##### Google Cloud
+
 - [Google Cloud Terraform by @sotazum](https://github.com/DeNA/dify-google-cloud-terraform)
 
 #### Using AWS CDK for Deployment
 
 Deploy Dify to AWS with [CDK](https://aws.amazon.com/cdk/)
 
-##### AWS
+##### AWS
+
 - [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
 
 ## Contributing
 
-For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
+For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.
 
-
 > We are looking for contributors to help with translating Dify to languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
 
 ## Community & contact
 
-* [Github Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
-* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
-* [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
-* [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
+- [Github Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
+- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
+- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
+- [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.
 
 **Contributors**
 
@@ -250,7 +250,6 @@ At the same time, please consider supporting Dify by sharing it on social media
 
 [![Star History Chart](https://api.star-history.com/svg?repos=langgenius/dify&type=Date)](https://star-history.com/#langgenius/dify&Date)
 
-
 ## Security disclosure
 
 To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer.
 
@@ -258,4 +257,3 @@ To protect your privacy, please avoid posting security issues on GitHub. Instead
 ## License
 
 This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
-
diff --git a/api/controllers/service_api/app/message.py b/api/controllers/service_api/app/message.py
index d5a5afd6a6..1869cf67c2 100644
--- a/api/controllers/service_api/app/message.py
+++ b/api/controllers/service_api/app/message.py
@@ -70,7 +70,7 @@ class MessageListApi(Resource):
 
         try:
             return MessageService.pagination_by_first_id(
-                app_model, end_user, args["conversation_id"], args["first_id"], args["limit"], "desc"
+                app_model, end_user, args["conversation_id"], args["first_id"], args["limit"]
             )
         except services.errors.conversation.ConversationNotExistsError:
             raise NotFound("Conversation Not Exists.")
diff --git a/api/core/file/models.py b/api/core/file/models.py
index 2f0026a203..7eef2d2b33 100644
--- a/api/core/file/models.py
+++ b/api/core/file/models.py
@@ -97,32 +97,18 @@ class File(BaseModel):
         return text
 
     def generate_url(self) -> Optional[str]:
-        if self.type == FileType.IMAGE:
-            if self.transfer_method == FileTransferMethod.REMOTE_URL:
-                return self.remote_url
-            elif self.transfer_method == FileTransferMethod.LOCAL_FILE:
-                if self.related_id is None:
-                    raise ValueError("Missing file related_id")
-                return helpers.get_signed_file_url(upload_file_id=self.related_id)
-            elif self.transfer_method == FileTransferMethod.TOOL_FILE:
-                assert self.related_id is not None
-                assert self.extension is not None
-                return ToolFileParser.get_tool_file_manager().sign_file(
-                    tool_file_id=self.related_id, extension=self.extension
-                )
-        else:
-            if self.transfer_method == FileTransferMethod.REMOTE_URL:
-                return self.remote_url
-            elif self.transfer_method == FileTransferMethod.LOCAL_FILE:
-                if self.related_id is None:
-                    raise ValueError("Missing file related_id")
-                return helpers.get_signed_file_url(upload_file_id=self.related_id)
-            elif self.transfer_method == FileTransferMethod.TOOL_FILE:
-                assert self.related_id is not None
-                assert self.extension is not None
-                return ToolFileParser.get_tool_file_manager().sign_file(
-                    tool_file_id=self.related_id, extension=self.extension
-                )
+        if self.transfer_method == FileTransferMethod.REMOTE_URL:
+            return self.remote_url
+        elif self.transfer_method == FileTransferMethod.LOCAL_FILE:
+            if self.related_id is None:
+                raise ValueError("Missing file related_id")
+            return helpers.get_signed_file_url(upload_file_id=self.related_id)
+        elif self.transfer_method == FileTransferMethod.TOOL_FILE:
+            assert self.related_id is not None
+            assert self.extension is not None
+            return ToolFileParser.get_tool_file_manager().sign_file(
+                tool_file_id=self.related_id, extension=self.extension
+            )
 
     def to_plugin_parameter(self) -> dict[str, Any]:
         return {
diff --git a/api/core/plugin/entities/plugin.py b/api/core/plugin/entities/plugin.py
index ad39b972bf..61f8a65918 100644
--- a/api/core/plugin/entities/plugin.py
+++ b/api/core/plugin/entities/plugin.py
@@ -5,6 +5,7 @@ from collections.abc import Mapping
 from typing import Any, Optional
 
 from pydantic import BaseModel, Field, model_validator
+from werkzeug.exceptions import NotFound
 
 from core.agent.plugin_entities import AgentStrategyProviderEntity
 from core.model_runtime.entities.provider_entities import ProviderEntity
@@ -153,6 +154,8 @@ class GenericProviderID:
         return f"{self.organization}/{self.plugin_name}/{self.provider_name}"
 
     def __init__(self, value: str, is_hardcoded: bool = False) -> None:
+        if not value:
+            raise NotFound("plugin not found, please add plugin")
         # check if the value is a valid plugin id with format: $organization/$plugin_name/$provider_name
         if not re.match(r"^[a-z0-9_-]+\/[a-z0-9_-]+\/[a-z0-9_-]+$", value):
            # check if matches [a-z0-9_-]+, if yes, append with langgenius/$value/$value
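Note on the `GenericProviderID` hunk above: the new guard rejects an empty provider ID before the existing format check runs. Below is a minimal standalone sketch of the overall validation/expansion flow for readers who don't have the class in front of them; the helper name and the plain `ValueError` fallback are illustrative only (the patch raises werkzeug's `NotFound` for the empty case, and the real class stores the three ID segments rather than returning a string).

```python
import re

def normalize_provider_id(value: str) -> str:
    """Illustrative sketch of the ID handling in GenericProviderID.__init__ (not the actual class)."""
    if not value:
        # The patch raises werkzeug.exceptions.NotFound here.
        raise ValueError("plugin not found, please add plugin")
    # Full form: $organization/$plugin_name/$provider_name
    if re.match(r"^[a-z0-9_-]+\/[a-z0-9_-]+\/[a-z0-9_-]+$", value):
        return value
    # Bare name shorthand: expand to langgenius/$value/$value, as the in-code comment describes
    if re.match(r"^[a-z0-9_-]+$", value):
        return f"langgenius/{value}/{value}"
    raise ValueError(f"invalid plugin id: {value}")

assert normalize_provider_id("google") == "langgenius/google/google"
assert normalize_provider_id("langgenius/google/google") == "langgenius/google/google"
```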
diff --git a/api/core/tools/docs/en_US/advanced_scale_out.md b/api/core/tools/docs/en_US/advanced_scale_out.md
index 644ad29129..d6cab690cc 100644
--- a/api/core/tools/docs/en_US/advanced_scale_out.md
+++ b/api/core/tools/docs/en_US/advanced_scale_out.md
@@ -55,7 +55,7 @@ If you need to return a text message, you can use the following interface.
 If you need to return the raw data of a file, such as images, audio, video, PPT, Word, Excel, etc., you can use the following interface.
 
 - `blob` The raw data of the file, of bytes type
-- `meta` The metadata of the file, if you know the type of the file, it is best to pass a `mime_type`, otherwise Dify will use `octet/stream` as the default type
+- `meta` The metadata of the file, if you know the type of the file, it is best to pass a `mime_type`, otherwise Dify will use `application/octet-stream` as the default type
 
 ```python
     def create_blob_message(self, blob: bytes, meta: dict = None, save_as: str = '') -> ToolInvokeMessage:
diff --git a/api/core/tools/docs/ja_JP/advanced_scale_out.md b/api/core/tools/docs/ja_JP/advanced_scale_out.md
index 96f843354f..10ede6fda6 100644
--- a/api/core/tools/docs/ja_JP/advanced_scale_out.md
+++ b/api/core/tools/docs/ja_JP/advanced_scale_out.md
@@ -58,7 +58,7 @@ Difyは`テキスト`、`リンク`、`画像`、`ファイルBLOB`、`JSON`な
 画像、音声、動画、PPT、Word、Excelなどのファイルの生データを返す必要がある場合は、以下のインターフェースを使用できます。
 
 - `blob` ファイルの生データ(bytes型)
-- `meta` ファイルのメタデータ。ファイルの種類が分かっている場合は、`mime_type`を渡すことをお勧めします。そうでない場合、Difyはデフォルトタイプとして`octet/stream`を使用します。
+- `meta` ファイルのメタデータ。ファイルの種類が分かっている場合は、`mime_type`を渡すことをお勧めします。そうでない場合、Difyはデフォルトタイプとして`application/octet-stream`を使用します。
 
 ```python
     def create_blob_message(self, blob: bytes, meta: dict = None, save_as: str = '') -> ToolInvokeMessage:
diff --git a/api/core/tools/docs/zh_Hans/advanced_scale_out.md b/api/core/tools/docs/zh_Hans/advanced_scale_out.md
index 0385dfe4e7..c436a64881 100644
--- a/api/core/tools/docs/zh_Hans/advanced_scale_out.md
+++ b/api/core/tools/docs/zh_Hans/advanced_scale_out.md
@@ -58,7 +58,7 @@ Dify支持`文本` `链接` `图片` `文件BLOB` `JSON` 等多种消息类型
 如果你需要返回文件的原始数据,如图片、音频、视频、PPT、Word、Excel等,可以使用以下接口。
 
 - `blob` 文件的原始数据,bytes类型
-- `meta` 文件的元数据,如果你知道该文件的类型,最好传递一个`mime_type`,否则Dify将使用`octet/stream`作为默认类型
+- `meta` 文件的元数据,如果你知道该文件的类型,最好传递一个`mime_type`,否则Dify将使用`application/octet-stream`作为默认类型
 
 ```python
     def create_blob_message(self, blob: bytes, meta: dict = None, save_as: str = '') -> ToolInvokeMessage:
diff --git a/api/core/tools/tool_engine.py b/api/core/tools/tool_engine.py
index 37b4582381..cf5411112d 100644
--- a/api/core/tools/tool_engine.py
+++ b/api/core/tools/tool_engine.py
@@ -290,14 +290,16 @@ class ToolEngine:
                 raise ValueError("missing meta data")
 
             yield ToolInvokeMessageBinary(
-                mimetype=response.meta.get("mime_type", "octet/stream"),
+                mimetype=response.meta.get("mime_type", "application/octet-stream"),
                 url=cast(ToolInvokeMessage.TextMessage, response.message).text,
             )
         elif response.type == ToolInvokeMessage.MessageType.LINK:
             # check if there is a mime type in meta
             if response.meta and "mime_type" in response.meta:
                 yield ToolInvokeMessageBinary(
-                    mimetype=response.meta.get("mime_type", "octet/stream") if response.meta else "octet/stream",
+                    mimetype=response.meta.get("mime_type", "application/octet-stream")
+                    if response.meta
+                    else "application/octet-stream",
                     url=cast(ToolInvokeMessage.TextMessage, response.message).text,
                 )
diff --git a/api/core/tools/tool_file_manager.py b/api/core/tools/tool_file_manager.py
index 967cddac6c..360dfb9b4c 100644
--- a/api/core/tools/tool_file_manager.py
+++ b/api/core/tools/tool_file_manager.py
@@ -101,7 +101,7 @@ class ToolFileManager:
         except httpx.TimeoutException:
             raise ValueError(f"timeout when downloading file from {file_url}")
 
-        mimetype = guess_type(file_url)[0] or "octet/stream"
+        mimetype = guess_type(file_url)[0] or "application/octet-stream"
         extension = guess_extension(mimetype) or ".bin"
         unique_name = uuid4().hex
         filename = f"{unique_name}{extension}"
diff --git a/api/core/tools/utils/message_transformer.py b/api/core/tools/utils/message_transformer.py
index d92aa5ee60..ec0b3a5863 100644
--- a/api/core/tools/utils/message_transformer.py
+++ b/api/core/tools/utils/message_transformer.py
@@ -58,7 +58,7 @@ class ToolFileMessageTransformer:
                 # get mime type and save blob to storage
                 meta = message.meta or {}
 
-                mimetype = meta.get("mime_type", "octet/stream")
+                mimetype = meta.get("mime_type", "application/octet-stream")
                 # if message is str, encode it to bytes
 
                 if not isinstance(message.message, ToolInvokeMessage.BlobMessage):
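Note on the `octet/stream` → `application/octet-stream` replacements in the hunks above: `octet/stream` is not a registered media type, while `application/octet-stream` is the standard default for unknown binary data. A stdlib-only sketch of the guess-then-fall-back pattern the code and docs now describe (file names are made up):

```python
from mimetypes import guess_type

def resolve_mimetype(filename_or_url: str) -> str:
    """Guess a MIME type from the name, defaulting to the generic binary type."""
    return guess_type(filename_or_url)[0] or "application/octet-stream"

print(resolve_mimetype("report.pdf"))   # application/pdf
print(resolve_mimetype("blob-no-ext"))  # application/octet-stream
```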
diff --git a/api/core/variables/segments.py b/api/core/variables/segments.py
index a9f5651692..64ba16c367 100644
--- a/api/core/variables/segments.py
+++ b/api/core/variables/segments.py
@@ -136,7 +136,7 @@ class ArrayStringSegment(ArraySegment):
 
     @property
     def text(self) -> str:
-        return json.dumps(self.value)
+        return json.dumps(self.value, ensure_ascii=False)
 
 
 class ArrayNumberSegment(ArraySegment):
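Context for the `ensure_ascii=False` change in `ArrayStringSegment.text` above: by default `json.dumps` escapes every non-ASCII character, so array values containing e.g. CJK text render as `\uXXXX` escapes when the segment is converted to text. A quick stdlib illustration (the sample data is made up):

```python
import json

value = ["你好", "world"]
print(json.dumps(value))                      # ["\u4f60\u597d", "world"]
print(json.dumps(value, ensure_ascii=False))  # ["你好", "world"]
```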
diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py
index 24d3c8b906..c4f69f6f6b 100644
--- a/api/factories/file_factory.py
+++ b/api/factories/file_factory.py
@@ -7,7 +7,7 @@ import httpx
 from sqlalchemy import select
 
 from constants import AUDIO_EXTENSIONS, DOCUMENT_EXTENSIONS, IMAGE_EXTENSIONS, VIDEO_EXTENSIONS
-from core.file import File, FileBelongsTo, FileTransferMethod, FileType, FileUploadConfig
+from core.file import File, FileBelongsTo, FileTransferMethod, FileType, FileUploadConfig, helpers
 from core.helper import ssrf_proxy
 from extensions.ext_database import db
 from models import MessageFile, ToolFile, UploadFile
@@ -158,6 +158,39 @@ def _build_from_remote_url(
     tenant_id: str,
     transfer_method: FileTransferMethod,
 ) -> File:
+    upload_file_id = mapping.get("upload_file_id")
+    if upload_file_id:
+        try:
+            uuid.UUID(upload_file_id)
+        except ValueError:
+            raise ValueError("Invalid upload file id format")
+        stmt = select(UploadFile).where(
+            UploadFile.id == upload_file_id,
+            UploadFile.tenant_id == tenant_id,
+        )
+
+        upload_file = db.session.scalar(stmt)
+        if upload_file is None:
+            raise ValueError("Invalid upload file")
+
+        file_type = FileType(mapping.get("type", "custom"))
+        file_type = _standardize_file_type(
+            file_type, extension="." + upload_file.extension, mime_type=upload_file.mime_type
+        )
+
+        return File(
+            id=mapping.get("id"),
+            filename=upload_file.name,
+            extension="." + upload_file.extension,
+            mime_type=upload_file.mime_type,
+            tenant_id=tenant_id,
+            type=file_type,
+            transfer_method=transfer_method,
+            remote_url=helpers.get_signed_file_url(upload_file_id=str(upload_file_id)),
+            related_id=mapping.get("upload_file_id"),
+            size=upload_file.size,
+            storage_key=upload_file.key,
+        )
     url = mapping.get("url") or mapping.get("remote_url")
     if not url:
         raise ValueError("Invalid file url")
diff --git a/api/libs/login.py b/api/libs/login.py
index b128c53c62..be9478e850 100644
--- a/api/libs/login.py
+++ b/api/libs/login.py
@@ -77,7 +77,7 @@ def login_required(func):
             )
             if tenant_account_join:
                 tenant, ta = tenant_account_join
-                account = Account.query.filter_by(id=ta.account_id).first()
+                account = db.session.query(Account).filter_by(id=ta.account_id).first()
                 # Login admin
                 if account:
                     account.current_tenant = tenant
diff --git a/api/models/model.py b/api/models/model.py
index cb099d5654..87806eb918 100644
--- a/api/models/model.py
+++ b/api/models/model.py
@@ -1081,19 +1081,19 @@ class Message(db.Model):  # type: ignore[name-defined]
 
         files = []
         for message_file in message_files:
-            if message_file.transfer_method == "local_file":
+            if message_file.transfer_method == FileTransferMethod.LOCAL_FILE.value:
                 if message_file.upload_file_id is None:
                     raise ValueError(f"MessageFile {message_file.id} is a local file but has no upload_file_id")
                 file = file_factory.build_from_mapping(
                     mapping={
                         "id": message_file.id,
-                        "upload_file_id": message_file.upload_file_id,
-                        "transfer_method": message_file.transfer_method,
                         "type": message_file.type,
+                        "transfer_method": message_file.transfer_method,
+                        "upload_file_id": message_file.upload_file_id,
                     },
                     tenant_id=current_app.tenant_id,
                 )
-            elif message_file.transfer_method == "remote_url":
+            elif message_file.transfer_method == FileTransferMethod.REMOTE_URL.value:
                 if message_file.url is None:
                     raise ValueError(f"MessageFile {message_file.id} is a remote url but has no url")
                 file = file_factory.build_from_mapping(
@@ -1101,11 +1101,12 @@ class Message(db.Model):  # type: ignore[name-defined]
                         "id": message_file.id,
                         "type": message_file.type,
                         "transfer_method": message_file.transfer_method,
+                        "upload_file_id": message_file.upload_file_id,
                         "url": message_file.url,
                     },
                     tenant_id=current_app.tenant_id,
                 )
-            elif message_file.transfer_method == "tool_file":
+            elif message_file.transfer_method == FileTransferMethod.TOOL_FILE.value:
                 if message_file.upload_file_id is None:
                     assert message_file.url is not None
                     message_file.upload_file_id = message_file.url.split("/")[-1].split(".")[0]
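Note on the `api/models/model.py` hunks above: the string literals are replaced with the `FileTransferMethod` enum's values, so the comparisons stay tied to the canonical constants. A minimal sketch of the pattern using a stand-in enum (the real enum lives in `core.file`; only the three values visible in this diff are shown):

```python
from enum import Enum

class FileTransferMethod(str, Enum):  # stand-in for core.file.FileTransferMethod
    REMOTE_URL = "remote_url"
    LOCAL_FILE = "local_file"
    TOOL_FILE = "tool_file"

transfer_method = "local_file"  # e.g. a value read from a database column
# Comparing against .value keeps the literal defined in exactly one place.
assert transfer_method == FileTransferMethod.LOCAL_FILE.value
```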
diff --git a/api/poetry.lock b/api/poetry.lock
index 1158e25e2a..bb25cafd18 100644
--- a/api/poetry.lock
+++ b/api/poetry.lock
@@ -773,21 +773,21 @@ files = [
 
 [[package]]
 name = "boto3"
-version = "1.37.1"
+version = "1.35.99"
 description = "The AWS SDK for Python"
 optional = false
 python-versions = ">=3.8"
 groups = ["main"]
 markers = "python_version == \"3.11\" or python_version >= \"3.12\""
 files = [
-    {file = "boto3-1.37.1-py3-none-any.whl", hash = "sha256:4320441f904435a1b85e6ecb81793192e522c737cc9ed6566014e29f0a11cb22"},
-    {file = "boto3-1.37.1.tar.gz", hash = "sha256:96d18f7feb0c1fcb95f8837b74b6c8880e1b4e35ce5f8a8f8cb243a090c278ed"},
+    {file = "boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71"},
+    {file = "boto3-1.35.99.tar.gz", hash = "sha256:e0abd794a7a591d90558e92e29a9f8837d25ece8e3c120e530526fe27eba5fca"},
 ]
 
 [package.dependencies]
-botocore = ">=1.37.1,<1.38.0"
+botocore = ">=1.35.99,<1.36.0"
 jmespath = ">=0.7.1,<2.0.0"
-s3transfer = ">=0.11.0,<0.12.0"
+s3transfer = ">=0.10.0,<0.11.0"
 
 [package.extras]
 crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
@@ -811,7 +811,7 @@ python-dateutil = ">=2.1,<3.0.0"
 urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}
 
 [package.extras]
-crt = ["awscrt (==0.23.8)"]
+crt = ["awscrt (==0.22.0)"]
 
 [[package]]
 name = "bottleneck"
@@ -6357,6 +6357,7 @@ files = [
     {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
     {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
     {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
+    {file = "psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142"},
     {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
     {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
     {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
diff --git a/api/pyproject.toml b/api/pyproject.toml
index b04ec64cb8..493bfa240b 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -18,7 +18,7 @@ package-mode = false
 authlib = "1.3.1"
 azure-identity = "1.16.1"
 beautifulsoup4 = "4.12.2"
-boto3 = "1.37.1"
+boto3 = "1.35.99"
 bs4 = "~0.0.1"
 cachetools = "~5.3.0"
 celery = "~5.4.0"
diff --git a/docker/.env.example b/docker/.env.example
index ff838e5014..aff037959f 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -344,7 +344,7 @@ TENCENT_COS_SCHEME=your-scheme
 
 # Oracle Storage Configuration
 #
-OCI_ENDPOINT=https://objectstorage.us-ashburn-1.oraclecloud.com
+OCI_ENDPOINT=https://your-object-storage-namespace.compat.objectstorage.us-ashburn-1.oraclecloud.com
 OCI_BUCKET_NAME=your-bucket-name
 OCI_ACCESS_KEY=your-access-key
 OCI_SECRET_KEY=your-secret-key
@@ -968,3 +968,6 @@ MARKETPLACE_ENABLED=true
 MARKETPLACE_API_URL=https://marketplace.dify.ai
 
 FORCE_VERIFYING_SIGNATURE=true
+
+PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120
+PLUGIN_MAX_EXECUTION_TIMEOUT=600
diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml
index 54aa1561f1..df37aedab1 100644
--- a/docker/docker-compose-template.yaml
+++ b/docker/docker-compose-template.yaml
@@ -149,6 +149,8 @@ services:
       PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003}
       PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd}
       FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true}
+      PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120}
+      PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600}
     ports:
       - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
     volumes:
diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml
index d9d723df31..2fcf3d375d 100644
--- a/docker/docker-compose.middleware.yaml
+++ b/docker/docker-compose.middleware.yaml
@@ -88,6 +88,8 @@ services:
       PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003}
      PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd}
       FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true}
+      PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120}
+      PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600}
     ports:
       - "${EXPOSE_PLUGIN_DAEMON_PORT:-5002}:${PLUGIN_DAEMON_PORT:-5002}"
       - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index 85e4ec0b31..d0a9c4df63 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -105,7 +105,7 @@ x-shared-env: &shared-api-worker-env
   TENCENT_COS_SECRET_ID: ${TENCENT_COS_SECRET_ID:-your-secret-id}
   TENCENT_COS_REGION: ${TENCENT_COS_REGION:-your-region}
   TENCENT_COS_SCHEME: ${TENCENT_COS_SCHEME:-your-scheme}
-  OCI_ENDPOINT: ${OCI_ENDPOINT:-https://objectstorage.us-ashburn-1.oraclecloud.com}
+  OCI_ENDPOINT: ${OCI_ENDPOINT:-https://your-object-storage-namespace.compat.objectstorage.us-ashburn-1.oraclecloud.com}
   OCI_BUCKET_NAME: ${OCI_BUCKET_NAME:-your-bucket-name}
   OCI_ACCESS_KEY: ${OCI_ACCESS_KEY:-your-access-key}
   OCI_SECRET_KEY: ${OCI_SECRET_KEY:-your-secret-key}
@@ -413,6 +413,8 @@ x-shared-env: &shared-api-worker-env
   MARKETPLACE_ENABLED: ${MARKETPLACE_ENABLED:-true}
   MARKETPLACE_API_URL: ${MARKETPLACE_API_URL:-https://marketplace.dify.ai}
   FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true}
+  PLUGIN_PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120}
+  PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600}
 
 services:
   # API service
@@ -564,6 +566,8 @@ services:
       PLUGIN_REMOTE_INSTALLING_PORT: ${PLUGIN_DEBUGGING_PORT:-5003}
       PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd}
       FORCE_VERIFYING_SIGNATURE: ${FORCE_VERIFYING_SIGNATURE:-true}
+      PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120}
+      PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600}
     ports:
       - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
     volumes:
diff --git a/docker/middleware.env.example b/docker/middleware.env.example
index aaecad72b5..e0be6a2980 100644
--- a/docker/middleware.env.example
+++ b/docker/middleware.env.example
@@ -114,4 +114,7 @@ PLUGIN_DIFY_INNER_API_URL=http://api:5001
 
 MARKETPLACE_ENABLED=true
 MARKETPLACE_API_URL=https://marketplace.dify.ai
-FORCE_VERIFYING_SIGNATURE=true
\ No newline at end of file
+FORCE_VERIFYING_SIGNATURE=true
+
+PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120
+PLUGIN_MAX_EXECUTION_TIMEOUT=600
diff --git a/docker/nginx/conf.d/default.conf.template b/docker/nginx/conf.d/default.conf.template
index a458412d1e..44191ca1d0 100644
--- a/docker/nginx/conf.d/default.conf.template
+++ b/docker/nginx/conf.d/default.conf.template
@@ -4,6 +4,19 @@ server {
     listen ${NGINX_PORT};
     server_name ${NGINX_SERVER_NAME};
 
+    # Rule 1: Handle application entry points (preserve /app/{id})
+    location ~ ^/app/[a-f0-9-]+$ {
+        proxy_pass http://api:5001;
+        include proxy.conf;
+    }
+
+    # Rule 2: Handle static resource requests (remove /app/{id} prefix)
+    location ~ ^/app/[a-f0-9-]+/(console/api/.*)$ {
+        rewrite ^/app/[a-f0-9-]+/(.*)$ /$1 break;
+        proxy_pass http://api:5001;
+        include proxy.conf;
+    }
+
     location /console/api {
       proxy_pass http://api:5001;
       include proxy.conf;
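To make the two new `location` rules above easier to review, here is a small Python sketch that exercises the same regular expressions: entry-point URLs (`/app/{id}`) are proxied unchanged, while `/app/{id}/console/api/...` requests have the prefix stripped, mirroring the `rewrite ... break` directive. This only illustrates the matching behaviour; it is not code from the patch, and the sample IDs are invented.

```python
import re

ENTRY = re.compile(r"^/app/[a-f0-9-]+$")
ASSET = re.compile(r"^/app/[a-f0-9-]+/(console/api/.*)$")

def routed_path(path: str) -> str:
    if ENTRY.match(path):
        return path  # Rule 1: keep /app/{id} intact
    if ASSET.match(path):
        # Rule 2: drop the /app/{id} prefix before proxying
        return re.sub(r"^/app/[a-f0-9-]+/(.*)$", r"/\1", path)
    return path  # everything else falls through to the existing locations

assert routed_path("/app/0af1c1df-4b08") == "/app/0af1c1df-4b08"
assert routed_path("/app/0af1c1df-4b08/console/api/apps") == "/console/api/apps"
```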
diff --git a/web/app/(commonLayout)/datasets/Datasets.tsx b/web/app/(commonLayout)/datasets/Datasets.tsx
index ea918a2b17..ff0845d84a 100644
--- a/web/app/(commonLayout)/datasets/Datasets.tsx
+++ b/web/app/(commonLayout)/datasets/Datasets.tsx
@@ -1,6 +1,6 @@
 'use client'
 
-import { useEffect, useRef } from 'react'
+import { useCallback, useEffect, useRef } from 'react'
 import useSWRInfinite from 'swr/infinite'
 import { debounce } from 'lodash-es'
 import { useTranslation } from 'react-i18next'
@@ -62,21 +62,28 @@ const Datasets = ({
   useEffect(() => {
     loadingStateRef.current = isLoading
     document.title = `${t('dataset.knowledge')} - Dify`
-  }, [isLoading])
+  }, [isLoading, t])
 
-  useEffect(() => {
-    const onScroll = debounce(() => {
-      if (!loadingStateRef.current) {
-        const { scrollTop, clientHeight } = containerRef.current!
-        const anchorOffset = anchorRef.current!.offsetTop
+  const onScroll = useCallback(
+    debounce(() => {
+      if (!loadingStateRef.current && containerRef.current && anchorRef.current) {
+        const { scrollTop, clientHeight } = containerRef.current
+        const anchorOffset = anchorRef.current.offsetTop
         if (anchorOffset - scrollTop - clientHeight < 100)
           setSize(size => size + 1)
       }
-    }, 50)
+    }, 50),
+    [setSize],
+  )
 
-    containerRef.current?.addEventListener('scroll', onScroll)
-    return () => containerRef.current?.removeEventListener('scroll', onScroll)
-  }, [])
+  useEffect(() => {
+    const currentContainer = containerRef.current
+    currentContainer?.addEventListener('scroll', onScroll)
+    return () => {
+      currentContainer?.removeEventListener('scroll', onScroll)
+      onScroll.cancel()
+    }
+  }, [onScroll])
 
   return (
Feature