Merge branch 'fix/chore-fix' into dev/plugin-deploy

Yeuoly 2024-11-21 14:36:34 +08:00
commit cc8117f02a
No known key found for this signature in database
GPG Key ID: A66E7E320FB19F61
17 changed files with 45 additions and 35 deletions

View File

@@ -18,12 +18,17 @@
```
2. Copy `.env.example` to `.env`
```cli
cp .env.example .env
```
3. Generate a `SECRET_KEY` in the `.env` file.
bash for Linux
```bash for Linux
sed -i "/^SECRET_KEY=/c\SECRET_KEY=$(openssl rand -base64 42)" .env
```
bash for Mac
```bash for Mac
secret_key=$(openssl rand -base64 42)
sed -i '' "/^SECRET_KEY=/c\\
@@ -41,14 +46,6 @@
poetry install
```
In case a contributor forgot to update the dependencies in `pyproject.toml`, you can run the following shell commands instead.
```bash
poetry shell # activate the current environment
poetry add $(cat requirements.txt) # install production dependencies and update pyproject.toml
poetry add $(cat requirements-dev.txt) --group dev # install development dependencies and update pyproject.toml
```
6. Run migrations
Before the first launch, migrate the database to the latest version.
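As a point of reference, this step boils down to a single command; the line below is a minimal sketch assuming the project's Flask-Migrate setup and that dependencies were installed via Poetry as described above.
```bash
# apply all pending database migrations
poetry run python -m flask db upgrade
```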

View File

@@ -106,16 +106,9 @@ class BaseAgentRunner(AppRunner):
# check if model supports stream tool call
llm_model = cast(LargeLanguageModel, model_instance.model_type_instance)
model_schema = llm_model.get_model_schema(model_instance.model, model_instance.credentials)
if model_schema and ModelFeature.STREAM_TOOL_CALL in (model_schema.features or []):
self.stream_tool_call = True
else:
self.stream_tool_call = False
# check if model supports vision
if model_schema and ModelFeature.VISION in (model_schema.features or []):
self.files = application_generate_entity.files
else:
self.files = []
features = model_schema.features if model_schema and model_schema.features else []
self.stream_tool_call = ModelFeature.STREAM_TOOL_CALL in features
self.files = application_generate_entity.files if ModelFeature.VISION in features else []
self.query = None
self._current_thoughts: list[PromptMessage] = []
@@ -243,7 +236,7 @@ class BaseAgentRunner(AppRunner):
update prompt message tool
"""
# try to get tool runtime parameters
tool_runtime_parameters = tool.get_runtime_parameters() or []
tool_runtime_parameters = tool.get_runtime_parameters()
for parameter in tool_runtime_parameters:
if parameter.form != ToolParameter.ToolParameterForm.LLM:

View File

@@ -381,7 +381,7 @@ class WorkflowCycleManage:
id=workflow_run.id,
workflow_id=workflow_run.workflow_id,
sequence_number=workflow_run.sequence_number,
inputs=workflow_run.inputs_dict or {},
inputs=workflow_run.inputs_dict,
created_at=int(workflow_run.created_at.timestamp()),
),
)
@@ -428,7 +428,7 @@ class WorkflowCycleManage:
created_by=created_by,
created_at=int(workflow_run.created_at.timestamp()),
finished_at=int(workflow_run.finished_at.timestamp()),
files=self._fetch_files_from_node_outputs(workflow_run.outputs_dict or {}),
files=self._fetch_files_from_node_outputs(workflow_run.outputs_dict),
),
)

View File

@@ -29,6 +29,7 @@ from core.rag.splitter.fixed_text_splitter import (
FixedRecursiveCharacterTextSplitter,
)
from core.rag.splitter.text_splitter import TextSplitter
from core.tools.utils.rag_web_reader import get_image_upload_file_ids
from core.tools.utils.text_processing_utils import remove_leading_symbols
from extensions.ext_database import db
from extensions.ext_redis import redis_client
@@ -279,6 +280,19 @@ class IndexingRunner:
if len(preview_texts) < 5:
preview_texts.append(document.page_content)
# delete image files and related db records
image_upload_file_ids = get_image_upload_file_ids(document.page_content)
for upload_file_id in image_upload_file_ids:
image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
try:
storage.delete(image_file.key)
except Exception:
logging.exception(
"Delete image_files failed while indexing_estimate, \
image_upload_file_id: {}".format(upload_file_id)
)
db.session.delete(image_file)
if doc_form and doc_form == "qa_model":
if len(preview_texts) > 0:
# qa model document

View File

@@ -58,7 +58,7 @@ class ToolEngine:
# check if this tool has only one parameter
parameters = [
parameter
for parameter in tool.get_runtime_parameters() or []
for parameter in tool.get_runtime_parameters()
if parameter.form == ToolParameter.ToolParameterForm.LLM
]
if parameters and len(parameters) == 1:

View File

@@ -145,7 +145,7 @@ class ToolParameterConfigurationManager:
# get tool parameters
tool_parameters = self.tool_runtime.entity.parameters or []
# get tool runtime parameters
runtime_parameters = self.tool_runtime.get_runtime_parameters() or []
runtime_parameters = self.tool_runtime.get_runtime_parameters()
# override parameters
current_parameters = tool_parameters.copy()
for runtime_parameter in runtime_parameters:

View File

@@ -166,10 +166,9 @@ def _build_from_remote_url(
def _get_remote_file_info(url: str):
mime_type = mimetypes.guess_type(url)[0] or ""
file_size = -1
filename = url.split("/")[-1].split("?")[0] or "unknown_file"
mime_type = mime_type or mimetypes.guess_type(filename)[0]
mime_type = mimetypes.guess_type(filename)[0] or ""
resp = ssrf_proxy.head(url, follow_redirects=True)
if resp.status_code == httpx.codes.OK:

View File

@@ -12,7 +12,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ddcc8bbef391'
down_revision = '09a8d1878d9b' # HEAD OF PLUGIN BRANCH
down_revision = '01d6889832f7' # HEAD OF PLUGIN BRANCH
branch_labels = None
depends_on = None

View File

@@ -344,7 +344,7 @@ class AppService:
if not app_model_config:
return meta
agent_config = app_model_config.agent_mode_dict or {}
agent_config = app_model_config.agent_mode_dict
# get all tools
tools = agent_config.get("tools", [])

View File

@@ -259,7 +259,7 @@ class ToolTransformService:
# get tool parameters
parameters = tool.entity.parameters or []
# get tool runtime parameters
runtime_parameters = tool.get_runtime_parameters() or []
runtime_parameters = tool.get_runtime_parameters()
# override parameters
current_parameters = parameters.copy()
for runtime_parameter in runtime_parameters:

View File

@@ -51,8 +51,8 @@ class WebsiteService:
excludes = options.get("excludes").split(",") if options.get("excludes") else []
params = {
"crawlerOptions": {
"includes": includes or [],
"excludes": excludes or [],
"includes": includes,
"excludes": excludes,
"generateImgAltText": True,
"limit": options.get("limit", 1),
"returnOnlyUrls": False,

View File

@@ -78,6 +78,7 @@ def clean_dataset_task(
"Delete image_files failed when storage deleted, \
image_upload_file_id: {}".format(upload_file_id)
)
db.session.delete(image_file)
db.session.delete(segment)
db.session.query(DatasetProcessRule).filter(DatasetProcessRule.dataset_id == dataset_id).delete()

View File

@@ -51,6 +51,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
"Delete image_files failed when storage deleted, \
image_upload_file_id: {}".format(upload_file_id)
)
db.session.delete(image_file)
db.session.delete(segment)
db.session.commit()

View File

@@ -36,7 +36,7 @@ Welcome to the new `docker` directory for deploying Dify using Docker Compose. T
- Navigate to the `docker` directory.
- Ensure the `middleware.env` file is created by running `cp middleware.env.example middleware.env` (refer to the `middleware.env.example` file).
2. **Running Middleware Services**:
- Execute `docker-compose -f docker-compose.middleware.yaml up -d` to start the middleware services.
- Execute `docker-compose -f docker-compose.middleware.yaml --env-file middleware.env up -d` to start the middleware services.
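Before starting the services, it can help to confirm that the values from `middleware.env` are actually substituted; a quick check, assuming Docker Compose honours the top-level `--env-file` flag as used above:
```bash
# print the compose file with all ${...} variables resolved from middleware.env
docker-compose -f docker-compose.middleware.yaml --env-file middleware.env config
```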
### Migration for Existing Users

View File

@@ -29,11 +29,13 @@ services:
redis:
image: redis:6-alpine
restart: always
environment:
REDISCLI_AUTH: ${REDIS_PASSWORD:-difyai123456}
volumes:
# Mount the redis data directory to the container.
- ${REDIS_HOST_VOLUME:-./volumes/redis/data}:/data
# Set the redis password when startup redis server.
command: redis-server --requirepass difyai123456
command: redis-server --requirepass ${REDIS_PASSWORD:-difyai123456}
ports:
- "${EXPOSE_REDIS_PORT:-6379}:6379"
healthcheck:
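Because `REDISCLI_AUTH` is exported into the container, `redis-cli` authenticates without an explicit `-a` flag; a quick connectivity check might look like the following sketch (assuming this service belongs to `docker-compose.middleware.yaml`):
```bash
# redis-cli picks up the password from REDISCLI_AUTH in the container environment
docker-compose -f docker-compose.middleware.yaml exec redis redis-cli ping  # expect: PONG
```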

View File

@@ -368,6 +368,8 @@ services:
redis:
image: redis:6-alpine
restart: always
environment:
REDISCLI_AUTH: ${REDIS_PASSWORD:-difyai123456}
volumes:
# Mount the redis data directory to the container.
- ./volumes/redis/data:/data

View File

@@ -42,11 +42,13 @@ POSTGRES_EFFECTIVE_CACHE_SIZE=4096MB
# -----------------------------
# Environment Variables for redis Service
REDIS_HOST_VOLUME=./volumes/redis/data
# -----------------------------
REDIS_HOST_VOLUME=./volumes/redis/data
REDIS_PASSWORD=difyai123456
# ------------------------------
# Environment Variables for sandbox Service
# ------------------------------
SANDBOX_API_KEY=dify-sandbox
SANDBOX_GIN_MODE=release
SANDBOX_WORKER_TIMEOUT=15
@@ -54,7 +56,6 @@ SANDBOX_ENABLE_NETWORK=true
SANDBOX_HTTP_PROXY=http://ssrf_proxy:3128
SANDBOX_HTTPS_PROXY=http://ssrf_proxy:3128
SANDBOX_PORT=8194
# ------------------------------
# ------------------------------
# Environment Variables for ssrf_proxy Service