diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml
index 49ab983778..5437883441 100644
--- a/.github/workflows/build-push.yml
+++ b/.github/workflows/build-push.yml
@@ -5,6 +5,7 @@ on:
branches:
- "main"
- "deploy/dev"
+ - "deploy/enterprise"
release:
types: [published]
diff --git a/.github/workflows/deploy-enterprise.yml b/.github/workflows/deploy-enterprise.yml
new file mode 100644
index 0000000000..98fa7c3b49
--- /dev/null
+++ b/.github/workflows/deploy-enterprise.yml
@@ -0,0 +1,29 @@
+name: Deploy Enterprise
+
+permissions:
+ contents: read
+
+on:
+ workflow_run:
+ workflows: ["Build and Push API & Web"]
+ branches:
+ - "deploy/enterprise"
+ types:
+ - completed
+
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ if: |
+ github.event.workflow_run.conclusion == 'success' &&
+ github.event.workflow_run.head_branch == 'deploy/enterprise'
+
+ steps:
+ - name: Deploy to server
+ uses: appleboy/ssh-action@v0.1.8
+ with:
+ host: ${{ secrets.ENTERPRISE_SSH_HOST }}
+ username: ${{ secrets.ENTERPRISE_SSH_USER }}
+ password: ${{ secrets.ENTERPRISE_SSH_PASSWORD }}
+ script: |
+ ${{ vars.ENTERPRISE_SSH_SCRIPT || secrets.ENTERPRISE_SSH_SCRIPT }}
diff --git a/.gitignore b/.gitignore
index 9195d741f8..7c5f4851c9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -202,3 +202,6 @@ api/.vscode
# plugin migrate
plugins.jsonl
+
+# mise
+mise.toml
diff --git a/api/.env.example b/api/.env.example
index a1136f1b09..151ed14120 100644
--- a/api/.env.example
+++ b/api/.env.example
@@ -386,6 +386,7 @@ HTTP_REQUEST_MAX_READ_TIMEOUT=600
HTTP_REQUEST_MAX_WRITE_TIMEOUT=600
HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
+HTTP_REQUEST_NODE_SSL_VERIFY=True
# Respect X-* headers to redirect clients
RESPECT_XFORWARD_HEADERS_ENABLED=false
diff --git a/api/Dockerfile b/api/Dockerfile
index 0d4679b76a..fbfbd47741 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -56,8 +56,6 @@ RUN \
curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
# For Security
expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
- # install a chinese font to support the use of tools like matplotlib
- fonts-noto-cjk \
# install a package to improve the accuracy of guessing mime type and file extension
media-types \
# install libmagic to support the use of python-magic guess MIMETYPE
diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py
index c06269c199..a13a5997a7 100644
--- a/api/configs/feature/__init__.py
+++ b/api/configs/feature/__init__.py
@@ -332,6 +332,11 @@ class HttpConfig(BaseSettings):
default=1 * 1024 * 1024,
)
+ HTTP_REQUEST_NODE_SSL_VERIFY: bool = Field(
+ description="Enable or disable SSL verification for HTTP requests",
+ default=True,
+ )
+
SSRF_DEFAULT_MAX_RETRIES: PositiveInt = Field(
description="Maximum number of retries for network requests (SSRF)",
default=3,
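The new HTTP_REQUEST_NODE_SSL_VERIFY entry in `.env.example` and the matching `HttpConfig` field rely on pydantic's boolean coercion of environment strings. A minimal sketch of that behaviour, assuming `pydantic-settings` is installed; the standalone script below is illustrative only, with the class and field name mirroring this hunk:

```python
import os

from pydantic import Field
from pydantic_settings import BaseSettings


class HttpConfig(BaseSettings):
    HTTP_REQUEST_NODE_SSL_VERIFY: bool = Field(
        description="Enable or disable SSL verification for HTTP requests",
        default=True,
    )


# Strings such as "True"/"False" (as written in .env.example) are coerced to bool.
os.environ["HTTP_REQUEST_NODE_SSL_VERIFY"] = "False"
print(HttpConfig().HTTP_REQUEST_NODE_SSL_VERIFY)  # False
```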
diff --git a/api/configs/middleware/vdb/pgvector_config.py b/api/configs/middleware/vdb/pgvector_config.py
index 4561a9a7ca..9f5f7284d7 100644
--- a/api/configs/middleware/vdb/pgvector_config.py
+++ b/api/configs/middleware/vdb/pgvector_config.py
@@ -43,3 +43,8 @@ class PGVectorConfig(BaseSettings):
description="Max connection of the PostgreSQL database",
default=5,
)
+
+ PGVECTOR_PG_BIGM: bool = Field(
+        description="Whether to use the pg_bigm module for full-text search",
+ default=False,
+ )
diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py
index eb12ec77d8..27fb5134ee 100644
--- a/api/controllers/console/workspace/workspace.py
+++ b/api/controllers/console/workspace/workspace.py
@@ -88,28 +88,20 @@ class WorkspaceListApi(Resource):
parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
args = parser.parse_args()
- tenants = Tenant.query.order_by(Tenant.created_at.desc()).paginate(page=args["page"], per_page=args["limit"])
-
+ tenants = Tenant.query.order_by(Tenant.created_at.desc()).paginate(
+ page=args["page"], per_page=args["limit"], error_out=False
+ )
has_more = False
- if len(tenants.items) == args["limit"]:
- current_page_first_tenant = tenants[-1]
- rest_count = (
- db.session.query(Tenant)
- .filter(
- Tenant.created_at < current_page_first_tenant.created_at, Tenant.id != current_page_first_tenant.id
- )
- .count()
- )
- if rest_count > 0:
- has_more = True
- total = db.session.query(Tenant).count()
+ if tenants.has_next:
+ has_more = True
+
return {
"data": marshal(tenants.items, workspace_fields),
"has_more": has_more,
"limit": args["limit"],
"page": args["page"],
- "total": total,
+ "total": tenants.total,
}, 200
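For reference, a self-contained sketch of the Flask-SQLAlchemy pagination contract the rewritten endpoint leans on, using an in-memory SQLite stand-in for `Tenant` (the model and seed data here are illustrative assumptions): `error_out=False` yields an empty page instead of aborting with 404, and `has_next`/`total` come straight from the `Pagination` object, replacing the hand-rolled counting queries.

```python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
db = SQLAlchemy(app)


class Tenant(db.Model):  # stand-in for the real Tenant model
    id = db.Column(db.Integer, primary_key=True)


with app.app_context():
    db.create_all()
    db.session.add_all([Tenant() for _ in range(45)])
    db.session.commit()

    page = Tenant.query.paginate(page=2, per_page=20, error_out=False)
    print(len(page.items), page.has_next, page.total)    # 20 True 45

    empty = Tenant.query.paginate(page=99, per_page=20, error_out=False)
    print(len(empty.items), empty.has_next, empty.total)  # 0 False 45
```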
diff --git a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py
index 646c4badb9..a1a65e2287 100644
--- a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py
+++ b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py
@@ -89,7 +89,9 @@ class DatasetConfigManager:
dataset_configs["retrieval_model"]
),
top_k=dataset_configs.get("top_k", 4),
- score_threshold=dataset_configs.get("score_threshold"),
+ score_threshold=dataset_configs.get("score_threshold")
+ if dataset_configs.get("score_threshold_enabled", False)
+ else None,
reranking_model=dataset_configs.get("reranking_model"),
weights=dataset_configs.get("weights"),
reranking_enabled=dataset_configs.get("reranking_enabled", True),
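The reworked `score_threshold` argument only passes a threshold through when `score_threshold_enabled` is set; otherwise it falls back to `None`. A small sketch of that gating (the dict literal is a made-up configuration):

```python
dataset_configs = {"score_threshold": 0.5}  # enabled flag deliberately absent

score_threshold = (
    dataset_configs.get("score_threshold")
    if dataset_configs.get("score_threshold_enabled", False)
    else None
)
print(score_threshold)  # None -> the threshold is ignored unless explicitly enabled
```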
diff --git a/api/core/app/app_config/features/file_upload/manager.py b/api/core/app/app_config/features/file_upload/manager.py
index 0dc4efc47a..bcc69e8ec6 100644
--- a/api/core/app/app_config/features/file_upload/manager.py
+++ b/api/core/app/app_config/features/file_upload/manager.py
@@ -17,17 +17,15 @@ class FileUploadConfigManager:
if file_upload_dict:
if file_upload_dict.get("enabled"):
transform_methods = file_upload_dict.get("allowed_file_upload_methods", [])
- data = {
- "image_config": {
- "number_limits": file_upload_dict["number_limits"],
- "transfer_methods": transform_methods,
- }
+ file_upload_dict["image_config"] = {
+ "number_limits": file_upload_dict.get("number_limits", 1),
+ "transfer_methods": transform_methods,
}
if is_vision:
- data["image_config"]["detail"] = file_upload_dict.get("image", {}).get("detail", "low")
+ file_upload_dict["image_config"]["detail"] = file_upload_dict.get("image", {}).get("detail", "high")
- return FileUploadConfig.model_validate(data)
+ return FileUploadConfig.model_validate(file_upload_dict)
@classmethod
def validate_and_set_defaults(cls, config: dict) -> tuple[dict, list[str]]:
diff --git a/api/core/app/apps/base_app_generator.py b/api/core/app/apps/base_app_generator.py
index 00159c858d..5d559b96d7 100644
--- a/api/core/app/apps/base_app_generator.py
+++ b/api/core/app/apps/base_app_generator.py
@@ -151,7 +151,7 @@ class BaseAppGenerator:
def gen():
for message in generator:
- if isinstance(message, (Mapping, dict)):
+ if isinstance(message, Mapping | dict):
yield f"data: {json.dumps(message)}\n\n"
else:
yield f"event: {message}\n\n"
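The switch from the tuple form to `Mapping | dict` relies on `isinstance` accepting PEP 604 unions, which requires Python 3.10 or newer. A quick check:

```python
from collections.abc import Mapping

print(isinstance({}, Mapping | dict))      # True  (dicts are Mappings as well)
print(isinstance("text", Mapping | dict))  # False
```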
diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py
index c8243b29d0..6367e45638 100644
--- a/api/core/helper/ssrf_proxy.py
+++ b/api/core/helper/ssrf_proxy.py
@@ -11,6 +11,19 @@ from configs import dify_config
SSRF_DEFAULT_MAX_RETRIES = dify_config.SSRF_DEFAULT_MAX_RETRIES
+HTTP_REQUEST_NODE_SSL_VERIFY = True # Default value for HTTP_REQUEST_NODE_SSL_VERIFY is True
+try:
+ HTTP_REQUEST_NODE_SSL_VERIFY = dify_config.HTTP_REQUEST_NODE_SSL_VERIFY
+ http_request_node_ssl_verify_lower = str(HTTP_REQUEST_NODE_SSL_VERIFY).lower()
+ if http_request_node_ssl_verify_lower == "true":
+ HTTP_REQUEST_NODE_SSL_VERIFY = True
+ elif http_request_node_ssl_verify_lower == "false":
+ HTTP_REQUEST_NODE_SSL_VERIFY = False
+ else:
+ raise ValueError("Invalid value. HTTP_REQUEST_NODE_SSL_VERIFY should be 'True' or 'False'")
+except NameError:
+ HTTP_REQUEST_NODE_SSL_VERIFY = True
+
BACKOFF_FACTOR = 0.5
STATUS_FORCELIST = [429, 500, 502, 503, 504]
@@ -39,17 +52,17 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs):
while retries <= max_retries:
try:
if dify_config.SSRF_PROXY_ALL_URL:
- with httpx.Client(proxy=dify_config.SSRF_PROXY_ALL_URL) as client:
+ with httpx.Client(proxy=dify_config.SSRF_PROXY_ALL_URL, verify=HTTP_REQUEST_NODE_SSL_VERIFY) as client:
response = client.request(method=method, url=url, **kwargs)
elif dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL:
proxy_mounts = {
"http://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTP_URL),
"https://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTPS_URL),
}
- with httpx.Client(mounts=proxy_mounts) as client:
+ with httpx.Client(mounts=proxy_mounts, verify=HTTP_REQUEST_NODE_SSL_VERIFY) as client:
response = client.request(method=method, url=url, **kwargs)
else:
- with httpx.Client() as client:
+ with httpx.Client(verify=HTTP_REQUEST_NODE_SSL_VERIFY) as client:
response = client.request(method=method, url=url, **kwargs)
if response.status_code not in STATUS_FORCELIST:
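The `verify=` keyword threaded into every `httpx.Client` above is the standard httpx switch for TLS certificate checking. A minimal sketch, assuming httpx is installed (the badssl.com host is just a public TLS test endpoint):

```python
import httpx

# verify defaults to True; False skips certificate validation for the whole client,
# which is what HTTP_REQUEST_NODE_SSL_VERIFY=False opts into.
with httpx.Client(verify=False) as client:
    response = client.get("https://self-signed.badssl.com/")
    print(response.status_code)
```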
diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py
index c153e3f9dd..9e4672ce52 100644
--- a/api/core/ops/ops_trace_manager.py
+++ b/api/core/ops/ops_trace_manager.py
@@ -214,6 +214,8 @@ class OpsTraceManager:
provider_config_map[tracing_provider]["trace_instance"],
provider_config_map[tracing_provider]["config_class"],
)
+ if not decrypt_trace_config:
+ return None
tracing_instance = trace_instance(config_class(**decrypt_trace_config))
return tracing_instance
diff --git a/api/core/plugin/backwards_invocation/model.py b/api/core/plugin/backwards_invocation/model.py
index 61e0f12680..490a475c16 100644
--- a/api/core/plugin/backwards_invocation/model.py
+++ b/api/core/plugin/backwards_invocation/model.py
@@ -3,7 +3,7 @@ from binascii import hexlify, unhexlify
from collections.abc import Generator
from core.model_manager import ModelManager
-from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk
+from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
from core.model_runtime.entities.message_entities import (
PromptMessage,
SystemPromptMessage,
@@ -46,7 +46,7 @@ class PluginModelBackwardsInvocation(BaseBackwardsInvocation):
model_parameters=payload.completion_params,
tools=payload.tools,
stop=payload.stop,
- stream=payload.stream or True,
+ stream=True if payload.stream is None else payload.stream,
user=user_id,
)
@@ -64,7 +64,21 @@ class PluginModelBackwardsInvocation(BaseBackwardsInvocation):
else:
if response.usage:
LLMNode.deduct_llm_quota(tenant_id=tenant.id, model_instance=model_instance, usage=response.usage)
- return response
+
+ def handle_non_streaming(response: LLMResult) -> Generator[LLMResultChunk, None, None]:
+ yield LLMResultChunk(
+ model=response.model,
+ prompt_messages=response.prompt_messages,
+ system_fingerprint=response.system_fingerprint,
+ delta=LLMResultChunkDelta(
+ index=0,
+ message=response.message,
+ usage=response.usage,
+ finish_reason="",
+ ),
+ )
+
+ return handle_non_streaming(response)
@classmethod
def invoke_text_embedding(cls, user_id: str, tenant: Tenant, payload: RequestInvokeTextEmbedding):
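The change from `payload.stream or True` to the conditional expression matters because `or` promotes every falsy value, including an explicit `False`, to `True`. A short comparison of the two forms:

```python
for stream in (None, False, True):
    old = stream or True                       # previous behaviour
    new = True if stream is None else stream   # patched behaviour
    print(stream, old, new)

# None  True True   -> both default to streaming when the field is unset
# False True False  -> the old form silently overrode an explicit stream=False
# True  True True
```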
diff --git a/api/core/plugin/entities/parameters.py b/api/core/plugin/entities/parameters.py
index 7bfa616c11..7d858bd7d5 100644
--- a/api/core/plugin/entities/parameters.py
+++ b/api/core/plugin/entities/parameters.py
@@ -147,7 +147,7 @@ def init_frontend_parameter(rule: PluginParameter, type: enum.StrEnum, value: An
init frontend parameter by rule
"""
parameter_value = value
- if not parameter_value and parameter_value != 0:
+ if not parameter_value and parameter_value != 0 and type != PluginParameterType.TOOLS_SELECTOR:
# get default value
parameter_value = rule.default
if not parameter_value and rule.required:
diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py
index bd0996c9c6..3904bf6231 100644
--- a/api/core/rag/datasource/retrieval_service.py
+++ b/api/core/rag/datasource/retrieval_service.py
@@ -1,5 +1,4 @@
import concurrent.futures
-import json
from concurrent.futures import ThreadPoolExecutor
from typing import Optional
@@ -243,7 +242,7 @@ class RetrievalService:
@staticmethod
def escape_query_for_search(query: str) -> str:
- return json.dumps(query).strip('"')
+ return query.replace('"', '\\"')
@classmethod
def format_retrieval_documents(cls, documents: list[Document]) -> list[RetrievalSegments]:
@@ -277,6 +276,8 @@ class RetrievalService:
continue
dataset_document = dataset_documents[document_id]
+ if not dataset_document:
+ continue
if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX:
# Handle parent-child documents
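The new `escape_query_for_search` only escapes double quotes, whereas the old `json.dumps(...).strip('"')` also escaped backslashes, newlines, and non-ASCII characters. A quick illustration of the difference (the sample query is made up):

```python
import json

query = 'say "hi"\nC:\\path'

print(json.dumps(query).strip('"'))  # say \"hi\"\nC:\\path   (newline and backslash escaped too)
print(query.replace('"', '\\"'))     # say \"hi\"  + a real newline + C:\path
```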
diff --git a/api/core/rag/datasource/vdb/pgvector/pgvector.py b/api/core/rag/datasource/vdb/pgvector/pgvector.py
index c8a1e4f90c..13c214bfd7 100644
--- a/api/core/rag/datasource/vdb/pgvector/pgvector.py
+++ b/api/core/rag/datasource/vdb/pgvector/pgvector.py
@@ -1,8 +1,10 @@
import json
+import logging
import uuid
from contextlib import contextmanager
from typing import Any
+import psycopg2.errors
import psycopg2.extras # type: ignore
import psycopg2.pool # type: ignore
from pydantic import BaseModel, model_validator
@@ -25,6 +27,7 @@ class PGVectorConfig(BaseModel):
database: str
min_connection: int
max_connection: int
+ pg_bigm: bool = False
@model_validator(mode="before")
@classmethod
@@ -62,12 +65,18 @@ CREATE INDEX IF NOT EXISTS embedding_cosine_v1_idx ON {table_name}
USING hnsw (embedding vector_cosine_ops) WITH (m = 16, ef_construction = 64);
"""
+SQL_CREATE_INDEX_PG_BIGM = """
+CREATE INDEX IF NOT EXISTS bigm_idx ON {table_name}
+USING gin (text gin_bigm_ops);
+"""
+
class PGVector(BaseVector):
def __init__(self, collection_name: str, config: PGVectorConfig):
super().__init__(collection_name)
self.pool = self._create_connection_pool(config)
self.table_name = f"embedding_{collection_name}"
+ self.pg_bigm = config.pg_bigm
def get_type(self) -> str:
return VectorType.PGVECTOR
@@ -140,7 +149,14 @@ class PGVector(BaseVector):
if not ids:
return
with self._get_cursor() as cur:
- cur.execute(f"DELETE FROM {self.table_name} WHERE id IN %s", (tuple(ids),))
+ try:
+ cur.execute(f"DELETE FROM {self.table_name} WHERE id IN %s", (tuple(ids),))
+ except psycopg2.errors.UndefinedTable:
+            # table does not exist
+ logging.warning(f"Table {self.table_name} not found, skipping delete operation.")
+ return
+ except Exception as e:
+ raise e
def delete_by_metadata_field(self, key: str, value: str) -> None:
with self._get_cursor() as cur:
@@ -176,15 +192,27 @@ class PGVector(BaseVector):
top_k = kwargs.get("top_k", 5)
with self._get_cursor() as cur:
- cur.execute(
- f"""SELECT meta, text, ts_rank(to_tsvector(coalesce(text, '')), plainto_tsquery(%s)) AS score
- FROM {self.table_name}
- WHERE to_tsvector(text) @@ plainto_tsquery(%s)
- ORDER BY score DESC
- LIMIT {top_k}""",
- # f"'{query}'" is required in order to account for whitespace in query
- (f"'{query}'", f"'{query}'"),
- )
+ if self.pg_bigm:
+ cur.execute("SET pg_bigm.similarity_limit TO 0.000001")
+ cur.execute(
+ f"""SELECT meta, text, bigm_similarity(unistr(%s), coalesce(text, '')) AS score
+ FROM {self.table_name}
+ WHERE text =%% unistr(%s)
+ ORDER BY score DESC
+ LIMIT {top_k}""",
+ # f"'{query}'" is required in order to account for whitespace in query
+ (f"'{query}'", f"'{query}'"),
+ )
+ else:
+ cur.execute(
+ f"""SELECT meta, text, ts_rank(to_tsvector(coalesce(text, '')), plainto_tsquery(%s)) AS score
+ FROM {self.table_name}
+ WHERE to_tsvector(text) @@ plainto_tsquery(%s)
+ ORDER BY score DESC
+ LIMIT {top_k}""",
+ # f"'{query}'" is required in order to account for whitespace in query
+ (f"'{query}'", f"'{query}'"),
+ )
docs = []
@@ -214,6 +242,9 @@ class PGVector(BaseVector):
# ref: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing
if dimension <= 2000:
cur.execute(SQL_CREATE_INDEX.format(table_name=self.table_name))
+ if self.pg_bigm:
+ cur.execute("CREATE EXTENSION IF NOT EXISTS pg_bigm")
+ cur.execute(SQL_CREATE_INDEX_PG_BIGM.format(table_name=self.table_name))
redis_client.set(collection_exist_cache_key, 1, ex=3600)
@@ -237,5 +268,6 @@ class PGVectorFactory(AbstractVectorFactory):
database=dify_config.PGVECTOR_DATABASE or "postgres",
min_connection=dify_config.PGVECTOR_MIN_CONNECTION,
max_connection=dify_config.PGVECTOR_MAX_CONNECTION,
+ pg_bigm=dify_config.PGVECTOR_PG_BIGM,
),
)
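One detail in the pg_bigm branch worth noting: psycopg2 reserves `%` for placeholders whenever a parameter tuple is supplied, so the literal operator has to be written as `=%%`; PostgreSQL then receives the pg_bigm similarity operator `=%`. A hedged sketch (the table name is hypothetical and `mogrify` needs a live connection, so this is illustration rather than a runnable test):

```python
import psycopg2  # assumes psycopg2/psycopg2-binary is available


def render(conn, query: str) -> bytes:
    sql = "SELECT text FROM embedding_demo WHERE text =%% unistr(%s) LIMIT 5"
    with conn.cursor() as cur:
        # mogrify renders the parametrised statement without executing it
        return cur.mogrify(sql, (query,))

# render(conn, "coffee")
# -> b"SELECT text FROM embedding_demo WHERE text =% unistr('coffee') LIMIT 5"
```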
diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py
index c5ac63e853..48454bd66e 100644
--- a/api/core/rag/retrieval/dataset_retrieval.py
+++ b/api/core/rag/retrieval/dataset_retrieval.py
@@ -433,30 +433,33 @@ class DatasetRetrieval:
dataset_document = DatasetDocument.query.filter(
DatasetDocument.id == document.metadata["document_id"]
).first()
- if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX:
- child_chunk = ChildChunk.query.filter(
- ChildChunk.index_node_id == document.metadata["doc_id"],
- ChildChunk.dataset_id == dataset_document.dataset_id,
- ChildChunk.document_id == dataset_document.id,
- ).first()
- if child_chunk:
- segment = DocumentSegment.query.filter(DocumentSegment.id == child_chunk.segment_id).update(
+ if dataset_document:
+ if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX:
+ child_chunk = ChildChunk.query.filter(
+ ChildChunk.index_node_id == document.metadata["doc_id"],
+ ChildChunk.dataset_id == dataset_document.dataset_id,
+ ChildChunk.document_id == dataset_document.id,
+ ).first()
+ if child_chunk:
+ segment = DocumentSegment.query.filter(DocumentSegment.id == child_chunk.segment_id).update(
+ {DocumentSegment.hit_count: DocumentSegment.hit_count + 1}, synchronize_session=False
+ )
+ db.session.commit()
+ else:
+ query = db.session.query(DocumentSegment).filter(
+ DocumentSegment.index_node_id == document.metadata["doc_id"]
+ )
+
+ # if 'dataset_id' in document.metadata:
+ if "dataset_id" in document.metadata:
+ query = query.filter(DocumentSegment.dataset_id == document.metadata["dataset_id"])
+
+ # add hit count to document segment
+ query.update(
{DocumentSegment.hit_count: DocumentSegment.hit_count + 1}, synchronize_session=False
)
- db.session.commit()
- else:
- query = db.session.query(DocumentSegment).filter(
- DocumentSegment.index_node_id == document.metadata["doc_id"]
- )
- # if 'dataset_id' in document.metadata:
- if "dataset_id" in document.metadata:
- query = query.filter(DocumentSegment.dataset_id == document.metadata["dataset_id"])
-
- # add hit count to document segment
- query.update({DocumentSegment.hit_count: DocumentSegment.hit_count + 1}, synchronize_session=False)
-
- db.session.commit()
+ db.session.commit()
# get tracing instance
trace_manager: TraceQueueManager | None = (
diff --git a/api/core/rag/splitter/fixed_text_splitter.py b/api/core/rag/splitter/fixed_text_splitter.py
index 5d34c80113..67f9b6384d 100644
--- a/api/core/rag/splitter/fixed_text_splitter.py
+++ b/api/core/rag/splitter/fixed_text_splitter.py
@@ -76,16 +76,20 @@ class FixedRecursiveCharacterTextSplitter(EnhanceRecursiveCharacterTextSplitter)
def recursive_split_text(self, text: str) -> list[str]:
"""Split incoming text and return chunks."""
+
final_chunks = []
- # Get appropriate separator to use
separator = self._separators[-1]
- for _s in self._separators:
+ new_separators = []
+
+ for i, _s in enumerate(self._separators):
if _s == "":
separator = _s
break
if _s in text:
separator = _s
+ new_separators = self._separators[i + 1 :]
break
+
# Now that we have the separator, split the text
if separator:
if separator == " ":
@@ -94,23 +98,52 @@ class FixedRecursiveCharacterTextSplitter(EnhanceRecursiveCharacterTextSplitter)
splits = text.split(separator)
else:
splits = list(text)
- # Now go merging things, recursively splitting longer texts.
+ splits = [s for s in splits if (s not in {"", "\n"})]
_good_splits = []
_good_splits_lengths = [] # cache the lengths of the splits
+ _separator = "" if self._keep_separator else separator
s_lens = self._length_function(splits)
- for s, s_len in zip(splits, s_lens):
- if s_len < self._chunk_size:
- _good_splits.append(s)
- _good_splits_lengths.append(s_len)
- else:
- if _good_splits:
- merged_text = self._merge_splits(_good_splits, separator, _good_splits_lengths)
- final_chunks.extend(merged_text)
- _good_splits = []
- _good_splits_lengths = []
- other_info = self.recursive_split_text(s)
- final_chunks.extend(other_info)
- if _good_splits:
- merged_text = self._merge_splits(_good_splits, separator, _good_splits_lengths)
- final_chunks.extend(merged_text)
+ if _separator != "":
+ for s, s_len in zip(splits, s_lens):
+ if s_len < self._chunk_size:
+ _good_splits.append(s)
+ _good_splits_lengths.append(s_len)
+ else:
+ if _good_splits:
+ merged_text = self._merge_splits(_good_splits, _separator, _good_splits_lengths)
+ final_chunks.extend(merged_text)
+ _good_splits = []
+ _good_splits_lengths = []
+ if not new_separators:
+ final_chunks.append(s)
+ else:
+ other_info = self._split_text(s, new_separators)
+ final_chunks.extend(other_info)
+
+ if _good_splits:
+ merged_text = self._merge_splits(_good_splits, _separator, _good_splits_lengths)
+ final_chunks.extend(merged_text)
+ else:
+ current_part = ""
+ current_length = 0
+ overlap_part = ""
+ overlap_part_length = 0
+ for s, s_len in zip(splits, s_lens):
+ if current_length + s_len <= self._chunk_size - self._chunk_overlap:
+ current_part += s
+ current_length += s_len
+ elif current_length + s_len <= self._chunk_size:
+ current_part += s
+ current_length += s_len
+ overlap_part += s
+ overlap_part_length += s_len
+ else:
+ final_chunks.append(current_part)
+ current_part = overlap_part + s
+ current_length = s_len + overlap_part_length
+ overlap_part = ""
+ overlap_part_length = 0
+ if current_part:
+ final_chunks.append(current_part)
+
return final_chunks
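The new else-branch of `recursive_split_text` builds chunks by carrying the last `chunk_overlap` worth of pieces into the next chunk. A toy, character-based illustration of that carry-over idea (the real code measures length with `self._length_function`, so this is a simplified sketch rather than the class's exact behaviour):

```python
def sliding_chunks(pieces, chunk_size, chunk_overlap):
    chunks, current, overlap = [], "", ""
    for piece in pieces:
        if len(current) + len(piece) <= chunk_size - chunk_overlap:
            current += piece                      # still within the non-overlap budget
        elif len(current) + len(piece) <= chunk_size:
            current += piece                      # fits in the chunk ...
            overlap += piece                      # ... and is replayed in the next one
        else:
            chunks.append(current)
            current, overlap = overlap + piece, ""
    if current:
        chunks.append(current)
    return chunks


print(sliding_chunks(["alpha ", "beta ", "gamma ", "delta "], chunk_size=14, chunk_overlap=6))
# ['alpha beta ', 'beta gamma ', 'delta ']
```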
diff --git a/api/core/tools/README.md b/api/core/tools/README.md
deleted file mode 100644
index b5d0a30d34..0000000000
--- a/api/core/tools/README.md
+++ /dev/null
@@ -1,25 +0,0 @@
-# Tools
-
-This module implements built-in tools used in Agent Assistants and Workflows within Dify. You could define and display your own tools in this module, without modifying the frontend logic. This decoupling allows for easier horizontal scaling of Dify's capabilities.
-
-## Feature Introduction
-
-The tools provided for Agents and Workflows are currently divided into two categories:
-- `Built-in Tools` are internally implemented within our product and are hardcoded for use in Agents and Workflows.
-- `Api-Based Tools` leverage third-party APIs for implementation. You don't need to code to integrate these -- simply provide interface definitions in formats like `OpenAPI` , `Swagger`, or the `OpenAI-plugin` on the front-end.
-
-### Built-in Tool Providers
-
-
-### API Tool Providers
-
-
-## Tool Integration
-
-To enable developers to build flexible and powerful tools, we provide two guides:
-
-### [Quick Integration 👈🏻](./docs/en_US/tool_scale_out.md)
-Quick integration aims at quickly getting you up to speed with tool integration by walking over an example Google Search tool.
-
-### [Advanced Integration 👈🏻](./docs/en_US/advanced_scale_out.md)
-Advanced integration will offer a deeper dive into the module interfaces, and explain how to implement more complex capabilities, such as generating images, combining multiple tools, and managing the flow of parameters, images, and files between different tools.
\ No newline at end of file
diff --git a/api/core/tools/README_CN.md b/api/core/tools/README_CN.md
deleted file mode 100644
index 7e18441131..0000000000
--- a/api/core/tools/README_CN.md
+++ /dev/null
@@ -1,27 +0,0 @@
-# Tools
-
-该模块提供了各Agent和Workflow中会使用的内置工具的调用、鉴权接口,并为 Dify 提供了统一的工具供应商的信息和凭据表单规则。
-
-- 一方面将工具和业务代码解耦,方便开发者对模型横向扩展,
-- 另一方面提供了只需在后端定义供应商和工具,即可在前端页面直接展示,无需修改前端逻辑。
-
-## 功能介绍
-
-对于给Agent和Workflow提供的工具,我们当前将其分为两类:
-- `Built-in Tools` 内置工具,即Dify内部实现的工具,通过硬编码的方式提供给Agent和Workflow使用。
-- `Api-Based Tools` 基于API的工具,即通过调用第三方API实现的工具,`Api-Based Tool`不需要再额外定义,只需提供`OpenAPI` `Swagger` `OpenAI plugin`等接口文档即可。
-
-### 内置工具供应商
-
-
-### API工具供应商
-
-
-## 工具接入
-为了实现更灵活更强大的功能,Tools提供了一系列的接口,帮助开发者快速构建想要的工具,本文作为开发者的入门指南,将会以[快速接入](./docs/zh_Hans/tool_scale_out.md)和[高级接入](./docs/zh_Hans/advanced_scale_out.md)两部分介绍如何接入工具。
-
-### [快速接入 👈🏻](./docs/zh_Hans/tool_scale_out.md)
-快速接入可以帮助你在10~20分钟内完成工具的接入,但是这种接入方式只能实现简单的功能,如果你想要实现更复杂的功能,可以参考下面的高级接入。
-
-### [高级接入 👈🏻](./docs/zh_Hans/advanced_scale_out.md)
-高级接入将介绍如何实现更复杂的功能配置,包括实现图生图、实现多个工具的组合、实现参数、图片、文件在多个工具之间的流转。
\ No newline at end of file
diff --git a/api/core/tools/README_JA.md b/api/core/tools/README_JA.md
deleted file mode 100644
index 39d0bf1762..0000000000
--- a/api/core/tools/README_JA.md
+++ /dev/null
@@ -1,31 +0,0 @@
-# Tools
-
-このモジュールは、Difyのエージェントアシスタントやワークフローで使用される組み込みツールを実装しています。このモジュールでは、フロントエンドのロジックを変更することなく、独自のツールを定義し表示することができます。この分離により、Difyの機能を容易に水平方向にスケールアウトできます。
-
-## 機能紹介
-
-エージェントとワークフロー向けに提供されるツールは、現在2つのカテゴリーに分類されています。
-
-- `Built-in Tools`はDify内部で実装され、エージェントとワークフローで使用するためにハードコードされています。
-- `Api-Based Tools`はサードパーティのAPIを利用して実装されています。これらを統合するためのコーディングは不要で、フロントエンドで
- `OpenAPI`, `Swagger`または`OpenAI-plugin`などの形式でインターフェース定義を提供するだけです。
-
-### 組み込みツールプロバイダー
-
-
-
-### APIツールプロバイダー
-
-
-
-## ツールの統合
-
-開発者が柔軟で強力なツールを構築できるよう、2つのガイドを提供しています。
-
-### [クイック統合 👈🏻](./docs/ja_JP/tool_scale_out.md)
-
-クイック統合は、Google検索ツールの例を通じて、ツール統合の基本をすばやく理解できるようにすることを目的としています。
-
-### [高度な統合 👈🏻](./docs/ja_JP/advanced_scale_out.md)
-
-高度な統合では、モジュールインターフェースについてより深く掘り下げ、画像生成、複数ツールの組み合わせ、異なるツール間でのパラメーター、画像、ファイルのフロー管理など、より複雑な機能の実装方法を説明します。
\ No newline at end of file
diff --git a/api/core/tools/docs/en_US/advanced_scale_out.md b/api/core/tools/docs/en_US/advanced_scale_out.md
deleted file mode 100644
index d6cab690cc..0000000000
--- a/api/core/tools/docs/en_US/advanced_scale_out.md
+++ /dev/null
@@ -1,278 +0,0 @@
-# Advanced Tool Integration
-
-Before starting with this advanced guide, please make sure you have a basic understanding of the tool integration process in Dify. Check out [Quick Integration](./tool_scale_out.md) for a quick runthrough.
-
-## Tool Interface
-
-We have defined a series of helper methods in the `Tool` class to help developers quickly build more complex tools.
-
-### Message Return
-
-Dify supports various message types such as `text`, `link`, `json`, `image`, and `file BLOB`. You can return different types of messages to the LLM and users through the following interfaces.
-
-Please note, some parameters in the following interfaces will be introduced in later sections.
-
-#### Image URL
-You only need to pass the URL of the image, and Dify will automatically download the image and return it to the user.
-
-```python
- def create_image_message(self, image: str, save_as: str = '') -> ToolInvokeMessage:
- """
- create an image message
-
- :param image: the url of the image
- :return: the image message
- """
-```
-
-#### Link
-If you need to return a link, you can use the following interface.
-
-```python
- def create_link_message(self, link: str, save_as: str = '') -> ToolInvokeMessage:
- """
- create a link message
-
- :param link: the url of the link
- :return: the link message
- """
-```
-
-#### Text
-If you need to return a text message, you can use the following interface.
-
-```python
- def create_text_message(self, text: str, save_as: str = '') -> ToolInvokeMessage:
- """
- create a text message
-
- :param text: the text of the message
- :return: the text message
- """
-```
-
-#### File BLOB
-If you need to return the raw data of a file, such as images, audio, video, PPT, Word, Excel, etc., you can use the following interface.
-
-- `blob` The raw data of the file, of bytes type
-- `meta` The metadata of the file, if you know the type of the file, it is best to pass a `mime_type`, otherwise Dify will use `application/octet-stream` as the default type
-
-```python
- def create_blob_message(self, blob: bytes, meta: dict = None, save_as: str = '') -> ToolInvokeMessage:
- """
- create a blob message
-
- :param blob: the blob
- :return: the blob message
- """
-```
-
-#### JSON
-If you need to return a formatted JSON, you can use the following interface. This is commonly used for data transmission between nodes in a workflow, of course, in agent mode, most LLM are also able to read and understand JSON.
-
-- `object` A Python dictionary object will be automatically serialized into JSON
-
-```python
- def create_json_message(self, object: dict) -> ToolInvokeMessage:
- """
- create a json message
- """
-```
-
-### Shortcut Tools
-
-In large model applications, we have two common needs:
-- First, summarize a long text in advance, and then pass the summary content to the LLM to prevent the original text from being too long for the LLM to handle
-- The content obtained by the tool is a link, and the web page information needs to be crawled before it can be returned to the LLM
-
-To help developers quickly implement these two needs, we provide the following two shortcut tools.
-
-#### Text Summary Tool
-
-This tool takes in an user_id and the text to be summarized, and returns the summarized text. Dify will use the default model of the current workspace to summarize the long text.
-
-```python
- def summary(self, user_id: str, content: str) -> str:
- """
- summary the content
-
- :param user_id: the user id
- :param content: the content
- :return: the summary
- """
-```
-
-#### Web Page Crawling Tool
-
-This tool takes in web page link to be crawled and a user_agent (which can be empty), and returns a string containing the information of the web page. The `user_agent` is an optional parameter that can be used to identify the tool. If not passed, Dify will use the default `user_agent`.
-
-```python
- def get_url(self, url: str, user_agent: str = None) -> str:
- """
- get url
- """ the crawled result
-```
-
-### Variable Pool
-
-We have introduced a variable pool in `Tool` to store variables, files, etc. generated during the tool's operation. These variables can be used by other tools during the tool's operation.
-
-Next, we will use `DallE3` and `Vectorizer.AI` as examples to introduce how to use the variable pool.
-
-- `DallE3` is an image generation tool that can generate images based on text. Here, we will let `DallE3` generate a logo for a coffee shop
-- `Vectorizer.AI` is a vector image conversion tool that can convert images into vector images, so that the images can be infinitely enlarged without distortion. Here, we will convert the PNG icon generated by `DallE3` into a vector image, so that it can be truly used by designers.
-
-#### DallE3
-First, we use DallE3. After creating the image, we save the image to the variable pool. The code is as follows:
-
-```python
-from typing import Any, Dict, List, Union
-from core.tools.entities.tool_entities import ToolInvokeMessage
-from core.tools.tool.builtin_tool import BuiltinTool
-
-from base64 import b64decode
-
-from openai import OpenAI
-
-class DallE3Tool(BuiltinTool):
- def _invoke(self,
- user_id: str,
- tool_parameters: Dict[str, Any],
- ) -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- invoke tools
- """
- client = OpenAI(
- api_key=self.runtime.credentials['openai_api_key'],
- )
-
- # prompt
- prompt = tool_parameters.get('prompt', '')
- if not prompt:
- return self.create_text_message('Please input prompt')
-
- # call openapi dalle3
- response = client.images.generate(
- prompt=prompt, model='dall-e-3',
- size='1024x1024', n=1, style='vivid', quality='standard',
- response_format='b64_json'
- )
-
- result = []
- for image in response.data:
- # Save all images to the variable pool through the save_as parameter. The variable name is self.VARIABLE_KEY.IMAGE.value. If new images are generated later, they will overwrite the previous images.
- result.append(self.create_blob_message(blob=b64decode(image.b64_json),
- meta={ 'mime_type': 'image/png' },
- save_as=self.VARIABLE_KEY.IMAGE.value))
-
- return result
-```
-
-Note that we used `self.VARIABLE_KEY.IMAGE.value` as the variable name of the image. In order for developers' tools to cooperate with each other, we defined this `KEY`. You can use it freely, or you can choose not to use this `KEY`. Passing a custom KEY is also acceptable.
-
-#### Vectorizer.AI
-Next, we use Vectorizer.AI to convert the PNG icon generated by DallE3 into a vector image. Let's go through the functions we defined here. The code is as follows:
-
-```python
-from core.tools.tool.builtin_tool import BuiltinTool
-from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
-from core.tools.errors import ToolProviderCredentialValidationError
-
-from typing import Any, Dict, List, Union
-from httpx import post
-from base64 import b64decode
-
-class VectorizerTool(BuiltinTool):
- def _invoke(self, user_id: str, tool_parameters: Dict[str, Any]) \
- -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- Tool invocation, the image variable name needs to be passed in from here, so that we can get the image from the variable pool
- """
-
-
- def get_runtime_parameters(self) -> List[ToolParameter]:
- """
- Override the tool parameter list, we can dynamically generate the parameter list based on the actual situation in the current variable pool, so that the LLM can generate the form based on the parameter list
- """
-
-
- def is_tool_available(self) -> bool:
- """
- Whether the current tool is available, if there is no image in the current variable pool, then we don't need to display this tool, just return False here
- """
-```
-
-Next, let's implement these three functions
-
-```python
-from core.tools.tool.builtin_tool import BuiltinTool
-from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
-from core.tools.errors import ToolProviderCredentialValidationError
-
-from typing import Any, Dict, List, Union
-from httpx import post
-from base64 import b64decode
-
-class VectorizerTool(BuiltinTool):
- def _invoke(self, user_id: str, tool_parameters: Dict[str, Any]) \
- -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- invoke tools
- """
- api_key_name = self.runtime.credentials.get('api_key_name', None)
- api_key_value = self.runtime.credentials.get('api_key_value', None)
-
- if not api_key_name or not api_key_value:
- raise ToolProviderCredentialValidationError('Please input api key name and value')
-
- # Get image_id, the definition of image_id can be found in get_runtime_parameters
- image_id = tool_parameters.get('image_id', '')
- if not image_id:
- return self.create_text_message('Please input image id')
-
- # Get the image generated by DallE from the variable pool
- image_binary = self.get_variable_file(self.VARIABLE_KEY.IMAGE)
- if not image_binary:
- return self.create_text_message('Image not found, please request user to generate image firstly.')
-
- # Generate vector image
- response = post(
- 'https://vectorizer.ai/api/v1/vectorize',
- files={ 'image': image_binary },
- data={ 'mode': 'test' },
- auth=(api_key_name, api_key_value),
- timeout=30
- )
-
- if response.status_code != 200:
- raise Exception(response.text)
-
- return [
- self.create_text_message('the vectorized svg is saved as an image.'),
- self.create_blob_message(blob=response.content,
- meta={'mime_type': 'image/svg+xml'})
- ]
-
- def get_runtime_parameters(self) -> List[ToolParameter]:
- """
- override the runtime parameters
- """
- # Here, we override the tool parameter list, define the image_id, and set its option list to all images in the current variable pool. The configuration here is consistent with the configuration in yaml.
- return [
- ToolParameter.get_simple_instance(
- name='image_id',
- llm_description=f'the image id that you want to vectorize, \
- and the image id should be specified in \
- {[i.name for i in self.list_default_image_variables()]}',
- type=ToolParameter.ToolParameterType.SELECT,
- required=True,
- options=[i.name for i in self.list_default_image_variables()]
- )
- ]
-
- def is_tool_available(self) -> bool:
- # Only when there are images in the variable pool, the LLM needs to use this tool
- return len(self.list_default_image_variables()) > 0
-```
-
-It's worth noting that we didn't actually use `image_id` here. We assumed that there must be an image in the default variable pool when calling this tool, so we directly used `image_binary = self.get_variable_file(self.VARIABLE_KEY.IMAGE)` to get the image. In cases where the model's capabilities are weak, we recommend developers to do the same, which can effectively improve fault tolerance and avoid the model passing incorrect parameters.
\ No newline at end of file
diff --git a/api/core/tools/docs/en_US/tool_scale_out.md b/api/core/tools/docs/en_US/tool_scale_out.md
deleted file mode 100644
index 1deaf04a47..0000000000
--- a/api/core/tools/docs/en_US/tool_scale_out.md
+++ /dev/null
@@ -1,248 +0,0 @@
-# Quick Tool Integration
-
-Here, we will use GoogleSearch as an example to demonstrate how to quickly integrate a tool.
-
-## 1. Prepare the Tool Provider yaml
-
-### Introduction
-
-This yaml declares a new tool provider, and includes information like the provider's name, icon, author, and other details that are fetched by the frontend for display.
-
-### Example
-
-We need to create a `google` module (folder) under `core/tools/provider/builtin`, and create `google.yaml`. The name must be consistent with the module name.
-
-Subsequently, all operations related to this tool will be carried out under this module.
-
-```yaml
-identity: # Basic information of the tool provider
- author: Dify # Author
- name: google # Name, unique, no duplication with other providers
- label: # Label for frontend display
- en_US: Google # English label
- zh_Hans: Google # Chinese label
- description: # Description for frontend display
- en_US: Google # English description
- zh_Hans: Google # Chinese description
- icon: icon.svg # Icon, needs to be placed in the _assets folder of the current module
- tags:
- - search
-
-```
-
-- The `identity` field is mandatory, it contains the basic information of the tool provider, including author, name, label, description, icon, etc.
- - The icon needs to be placed in the `_assets` folder of the current module, you can refer to [here](../../provider/builtin/google/_assets/icon.svg).
- - The `tags` field is optional, it is used to classify the provider, and the frontend can filter the provider according to the tag, for all tags, they have been listed below:
-
- ```python
- class ToolLabelEnum(Enum):
- SEARCH = 'search'
- IMAGE = 'image'
- VIDEOS = 'videos'
- WEATHER = 'weather'
- FINANCE = 'finance'
- DESIGN = 'design'
- TRAVEL = 'travel'
- SOCIAL = 'social'
- NEWS = 'news'
- MEDICAL = 'medical'
- PRODUCTIVITY = 'productivity'
- EDUCATION = 'education'
- BUSINESS = 'business'
- ENTERTAINMENT = 'entertainment'
- UTILITIES = 'utilities'
- OTHER = 'other'
- ```
-
-## 2. Prepare Provider Credentials
-
-Google, as a third-party tool, uses the API provided by SerpApi, which requires an API Key to use. This means that this tool needs a credential to use. For tools like `wikipedia`, there is no need to fill in the credential field, you can refer to [here](../../provider/builtin/wikipedia/wikipedia.yaml).
-
-After configuring the credential field, the effect is as follows:
-
-```yaml
-identity:
- author: Dify
- name: google
- label:
- en_US: Google
- zh_Hans: Google
- description:
- en_US: Google
- zh_Hans: Google
- icon: icon.svg
-credentials_for_provider: # Credential field
- serpapi_api_key: # Credential field name
- type: secret-input # Credential field type
- required: true # Required or not
- label: # Credential field label
- en_US: SerpApi API key # English label
- zh_Hans: SerpApi API key # Chinese label
- placeholder: # Credential field placeholder
- en_US: Please input your SerpApi API key # English placeholder
- zh_Hans: 请输入你的 SerpApi API key # Chinese placeholder
- help: # Credential field help text
- en_US: Get your SerpApi API key from SerpApi # English help text
- zh_Hans: 从 SerpApi 获取您的 SerpApi API key # Chinese help text
- url: https://serpapi.com/manage-api-key # Credential field help link
-
-```
-
-- `type`: Credential field type, currently can be either `secret-input`, `text-input`, or `select` , corresponding to password input box, text input box, and drop-down box, respectively. If set to `secret-input`, it will mask the input content on the frontend, and the backend will encrypt the input content.
-
-## 3. Prepare Tool yaml
-
-A provider can have multiple tools, each tool needs a yaml file to describe, this file contains the basic information, parameters, output, etc. of the tool.
-
-Still taking GoogleSearch as an example, we need to create a `tools` module under the `google` module, and create `tools/google_search.yaml`, the content is as follows.
-
-```yaml
-identity: # Basic information of the tool
- name: google_search # Tool name, unique, no duplication with other tools
- author: Dify # Author
- label: # Label for frontend display
- en_US: GoogleSearch # English label
- zh_Hans: 谷歌搜索 # Chinese label
-description: # Description for frontend display
- human: # Introduction for frontend display, supports multiple languages
- en_US: A tool for performing a Google SERP search and extracting snippets and webpages.Input should be a search query.
- zh_Hans: 一个用于执行 Google SERP 搜索并提取片段和网页的工具。输入应该是一个搜索查询。
- llm: A tool for performing a Google SERP search and extracting snippets and webpages.Input should be a search query. # Introduction passed to LLM, in order to make LLM better understand this tool, we suggest to write as detailed information about this tool as possible here, so that LLM can understand and use this tool
-parameters: # Parameter list
- - name: query # Parameter name
- type: string # Parameter type
- required: true # Required or not
- label: # Parameter label
- en_US: Query string # English label
- zh_Hans: 查询语句 # Chinese label
- human_description: # Introduction for frontend display, supports multiple languages
- en_US: used for searching
- zh_Hans: 用于搜索网页内容
- llm_description: key words for searching # Introduction passed to LLM, similarly, in order to make LLM better understand this parameter, we suggest to write as detailed information about this parameter as possible here, so that LLM can understand this parameter
- form: llm # Form type, llm means this parameter needs to be inferred by Agent, the frontend will not display this parameter
- - name: result_type
- type: select # Parameter type
- required: true
- options: # Drop-down box options
- - value: text
- label:
- en_US: text
- zh_Hans: 文本
- - value: link
- label:
- en_US: link
- zh_Hans: 链接
- default: link
- label:
- en_US: Result type
- zh_Hans: 结果类型
- human_description:
- en_US: used for selecting the result type, text or link
- zh_Hans: 用于选择结果类型,使用文本还是链接进行展示
- form: form # Form type, form means this parameter needs to be filled in by the user on the frontend before the conversation starts
-
-```
-
-- The `identity` field is mandatory, it contains the basic information of the tool, including name, author, label, description, etc.
-- `parameters` Parameter list
- - `name` (Mandatory) Parameter name, must be unique and not duplicate with other parameters.
- - `type` (Mandatory) Parameter type, currently supports `string`, `number`, `boolean`, `select`, `secret-input` five types, corresponding to string, number, boolean, drop-down box, and encrypted input box, respectively. For sensitive information, we recommend using the `secret-input` type
- - `label` (Mandatory) Parameter label, for frontend display
- - `form` (Mandatory) Form type, currently supports `llm`, `form` two types.
- - In an agent app, `llm` indicates that the parameter is inferred by the LLM itself, while `form` indicates that the parameter can be pre-set for the tool.
- - In a workflow app, both `llm` and `form` need to be filled out by the front end, but the parameters of `llm` will be used as input variables for the tool node.
- - `required` Indicates whether the parameter is required or not
- - In `llm` mode, if the parameter is required, the Agent is required to infer this parameter
- - In `form` mode, if the parameter is required, the user is required to fill in this parameter on the frontend before the conversation starts
- - `options` Parameter options
- - In `llm` mode, Dify will pass all options to LLM, LLM can infer based on these options
- - In `form` mode, when `type` is `select`, the frontend will display these options
- - `default` Default value
- - `min` Minimum value, can be set when the parameter type is `number`.
- - `max` Maximum value, can be set when the parameter type is `number`.
- - `placeholder` The prompt text for input boxes. It can be set when the form type is `form`, and the parameter type is `string`, `number`, or `secret-input`. It supports multiple languages.
- - `human_description` Introduction for frontend display, supports multiple languages
- - `llm_description` Introduction passed to LLM, in order to make LLM better understand this parameter, we suggest to write as detailed information about this parameter as possible here, so that LLM can understand this parameter
-
-
-## 4. Add Tool Logic
-
-After completing the tool configuration, we can start writing the tool code that defines how it is invoked.
-
-Create `google_search.py` under the `google/tools` module, the content is as follows.
-
-```python
-from core.tools.tool.builtin_tool import BuiltinTool
-from core.tools.entities.tool_entities import ToolInvokeMessage
-
-from typing import Any, Dict, List, Union
-
-class GoogleSearchTool(BuiltinTool):
- def _invoke(self,
- user_id: str,
- tool_parameters: Dict[str, Any],
- ) -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- invoke tools
- """
- query = tool_parameters['query']
- result_type = tool_parameters['result_type']
- api_key = self.runtime.credentials['serpapi_api_key']
- # Search with serpapi
- result = SerpAPI(api_key).run(query, result_type=result_type)
-
- if result_type == 'text':
- return self.create_text_message(text=result)
- return self.create_link_message(link=result)
-```
-
-### Parameters
-
-The overall logic of the tool is in the `_invoke` method, this method accepts two parameters: `user_id` and `tool_parameters`, which represent the user ID and tool parameters respectively
-
-### Return Data
-
-When the tool returns, you can choose to return one message or multiple messages, here we return one message, using `create_text_message` and `create_link_message` can create a text message or a link message. If you want to return multiple messages, you can use `[self.create_text_message('msg1'), self.create_text_message('msg2')]` to create a list of messages.
-
-## 5. Add Provider Code
-
-Finally, we need to create a provider class under the provider module to implement the provider's credential verification logic. If the credential verification fails, it will throw a `ToolProviderCredentialValidationError` exception.
-
-Create `google.py` under the `google` module, the content is as follows.
-
-```python
-from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
-from core.tools.errors import ToolProviderCredentialValidationError
-
-from core.tools.provider.builtin.google.tools.google_search import GoogleSearchTool
-
-from typing import Any, Dict
-
-class GoogleProvider(BuiltinToolProviderController):
- def _validate_credentials(self, credentials: Dict[str, Any]) -> None:
- try:
- # 1. Here you need to instantiate a GoogleSearchTool with GoogleSearchTool(), it will automatically load the yaml configuration of GoogleSearchTool, but at this time it does not have credential information inside
- # 2. Then you need to use the fork_tool_runtime method to pass the current credential information to GoogleSearchTool
- # 3. Finally, invoke it, the parameters need to be passed according to the parameter rules configured in the yaml of GoogleSearchTool
- GoogleSearchTool().fork_tool_runtime(
- meta={
- "credentials": credentials,
- }
- ).invoke(
- user_id='',
- tool_parameters={
- "query": "test",
- "result_type": "link"
- },
- )
- except Exception as e:
- raise ToolProviderCredentialValidationError(str(e))
-```
-
-## Completion
-
-After the above steps are completed, we can see this tool on the frontend, and it can be used in the Agent.
-
-Of course, because google_search needs a credential, before using it, you also need to input your credentials on the frontend.
-
-
diff --git a/api/core/tools/docs/images/index/image-1.png b/api/core/tools/docs/images/index/image-1.png
deleted file mode 100644
index 3bb146ec90..0000000000
Binary files a/api/core/tools/docs/images/index/image-1.png and /dev/null differ
diff --git a/api/core/tools/docs/images/index/image-2.png b/api/core/tools/docs/images/index/image-2.png
deleted file mode 100644
index 9ddc4d5fb7..0000000000
Binary files a/api/core/tools/docs/images/index/image-2.png and /dev/null differ
diff --git a/api/core/tools/docs/images/index/image.png b/api/core/tools/docs/images/index/image.png
deleted file mode 100644
index f6ce3a6b62..0000000000
Binary files a/api/core/tools/docs/images/index/image.png and /dev/null differ
diff --git a/api/core/tools/docs/ja_JP/advanced_scale_out.md b/api/core/tools/docs/ja_JP/advanced_scale_out.md
deleted file mode 100644
index 10ede6fda6..0000000000
--- a/api/core/tools/docs/ja_JP/advanced_scale_out.md
+++ /dev/null
@@ -1,283 +0,0 @@
-# 高度なツール統合
-
-このガイドを始める前に、Difyのツール統合プロセスの基本を理解していることを確認してください。簡単な概要については[クイック統合](./tool_scale_out.md)をご覧ください。
-
-## ツールインターフェース
-
-より複雑なツールを迅速に構築するのを支援するため、`Tool`クラスに一連のヘルパーメソッドを定義しています。
-
-### メッセージの返却
-
-Difyは`テキスト`、`リンク`、`画像`、`ファイルBLOB`、`JSON`などの様々なメッセージタイプをサポートしています。以下のインターフェースを通じて、異なるタイプのメッセージをLLMとユーザーに返すことができます。
-
-注意:以下のインターフェースの一部のパラメータについては、後のセクションで説明します。
-
-#### 画像URL
-画像のURLを渡すだけで、Difyが自動的に画像をダウンロードしてユーザーに返します。
-
-```python
- def create_image_message(self, image: str, save_as: str = '') -> ToolInvokeMessage:
- """
- create an image message
-
- :param image: the url of the image
- :param save_as: save as
- :return: the image message
- """
-```
-
-#### リンク
-リンクを返す必要がある場合は、以下のインターフェースを使用できます。
-
-```python
- def create_link_message(self, link: str, save_as: str = '') -> ToolInvokeMessage:
- """
- create a link message
-
- :param link: the url of the link
- :param save_as: save as
- :return: the link message
- """
-```
-
-#### テキスト
-テキストメッセージを返す必要がある場合は、以下のインターフェースを使用できます。
-
-```python
- def create_text_message(self, text: str, save_as: str = '') -> ToolInvokeMessage:
- """
- create a text message
-
- :param text: the text of the message
- :param save_as: save as
- :return: the text message
- """
-```
-
-#### ファイルBLOB
-画像、音声、動画、PPT、Word、Excelなどのファイルの生データを返す必要がある場合は、以下のインターフェースを使用できます。
-
-- `blob` ファイルの生データ(bytes型)
-- `meta` ファイルのメタデータ。ファイルの種類が分かっている場合は、`mime_type`を渡すことをお勧めします。そうでない場合、Difyはデフォルトタイプとして`application/octet-stream`を使用します。
-
-```python
- def create_blob_message(self, blob: bytes, meta: dict = None, save_as: str = '') -> ToolInvokeMessage:
- """
- create a blob message
-
- :param blob: the blob
- :param meta: meta
- :param save_as: save as
- :return: the blob message
- """
-```
-
-#### JSON
-フォーマットされたJSONを返す必要がある場合は、以下のインターフェースを使用できます。これは通常、ワークフロー内のノード間のデータ伝送に使用されますが、エージェントモードでは、ほとんどの大規模言語モデルもJSONを読み取り、理解することができます。
-
-- `object` Pythonの辞書オブジェクトで、自動的にJSONにシリアライズされます。
-
-```python
- def create_json_message(self, object: dict) -> ToolInvokeMessage:
- """
- create a json message
- """
-```
-
-### ショートカットツール
-
-大規模モデルアプリケーションでは、以下の2つの一般的なニーズがあります:
-- まず長いテキストを事前に要約し、その要約内容をLLMに渡すことで、元のテキストが長すぎてLLMが処理できない問題を防ぐ
-- ツールが取得したコンテンツがリンクである場合、Webページ情報をクロールしてからLLMに返す必要がある
-
-開発者がこれら2つのニーズを迅速に実装できるよう、以下の2つのショートカットツールを提供しています。
-
-#### テキスト要約ツール
-
-このツールはuser_idと要約するテキストを入力として受け取り、要約されたテキストを返します。Difyは現在のワークスペースのデフォルトモデルを使用して長文を要約します。
-
-```python
- def summary(self, user_id: str, content: str) -> str:
- """
- summary the content
-
- :param user_id: the user id
- :param content: the content
- :return: the summary
- """
-```
-
-#### Webページクローリングツール
-
-このツールはクロールするWebページのリンクとユーザーエージェント(空でも可)を入力として受け取り、そのWebページの情報を含む文字列を返します。`user_agent`はオプションのパラメータで、ツールを識別するために使用できます。渡さない場合、Difyはデフォルトの`user_agent`を使用します。
-
-```python
- def get_url(self, url: str, user_agent: str = None) -> str:
- """
- get url from the crawled result
- """
-```
-
-### 変数プール
-
-`Tool`内に変数プールを導入し、ツールの実行中に生成された変数やファイルなどを保存します。これらの変数は、ツールの実行中に他のツールが使用することができます。
-
-次に、`DallE3`と`Vectorizer.AI`を例に、変数プールの使用方法を紹介します。
-
-- `DallE3`は画像生成ツールで、テキストに基づいて画像を生成できます。ここでは、`DallE3`にカフェのロゴを生成させます。
-- `Vectorizer.AI`はベクター画像変換ツールで、画像をベクター画像に変換できるため、画像を無限に拡大しても品質が損なわれません。ここでは、`DallE3`が生成したPNGアイコンをベクター画像に変換し、デザイナーが実際に使用できるようにします。
-
-#### DallE3
-まず、DallE3を使用します。画像を作成した後、その画像を変数プールに保存します。コードは以下の通りです:
-
-```python
-from typing import Any, Dict, List, Union
-from core.tools.entities.tool_entities import ToolInvokeMessage
-from core.tools.tool.builtin_tool import BuiltinTool
-
-from base64 import b64decode
-
-from openai import OpenAI
-
-class DallE3Tool(BuiltinTool):
- def _invoke(self,
- user_id: str,
- tool_parameters: Dict[str, Any],
- ) -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- invoke tools
- """
- client = OpenAI(
- api_key=self.runtime.credentials['openai_api_key'],
- )
-
- # prompt
- prompt = tool_parameters.get('prompt', '')
- if not prompt:
- return self.create_text_message('Please input prompt')
-
- # call openapi dalle3
- response = client.images.generate(
- prompt=prompt, model='dall-e-3',
- size='1024x1024', n=1, style='vivid', quality='standard',
- response_format='b64_json'
- )
-
- result = []
- for image in response.data:
- # Save all images to the variable pool through the save_as parameter. The variable name is self.VARIABLE_KEY.IMAGE.value. If new images are generated later, they will overwrite the previous images.
- result.append(self.create_blob_message(blob=b64decode(image.b64_json),
- meta={ 'mime_type': 'image/png' },
- save_as=self.VARIABLE_KEY.IMAGE.value))
-
- return result
-```
-
-ここでは画像の変数名として`self.VARIABLE_KEY.IMAGE.value`を使用していることに注意してください。開発者のツールが互いに連携できるよう、この`KEY`を定義しました。自由に使用することも、この`KEY`を使用しないこともできます。カスタムのKEYを渡すこともできます。
-
-#### Vectorizer.AI
-次に、Vectorizer.AIを使用して、DallE3が生成したPNGアイコンをベクター画像に変換します。ここで定義した関数を見てみましょう。コードは以下の通りです:
-
-```python
-from core.tools.tool.builtin_tool import BuiltinTool
-from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
-from core.tools.errors import ToolProviderCredentialValidationError
-
-from typing import Any, Dict, List, Union
-from httpx import post
-from base64 import b64decode
-
-class VectorizerTool(BuiltinTool):
- def _invoke(self, user_id: str, tool_parameters: Dict[str, Any])
- -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- Tool invocation, the image variable name needs to be passed in from here, so that we can get the image from the variable pool
- """
-
-
- def get_runtime_parameters(self) -> List[ToolParameter]:
- """
- Override the tool parameter list, we can dynamically generate the parameter list based on the actual situation in the current variable pool, so that the LLM can generate the form based on the parameter list
- """
-
-
- def is_tool_available(self) -> bool:
- """
- Whether the current tool is available, if there is no image in the current variable pool, then we don't need to display this tool, just return False here
- """
-```
-
-次に、これら3つの関数を実装します:
-
-```python
-from core.tools.tool.builtin_tool import BuiltinTool
-from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
-from core.tools.errors import ToolProviderCredentialValidationError
-
-from typing import Any, Dict, List, Union
-from httpx import post
-from base64 import b64decode
-
-class VectorizerTool(BuiltinTool):
- def _invoke(self, user_id: str, tool_parameters: Dict[str, Any])
- -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- invoke tools
- """
- api_key_name = self.runtime.credentials.get('api_key_name', None)
- api_key_value = self.runtime.credentials.get('api_key_value', None)
-
- if not api_key_name or not api_key_value:
- raise ToolProviderCredentialValidationError('Please input api key name and value')
-
- # Get image_id, the definition of image_id can be found in get_runtime_parameters
- image_id = tool_parameters.get('image_id', '')
- if not image_id:
- return self.create_text_message('Please input image id')
-
- # Get the image generated by DallE from the variable pool
- image_binary = self.get_variable_file(self.VARIABLE_KEY.IMAGE)
- if not image_binary:
- return self.create_text_message('Image not found, please request user to generate image firstly.')
-
- # Generate vector image
- response = post(
- 'https://vectorizer.ai/api/v1/vectorize',
- files={ 'image': image_binary },
- data={ 'mode': 'test' },
- auth=(api_key_name, api_key_value),
- timeout=30
- )
-
- if response.status_code != 200:
- raise Exception(response.text)
-
- return [
- self.create_text_message('the vectorized svg is saved as an image.'),
- self.create_blob_message(blob=response.content,
- meta={'mime_type': 'image/svg+xml'})
- ]
-
- def get_runtime_parameters(self) -> List[ToolParameter]:
- """
- override the runtime parameters
- """
- # Here, we override the tool parameter list, define the image_id, and set its option list to all images in the current variable pool. The configuration here is consistent with the configuration in yaml.
- return [
- ToolParameter.get_simple_instance(
- name='image_id',
- llm_description=f'the image id that you want to vectorize, \
- and the image id should be specified in \
- {[i.name for i in self.list_default_image_variables()]}',
- type=ToolParameter.ToolParameterType.SELECT,
- required=True,
- options=[i.name for i in self.list_default_image_variables()]
- )
- ]
-
- def is_tool_available(self) -> bool:
- # Only when there are images in the variable pool, the LLM needs to use this tool
- return len(self.list_default_image_variables()) > 0
-```
-
-ここで注目すべきは、実際には`image_id`を使用していないことです。このツールを呼び出す際には、デフォルトの変数プールに必ず画像があると仮定し、直接`image_binary = self.get_variable_file(self.VARIABLE_KEY.IMAGE)`を使用して画像を取得しています。モデルの能力が弱い場合、開発者にもこの方法を推奨します。これにより、エラー許容度を効果的に向上させ、モデルが誤ったパラメータを渡すのを防ぐことができます。
\ No newline at end of file
diff --git a/api/core/tools/docs/ja_JP/tool_scale_out.md b/api/core/tools/docs/ja_JP/tool_scale_out.md
deleted file mode 100644
index a721023d00..0000000000
--- a/api/core/tools/docs/ja_JP/tool_scale_out.md
+++ /dev/null
@@ -1,240 +0,0 @@
-# ツールの迅速な統合
-
-ここでは、GoogleSearchを例にツールを迅速に統合する方法を紹介します。
-
-## 1. ツールプロバイダーのyamlを準備する
-
-### 概要
-
-このyamlファイルには、プロバイダー名、アイコン、作者などの詳細情報が含まれ、フロントエンドでの柔軟な表示を可能にします。
-
-### 例
-
-`core/tools/provider/builtin`の下に`google`モジュール(フォルダ)を作成し、`google.yaml`を作成します。名前はモジュール名と一致している必要があります。
-
-以降、このツールに関するすべての操作はこのモジュール内で行います。
-
-```yaml
-identity: # ツールプロバイダーの基本情報
- author: Dify # 作者
- name: google # 名前(一意、他のプロバイダーと重複不可)
- label: # フロントエンド表示用のラベル
- en_US: Google # 英語ラベル
- zh_Hans: Google # 中国語ラベル
- description: # フロントエンド表示用の説明
- en_US: Google # 英語説明
- zh_Hans: Google # 中国語説明
- icon: icon.svg # アイコン(現在のモジュールの_assetsフォルダに配置)
- tags: # タグ(フロントエンド表示用)
- - search
-```
-
-- `identity`フィールドは必須で、ツールプロバイダーの基本情報(作者、名前、ラベル、説明、アイコンなど)が含まれます。
- - アイコンは現在のモジュールの`_assets`フォルダに配置する必要があります。[こちら](../../provider/builtin/google/_assets/icon.svg)を参照してください。
- - タグはフロントエンドでの表示に使用され、ユーザーがこのツールプロバイダーを素早く見つけるのに役立ちます。現在サポートされているすべてのタグは以下の通りです:
- ```python
- class ToolLabelEnum(Enum):
- SEARCH = 'search'
- IMAGE = 'image'
- VIDEOS = 'videos'
- WEATHER = 'weather'
- FINANCE = 'finance'
- DESIGN = 'design'
- TRAVEL = 'travel'
- SOCIAL = 'social'
- NEWS = 'news'
- MEDICAL = 'medical'
- PRODUCTIVITY = 'productivity'
- EDUCATION = 'education'
- BUSINESS = 'business'
- ENTERTAINMENT = 'entertainment'
- UTILITIES = 'utilities'
- OTHER = 'other'
- ```
-
-## 2. プロバイダーの認証情報を準備する
-
-GoogleはSerpApiが提供するAPIを使用するサードパーティツールであり、SerpApiを使用するにはAPI Keyが必要です。つまり、このツールを使用するには認証情報が必要です。一方、`wikipedia`のようなツールでは認証情報フィールドを記入する必要はありません。[こちら](../../provider/builtin/wikipedia/wikipedia.yaml)を参照してください。
-
-認証情報フィールドを設定すると、以下のようになります:
-
-```yaml
-identity:
- author: Dify
- name: google
- label:
- en_US: Google
- zh_Hans: Google
- description:
- en_US: Google
- zh_Hans: Google
- icon: icon.svg
-credentials_for_provider: # 認証情報フィールド
- serpapi_api_key: # 認証情報フィールド名
- type: secret-input # 認証情報フィールドタイプ
- required: true # 必須かどうか
- label: # 認証情報フィールドラベル
- en_US: SerpApi API key # 英語ラベル
- zh_Hans: SerpApi API key # 中国語ラベル
- placeholder: # 認証情報フィールドプレースホルダー
- en_US: Please input your SerpApi API key # 英語プレースホルダー
- zh_Hans: 请输入你的 SerpApi API key # 中国語プレースホルダー
- help: # 認証情報フィールドヘルプテキスト
- en_US: Get your SerpApi API key from SerpApi # 英語ヘルプテキスト
- zh_Hans: 从 SerpApi 获取您的 SerpApi API key # 中国語ヘルプテキスト
- url: https://serpapi.com/manage-api-key # 認証情報フィールドヘルプリンク
-```
-
-- `type`:認証情報フィールドタイプ。現在、`secret-input`、`text-input`、`select`の3種類をサポートしており、それぞれパスワード入力ボックス、テキスト入力ボックス、ドロップダウンボックスに対応します。`secret-input`の場合、フロントエンドで入力内容が隠され、バックエンドで入力内容が暗号化されます。
-
-## 3. ツールのyamlを準備する
-
-1つのプロバイダーの下に複数のツールを持つことができ、各ツールにはyamlファイルが必要です。このファイルにはツールの基本情報、パラメータ、出力などが含まれます。
-
-引き続きGoogleSearchを例に、`google`モジュールの下に`tools`モジュールを作成し、`tools/google_search.yaml`を作成します。内容は以下の通りです:
-
-```yaml
-identity: # ツールの基本情報
- name: google_search # ツール名(一意、他のツールと重複不可)
- author: Dify # 作者
- label: # フロントエンド表示用のラベル
- en_US: GoogleSearch # 英語ラベル
- zh_Hans: 谷歌搜索 # 中国語ラベル
-description: # フロントエンド表示用の説明
- human: # フロントエンド表示用の紹介(多言語対応)
- en_US: A tool for performing a Google SERP search and extracting snippets and webpages. Input should be a search query.
- zh_Hans: 一个用于执行 Google SERP 搜索并提取片段和网页的工具。输入应该是一个搜索查询。
- llm: A tool for performing a Google SERP search and extracting snippets and webpages. Input should be a search query. # LLMに渡す紹介文。LLMがこのツールをより理解できるよう、できるだけ詳細な情報を記述することをお勧めします。
-parameters: # パラメータリスト
- - name: query # パラメータ名
- type: string # パラメータタイプ
- required: true # 必須かどうか
- label: # パラメータラベル
- en_US: Query string # 英語ラベル
- zh_Hans: 查询语句 # 中国語ラベル
- human_description: # フロントエンド表示用の紹介(多言語対応)
- en_US: used for searching
- zh_Hans: 用于搜索网页内容
- llm_description: key words for searching # LLMに渡す紹介文。LLMがこのパラメータをより理解できるよう、できるだけ詳細な情報を記述することをお勧めします。
- form: llm # フォームタイプ。llmはこのパラメータがAgentによって推論される必要があることを意味し、フロントエンドではこのパラメータは表示されません。
- - name: result_type
- type: select # パラメータタイプ
- required: true
- options: # ドロップダウンボックスのオプション
- - value: text
- label:
- en_US: text
- zh_Hans: 文本
- - value: link
- label:
- en_US: link
- zh_Hans: 链接
- default: link
- label:
- en_US: Result type
- zh_Hans: 结果类型
- human_description:
- en_US: used for selecting the result type, text or link
- zh_Hans: 用于选择结果类型,使用文本还是链接进行展示
- form: form # フォームタイプ。formはこのパラメータが対話開始前にフロントエンドでユーザーによって入力される必要があることを意味します。
-```
-
-- `identity`フィールドは必須で、ツールの基本情報(名前、作者、ラベル、説明など)が含まれます。
-- `parameters` パラメータリスト
- - `name`(必須)パラメータ名。一意で、他のパラメータと重複しないようにしてください。
- - `type`(必須)パラメータタイプ。現在、`string`、`number`、`boolean`、`select`、`secret-input`の5種類をサポートしており、それぞれ文字列、数値、ブール値、ドロップダウンボックス、暗号化入力ボックスに対応します。機密情報には`secret-input`タイプの使用をお勧めします。
- - `label`(必須)パラメータラベル。フロントエンド表示用です。
- - `form`(必須)フォームタイプ。現在、`llm`と`form`の2種類をサポートしています。
- - エージェントアプリケーションでは、`llm`はこのパラメータがLLM自身によって推論されることを示し、`form`はこのツールを使用するために事前に設定できるパラメータであることを示します。
- - ワークフローアプリケーションでは、`llm`と`form`の両方がフロントエンドで入力する必要がありますが、`llm`のパラメータはツールノードの入力変数として使用されます。
- - `required` パラメータが必須かどうかを示します。
- - `llm`モードでは、パラメータが必須の場合、Agentはこのパラメータを推論する必要があります。
- - `form`モードでは、パラメータが必須の場合、ユーザーは対話開始前にフロントエンドでこのパラメータを入力する必要があります。
- - `options` パラメータオプション
- - `llm`モードでは、DifyはすべてのオプションをLLMに渡し、LLMはこれらのオプションに基づいて推論できます。
- - `form`モードで、`type`が`select`の場合、フロントエンドはこれらのオプションを表示します。
- - `default` デフォルト値
- - `min` 最小値。パラメータタイプが`number`の場合に設定できます。
- - `max` 最大値。パラメータタイプが`number`の場合に設定できます。
- - `human_description` フロントエンド表示用の紹介。多言語対応です。
- - `placeholder` 入力ボックスのプロンプトテキスト。フォームタイプが`form`で、パラメータタイプが`string`、`number`、`secret-input`の場合に設定できます。多言語対応です。
- - `llm_description` LLMに渡す紹介文。LLMがこのパラメータをより理解できるよう、できるだけ詳細な情報を記述することをお勧めします。
-
-## 4. ツールコードを準備する
-
-ツールの設定が完了したら、ツールのロジックを実装するコードを作成します。
-
-`google/tools`モジュールの下に`google_search.py`を作成し、内容は以下の通りです:
-
-```python
-from core.tools.tool.builtin_tool import BuiltinTool
-from core.tools.entities.tool_entities import ToolInvokeMessage
-
-from typing import Any, Dict, List, Union
-
-class GoogleSearchTool(BuiltinTool):
- def _invoke(self,
- user_id: str,
- tool_parameters: Dict[str, Any],
- ) -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- ツールを呼び出す
- """
- query = tool_parameters['query']
- result_type = tool_parameters['result_type']
- api_key = self.runtime.credentials['serpapi_api_key']
- result = SerpAPI(api_key).run(query, result_type=result_type)
-
- if result_type == 'text':
- return self.create_text_message(text=result)
- return self.create_link_message(link=result)
-```
-
-### パラメータ
-ツールの全体的なロジックは`_invoke`メソッドにあります。このメソッドは2つのパラメータ(`user_id`と`tool_parameters`)を受け取り、それぞれユーザーIDとツールパラメータを表します。
-
-### 戻り値
-ツールの戻り値として、1つのメッセージまたは複数のメッセージを選択できます。ここでは1つのメッセージを返しています。`create_text_message`と`create_link_message`を使用して、テキストメッセージまたはリンクメッセージを作成できます。複数のメッセージを返す場合は、リストを構築できます(例:`[self.create_text_message('msg1'), self.create_text_message('msg2')]`)。
-
-## 5. プロバイダーコードを準備する
-
-最後に、プロバイダーモジュールの下にプロバイダークラスを作成し、プロバイダーの認証情報検証ロジックを実装する必要があります。認証情報の検証が失敗した場合、`ToolProviderCredentialValidationError`例外が発生します。
-
-`google`モジュールの下に`google.py`を作成し、内容は以下の通りです:
-
-```python
-from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
-from core.tools.errors import ToolProviderCredentialValidationError
-
-from core.tools.provider.builtin.google.tools.google_search import GoogleSearchTool
-
-from typing import Any, Dict
-
-class GoogleProvider(BuiltinToolProviderController):
- def _validate_credentials(self, credentials: Dict[str, Any]) -> None:
- try:
- # 1. ここでGoogleSearchTool()を使ってGoogleSearchToolをインスタンス化する必要があります。これによりGoogleSearchToolのyaml設定が自動的に読み込まれますが、この時点では認証情報は含まれていません
- # 2. 次に、fork_tool_runtimeメソッドを使用して、現在の認証情報をGoogleSearchToolに渡す必要があります
- # 3. 最後に、invokeを呼び出します。パラメータはGoogleSearchToolのyamlで設定されたパラメータルールに従って渡す必要があります
- GoogleSearchTool().fork_tool_runtime(
- meta={
- "credentials": credentials,
- }
- ).invoke(
- user_id='',
- tool_parameters={
- "query": "test",
- "result_type": "link"
- },
- )
- except Exception as e:
- raise ToolProviderCredentialValidationError(str(e))
-```
-
-## 完了
-
-以上のステップが完了すると、このツールをフロントエンドで確認し、Agentで使用することができるようになります。
-
-もちろん、google_searchには認証情報が必要なため、使用する前にフロントエンドで認証情報を入力する必要があります。
-
-
\ No newline at end of file
diff --git a/api/core/tools/docs/zh_Hans/advanced_scale_out.md b/api/core/tools/docs/zh_Hans/advanced_scale_out.md
deleted file mode 100644
index c436a64881..0000000000
--- a/api/core/tools/docs/zh_Hans/advanced_scale_out.md
+++ /dev/null
@@ -1,283 +0,0 @@
-# 高级接入Tool
-
-在开始高级接入之前,请确保你已经阅读过[快速接入](./tool_scale_out.md),并对Dify的工具接入流程有了基本的了解。
-
-## 工具接口
-
-我们在`Tool`类中定义了一系列快捷方法,用于帮助开发者快速构较为复杂的工具
-
-### 消息返回
-
-Dify支持`文本` `链接` `图片` `文件BLOB` `JSON` 等多种消息类型,你可以通过以下几个接口返回不同类型的消息给LLM和用户。
-
-注意,在下面的接口中的部分参数将在后面的章节中介绍。
-
-#### 图片URL
-只需要传递图片的URL即可,Dify会自动下载图片并返回给用户。
-
-```python
- def create_image_message(self, image: str, save_as: str = '') -> ToolInvokeMessage:
- """
- create an image message
-
- :param image: the url of the image
- :param save_as: save as
- :return: the image message
- """
-```
-
-#### 链接
-如果你需要返回一个链接,可以使用以下接口。
-
-```python
- def create_link_message(self, link: str, save_as: str = '') -> ToolInvokeMessage:
- """
- create a link message
-
- :param link: the url of the link
- :param save_as: save as
- :return: the link message
- """
-```
-
-#### 文本
-如果你需要返回一个文本消息,可以使用以下接口。
-
-```python
- def create_text_message(self, text: str, save_as: str = '') -> ToolInvokeMessage:
- """
- create a text message
-
- :param text: the text of the message
- :param save_as: save as
- :return: the text message
- """
-```
-
-#### 文件BLOB
-如果你需要返回文件的原始数据,如图片、音频、视频、PPT、Word、Excel等,可以使用以下接口。
-
-- `blob` 文件的原始数据,bytes类型
-- `meta` 文件的元数据,如果你知道该文件的类型,最好传递一个`mime_type`,否则Dify将使用`application/octet-stream`作为默认类型
-
-```python
- def create_blob_message(self, blob: bytes, meta: dict = None, save_as: str = '') -> ToolInvokeMessage:
- """
- create a blob message
-
- :param blob: the blob
- :param meta: meta
- :param save_as: save as
- :return: the blob message
- """
-```
-
-#### JSON
-如果你需要返回一个格式化的JSON,可以使用以下接口。这通常用于workflow中的节点间的数据传递,当然agent模式中,大部分大模型也都能够阅读和理解JSON。
-
-- `object` 一个Python的字典对象,会被自动序列化为JSON
-
-```python
- def create_json_message(self, object: dict) -> ToolInvokeMessage:
- """
- create a json message
- """
-```
-
-### 快捷工具
-
-在大模型应用中,我们有两种常见的需求:
-- 先将很长的文本进行提前总结,然后再将总结内容传递给LLM,以防止原文本过长导致LLM无法处理
-- 工具获取到的内容是一个链接,需要爬取网页信息后再返回给LLM
-
-为了帮助开发者快速实现这两种需求,我们提供了以下两个快捷工具。
-
-#### 文本总结工具
-
-该工具需要传入user_id和需要进行总结的文本,返回一个总结后的文本,Dify会使用当前工作空间的默认模型对长文本进行总结。
-
-```python
- def summary(self, user_id: str, content: str) -> str:
- """
- summary the content
-
- :param user_id: the user id
- :param content: the content
- :return: the summary
- """
-```
-
-#### 网页爬取工具
-
-该工具需要传入需要爬取的网页链接和一个user_agent(可为空),返回一个包含该网页信息的字符串,其中`user_agent`是可选参数,可以用来识别工具,如果不传递,Dify将使用默认的`user_agent`。
-
-```python
- def get_url(self, url: str, user_agent: str = None) -> str:
- """
- get url from the crawled result
- """
-```
-
-### 变量池
-
-我们在`Tool`中引入了一个变量池,用于存储工具运行过程中产生的变量、文件等,这些变量可以在工具运行过程中被其他工具使用。
-
-下面,我们以`DallE3`和`Vectorizer.AI`为例,介绍如何使用变量池。
-
-- `DallE3`是一个图片生成工具,它可以根据文本生成图片,在这里,我们将让`DallE3`生成一个咖啡厅的Logo
-- `Vectorizer.AI`是一个矢量图转换工具,它可以将图片转换为矢量图,使得图片可以无限放大而不失真,在这里,我们将`DallE3`生成的PNG图标转换为矢量图,从而可以真正被设计师使用。
-
-#### DallE3
-首先我们使用DallE3,在创建完图片以后,我们将图片保存到变量池中,代码如下
-
-```python
-from typing import Any, Dict, List, Union
-from core.tools.entities.tool_entities import ToolInvokeMessage
-from core.tools.tool.builtin_tool import BuiltinTool
-
-from base64 import b64decode
-
-from openai import OpenAI
-
-class DallE3Tool(BuiltinTool):
- def _invoke(self,
- user_id: str,
- tool_parameters: Dict[str, Any],
- ) -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- invoke tools
- """
- client = OpenAI(
- api_key=self.runtime.credentials['openai_api_key'],
- )
-
- # prompt
- prompt = tool_parameters.get('prompt', '')
- if not prompt:
- return self.create_text_message('Please input prompt')
-
- # call openapi dalle3
- response = client.images.generate(
- prompt=prompt, model='dall-e-3',
- size='1024x1024', n=1, style='vivid', quality='standard',
- response_format='b64_json'
- )
-
- result = []
- for image in response.data:
-            # 将所有图片通过save_as参数保存到变量池中,变量名为self.VARIABLE_KEY.IMAGE.value,如果后续有新的图片生成,那么将会覆盖之前的图片
- result.append(self.create_blob_message(blob=b64decode(image.b64_json),
- meta={ 'mime_type': 'image/png' },
- save_as=self.VARIABLE_KEY.IMAGE.value))
-
- return result
-```
-
-我们可以注意到这里我们使用了`self.VARIABLE_KEY.IMAGE.value`作为图片的变量名,为了便于开发者们的工具能够互相配合,我们定义了这个`KEY`,大家可以自由使用,也可以不使用这个`KEY`,传递一个自定义的KEY也是可以的。
-
-#### Vectorizer.AI
-接下来我们使用Vectorizer.AI,将DallE3生成的PNG图标转换为矢量图,我们先来过一遍我们在这里定义的函数,代码如下
-
-```python
-from core.tools.tool.builtin_tool import BuiltinTool
-from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
-from core.tools.errors import ToolProviderCredentialValidationError
-
-from typing import Any, Dict, List, Union
-from httpx import post
-from base64 import b64decode
-
-class VectorizerTool(BuiltinTool):
- def _invoke(self, user_id: str, tool_parameters: Dict[str, Any]) \
- -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- 工具调用,图片变量名需要从这里传递进来,从而我们就可以从变量池中获取到图片
- """
-
-
- def get_runtime_parameters(self) -> List[ToolParameter]:
- """
- 重写工具参数列表,我们可以根据当前变量池里的实际情况来动态生成参数列表,从而LLM可以根据参数列表来生成表单
- """
-
-
- def is_tool_available(self) -> bool:
- """
- 当前工具是否可用,如果当前变量池中没有图片,那么我们就不需要展示这个工具,这里返回False即可
- """
-```
-
-接下来我们来实现这三个函数
-
-```python
-from core.tools.tool.builtin_tool import BuiltinTool
-from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
-from core.tools.errors import ToolProviderCredentialValidationError
-
-from typing import Any, Dict, List, Union
-from httpx import post
-from base64 import b64decode
-
-class VectorizerTool(BuiltinTool):
- def _invoke(self, user_id: str, tool_parameters: Dict[str, Any]) \
- -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- invoke tools
- """
- api_key_name = self.runtime.credentials.get('api_key_name', None)
- api_key_value = self.runtime.credentials.get('api_key_value', None)
-
- if not api_key_name or not api_key_value:
- raise ToolProviderCredentialValidationError('Please input api key name and value')
-
- # 获取image_id,image_id的定义可以在get_runtime_parameters中找到
- image_id = tool_parameters.get('image_id', '')
- if not image_id:
- return self.create_text_message('Please input image id')
-
- # 从变量池中获取到之前DallE生成的图片
- image_binary = self.get_variable_file(self.VARIABLE_KEY.IMAGE)
- if not image_binary:
- return self.create_text_message('Image not found, please request user to generate image firstly.')
-
- # 生成矢量图
- response = post(
- 'https://vectorizer.ai/api/v1/vectorize',
- files={ 'image': image_binary },
- data={ 'mode': 'test' },
- auth=(api_key_name, api_key_value),
- timeout=30
- )
-
- if response.status_code != 200:
- raise Exception(response.text)
-
- return [
- self.create_text_message('the vectorized svg is saved as an image.'),
- self.create_blob_message(blob=response.content,
- meta={'mime_type': 'image/svg+xml'})
- ]
-
- def get_runtime_parameters(self) -> List[ToolParameter]:
- """
- override the runtime parameters
- """
- # 这里,我们重写了工具参数列表,定义了image_id,并设置了它的选项列表为当前变量池中的所有图片,这里的配置与yaml中的配置是一致的
- return [
- ToolParameter.get_simple_instance(
- name='image_id',
- llm_description=f'the image id that you want to vectorize, \
- and the image id should be specified in \
- {[i.name for i in self.list_default_image_variables()]}',
- type=ToolParameter.ToolParameterType.SELECT,
- required=True,
- options=[i.name for i in self.list_default_image_variables()]
- )
- ]
-
- def is_tool_available(self) -> bool:
- # 只有当变量池中有图片时,LLM才需要使用这个工具
- return len(self.list_default_image_variables()) > 0
-```
-
-可以注意到的是,我们这里其实并没有使用到`image_id`,我们已经假设了调用这个工具的时候一定有一张图片在默认的变量池中,所以直接使用了`image_binary = self.get_variable_file(self.VARIABLE_KEY.IMAGE)`来获取图片,在模型能力较弱的情况下,我们建议开发者们也这样做,可以有效提升容错率,避免模型传递错误的参数。
\ No newline at end of file
diff --git a/api/core/tools/docs/zh_Hans/tool_scale_out.md b/api/core/tools/docs/zh_Hans/tool_scale_out.md
deleted file mode 100644
index ec61e4677b..0000000000
--- a/api/core/tools/docs/zh_Hans/tool_scale_out.md
+++ /dev/null
@@ -1,237 +0,0 @@
-# 快速接入Tool
-
-这里我们以GoogleSearch为例,介绍如何快速接入一个工具。
-
-## 1. 准备工具供应商yaml
-
-### 介绍
-这个yaml将包含工具供应商的信息,包括供应商名称、图标、作者等详细信息,以帮助前端灵活展示。
-
-### 示例
-
-我们需要在 `core/tools/provider/builtin`下创建一个`google`模块(文件夹),并创建`google.yaml`,名称必须与模块名称一致。
-
-后续,我们关于这个工具的所有操作都将在这个模块下进行。
-
-```yaml
-identity: # 工具供应商的基本信息
- author: Dify # 作者
- name: google # 名称,唯一,不允许和其他供应商重名
- label: # 标签,用于前端展示
- en_US: Google # 英文标签
- zh_Hans: Google # 中文标签
- description: # 描述,用于前端展示
- en_US: Google # 英文描述
- zh_Hans: Google # 中文描述
- icon: icon.svg # 图标,需要放置在当前模块的_assets文件夹下
- tags: # 标签,用于前端展示
- - search
-
-```
- - `identity` 字段是必须的,它包含了工具供应商的基本信息,包括作者、名称、标签、描述、图标等
- - 图标需要放置在当前模块的`_assets`文件夹下,可以参考[这里](../../provider/builtin/google/_assets/icon.svg)。
- - 标签用于前端展示,可以帮助用户快速找到这个工具供应商,下面列出了目前所支持的所有标签
- ```python
- class ToolLabelEnum(Enum):
- SEARCH = 'search'
- IMAGE = 'image'
- VIDEOS = 'videos'
- WEATHER = 'weather'
- FINANCE = 'finance'
- DESIGN = 'design'
- TRAVEL = 'travel'
- SOCIAL = 'social'
- NEWS = 'news'
- MEDICAL = 'medical'
- PRODUCTIVITY = 'productivity'
- EDUCATION = 'education'
- BUSINESS = 'business'
- ENTERTAINMENT = 'entertainment'
- UTILITIES = 'utilities'
- OTHER = 'other'
- ```
-
-## 2. 准备供应商凭据
-
-Google作为一个第三方工具,使用了SerpApi提供的API,而SerpApi需要一个API Key才能使用,那么就意味着这个工具需要一个凭据才可以使用,而像`wikipedia`这样的工具,就不需要填写凭据字段,可以参考[这里](../../provider/builtin/wikipedia/wikipedia.yaml)。
-
-配置好凭据字段后效果如下:
-```yaml
-identity:
- author: Dify
- name: google
- label:
- en_US: Google
- zh_Hans: Google
- description:
- en_US: Google
- zh_Hans: Google
- icon: icon.svg
-credentials_for_provider: # 凭据字段
- serpapi_api_key: # 凭据字段名称
- type: secret-input # 凭据字段类型
- required: true # 是否必填
- label: # 凭据字段标签
- en_US: SerpApi API key # 英文标签
- zh_Hans: SerpApi API key # 中文标签
- placeholder: # 凭据字段占位符
- en_US: Please input your SerpApi API key # 英文占位符
- zh_Hans: 请输入你的 SerpApi API key # 中文占位符
- help: # 凭据字段帮助文本
- en_US: Get your SerpApi API key from SerpApi # 英文帮助文本
- zh_Hans: 从 SerpApi 获取您的 SerpApi API key # 中文帮助文本
- url: https://serpapi.com/manage-api-key # 凭据字段帮助链接
-
-```
-
-- `type`:凭据字段类型,目前支持`secret-input`、`text-input`、`select` 三种类型,分别对应密码输入框、文本输入框、下拉框,如果为`secret-input`,则会在前端隐藏输入内容,并且后端会对输入内容进行加密。
-
-## 3. 准备工具yaml
-一个供应商底下可以有多个工具,每个工具都需要一个yaml文件来描述,这个文件包含了工具的基本信息、参数、输出等。
-
-仍然以GoogleSearch为例,我们需要在`google`模块下创建一个`tools`模块,并创建`tools/google_search.yaml`,内容如下。
-
-```yaml
-identity: # 工具的基本信息
- name: google_search # 工具名称,唯一,不允许和其他工具重名
- author: Dify # 作者
- label: # 标签,用于前端展示
- en_US: GoogleSearch # 英文标签
- zh_Hans: 谷歌搜索 # 中文标签
-description: # 描述,用于前端展示
- human: # 用于前端展示的介绍,支持多语言
-    en_US: A tool for performing a Google SERP search and extracting snippets and webpages. Input should be a search query.
- zh_Hans: 一个用于执行 Google SERP 搜索并提取片段和网页的工具。输入应该是一个搜索查询。
-  llm: A tool for performing a Google SERP search and extracting snippets and webpages. Input should be a search query. # 传递给LLM的介绍,为了使得LLM更好理解这个工具,我们建议在这里写上关于这个工具尽可能详细的信息,让LLM能够理解并使用这个工具
-parameters: # 参数列表
- - name: query # 参数名称
- type: string # 参数类型
- required: true # 是否必填
- label: # 参数标签
- en_US: Query string # 英文标签
- zh_Hans: 查询语句 # 中文标签
- human_description: # 用于前端展示的介绍,支持多语言
- en_US: used for searching
- zh_Hans: 用于搜索网页内容
- llm_description: key words for searching # 传递给LLM的介绍,同上,为了使得LLM更好理解这个参数,我们建议在这里写上关于这个参数尽可能详细的信息,让LLM能够理解这个参数
- form: llm # 表单类型,llm表示这个参数需要由Agent自行推理出来,前端将不会展示这个参数
- - name: result_type
- type: select # 参数类型
- required: true
- options: # 下拉框选项
- - value: text
- label:
- en_US: text
- zh_Hans: 文本
- - value: link
- label:
- en_US: link
- zh_Hans: 链接
- default: link
- label:
- en_US: Result type
- zh_Hans: 结果类型
- human_description:
- en_US: used for selecting the result type, text or link
- zh_Hans: 用于选择结果类型,使用文本还是链接进行展示
- form: form # 表单类型,form表示这个参数需要由用户在对话开始前在前端填写
-
-```
-
-- `identity` 字段是必须的,它包含了工具的基本信息,包括名称、作者、标签、描述等
-- `parameters` 参数列表
- - `name` (必填)参数名称,唯一,不允许和其他参数重名
- - `type` (必填)参数类型,目前支持`string`、`number`、`boolean`、`select`、`secret-input` 五种类型,分别对应字符串、数字、布尔值、下拉框、加密输入框,对于敏感信息,我们建议使用`secret-input`类型
- - `label`(必填)参数标签,用于前端展示
- - `form` (必填)表单类型,目前支持`llm`、`form`两种类型
- - 在Agent应用中,`llm`表示该参数LLM自行推理,`form`表示要使用该工具可提前设定的参数
- - 在workflow应用中,`llm`和`form`均需要前端填写,但`llm`的参数会做为工具节点的输入变量
- - `required` 是否必填
- - 在`llm`模式下,如果参数为必填,则会要求Agent必须要推理出这个参数
- - 在`form`模式下,如果参数为必填,则会要求用户在对话开始前在前端填写这个参数
- - `options` 参数选项
- - 在`llm`模式下,Dify会将所有选项传递给LLM,LLM可以根据这些选项进行推理
- - 在`form`模式下,`type`为`select`时,前端会展示这些选项
- - `default` 默认值
- - `min` 最小值,当参数类型为`number`时可以设定
- - `max` 最大值,当参数类型为`number`时可以设定
- - `human_description` 用于前端展示的介绍,支持多语言
- - `placeholder` 字段输入框的提示文字,在表单类型为`form`,参数类型为`string`、`number`、`secret-input`时,可以设定,支持多语言
- - `llm_description` 传递给LLM的介绍,为了使得LLM更好理解这个参数,我们建议在这里写上关于这个参数尽可能详细的信息,让LLM能够理解这个参数
-
-
-## 4. 准备工具代码
-当完成工具的配置以后,我们就可以开始编写工具代码了,主要用于实现工具的逻辑。
-
-在`google/tools`模块下创建`google_search.py`,内容如下。
-
-```python
-from core.tools.tool.builtin_tool import BuiltinTool
-from core.tools.entities.tool_entities import ToolInvokeMessage
-
-from typing import Any, Dict, List, Union
-
-class GoogleSearchTool(BuiltinTool):
- def _invoke(self,
- user_id: str,
- tool_parameters: Dict[str, Any],
- ) -> Union[ToolInvokeMessage, List[ToolInvokeMessage]]:
- """
- invoke tools
- """
- query = tool_parameters['query']
- result_type = tool_parameters['result_type']
- api_key = self.runtime.credentials['serpapi_api_key']
- result = SerpAPI(api_key).run(query, result_type=result_type)
-
- if result_type == 'text':
- return self.create_text_message(text=result)
- return self.create_link_message(link=result)
-```
-
-### 参数
-工具的整体逻辑都在`_invoke`方法中,这个方法接收两个参数:`user_id`和`tool_parameters`,分别表示用户ID和工具参数
-
-### 返回数据
-在工具返回时,你可以选择返回一条消息或者多个消息,这里我们返回一条消息,使用`create_text_message`和`create_link_message`可以创建一条文本消息或者一条链接消息。如需返回多条消息,可以使用列表构建,例如`[self.create_text_message('msg1'), self.create_text_message('msg2')]`
-
-## 5. 准备供应商代码
-最后,我们需要在供应商模块下创建一个供应商类,用于实现供应商的凭据验证逻辑,如果凭据验证失败,将会抛出`ToolProviderCredentialValidationError`异常。
-
-在`google`模块下创建`google.py`,内容如下。
-
-```python
-from core.tools.provider.builtin_tool_provider import BuiltinToolProviderController
-from core.tools.errors import ToolProviderCredentialValidationError
-
-from core.tools.provider.builtin.google.tools.google_search import GoogleSearchTool
-
-from typing import Any, Dict
-
-class GoogleProvider(BuiltinToolProviderController):
- def _validate_credentials(self, credentials: Dict[str, Any]) -> None:
- try:
- # 1. 此处需要使用GoogleSearchTool()实例化一个GoogleSearchTool,它会自动加载GoogleSearchTool的yaml配置,但是此时它内部没有凭据信息
- # 2. 随后需要使用fork_tool_runtime方法,将当前的凭据信息传递给GoogleSearchTool
- # 3. 最后invoke即可,参数需要根据GoogleSearchTool的yaml中配置的参数规则进行传递
- GoogleSearchTool().fork_tool_runtime(
- meta={
- "credentials": credentials,
- }
- ).invoke(
- user_id='',
- tool_parameters={
- "query": "test",
- "result_type": "link"
- },
- )
- except Exception as e:
- raise ToolProviderCredentialValidationError(str(e))
-```
-
-## 完成
-当上述步骤完成以后,我们就可以在前端看到这个工具了,并且可以在Agent中使用这个工具。
-
-当然,因为google_search需要一个凭据,在使用之前,还需要在前端配置它的凭据。
-
-
diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py
index cb9b804c77..cf840880bf 100644
--- a/api/core/tools/workflow_as_tool/tool.py
+++ b/api/core/tools/workflow_as_tool/tool.py
@@ -7,6 +7,7 @@ from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod
from core.tools.__base.tool import Tool
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.entities.tool_entities import ToolEntity, ToolInvokeMessage, ToolParameter, ToolProviderType
+from core.tools.errors import ToolInvokeError
from extensions.ext_database import db
from factories.file_factory import build_from_mapping
from models.account import Account
@@ -96,11 +97,8 @@ class WorkflowTool(Tool):
assert isinstance(result, dict)
data = result.get("data", {})
- if data.get("error"):
- raise Exception(data.get("error"))
-
- if data.get("error"):
- raise Exception(data.get("error"))
+ if err := data.get("error"):
+ raise ToolInvokeError(err)
outputs = data.get("outputs")
if outputs is None:
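For context on the hunk above, a minimal, hypothetical sketch of what the typed error enables on the caller side; the `ToolInvokeError` import path and the `invoke(user_id=..., tool_parameters=...)` call shape are taken from elsewhere in this diff, while the helper itself is illustrative only.

```python
from core.tools.errors import ToolInvokeError


def invoke_and_report(tool, user_id: str, parameters: dict) -> dict:
    """Illustrative helper: tell tool-reported failures apart from unexpected bugs."""
    try:
        messages = tool.invoke(user_id=user_id, tool_parameters=parameters)
        return {"status": "succeeded", "messages": messages}
    except ToolInvokeError as e:
        # The workflow's own output data carried an "error" entry.
        return {"status": "failed", "error": str(e), "error_type": type(e).__name__}
```

The `tool_node.py` hunk later in this diff applies the same idea by adding `ToolInvokeError` to the exceptions it converts into a failed node result.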
diff --git a/api/core/workflow/entities/variable_pool.py b/api/core/workflow/entities/variable_pool.py
index 844b46f352..af26864c01 100644
--- a/api/core/workflow/entities/variable_pool.py
+++ b/api/core/workflow/entities/variable_pool.py
@@ -7,7 +7,7 @@ from pydantic import BaseModel, Field
from core.file import File, FileAttribute, file_manager
from core.variables import Segment, SegmentGroup, Variable
-from core.variables.segments import FileSegment
+from core.variables.segments import FileSegment, NoneSegment
from factories import variable_factory
from ..constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
@@ -15,7 +15,6 @@ from ..enums import SystemVariableKey
VariableValue = Union[str, int, float, dict, list, File]
-
VARIABLE_PATTERN = re.compile(r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10})#\}\}")
@@ -131,11 +130,13 @@ class VariablePool(BaseModel):
if attr not in {item.value for item in FileAttribute}:
return None
value = self.get(selector)
- if not isinstance(value, FileSegment):
+ if not isinstance(value, FileSegment | NoneSegment):
return None
- attr = FileAttribute(attr)
- attr_value = file_manager.get_attr(file=value.value, attr=attr)
- return variable_factory.build_segment(attr_value)
+ if isinstance(value, FileSegment):
+ attr = FileAttribute(attr)
+ attr_value = file_manager.get_attr(file=value.value, attr=attr)
+ return variable_factory.build_segment(attr_value)
+ return value
return value
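A short, hypothetical test-style sketch of the behavior this branch is after, assuming the `(node_id, variable_name, attribute)` selector form implied by `VARIABLE_PATTERN` above: when the selected variable holds a null value (a `NoneSegment`) rather than a file, the segment itself is now returned, so templates can render it as empty instead of the lookup collapsing to `None`.

```python
from core.variables.segments import NoneSegment


def check_missing_file_attr(pool) -> None:
    # "node.attachment" holds a null value instead of a File in this scenario;
    # asking for its "url" attribute should now yield the NoneSegment itself.
    segment = pool.get(("node", "attachment", "url"))
    assert isinstance(segment, NoneSegment)
```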
diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py
index 9bb54da3a2..498d15c916 100644
--- a/api/core/workflow/graph_engine/graph_engine.py
+++ b/api/core/workflow/graph_engine/graph_engine.py
@@ -873,11 +873,12 @@ class GraphEngine:
def create_copy(self):
"""
create a graph engine copy
- :return: with a new variable pool instance of graph engine
+ :return: graph engine with a new variable pool and initialized total tokens
"""
new_instance = copy(self)
new_instance.graph_runtime_state = copy(self.graph_runtime_state)
new_instance.graph_runtime_state.variable_pool = deepcopy(self.graph_runtime_state.variable_pool)
+ new_instance.graph_runtime_state.total_tokens = 0
return new_instance
def _handle_continue_on_error(
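As a quick illustration of why the extra line matters (the `engine` argument is assumed to be an existing `GraphEngine`): every parallel branch spawned through `create_copy()` now starts from a zero token count alongside its deep-copied variable pool, so per-branch usage can be aggregated without double-counting whatever the parent already spent.

```python
def spawn_branch(engine):
    # Sketch only: the copy shares the graph but not the mutable run state.
    branch = engine.create_copy()
    assert branch.graph_runtime_state.total_tokens == 0  # fresh counter per branch
    return branch
```

The `iteration_node.py` hunk below drops its manual `total_tokens = 0` reset, which this change makes redundant.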
diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py
index b799e74266..e566770870 100644
--- a/api/core/workflow/nodes/base/node.py
+++ b/api/core/workflow/nodes/base/node.py
@@ -22,7 +22,7 @@ GenericNodeData = TypeVar("GenericNodeData", bound=BaseNodeData)
class BaseNode(Generic[GenericNodeData]):
- _node_data_cls: type[BaseNodeData]
+ _node_data_cls: type[GenericNodeData]
_node_type: NodeType
def __init__(
@@ -57,7 +57,7 @@ class BaseNode(Generic[GenericNodeData]):
self.node_id = node_id
node_data = self._node_data_cls.model_validate(config.get("data", {}))
- self.node_data = cast(GenericNodeData, node_data)
+ self.node_data = node_data
@abstractmethod
def _run(self) -> NodeRunResult | Generator[Union[NodeEvent, "InNodeEvent"], None, None]:
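A minimal sketch of what the tightened generic buys a concrete node; the node and its data model below are hypothetical, but the `_node_data_cls` / `_node_type` pattern and the import paths mirror the `variable_assigner` hunk later in this diff.

```python
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.nodes.base import BaseNode, BaseNodeData
from core.workflow.nodes.enums import NodeType
from models.workflow import WorkflowNodeExecutionStatus


class GreetingNodeData(BaseNodeData):
    greeting: str = "hello"


class GreetingNode(BaseNode[GreetingNodeData]):
    # _node_data_cls is now typed as type[GenericNodeData], so assigning the
    # concrete class lets checkers infer self.node_data as GreetingNodeData
    # without the cast() the old __init__ needed.
    _node_data_cls = GreetingNodeData
    _node_type = NodeType.START  # hypothetical choice for this sketch

    def _run(self) -> NodeRunResult:
        return NodeRunResult(
            status=WorkflowNodeExecutionStatus.SUCCEEDED,
            outputs={"text": self.node_data.greeting},  # typed access, no cast
        )
```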
diff --git a/api/core/workflow/nodes/http_request/executor.py b/api/core/workflow/nodes/http_request/executor.py
index 5ed2cd6164..bf28222de0 100644
--- a/api/core/workflow/nodes/http_request/executor.py
+++ b/api/core/workflow/nodes/http_request/executor.py
@@ -10,6 +10,7 @@ import httpx
from configs import dify_config
from core.file import file_manager
from core.helper import ssrf_proxy
+from core.variables.segments import ArrayFileSegment, FileSegment
from core.workflow.entities.variable_pool import VariablePool
from .entities import (
@@ -57,7 +58,7 @@ class Executor:
params: list[tuple[str, str]] | None
content: str | bytes | None
data: Mapping[str, Any] | None
- files: Mapping[str, tuple[str | None, bytes, str]] | None
+ files: list[tuple[str, tuple[str | None, bytes, str]]] | None
json: Any
headers: dict[str, str]
auth: HttpRequestNodeAuthorization
@@ -207,17 +208,38 @@ class Executor:
self.variable_pool.convert_template(item.key).text: item.file
for item in filter(lambda item: item.type == "file", data)
}
- files: dict[str, Any] = {}
- files = {k: self.variable_pool.get_file(selector) for k, selector in file_selectors.items()}
- files = {k: v for k, v in files.items() if v is not None}
- files = {k: variable.value for k, variable in files.items() if variable is not None}
- files = {
- k: (v.filename, file_manager.download(v), v.mime_type or "application/octet-stream")
- for k, v in files.items()
- if v.related_id is not None
- }
+
+ # get files from file_selectors, add support for array file variables
+ files_list = []
+ for key, selector in file_selectors.items():
+ segment = self.variable_pool.get(selector)
+ if isinstance(segment, FileSegment):
+ files_list.append((key, [segment.value]))
+ elif isinstance(segment, ArrayFileSegment):
+ files_list.append((key, list(segment.value)))
+
+ # get files from file_manager
+ files: dict[str, list[tuple[str | None, bytes, str]]] = {}
+ for key, files_in_segment in files_list:
+ for file in files_in_segment:
+ if file.related_id is not None:
+ file_tuple = (
+ file.filename,
+ file_manager.download(file),
+ file.mime_type or "application/octet-stream",
+ )
+ if key not in files:
+ files[key] = []
+ files[key].append(file_tuple)
+
+ # convert files to list for httpx request
+ if files:
+ self.files = []
+ for key, file_tuples in files.items():
+ for file_tuple in file_tuples:
+ self.files.append((key, file_tuple))
+
self.data = form_data
- self.files = files or None
def _assembling_headers(self) -> dict[str, Any]:
authorization = deepcopy(self.auth)
@@ -344,10 +366,16 @@ class Executor:
body_string = ""
if self.files:
- for k, v in self.files.items():
+ for key, (filename, content, mime_type) in self.files:
body_string += f"--{boundary}\r\n"
- body_string += f'Content-Disposition: form-data; name="{k}"\r\n\r\n'
- body_string += f"{v[1]}\r\n"
+ body_string += f'Content-Disposition: form-data; name="{key}"\r\n\r\n'
+                # append text parts decoded as UTF-8
+                try:
+                    body_string += content.decode("utf-8")
+                except UnicodeDecodeError:
+                    # binary content that is not valid UTF-8 is skipped here
+                    pass
+ body_string += "\r\n"
body_string += f"--{boundary}--\r\n"
elif self.node_data.body:
if self.content:
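One httpx detail the new list shape relies on, shown standalone (the URL and payloads are made up): passing `files` as a list of `(field_name, (filename, content, content_type))` tuples lets the same field name repeat, which is what allows an array file variable to upload several files under one form key.

```python
import httpx

# Two files under the same multipart field name -- the shape self.files now uses.
files = [
    ("attachments", ("a.txt", b"first file", "text/plain")),
    ("attachments", ("b.bin", b"\x00\x01\x02", "application/octet-stream")),
]

response = httpx.post("https://example.com/upload", files=files, timeout=30)
print(response.status_code)
```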
diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py
index 16fccf8bf3..a7d0aefc6d 100644
--- a/api/core/workflow/nodes/iteration/iteration_node.py
+++ b/api/core/workflow/nodes/iteration/iteration_node.py
@@ -590,7 +590,6 @@ class IterationNode(BaseNode[IterationNodeData]):
with flask_app.app_context():
parallel_mode_run_id = uuid.uuid4().hex
graph_engine_copy = graph_engine.create_copy()
- graph_engine_copy.graph_runtime_state.total_tokens = 0
variable_pool_copy = graph_engine_copy.graph_runtime_state.variable_pool
variable_pool_copy.add([self.node_id, "index"], index)
variable_pool_copy.add([self.node_id, "item"], item)
diff --git a/api/core/workflow/nodes/iteration/iteration_start_node.py b/api/core/workflow/nodes/iteration/iteration_start_node.py
index deb5066a14..fe955e47d1 100644
--- a/api/core/workflow/nodes/iteration/iteration_start_node.py
+++ b/api/core/workflow/nodes/iteration/iteration_start_node.py
@@ -5,7 +5,7 @@ from core.workflow.nodes.iteration.entities import IterationStartNodeData
from models.workflow import WorkflowNodeExecutionStatus
-class IterationStartNode(BaseNode):
+class IterationStartNode(BaseNode[IterationStartNodeData]):
"""
Iteration Start Node.
"""
diff --git a/api/core/workflow/nodes/loop/loop_node.py b/api/core/workflow/nodes/loop/loop_node.py
index 5e1f18b63c..65acf1211f 100644
--- a/api/core/workflow/nodes/loop/loop_node.py
+++ b/api/core/workflow/nodes/loop/loop_node.py
@@ -1,6 +1,6 @@
import logging
from collections.abc import Generator, Mapping, Sequence
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from typing import Any, cast
from configs import dify_config
@@ -80,7 +80,7 @@ class LoopNode(BaseNode[LoopNodeData]):
thread_pool_id=self.thread_pool_id,
)
- start_at = datetime.now(timezone.utc).replace(tzinfo=None)
+ start_at = datetime.now(UTC).replace(tzinfo=None)
condition_processor = ConditionProcessor()
# Start Loop event
diff --git a/api/core/workflow/nodes/loop/loop_start_node.py b/api/core/workflow/nodes/loop/loop_start_node.py
index 7fd06273f8..7cf145e4e5 100644
--- a/api/core/workflow/nodes/loop/loop_start_node.py
+++ b/api/core/workflow/nodes/loop/loop_start_node.py
@@ -5,7 +5,7 @@ from core.workflow.nodes.loop.entities import LoopStartNodeData
from models.workflow import WorkflowNodeExecutionStatus
-class LoopStartNode(BaseNode):
+class LoopStartNode(BaseNode[LoopStartNodeData]):
"""
Loop Start Node.
"""
diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py
index 7ec092cfdd..6f0cc3f6d2 100644
--- a/api/core/workflow/nodes/tool/tool_node.py
+++ b/api/core/workflow/nodes/tool/tool_node.py
@@ -9,6 +9,7 @@ from core.file import File, FileTransferMethod
from core.plugin.manager.exc import PluginDaemonClientSideError
from core.plugin.manager.plugin import PluginInstallationManager
from core.tools.entities.tool_entities import ToolInvokeMessage, ToolParameter
+from core.tools.errors import ToolInvokeError
from core.tools.tool_engine import ToolEngine
from core.tools.utils.message_transformer import ToolFileMessageTransformer
from core.variables.segments import ArrayAnySegment
@@ -119,13 +120,14 @@ class ToolNode(BaseNode[ToolNodeData]):
try:
# convert tool messages
yield from self._transform_message(message_stream, tool_info, parameters_for_log)
- except PluginDaemonClientSideError as e:
+ except (PluginDaemonClientSideError, ToolInvokeError) as e:
yield RunCompletedEvent(
run_result=NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
inputs=parameters_for_log,
metadata={NodeRunMetadataKey.TOOL_INFO: tool_info},
error=f"Failed to transform tool message: {str(e)}",
+ error_type=type(e).__name__,
)
)
diff --git a/api/core/workflow/nodes/variable_assigner/v1/node.py b/api/core/workflow/nodes/variable_assigner/v1/node.py
index 9acc76f326..7c7f14c0b8 100644
--- a/api/core/workflow/nodes/variable_assigner/v1/node.py
+++ b/api/core/workflow/nodes/variable_assigner/v1/node.py
@@ -1,6 +1,6 @@
from core.variables import SegmentType, Variable
from core.workflow.entities.node_entities import NodeRunResult
-from core.workflow.nodes.base import BaseNode, BaseNodeData
+from core.workflow.nodes.base import BaseNode
from core.workflow.nodes.enums import NodeType
from core.workflow.nodes.variable_assigner.common import helpers as common_helpers
from core.workflow.nodes.variable_assigner.common.exc import VariableOperatorNodeError
@@ -11,7 +11,7 @@ from .node_data import VariableAssignerData, WriteMode
class VariableAssignerNode(BaseNode[VariableAssignerData]):
- _node_data_cls: type[BaseNodeData] = VariableAssignerData
+ _node_data_cls = VariableAssignerData
_node_type = NodeType.VARIABLE_ASSIGNER
def _run(self) -> NodeRunResult:
diff --git a/api/factories/file_factory.py b/api/factories/file_factory.py
index c4f69f6f6b..8c989e6b58 100644
--- a/api/factories/file_factory.py
+++ b/api/factories/file_factory.py
@@ -289,9 +289,15 @@ def _is_file_valid_with_config(
):
return False
- if input_file_type == FileType.IMAGE and config.image_config:
- if config.image_config.transfer_methods and file_transfer_method not in config.image_config.transfer_methods:
+ if input_file_type == FileType.IMAGE:
+ if (
+ config.image_config
+ and config.image_config.transfer_methods
+ and file_transfer_method not in config.image_config.transfer_methods
+ ):
return False
+ elif config.allowed_file_upload_methods and file_transfer_method not in config.allowed_file_upload_methods:
+ return False
return True
diff --git a/api/fields/external_dataset_fields.py b/api/fields/external_dataset_fields.py
deleted file mode 100644
index 9cc4e14a05..0000000000
--- a/api/fields/external_dataset_fields.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from flask_restful import fields # type: ignore
-
-from libs.helper import TimestampField
-
-external_knowledge_api_query_detail_fields = {
- "id": fields.String,
- "name": fields.String,
- "setting": fields.String,
- "created_by": fields.String,
- "created_at": TimestampField,
-}
diff --git a/api/fields/segment_fields.py b/api/fields/segment_fields.py
index aaac31cf40..82311e5bb9 100644
--- a/api/fields/segment_fields.py
+++ b/api/fields/segment_fields.py
@@ -40,9 +40,3 @@ segment_fields = {
"stopped_at": TimestampField,
"child_chunks": fields.List(fields.Nested(child_chunk_fields)),
}
-
-segment_list_response = {
- "data": fields.List(fields.Nested(segment_fields)),
- "has_more": fields.Boolean,
- "limit": fields.Integer,
-}
diff --git a/api/models/tools.py b/api/models/tools.py
index b941e4ee0f..aef1490729 100644
--- a/api/models/tools.py
+++ b/api/models/tools.py
@@ -102,6 +102,8 @@ class ApiToolProvider(Base):
@property
def user(self) -> Account | None:
+ if not self.user_id:
+ return None
return db.session.query(Account).filter(Account.id == self.user_id).first()
@property
diff --git a/api/poetry.lock b/api/poetry.lock
index 1cb05b1789..5eb956e81a 100644
--- a/api/poetry.lock
+++ b/api/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
[[package]]
name = "aiofiles"
@@ -7,7 +7,6 @@ description = "File support for asyncio."
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"},
{file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
@@ -20,7 +19,6 @@ description = "Happy Eyeballs for asyncio"
optional = false
python-versions = ">=3.9"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "aiohappyeyeballs-2.5.0-py3-none-any.whl", hash = "sha256:0850b580748c7071db98bffff6d4c94028d0d3035acc20fd721a0ce7e8cac35d"},
{file = "aiohappyeyeballs-2.5.0.tar.gz", hash = "sha256:18fde6204a76deeabc97c48bdd01d5801cfda5d6b9c8bbeb1aaaee9d648ca191"},
@@ -33,7 +31,6 @@ description = "Async http client/server framework (asyncio)"
optional = false
python-versions = ">=3.9"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4fe27dbbeec445e6e1291e61d61eb212ee9fed6e47998b27de71d70d3e8777d"},
{file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e64ca2dbea28807f8484c13f684a2f761e69ba2640ec49dacd342763cc265ef"},
@@ -128,7 +125,7 @@ propcache = ">=0.2.0"
yarl = ">=1.17.0,<2.0"
[package.extras]
-speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
+speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
[[package]]
name = "aiomysql"
@@ -137,7 +134,6 @@ description = "MySQL driver for asyncio."
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a"},
{file = "aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67"},
@@ -157,7 +153,6 @@ description = "aiosignal: a list of registered asynchronous callbacks"
optional = false
python-versions = ">=3.9"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"},
{file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"},
@@ -173,7 +168,6 @@ description = "A database migration tool for SQLAlchemy."
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alembic-1.15.1-py3-none-any.whl", hash = "sha256:197de710da4b3e91cf66a826a5b31b5d59a127ab41bd0fc42863e2902ce2bbbe"},
{file = "alembic-1.15.1.tar.gz", hash = "sha256:e1a1c738577bca1f27e68728c910cd389b9a92152ff91d902da649c192e30c49"},
@@ -194,7 +188,6 @@ description = "The alibabacloud credentials module of alibabaCloud Python SDK."
optional = false
python-versions = ">=3.6"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_credentials-0.3.6.tar.gz", hash = "sha256:caa82cf258648dcbe1ca14aeba50ba21845567d6ac3cd48d318e0a445fff7f96"},
]
@@ -209,7 +202,6 @@ description = "The endpoint-util module of alibabaCloud Python SDK."
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_endpoint_util-0.0.3.tar.gz", hash = "sha256:8c0efb76fdcc3af4ca716ef24bbce770201a3f83f98c0afcf81655f684b9c7d2"},
]
@@ -224,7 +216,6 @@ description = "Alibaba Cloud Gateway SPI SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_gateway_spi-0.0.3.tar.gz", hash = "sha256:10d1c53a3fc5f87915fbd6b4985b98338a776e9b44a0263f56643c5048223b8b"},
]
@@ -239,7 +230,6 @@ description = "Alibaba Cloud AnalyticDB for PostgreSQL (20160503) SDK Library fo
optional = false
python-versions = ">=3.6"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_gpdb20160503-3.8.3-py3-none-any.whl", hash = "sha256:06e1c46ce5e4e9d1bcae76e76e51034196c625799d06b2efec8d46a7df323fe8"},
{file = "alibabacloud_gpdb20160503-3.8.3.tar.gz", hash = "sha256:4dfcc0d9cff5a921d529d76f4bf97e2ceb9dc2fa53f00ab055f08509423d8e30"},
@@ -262,7 +252,6 @@ description = "Aliyun Tea OpenApi Library for Python"
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_openapi_util-0.2.2.tar.gz", hash = "sha256:ebbc3906f554cb4bf8f513e43e8a33e8b6a3d4a0ef13617a0e14c3dda8ef52a8"},
]
@@ -278,7 +267,6 @@ description = "Alibaba Cloud OpenPlatform (20191219) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_openplatform20191219-2.0.0-py3-none-any.whl", hash = "sha256:873821c45bca72a6c6ec7a906c9cb21554c122e88893bbac3986934dab30dd36"},
{file = "alibabacloud_openplatform20191219-2.0.0.tar.gz", hash = "sha256:e67f4c337b7542538746592c6a474bd4ae3a9edccdf62e11a32ca61fad3c9020"},
@@ -297,7 +285,6 @@ description = "Aliyun Tea OSS SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_oss_sdk-0.1.0.tar.gz", hash = "sha256:cc5ce36044bae758047fccb56c0cb6204cbc362d18cc3dd4ceac54c8c0897b8b"},
]
@@ -316,7 +303,6 @@ description = "The oss util module of alibabaCloud Python SDK."
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_oss_util-0.0.6.tar.gz", hash = "sha256:d3ecec36632434bd509a113e8cf327dc23e830ac8d9dd6949926f4e334c8b5d6"},
]
@@ -331,7 +317,6 @@ description = "The tea module of alibabaCloud Python SDK."
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud-tea-0.4.2.tar.gz", hash = "sha256:2a86eedc2aa3d24070593f61c1cfd40762b2cabce20336e72c690eb57f165520"},
]
@@ -347,7 +332,6 @@ description = "The tea-fileform module of alibabaCloud Python SDK."
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_tea_fileform-0.0.5.tar.gz", hash = "sha256:fd00a8c9d85e785a7655059e9651f9e91784678881831f60589172387b968ee8"},
]
@@ -362,7 +346,6 @@ description = "Alibaba Cloud openapi SDK Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_tea_openapi-0.3.13.tar.gz", hash = "sha256:77034911dbed41de440e9b6de38cb24646723aa1d0059cefeb3906f8c0a4523e"},
]
@@ -381,7 +364,6 @@ description = "The tea-util module of alibabaCloud Python SDK."
optional = false
python-versions = ">=3.6"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_tea_util-0.3.13.tar.gz", hash = "sha256:8cbdfd2a03fbbf622f901439fa08643898290dd40e1d928347f6346e43f63c90"},
]
@@ -396,7 +378,6 @@ description = "The tea-xml module of alibabaCloud Python SDK."
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "alibabacloud_tea_xml-0.0.2.tar.gz", hash = "sha256:f0135e8148fd7d9c1f029db161863f37f144f837c280cba16c2edeb2f9c549d8"},
]
@@ -411,7 +392,6 @@ description = "The core module of Aliyun Python SDK."
optional = false
python-versions = ">=3.7"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "aliyun-python-sdk-core-2.16.0.tar.gz", hash = "sha256:651caad597eb39d4fad6cf85133dffe92837d53bdf62db9d8f37dab6508bb8f9"},
]
@@ -427,7 +407,6 @@ description = "The kms module of Aliyun Python sdk."
optional = false
python-versions = "*"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "aliyun-python-sdk-kms-2.16.5.tar.gz", hash = "sha256:f328a8a19d83ecbb965ffce0ec1e9930755216d104638cd95ecd362753b813b3"},
{file = "aliyun_python_sdk_kms-2.16.5-py2.py3-none-any.whl", hash = "sha256:24b6cdc4fd161d2942619479c8d050c63ea9cd22b044fe33b60bbb60153786f0"},
@@ -443,7 +422,6 @@ description = "Low-level AMQP client for Python (fork of amqplib)."
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2"},
{file = "amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432"},
@@ -459,7 +437,6 @@ description = "A library for parsing ISO 8601 strings."
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "aniso8601-10.0.0-py2.py3-none-any.whl", hash = "sha256:3c943422efaa0229ebd2b0d7d223effb5e7c89e24d2267ebe76c61a2d8e290cb"},
{file = "aniso8601-10.0.0.tar.gz", hash = "sha256:ff1d0fc2346688c62c0151547136ac30e322896ed8af316ef7602c47da9426cf"},
@@ -475,7 +452,6 @@ description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
@@ -488,7 +464,6 @@ description = "High level compatibility layer for multiple asynchronous event lo
optional = false
python-versions = ">=3.9"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"},
{file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"},
@@ -501,7 +476,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
[package.extras]
doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
-test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""]
trio = ["trio (>=0.26.1)"]
[[package]]
@@ -511,7 +486,6 @@ description = "ASGI specs, helper code, and adapters"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"},
{file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"},
@@ -527,7 +501,7 @@ description = "Timeout context manager for asyncio programs"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" and python_full_version < \"3.11.3\""
+markers = "python_full_version < \"3.11.3\""
files = [
{file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"},
{file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},
@@ -540,19 +514,18 @@ description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.8"
groups = ["main", "lint", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"},
{file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"},
]
[package.extras]
-benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
-tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
-tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
+tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
[[package]]
name = "authlib"
@@ -561,7 +534,6 @@ description = "The ultimate Python library in building OAuth and OpenID Connect
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Authlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:d35800b973099bbadc49b42b256ecb80041ad56b7fe1216a362c7943c088f377"},
{file = "authlib-1.3.1.tar.gz", hash = "sha256:7ae843f03c06c5c0debd63c9db91f9fda64fa62a42a77419fa15fbb7e7a58917"},
@@ -577,7 +549,6 @@ description = "Microsoft Azure Core Library for Python"
optional = false
python-versions = ">=3.8"
groups = ["main", "storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"},
{file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"},
@@ -598,7 +569,6 @@ description = "Microsoft Azure Identity Library for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "azure-identity-1.16.1.tar.gz", hash = "sha256:6d93f04468f240d59246d8afde3091494a5040d4f141cad0f49fc0c399d0d91e"},
{file = "azure_identity-1.16.1-py3-none-any.whl", hash = "sha256:8fb07c25642cd4ac422559a8b50d3e77f73dcc2bbfaba419d06d6c9d7cff6726"},
@@ -617,7 +587,6 @@ description = "Microsoft Azure Blob Storage Client Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "azure-storage-blob-12.13.0.zip", hash = "sha256:53f0d4cd32970ac9ff9b9753f83dd2fb3f9ac30e1d01e71638c436c509bfd884"},
{file = "azure_storage_blob-12.13.0-py3-none-any.whl", hash = "sha256:280a6ab032845bab9627582bee78a50497ca2f14772929b5c5ee8b4605af0cb3"},
@@ -635,7 +604,6 @@ description = "Function decoration for backoff and retry"
optional = false
python-versions = ">=3.7,<4.0"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
{file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
@@ -648,7 +616,6 @@ description = "BCE SDK for python"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,<4,>=2.7"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "bce_python_sdk-0.9.29-py3-none-any.whl", hash = "sha256:6518dc0ada422acd1841eeabcb7f89cfc07e3bb1a4be3c75945cab953907b555"},
{file = "bce_python_sdk-0.9.29.tar.gz", hash = "sha256:326fbd50d57bf6d2fc21d58f589b069e0e84fc0a8733be9575c109293ab08cc4"},
@@ -666,7 +633,6 @@ description = "Modern password hashing for your software and your servers"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"},
{file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"},
@@ -732,7 +698,6 @@ description = "Screen-scraping library"
optional = false
python-versions = ">=3.6.0"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"},
{file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"},
@@ -752,7 +717,6 @@ description = "Python multiprocessing fork with improvements and bugfixes"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb"},
{file = "billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f"},
@@ -765,7 +729,6 @@ description = "Fast, simple object-to-object and broadcast signaling"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"},
{file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"},
@@ -778,7 +741,6 @@ description = "The AWS SDK for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71"},
{file = "boto3-1.35.99.tar.gz", hash = "sha256:e0abd794a7a591d90558e92e29a9f8837d25ece8e3c120e530526fe27eba5fca"},
@@ -799,7 +761,6 @@ description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "botocore-1.35.99-py3-none-any.whl", hash = "sha256:b22d27b6b617fc2d7342090d6129000af2efd20174215948c0d7ae2da0fab445"},
{file = "botocore-1.35.99.tar.gz", hash = "sha256:1eab44e969c39c5f3d9a3104a0836c24715579a455f12b3979a31d7cde51b3c3"},
@@ -820,7 +781,6 @@ description = "Fast NumPy array functions written in C"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Bottleneck-1.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:125436df93751a226eab1732783aa8f6125e88e779587aa61be071fb66e41f9d"},
{file = "Bottleneck-1.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c6df9a60ec6ab88fec934ca864266ba95edd89c490af71dc9cd8afb2a54ebd9"},
@@ -873,7 +833,7 @@ description = "Python bindings for the Brotli compression library"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\""
+markers = "platform_python_implementation != \"PyPy\""
files = [
{file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"},
{file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"},
@@ -885,6 +845,10 @@ files = [
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
+ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
{file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
{file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
{file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
@@ -897,8 +861,14 @@ files = [
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
+ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
{file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
{file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
+ {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
+ {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
{file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
@@ -909,8 +879,24 @@ files = [
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
+ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
{file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
{file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
+ {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
+ {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
+ {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
+ {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
+ {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
+ {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
+ {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
+ {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
+ {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
+ {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
+ {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
+ {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
{file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
@@ -920,6 +906,10 @@ files = [
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
+ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
{file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
{file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
{file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
@@ -931,6 +921,10 @@ files = [
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
+ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
{file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
{file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
{file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
@@ -943,6 +937,10 @@ files = [
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
+ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
{file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
{file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
{file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
@@ -955,6 +953,10 @@ files = [
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
+ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
{file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
{file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
@@ -967,7 +969,7 @@ description = "Python CFFI bindings to the Brotli library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation == \"PyPy\""
+markers = "platform_python_implementation == \"PyPy\""
files = [
{file = "brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851"},
{file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b"},
@@ -1008,7 +1010,6 @@ description = "Dummy package for Beautiful Soup (beautifulsoup4)"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc"},
{file = "bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925"},
@@ -1024,7 +1025,6 @@ description = "A simple, correct Python build frontend"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"},
{file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"},
@@ -1037,7 +1037,7 @@ pyproject_hooks = "*"
[package.extras]
docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"]
-test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"]
+test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0) ; python_version < \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.11\"", "setuptools (>=67.8.0) ; python_version >= \"3.12\"", "wheel (>=0.36.0)"]
typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"]
uv = ["uv (>=0.1.18)"]
virtualenv = ["virtualenv (>=20.0.35)"]
@@ -1049,7 +1049,6 @@ description = "Extensible memoizing collections and decorators"
optional = false
python-versions = ">=3.7"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"},
{file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"},
@@ -1062,7 +1061,6 @@ description = "Distributed Task Queue."
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "celery-5.4.0-py3-none-any.whl", hash = "sha256:369631eb580cf8c51a82721ec538684994f8277637edde2dfc0dacd73ed97f64"},
{file = "celery-5.4.0.tar.gz", hash = "sha256:504a19140e8d3029d5acad88330c541d4c3f64c789d85f94756762d8bca7e706"},
@@ -1083,32 +1081,32 @@ vine = ">=5.1.0,<6.0"
arangodb = ["pyArango (>=2.0.2)"]
auth = ["cryptography (==42.0.5)"]
azureblockblob = ["azure-storage-blob (>=12.15.0)"]
-brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"]
+brotli = ["brotli (>=1.0.0) ; platform_python_implementation == \"CPython\"", "brotlipy (>=0.7.0) ; platform_python_implementation == \"PyPy\""]
cassandra = ["cassandra-driver (>=3.25.0,<4)"]
consul = ["python-consul2 (==0.1.5)"]
cosmosdbsql = ["pydocumentdb (==2.3.5)"]
-couchbase = ["couchbase (>=3.0.0)"]
+couchbase = ["couchbase (>=3.0.0) ; platform_python_implementation != \"PyPy\" and (platform_system != \"Windows\" or python_version < \"3.10\")"]
couchdb = ["pycouchdb (==1.14.2)"]
django = ["Django (>=2.2.28)"]
dynamodb = ["boto3 (>=1.26.143)"]
elasticsearch = ["elastic-transport (<=8.13.0)", "elasticsearch (<=8.13.0)"]
-eventlet = ["eventlet (>=0.32.0)"]
+eventlet = ["eventlet (>=0.32.0) ; python_version < \"3.10\""]
gcs = ["google-cloud-storage (>=2.10.0)"]
gevent = ["gevent (>=1.5.0)"]
-librabbitmq = ["librabbitmq (>=2.0.0)"]
-memcache = ["pylibmc (==1.6.3)"]
+librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""]
+memcache = ["pylibmc (==1.6.3) ; platform_system != \"Windows\""]
mongodb = ["pymongo[srv] (>=4.0.2)"]
msgpack = ["msgpack (==1.0.8)"]
pymemcache = ["python-memcached (>=1.61)"]
-pyro = ["pyro4 (==4.82)"]
+pyro = ["pyro4 (==4.82) ; python_version < \"3.11\""]
pytest = ["pytest-celery[all] (>=1.0.0)"]
redis = ["redis (>=4.5.2,!=4.5.5,<6.0.0)"]
s3 = ["boto3 (>=1.26.143)"]
slmq = ["softlayer-messaging (>=1.0.3)"]
-solar = ["ephem (==4.1.5)"]
+solar = ["ephem (==4.1.5) ; platform_python_implementation != \"PyPy\""]
sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
-sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.4)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"]
-tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"]
+sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.4)", "pycurl (>=7.43.0.5) ; sys_platform != \"win32\" and platform_python_implementation == \"CPython\"", "urllib3 (>=1.26.16)"]
+tblib = ["tblib (>=1.3.0) ; python_version < \"3.8.0\"", "tblib (>=1.5.0) ; python_version >= \"3.8.0\""]
yaml = ["PyYAML (>=3.10)"]
zookeeper = ["kazoo (>=1.3.1)"]
zstd = ["zstandard (==0.22.0)"]
@@ -1120,7 +1118,6 @@ description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
groups = ["main", "storage", "tools", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
{file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
@@ -1202,7 +1199,7 @@ files = [
{file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
{file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
]
-markers = {main = "python_version == \"3.11\" or python_version >= \"3.12\"", storage = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\"", vdb = "python_version == \"3.11\" or python_version >= \"3.12\""}
+markers = {storage = "platform_python_implementation != \"PyPy\"", vdb = "python_version < \"3.12\" or platform_python_implementation != \"PyPy\""}
[package.dependencies]
pycparser = "*"
@@ -1214,7 +1211,6 @@ description = "Universal encoding detector for Python 3"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"},
{file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"},
@@ -1227,7 +1223,6 @@ description = "The Real First Universal Charset Detector. Open, modern and activ
optional = false
python-versions = ">=3.7"
groups = ["main", "storage", "tools", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"},
{file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"},
@@ -1330,7 +1325,6 @@ description = "Chromas fork of hnswlib"
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "chroma_hnswlib-0.7.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f35192fbbeadc8c0633f0a69c3d3e9f1a4eab3a46b65458bbcbcabdd9e895c36"},
{file = "chroma_hnswlib-0.7.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f007b608c96362b8f0c8b6b2ac94f67f83fcbabd857c378ae82007ec92f4d82"},
@@ -1373,7 +1367,6 @@ description = "Chroma."
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "chromadb-0.5.20-py3-none-any.whl", hash = "sha256:9550ba1b6dce911e35cac2568b301badf4b42f457b99a432bdeec2b6b9dd3680"},
{file = "chromadb-0.5.20.tar.gz", hash = "sha256:19513a23b2d20059866216bfd80195d1d4a160ffba234b8899f5e80978160ca7"},
@@ -1416,7 +1409,6 @@ description = "Python Circuit Breaker pattern implementation"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "circuitbreaker-2.0.0-py2.py3-none-any.whl", hash = "sha256:c8c6f044b616cd5066368734ce4488020392c962b4bd2869d406d883c36d9859"},
{file = "circuitbreaker-2.0.0.tar.gz", hash = "sha256:28110761ca81a2accbd6b33186bc8c433e69b0933d85e89f280028dbb8c1dd14"},
@@ -1429,7 +1421,6 @@ description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
groups = ["main", "dev", "lint", "tools", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
{file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
@@ -1445,7 +1436,6 @@ description = "click_default_group"
optional = false
python-versions = ">=2.7"
groups = ["lint"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "click_default_group-1.2.4-py2.py3-none-any.whl", hash = "sha256:9b60486923720e7fc61731bdb32b617039aba820e22e1c88766b1125592eaa5f"},
{file = "click_default_group-1.2.4.tar.gz", hash = "sha256:eb3f3c99ec0d456ca6cd2a7f08f7d4e91771bef51b01bdd9580cc6450fe1251e"},
@@ -1464,7 +1454,6 @@ description = "Enables git-like *did-you-mean* feature in click"
optional = false
python-versions = ">=3.6.2"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"},
{file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"},
@@ -1480,7 +1469,6 @@ description = "An extension module for click to enable registering CLI commands
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"},
{file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"},
@@ -1499,7 +1487,6 @@ description = "REPL plugin for Click"
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"},
{file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"},
@@ -1519,7 +1506,6 @@ description = "ClickHouse Database Core Driver for Python, Pandas, and Superset"
optional = false
python-versions = "~=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "clickhouse-connect-0.7.19.tar.gz", hash = "sha256:ce8f21f035781c5ef6ff57dc162e8150779c009b59f14030ba61f8c9c10c06d0"},
{file = "clickhouse_connect-0.7.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6ac74eb9e8d6331bae0303d0fc6bdc2125aa4c421ef646348b588760b38c29e9"},
@@ -1611,7 +1597,6 @@ description = "A Python module to bypass Cloudflare's anti-bot page."
optional = false
python-versions = "*"
groups = ["tools"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "cloudscraper-1.2.71-py2.py3-none-any.whl", hash = "sha256:76f50ca529ed2279e220837befdec892626f9511708e200d48d5bb76ded679b0"},
{file = "cloudscraper-1.2.71.tar.gz", hash = "sha256:429c6e8aa6916d5bad5c8a5eac50f3ea53c9ac22616f6cb21b18dcc71517d0d3"},
@@ -1633,7 +1618,7 @@ files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
-markers = {main = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_system == \"Windows\" or sys_platform == \"win32\")", dev = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_system == \"Windows\" or sys_platform == \"win32\")", lint = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_system == \"Windows\"", tools = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_system == \"Windows\"", vdb = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_system == \"Windows\" or os_name == \"nt\" or sys_platform == \"win32\")"}
+markers = {main = "platform_system == \"Windows\" or sys_platform == \"win32\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\"", lint = "platform_system == \"Windows\"", tools = "platform_system == \"Windows\"", vdb = "platform_system == \"Windows\" or os_name == \"nt\" or sys_platform == \"win32\""}
[[package]]
name = "coloredlogs"
@@ -1642,7 +1627,6 @@ description = "Colored terminal output for Python's logging module"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"},
{file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"},
@@ -1661,7 +1645,6 @@ description = "cos-python-sdk-v5"
optional = false
python-versions = "*"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "cos-python-sdk-v5-1.9.30.tar.gz", hash = "sha256:a23fd090211bf90883066d90cd74317860aa67c6d3aa80fe5e44b18c7e9b2a81"},
]
@@ -1680,7 +1663,6 @@ description = "Python Client for Couchbase"
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "couchbase-4.3.5-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:7b354e59ebd3da994b54fa48859116e59d72394307f52783b83cb76d125414d5"},
{file = "couchbase-4.3.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3a701989e3539faf8b50278337a1df88c6713b1da2d4eb7c1161c0c73c618a3a"},
@@ -1722,7 +1704,6 @@ description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"},
{file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"},
@@ -1787,7 +1768,7 @@ files = [
]
[package.extras]
-toml = ["tomli"]
+toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
[[package]]
name = "crcmod"
@@ -1796,7 +1777,6 @@ description = "CRC Generator"
optional = false
python-versions = "*"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "crcmod-1.7.tar.gz", hash = "sha256:dc7051a0db5f2bd48665a990d3ec1cc305a466a77358ca4492826f41f283601e"},
]
@@ -1808,7 +1788,6 @@ description = "cryptography is a package which provides cryptographic recipes an
optional = false
python-versions = "!=3.9.0,!=3.9.1,>=3.7"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"},
{file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"},
@@ -1851,10 +1830,10 @@ files = [
cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
[package.extras]
-docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""]
docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
-nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"]
-pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
+nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""]
+pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
sdist = ["build (>=1.0.0)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
@@ -1867,7 +1846,6 @@ description = "Easily serialize dataclasses to and from JSON."
optional = false
python-versions = "<4.0,>=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"},
{file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"},
@@ -1884,7 +1862,6 @@ description = "Decorators for Humans"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"},
{file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"},
@@ -1897,7 +1874,6 @@ description = "XML bomb protection for Python stdlib modules"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
{file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
@@ -1910,7 +1886,6 @@ description = "Python @deprecated decorator to deprecate old python classes, fun
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Deprecated-1.2.18-py2.py3-none-any.whl", hash = "sha256:bd5011788200372a32418f888e326a09ff80d0214bd961147cfed01b5c018eec"},
{file = "deprecated-1.2.18.tar.gz", hash = "sha256:422b6f6d859da6f2ef57857761bfb392480502a64c3028ca9bbe86085d72115d"},
@@ -1920,7 +1895,7 @@ files = [
wrapt = ">=1.10,<2"
[package.extras]
-dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"]
+dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"]
[[package]]
name = "deprecation"
@@ -1929,7 +1904,6 @@ description = "A library to handle automated deprecations"
optional = false
python-versions = "*"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"},
{file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"},
@@ -1945,7 +1919,6 @@ description = "Distro - an OS platform information API"
optional = false
python-versions = ">=3.6"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
{file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
@@ -1958,7 +1931,6 @@ description = "Parse Python docstrings in reST, Google and Numpydoc format"
optional = false
python-versions = ">=3.6,<4.0"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"},
{file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"},
@@ -1971,7 +1943,6 @@ description = "Linting dotenv files like a charm!"
optional = false
python-versions = ">=3.9,<4.0"
groups = ["lint"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "dotenv_linter-0.5.0-py3-none-any.whl", hash = "sha256:fd01cca7f2140cb1710f49cbc1bf0e62397a75a6f0522d26a8b9b2331143c8bd"},
{file = "dotenv_linter-0.5.0.tar.gz", hash = "sha256:4862a8393e5ecdfb32982f1b32dbc006fff969a7b3c8608ba7db536108beeaea"},
@@ -1991,7 +1962,6 @@ description = "Module for converting between datetime.timedelta and Go's Duratio
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"},
{file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"},
@@ -2004,7 +1974,6 @@ description = "Transport classes and utilities shared among Python Elastic clien
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "elastic_transport-8.17.0-py3-none-any.whl", hash = "sha256:59f553300866750e67a38828fede000576562a0e66930c641adb75249e0c95af"},
{file = "elastic_transport-8.17.0.tar.gz", hash = "sha256:e755f38f99fa6ec5456e236b8e58f0eb18873ac8fe710f74b91a16dd562de2a5"},
@@ -2024,7 +1993,6 @@ description = "Python client for Elasticsearch"
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "elasticsearch-8.14.0-py3-none-any.whl", hash = "sha256:cef8ef70a81af027f3da74a4f7d9296b390c636903088439087b8262a468c130"},
{file = "elasticsearch-8.14.0.tar.gz", hash = "sha256:aa2490029dd96f4015b333c1827aa21fd6c0a4d223b00dfb0fe933b8d09a511b"},
@@ -2046,7 +2014,6 @@ description = "Emoji for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "emoji-2.14.1-py3-none-any.whl", hash = "sha256:35a8a486c1460addb1499e3bf7929d3889b2e2841a57401903699fef595e942b"},
{file = "emoji-2.14.1.tar.gz", hash = "sha256:f8c50043d79a2c1410ebfae833ae1868d5941a67a6cd4d18377e2eb0bd79346b"},
@@ -2062,7 +2029,6 @@ description = "OBS Python SDK"
optional = false
python-versions = "*"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "esdk-obs-python-3.24.6.1.tar.gz", hash = "sha256:c45fed143e99d9256c8560c1d78f651eae0d2e809d16e962f8b286b773c33bf0"},
]
@@ -2077,7 +2043,6 @@ description = "An implementation of lxml.xmlfile for the standard library"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"},
{file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"},
@@ -2090,7 +2055,6 @@ description = "Like `typing._eval_type`, but lets older Python versions use newe
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"},
{file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"},
@@ -2106,7 +2070,6 @@ description = "Faker is a Python package that generates fake data for you."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Faker-32.1.0-py3-none-any.whl", hash = "sha256:c77522577863c264bdc9dad3a2a750ad3f7ee43ff8185072e482992288898814"},
{file = "faker-32.1.0.tar.gz", hash = "sha256:aac536ba04e6b7beb2332c67df78485fc29c1880ff723beac6d1efd45e2f10f5"},
@@ -2123,7 +2086,6 @@ description = "FastAPI framework, high performance, easy to learn, fast to code,
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "fastapi-0.115.11-py3-none-any.whl", hash = "sha256:32e1541b7b74602e4ef4a0260ecaf3aadf9d4f19590bba3e1bf2ac4666aa2c64"},
{file = "fastapi-0.115.11.tar.gz", hash = "sha256:cc81f03f688678b92600a65a5e618b93592c65005db37157147204d8924bf94f"},
@@ -2145,7 +2107,6 @@ description = "A platform independent file lock."
optional = false
python-versions = ">=3.9"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"},
{file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"},
@@ -2154,7 +2115,7 @@ files = [
[package.extras]
docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"]
-typing = ["typing-extensions (>=4.12.2)"]
+typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""]
[[package]]
name = "filetype"
@@ -2163,7 +2124,6 @@ description = "Infer file type and MIME type of any file/buffer. No external dep
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"},
{file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"},
@@ -2176,7 +2136,6 @@ description = "A simple framework for building complex web applications."
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136"},
{file = "flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac"},
@@ -2200,7 +2159,6 @@ description = "Compress responses in your Flask app with gzip, deflate, brotli o
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Flask_Compress-1.17-py3-none-any.whl", hash = "sha256:415131f197c41109f08e8fdfc3a6628d83d81680fb5ecd0b3a97410e02397b20"},
{file = "flask_compress-1.17.tar.gz", hash = "sha256:1ebb112b129ea7c9e7d6ee6d5cc0d64f226cbc50c4daddf1a58b9bd02253fbd8"},
@@ -2222,7 +2180,6 @@ description = "A Flask extension adding a decorator for CORS support"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Flask_Cors-4.0.2-py2.py3-none-any.whl", hash = "sha256:38364faf1a7a5d0a55bd1d2e2f83ee9e359039182f5e6a029557e1f56d92c09a"},
{file = "flask_cors-4.0.2.tar.gz", hash = "sha256:493b98e2d1e2f1a4720a7af25693ef2fe32fbafec09a2f72c59f3e475eda61d2"},
@@ -2238,7 +2195,6 @@ description = "User authentication and session management for Flask."
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Flask-Login-0.6.3.tar.gz", hash = "sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333"},
{file = "Flask_Login-0.6.3-py3-none-any.whl", hash = "sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d"},
@@ -2255,7 +2211,6 @@ description = "SQLAlchemy database migrations for Flask applications using Alemb
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Flask-Migrate-4.0.7.tar.gz", hash = "sha256:dff7dd25113c210b069af280ea713b883f3840c1e3455274745d7355778c8622"},
{file = "Flask_Migrate-4.0.7-py3-none-any.whl", hash = "sha256:5c532be17e7b43a223b7500d620edae33795df27c75811ddf32560f7d48ec617"},
@@ -2273,7 +2228,6 @@ description = "Simple framework for creating REST APIs"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Flask-RESTful-0.3.10.tar.gz", hash = "sha256:fe4af2ef0027df8f9b4f797aba20c5566801b6ade995ac63b588abf1a59cec37"},
{file = "Flask_RESTful-0.3.10-py2.py3-none-any.whl", hash = "sha256:1cf93c535172f112e080b0d4503a8d15f93a48c88bdd36dd87269bdaf405051b"},
@@ -2295,7 +2249,6 @@ description = "Add SQLAlchemy support to your Flask application."
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "flask_sqlalchemy-3.1.1-py3-none-any.whl", hash = "sha256:4ba4be7f419dc72f4efd8802d69974803c37259dd42f3913b0dcf75c9447e0a0"},
{file = "flask_sqlalchemy-3.1.1.tar.gz", hash = "sha256:e4b68bb881802dda1a7d878b2fc84c06d1ee57fb40b874d3dc97dabfa36b8312"},
@@ -2312,7 +2265,6 @@ description = "The FlatBuffers serialization format for Python"
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "flatbuffers-25.2.10-py2.py3-none-any.whl", hash = "sha256:ebba5f4d5ea615af3f7fd70fc310636fbb2bbd1f566ac0a23d98dd412de50051"},
{file = "flatbuffers-25.2.10.tar.gz", hash = "sha256:97e451377a41262f8d9bd4295cc836133415cc03d8cb966410a4af92eb00d26e"},
@@ -2325,7 +2277,6 @@ description = "A list-like structure which implements collections.abc.MutableSeq
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"},
{file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"},
@@ -2428,7 +2379,6 @@ description = "File-system specification"
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "fsspec-2025.3.0-py3-none-any.whl", hash = "sha256:efb87af3efa9103f94ca91a7f8cb7a4df91af9f74fc106c9c7ea0efd7277c1b3"},
{file = "fsspec-2025.3.0.tar.gz", hash = "sha256:a935fd1ea872591f2b5148907d103488fc523295e6c64b835cfad8c3eca44972"},
@@ -2469,7 +2419,6 @@ description = "Clean single-source support for Python 3 and 2"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"},
{file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"},
@@ -2482,7 +2431,6 @@ description = "Coroutine-based network library"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "gevent-24.11.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:92fe5dfee4e671c74ffaa431fd7ffd0ebb4b339363d24d0d944de532409b935e"},
{file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7bfcfe08d038e1fa6de458891bca65c1ada6d145474274285822896a858c870"},
@@ -2531,11 +2479,11 @@ greenlet = {version = ">=3.1.1", markers = "platform_python_implementation == \"
"zope.interface" = "*"
[package.extras]
-dnspython = ["dnspython (>=1.16.0,<2.0)", "idna"]
+dnspython = ["dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\""]
docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"]
-monitor = ["psutil (>=5.7.0)"]
-recommended = ["cffi (>=1.17.1)", "dnspython (>=1.16.0,<2.0)", "idna", "psutil (>=5.7.0)"]
-test = ["cffi (>=1.17.1)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idna", "objgraph", "psutil (>=5.7.0)", "requests"]
+monitor = ["psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""]
+recommended = ["cffi (>=1.17.1) ; platform_python_implementation == \"CPython\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""]
+test = ["cffi (>=1.17.1) ; platform_python_implementation == \"CPython\"", "coverage (>=5.0) ; sys_platform != \"win32\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "objgraph", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\"", "requests"]
[[package]]
name = "gmpy2"
@@ -2544,7 +2492,6 @@ description = "gmpy2 interface to GMP, MPFR, and MPC for Python 3.7+"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "gmpy2-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:431d599e1542b6e0b3618d3e296702c25215c97fb461d596e27adbe69d765dc6"},
{file = "gmpy2-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e51848975837751d1038e82d006e8bb488b179f093ba7fc8a59e1d8a2c61663"},
@@ -2608,7 +2555,6 @@ description = "Python bindings to the Google search engine."
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google-3.0.0-py2.py3-none-any.whl", hash = "sha256:889cf695f84e4ae2c55fbc0cfdaf4c1e729417fa52ab1db0485202ba173e4935"},
{file = "google-3.0.0.tar.gz", hash = "sha256:143530122ee5130509ad5e989f0512f7cb218b2d4eddbafbad40fd10e8d8ccbe"},
@@ -2624,7 +2570,6 @@ description = "Google API client core library"
optional = false
python-versions = ">=3.7"
groups = ["main", "storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"},
{file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"},
@@ -2640,7 +2585,7 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4
requests = ">=2.18.0,<3.0.0.dev0"
[package.extras]
-grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"]
+grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0) ; python_version >= \"3.11\""]
grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
@@ -2651,7 +2596,6 @@ description = "Google API Client Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03"},
{file = "google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913"},
@@ -2671,7 +2615,6 @@ description = "Google Authentication Library"
optional = false
python-versions = ">=3.7"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"},
{file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"},
@@ -2696,7 +2639,6 @@ description = "Google Authentication Library: httplib2 transport"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"},
{file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"},
@@ -2713,7 +2655,6 @@ description = "Vertex AI API client library"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google-cloud-aiplatform-1.49.0.tar.gz", hash = "sha256:e6e6d01079bb5def49e4be4db4d12b13c624b5c661079c869c13c855e5807429"},
{file = "google_cloud_aiplatform-1.49.0-py2.py3-none-any.whl", hash = "sha256:8072d9e0c18d8942c704233d1a93b8d6312fc7b278786a283247950e28ae98df"},
@@ -2735,9 +2676,9 @@ shapely = "<3.0.0dev"
[package.extras]
autologging = ["mlflow (>=1.27.0,<=2.1.1)"]
cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"]
-datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"]
+datasets = ["pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0dev) ; python_version < \"3.11\""]
endpoint = ["requests (>=2.28.1)"]
-full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"]
+full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0dev) ; python_version < \"3.11\"", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.9.3) ; python_version == \"3.11\"", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev) ; python_version <= \"3.11\"", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"]
langchain = ["langchain (>=0.1.13,<0.2)", "langchain-core (<0.2)", "langchain-google-vertexai (<0.2)"]
langchain-testing = ["absl-py", "cloudpickle (>=2.2.1,<3.0)", "langchain (>=0.1.13,<0.2)", "langchain-core (<0.2)", "langchain-google-vertexai (<0.2)", "pydantic (>=2.6.3,<3)", "pytest-xdist"]
lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"]
@@ -2747,11 +2688,11 @@ prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23
preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"]
private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"]
rapid-evaluation = ["nest-asyncio (>=1.0.0,<1.6.0)", "pandas (>=1.0.0,<2.2.0)"]
-ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)"]
-ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "scikit-learn", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
+ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.9.3) ; python_version == \"3.11\""]
+ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.9.3) ; python_version == \"3.11\"", "ray[train] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "scikit-learn", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"]
reasoningengine = ["cloudpickle (>=2.2.1,<3.0)", "pydantic (>=2.6.3,<3)"]
-tensorboard = ["tensorflow (>=2.3.0,<3.0.0dev)"]
-testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
+tensorboard = ["tensorflow (>=2.3.0,<3.0.0dev) ; python_version <= \"3.11\""]
+testing = ["bigframes ; python_version >= \"3.10\"", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1) ; python_version == \"3.11\"", "pyarrow (>=14.0.0) ; python_version >= \"3.12\"", "pyarrow (>=3.0.0,<8.0dev) ; python_version < \"3.11\"", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3) ; python_version < \"3.11\"", "ray[default] (>=2.5,<=2.9.3) ; python_version == \"3.11\"", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0) ; python_version <= \"3.11\"", "tensorflow (==2.16.1) ; python_version > \"3.11\"", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev) ; python_version <= \"3.11\"", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0) ; python_version <= \"3.11\"", "torch (>=2.2.0) ; python_version > \"3.11\"", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"]
vizier = ["google-vizier (>=0.1.6)"]
xai = ["tensorflow (>=2.3.0,<3.0.0dev)"]
@@ -2762,7 +2703,6 @@ description = "Google BigQuery API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877"},
{file = "google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6"},
@@ -2780,12 +2720,12 @@ requests = ">=2.21.0,<3.0.0dev"
[package.extras]
all = ["google-cloud-bigquery[bigquery-v2,bqstorage,geopandas,ipython,ipywidgets,opentelemetry,pandas,tqdm]"]
bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)"]
-bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"]
+bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "pyarrow (>=3.0.0)"]
geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<2.0dev)"]
ipython = ["bigquery-magics (>=0.1.0)"]
ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"]
opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"]
-pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pandas-gbq (>=0.26.1)", "pyarrow (>=3.0.0)"]
+pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=1.1.0)", "pandas-gbq (>=0.26.1) ; python_version >= \"3.8\"", "pyarrow (>=3.0.0)"]
tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"]
[[package]]
@@ -2795,7 +2735,6 @@ description = "Google Cloud API client core library"
optional = false
python-versions = ">=3.7"
groups = ["main", "storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google_cloud_core-2.4.2-py2.py3-none-any.whl", hash = "sha256:7459c3e83de7cb8b9ecfec9babc910efb4314030c56dd798eaad12c426f7d180"},
{file = "google_cloud_core-2.4.2.tar.gz", hash = "sha256:a4fcb0e2fcfd4bfe963837fad6d10943754fd79c1a50097d68540b6eb3d67f35"},
@@ -2815,7 +2754,6 @@ description = "Google Cloud Resource Manager API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google_cloud_resource_manager-1.14.1-py2.py3-none-any.whl", hash = "sha256:68340599f85ebf07a6e18487e460ea07cc15e132068f6b188786d01c2cf25518"},
{file = "google_cloud_resource_manager-1.14.1.tar.gz", hash = "sha256:41e9e546aaa03d5160cdfa2341dbe81ef7596706c300a89b94c429f1f3411f87"},
@@ -2835,7 +2773,6 @@ description = "Google Cloud Storage API client library"
optional = false
python-versions = ">=3.7"
groups = ["main", "storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"},
{file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"},
@@ -2859,7 +2796,6 @@ description = "A python wrapper of the C library 'Google CRC32C'"
optional = false
python-versions = ">=3.9"
groups = ["main", "storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa"},
{file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9"},
@@ -2900,7 +2836,6 @@ description = "Utilities for Google Media Downloads and Resumable Uploads"
optional = false
python-versions = ">=3.7"
groups = ["main", "storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"},
{file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"},
@@ -2920,7 +2855,6 @@ description = "Common protobufs used in Google APIs"
optional = false
python-versions = ">=3.7"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"},
{file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"},
@@ -2940,7 +2874,6 @@ description = "Python Client Library for Supabase Auth"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "gotrue-2.11.4-py3-none-any.whl", hash = "sha256:712e5018acc00d93cfc6d7bfddc3114eb3c420ab03b945757a8ba38c5fc3caa8"},
{file = "gotrue-2.11.4.tar.gz", hash = "sha256:a9ced242b16c6d6bedc43bca21bbefea1ba5fb35fcdaad7d529342099d3b1767"},
@@ -3032,7 +2965,7 @@ files = [
{file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"},
{file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"},
]
-markers = {main = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_python_implementation == \"CPython\")", dev = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")", vdb = "(python_version == \"3.11\" or python_version >= \"3.12\") and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
+markers = {main = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_python_implementation == \"CPython\"", dev = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\"", vdb = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
[package.extras]
docs = ["Sphinx", "furo"]
@@ -3045,7 +2978,6 @@ description = "IAM API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "grpc_google_iam_v1-0.14.1-py2.py3-none-any.whl", hash = "sha256:b4eca35b2231dd76066ebf1728f3cd30d51034db946827ef63ef138da14eea16"},
{file = "grpc_google_iam_v1-0.14.1.tar.gz", hash = "sha256:14149f37af0e5779fa8a22a8ae588663269e8a479d9c2e69a5056e589bf8a891"},
@@ -3063,7 +2995,6 @@ description = "HTTP/2-based RPC framework"
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "grpcio-1.67.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f"},
{file = "grpcio-1.67.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d"},
@@ -3132,7 +3063,6 @@ description = "Status proto mapping for gRPC"
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "grpcio-status-1.62.3.tar.gz", hash = "sha256:289bdd7b2459794a12cf95dc0cb727bd4a1742c37bd823f760236c937e53a485"},
{file = "grpcio_status-1.62.3-py3-none-any.whl", hash = "sha256:f9049b762ba8de6b1086789d8315846e094edac2c50beaf462338b301a8fd4b8"},
@@ -3150,7 +3080,6 @@ description = "Protobuf code generator for gRPC"
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833"},
{file = "grpcio_tools-1.62.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2f968b049c2849540751ec2100ab05e8086c24bead769ca734fdab58698408c1"},
@@ -3214,7 +3143,6 @@ description = "WSGI HTTP Server for UNIX"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"},
{file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"},
@@ -3237,7 +3165,6 @@ description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.7"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
@@ -3250,7 +3177,6 @@ description = "Pure-Python HTTP/2 protocol implementation"
optional = false
python-versions = ">=3.9"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0"},
{file = "h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f"},
@@ -3267,7 +3193,6 @@ description = "Python wrapper for hiredis"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:2892db9db21f0cf7cc298d09f85d3e1f6dc4c4c24463ab67f79bc7a006d51867"},
{file = "hiredis-3.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:93cfa6cc25ee2ceb0be81dc61eca9995160b9e16bdb7cca4a00607d57e998918"},
@@ -3387,7 +3312,6 @@ description = "Pure-Python HPACK header encoding"
optional = false
python-versions = ">=3.9"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496"},
{file = "hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca"},
@@ -3400,7 +3324,6 @@ description = "HTML parser based on the WHATWG HTML specification"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"},
{file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"},
@@ -3411,10 +3334,10 @@ six = ">=1.9"
webencodings = "*"
[package.extras]
-all = ["chardet (>=2.2)", "genshi", "lxml"]
+all = ["chardet (>=2.2)", "genshi", "lxml ; platform_python_implementation == \"CPython\""]
chardet = ["chardet (>=2.2)"]
genshi = ["genshi"]
-lxml = ["lxml"]
+lxml = ["lxml ; platform_python_implementation == \"CPython\""]
[[package]]
name = "httpcore"
@@ -3423,7 +3346,6 @@ description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"},
{file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"},
@@ -3446,7 +3368,6 @@ description = "A comprehensive HTTP client library."
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"},
{file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"},
@@ -3462,7 +3383,6 @@ description = "A collection of framework independent HTTP protocol utils."
optional = false
python-versions = ">=3.8.0"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"},
{file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"},
@@ -3519,7 +3439,6 @@ description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"},
{file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"},
@@ -3535,7 +3454,7 @@ sniffio = "*"
socksio = {version = "==1.*", optional = true, markers = "extra == \"socks\""}
[package.extras]
-brotli = ["brotli", "brotlicffi"]
+brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
@@ -3548,7 +3467,6 @@ description = "Client library to download and publish models, datasets and other
optional = false
python-versions = ">=3.8.0"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "huggingface_hub-0.29.2-py3-none-any.whl", hash = "sha256:c56f20fca09ef19da84dcde2b76379ecdaddf390b083f59f166715584953307d"},
{file = "huggingface_hub-0.29.2.tar.gz", hash = "sha256:590b29c0dcbd0ee4b7b023714dc1ad8563fe4a68a91463438b74e980d28afaf3"},
@@ -3584,7 +3502,6 @@ description = "Human friendly output for text interfaces using Python"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"},
{file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"},
@@ -3600,7 +3517,6 @@ description = "Pure-Python HTTP/2 framing"
optional = false
python-versions = ">=3.9"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5"},
{file = "hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08"},
@@ -3613,7 +3529,6 @@ description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
groups = ["main", "storage", "tools", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
@@ -3629,7 +3544,6 @@ description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
{file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
@@ -3639,12 +3553,12 @@ files = [
zipp = ">=3.20"
[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
perf = ["ipython"]
-test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
+test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
type = ["pytest-mypy"]
[[package]]
@@ -3654,14 +3568,13 @@ description = "Read resources from Python packages"
optional = false
python-versions = ">=3.9"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec"},
{file = "importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c"},
]
[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
@@ -3675,7 +3588,6 @@ description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
@@ -3688,7 +3600,6 @@ description = "An ISO 8601 date/time/duration parser and formatter"
optional = false
python-versions = ">=3.7"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"},
{file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"},
@@ -3701,7 +3612,6 @@ description = "Safely pass data to untrusted environments and back."
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"},
{file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
@@ -3714,7 +3624,6 @@ description = "Chinese Words Segmentation Utilities"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "jieba-0.42.1.tar.gz", hash = "sha256:055ca12f62674fafed09427f176506079bc135638a14e23e25be909131928db2"},
]
@@ -3726,7 +3635,6 @@ description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"},
{file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"},
@@ -3745,7 +3653,6 @@ description = "Fast iterable JSON parser."
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"},
{file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"},
@@ -3832,7 +3739,6 @@ description = "JSON Matching Expressions"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
groups = ["main", "storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"},
{file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"},
@@ -3845,7 +3751,6 @@ description = "Lightweight pipelining with Python functions"
optional = false
python-versions = ">=3.8"
groups = ["main", "tools"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"},
{file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"},
@@ -3858,7 +3763,6 @@ description = "A more powerful JSONPath implementation in modern python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"},
{file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"},
@@ -3871,7 +3775,6 @@ description = "An implementation of JSON Schema validation for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"},
{file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"},
@@ -3894,7 +3797,6 @@ description = "The JSON Schema meta-schemas and vocabularies, exposed as a Regis
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"},
{file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"},
@@ -3910,7 +3812,6 @@ description = "Static image export for web-based visualization libraries with ze
optional = false
python-versions = "*"
groups = ["indirect"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "kaleido-0.2.1-py2.py3-none-macosx_10_11_x86_64.whl", hash = "sha256:ca6f73e7ff00aaebf2843f73f1d3bacde1930ef5041093fe76b83a15785049a7"},
{file = "kaleido-0.2.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:bb9a5d1f710357d5d432ee240ef6658a6d124c3e610935817b4b42da9c787c05"},
@@ -3927,7 +3828,6 @@ description = "Messaging library for Python."
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "kombu-5.4.2-py3-none-any.whl", hash = "sha256:14212f5ccf022fc0a70453bb025a1dcc32782a588c49ea866884047d66e14763"},
{file = "kombu-5.4.2.tar.gz", hash = "sha256:eef572dd2fd9fc614b37580e3caeafdd5af46c1eff31e7fba89138cdb406f2cf"},
@@ -3943,7 +3843,7 @@ azureservicebus = ["azure-servicebus (>=7.10.0)"]
azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"]
confluentkafka = ["confluent-kafka (>=2.2.0)"]
consul = ["python-consul2 (==0.1.5)"]
-librabbitmq = ["librabbitmq (>=2.0.0)"]
+librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""]
mongodb = ["pymongo (>=4.1.1)"]
msgpack = ["msgpack (==1.1.0)"]
pyro = ["pyro4 (==4.82)"]
@@ -3951,7 +3851,7 @@ qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"]
redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2)"]
slmq = ["softlayer-messaging (>=1.0.3)"]
sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"]
-sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"]
+sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5) ; sys_platform != \"win32\" and platform_python_implementation == \"CPython\"", "urllib3 (>=1.26.16)"]
yaml = ["PyYAML (>=3.10)"]
zookeeper = ["kazoo (>=2.8.0)"]
@@ -3962,7 +3862,6 @@ description = "Kubernetes python client"
optional = false
python-versions = ">=3.6"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "kubernetes-32.0.1-py2.py3-none-any.whl", hash = "sha256:35282ab8493b938b08ab5526c7ce66588232df00ef5e1dbe88a419107dc10998"},
{file = "kubernetes-32.0.1.tar.gz", hash = "sha256:42f43d49abd437ada79a79a16bd48a604d3471a117a8347e87db693f2ba0ba28"},
@@ -3991,7 +3890,6 @@ description = "Language detection library ported from Google's language-detectio
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "langdetect-1.0.9-py2-none-any.whl", hash = "sha256:7cbc0746252f19e76f77c0b1690aadf01963be835ef0cd4b56dddf2a8f1dfc2a"},
{file = "langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0"},
@@ -4007,7 +3905,6 @@ description = "A client library for accessing langfuse"
optional = false
python-versions = "<4.0,>=3.8.1"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "langfuse-2.51.5-py3-none-any.whl", hash = "sha256:b95401ca710ef94b521afa6541933b6f93d7cfd4a97523c8fc75bca4d6d219fb"},
{file = "langfuse-2.51.5.tar.gz", hash = "sha256:55bc37b5c5d3ae133c1a95db09117cfb3117add110ba02ebbf2ce45ac4395c5b"},
@@ -4034,7 +3931,6 @@ description = "Client library to connect to the LangSmith LLM Tracing and Evalua
optional = false
python-versions = "<4.0,>=3.8.1"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15"},
{file = "langsmith-0.1.147.tar.gz", hash = "sha256:2e933220318a4e73034657103b3b1a3a6109cc5db3566a7e8e03be8d6d7def7a"},
@@ -4060,7 +3956,6 @@ description = "Python extension for computing string edit distances and similari
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "levenshtein-0.27.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13d6f617cb6fe63714c4794861cfaacd398db58a292f930edb7f12aad931dace"},
{file = "levenshtein-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca9d54d41075e130c390e61360bec80f116b62d6ae973aec502e77e921e95334"},
@@ -4168,7 +4063,6 @@ description = "Library to easily interface with LLM API providers"
optional = false
python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "litellm-1.63.3-py3-none-any.whl", hash = "sha256:bbe56d3b4afa5dda1e9730071780601e03278bba003da1634c4a2dc93c83ae05"},
{file = "litellm-1.63.3.tar.gz", hash = "sha256:329fa60e2c93e95d28798c2fbc63aaca67d8da38b2256d015cb169e97e5382f3"},
@@ -4198,7 +4092,6 @@ description = "lightweight wrapper around basic LLVM functionality"
optional = false
python-versions = ">=3.10"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "llvmlite-0.44.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9fbadbfba8422123bab5535b293da1cf72f9f478a65645ecd73e781f962ca614"},
{file = "llvmlite-0.44.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cccf8eb28f24840f2689fb1a45f9c0f7e582dd24e088dcf96e424834af11f791"},
@@ -4230,7 +4123,6 @@ description = "Powerful and Pythonic XML processing library combining libxml2/li
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "lxml-5.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4058f16cee694577f7e4dd410263cd0ef75644b43802a689c2b3c2a7e69453b"},
{file = "lxml-5.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:364de8f57d6eda0c16dcfb999af902da31396949efa0e583e12675d09709881b"},
@@ -4386,7 +4278,6 @@ description = "LZ4 Bindings for Python"
optional = false
python-versions = ">=3.9"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "lz4-4.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1ebf23ffd36b32b980f720a81990fcfdeadacafe7498fbeff7a8e058259d4e58"},
{file = "lz4-4.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8fe3caea61427057a9e3697c69b2403510fdccfca4483520d02b98ffae74531e"},
@@ -4433,7 +4324,6 @@ description = "Mailchimp Transactional API"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "mailchimp_transactional-1.0.56-py3-none-any.whl", hash = "sha256:a76ea88b90a2d47d8b5134586aabbd3a96c459f6066d8886748ab59e50de36eb"},
]
@@ -4452,7 +4342,6 @@ description = "A super-fast templating language that borrows the best ideas from
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Mako-1.3.9-py3-none-any.whl", hash = "sha256:95920acccb578427a9aa38e37a186b1e43156c87260d7ba18ca63aa4c7cbd3a1"},
{file = "mako-1.3.9.tar.gz", hash = "sha256:b5d65ff3462870feec922dbccf38f6efb44e5714d7b593a656be86663d8600ac"},
@@ -4473,7 +4362,6 @@ description = "Python implementation of John Gruber's Markdown."
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"},
{file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"},
@@ -4490,7 +4378,6 @@ description = "Python port of markdown-it. Markdown parsing, done right!"
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
{file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
@@ -4516,7 +4403,6 @@ description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
{file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
@@ -4588,7 +4474,6 @@ description = "A lightweight library for converting complex datatypes to and fro
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c"},
{file = "marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6"},
@@ -4609,7 +4494,6 @@ description = "Markdown URL utilities"
optional = false
python-versions = ">=3.7"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
@@ -4622,7 +4506,7 @@ description = "A lightweight version of Milvus wrapped with Python."
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and sys_platform != \"win32\""
+markers = "sys_platform != \"win32\""
files = [
{file = "milvus_lite-2.4.11-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:9e563ae0dca1b41bfd76b90f06b2bcc474460fe4eba142c9bab18d2747ff843b"},
{file = "milvus_lite-2.4.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d21472bd24eb327542817829ce7cb51878318e6173c4d62353c77421aecf98d6"},
@@ -4640,7 +4524,6 @@ description = "Python extension for MurmurHash (MurmurHash3), a set of fast and
optional = false
python-versions = ">=3.9"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:eaf4ac5c6ee18ca9232238364d7f2a213278ae5ca97897cafaa123fcc7bb8bec"},
{file = "mmh3-5.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48f9aa8ccb9ad1d577a16104834ac44ff640d8de8c0caed09a2300df7ce8460a"},
@@ -4740,7 +4623,6 @@ description = "An implementation of time.monotonic() for Python 2 & < 3.3"
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"},
{file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"},
@@ -4753,7 +4635,6 @@ description = "Python library for arbitrary-precision floating-point arithmetic"
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"},
{file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"},
@@ -4762,7 +4643,7 @@ files = [
[package.extras]
develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"]
docs = ["sphinx"]
-gmpy = ["gmpy2 (>=2.1.0a4)"]
+gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""]
tests = ["pytest (>=4.6)"]
[[package]]
@@ -4772,7 +4653,6 @@ description = "The Microsoft Authentication Library (MSAL) for Python library en
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "msal-1.31.1-py3-none-any.whl", hash = "sha256:29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17"},
{file = "msal-1.31.1.tar.gz", hash = "sha256:11b5e6a3f802ffd3a72107203e20c4eac6ef53401961b880af2835b723d80578"},
@@ -4784,7 +4664,7 @@ PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]}
requests = ">=2.0.0,<3"
[package.extras]
-broker = ["pymsalruntime (>=0.14,<0.18)", "pymsalruntime (>=0.17,<0.18)"]
+broker = ["pymsalruntime (>=0.14,<0.18) ; python_version >= \"3.6\" and platform_system == \"Windows\"", "pymsalruntime (>=0.17,<0.18) ; python_version >= \"3.8\" and platform_system == \"Darwin\""]
[[package]]
name = "msal-extensions"
@@ -4793,7 +4673,6 @@ description = "Microsoft Authentication Library extensions (MSAL EX) provides a
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"},
{file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"},
@@ -4810,7 +4689,6 @@ description = "AutoRest swagger generator Python client runtime."
optional = false
python-versions = ">=3.6"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "msrest-0.7.1-py3-none-any.whl", hash = "sha256:21120a810e1233e5e6cc7fe40b474eeb4ec6f757a15d7cf86702c369f9567c32"},
{file = "msrest-0.7.1.zip", hash = "sha256:6e7661f46f3afd88b75667b7187a92829924446c7ea1d169be8c4bb7eeb788b9"},
@@ -4824,7 +4702,7 @@ requests = ">=2.16,<3.0"
requests-oauthlib = ">=0.5.0"
[package.extras]
-async = ["aiodns", "aiohttp (>=3.0)"]
+async = ["aiodns ; python_version >= \"3.5\"", "aiohttp (>=3.0) ; python_version >= \"3.5\""]
[[package]]
name = "multidict"
@@ -4833,7 +4711,6 @@ description = "multidict implementation"
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"},
{file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"},
@@ -4936,7 +4813,6 @@ description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"},
{file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"},
@@ -4990,7 +4866,6 @@ description = "Type system extensions for programs checked with the mypy type ch
optional = false
python-versions = ">=3.5"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
@@ -5003,7 +4878,6 @@ description = "Patch asyncio to allow nested event loops"
optional = false
python-versions = ">=3.5"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
{file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
@@ -5016,7 +4890,6 @@ description = "Natural Language Toolkit"
optional = false
python-versions = ">=3.8"
groups = ["main", "tools"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1"},
{file = "nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868"},
@@ -5043,7 +4916,6 @@ description = "compiling Python code using LLVM"
optional = false
python-versions = ">=3.10"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "numba-0.61.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:9cab9783a700fa428b1a54d65295122bc03b3de1d01fb819a6b9dbbddfdb8c43"},
{file = "numba-0.61.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46c5ae094fb3706f5adf9021bfb7fc11e44818d61afee695cdee4eadfed45e98"},
@@ -5079,7 +4951,6 @@ description = "Fast numerical expression evaluator for NumPy"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "numexpr-2.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b0e82d2109c1d9e63fcd5ea177d80a11b881157ab61178ddbdebd4c561ea46"},
{file = "numexpr-2.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc2b8035a0c2cdc352e58c3875cb668836018065cbf5752cb531015d9a568d8"},
@@ -5129,7 +5000,6 @@ description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.9"
groups = ["main", "indirect", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
{file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
@@ -5176,7 +5046,6 @@ description = "A generic, spec-compliant, thorough implementation of the OAuth r
optional = false
python-versions = ">=3.6"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
{file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
@@ -5194,7 +5063,6 @@ description = "Oracle Cloud Infrastructure Python SDK"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "oci-2.135.2-py3-none-any.whl", hash = "sha256:5213319244e1c7f108bcb417322f33f01f043fd9636d4063574039f5fdf4e4f7"},
{file = "oci-2.135.2.tar.gz", hash = "sha256:520f78983c5246eae80dd5ecfd05e3a565c8b98d02ef0c1b11ba1f61bcccb61d"},
@@ -5215,7 +5083,6 @@ description = "Python API and tools to manipulate OpenDocument files"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "odfpy-1.4.1.tar.gz", hash = "sha256:db766a6e59c5103212f3cc92ec8dd50a0f3a02790233ed0b52148b70d3c438ec"},
]
@@ -5230,7 +5097,6 @@ description = "Python package to parse, read and write Microsoft OLE2 files (Str
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f"},
{file = "olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c"},
@@ -5246,7 +5112,6 @@ description = "ONNX Runtime is a runtime accelerator for Machine Learning models
optional = false
python-versions = ">=3.10"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "onnxruntime-1.21.0-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:95513c9302bc8dd013d84148dcf3168e782a80cdbf1654eddc948a23147ccd3d"},
{file = "onnxruntime-1.21.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:635d4ab13ae0f150dd4c6ff8206fd58f1c6600636ecc796f6f0c42e4c918585b"},
@@ -5283,7 +5148,6 @@ description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "openai-1.61.1-py3-none-any.whl", hash = "sha256:72b0826240ce26026ac2cd17951691f046e5be82ad122d20a8e1b30ca18bd11e"},
{file = "openai-1.61.1.tar.gz", hash = "sha256:ce1851507218209961f89f3520e06726c0aa7d0512386f0f977e3ac3e4f2472e"},
@@ -5310,7 +5174,6 @@ description = "Apache OpenDAL™ Python Binding"
optional = false
python-versions = ">=3.10"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opendal-0.45.16-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:aa8d3bcf8466a489800df517bff40e5da107c11f88123b3063aa8c0238552548"},
{file = "opendal-0.45.16-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d10ae4edee9602b55b58469429c56bf82b7044cde25b87f9ec994ca4fbf57a3e"},
@@ -5342,7 +5205,6 @@ description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"},
{file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"},
@@ -5358,7 +5220,6 @@ description = "Python client for OpenSearch"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opensearch-py-2.4.0.tar.gz", hash = "sha256:7eba2b6ed2ddcf33225bfebfba2aee026877838cc39f760ec80f27827308cc4b"},
{file = "opensearch_py-2.4.0-py2.py3-none-any.whl", hash = "sha256:316077235437c8ceac970232261f3393c65fb92a80f33c5b106f50f1dab24fd9"},
@@ -5384,7 +5245,6 @@ description = "OpenTelemetry Python API"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opentelemetry_api-1.30.0-py3-none-any.whl", hash = "sha256:d5f5284890d73fdf47f843dda3210edf37a38d66f44f2b5aedc1e89ed455dc09"},
{file = "opentelemetry_api-1.30.0.tar.gz", hash = "sha256:375893400c1435bf623f7dfb3bcd44825fe6b56c34d0667c542ea8257b1a1240"},
@@ -5401,7 +5261,6 @@ description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opentelemetry_exporter_otlp_proto_grpc-1.15.0-py3-none-any.whl", hash = "sha256:c2a5492ba7d140109968135d641d06ce3c5bd73c50665f787526065d57d7fd1d"},
{file = "opentelemetry_exporter_otlp_proto_grpc-1.15.0.tar.gz", hash = "sha256:844f2a4bb9bcda34e4eb6fe36765e5031aacb36dc60ed88c90fc246942ea26e7"},
@@ -5425,7 +5284,6 @@ description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Py
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opentelemetry_instrumentation-0.51b0-py3-none-any.whl", hash = "sha256:c6de8bd26b75ec8b0e54dff59e198946e29de6a10ec65488c357d4b34aa5bdcf"},
{file = "opentelemetry_instrumentation-0.51b0.tar.gz", hash = "sha256:4ca266875e02f3988536982467f7ef8c32a38b8895490ddce9ad9604649424fa"},
@@ -5444,7 +5302,6 @@ description = "ASGI instrumentation for OpenTelemetry"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opentelemetry_instrumentation_asgi-0.51b0-py3-none-any.whl", hash = "sha256:e8072993db47303b633c6ec1bc74726ba4d32bd0c46c28dfadf99f79521a324c"},
{file = "opentelemetry_instrumentation_asgi-0.51b0.tar.gz", hash = "sha256:b3fe97c00f0bfa934371a69674981d76591c68d937b6422a5716ca21081b4148"},
@@ -5467,7 +5324,6 @@ description = "OpenTelemetry FastAPI Instrumentation"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opentelemetry_instrumentation_fastapi-0.51b0-py3-none-any.whl", hash = "sha256:10513bbc11a1188adb9c1d2c520695f7a8f2b5f4de14e8162098035901cd6493"},
{file = "opentelemetry_instrumentation_fastapi-0.51b0.tar.gz", hash = "sha256:1624e70f2f4d12ceb792d8a0c331244cd6723190ccee01336273b4559bc13abc"},
@@ -5490,7 +5346,6 @@ description = "OpenTelemetry Python Proto"
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opentelemetry_proto-1.15.0-py3-none-any.whl", hash = "sha256:044b6d044b4d10530f250856f933442b8753a17f94ae37c207607f733fb9a844"},
{file = "opentelemetry_proto-1.15.0.tar.gz", hash = "sha256:9c4008e40ac8cab359daac283fbe7002c5c29c77ea2674ad5626a249e64e0101"},
@@ -5506,7 +5361,6 @@ description = "OpenTelemetry Python SDK"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opentelemetry_sdk-1.30.0-py3-none-any.whl", hash = "sha256:14fe7afc090caad881addb6926cec967129bd9260c4d33ae6a217359f6b61091"},
{file = "opentelemetry_sdk-1.30.0.tar.gz", hash = "sha256:c9287a9e4a7614b9946e933a67168450b9ab35f08797eb9bc77d998fa480fa18"},
@@ -5524,7 +5378,6 @@ description = "OpenTelemetry Semantic Conventions"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opentelemetry_semantic_conventions-0.51b0-py3-none-any.whl", hash = "sha256:fdc777359418e8d06c86012c3dc92c88a6453ba662e941593adb062e48c2eeae"},
{file = "opentelemetry_semantic_conventions-0.51b0.tar.gz", hash = "sha256:3fabf47f35d1fd9aebcdca7e6802d86bd5ebc3bc3408b7e3248dde6e87a18c47"},
@@ -5541,7 +5394,6 @@ description = "Web util for OpenTelemetry"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opentelemetry_util_http-0.51b0-py3-none-any.whl", hash = "sha256:0561d7a6e9c422b9ef9ae6e77eafcfcd32a2ab689f5e801475cbb67f189efa20"},
{file = "opentelemetry_util_http-0.51b0.tar.gz", hash = "sha256:05edd19ca1cc3be3968b1e502fd94816901a365adbeaab6b6ddb974384d3a0b9"},
@@ -5554,7 +5406,6 @@ description = "Comet tool for logging and evaluating LLM traces"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "opik-1.3.5-py3-none-any.whl", hash = "sha256:c6a195b33851959b8e96ac78fe211b6157288eddc03fa8bfbd1ef53424b702dc"},
{file = "opik-1.3.5.tar.gz", hash = "sha256:943e4b636b70e5781f7a6f40b33fadda0935b57ecad0997f195ce909956b68d7"},
@@ -5581,7 +5432,6 @@ description = "Python interface to Oracle Database"
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "oracledb-2.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3dacef7c4dd3fca94728f05336076e063450bb57ea569e8dd67fae960aaf537e"},
{file = "oracledb-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd8fdc93a65ae2e1c934a0e3e64cb01997ba004c48a986a37583f670dd344802"},
@@ -5707,7 +5557,7 @@ files = [
{file = "orjson-3.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:efcf6c735c3d22ef60c4aa27a5238f1a477df85e9b15f2142f9d669beb2d13fd"},
{file = "orjson-3.10.15.tar.gz", hash = "sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e"},
]
-markers = {main = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\"", vdb = "python_version == \"3.11\" or python_version >= \"3.12\""}
+markers = {main = "platform_python_implementation != \"PyPy\""}
[[package]]
name = "oss2"
@@ -5716,7 +5566,6 @@ description = "Aliyun OSS (Object Storage Service) SDK"
optional = false
python-versions = "*"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "oss2-2.18.5.tar.gz", hash = "sha256:555c857f4441ae42a2c0abab8fc9482543fba35d65a4a4be73101c959a2b4011"},
]
@@ -5736,7 +5585,6 @@ description = "A decorator to automatically detect mismatch when overriding a me
optional = false
python-versions = ">=3.6"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"},
{file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"},
@@ -5749,7 +5597,6 @@ description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
{file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
@@ -5762,7 +5609,6 @@ description = "Powerful data structures for data analysis, time series, and stat
optional = false
python-versions = ">=3.9"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"},
{file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"},
@@ -5860,7 +5706,6 @@ description = "Type annotations for pandas"
optional = false
python-versions = ">=3.10"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pandas_stubs-2.2.3.250308-py3-none-any.whl", hash = "sha256:a377edff3b61f8b268c82499fdbe7c00fdeed13235b8b71d6a1dc347aeddc74d"},
{file = "pandas_stubs-2.2.3.250308.tar.gz", hash = "sha256:3a6e9daf161f00b85c83772ed3d5cff9522028f07a94817472c07b91f46710fd"},
@@ -5877,7 +5722,6 @@ description = "Python binding for pgvecto.rs"
optional = false
python-versions = "<3.13,>=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pgvecto_rs-0.2.2-py3-none-any.whl", hash = "sha256:5f3f7f806813de408c45dc10a9eb418b986c4d7b7723e8fce9298f2f7d8fbbd5"},
{file = "pgvecto_rs-0.2.2.tar.gz", hash = "sha256:edaa913d1747152b1407cbdf6337d51ac852547b54953ef38997433be3a75a3b"},
@@ -5901,7 +5745,6 @@ description = "pgvector support for Python"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b"},
]
@@ -5916,7 +5759,6 @@ description = "Python Imaging Library (Fork)"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"},
{file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"},
@@ -5996,7 +5838,7 @@ docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline
fpx = ["olefile"]
mic = ["olefile"]
tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"]
-typing = ["typing-extensions"]
+typing = ["typing-extensions ; python_version < \"3.10\""]
xmp = ["defusedxml"]
[[package]]
@@ -6006,7 +5848,6 @@ description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
@@ -6023,7 +5864,6 @@ description = "Python Lex & Yacc"
optional = false
python-versions = "*"
groups = ["lint"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
{file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
@@ -6036,7 +5876,6 @@ description = "Wraps the portalocker recipe for easy usage"
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"},
{file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"},
@@ -6057,7 +5896,6 @@ description = "PostgREST client for Python. This library provides an ORM interfa
optional = false
python-versions = "<4.0,>=3.9"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "postgrest-0.17.2-py3-none-any.whl", hash = "sha256:f7c4f448e5a5e2d4c1dcf192edae9d1007c4261e9a6fb5116783a0046846ece2"},
{file = "postgrest-0.17.2.tar.gz", hash = "sha256:445cd4e4a191e279492549df0c4e827d32f9d01d0852599bb8a6efb0f07fcf78"},
@@ -6075,7 +5913,6 @@ description = "Integrate PostHog into any python application."
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "posthog-3.19.0-py2.py3-none-any.whl", hash = "sha256:c294bc0a939e21ecf88d625496f8073cc566c28ec2a917a47d5d32ba33e90a7f"},
{file = "posthog-3.19.0.tar.gz", hash = "sha256:7fe5c9e494fc2cca9baa2bd8074c0844d572df46a54378101bc20eec2776027e"},
@@ -6102,7 +5939,6 @@ description = "Library for building powerful interactive command lines in Python
optional = false
python-versions = ">=3.8.0"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"},
{file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"},
@@ -6118,7 +5954,6 @@ description = "Accelerated property cache"
optional = false
python-versions = ">=3.9"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d"},
{file = "propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c"},
@@ -6227,7 +6062,6 @@ description = "Beautiful, Pythonic protocol buffers"
optional = false
python-versions = ">=3.7"
groups = ["main", "storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7"},
{file = "proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22"},
@@ -6246,7 +6080,6 @@ description = ""
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "protobuf-4.25.6-cp310-abi3-win32.whl", hash = "sha256:61df6b5786e2b49fc0055f636c1e8f0aff263808bb724b95b164685ac1bcc13a"},
{file = "protobuf-4.25.6-cp310-abi3-win_amd64.whl", hash = "sha256:b8f837bfb77513fe0e2f263250f423217a173b6d85135be4d81e96a4653bcd3c"},
@@ -6268,7 +6101,6 @@ description = "Cross-platform lib for process and system monitoring in Python.
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"},
{file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"},
@@ -6293,7 +6125,6 @@ description = "psycopg2 integration with coroutine libraries"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "psycogreen-1.0.2.tar.gz", hash = "sha256:c429845a8a49cf2f76b71265008760bcd7c7c77d80b806db4dc81116dbcd130d"},
]
@@ -6305,7 +6136,6 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"},
@@ -6383,7 +6213,6 @@ description = "library with cross-python path, ini-parsing, io, code, log facili
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
@@ -6396,7 +6225,6 @@ description = "Get CPU info with pure Python"
optional = false
python-versions = "*"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"},
{file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"},
@@ -6409,7 +6237,6 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"},
{file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
@@ -6422,7 +6249,6 @@ description = "A collection of ASN.1-based protocols modules"
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"},
{file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"},
@@ -6442,7 +6268,7 @@ files = [
{file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
{file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
]
-markers = {main = "python_version == \"3.11\" or python_version >= \"3.12\"", storage = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\"", vdb = "python_version == \"3.11\" or python_version >= \"3.12\""}
+markers = {storage = "platform_python_implementation != \"PyPy\"", vdb = "python_version < \"3.12\" or platform_python_implementation != \"PyPy\""}
[[package]]
name = "pycryptodome"
@@ -6451,7 +6277,6 @@ description = "Cryptographic library for Python"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pycryptodome-3.19.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:694020d2ff985cd714381b9da949a21028c24b86f562526186f6af7c7547e986"},
{file = "pycryptodome-3.19.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:4464b0e8fd5508bff9baf18e6fd4c6548b1ac2ce9862d6965ff6a84ec9cb302a"},
@@ -6494,7 +6319,6 @@ description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"},
{file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"},
@@ -6507,7 +6331,7 @@ typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""}
[package.extras]
email = ["email-validator (>=2.0.0)"]
-timezone = ["tzdata"]
+timezone = ["tzdata ; python_version >= \"3.9\" and sys_platform == \"win32\""]
[[package]]
name = "pydantic-core"
@@ -6516,7 +6340,6 @@ description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"},
{file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"},
@@ -6619,7 +6442,6 @@ description = "Extra Pydantic types."
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pydantic_extra_types-2.9.0-py3-none-any.whl", hash = "sha256:f0bb975508572ba7bf3390b7337807588463b7248587e69f43b1ad7c797530d0"},
{file = "pydantic_extra_types-2.9.0.tar.gz", hash = "sha256:e061c01636188743bb69f368dcd391f327b8cfbfede2fe1cbb1211b06601ba3b"},
@@ -6629,11 +6451,11 @@ files = [
pydantic = ">=2.5.2"
[package.extras]
-all = ["pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<9)", "pycountry (>=23)", "python-ulid (>=1,<2)", "python-ulid (>=1,<3)", "pytz (>=2024.1)", "semver (>=3.0.2)", "tzdata (>=2024.1)"]
+all = ["pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<9)", "pycountry (>=23)", "python-ulid (>=1,<2) ; python_version < \"3.9\"", "python-ulid (>=1,<3) ; python_version >= \"3.9\"", "pytz (>=2024.1)", "semver (>=3.0.2)", "tzdata (>=2024.1)"]
pendulum = ["pendulum (>=3.0.0,<4.0.0)"]
phonenumbers = ["phonenumbers (>=8,<9)"]
pycountry = ["pycountry (>=23)"]
-python-ulid = ["python-ulid (>=1,<2)", "python-ulid (>=1,<3)"]
+python-ulid = ["python-ulid (>=1,<2) ; python_version < \"3.9\"", "python-ulid (>=1,<3) ; python_version >= \"3.9\""]
semver = ["semver (>=3.0.2)"]
[[package]]
@@ -6643,7 +6465,6 @@ description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"},
{file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"},
@@ -6665,7 +6486,6 @@ description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"},
{file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"},
@@ -6681,7 +6501,6 @@ description = "JSON Web Token implementation in Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"},
{file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"},
@@ -6703,7 +6522,6 @@ description = "Python Sdk for Milvus"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pymilvus-2.5.5-py3-none-any.whl", hash = "sha256:b91794fbaf72c6d7ed2419b8d4e67369263bdc16f1722f02c97927cfdf3e69da"},
{file = "pymilvus-2.5.5.tar.gz", hash = "sha256:8985f018961853022e03639a9ff323d5c22d0b659e66e288f4d08de11789e1d4"},
@@ -6730,7 +6548,6 @@ description = "Python SDK for mochow"
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pymochow-1.3.1-py3-none-any.whl", hash = "sha256:a7f3b34fd6ea5d1d8413650bb6678365aa148fc396ae945e4ccb4f2365a52327"},
{file = "pymochow-1.3.1.tar.gz", hash = "sha256:1693d10cd0bb7bce45327890a90adafb503155922ccc029acb257699a73a20ba"},
@@ -6748,7 +6565,6 @@ description = "Pure Python MySQL Driver"
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"},
{file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"},
@@ -6765,7 +6581,6 @@ description = "A python SDK for OceanBase Vector Store, based on SQLAlchemy, com
optional = false
python-versions = "<4.0,>=3.9"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pyobvector-0.1.18-py3-none-any.whl", hash = "sha256:9ca4098fd58f87e9c6ff1cd4a5631c666d51d0607933dd3656b7274eacc36428"},
{file = "pyobvector-0.1.18.tar.gz", hash = "sha256:0497764dc8f60ab2ce8b8d738b05dea946df5679e773049620da5a339091ed92"},
@@ -6784,7 +6599,6 @@ description = "Python wrapper module around the OpenSSL library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"},
{file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"},
@@ -6804,7 +6618,6 @@ description = "Thin wrapper for pandoc."
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pypandoc-1.15-py3-none-any.whl", hash = "sha256:4ededcc76c8770f27aaca6dff47724578428eca84212a31479403a9731fc2b16"},
{file = "pypandoc-1.15.tar.gz", hash = "sha256:ea25beebe712ae41d63f7410c08741a3cab0e420f6703f95bc9b3a749192ce13"},
@@ -6817,7 +6630,6 @@ description = "pyparsing module - Classes and methods to define and execute pars
optional = false
python-versions = ">=3.9"
groups = ["main", "tools"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"},
{file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"},
@@ -6833,7 +6645,6 @@ description = "A pure-python PDF library capable of splitting, merging, cropping
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pypdf-5.3.1-py3-none-any.whl", hash = "sha256:20ea5b8686faad1b695fda054462b667d5e5f51e25fbbc092f12c5e0bb20d738"},
{file = "pypdf-5.3.1.tar.gz", hash = "sha256:0b9b715252b3c60bacc052e6a780e8b742cee9b9a2135f6007bb018e22a5adad"},
@@ -6854,7 +6665,6 @@ description = "Python bindings to PDFium"
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pypdfium2-4.30.1-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:e07c47633732cc18d890bb7e965ad28a9c5a932e548acb928596f86be2e5ae37"},
{file = "pypdfium2-4.30.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ea2d44e96d361123b67b00f527017aa9c847c871b5714e013c01c3eb36a79fe"},
@@ -6878,7 +6688,6 @@ description = "A SQL query builder API for Python"
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"},
]
@@ -6890,7 +6699,6 @@ description = "Wrappers to call pyproject.toml-based build backend hooks."
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"},
{file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"},
@@ -6903,7 +6711,7 @@ description = "A python implementation of GNU readline."
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and sys_platform == \"win32\""
+markers = "sys_platform == \"win32\""
files = [
{file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"},
{file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"},
@@ -6919,7 +6727,6 @@ description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"},
{file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"},
@@ -6941,7 +6748,6 @@ description = "A ``pytest`` fixture for benchmarking code. It will group the tes
optional = false
python-versions = ">=3.7"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"},
{file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"},
@@ -6963,7 +6769,6 @@ description = "pytest plugin that allows you to add environment variables."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pytest_env-1.1.5-py3-none-any.whl", hash = "sha256:ce90cf8772878515c24b31cd97c7fa1f4481cd68d588419fd45f10ecaee6bc30"},
{file = "pytest_env-1.1.5.tar.gz", hash = "sha256:91209840aa0e43385073ac464a554ad2947cc2fd663a9debf88d03b01e0cc1cf"},
@@ -6982,7 +6787,6 @@ description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
{file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
@@ -7001,7 +6805,6 @@ description = "Python binding for Rust's library for reading excel and odf file
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "python_calamine-0.3.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2822c39ad52f289732981cee59b4985388624b54e124e41436bb37565ed32f15"},
{file = "python_calamine-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f2786751cfe4e81f9170b843741b39a325cf9f49db8d51fc3cd16d6139e0ac60"},
@@ -7115,7 +6918,6 @@ description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main", "dev", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
@@ -7131,7 +6933,6 @@ description = "Create, read, and update Microsoft Word .docx files."
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe"},
{file = "python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd"},
@@ -7148,7 +6949,6 @@ description = "Read key-value pairs from a .env file and set them as environment
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
{file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
@@ -7164,7 +6964,6 @@ description = "ISO 639 language codes, names, and other associated information"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "python_iso639-2025.2.18-py3-none-any.whl", hash = "sha256:b2d471c37483a26f19248458b20e7bd96492e15368b01053b540126bcc23152f"},
{file = "python_iso639-2025.2.18.tar.gz", hash = "sha256:34e31e8e76eb3fc839629e257b12bcfd957c6edcbd486bbf66ba5185d1f566e8"},
@@ -7180,7 +6979,6 @@ description = "File type identification using libmagic"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"},
{file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"},
@@ -7193,7 +6991,6 @@ description = "Extract attachments from Outlook .msg files."
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "python_oxmsg-0.0.2-py3-none-any.whl", hash = "sha256:22be29b14c46016bcd05e34abddfd8e05ee82082f53b82753d115da3fc7d0355"},
{file = "python_oxmsg-0.0.2.tar.gz", hash = "sha256:a6aff4deb1b5975d44d49dab1d9384089ffeec819e19c6940bc7ffbc84775fad"},
@@ -7211,7 +7008,6 @@ description = "Create, read, and update PowerPoint 2007+ (.pptx) files."
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba"},
{file = "python_pptx-1.0.2.tar.gz", hash = "sha256:479a8af0eaf0f0d76b6f00b0887732874ad2e3188230315290cd1f9dd9cc7095"},
@@ -7230,7 +7026,6 @@ description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"},
{file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"},
@@ -7243,7 +7038,7 @@ description = "Python for Window Extensions"
optional = false
python-versions = "*"
groups = ["main", "vdb"]
-markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_system == \"Windows\""
+markers = "platform_system == \"Windows\""
files = [
{file = "pywin32-309-cp310-cp310-win32.whl", hash = "sha256:5b78d98550ca093a6fe7ab6d71733fbc886e2af9d4876d935e7f6e1cd6577ac9"},
{file = "pywin32-309-cp310-cp310-win_amd64.whl", hash = "sha256:728d08046f3d65b90d4c77f71b6fbb551699e2005cc31bbffd1febd6a08aa698"},
@@ -7270,7 +7065,6 @@ description = "Excel 2007-2010 Binary Workbook (xlsb) parser"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "pyxlsb-1.0.10-py2.py3-none-any.whl", hash = "sha256:87c122a9a622e35ca5e741d2e541201d28af00fb46bec492cfa9586890b120b4"},
{file = "pyxlsb-1.0.10.tar.gz", hash = "sha256:8062d1ea8626d3f1980e8b1cfe91a4483747449242ecb61013bc2df85435f685"},
@@ -7283,7 +7077,6 @@ description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
@@ -7347,7 +7140,6 @@ description = "Client library for the Qdrant vector search engine"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "qdrant_client-1.7.3-py3-none-any.whl", hash = "sha256:b062420ba55eb847652c7d2a26404fb1986bea13aa785763024013f96a7a915c"},
{file = "qdrant_client-1.7.3.tar.gz", hash = "sha256:7b809be892cdc5137ae80ea3335da40c06499ad0b0072b5abc6bad79da1d29fc"},
@@ -7366,7 +7158,7 @@ pydantic = ">=1.10.8"
urllib3 = ">=1.26.14,<3"
[package.extras]
-fastembed = ["fastembed (==0.1.1)"]
+fastembed = ["fastembed (==0.1.1) ; python_version < \"3.12\""]
[[package]]
name = "rank-bm25"
@@ -7375,7 +7167,6 @@ description = "Various BM25 algorithms for document ranking"
optional = false
python-versions = "*"
groups = ["indirect"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "rank_bm25-0.2.2-py3-none-any.whl", hash = "sha256:7bd4a95571adadfc271746fa146a4bcfd89c0cf731e49c3d1ad863290adbe8ae"},
{file = "rank_bm25-0.2.2.tar.gz", hash = "sha256:096ccef76f8188563419aaf384a02f0ea459503fdf77901378d4fd9d87e5e51d"},
@@ -7394,7 +7185,6 @@ description = "rapid fuzzy string matching"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "rapidfuzz-3.12.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b9a75e0385a861178adf59e86d6616cbd0d5adca7228dc9eeabf6f62cf5b0b1"},
{file = "rapidfuzz-3.12.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6906a7eb458731e3dd2495af1d0410e23a21a2a2b7ced535e6d5cd15cb69afc5"},
@@ -7502,7 +7292,6 @@ description = "Python wrapper for Mozilla's Readability.js"
optional = false
python-versions = ">=3.6.0"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "readabilipy-0.2.0-py3-none-any.whl", hash = "sha256:0050853cd6ab012ac75bb4d8f06427feb7dc32054da65060da44654d049802d0"},
{file = "readabilipy-0.2.0.tar.gz", hash = "sha256:098bf347b19f362042fb6c08864ad776588bf844ac2261fb230f7f9c250fdae5"},
@@ -7526,7 +7315,6 @@ description = ""
optional = false
python-versions = "<4.0,>=3.9"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "realtime-2.4.1-py3-none-any.whl", hash = "sha256:6aacfec1ca3519fbb87219ce250dee3b6797156f5a091eb48d0e19945bc6d103"},
{file = "realtime-2.4.1.tar.gz", hash = "sha256:8e77616d8c721f0f17ea0a256f6b5cd6d626b0eb66b305544d5f330c3a6d9a4c"},
@@ -7545,7 +7333,6 @@ description = "Python client for Redis database and key-value store"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"},
{file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"},
@@ -7566,7 +7353,6 @@ description = "JSON Referencing + Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"},
{file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"},
@@ -7584,7 +7370,6 @@ description = "Alternative regular expression module, to replace re."
optional = false
python-versions = ">=3.8"
groups = ["main", "tools"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"},
{file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"},
@@ -7689,7 +7474,6 @@ description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
groups = ["main", "storage", "tools", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
@@ -7712,7 +7496,6 @@ description = "OAuthlib authentication support for Requests."
optional = false
python-versions = ">=3.4"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"},
{file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"},
@@ -7732,7 +7515,6 @@ description = "A utility belt for advanced users of python-requests"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
groups = ["main", "tools"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
{file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
@@ -7748,7 +7530,6 @@ description = "Resend Python SDK"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "resend-0.7.2-py2.py3-none-any.whl", hash = "sha256:4f16711e11b007da7f8826283af6cdc34c99bd77c1dfad92afe9466a90d06c61"},
{file = "resend-0.7.2.tar.gz", hash = "sha256:bb10522a5ef1235b6cc2d74902df39c4863ac12b89dc48b46dd5c6f980574622"},
@@ -7764,7 +7545,6 @@ description = "Easy to use retry decorator."
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606"},
{file = "retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4"},
@@ -7781,7 +7561,6 @@ description = "Render rich text, tables, progress bars, syntax highlighting, mar
optional = false
python-versions = ">=3.8.0"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"},
{file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"},
@@ -7801,7 +7580,6 @@ description = "Python bindings to Rust's persistent data structures (rpds)"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "rpds_py-0.23.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2a54027554ce9b129fc3d633c92fa33b30de9f08bc61b32c053dc9b537266fed"},
{file = "rpds_py-0.23.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5ef909a37e9738d146519657a1aab4584018746a18f71c692f2f22168ece40c"},
@@ -7915,7 +7693,6 @@ description = "Pure-Python RSA implementation"
optional = false
python-versions = ">=3.6,<4"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
{file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
@@ -7926,31 +7703,30 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
-version = "0.9.10"
+version = "0.11.0"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["lint"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
- {file = "ruff-0.9.10-py3-none-linux_armv6l.whl", hash = "sha256:eb4d25532cfd9fe461acc83498361ec2e2252795b4f40b17e80692814329e42d"},
- {file = "ruff-0.9.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:188a6638dab1aa9bb6228a7302387b2c9954e455fb25d6b4470cb0641d16759d"},
- {file = "ruff-0.9.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5284dcac6b9dbc2fcb71fdfc26a217b2ca4ede6ccd57476f52a587451ebe450d"},
- {file = "ruff-0.9.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47678f39fa2a3da62724851107f438c8229a3470f533894b5568a39b40029c0c"},
- {file = "ruff-0.9.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99713a6e2766b7a17147b309e8c915b32b07a25c9efd12ada79f217c9c778b3e"},
- {file = "ruff-0.9.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524ee184d92f7c7304aa568e2db20f50c32d1d0caa235d8ddf10497566ea1a12"},
- {file = "ruff-0.9.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:df92aeac30af821f9acf819fc01b4afc3dfb829d2782884f8739fb52a8119a16"},
- {file = "ruff-0.9.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de42e4edc296f520bb84954eb992a07a0ec5a02fecb834498415908469854a52"},
- {file = "ruff-0.9.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d257f95b65806104b6b1ffca0ea53f4ef98454036df65b1eda3693534813ecd1"},
- {file = "ruff-0.9.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60dec7201c0b10d6d11be00e8f2dbb6f40ef1828ee75ed739923799513db24c"},
- {file = "ruff-0.9.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d838b60007da7a39c046fcdd317293d10b845001f38bcb55ba766c3875b01e43"},
- {file = "ruff-0.9.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ccaf903108b899beb8e09a63ffae5869057ab649c1e9231c05ae354ebc62066c"},
- {file = "ruff-0.9.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f9567d135265d46e59d62dc60c0bfad10e9a6822e231f5b24032dba5a55be6b5"},
- {file = "ruff-0.9.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5f202f0d93738c28a89f8ed9eaba01b7be339e5d8d642c994347eaa81c6d75b8"},
- {file = "ruff-0.9.10-py3-none-win32.whl", hash = "sha256:bfb834e87c916521ce46b1788fbb8484966e5113c02df216680102e9eb960029"},
- {file = "ruff-0.9.10-py3-none-win_amd64.whl", hash = "sha256:f2160eeef3031bf4b17df74e307d4c5fb689a6f3a26a2de3f7ef4044e3c484f1"},
- {file = "ruff-0.9.10-py3-none-win_arm64.whl", hash = "sha256:5fd804c0327a5e5ea26615550e706942f348b197d5475ff34c19733aee4b2e69"},
- {file = "ruff-0.9.10.tar.gz", hash = "sha256:9bacb735d7bada9cfb0f2c227d3658fc443d90a727b47f206fb33f52f3c0eac7"},
+ {file = "ruff-0.11.0-py3-none-linux_armv6l.whl", hash = "sha256:dc67e32bc3b29557513eb7eeabb23efdb25753684b913bebb8a0c62495095acb"},
+ {file = "ruff-0.11.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:38c23fd9bdec4eb437b4c1e3595905a0a8edfccd63a790f818b28c78fe345639"},
+ {file = "ruff-0.11.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7c8661b0be91a38bd56db593e9331beaf9064a79028adee2d5f392674bbc5e88"},
+ {file = "ruff-0.11.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6c0e8d3d2db7e9f6efd884f44b8dc542d5b6b590fc4bb334fdbc624d93a29a2"},
+ {file = "ruff-0.11.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c3156d3f4b42e57247275a0a7e15a851c165a4fc89c5e8fa30ea6da4f7407b8"},
+ {file = "ruff-0.11.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:490b1e147c1260545f6d041c4092483e3f6d8eba81dc2875eaebcf9140b53905"},
+ {file = "ruff-0.11.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1bc09a7419e09662983b1312f6fa5dab829d6ab5d11f18c3760be7ca521c9329"},
+ {file = "ruff-0.11.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcfa478daf61ac8002214eb2ca5f3e9365048506a9d52b11bea3ecea822bb844"},
+ {file = "ruff-0.11.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fbb2aed66fe742a6a3a0075ed467a459b7cedc5ae01008340075909d819df1e"},
+ {file = "ruff-0.11.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92c0c1ff014351c0b0cdfdb1e35fa83b780f1e065667167bb9502d47ca41e6db"},
+ {file = "ruff-0.11.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e4fd5ff5de5f83e0458a138e8a869c7c5e907541aec32b707f57cf9a5e124445"},
+ {file = "ruff-0.11.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:96bc89a5c5fd21a04939773f9e0e276308be0935de06845110f43fd5c2e4ead7"},
+ {file = "ruff-0.11.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a9352b9d767889ec5df1483f94870564e8102d4d7e99da52ebf564b882cdc2c7"},
+ {file = "ruff-0.11.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:049a191969a10897fe052ef9cc7491b3ef6de79acd7790af7d7897b7a9bfbcb6"},
+ {file = "ruff-0.11.0-py3-none-win32.whl", hash = "sha256:3191e9116b6b5bbe187447656f0c8526f0d36b6fd89ad78ccaad6bdc2fad7df2"},
+ {file = "ruff-0.11.0-py3-none-win_amd64.whl", hash = "sha256:c58bfa00e740ca0a6c43d41fb004cd22d165302f360aaa56f7126d544db31a21"},
+ {file = "ruff-0.11.0-py3-none-win_arm64.whl", hash = "sha256:868364fc23f5aa122b00c6f794211e85f7e78f5dffdf7c590ab90b8c4e69b657"},
+ {file = "ruff-0.11.0.tar.gz", hash = "sha256:e55c620690a4a7ee6f1cccb256ec2157dc597d109400ae75bbf944fc9d6462e2"},
]
[[package]]
@@ -7960,7 +7736,6 @@ description = "An Amazon S3 Transfer Manager"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"},
{file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"},
@@ -7979,7 +7754,6 @@ description = ""
optional = false
python-versions = ">=3.7"
groups = ["main", "indirect"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "safetensors-0.4.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a63eaccd22243c67e4f2b1c3e258b257effc4acd78f3b9d397edc8cf8f1298a7"},
{file = "safetensors-0.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:23fc9b4ec7b602915cbb4ec1a7c1ad96d2743c322f20ab709e2c35d1b66dad27"},
@@ -8113,7 +7887,6 @@ description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "sentry-sdk-1.44.1.tar.gz", hash = "sha256:24e6a53eeabffd2f95d952aa35ca52f0f4201d17f820ac9d3ff7244c665aaf68"},
{file = "sentry_sdk-1.44.1-py2.py3-none-any.whl", hash = "sha256:5f75eb91d8ab6037c754a87b8501cc581b2827e923682f593bed3539ce5b3999"},
@@ -8165,20 +7938,19 @@ description = "Easily download, build, install, upgrade, and uninstall Python pa
optional = false
python-versions = ">=3.9"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "setuptools-76.0.0-py3-none-any.whl", hash = "sha256:199466a166ff664970d0ee145839f5582cb9bca7a0a3a2e795b6a9cb2308e9c6"},
{file = "setuptools-76.0.0.tar.gz", hash = "sha256:43b4ee60e10b0d0ee98ad11918e114c70701bc6051662a9a675a0496c1a158f4"},
]
[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"]
-core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""]
+core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
enabler = ["pytest-enabler (>=2.2)"]
-test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
-type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"]
[[package]]
name = "shapely"
@@ -8187,7 +7959,6 @@ description = "Manipulation and analysis of geometric objects"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "shapely-2.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:33fb10e50b16113714ae40adccf7670379e9ccf5b7a41d0002046ba2b8f0f691"},
{file = "shapely-2.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f44eda8bd7a4bccb0f281264b34bf3518d8c4c9a8ffe69a1a05dabf6e8461147"},
@@ -8247,7 +8018,6 @@ description = "Tool to Detect Surrounding Shell"
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"},
{file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"},
@@ -8260,7 +8030,6 @@ description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main", "dev", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
@@ -8273,7 +8042,6 @@ description = "Sniff out which async library your code is running under"
optional = false
python-versions = ">=3.7"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
@@ -8286,7 +8054,6 @@ description = "Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5."
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3"},
{file = "socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac"},
@@ -8299,7 +8066,6 @@ description = "A modern CSS selector implementation for Beautiful Soup."
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"},
{file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"},
@@ -8312,7 +8078,6 @@ description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
groups = ["main", "dev", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:67219632be22f14750f0d1c70e62f204ba69d28f62fd6432ba05ab295853de9b"},
{file = "SQLAlchemy-2.0.35-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4668bd8faf7e5b71c0319407b608f278f279668f358857dbfd10ef1954ac9f90"},
@@ -8401,7 +8166,6 @@ description = "The little ASGI library that shines."
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "starlette-0.41.0-py3-none-any.whl", hash = "sha256:a0193a3c413ebc9c78bff1c3546a45bb8c8bcb4a84cae8747d650a65bd37210a"},
{file = "starlette-0.41.0.tar.gz", hash = "sha256:39cbd8768b107d68bfe1ff1672b38a2c38b49777de46d2a592841d58e3bf7c2a"},
@@ -8420,7 +8184,6 @@ description = "Supabase Storage client for Python."
optional = false
python-versions = "<4.0,>=3.9"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "storage3-0.8.2-py3-none-any.whl", hash = "sha256:f2e995b18c77a2a9265d1a33047d43e4d6abb11eb3ca5067959f68281c305de3"},
{file = "storage3-0.8.2.tar.gz", hash = "sha256:db05d3fe8fb73bd30c814c4c4749664f37a5dfc78b629e8c058ef558c2b89f5a"},
@@ -8438,7 +8201,6 @@ description = "Supabase client for Python."
optional = false
python-versions = "<4.0,>=3.9"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "supabase-2.8.1-py3-none-any.whl", hash = "sha256:dfa8bef89b54129093521d5bba2136ff765baf67cd76d8ad0aa4984d61a7815c"},
{file = "supabase-2.8.1.tar.gz", hash = "sha256:711c70e6acd9e2ff48ca0dc0b1bb70c01c25378cc5189ec9f5ed9655b30bc41d"},
@@ -8460,7 +8222,6 @@ description = "Library for Supabase Functions"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "supafunc-0.6.2-py3-none-any.whl", hash = "sha256:101b30616b0a1ce8cf938eca1df362fa4cf1deacb0271f53ebbd674190fb0da5"},
{file = "supafunc-0.6.2.tar.gz", hash = "sha256:c7dfa20db7182f7fe4ae436e94e05c06cd7ed98d697fed75d68c7b9792822adc"},
@@ -8476,7 +8237,6 @@ description = "Computer algebra system (CAS) in Python"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73"},
{file = "sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9"},
@@ -8495,7 +8255,6 @@ description = "Pretty-print tabular data"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
{file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
@@ -8511,7 +8270,6 @@ description = "Tencent VectorDB Python SDK"
optional = false
python-versions = ">=3"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "tcvectordb-1.3.2-py3-none-any.whl", hash = "sha256:c4b6922d5df4cf14fcd3e61220d9374d1d53ec7270c254216ae35f8a752908f3"},
{file = "tcvectordb-1.3.2.tar.gz", hash = "sha256:2772f5871a69744ffc7c970b321312d626078533a721de3c744059a81aab419e"},
@@ -8528,7 +8286,6 @@ description = "Retry code until it succeeds"
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"},
{file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"},
@@ -8545,7 +8302,6 @@ description = "A Python client for TiDB Vector"
optional = false
python-versions = "<4.0,>=3.8.1"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "tidb_vector-0.0.9-py3-none-any.whl", hash = "sha256:db060ee1c981326d3882d0810e0b8b57811f278668f9381168997b360c4296c2"},
{file = "tidb_vector-0.0.9.tar.gz", hash = "sha256:e10680872532808e1bcffa7a92dd2b05bb65d63982f833edb3c6cd590dec7709"},
@@ -8564,7 +8320,6 @@ description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "tiktoken-0.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b07e33283463089c81ef1467180e3e00ab00d46c2c4bbcef0acab5f771d6695e"},
{file = "tiktoken-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9269348cb650726f44dd3bbb3f9110ac19a8dcc8f54949ad3ef652ca22a38e21"},
@@ -8613,7 +8368,6 @@ description = ""
optional = false
python-versions = ">=3.7"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"},
{file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"},
@@ -8742,7 +8496,6 @@ description = "Python Library for Tom's Obvious, Minimal Language"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
@@ -8755,7 +8508,6 @@ description = "Volc TOS (Tinder Object Storage) SDK"
optional = false
python-versions = "*"
groups = ["storage"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "tos-2.7.2.tar.gz", hash = "sha256:3c31257716785bca7b2cac51474ff32543cda94075a7b7aff70d769c15c7b7ed"},
]
@@ -8774,7 +8526,6 @@ description = "Fast, Extensible Progress Meter"
optional = false
python-versions = ">=3.7"
groups = ["main", "tools", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"},
{file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"},
@@ -8797,7 +8548,6 @@ description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow
optional = false
python-versions = ">=3.8.0"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "transformers-4.35.2-py3-none-any.whl", hash = "sha256:9dfa76f8692379544ead84d98f537be01cd1070de75c74efb13abcbc938fbe2f"},
{file = "transformers-4.35.2.tar.gz", hash = "sha256:2d125e197d77b0cdb6c9201df9fa7e2101493272e448b9fba9341c695bee2f52"},
@@ -8867,7 +8617,6 @@ description = "Typer, build great CLIs. Easy to code. Based on Python type hints
optional = false
python-versions = ">=3.7"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc"},
{file = "typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5"},
@@ -8886,7 +8635,6 @@ description = "Typing stubs for beautifulsoup4"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_beautifulsoup4-4.12.0.20250204-py3-none-any.whl", hash = "sha256:57ce9e75717b63c390fd789c787d267a67eb01fa6d800a03b9bdde2e877ed1eb"},
{file = "types_beautifulsoup4-4.12.0.20250204.tar.gz", hash = "sha256:f083d8edcbd01279f8c3995b56cfff2d01f1bb894c3b502ba118d36fbbc495bf"},
@@ -8902,7 +8650,6 @@ description = "Typing stubs for Deprecated"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_deprecated-1.2.15.20250304-py3-none-any.whl", hash = "sha256:86a65aa550ea8acf49f27e226b8953288cd851de887970fbbdf2239c116c3107"},
{file = "types_deprecated-1.2.15.20250304.tar.gz", hash = "sha256:c329030553029de5cc6cb30f269c11f4e00e598c4241290179f63cda7d33f719"},
@@ -8915,7 +8662,6 @@ description = "Typing stubs for Flask-Cors"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types-Flask-Cors-5.0.0.20240902.tar.gz", hash = "sha256:8921b273bf7cd9636df136b66408efcfa6338a935e5c8f53f5eff1cee03f3394"},
{file = "types_Flask_Cors-5.0.0.20240902-py3-none-any.whl", hash = "sha256:595e5f36056cd128ab905832e055f2e5d116fbdc685356eea4490bc77df82137"},
@@ -8931,7 +8677,6 @@ description = "Typing stubs for Flask-Migrate"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_Flask_Migrate-4.1.0.20250112-py3-none-any.whl", hash = "sha256:1814fffc609c2ead784affd011de92f0beecd48044963a8c898dd107dc1b5969"},
{file = "types_flask_migrate-4.1.0.20250112.tar.gz", hash = "sha256:f2d2c966378ae7bb0660ec810e9af0a56ca03108235364c2a7b5e90418b0ff67"},
@@ -8948,7 +8693,6 @@ description = "Typing stubs for html5lib"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types-html5lib-1.1.11.20241018.tar.gz", hash = "sha256:98042555ff78d9e3a51c77c918b1041acbb7eb6c405408d8a9e150ff5beccafa"},
{file = "types_html5lib-1.1.11.20241018-py3-none-any.whl", hash = "sha256:3f1e064d9ed2c289001ae6392c84c93833abb0816165c6ff0abfc304a779f403"},
@@ -8961,7 +8705,6 @@ description = "Typing stubs for openpyxl"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_openpyxl-3.1.5.20250306-py3-none-any.whl", hash = "sha256:f7733dac1dcb07c89ff5ffde8452ee8d272be638defed855f4c48b2990ce5aa7"},
{file = "types_openpyxl-3.1.5.20250306.tar.gz", hash = "sha256:aa7ad2425e8020ff46a31633becfe1f3c64114498d964c536199f654b464e6bc"},
@@ -8974,7 +8717,6 @@ description = "Typing stubs for protobuf"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_protobuf-5.29.1.20250208-py3-none-any.whl", hash = "sha256:c5f8bfb4afdc1b5cbca1848f2c8b361a2090add7401f410b22b599ef647bf483"},
{file = "types_protobuf-5.29.1.20250208.tar.gz", hash = "sha256:c1acd6a59ab554dbe09b5d1fa7dd701e2fcfb2212937a3af1c03b736060b792a"},
@@ -8987,7 +8729,6 @@ description = "Typing stubs for psutil"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_psutil-6.1.0.20241221-py3-none-any.whl", hash = "sha256:8498dbe13285a9ba7d4b2fa934c569cc380efc74e3dacdb34ae16d2cdf389ec3"},
{file = "types_psutil-6.1.0.20241221.tar.gz", hash = "sha256:600f5a36bd5e0eb8887f0e3f3ff2cf154d90690ad8123c8a707bba4ab94d3185"},
@@ -9000,7 +8741,6 @@ description = "Typing stubs for psycopg2"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_psycopg2-2.9.21.20250121-py3-none-any.whl", hash = "sha256:b890dc6f5a08b6433f0ff73a4ec9a834deedad3e914f2a4a6fd43df021f745f1"},
{file = "types_psycopg2-2.9.21.20250121.tar.gz", hash = "sha256:2b0e2cd0f3747af1ae25a7027898716d80209604770ef3cbf350fe055b9c349b"},
@@ -9013,7 +8753,6 @@ description = "Typing stubs for python-dateutil"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"},
{file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"},
@@ -9026,7 +8765,6 @@ description = "Typing stubs for pytz"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_pytz-2024.2.0.20241221-py3-none-any.whl", hash = "sha256:8fc03195329c43637ed4f593663df721fef919b60a969066e22606edf0b53ad5"},
{file = "types_pytz-2024.2.0.20241221.tar.gz", hash = "sha256:06d7cde9613e9f7504766a0554a270c369434b50e00975b3a4a0f6eed0f2c1a9"},
@@ -9039,7 +8777,6 @@ description = "Typing stubs for PyYAML"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"},
{file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"},
@@ -9052,7 +8789,6 @@ description = "Typing stubs for regex"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_regex-2024.11.6.20250305-py3-none-any.whl", hash = "sha256:bd6b97d2169563c190b9b8c56e0e03caad1e24e0bea2f1c1fdfe5d354772aa42"},
{file = "types_regex-2024.11.6.20250305.tar.gz", hash = "sha256:28b886bf4eb23400030aa681a2d76b3ee869440c3d458b7017ec168000c1b62f"},
@@ -9065,7 +8801,6 @@ description = "Typing stubs for requests"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_requests-2.32.0.20250306-py3-none-any.whl", hash = "sha256:25f2cbb5c8710b2022f8bbee7b2b66f319ef14aeea2f35d80f18c9dbf3b60a0b"},
{file = "types_requests-2.32.0.20250306.tar.gz", hash = "sha256:0962352694ec5b2f95fda877ee60a159abdf84a0fc6fdace599f20acb41a03d1"},
@@ -9081,7 +8816,6 @@ description = "Typing stubs for six"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_six-1.17.0.20250304-py3-none-any.whl", hash = "sha256:e482df1d439375f4b7c1f2540b1b8584aea82850164a296203ead4a7024fe14f"},
{file = "types_six-1.17.0.20250304.tar.gz", hash = "sha256:eeb240f9faec63ddd0498d6c0b6abd0496b154a66f960c004d4d733cf31bb4bd"},
@@ -9094,7 +8828,6 @@ description = "Typing stubs for tqdm"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "types_tqdm-4.67.0.20250301-py3-none-any.whl", hash = "sha256:8af97deb8e6874af833555dc1fe0fcd456b1a789470bf6cd8813d4e7ee4f6c5b"},
{file = "types_tqdm-4.67.0.20250301.tar.gz", hash = "sha256:5e89a38ad89b867823368eb97d9f90d2fc69806bb055dde62716a05da62b5e0d"},
@@ -9110,7 +8843,6 @@ description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev", "lint", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
@@ -9123,7 +8855,6 @@ description = "Runtime inspection utilities for typing module."
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"},
{file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"},
@@ -9140,7 +8871,6 @@ description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"},
{file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"},
@@ -9153,7 +8883,6 @@ description = "Ultra fast JSON encoder and decoder for Python"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"},
{file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"},
@@ -9242,7 +8971,6 @@ description = "A library that prepares raw documents for downstream ML tasks."
optional = false
python-versions = ">=3.9.0"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "unstructured-0.16.25-py3-none-any.whl", hash = "sha256:14719ccef2830216cf1c5bf654f75e2bf07b17ca5dcee9da5ac74618130fd337"},
{file = "unstructured-0.16.25.tar.gz", hash = "sha256:73b9b0f51dbb687af572ecdb849a6811710b9cac797ddeab8ee80fa07d8aa5e6"},
@@ -9303,7 +9031,6 @@ description = "Python Client SDK for Unstructured API"
optional = false
python-versions = "<4.0,>=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "unstructured_client-0.28.1-py3-none-any.whl", hash = "sha256:0112688908f544681a67abf314e0d2023dfa120c8e5d9fa6d31390b914a06d72"},
{file = "unstructured_client-0.28.1.tar.gz", hash = "sha256:aac11fe5dd6b8dfdbc15aad3205fe791a3834dac29bb9f499fd515643554f709"},
@@ -9329,7 +9056,6 @@ description = "Serverless Vector SDK from Upstash"
optional = false
python-versions = "<4.0,>=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "upstash_vector-0.6.0-py3-none-any.whl", hash = "sha256:d0bdad7765b8a7f5c205b7a9c81ca4b9a4cee3ee4952afc7d5ea5fb76c3f3c3c"},
{file = "upstash_vector-0.6.0.tar.gz", hash = "sha256:a716ed4d0251362208518db8b194158a616d37d1ccbb1155f619df690599e39b"},
@@ -9345,7 +9071,6 @@ description = "Implementation of RFC 6570 URI Templates"
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"},
{file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"},
@@ -9358,14 +9083,13 @@ description = "HTTP library with thread-safe connection pooling, file post, and
optional = false
python-versions = ">=3.9"
groups = ["main", "dev", "storage", "tools", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
{file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
]
[package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
@@ -9377,7 +9101,6 @@ description = "New time-based UUID formats which are suited for use as a databas
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "uuid6-2024.7.10-py3-none-any.whl", hash = "sha256:93432c00ba403751f722829ad21759ff9db051dea140bf81493271e8e4dd18b7"},
{file = "uuid6-2024.7.10.tar.gz", hash = "sha256:2d29d7f63f593caaeea0e0d0dd0ad8129c9c663b29e19bdf882e864bedf18fb0"},
@@ -9390,7 +9113,6 @@ description = "The lightning-fast ASGI server."
optional = false
python-versions = ">=3.9"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"},
{file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"},
@@ -9403,12 +9125,12 @@ h11 = ">=0.8"
httptools = {version = ">=0.6.3", optional = true, markers = "extra == \"standard\""}
python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
-uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
+uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}
[package.extras]
-standard = ["colorama (>=0.4)", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
+standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"]
[[package]]
name = "uvloop"
@@ -9417,7 +9139,7 @@ description = "Fast implementation of asyncio event loop on top of libuv"
optional = false
python-versions = ">=3.8.0"
groups = ["vdb"]
-markers = "(python_version == \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\" and (sys_platform != \"win32\" and sys_platform != \"cygwin\")"
+markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\""
files = [
{file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"},
{file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"},
@@ -9470,7 +9192,6 @@ description = "Python Data Validation for Humans™"
optional = false
python-versions = ">=3.8,<4.0"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "validators-0.21.0-py3-none-any.whl", hash = "sha256:3470db6f2384c49727ee319afa2e97aec3f8fad736faa6067e0fd7f9eaf2c551"},
{file = "validators-0.21.0.tar.gz", hash = "sha256:245b98ab778ed9352a7269c6a8f6c2a839bed5b2a7e3e60273ce399d247dd4b3"},
@@ -9483,7 +9204,6 @@ description = "Python promises."
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"},
{file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"},
@@ -9496,7 +9216,6 @@ description = "Be Compatible with the Volcengine SDK for Python, The version of
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "volcengine_compat-1.0.156-py3-none-any.whl", hash = "sha256:4abc149a7601ebad8fa2d28fab50c7945145cf74daecb71bca797b0bdc82c5a5"},
{file = "volcengine_compat-1.0.156.tar.gz", hash = "sha256:e357d096828e31a202dc6047bbc5bf6fff3f54a98cd35a99ab5f965ea741a267"},
@@ -9518,7 +9237,6 @@ description = "Simple, modern and high performance file watching and code reload
optional = false
python-versions = ">=3.9"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "watchfiles-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08"},
{file = "watchfiles-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1"},
@@ -9603,7 +9321,6 @@ description = "Measures the displayed width of unicode strings in a terminal"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
{file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
@@ -9616,7 +9333,6 @@ description = "A python native Weaviate client"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "weaviate-client-3.21.0.tar.gz", hash = "sha256:ec94ac554883c765e94da8b2947c4f0fa4a0378ed3bbe9f3653df3a5b1745a6d"},
{file = "weaviate_client-3.21.0-py3-none-any.whl", hash = "sha256:420444ded7106fb000f4f8b2321b5f5fa2387825aa7a303d702accf61026f9d2"},
@@ -9638,7 +9354,6 @@ description = "Character encoding aliases for legacy web content"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
{file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
@@ -9651,7 +9366,6 @@ description = "WebSocket client for Python with low level API options"
optional = false
python-versions = ">=3.8"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"},
{file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"},
@@ -9669,7 +9383,6 @@ description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
optional = false
python-versions = ">=3.9"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "websockets-14.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e8179f95323b9ab1c11723e5d91a89403903f7b001828161b480a7810b334885"},
{file = "websockets-14.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d8c3e2cdb38f31d8bd7d9d28908005f6fa9def3324edb9bf336d7e4266fd397"},
@@ -9749,7 +9462,6 @@ description = "The comprehensive WSGI web application library."
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"},
{file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"},
@@ -9768,7 +9480,6 @@ description = "Module for decorators, wrappers and monkey patching."
optional = false
python-versions = ">=3.8"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"},
{file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"},
@@ -9858,7 +9569,6 @@ description = "Client for Xinference"
optional = false
python-versions = "*"
groups = ["vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "xinference-client-1.2.2.tar.gz", hash = "sha256:85d2ba0fcbaae616b06719c422364123cbac97f3e3c82e614095fe6d0e630ed0"},
{file = "xinference_client-1.2.2-py3-none-any.whl", hash = "sha256:6941d87cf61283a9d6e81cee6cb2609a183d34c6b7d808c6ba0c33437520518f"},
@@ -9879,7 +9589,6 @@ description = "Library for developers to extract data from Microsoft Excel (tm)
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "xlrd-2.0.1-py2.py3-none-any.whl", hash = "sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd"},
{file = "xlrd-2.0.1.tar.gz", hash = "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88"},
@@ -9897,7 +9606,6 @@ description = "A Python module for creating Excel XLSX files."
optional = false
python-versions = ">=3.6"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "XlsxWriter-3.2.2-py3-none-any.whl", hash = "sha256:272ce861e7fa5e82a4a6ebc24511f2cb952fde3461f6c6e1a1e81d3272db1471"},
{file = "xlsxwriter-3.2.2.tar.gz", hash = "sha256:befc7f92578a85fed261639fb6cde1fd51b79c5e854040847dde59d4317077dc"},
@@ -9910,7 +9618,6 @@ description = "Makes working with XML feel like you are working with JSON"
optional = false
python-versions = ">=3.6"
groups = ["storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"},
{file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"},
@@ -9923,7 +9630,6 @@ description = "Yet another URL library"
optional = false
python-versions = ">=3.9"
groups = ["main", "storage", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"},
{file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"},
@@ -10021,18 +9727,17 @@ description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.9"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
{file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
]
[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
-test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
type = ["pytest-mypy"]
[[package]]
@@ -10042,7 +9747,6 @@ description = "Very basic event publishing system"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"},
{file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"},
@@ -10062,7 +9766,6 @@ description = "Interfaces for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "zope.interface-7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2"},
{file = "zope.interface-7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a"},
@@ -10118,7 +9821,6 @@ description = "Zstandard bindings for Python"
optional = false
python-versions = ">=3.8"
groups = ["main", "vdb"]
-markers = "python_version == \"3.11\" or python_version >= \"3.12\""
files = [
{file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"},
{file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"},
@@ -10220,7 +9922,7 @@ files = [
]
[package.dependencies]
-cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""}
+cffi = {version = ">=1.11", optional = true, markers = "platform_python_implementation == \"PyPy\" or extra == \"cffi\""}
[package.extras]
cffi = ["cffi (>=1.11)"]
@@ -10228,4 +9930,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
-content-hash = "5ed3febffb932561050bba56b04bd03ffec7d54a7d26b95ad42c55ef45e07fae"
+content-hash = "adc577504435813e7e78b7433b9efb3dc6551f4eec2a65bc9aed762a8ef6540c"
diff --git a/api/pyproject.toml b/api/pyproject.toml
index eb7313449a..0e91e533b1 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -175,4 +175,4 @@ types-tqdm = "~4.67.0.20241221"
optional = true
[tool.poetry.group.lint.dependencies]
dotenv-linter = "~0.5.0"
-ruff = "~0.9.9"
+ruff = "~0.11.0"
diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py
index 7cb5f3f4af..dde434c3ad 100644
--- a/api/services/dataset_service.py
+++ b/api/services/dataset_service.py
@@ -949,7 +949,7 @@ class DocumentService:
).first()
if document:
document.dataset_process_rule_id = dataset_process_rule.id # type: ignore
- document.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
document.created_from = created_from
document.doc_form = knowledge_config.doc_form
document.doc_language = knowledge_config.doc_language
@@ -1245,7 +1245,7 @@ class DocumentService:
document.name = document_data.name
# update doc_type and doc_metadata if provided
if document_data.metadata is not None:
- document.doc_metadata = document_data.metadata.doc_type
+ document.doc_metadata = document_data.metadata.doc_metadata
document.doc_type = document_data.metadata.doc_type
# update document to be waiting
document.indexing_status = "waiting"
@@ -1916,7 +1916,7 @@ class SegmentService:
if cache_result is not None:
continue
segment.enabled = False
- segment.disabled_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ segment.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
segment.disabled_by = current_user.id
db.session.add(segment)
real_deal_segmment_ids.append(segment.id)
@@ -2008,7 +2008,7 @@ class SegmentService:
child_chunk.content = child_chunk_update_args.content
child_chunk.word_count = len(child_chunk.content)
child_chunk.updated_by = current_user.id
- child_chunk.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
child_chunk.type = "customized"
update_child_chunks.append(child_chunk)
else:
@@ -2065,7 +2065,7 @@ class SegmentService:
child_chunk.content = content
child_chunk.word_count = len(content)
child_chunk.updated_by = current_user.id
- child_chunk.updated_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ child_chunk.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
child_chunk.type = "customized"
db.session.add(child_chunk)
VectorService.update_child_chunk_vector([], [child_chunk], [], dataset)
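The timestamp assignments above use `datetime.UTC`, the Python 3.11+ alias for `datetime.timezone.utc` (the lock file above pins `python-versions = ">=3.11,<3.13"`, so the alias is always available). A minimal standard-library sketch of the pattern, shown only for illustration:

```python
import datetime

# datetime.UTC (Python 3.11+) is the same object as datetime.timezone.utc
assert datetime.UTC is datetime.timezone.utc

aware = datetime.datetime.now(datetime.UTC)  # timezone-aware UTC timestamp
naive = aware.replace(tzinfo=None)           # naive UTC value, as assigned to the model fields above
```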
diff --git a/api/services/entities/knowledge_entities/knowledge_entities.py b/api/services/entities/knowledge_entities/knowledge_entities.py
index f14c5b513a..b84339fd5f 100644
--- a/api/services/entities/knowledge_entities/knowledge_entities.py
+++ b/api/services/entities/knowledge_entities/knowledge_entities.py
@@ -88,6 +88,7 @@ class RetrievalModel(BaseModel):
search_method: Literal["hybrid_search", "semantic_search", "full_text_search"]
reranking_enable: bool
reranking_model: Optional[RerankingModel] = None
+ reranking_mode: Optional[str] = None
top_k: int
score_threshold_enabled: bool
score_threshold: Optional[float] = None
diff --git a/api/services/tag_service.py b/api/services/tag_service.py
index 9600601633..1fbaee96e8 100644
--- a/api/services/tag_service.py
+++ b/api/services/tag_service.py
@@ -20,7 +20,7 @@ class TagService:
)
if keyword:
query = query.filter(db.and_(Tag.name.ilike(f"%{keyword}%")))
- query = query.group_by(Tag.id)
+ query = query.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at)
results: list = query.order_by(Tag.created_at.desc()).all()
return results
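Listing every selected, non-aggregated column in GROUP BY is the portable SQL form of this kind of query (some engines accept grouping by the primary key alone, but spelling the columns out avoids relying on that). A minimal sketch with stand-in models — `TagBinding`, the column types, and the `func.count` aggregate are assumptions for illustration, not taken from this diff:

```python
from sqlalchemy import Column, DateTime, ForeignKey, String, func, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Tag(Base):
    """Stand-in for the project's Tag model; columns mirror the group_by above."""

    __tablename__ = "tags"
    id = Column(String, primary_key=True)
    type = Column(String)
    name = Column(String)
    created_at = Column(DateTime)


class TagBinding(Base):
    """Illustrative binding table; the real schema may differ."""

    __tablename__ = "tag_bindings"
    id = Column(String, primary_key=True)
    tag_id = Column(String, ForeignKey("tags.id"))


# Portable form: every selected, non-aggregated column appears in GROUP BY.
stmt = (
    select(Tag.id, Tag.type, Tag.name, Tag.created_at, func.count(TagBinding.id))
    .outerjoin(TagBinding, TagBinding.tag_id == Tag.id)
    .group_by(Tag.id, Tag.type, Tag.name, Tag.created_at)
    .order_by(Tag.created_at.desc())
)
print(stmt)  # renders the SQL; no database connection needed
```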
diff --git a/api/tasks/add_document_to_index_task.py b/api/tasks/add_document_to_index_task.py
index bd7fcdadea..c5a5ddaadc 100644
--- a/api/tasks/add_document_to_index_task.py
+++ b/api/tasks/add_document_to_index_task.py
@@ -21,7 +21,7 @@ def add_document_to_index_task(dataset_document_id: str):
Async Add document to index
:param dataset_document_id:
- Usage: add_document_to_index.delay(dataset_document_id)
+ Usage: add_document_to_index_task.delay(dataset_document_id)
"""
logging.info(click.style("Start add document to index: {}".format(dataset_document_id), fg="green"))
start_at = time.perf_counter()
diff --git a/api/tasks/batch_clean_document_task.py b/api/tasks/batch_clean_document_task.py
index 3bae82a5e3..8376ab1b03 100644
--- a/api/tasks/batch_clean_document_task.py
+++ b/api/tasks/batch_clean_document_task.py
@@ -21,7 +21,7 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
:param doc_form: doc_form
:param file_ids: file ids
- Usage: clean_document_task.delay(document_id, dataset_id)
+ Usage: batch_clean_document_task.delay(document_ids, dataset_id)
"""
logging.info(click.style("Start batch clean documents when documents deleted", fg="green"))
start_at = time.perf_counter()
diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py
index 40356b9731..648f92b0f8 100644
--- a/api/tasks/batch_create_segment_to_index_task.py
+++ b/api/tasks/batch_create_segment_to_index_task.py
@@ -35,7 +35,7 @@ def batch_create_segment_to_index_task(
:param tenant_id:
:param user_id:
- Usage: batch_create_segment_to_index_task.delay(segment_id)
+ Usage: batch_create_segment_to_index_task.delay(job_id, content, dataset_id, document_id, tenant_id, user_id)
"""
logging.info(click.style("Start batch create segment jobId: {}".format(job_id), fg="green"))
start_at = time.perf_counter()
diff --git a/api/tasks/delete_segment_from_index_task.py b/api/tasks/delete_segment_from_index_task.py
index 3b04143dd9..e4fbd5465e 100644
--- a/api/tasks/delete_segment_from_index_task.py
+++ b/api/tasks/delete_segment_from_index_task.py
@@ -17,7 +17,7 @@ def delete_segment_from_index_task(index_node_ids: list, dataset_id: str, docume
:param dataset_id:
:param document_id:
- Usage: delete_segment_from_index_task.delay(segment_ids)
+ Usage: delete_segment_from_index_task.delay(index_node_ids, dataset_id, document_id)
"""
logging.info(click.style("Start delete segment from index", fg="green"))
start_at = time.perf_counter()
diff --git a/api/tasks/disable_segments_from_index_task.py b/api/tasks/disable_segments_from_index_task.py
index 67112666e7..d43fb90ed3 100644
--- a/api/tasks/disable_segments_from_index_task.py
+++ b/api/tasks/disable_segments_from_index_task.py
@@ -15,7 +15,9 @@ from models.dataset import Document as DatasetDocument
def disable_segments_from_index_task(segment_ids: list, dataset_id: str, document_id: str):
"""
Async disable segments from index
- :param segment_ids:
+ :param segment_ids: list of segment ids
+ :param dataset_id: dataset id
+ :param document_id: document id
Usage: disable_segments_from_index_task.delay(segment_ids, dataset_id, document_id)
"""
diff --git a/api/tasks/document_indexing_task.py b/api/tasks/document_indexing_task.py
index 21b571b6cb..a8e3a69f19 100644
--- a/api/tasks/document_indexing_task.py
+++ b/api/tasks/document_indexing_task.py
@@ -19,7 +19,7 @@ def document_indexing_task(dataset_id: str, document_ids: list):
:param dataset_id:
:param document_ids:
- Usage: document_indexing_task.delay(dataset_id, document_id)
+ Usage: document_indexing_task.delay(dataset_id, document_ids)
"""
documents = []
start_at = time.perf_counter()
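The `Usage:` lines in these task docstrings all follow the same Celery convention: whatever is passed to `.delay()` must mirror the task's signature. A minimal sketch of that convention (task body elided; the enqueue call is left commented out because it needs a running broker):

```python
from celery import shared_task


@shared_task(queue="dataset")
def document_indexing_task(dataset_id: str, document_ids: list):
    # real implementation lives in api/tasks/document_indexing_task.py
    ...


# .delay(*args) is shorthand for .apply_async(args=...), so the positional
# arguments must line up with the signature above:
# document_indexing_task.delay("dataset-id", ["doc-id-1", "doc-id-2"])
```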
diff --git a/api/tasks/duplicate_document_indexing_task.py b/api/tasks/duplicate_document_indexing_task.py
index 8e1d2b6b5d..b0cd486476 100644
--- a/api/tasks/duplicate_document_indexing_task.py
+++ b/api/tasks/duplicate_document_indexing_task.py
@@ -20,7 +20,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
:param dataset_id:
:param document_ids:
- Usage: duplicate_document_indexing_task.delay(dataset_id, document_id)
+ Usage: duplicate_document_indexing_task.delay(dataset_id, document_ids)
"""
documents = []
start_at = time.perf_counter()
@@ -51,7 +51,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
if document:
document.indexing_status = "error"
document.error = str(e)
- document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.add(document)
db.session.commit()
return
@@ -80,7 +80,7 @@ def duplicate_document_indexing_task(dataset_id: str, document_ids: list):
db.session.commit()
document.indexing_status = "parsing"
- document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
documents.append(document)
db.session.add(document)
db.session.commit()
diff --git a/api/tasks/enable_segments_to_index_task.py b/api/tasks/enable_segments_to_index_task.py
index 0864e05e25..3942268afe 100644
--- a/api/tasks/enable_segments_to_index_task.py
+++ b/api/tasks/enable_segments_to_index_task.py
@@ -18,9 +18,11 @@ from models.dataset import Document as DatasetDocument
def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_id: str):
"""
Async enable segments to index
- :param segment_ids:
+ :param segment_ids: list of segment ids
+ :param dataset_id: dataset id
+ :param document_id: document id
- Usage: enable_segments_to_index_task.delay(segment_ids)
+ Usage: enable_segments_to_index_task.delay(segment_ids, dataset_id, document_id)
"""
start_at = time.perf_counter()
dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
@@ -97,7 +99,7 @@ def enable_segments_to_index_task(segment_ids: list, dataset_id: str, document_i
{
"error": str(e),
"status": "error",
- "disabled_at": datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
+ "disabled_at": datetime.datetime.now(datetime.UTC).replace(tzinfo=None),
"enabled": False,
}
)
diff --git a/api/tasks/external_document_indexing_task.py b/api/tasks/external_document_indexing_task.py
deleted file mode 100644
index a45b3030bf..0000000000
--- a/api/tasks/external_document_indexing_task.py
+++ /dev/null
@@ -1,91 +0,0 @@
-import json
-import logging
-import time
-
-import click
-from celery import shared_task # type: ignore
-
-from core.indexing_runner import DocumentIsPausedError
-from extensions.ext_database import db
-from extensions.ext_storage import storage
-from models.dataset import Dataset, ExternalKnowledgeApis
-from models.model import UploadFile
-from services.external_knowledge_service import ExternalDatasetService
-
-
-@shared_task(queue="dataset")
-def external_document_indexing_task(
- dataset_id: str, external_knowledge_api_id: str, data_source: dict, process_parameter: dict
-):
- """
- Async process document
- :param dataset_id:
- :param external_knowledge_api_id:
- :param data_source:
- :param process_parameter:
- Usage: external_document_indexing_task.delay(dataset_id, document_id)
- """
- start_at = time.perf_counter()
-
- dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
- if not dataset:
- logging.info(
- click.style("Processed external dataset: {} failed, dataset not exit.".format(dataset_id), fg="red")
- )
- return
-
- # get external api template
- external_knowledge_api = (
- db.session.query(ExternalKnowledgeApis)
- .filter(
- ExternalKnowledgeApis.id == external_knowledge_api_id, ExternalKnowledgeApis.tenant_id == dataset.tenant_id
- )
- .first()
- )
-
- if not external_knowledge_api:
- logging.info(
- click.style(
- "Processed external dataset: {} failed, api template: {} not exit.".format(
- dataset_id, external_knowledge_api_id
- ),
- fg="red",
- )
- )
- return
- files = {}
- if data_source["type"] == "upload_file":
- upload_file_list = data_source["info_list"]["file_info_list"]["file_ids"]
- for file_id in upload_file_list:
- file = (
- db.session.query(UploadFile)
- .filter(UploadFile.tenant_id == dataset.tenant_id, UploadFile.id == file_id)
- .first()
- )
- if file:
- files[file.id] = (file.name, storage.load_once(file.key), file.mime_type)
- try:
- settings = ExternalDatasetService.get_external_knowledge_api_settings(
- json.loads(external_knowledge_api.settings)
- )
-
- # do http request
- response = ExternalDatasetService.process_external_api(settings, files)
- job_id = response.json().get("job_id")
- if job_id:
- # save job_id to dataset
- dataset.job_id = job_id
- db.session.commit()
-
- end_at = time.perf_counter()
- logging.info(
- click.style(
- "Processed external dataset: {} successful, latency: {}".format(dataset.id, end_at - start_at),
- fg="green",
- )
- )
- except DocumentIsPausedError as ex:
- logging.info(click.style(str(ex), fg="yellow"))
-
- except Exception:
- pass
diff --git a/api/tasks/retry_document_indexing_task.py b/api/tasks/retry_document_indexing_task.py
index 74fd542f6c..83ddbcfcc5 100644
--- a/api/tasks/retry_document_indexing_task.py
+++ b/api/tasks/retry_document_indexing_task.py
@@ -20,7 +20,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
:param dataset_id:
:param document_ids:
- Usage: retry_document_indexing_task.delay(dataset_id, document_id)
+ Usage: retry_document_indexing_task.delay(dataset_id, document_ids)
"""
documents: list[Document] = []
start_at = time.perf_counter()
@@ -48,7 +48,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
if document:
document.indexing_status = "error"
document.error = str(e)
- document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.add(document)
db.session.commit()
redis_client.delete(retry_indexing_cache_key)
@@ -76,7 +76,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
db.session.commit()
document.indexing_status = "parsing"
- document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.add(document)
db.session.commit()
@@ -86,7 +86,7 @@ def retry_document_indexing_task(dataset_id: str, document_ids: list[str]):
except Exception as ex:
document.indexing_status = "error"
document.error = str(ex)
- document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.add(document)
db.session.commit()
logging.info(click.style(str(ex), fg="yellow"))
diff --git a/api/tasks/sync_website_document_indexing_task.py b/api/tasks/sync_website_document_indexing_task.py
index 8da050d0d1..e75252edbe 100644
--- a/api/tasks/sync_website_document_indexing_task.py
+++ b/api/tasks/sync_website_document_indexing_task.py
@@ -46,7 +46,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
if document:
document.indexing_status = "error"
document.error = str(e)
- document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.add(document)
db.session.commit()
redis_client.delete(sync_indexing_cache_key)
@@ -72,7 +72,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
db.session.commit()
document.indexing_status = "parsing"
- document.processing_started_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.processing_started_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.add(document)
db.session.commit()
@@ -82,7 +82,7 @@ def sync_website_document_indexing_task(dataset_id: str, document_id: str):
except Exception as ex:
document.indexing_status = "error"
document.error = str(ex)
- document.stopped_at = datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
+ document.stopped_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
db.session.add(document)
db.session.commit()
logging.info(click.style(str(ex), fg="yellow"))
diff --git a/api/tests/unit_tests/core/app/app_config/features/file_upload/test_manager.py b/api/tests/unit_tests/core/app/app_config/features/file_upload/test_manager.py
index 50a612ec5f..2acf8815a5 100644
--- a/api/tests/unit_tests/core/app/app_config/features/file_upload/test_manager.py
+++ b/api/tests/unit_tests/core/app/app_config/features/file_upload/test_manager.py
@@ -18,7 +18,9 @@ def test_convert_with_vision():
number_limits=5,
transfer_methods=[FileTransferMethod.REMOTE_URL],
detail=ImagePromptMessageContent.DETAIL.HIGH,
- )
+ ),
+ allowed_file_upload_methods=[FileTransferMethod.REMOTE_URL],
+ number_limits=5,
)
assert result == expected
@@ -33,7 +35,9 @@ def test_convert_without_vision():
}
result = FileUploadConfigManager.convert(config, is_vision=False)
expected = FileUploadConfig(
- image_config=ImageConfig(number_limits=5, transfer_methods=[FileTransferMethod.REMOTE_URL])
+ image_config=ImageConfig(number_limits=5, transfer_methods=[FileTransferMethod.REMOTE_URL]),
+ allowed_file_upload_methods=[FileTransferMethod.REMOTE_URL],
+ number_limits=5,
)
assert result == expected
diff --git a/api/tests/unit_tests/core/tools/workflow_as_tool/__init__.py b/api/tests/unit_tests/core/tools/workflow_as_tool/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py b/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py
new file mode 100644
index 0000000000..15a9e8e9f4
--- /dev/null
+++ b/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py
@@ -0,0 +1,49 @@
+import pytest
+
+from core.app.entities.app_invoke_entities import InvokeFrom
+from core.tools.__base.tool_runtime import ToolRuntime
+from core.tools.entities.common_entities import I18nObject
+from core.tools.entities.tool_entities import ToolEntity, ToolIdentity
+from core.tools.errors import ToolInvokeError
+from core.tools.workflow_as_tool.tool import WorkflowTool
+
+
+def test_workflow_tool_should_raise_tool_invoke_error_when_result_has_error_field(monkeypatch):
+ """Ensure that WorkflowTool raises a `ToolInvokeError` when
+ `WorkflowAppGenerator.generate` returns a result with an `error` key inside
+ the `data` element.
+ """
+ entity = ToolEntity(
+ identity=ToolIdentity(author="test", name="test tool", label=I18nObject(en_US="test tool"), provider="test"),
+ parameters=[],
+ description=None,
+ output_schema=None,
+ has_runtime_parameters=False,
+ )
+ runtime = ToolRuntime(tenant_id="test_tool", invoke_from=InvokeFrom.EXPLORE)
+ tool = WorkflowTool(
+ workflow_app_id="",
+ workflow_as_tool_id="",
+ version="1",
+ workflow_entities={},
+ workflow_call_depth=1,
+ entity=entity,
+ runtime=runtime,
+ )
+
+ # Patch these methods to avoid database access.
+ monkeypatch.setattr(tool, "_get_app", lambda *args, **kwargs: None)
+ monkeypatch.setattr(tool, "_get_workflow", lambda *args, **kwargs: None)
+ monkeypatch.setattr(tool, "_get_user", lambda *args, **kwargs: None)
+
+ # Replace the return value of `WorkflowAppGenerator.generate`.
+ monkeypatch.setattr(
+ "core.app.apps.workflow.app_generator.WorkflowAppGenerator.generate",
+ lambda *args, **kwargs: {"data": {"error": "oops"}},
+ )
+
+ with pytest.raises(ToolInvokeError) as exc_info:
+ # WorkflowTool always returns a generator, so we need to iterate to
+ # actually `run` the tool.
+ list(tool.invoke("test_user", {}))
+ assert exc_info.value.args == ("oops",)
diff --git a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py
index 97bacada74..2073d355f0 100644
--- a/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py
+++ b/api/tests/unit_tests/core/workflow/nodes/http_request/test_http_request_node.py
@@ -2,7 +2,7 @@ import httpx
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file import File, FileTransferMethod, FileType
-from core.variables import FileVariable
+from core.variables import ArrayFileVariable, FileVariable
from core.workflow.entities.variable_pool import VariablePool
from core.workflow.graph_engine import Graph, GraphInitParams, GraphRuntimeState
from core.workflow.nodes.answer import AnswerStreamGenerateRoute
@@ -183,7 +183,7 @@ def test_http_request_node_form_with_file(monkeypatch):
def attr_checker(*args, **kwargs):
assert kwargs["data"] == {"name": "test"}
- assert kwargs["files"] == {"file": (None, b"test", "application/octet-stream")}
+ assert kwargs["files"] == [("file", (None, b"test", "application/octet-stream"))]
return httpx.Response(200, content=b"")
monkeypatch.setattr(
@@ -194,3 +194,130 @@ def test_http_request_node_form_with_file(monkeypatch):
assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED
assert result.outputs is not None
assert result.outputs["body"] == ""
+
+
+def test_http_request_node_form_with_multiple_files(monkeypatch):
+ data = HttpRequestNodeData(
+ title="test",
+ method="post",
+ url="http://example.org/upload",
+ authorization=HttpRequestNodeAuthorization(type="no-auth"),
+ headers="",
+ params="",
+ body=HttpRequestNodeBody(
+ type="form-data",
+ data=[
+ BodyData(
+ key="files",
+ type="file",
+ file=["1111", "files"],
+ ),
+ BodyData(
+ key="name",
+ type="text",
+ value="test",
+ ),
+ ],
+ ),
+ )
+
+ variable_pool = VariablePool(
+ system_variables={},
+ user_inputs={},
+ )
+
+ files = [
+ File(
+ tenant_id="1",
+ type=FileType.IMAGE,
+ transfer_method=FileTransferMethod.LOCAL_FILE,
+ related_id="file1",
+ filename="image1.jpg",
+ mime_type="image/jpeg",
+ storage_key="",
+ ),
+ File(
+ tenant_id="1",
+ type=FileType.DOCUMENT,
+ transfer_method=FileTransferMethod.LOCAL_FILE,
+ related_id="file2",
+ filename="document.pdf",
+ mime_type="application/pdf",
+ storage_key="",
+ ),
+ ]
+
+ variable_pool.add(
+ ["1111", "files"],
+ ArrayFileVariable(
+ name="files",
+ value=files,
+ ),
+ )
+
+ node = HttpRequestNode(
+ id="1",
+ config={
+ "id": "1",
+ "data": data.model_dump(),
+ },
+ graph_init_params=GraphInitParams(
+ tenant_id="1",
+ app_id="1",
+ workflow_type=WorkflowType.WORKFLOW,
+ workflow_id="1",
+ graph_config={},
+ user_id="1",
+ user_from=UserFrom.ACCOUNT,
+ invoke_from=InvokeFrom.SERVICE_API,
+ call_depth=0,
+ ),
+ graph=Graph(
+ root_node_id="1",
+ answer_stream_generate_routes=AnswerStreamGenerateRoute(
+ answer_dependencies={},
+ answer_generate_route={},
+ ),
+ end_stream_param=EndStreamParam(
+ end_dependencies={},
+ end_stream_variable_selector_mapping={},
+ ),
+ ),
+ graph_runtime_state=GraphRuntimeState(
+ variable_pool=variable_pool,
+ start_at=0,
+ ),
+ )
+
+ monkeypatch.setattr(
+ "core.workflow.nodes.http_request.executor.file_manager.download",
+ lambda file: b"test_image_data" if file.mime_type == "image/jpeg" else b"test_pdf_data",
+ )
+
+ def attr_checker(*args, **kwargs):
+ assert kwargs["data"] == {"name": "test"}
+
+ assert len(kwargs["files"]) == 2
+ assert kwargs["files"][0][0] == "files"
+ assert kwargs["files"][1][0] == "files"
+
+ file_tuples = [f[1] for f in kwargs["files"]]
+ file_contents = [f[1] for f in file_tuples]
+ file_types = [f[2] for f in file_tuples]
+
+ assert b"test_image_data" in file_contents
+ assert b"test_pdf_data" in file_contents
+ assert "image/jpeg" in file_types
+ assert "application/pdf" in file_types
+
+ return httpx.Response(200, content=b'{"status":"success"}')
+
+ monkeypatch.setattr(
+ "core.helper.ssrf_proxy.post",
+ attr_checker,
+ )
+
+ result = node._run()
+ assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED
+ assert result.outputs is not None
+ assert result.outputs["body"] == '{"status":"success"}'
diff --git a/api/tests/unit_tests/core/workflow/nodes/tool/__init__.py b/api/tests/unit_tests/core/workflow/nodes/tool/__init__.py
new file mode 100644
index 0000000000..8b13789179
--- /dev/null
+++ b/api/tests/unit_tests/core/workflow/nodes/tool/__init__.py
@@ -0,0 +1 @@
+
diff --git a/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py b/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py
new file mode 100644
index 0000000000..f593510830
--- /dev/null
+++ b/api/tests/unit_tests/core/workflow/nodes/tool/test_tool_node.py
@@ -0,0 +1,112 @@
+from collections.abc import Generator
+
+import pytest
+
+from core.app.entities.app_invoke_entities import InvokeFrom
+from core.tools.entities.tool_entities import ToolInvokeMessage, ToolProviderType
+from core.tools.errors import ToolInvokeError
+from core.workflow.entities.node_entities import NodeRunResult
+from core.workflow.entities.variable_pool import VariablePool
+from core.workflow.graph_engine import Graph, GraphInitParams, GraphRuntimeState
+from core.workflow.nodes.answer import AnswerStreamGenerateRoute
+from core.workflow.nodes.end import EndStreamParam
+from core.workflow.nodes.enums import ErrorStrategy
+from core.workflow.nodes.event import RunCompletedEvent
+from core.workflow.nodes.tool import ToolNode
+from core.workflow.nodes.tool.entities import ToolNodeData
+from models import UserFrom, WorkflowNodeExecutionStatus, WorkflowType
+
+
+def _create_tool_node():
+ data = ToolNodeData(
+ title="Test Tool",
+ tool_parameters={},
+ provider_id="test_tool",
+ provider_type=ToolProviderType.WORKFLOW,
+ provider_name="test tool",
+ tool_name="test tool",
+ tool_label="test tool",
+ tool_configurations={},
+ plugin_unique_identifier=None,
+ desc="Exception handling test tool",
+ error_strategy=ErrorStrategy.FAIL_BRANCH,
+ version="1",
+ )
+ variable_pool = VariablePool(
+ system_variables={},
+ user_inputs={},
+ )
+ node = ToolNode(
+ id="1",
+ config={
+ "id": "1",
+ "data": data.model_dump(),
+ },
+ graph_init_params=GraphInitParams(
+ tenant_id="1",
+ app_id="1",
+ workflow_type=WorkflowType.WORKFLOW,
+ workflow_id="1",
+ graph_config={},
+ user_id="1",
+ user_from=UserFrom.ACCOUNT,
+ invoke_from=InvokeFrom.SERVICE_API,
+ call_depth=0,
+ ),
+ graph=Graph(
+ root_node_id="1",
+ answer_stream_generate_routes=AnswerStreamGenerateRoute(
+ answer_dependencies={},
+ answer_generate_route={},
+ ),
+ end_stream_param=EndStreamParam(
+ end_dependencies={},
+ end_stream_variable_selector_mapping={},
+ ),
+ ),
+ graph_runtime_state=GraphRuntimeState(
+ variable_pool=variable_pool,
+ start_at=0,
+ ),
+ )
+ return node
+
+
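+# Minimal stand-in for the workflow tool runtime so the test needs no real tool or database.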
+class MockToolRuntime:
+ def get_merged_runtime_parameters(self):
+ pass
+
+
+def mock_message_stream() -> Generator[ToolInvokeMessage, None, None]:
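+ # 'yield from []' keeps this a generator function; the error is raised only once the stream is consumed.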
+ yield from []
+ raise ToolInvokeError("oops")
+
+
+def test_tool_node_on_tool_invoke_error(monkeypatch: pytest.MonkeyPatch):
+ """Ensure that ToolNode can handle ToolInvokeError when transforming
+ messages generated by ToolEngine.generic_invoke.
+ """
+ tool_node = _create_tool_node()
+
+ # Need to patch ToolManager and ToolEngine so that we don't
+ # have to set up a database.
+ monkeypatch.setattr(
+ "core.tools.tool_manager.ToolManager.get_workflow_tool_runtime", lambda *args, **kwargs: MockToolRuntime()
+ )
+ monkeypatch.setattr(
+ "core.tools.tool_engine.ToolEngine.generic_invoke",
+ lambda *args, **kwargs: mock_message_stream(),
+ )
+
+ streams = list(tool_node._run())
+ assert len(streams) == 1
+ stream = streams[0]
+ assert isinstance(stream, RunCompletedEvent)
+ result = stream.run_result
+ assert isinstance(result, NodeRunResult)
+ assert result.status == WorkflowNodeExecutionStatus.FAILED
+ assert "oops" in result.error
+ assert "Failed to transform tool message:" in result.error
+ assert result.error_type == "ToolInvokeError"
diff --git a/docker/.env.example b/docker/.env.example
index c68a65ba44..c7b77d4c56 100644
--- a/docker/.env.example
+++ b/docker/.env.example
@@ -397,12 +397,12 @@ QDRANT_CLIENT_TIMEOUT=20
QDRANT_GRPC_ENABLED=false
QDRANT_GRPC_PORT=6334
-# Milvus configuration Only available when VECTOR_STORE is `milvus`.
+# Milvus configuration. Only available when VECTOR_STORE is `milvus`.
# The milvus uri.
-MILVUS_URI=http://127.0.0.1:19530
+MILVUS_URI=http://host.docker.internal:19530
MILVUS_TOKEN=
-MILVUS_USER=root
-MILVUS_PASSWORD=Milvus
+MILVUS_USER=
+MILVUS_PASSWORD=
MILVUS_ENABLE_HYBRID_SEARCH=False
# MyScale configuration, only available when VECTOR_STORE is `myscale`
@@ -431,6 +431,8 @@ PGVECTOR_PASSWORD=difyai123456
PGVECTOR_DATABASE=dify
PGVECTOR_MIN_CONNECTION=1
PGVECTOR_MAX_CONNECTION=5
+PGVECTOR_PG_BIGM=false
+PGVECTOR_PG_BIGM_VERSION=1.2-20240606
# pgvecto-rs configurations, only available when VECTOR_STORE is `pgvecto-rs`
PGVECTO_RS_HOST=pgvecto-rs
@@ -723,6 +725,7 @@ WORKFLOW_FILE_UPLOAD_LIMIT=10
# HTTP request node in workflow configuration
HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
+HTTP_REQUEST_NODE_SSL_VERIFY=True
# SSRF Proxy server HTTP URL
SSRF_PROXY_HTTP_URL=http://ssrf_proxy:3128
diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml
index 7ec85f522d..29753be565 100644
--- a/docker/docker-compose-template.yaml
+++ b/docker/docker-compose-template.yaml
@@ -322,8 +322,13 @@ services:
POSTGRES_DB: ${PGVECTOR_POSTGRES_DB:-dify}
# postgres data directory
PGDATA: ${PGVECTOR_PGDATA:-/var/lib/postgresql/data/pgdata}
+ # pg_bigm module for full text search
+ PG_BIGM: ${PGVECTOR_PG_BIGM:-false}
+ PG_BIGM_VERSION: ${PGVECTOR_PG_BIGM_VERSION:-1.2-20240606}
volumes:
- ./volumes/pgvector/data:/var/lib/postgresql/data
+ - ./pgvector/docker-entrypoint.sh:/docker-entrypoint.sh
+ entrypoint: [ '/docker-entrypoint.sh' ]
healthcheck:
test: [ 'CMD', 'pg_isready' ]
interval: 1s
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index 669b81a140..308621d1d2 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -134,10 +134,10 @@ x-shared-env: &shared-api-worker-env
QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT:-20}
QDRANT_GRPC_ENABLED: ${QDRANT_GRPC_ENABLED:-false}
QDRANT_GRPC_PORT: ${QDRANT_GRPC_PORT:-6334}
- MILVUS_URI: ${MILVUS_URI:-http://127.0.0.1:19530}
+ MILVUS_URI: ${MILVUS_URI:-http://host.docker.internal:19530}
MILVUS_TOKEN: ${MILVUS_TOKEN:-}
- MILVUS_USER: ${MILVUS_USER:-root}
- MILVUS_PASSWORD: ${MILVUS_PASSWORD:-Milvus}
+ MILVUS_USER: ${MILVUS_USER:-}
+ MILVUS_PASSWORD: ${MILVUS_PASSWORD:-}
MILVUS_ENABLE_HYBRID_SEARCH: ${MILVUS_ENABLE_HYBRID_SEARCH:-False}
MYSCALE_HOST: ${MYSCALE_HOST:-myscale}
MYSCALE_PORT: ${MYSCALE_PORT:-8123}
@@ -157,6 +157,8 @@ x-shared-env: &shared-api-worker-env
PGVECTOR_DATABASE: ${PGVECTOR_DATABASE:-dify}
PGVECTOR_MIN_CONNECTION: ${PGVECTOR_MIN_CONNECTION:-1}
PGVECTOR_MAX_CONNECTION: ${PGVECTOR_MAX_CONNECTION:-5}
+ PGVECTOR_PG_BIGM: ${PGVECTOR_PG_BIGM:-false}
+ PGVECTOR_PG_BIGM_VERSION: ${PGVECTOR_PG_BIGM_VERSION:-1.2-20240606}
PGVECTO_RS_HOST: ${PGVECTO_RS_HOST:-pgvecto-rs}
PGVECTO_RS_PORT: ${PGVECTO_RS_PORT:-5432}
PGVECTO_RS_USER: ${PGVECTO_RS_USER:-postgres}
@@ -315,6 +317,7 @@ x-shared-env: &shared-api-worker-env
WORKFLOW_FILE_UPLOAD_LIMIT: ${WORKFLOW_FILE_UPLOAD_LIMIT:-10}
HTTP_REQUEST_NODE_MAX_BINARY_SIZE: ${HTTP_REQUEST_NODE_MAX_BINARY_SIZE:-10485760}
HTTP_REQUEST_NODE_MAX_TEXT_SIZE: ${HTTP_REQUEST_NODE_MAX_TEXT_SIZE:-1048576}
+ HTTP_REQUEST_NODE_SSL_VERIFY: ${HTTP_REQUEST_NODE_SSL_VERIFY:-True}
SSRF_PROXY_HTTP_URL: ${SSRF_PROXY_HTTP_URL:-http://ssrf_proxy:3128}
SSRF_PROXY_HTTPS_URL: ${SSRF_PROXY_HTTPS_URL:-http://ssrf_proxy:3128}
LOOP_NODE_MAX_COUNT: ${LOOP_NODE_MAX_COUNT:-100}
@@ -748,8 +751,13 @@ services:
POSTGRES_DB: ${PGVECTOR_POSTGRES_DB:-dify}
# postgres data directory
PGDATA: ${PGVECTOR_PGDATA:-/var/lib/postgresql/data/pgdata}
+ # pg_bigm module for full text search
+ PG_BIGM: ${PGVECTOR_PG_BIGM:-false}
+ PG_BIGM_VERSION: ${PGVECTOR_PG_BIGM_VERSION:-1.2-20240606}
volumes:
- ./volumes/pgvector/data:/var/lib/postgresql/data
+ - ./pgvector/docker-entrypoint.sh:/docker-entrypoint.sh
+ entrypoint: [ '/docker-entrypoint.sh' ]
healthcheck:
test: [ 'CMD', 'pg_isready' ]
interval: 1s
diff --git a/docker/nginx/docker-entrypoint.sh b/docker/nginx/docker-entrypoint.sh
index d343cb3efa..8e1110ffa9 100755
--- a/docker/nginx/docker-entrypoint.sh
+++ b/docker/nginx/docker-entrypoint.sh
@@ -1,5 +1,7 @@
#!/bin/bash
+HTTPS_CONFIG=''
+
if [ "${NGINX_HTTPS_ENABLED}" = "true" ]; then
# Check if the certificate and key files for the specified domain exist
if [ -n "${CERTBOT_DOMAIN}" ] && \
@@ -20,6 +22,7 @@ if [ "${NGINX_HTTPS_ENABLED}" = "true" ]; then
# Substitute the HTTPS_CONFIG in the default.conf.template with content from https.conf.template
envsubst '${HTTPS_CONFIG}' < /etc/nginx/conf.d/default.conf.template > /etc/nginx/conf.d/default.conf
fi
+export HTTPS_CONFIG
if [ "${NGINX_ENABLE_CERTBOT_CHALLENGE}" = "true" ]; then
ACME_CHALLENGE_LOCATION='location /.well-known/acme-challenge/ { root /var/www/html; }'
@@ -33,7 +36,7 @@ env_vars=$(printenv | cut -d= -f1 | sed 's/^/$/g' | paste -sd, -)
envsubst "$env_vars" < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf
envsubst "$env_vars" < /etc/nginx/proxy.conf.template > /etc/nginx/proxy.conf
-envsubst < /etc/nginx/conf.d/default.conf.template > /etc/nginx/conf.d/default.conf
+envsubst "$env_vars" < /etc/nginx/conf.d/default.conf.template > /etc/nginx/conf.d/default.conf
# Start Nginx using the default entrypoint
exec nginx -g 'daemon off;'
\ No newline at end of file
diff --git a/docker/pgvector/docker-entrypoint.sh b/docker/pgvector/docker-entrypoint.sh
new file mode 100755
index 0000000000..262eacfb13
--- /dev/null
+++ b/docker/pgvector/docker-entrypoint.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
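+# PostgreSQL major version used to build pg_bigm; it must match the server version in the image.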
+PG_MAJOR=16
+
+if [ "${PG_BIGM}" = "true" ]; then
+ # install pg_bigm
+ apt-get update
+ apt-get install -y curl make gcc postgresql-server-dev-${PG_MAJOR}
+
+ curl -LO https://github.com/pgbigm/pg_bigm/archive/refs/tags/v${PG_BIGM_VERSION}.tar.gz
+ tar xf v${PG_BIGM_VERSION}.tar.gz
+ cd pg_bigm-${PG_BIGM_VERSION} || exit 1
+ make USE_PGXS=1 PG_CONFIG=/usr/bin/pg_config
+ make USE_PGXS=1 PG_CONFIG=/usr/bin/pg_config install
+
+ cd - || exit 1
+ rm -rf v${PG_BIGM_VERSION}.tar.gz pg_bigm-${PG_BIGM_VERSION}
+
+ # enable pg_bigm
+ sed -i -e 's/^#\s*shared_preload_libraries.*/shared_preload_libraries = '\''pg_bigm'\''/' /var/lib/postgresql/data/pgdata/postgresql.conf
+fi
+
+# Run the original entrypoint script
+exec /usr/local/bin/docker-entrypoint.sh postgres
diff --git a/web/.husky/pre-commit b/web/.husky/pre-commit
index cca8abe27a..a0d7df1687 100755
--- a/web/.husky/pre-commit
+++ b/web/.husky/pre-commit
@@ -1,3 +1,4 @@
+#!/bin/sh
# get the list of modified files
files=$(git diff --cached --name-only)
@@ -32,7 +33,7 @@ if $api_modified; then
ruff check --fix ./api
# run Ruff linter checks
- ruff check --preview ./api || status=$?
+ ruff check ./api || status=$?
status=${status:-0}
diff --git a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx
index eb0e7371bb..cd24ac1467 100644
--- a/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx
+++ b/web/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/tracing/panel.tsx
@@ -197,7 +197,7 @@ const Panel: FC = () => {
{t(`${I18N_PREFIX}.${enabled ? 'enabled' : 'disabled'}`)}
-