Merge main into feat/plugin

This commit is contained in:
Yeuoly 2024-10-22 17:35:11 +08:00
commit dea2962a79
No known key found for this signature in database
GPG Key ID: A66E7E320FB19F61
72 changed files with 789 additions and 1000 deletions

View File

@ -168,7 +168,7 @@ Star Dify on GitHub and be instantly notified of new releases.
> Before installing Dify, make sure your machine meets the following minimum system requirements: > Before installing Dify, make sure your machine meets the following minimum system requirements:
> >
>- CPU >= 2 Core >- CPU >= 2 Core
>- RAM >= 4GB >- RAM >= 4 GiB
</br> </br>

View File

@ -174,7 +174,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
在安装 Dify 之前,请确保您的机器满足以下最低系统要求: 在安装 Dify 之前,请确保您的机器满足以下最低系统要求:
- CPU >= 2 Core - CPU >= 2 Core
- RAM >= 4GB - RAM >= 4 GiB
### 快速启动 ### 快速启动

View File

@ -1,5 +1,7 @@
import os import os
from configs import dify_config
if os.environ.get("DEBUG", "false").lower() != "true": if os.environ.get("DEBUG", "false").lower() != "true":
from gevent import monkey from gevent import monkey
@ -36,17 +38,11 @@ if hasattr(time, "tzset"):
time.tzset() time.tzset()
# -------------
# Configuration
# -------------
config_type = os.getenv("EDITION", default="SELF_HOSTED") # ce edition first
# create app # create app
app = create_app() app = create_app()
celery = app.extensions["celery"] celery = app.extensions["celery"]
if app.config.get("TESTING"): if dify_config.TESTING:
print("App is running in TESTING mode") print("App is running in TESTING mode")
@ -54,15 +50,15 @@ if app.config.get("TESTING"):
def after_request(response): def after_request(response):
"""Add Version headers to the response.""" """Add Version headers to the response."""
response.set_cookie("remember_token", "", expires=0) response.set_cookie("remember_token", "", expires=0)
response.headers.add("X-Version", app.config["CURRENT_VERSION"]) response.headers.add("X-Version", dify_config.CURRENT_VERSION)
response.headers.add("X-Env", app.config["DEPLOY_ENV"]) response.headers.add("X-Env", dify_config.DEPLOY_ENV)
return response return response
@app.route("/health") @app.route("/health")
def health(): def health():
return Response( return Response(
json.dumps({"pid": os.getpid(), "status": "ok", "version": app.config["CURRENT_VERSION"]}), json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.CURRENT_VERSION}),
status=200, status=200,
content_type="application/json", content_type="application/json",
) )

View File

@ -10,9 +10,6 @@ if os.environ.get("DEBUG", "false").lower() != "true":
grpc.experimental.gevent.init_gevent() grpc.experimental.gevent.init_gevent()
import json import json
import logging
import sys
from logging.handlers import RotatingFileHandler
from flask import Flask, Response, request from flask import Flask, Response, request
from flask_cors import CORS from flask_cors import CORS
@ -27,6 +24,7 @@ from extensions import (
ext_compress, ext_compress,
ext_database, ext_database,
ext_hosting_provider, ext_hosting_provider,
ext_logging,
ext_login, ext_login,
ext_mail, ext_mail,
ext_migrate, ext_migrate,
@ -70,43 +68,7 @@ def create_flask_app_with_configs() -> Flask:
def create_app() -> Flask: def create_app() -> Flask:
app = create_flask_app_with_configs() app = create_flask_app_with_configs()
app.secret_key = dify_config.SECRET_KEY
app.secret_key = app.config["SECRET_KEY"]
log_handlers = None
log_file = app.config.get("LOG_FILE")
if log_file:
log_dir = os.path.dirname(log_file)
os.makedirs(log_dir, exist_ok=True)
log_handlers = [
RotatingFileHandler(
filename=log_file,
maxBytes=1024 * 1024 * 1024,
backupCount=5,
),
logging.StreamHandler(sys.stdout),
]
logging.basicConfig(
level=app.config.get("LOG_LEVEL"),
format=app.config.get("LOG_FORMAT"),
datefmt=app.config.get("LOG_DATEFORMAT"),
handlers=log_handlers,
force=True,
)
log_tz = app.config.get("LOG_TZ")
if log_tz:
from datetime import datetime
import pytz
timezone = pytz.timezone(log_tz)
def time_converter(seconds):
return datetime.utcfromtimestamp(seconds).astimezone(timezone).timetuple()
for handler in logging.root.handlers:
handler.formatter.converter = time_converter
initialize_extensions(app) initialize_extensions(app)
register_blueprints(app) register_blueprints(app)
register_commands(app) register_commands(app)
@ -117,6 +79,7 @@ def create_app() -> Flask:
def initialize_extensions(app): def initialize_extensions(app):
# Since the application instance is now created, pass it to each Flask # Since the application instance is now created, pass it to each Flask
# extension instance to bind it to the Flask application instance (app) # extension instance to bind it to the Flask application instance (app)
ext_logging.init_app(app)
ext_compress.init_app(app) ext_compress.init_app(app)
ext_code_based_extension.init() ext_code_based_extension.init()
ext_database.init_app(app) ext_database.init_app(app)
@ -187,7 +150,7 @@ def register_blueprints(app):
CORS( CORS(
web_bp, web_bp,
resources={r"/*": {"origins": app.config["WEB_API_CORS_ALLOW_ORIGINS"]}}, resources={r"/*": {"origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS}},
supports_credentials=True, supports_credentials=True,
allow_headers=["Content-Type", "Authorization", "X-App-Code"], allow_headers=["Content-Type", "Authorization", "X-App-Code"],
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
@ -198,7 +161,7 @@ def register_blueprints(app):
CORS( CORS(
console_app_bp, console_app_bp,
resources={r"/*": {"origins": app.config["CONSOLE_CORS_ALLOW_ORIGINS"]}}, resources={r"/*": {"origins": dify_config.CONSOLE_CORS_ALLOW_ORIGINS}},
supports_credentials=True, supports_credentials=True,
allow_headers=["Content-Type", "Authorization"], allow_headers=["Content-Type", "Authorization"],
methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"], methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],

View File

@ -32,6 +32,21 @@ class SecurityConfig(BaseSettings):
default=5, default=5,
) )
LOGIN_DISABLED: bool = Field(
description="Whether to disable login checks",
default=False,
)
ADMIN_API_KEY_ENABLE: bool = Field(
description="Whether to enable admin api key for authentication",
default=False,
)
ADMIN_API_KEY: Optional[str] = Field(
description="admin api key for authentication",
default=None,
)
class AppExecutionConfig(BaseSettings): class AppExecutionConfig(BaseSettings):
""" """

View File

@ -1,4 +1,3 @@
import os
from functools import wraps from functools import wraps
from flask import request from flask import request
@ -7,6 +6,7 @@ from sqlalchemy import select
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound, Unauthorized from werkzeug.exceptions import NotFound, Unauthorized
from configs import dify_config
from constants.languages import supported_language from constants.languages import supported_language
from controllers.console import api from controllers.console import api
from controllers.console.wraps import only_edition_cloud from controllers.console.wraps import only_edition_cloud
@ -17,7 +17,7 @@ from models.model import App, InstalledApp, RecommendedApp
def admin_required(view): def admin_required(view):
@wraps(view) @wraps(view)
def decorated(*args, **kwargs): def decorated(*args, **kwargs):
if not os.getenv("ADMIN_API_KEY"): if not dify_config.ADMIN_API_KEY:
raise Unauthorized("API key is invalid.") raise Unauthorized("API key is invalid.")
auth_header = request.headers.get("Authorization") auth_header = request.headers.get("Authorization")
@ -33,7 +33,7 @@ def admin_required(view):
if auth_scheme != "bearer": if auth_scheme != "bearer":
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.") raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
if os.getenv("ADMIN_API_KEY") != auth_token: if dify_config.ADMIN_API_KEY != auth_token:
raise Unauthorized("API key is invalid.") raise Unauthorized("API key is invalid.")
return view(*args, **kwargs) return view(*args, **kwargs)

View File

@ -30,13 +30,12 @@ class FileApi(Resource):
@account_initialization_required @account_initialization_required
@marshal_with(upload_config_fields) @marshal_with(upload_config_fields)
def get(self): def get(self):
file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT
batch_count_limit = dify_config.UPLOAD_FILE_BATCH_LIMIT
image_file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT
return { return {
"file_size_limit": file_size_limit, "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT,
"batch_count_limit": batch_count_limit, "batch_count_limit": dify_config.UPLOAD_FILE_BATCH_LIMIT,
"image_file_size_limit": image_file_size_limit, "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT,
"video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT,
"audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT,
}, 200 }, 200
@setup_required @setup_required

View File

@ -42,10 +42,10 @@ class ToolFilePreviewApi(Resource):
stream, stream,
mimetype=tool_file.mimetype, mimetype=tool_file.mimetype,
direct_passthrough=True, direct_passthrough=True,
headers={ headers={},
"Content-Length": str(tool_file.size),
},
) )
if tool_file.size > 0:
response.headers["Content-Length"] = str(tool_file.size)
if args["as_attachment"]: if args["as_attachment"]:
response.headers["Content-Disposition"] = f"attachment; filename={tool_file.name}" response.headers["Content-Disposition"] = f"attachment; filename={tool_file.name}"

View File

@ -48,7 +48,7 @@ class MessageListApi(Resource):
"tool_input": fields.String, "tool_input": fields.String,
"created_at": TimestampField, "created_at": TimestampField,
"observation": fields.String, "observation": fields.String,
"message_files": fields.List(fields.String), "message_files": fields.List(fields.Nested(message_file_fields)),
} }
message_fields = { message_fields = {

View File

@ -46,7 +46,7 @@ class RemoteFileInfoApi(WebApiResource):
response = ssrf_proxy.head(decoded_url) response = ssrf_proxy.head(decoded_url)
return { return {
"file_type": response.headers.get("Content-Type", "application/octet-stream"), "file_type": response.headers.get("Content-Type", "application/octet-stream"),
"file_length": int(response.headers.get("Content-Length", 0)), "file_length": int(response.headers.get("Content-Length", -1)),
} }
except Exception as e: except Exception as e:
return {"error": str(e)}, 400 return {"error": str(e)}, 400

View File

@ -53,11 +53,11 @@ class BasicVariablesConfigManager:
VariableEntity( VariableEntity(
type=variable_type, type=variable_type,
variable=variable.get("variable"), variable=variable.get("variable"),
description=variable.get("description", ""), description=variable.get("description") or "",
label=variable.get("label"), label=variable.get("label"),
required=variable.get("required", False), required=variable.get("required", False),
max_length=variable.get("max_length"), max_length=variable.get("max_length"),
options=variable.get("options", []), options=variable.get("options") or [],
) )
) )

View File

@ -2,7 +2,7 @@ from collections.abc import Sequence
from enum import Enum from enum import Enum
from typing import Any, Optional from typing import Any, Optional
from pydantic import BaseModel, Field from pydantic import BaseModel, Field, field_validator
from core.file import FileExtraConfig, FileTransferMethod, FileType from core.file import FileExtraConfig, FileTransferMethod, FileType
from core.model_runtime.entities.message_entities import PromptMessageRole from core.model_runtime.entities.message_entities import PromptMessageRole
@ -114,6 +114,16 @@ class VariableEntity(BaseModel):
allowed_file_extensions: Sequence[str] = Field(default_factory=list) allowed_file_extensions: Sequence[str] = Field(default_factory=list)
allowed_file_upload_methods: Sequence[FileTransferMethod] = Field(default_factory=list) allowed_file_upload_methods: Sequence[FileTransferMethod] = Field(default_factory=list)
@field_validator("description", mode="before")
@classmethod
def convert_none_description(cls, v: Any) -> str:
return v or ""
@field_validator("options", mode="before")
@classmethod
def convert_none_options(cls, v: Any) -> Sequence[str]:
return v or []
class ExternalDataVariableEntity(BaseModel): class ExternalDataVariableEntity(BaseModel):
""" """

View File

@ -17,10 +17,13 @@ class FileUploadConfigManager:
file_upload_dict = config.get("file_upload") file_upload_dict = config.get("file_upload")
if file_upload_dict: if file_upload_dict:
if file_upload_dict.get("enabled"): if file_upload_dict.get("enabled"):
transform_methods = file_upload_dict.get("allowed_file_upload_methods") or file_upload_dict.get(
"allowed_upload_methods", []
)
data = { data = {
"image_config": { "image_config": {
"number_limits": file_upload_dict["number_limits"], "number_limits": file_upload_dict["number_limits"],
"transfer_methods": file_upload_dict["allowed_file_upload_methods"], "transfer_methods": transform_methods,
} }
} }

View File

@ -53,7 +53,7 @@ class BasedGenerateTaskPipeline:
self._output_moderation_handler = self._init_output_moderation() self._output_moderation_handler = self._init_output_moderation()
self._stream = stream self._stream = stream
def _handle_error(self, event: QueueErrorEvent, message: Optional[Message] = None) -> Exception: def _handle_error(self, event: QueueErrorEvent, message: Optional[Message] = None):
""" """
Handle error event. Handle error event.
:param event: event :param event: event
@ -100,7 +100,7 @@ class BasedGenerateTaskPipeline:
return message return message
def _error_to_stream_response(self, e: Exception) -> ErrorStreamResponse: def _error_to_stream_response(self, e: Exception):
""" """
Error to stream response. Error to stream response.
:param e: exception :param e: exception

View File

@ -4,6 +4,8 @@ from collections.abc import Mapping, Sequence
from datetime import datetime, timezone from datetime import datetime, timezone
from typing import Any, Optional, Union, cast from typing import Any, Optional, Union, cast
from sqlalchemy.orm import Session
from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity
from core.app.entities.queue_entities import ( from core.app.entities.queue_entities import (
QueueIterationCompletedEvent, QueueIterationCompletedEvent,
@ -232,6 +234,8 @@ class WorkflowCycleManage:
self, workflow_run: WorkflowRun, event: QueueNodeStartedEvent self, workflow_run: WorkflowRun, event: QueueNodeStartedEvent
) -> WorkflowNodeExecution: ) -> WorkflowNodeExecution:
# init workflow node execution # init workflow node execution
with Session(db.engine, expire_on_commit=False) as session:
workflow_node_execution = WorkflowNodeExecution() workflow_node_execution = WorkflowNodeExecution()
workflow_node_execution.tenant_id = workflow_run.tenant_id workflow_node_execution.tenant_id = workflow_run.tenant_id
workflow_node_execution.app_id = workflow_run.app_id workflow_node_execution.app_id = workflow_run.app_id
@ -249,13 +253,11 @@ class WorkflowCycleManage:
workflow_node_execution.created_by = workflow_run.created_by workflow_node_execution.created_by = workflow_run.created_by
workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None) workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
db.session.add(workflow_node_execution) session.add(workflow_node_execution)
db.session.commit() session.commit()
db.session.refresh(workflow_node_execution) session.refresh(workflow_node_execution)
db.session.close()
self._wip_workflow_node_executions[workflow_node_execution.node_execution_id] = workflow_node_execution self._wip_workflow_node_executions[workflow_node_execution.node_execution_id] = workflow_node_execution
return workflow_node_execution return workflow_node_execution
def _handle_workflow_node_execution_success(self, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution: def _handle_workflow_node_execution_success(self, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution:

View File

@ -1,8 +1,9 @@
from typing import Optional from typing import Optional
from flask import Config, Flask from flask import Flask
from pydantic import BaseModel from pydantic import BaseModel
from configs import dify_config
from core.entities.provider_entities import ProviderQuotaType, QuotaUnit, RestrictModel from core.entities.provider_entities import ProviderQuotaType, QuotaUnit, RestrictModel
from core.model_runtime.entities.model_entities import ModelType from core.model_runtime.entities.model_entities import ModelType
@ -43,32 +44,30 @@ class HostingConfiguration:
moderation_config: HostedModerationConfig = None moderation_config: HostedModerationConfig = None
def init_app(self, app: Flask) -> None: def init_app(self, app: Flask) -> None:
config = app.config if dify_config.EDITION != "CLOUD":
if config.get("EDITION") != "CLOUD":
return return
self.provider_map["azure_openai"] = self.init_azure_openai(config) self.provider_map["azure_openai"] = self.init_azure_openai()
self.provider_map["openai"] = self.init_openai(config) self.provider_map["openai"] = self.init_openai()
self.provider_map["anthropic"] = self.init_anthropic(config) self.provider_map["anthropic"] = self.init_anthropic()
self.provider_map["minimax"] = self.init_minimax(config) self.provider_map["minimax"] = self.init_minimax()
self.provider_map["spark"] = self.init_spark(config) self.provider_map["spark"] = self.init_spark()
self.provider_map["zhipuai"] = self.init_zhipuai(config) self.provider_map["zhipuai"] = self.init_zhipuai()
self.moderation_config = self.init_moderation_config(config) self.moderation_config = self.init_moderation_config()
@staticmethod @staticmethod
def init_azure_openai(app_config: Config) -> HostingProvider: def init_azure_openai() -> HostingProvider:
quota_unit = QuotaUnit.TIMES quota_unit = QuotaUnit.TIMES
if app_config.get("HOSTED_AZURE_OPENAI_ENABLED"): if dify_config.HOSTED_AZURE_OPENAI_ENABLED:
credentials = { credentials = {
"openai_api_key": app_config.get("HOSTED_AZURE_OPENAI_API_KEY"), "openai_api_key": dify_config.HOSTED_AZURE_OPENAI_API_KEY,
"openai_api_base": app_config.get("HOSTED_AZURE_OPENAI_API_BASE"), "openai_api_base": dify_config.HOSTED_AZURE_OPENAI_API_BASE,
"base_model_name": "gpt-35-turbo", "base_model_name": "gpt-35-turbo",
} }
quotas = [] quotas = []
hosted_quota_limit = int(app_config.get("HOSTED_AZURE_OPENAI_QUOTA_LIMIT", "1000")) hosted_quota_limit = dify_config.HOSTED_AZURE_OPENAI_QUOTA_LIMIT
trial_quota = TrialHostingQuota( trial_quota = TrialHostingQuota(
quota_limit=hosted_quota_limit, quota_limit=hosted_quota_limit,
restrict_models=[ restrict_models=[
@ -121,31 +120,31 @@ class HostingConfiguration:
quota_unit=quota_unit, quota_unit=quota_unit,
) )
def init_openai(self, app_config: Config) -> HostingProvider: def init_openai(self) -> HostingProvider:
quota_unit = QuotaUnit.CREDITS quota_unit = QuotaUnit.CREDITS
quotas = [] quotas = []
if app_config.get("HOSTED_OPENAI_TRIAL_ENABLED"): if dify_config.HOSTED_OPENAI_TRIAL_ENABLED:
hosted_quota_limit = int(app_config.get("HOSTED_OPENAI_QUOTA_LIMIT", "200")) hosted_quota_limit = dify_config.HOSTED_OPENAI_QUOTA_LIMIT
trial_models = self.parse_restrict_models_from_env(app_config, "HOSTED_OPENAI_TRIAL_MODELS") trial_models = self.parse_restrict_models_from_env("HOSTED_OPENAI_TRIAL_MODELS")
trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit, restrict_models=trial_models) trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit, restrict_models=trial_models)
quotas.append(trial_quota) quotas.append(trial_quota)
if app_config.get("HOSTED_OPENAI_PAID_ENABLED"): if dify_config.HOSTED_OPENAI_PAID_ENABLED:
paid_models = self.parse_restrict_models_from_env(app_config, "HOSTED_OPENAI_PAID_MODELS") paid_models = self.parse_restrict_models_from_env("HOSTED_OPENAI_PAID_MODELS")
paid_quota = PaidHostingQuota(restrict_models=paid_models) paid_quota = PaidHostingQuota(restrict_models=paid_models)
quotas.append(paid_quota) quotas.append(paid_quota)
if len(quotas) > 0: if len(quotas) > 0:
credentials = { credentials = {
"openai_api_key": app_config.get("HOSTED_OPENAI_API_KEY"), "openai_api_key": dify_config.HOSTED_OPENAI_API_KEY,
} }
if app_config.get("HOSTED_OPENAI_API_BASE"): if dify_config.HOSTED_OPENAI_API_BASE:
credentials["openai_api_base"] = app_config.get("HOSTED_OPENAI_API_BASE") credentials["openai_api_base"] = dify_config.HOSTED_OPENAI_API_BASE
if app_config.get("HOSTED_OPENAI_API_ORGANIZATION"): if dify_config.HOSTED_OPENAI_API_ORGANIZATION:
credentials["openai_organization"] = app_config.get("HOSTED_OPENAI_API_ORGANIZATION") credentials["openai_organization"] = dify_config.HOSTED_OPENAI_API_ORGANIZATION
return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas) return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas)
@ -155,26 +154,26 @@ class HostingConfiguration:
) )
@staticmethod @staticmethod
def init_anthropic(app_config: Config) -> HostingProvider: def init_anthropic() -> HostingProvider:
quota_unit = QuotaUnit.TOKENS quota_unit = QuotaUnit.TOKENS
quotas = [] quotas = []
if app_config.get("HOSTED_ANTHROPIC_TRIAL_ENABLED"): if dify_config.HOSTED_ANTHROPIC_TRIAL_ENABLED:
hosted_quota_limit = int(app_config.get("HOSTED_ANTHROPIC_QUOTA_LIMIT", "0")) hosted_quota_limit = dify_config.HOSTED_ANTHROPIC_QUOTA_LIMIT
trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit) trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit)
quotas.append(trial_quota) quotas.append(trial_quota)
if app_config.get("HOSTED_ANTHROPIC_PAID_ENABLED"): if dify_config.HOSTED_ANTHROPIC_PAID_ENABLED:
paid_quota = PaidHostingQuota() paid_quota = PaidHostingQuota()
quotas.append(paid_quota) quotas.append(paid_quota)
if len(quotas) > 0: if len(quotas) > 0:
credentials = { credentials = {
"anthropic_api_key": app_config.get("HOSTED_ANTHROPIC_API_KEY"), "anthropic_api_key": dify_config.HOSTED_ANTHROPIC_API_KEY,
} }
if app_config.get("HOSTED_ANTHROPIC_API_BASE"): if dify_config.HOSTED_ANTHROPIC_API_BASE:
credentials["anthropic_api_url"] = app_config.get("HOSTED_ANTHROPIC_API_BASE") credentials["anthropic_api_url"] = dify_config.HOSTED_ANTHROPIC_API_BASE
return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas) return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas)
@ -184,9 +183,9 @@ class HostingConfiguration:
) )
@staticmethod @staticmethod
def init_minimax(app_config: Config) -> HostingProvider: def init_minimax() -> HostingProvider:
quota_unit = QuotaUnit.TOKENS quota_unit = QuotaUnit.TOKENS
if app_config.get("HOSTED_MINIMAX_ENABLED"): if dify_config.HOSTED_MINIMAX_ENABLED:
quotas = [FreeHostingQuota()] quotas = [FreeHostingQuota()]
return HostingProvider( return HostingProvider(
@ -202,9 +201,9 @@ class HostingConfiguration:
) )
@staticmethod @staticmethod
def init_spark(app_config: Config) -> HostingProvider: def init_spark() -> HostingProvider:
quota_unit = QuotaUnit.TOKENS quota_unit = QuotaUnit.TOKENS
if app_config.get("HOSTED_SPARK_ENABLED"): if dify_config.HOSTED_SPARK_ENABLED:
quotas = [FreeHostingQuota()] quotas = [FreeHostingQuota()]
return HostingProvider( return HostingProvider(
@ -220,9 +219,9 @@ class HostingConfiguration:
) )
@staticmethod @staticmethod
def init_zhipuai(app_config: Config) -> HostingProvider: def init_zhipuai() -> HostingProvider:
quota_unit = QuotaUnit.TOKENS quota_unit = QuotaUnit.TOKENS
if app_config.get("HOSTED_ZHIPUAI_ENABLED"): if dify_config.HOSTED_ZHIPUAI_ENABLED:
quotas = [FreeHostingQuota()] quotas = [FreeHostingQuota()]
return HostingProvider( return HostingProvider(
@ -238,17 +237,15 @@ class HostingConfiguration:
) )
@staticmethod @staticmethod
def init_moderation_config(app_config: Config) -> HostedModerationConfig: def init_moderation_config() -> HostedModerationConfig:
if app_config.get("HOSTED_MODERATION_ENABLED") and app_config.get("HOSTED_MODERATION_PROVIDERS"): if dify_config.HOSTED_MODERATION_ENABLED and dify_config.HOSTED_MODERATION_PROVIDERS:
return HostedModerationConfig( return HostedModerationConfig(enabled=True, providers=dify_config.HOSTED_MODERATION_PROVIDERS.split(","))
enabled=True, providers=app_config.get("HOSTED_MODERATION_PROVIDERS").split(",")
)
return HostedModerationConfig(enabled=False) return HostedModerationConfig(enabled=False)
@staticmethod @staticmethod
def parse_restrict_models_from_env(app_config: Config, env_var: str) -> list[RestrictModel]: def parse_restrict_models_from_env(env_var: str) -> list[RestrictModel]:
models_str = app_config.get(env_var) models_str = dify_config.model_dump().get(env_var)
models_list = models_str.split(",") if models_str else [] models_list = models_str.split(",") if models_str else []
return [ return [
RestrictModel(model=model_name.strip(), model_type=ModelType.LLM) RestrictModel(model=model_name.strip(), model_type=ModelType.LLM)

View File

@ -2,6 +2,7 @@ from typing import Optional
from core.app.app_config.features.file_upload.manager import FileUploadConfigManager from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
from core.file import file_manager from core.file import file_manager
from core.file.models import FileType
from core.model_manager import ModelInstance from core.model_manager import ModelInstance
from core.model_runtime.entities import ( from core.model_runtime.entities import (
AssistantPromptMessage, AssistantPromptMessage,
@ -98,6 +99,7 @@ class TokenBufferMemory:
prompt_message_contents: list[PromptMessageContent] = [] prompt_message_contents: list[PromptMessageContent] = []
prompt_message_contents.append(TextPromptMessageContent(data=message.query)) prompt_message_contents.append(TextPromptMessageContent(data=message.query))
for file_obj in file_objs: for file_obj in file_objs:
if file_obj.type in {FileType.IMAGE, FileType.AUDIO}:
prompt_message = file_manager.to_prompt_message_content(file_obj) prompt_message = file_manager.to_prompt_message_content(file_obj)
prompt_message_contents.append(prompt_message) prompt_message_contents.append(prompt_message)

View File

@ -218,7 +218,7 @@ For instance, Xinference supports `max_tokens`, `temperature`, and `top_p` param
However, some vendors may support different parameters for different models. For example, the `OpenLLM` vendor supports `top_k`, but not all models provided by this vendor support `top_k`. Let's say model A supports `top_k` but model B does not. In such cases, we need to dynamically generate the model parameter schema, as illustrated below: However, some vendors may support different parameters for different models. For example, the `OpenLLM` vendor supports `top_k`, but not all models provided by this vendor support `top_k`. Let's say model A supports `top_k` but model B does not. In such cases, we need to dynamically generate the model parameter schema, as illustrated below:
```python ```python
def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
""" """
used to define customizable model schema used to define customizable model schema
""" """

View File

@ -205,7 +205,7 @@ provider_credential_schema:
但是有的供应商根据不同的模型支持不同的参数,如供应商`OpenLLM`支持`top_k`,但是并不是这个供应商提供的所有模型都支持`top_k`我们这里举例A模型支持`top_k`B模型不支持`top_k`那么我们需要在这里动态生成模型参数的Schema如下所示 但是有的供应商根据不同的模型支持不同的参数,如供应商`OpenLLM`支持`top_k`,但是并不是这个供应商提供的所有模型都支持`top_k`我们这里举例A模型支持`top_k`B模型不支持`top_k`那么我们需要在这里动态生成模型参数的Schema如下所示
```python ```python
def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None: def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
""" """
used to define customizable model schema used to define customizable model schema
""" """

View File

@ -33,7 +33,7 @@ class PromptTemplateParser:
key = match.group(1) key = match.group(1)
value = inputs.get(key, match.group(0)) # return original matched string if key not found value = inputs.get(key, match.group(0)) # return original matched string if key not found
if remove_template_variables: if remove_template_variables and isinstance(value, str):
return PromptTemplateParser.remove_template_variables(value, self.with_variable_tmpl) return PromptTemplateParser.remove_template_variables(value, self.with_variable_tmpl)
return value return value

View File

@ -428,14 +428,13 @@ class QdrantVectorFactory(AbstractVectorFactory):
if not dataset.index_struct_dict: if not dataset.index_struct_dict:
dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.QDRANT, collection_name)) dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.QDRANT, collection_name))
config = current_app.config
return QdrantVector( return QdrantVector(
collection_name=collection_name, collection_name=collection_name,
group_id=dataset.id, group_id=dataset.id,
config=QdrantConfig( config=QdrantConfig(
endpoint=dify_config.QDRANT_URL, endpoint=dify_config.QDRANT_URL,
api_key=dify_config.QDRANT_API_KEY, api_key=dify_config.QDRANT_API_KEY,
root_path=config.root_path, root_path=current_app.config.root_path,
timeout=dify_config.QDRANT_CLIENT_TIMEOUT, timeout=dify_config.QDRANT_CLIENT_TIMEOUT,
grpc_port=dify_config.QDRANT_GRPC_PORT, grpc_port=dify_config.QDRANT_GRPC_PORT,
prefer_grpc=dify_config.QDRANT_GRPC_ENABLED, prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,

View File

@ -18,6 +18,7 @@ from core.rag.extractor.extractor_base import BaseExtractor
from core.rag.models.document import Document from core.rag.models.document import Document
from extensions.ext_database import db from extensions.ext_database import db
from extensions.ext_storage import storage from extensions.ext_storage import storage
from models.enums import CreatedByRole
from models.model import UploadFile from models.model import UploadFile
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -109,9 +110,10 @@ class WordExtractor(BaseExtractor):
key=file_key, key=file_key,
name=file_key, name=file_key,
size=0, size=0,
extension=image_ext, extension=str(image_ext),
mime_type=mime_type, mime_type=mime_type or "",
created_by=self.user_id, created_by=self.user_id,
created_by_role=CreatedByRole.ACCOUNT,
created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None), created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
used=True, used=True,
used_by=self.user_id, used_by=self.user_id,

View File

@ -22,7 +22,7 @@ class WorkflowToolConfigurationUtils:
if not start_node: if not start_node:
return [] return []
return [VariableEntity(**variable) for variable in start_node.get("data", {}).get("variables", [])] return [VariableEntity.model_validate(variable) for variable in start_node.get("data", {}).get("variables", [])]
@classmethod @classmethod
def check_is_synced( def check_is_synced(

View File

@ -4,7 +4,6 @@ from collections.abc import Mapping, Sequence
from typing import Any, Union from typing import Any, Union
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from typing_extensions import deprecated
from core.file import File, FileAttribute, file_manager from core.file import File, FileAttribute, file_manager
from core.variables import Segment, SegmentGroup, Variable from core.variables import Segment, SegmentGroup, Variable
@ -96,9 +95,6 @@ class VariablePool(BaseModel):
if len(selector) < 2: if len(selector) < 2:
raise ValueError("Invalid selector") raise ValueError("Invalid selector")
if value is None:
return
if isinstance(value, Segment): if isinstance(value, Segment):
v = value v = value
else: else:
@ -136,26 +132,6 @@ class VariablePool(BaseModel):
return value return value
@deprecated("This method is deprecated, use `get` instead.")
def get_any(self, selector: Sequence[str], /) -> Any | None:
"""
Retrieves the value from the variable pool based on the given selector.
Args:
selector (Sequence[str]): The selector used to identify the variable.
Returns:
Any: The value associated with the given selector.
Raises:
ValueError: If the selector is invalid.
"""
if len(selector) < 2:
raise ValueError("Invalid selector")
hash_key = hash(tuple(selector[1:]))
value = self.variable_dictionary[selector[0]].get(hash_key)
return value.to_object() if value else None
def remove(self, selector: Sequence[str], /): def remove(self, selector: Sequence[str], /):
""" """
Remove variables from the variable pool based on the given selector. Remove variables from the variable pool based on the given selector.

View File

@ -41,10 +41,15 @@ class CodeNode(BaseNode[CodeNodeData]):
# Get variables # Get variables
variables = {} variables = {}
for variable_selector in self.node_data.variables: for variable_selector in self.node_data.variables:
variable = variable_selector.variable variable_name = variable_selector.variable
value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector) variable = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector)
if variable is None:
variables[variable] = value return NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
inputs=variables,
error=f"Variable `{variable_selector.value_selector}` not found",
)
variables[variable_name] = variable.to_object()
# Run code # Run code
try: try:
result = CodeExecutor.execute_workflow_code_template( result = CodeExecutor.execute_workflow_code_template(

View File

@ -1,5 +1,5 @@
from collections.abc import Sequence from collections.abc import Sequence
from typing import Literal, Optional from typing import Any, Literal, Optional
import httpx import httpx
from pydantic import BaseModel, Field, ValidationInfo, field_validator from pydantic import BaseModel, Field, ValidationInfo, field_validator
@ -52,6 +52,16 @@ class HttpRequestNodeBody(BaseModel):
type: Literal["none", "form-data", "x-www-form-urlencoded", "raw-text", "json", "binary"] type: Literal["none", "form-data", "x-www-form-urlencoded", "raw-text", "json", "binary"]
data: Sequence[BodyData] = Field(default_factory=list) data: Sequence[BodyData] = Field(default_factory=list)
@field_validator("data", mode="before")
@classmethod
def check_data(cls, v: Any):
"""For compatibility, if body is not set, return empty list."""
if not v:
return []
if isinstance(v, str):
return [BodyData(key="", type="text", value=v)]
return v
class HttpRequestNodeTimeout(BaseModel): class HttpRequestNodeTimeout(BaseModel):
connect: int = dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT connect: int = dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT

View File

@ -1,5 +1,5 @@
import json import json
from collections.abc import Mapping, Sequence from collections.abc import Mapping
from copy import deepcopy from copy import deepcopy
from random import randint from random import randint
from typing import Any, Literal from typing import Any, Literal
@ -60,7 +60,7 @@ class Executor:
self.method = node_data.method self.method = node_data.method
self.auth = node_data.authorization self.auth = node_data.authorization
self.timeout = timeout self.timeout = timeout
self.params = None self.params = {}
self.headers = {} self.headers = {}
self.content = None self.content = None
self.files = None self.files = None
@ -108,8 +108,10 @@ class Executor:
case "raw-text": case "raw-text":
self.content = self.variable_pool.convert_template(data[0].value).text self.content = self.variable_pool.convert_template(data[0].value).text
case "json": case "json":
json_object = json.loads(data[0].value) json_string = self.variable_pool.convert_template(data[0].value).text
self.json = self._parse_object_contains_variables(json_object) json_object = json.loads(json_string)
self.json = json_object
# self.json = self._parse_object_contains_variables(json_object)
case "binary": case "binary":
file_selector = data[0].file file_selector = data[0].file
file_variable = self.variable_pool.get_file(file_selector) file_variable = self.variable_pool.get_file(file_selector)
@ -274,14 +276,6 @@ class Executor:
return raw return raw
def _parse_object_contains_variables(self, obj: str | dict | list, /) -> Mapping[str, Any] | Sequence[Any] | str:
if isinstance(obj, dict):
return {k: self._parse_object_contains_variables(v) for k, v in obj.items()}
elif isinstance(obj, list):
return [self._parse_object_contains_variables(v) for v in obj]
elif isinstance(obj, str):
return self.variable_pool.convert_template(obj).text
def _plain_text_to_dict(text: str, /) -> dict[str, str]: def _plain_text_to_dict(text: str, /) -> dict[str, str]:
""" """

View File

@ -5,6 +5,7 @@ from typing import Any, cast
from configs import dify_config from configs import dify_config
from core.model_runtime.utils.encoders import jsonable_encoder from core.model_runtime.utils.encoders import jsonable_encoder
from core.variables import IntegerSegment
from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult
from core.workflow.graph_engine.entities.event import ( from core.workflow.graph_engine.entities.event import (
BaseGraphEvent, BaseGraphEvent,
@ -46,6 +47,15 @@ class IterationNode(BaseNode[IterationNodeData]):
if not iterator_list_segment: if not iterator_list_segment:
raise ValueError(f"Iterator variable {self.node_data.iterator_selector} not found") raise ValueError(f"Iterator variable {self.node_data.iterator_selector} not found")
if len(iterator_list_segment.value) == 0:
yield RunCompletedEvent(
run_result=NodeRunResult(
status=WorkflowNodeExecutionStatus.SUCCEEDED,
outputs={"output": []},
)
)
return
iterator_list_value = iterator_list_segment.to_object() iterator_list_value = iterator_list_segment.to_object()
if not isinstance(iterator_list_value, list): if not isinstance(iterator_list_value, list):
@ -138,9 +148,16 @@ class IterationNode(BaseNode[IterationNodeData]):
if NodeRunMetadataKey.ITERATION_ID not in metadata: if NodeRunMetadataKey.ITERATION_ID not in metadata:
metadata[NodeRunMetadataKey.ITERATION_ID] = self.node_id metadata[NodeRunMetadataKey.ITERATION_ID] = self.node_id
metadata[NodeRunMetadataKey.ITERATION_INDEX] = variable_pool.get_any( index_variable = variable_pool.get([self.node_id, "index"])
[self.node_id, "index"] if not isinstance(index_variable, IntegerSegment):
yield RunCompletedEvent(
run_result=NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
error=f"Invalid index variable type: {type(index_variable)}",
) )
)
return
metadata[NodeRunMetadataKey.ITERATION_INDEX] = index_variable.value
event.route_node_state.node_run_result.metadata = metadata event.route_node_state.node_run_result.metadata = metadata
yield event yield event
@ -172,7 +189,16 @@ class IterationNode(BaseNode[IterationNodeData]):
yield event yield event
# append to iteration output variable list # append to iteration output variable list
current_iteration_output = variable_pool.get_any(self.node_data.output_selector) current_iteration_output_variable = variable_pool.get(self.node_data.output_selector)
if current_iteration_output_variable is None:
yield RunCompletedEvent(
run_result=NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
error=f"Iteration output variable {self.node_data.output_selector} not found",
)
)
return
current_iteration_output = current_iteration_output_variable.to_object()
outputs.append(current_iteration_output) outputs.append(current_iteration_output)
# remove all nodes outputs from variable pool # remove all nodes outputs from variable pool
@ -180,11 +206,11 @@ class IterationNode(BaseNode[IterationNodeData]):
variable_pool.remove([node_id]) variable_pool.remove([node_id])
# move to next iteration # move to next iteration
current_index = variable_pool.get([self.node_id, "index"]) current_index_variable = variable_pool.get([self.node_id, "index"])
if current_index is None: if not isinstance(current_index_variable, IntegerSegment):
raise ValueError(f"iteration {self.node_id} current index not found") raise ValueError(f"iteration {self.node_id} current index not found")
next_index = int(current_index.to_object()) + 1 next_index = current_index_variable.value + 1
variable_pool.add([self.node_id, "index"], next_index) variable_pool.add([self.node_id, "index"], next_index)
if next_index < len(iterator_list_value): if next_index < len(iterator_list_value):
@ -196,9 +222,7 @@ class IterationNode(BaseNode[IterationNodeData]):
iteration_node_type=self.node_type, iteration_node_type=self.node_type,
iteration_node_data=self.node_data, iteration_node_data=self.node_data,
index=next_index, index=next_index,
pre_iteration_output=jsonable_encoder(current_iteration_output) pre_iteration_output=jsonable_encoder(current_iteration_output),
if current_iteration_output
else None,
) )
yield IterationRunSucceededEvent( yield IterationRunSucceededEvent(

View File

@ -14,6 +14,7 @@ from core.model_runtime.entities.model_entities import ModelFeature, ModelType
from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
from core.rag.retrieval.dataset_retrieval import DatasetRetrieval from core.rag.retrieval.dataset_retrieval import DatasetRetrieval
from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.rag.retrieval.retrieval_methods import RetrievalMethod
from core.variables import StringSegment
from core.workflow.entities.node_entities import NodeRunResult from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.nodes.base import BaseNode from core.workflow.nodes.base import BaseNode
from core.workflow.nodes.enums import NodeType from core.workflow.nodes.enums import NodeType
@ -39,8 +40,14 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]):
def _run(self) -> NodeRunResult: def _run(self) -> NodeRunResult:
# extract variables # extract variables
variable = self.graph_runtime_state.variable_pool.get_any(self.node_data.query_variable_selector) variable = self.graph_runtime_state.variable_pool.get(self.node_data.query_variable_selector)
query = variable if not isinstance(variable, StringSegment):
return NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
inputs={},
error="Query variable is not string type.",
)
query = variable.value
variables = {"query": query} variables = {"query": query}
if not query: if not query:
return NodeRunResult( return NodeRunResult(

View File

@ -1,7 +1,7 @@
from collections.abc import Sequence from collections.abc import Sequence
from typing import Any, Optional from typing import Any, Optional
from pydantic import BaseModel, Field from pydantic import BaseModel, Field, field_validator
from core.model_runtime.entities import ImagePromptMessageContent from core.model_runtime.entities import ImagePromptMessageContent
from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig
@ -30,6 +30,13 @@ class VisionConfig(BaseModel):
enabled: bool = False enabled: bool = False
configs: VisionConfigOptions = Field(default_factory=VisionConfigOptions) configs: VisionConfigOptions = Field(default_factory=VisionConfigOptions)
@field_validator("configs", mode="before")
@classmethod
def convert_none_configs(cls, v: Any):
if v is None:
return VisionConfigOptions()
return v
class PromptConfig(BaseModel): class PromptConfig(BaseModel):
jinja2_variables: Optional[list[VariableSelector]] = None jinja2_variables: Optional[list[VariableSelector]] = None

View File

@ -22,7 +22,15 @@ from core.model_runtime.utils.encoders import jsonable_encoder
from core.prompt.advanced_prompt_transform import AdvancedPromptTransform from core.prompt.advanced_prompt_transform import AdvancedPromptTransform
from core.prompt.entities.advanced_prompt_entities import CompletionModelPromptTemplate, MemoryConfig from core.prompt.entities.advanced_prompt_entities import CompletionModelPromptTemplate, MemoryConfig
from core.prompt.utils.prompt_message_util import PromptMessageUtil from core.prompt.utils.prompt_message_util import PromptMessageUtil
from core.variables import ArrayAnySegment, ArrayFileSegment, FileSegment from core.variables import (
ArrayAnySegment,
ArrayFileSegment,
ArraySegment,
FileSegment,
NoneSegment,
ObjectSegment,
StringSegment,
)
from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID
from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult
from core.workflow.enums import SystemVariableKey from core.workflow.enums import SystemVariableKey
@ -263,50 +271,44 @@ class LLMNode(BaseNode[LLMNodeData]):
return variables return variables
for variable_selector in node_data.prompt_config.jinja2_variables or []: for variable_selector in node_data.prompt_config.jinja2_variables or []:
variable = variable_selector.variable variable_name = variable_selector.variable
value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector) variable = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector)
if variable is None:
raise ValueError(f"Variable {variable_selector.variable} not found")
def parse_dict(d: dict) -> str: def parse_dict(input_dict: Mapping[str, Any]) -> str:
""" """
Parse dict into string Parse dict into string
""" """
# check if it's a context structure # check if it's a context structure
if "metadata" in d and "_source" in d["metadata"] and "content" in d: if "metadata" in input_dict and "_source" in input_dict["metadata"] and "content" in input_dict:
return d["content"] return input_dict["content"]
# else, parse the dict # else, parse the dict
try: try:
return json.dumps(d, ensure_ascii=False) return json.dumps(input_dict, ensure_ascii=False)
except Exception: except Exception:
return str(d) return str(input_dict)
if isinstance(value, str): if isinstance(variable, ArraySegment):
value = value
elif isinstance(value, list):
result = "" result = ""
for item in value: for item in variable.value:
if isinstance(item, dict): if isinstance(item, dict):
result += parse_dict(item) result += parse_dict(item)
elif isinstance(item, str):
result += item
elif isinstance(item, int | float):
result += str(item)
else: else:
result += str(item) result += str(item)
result += "\n" result += "\n"
value = result.strip() value = result.strip()
elif isinstance(value, dict): elif isinstance(variable, ObjectSegment):
value = parse_dict(value) value = parse_dict(variable.value)
elif isinstance(value, int | float):
value = str(value)
else: else:
value = str(value) value = variable.text
variables[variable] = value variables[variable_name] = value
return variables return variables
def _fetch_inputs(self, node_data: LLMNodeData) -> dict[str, str]: def _fetch_inputs(self, node_data: LLMNodeData) -> dict[str, Any]:
inputs = {} inputs = {}
prompt_template = node_data.prompt_template prompt_template = node_data.prompt_template
@ -320,11 +322,12 @@ class LLMNode(BaseNode[LLMNodeData]):
variable_selectors = variable_template_parser.extract_variable_selectors() variable_selectors = variable_template_parser.extract_variable_selectors()
for variable_selector in variable_selectors: for variable_selector in variable_selectors:
variable_value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector) variable = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector)
if variable_value is None: if variable is None:
raise ValueError(f"Variable {variable_selector.variable} not found") raise ValueError(f"Variable {variable_selector.variable} not found")
if isinstance(variable, NoneSegment):
inputs[variable_selector.variable] = variable_value continue
inputs[variable_selector.variable] = variable.to_object()
memory = node_data.memory memory = node_data.memory
if memory and memory.query_prompt_template: if memory and memory.query_prompt_template:
@ -332,11 +335,12 @@ class LLMNode(BaseNode[LLMNodeData]):
template=memory.query_prompt_template template=memory.query_prompt_template
).extract_variable_selectors() ).extract_variable_selectors()
for variable_selector in query_variable_selectors: for variable_selector in query_variable_selectors:
variable_value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector) variable = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector)
if variable_value is None: if variable is None:
raise ValueError(f"Variable {variable_selector.variable} not found") raise ValueError(f"Variable {variable_selector.variable} not found")
if isinstance(variable, NoneSegment):
inputs[variable_selector.variable] = variable_value continue
inputs[variable_selector.variable] = variable.to_object()
return inputs return inputs
@ -361,14 +365,14 @@ class LLMNode(BaseNode[LLMNodeData]):
if not node_data.context.variable_selector: if not node_data.context.variable_selector:
return return
context_value = self.graph_runtime_state.variable_pool.get_any(node_data.context.variable_selector) context_value_variable = self.graph_runtime_state.variable_pool.get(node_data.context.variable_selector)
if context_value: if context_value_variable:
if isinstance(context_value, str): if isinstance(context_value_variable, StringSegment):
yield RunRetrieverResourceEvent(retriever_resources=[], context=context_value) yield RunRetrieverResourceEvent(retriever_resources=[], context=context_value_variable.value)
elif isinstance(context_value, list): elif isinstance(context_value_variable, ArraySegment):
context_str = "" context_str = ""
original_retriever_resource = [] original_retriever_resource = []
for item in context_value: for item in context_value_variable.value:
if isinstance(item, str): if isinstance(item, str):
context_str += item + "\n" context_str += item + "\n"
else: else:
@ -482,11 +486,12 @@ class LLMNode(BaseNode[LLMNodeData]):
return None return None
# get conversation id # get conversation id
conversation_id = self.graph_runtime_state.variable_pool.get_any( conversation_id_variable = self.graph_runtime_state.variable_pool.get(
["sys", SystemVariableKey.CONVERSATION_ID.value] ["sys", SystemVariableKey.CONVERSATION_ID.value]
) )
if conversation_id is None: if not isinstance(conversation_id_variable, StringSegment):
return None return None
conversation_id = conversation_id_variable.value
# get conversation # get conversation
conversation = ( conversation = (

View File

@ -33,8 +33,13 @@ class TemplateTransformNode(BaseNode[TemplateTransformNodeData]):
variables = {} variables = {}
for variable_selector in self.node_data.variables: for variable_selector in self.node_data.variables:
variable_name = variable_selector.variable variable_name = variable_selector.variable
value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector) value = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector)
variables[variable_name] = value if value is None:
return NodeRunResult(
status=WorkflowNodeExecutionStatus.FAILED,
error=f"Variable {variable_name} not found in variable pool",
)
variables[variable_name] = value.to_object()
# Run code # Run code
try: try:
result = CodeExecutor.execute_workflow_code_template( result = CodeExecutor.execute_workflow_code_template(
@ -56,7 +61,7 @@ class TemplateTransformNode(BaseNode[TemplateTransformNodeData]):
@classmethod @classmethod
def _extract_variable_selector_to_variable_mapping( def _extract_variable_selector_to_variable_mapping(
cls, graph_config: Mapping[str, Any], node_id: str, node_data: TemplateTransformNodeData cls, *, graph_config: Mapping[str, Any], node_id: str, node_data: TemplateTransformNodeData
) -> Mapping[str, Sequence[str]]: ) -> Mapping[str, Sequence[str]]:
""" """
Extract variable selector to variable mapping Extract variable selector to variable mapping

View File

@ -19,27 +19,27 @@ class VariableAggregatorNode(BaseNode[VariableAssignerNodeData]):
if not self.node_data.advanced_settings or not self.node_data.advanced_settings.group_enabled: if not self.node_data.advanced_settings or not self.node_data.advanced_settings.group_enabled:
for selector in self.node_data.variables: for selector in self.node_data.variables:
variable = self.graph_runtime_state.variable_pool.get_any(selector) variable = self.graph_runtime_state.variable_pool.get(selector)
if variable is not None: if variable is not None:
outputs = {"output": variable} outputs = {"output": variable.to_object()}
inputs = {".".join(selector[1:]): variable} inputs = {".".join(selector[1:]): variable.to_object()}
break break
else: else:
for group in self.node_data.advanced_settings.groups: for group in self.node_data.advanced_settings.groups:
for selector in group.variables: for selector in group.variables:
variable = self.graph_runtime_state.variable_pool.get_any(selector) variable = self.graph_runtime_state.variable_pool.get(selector)
if variable is not None: if variable is not None:
outputs[group.group_name] = {"output": variable} outputs[group.group_name] = {"output": variable.to_object()}
inputs[".".join(selector[1:])] = variable inputs[".".join(selector[1:])] = variable.to_object()
break break
return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs=outputs, inputs=inputs) return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs=outputs, inputs=inputs)
@classmethod @classmethod
def _extract_variable_selector_to_variable_mapping( def _extract_variable_selector_to_variable_mapping(
cls, graph_config: Mapping[str, Any], node_id: str, node_data: VariableAssignerNodeData cls, *, graph_config: Mapping[str, Any], node_id: str, node_data: VariableAssignerNodeData
) -> Mapping[str, Sequence[str]]: ) -> Mapping[str, Sequence[str]]:
""" """
Extract variable selector to variable mapping Extract variable selector to variable mapping

View File

@ -3,6 +3,8 @@ from datetime import timedelta
from celery import Celery, Task from celery import Celery, Task
from flask import Flask from flask import Flask
from configs import dify_config
def init_app(app: Flask) -> Celery: def init_app(app: Flask) -> Celery:
class FlaskTask(Task): class FlaskTask(Task):
@ -12,19 +14,19 @@ def init_app(app: Flask) -> Celery:
broker_transport_options = {} broker_transport_options = {}
if app.config.get("CELERY_USE_SENTINEL"): if dify_config.CELERY_USE_SENTINEL:
broker_transport_options = { broker_transport_options = {
"master_name": app.config.get("CELERY_SENTINEL_MASTER_NAME"), "master_name": dify_config.CELERY_SENTINEL_MASTER_NAME,
"sentinel_kwargs": { "sentinel_kwargs": {
"socket_timeout": app.config.get("CELERY_SENTINEL_SOCKET_TIMEOUT", 0.1), "socket_timeout": dify_config.CELERY_SENTINEL_SOCKET_TIMEOUT,
}, },
} }
celery_app = Celery( celery_app = Celery(
app.name, app.name,
task_cls=FlaskTask, task_cls=FlaskTask,
broker=app.config.get("CELERY_BROKER_URL"), broker=dify_config.CELERY_BROKER_URL,
backend=app.config.get("CELERY_BACKEND"), backend=dify_config.CELERY_BACKEND,
task_ignore_result=True, task_ignore_result=True,
) )
@ -37,12 +39,12 @@ def init_app(app: Flask) -> Celery:
} }
celery_app.conf.update( celery_app.conf.update(
result_backend=app.config.get("CELERY_RESULT_BACKEND"), result_backend=dify_config.CELERY_RESULT_BACKEND,
broker_transport_options=broker_transport_options, broker_transport_options=broker_transport_options,
broker_connection_retry_on_startup=True, broker_connection_retry_on_startup=True,
) )
if app.config.get("BROKER_USE_SSL"): if dify_config.BROKER_USE_SSL:
celery_app.conf.update( celery_app.conf.update(
broker_use_ssl=ssl_options, # Add the SSL options to the broker configuration broker_use_ssl=ssl_options, # Add the SSL options to the broker configuration
) )
@ -54,7 +56,7 @@ def init_app(app: Flask) -> Celery:
"schedule.clean_embedding_cache_task", "schedule.clean_embedding_cache_task",
"schedule.clean_unused_datasets_task", "schedule.clean_unused_datasets_task",
] ]
day = app.config.get("CELERY_BEAT_SCHEDULER_TIME") day = dify_config.CELERY_BEAT_SCHEDULER_TIME
beat_schedule = { beat_schedule = {
"clean_embedding_cache_task": { "clean_embedding_cache_task": {
"task": "schedule.clean_embedding_cache_task.clean_embedding_cache_task", "task": "schedule.clean_embedding_cache_task.clean_embedding_cache_task",

View File

@ -1,8 +1,10 @@
from flask import Flask from flask import Flask
from configs import dify_config
def init_app(app: Flask): def init_app(app: Flask):
if app.config.get("API_COMPRESSION_ENABLED"): if dify_config.API_COMPRESSION_ENABLED:
from flask_compress import Compress from flask_compress import Compress
app.config["COMPRESS_MIMETYPES"] = [ app.config["COMPRESS_MIMETYPES"] = [

View File

@ -0,0 +1,45 @@
import logging
import os
import sys
from logging.handlers import RotatingFileHandler
from flask import Flask
from configs import dify_config
def init_app(app: Flask):
log_handlers = None
log_file = dify_config.LOG_FILE
if log_file:
log_dir = os.path.dirname(log_file)
os.makedirs(log_dir, exist_ok=True)
log_handlers = [
RotatingFileHandler(
filename=log_file,
maxBytes=1024 * 1024 * 1024,
backupCount=5,
),
logging.StreamHandler(sys.stdout),
]
logging.basicConfig(
level=dify_config.LOG_LEVEL,
format=dify_config.LOG_FORMAT,
datefmt=dify_config.LOG_DATEFORMAT,
handlers=log_handlers,
force=True,
)
log_tz = dify_config.LOG_TZ
if log_tz:
from datetime import datetime
import pytz
timezone = pytz.timezone(log_tz)
def time_converter(seconds):
return datetime.utcfromtimestamp(seconds).astimezone(timezone).timetuple()
for handler in logging.root.handlers:
handler.formatter.converter = time_converter

View File

@ -4,6 +4,8 @@ from typing import Optional
import resend import resend
from flask import Flask from flask import Flask
from configs import dify_config
class Mail: class Mail:
def __init__(self): def __init__(self):
@ -14,41 +16,44 @@ class Mail:
return self._client is not None return self._client is not None
def init_app(self, app: Flask): def init_app(self, app: Flask):
if app.config.get("MAIL_TYPE"): mail_type = dify_config.MAIL_TYPE
if app.config.get("MAIL_DEFAULT_SEND_FROM"): if not mail_type:
self._default_send_from = app.config.get("MAIL_DEFAULT_SEND_FROM") logging.warning("MAIL_TYPE is not set")
return
if app.config.get("MAIL_TYPE") == "resend": if dify_config.MAIL_DEFAULT_SEND_FROM:
api_key = app.config.get("RESEND_API_KEY") self._default_send_from = dify_config.MAIL_DEFAULT_SEND_FROM
match mail_type:
case "resend":
api_key = dify_config.RESEND_API_KEY
if not api_key: if not api_key:
raise ValueError("RESEND_API_KEY is not set") raise ValueError("RESEND_API_KEY is not set")
api_url = app.config.get("RESEND_API_URL") api_url = dify_config.RESEND_API_URL
if api_url: if api_url:
resend.api_url = api_url resend.api_url = api_url
resend.api_key = api_key resend.api_key = api_key
self._client = resend.Emails self._client = resend.Emails
elif app.config.get("MAIL_TYPE") == "smtp": case "smtp":
from libs.smtp import SMTPClient from libs.smtp import SMTPClient
if not app.config.get("SMTP_SERVER") or not app.config.get("SMTP_PORT"): if not dify_config.SMTP_SERVER or not dify_config.SMTP_PORT:
raise ValueError("SMTP_SERVER and SMTP_PORT are required for smtp mail type") raise ValueError("SMTP_SERVER and SMTP_PORT are required for smtp mail type")
if not app.config.get("SMTP_USE_TLS") and app.config.get("SMTP_OPPORTUNISTIC_TLS"): if not dify_config.SMTP_USE_TLS and dify_config.SMTP_OPPORTUNISTIC_TLS:
raise ValueError("SMTP_OPPORTUNISTIC_TLS is not supported without enabling SMTP_USE_TLS") raise ValueError("SMTP_OPPORTUNISTIC_TLS is not supported without enabling SMTP_USE_TLS")
self._client = SMTPClient( self._client = SMTPClient(
server=app.config.get("SMTP_SERVER"), server=dify_config.SMTP_SERVER,
port=app.config.get("SMTP_PORT"), port=dify_config.SMTP_PORT,
username=app.config.get("SMTP_USERNAME"), username=dify_config.SMTP_USERNAME,
password=app.config.get("SMTP_PASSWORD"), password=dify_config.SMTP_PASSWORD,
_from=app.config.get("MAIL_DEFAULT_SEND_FROM"), _from=dify_config.MAIL_DEFAULT_SEND_FROM,
use_tls=app.config.get("SMTP_USE_TLS"), use_tls=dify_config.SMTP_USE_TLS,
opportunistic_tls=app.config.get("SMTP_OPPORTUNISTIC_TLS"), opportunistic_tls=dify_config.SMTP_OPPORTUNISTIC_TLS,
) )
else: case _:
raise ValueError("Unsupported mail type {}".format(app.config.get("MAIL_TYPE"))) raise ValueError("Unsupported mail type {}".format(mail_type))
else:
logging.warning("MAIL_TYPE is not set")
def send(self, to: str, subject: str, html: str, from_: Optional[str] = None): def send(self, to: str, subject: str, html: str, from_: Optional[str] = None):
if not self._client: if not self._client:

View File

@ -2,6 +2,8 @@ import redis
from redis.connection import Connection, SSLConnection from redis.connection import Connection, SSLConnection
from redis.sentinel import Sentinel from redis.sentinel import Sentinel
from configs import dify_config
class RedisClientWrapper(redis.Redis): class RedisClientWrapper(redis.Redis):
""" """
@ -43,37 +45,37 @@ redis_client = RedisClientWrapper()
def init_app(app): def init_app(app):
global redis_client global redis_client
connection_class = Connection connection_class = Connection
if app.config.get("REDIS_USE_SSL"): if dify_config.REDIS_USE_SSL:
connection_class = SSLConnection connection_class = SSLConnection
redis_params = { redis_params = {
"username": app.config.get("REDIS_USERNAME"), "username": dify_config.REDIS_USERNAME,
"password": app.config.get("REDIS_PASSWORD"), "password": dify_config.REDIS_PASSWORD,
"db": app.config.get("REDIS_DB"), "db": dify_config.REDIS_DB,
"encoding": "utf-8", "encoding": "utf-8",
"encoding_errors": "strict", "encoding_errors": "strict",
"decode_responses": False, "decode_responses": False,
} }
if app.config.get("REDIS_USE_SENTINEL"): if dify_config.REDIS_USE_SENTINEL:
sentinel_hosts = [ sentinel_hosts = [
(node.split(":")[0], int(node.split(":")[1])) for node in app.config.get("REDIS_SENTINELS").split(",") (node.split(":")[0], int(node.split(":")[1])) for node in dify_config.REDIS_SENTINELS.split(",")
] ]
sentinel = Sentinel( sentinel = Sentinel(
sentinel_hosts, sentinel_hosts,
sentinel_kwargs={ sentinel_kwargs={
"socket_timeout": app.config.get("REDIS_SENTINEL_SOCKET_TIMEOUT", 0.1), "socket_timeout": dify_config.REDIS_SENTINEL_SOCKET_TIMEOUT,
"username": app.config.get("REDIS_SENTINEL_USERNAME"), "username": dify_config.REDIS_SENTINEL_USERNAME,
"password": app.config.get("REDIS_SENTINEL_PASSWORD"), "password": dify_config.REDIS_SENTINEL_PASSWORD,
}, },
) )
master = sentinel.master_for(app.config.get("REDIS_SENTINEL_SERVICE_NAME"), **redis_params) master = sentinel.master_for(dify_config.REDIS_SENTINEL_SERVICE_NAME, **redis_params)
redis_client.initialize(master) redis_client.initialize(master)
else: else:
redis_params.update( redis_params.update(
{ {
"host": app.config.get("REDIS_HOST"), "host": dify_config.REDIS_HOST,
"port": app.config.get("REDIS_PORT"), "port": dify_config.REDIS_PORT,
"connection_class": connection_class, "connection_class": connection_class,
} }
) )

View File

@ -5,6 +5,7 @@ from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.flask import FlaskIntegration from sentry_sdk.integrations.flask import FlaskIntegration
from werkzeug.exceptions import HTTPException from werkzeug.exceptions import HTTPException
from configs import dify_config
from core.model_runtime.errors.invoke import InvokeRateLimitError from core.model_runtime.errors.invoke import InvokeRateLimitError
@ -18,9 +19,9 @@ def before_send(event, hint):
def init_app(app): def init_app(app):
if app.config.get("SENTRY_DSN"): if dify_config.SENTRY_DSN:
sentry_sdk.init( sentry_sdk.init(
dsn=app.config.get("SENTRY_DSN"), dsn=dify_config.SENTRY_DSN,
integrations=[FlaskIntegration(), CeleryIntegration()], integrations=[FlaskIntegration(), CeleryIntegration()],
ignore_errors=[ ignore_errors=[
HTTPException, HTTPException,
@ -29,9 +30,9 @@ def init_app(app):
InvokeRateLimitError, InvokeRateLimitError,
parse_error.defaultErrorResponse, parse_error.defaultErrorResponse,
], ],
traces_sample_rate=app.config.get("SENTRY_TRACES_SAMPLE_RATE", 1.0), traces_sample_rate=dify_config.SENTRY_TRACES_SAMPLE_RATE,
profiles_sample_rate=app.config.get("SENTRY_PROFILES_SAMPLE_RATE", 1.0), profiles_sample_rate=dify_config.SENTRY_PROFILES_SAMPLE_RATE,
environment=app.config.get("DEPLOY_ENV"), environment=dify_config.DEPLOY_ENV,
release=f"dify-{app.config.get('CURRENT_VERSION')}-{app.config.get('COMMIT_SHA')}", release=f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}",
before_send=before_send, before_send=before_send,
) )

View File

@ -15,7 +15,8 @@ class Storage:
def init_app(self, app: Flask): def init_app(self, app: Flask):
storage_factory = self.get_storage_factory(dify_config.STORAGE_TYPE) storage_factory = self.get_storage_factory(dify_config.STORAGE_TYPE)
self.storage_runner = storage_factory(app=app) with app.app_context():
self.storage_runner = storage_factory()
@staticmethod @staticmethod
def get_storage_factory(storage_type: str) -> type[BaseStorage]: def get_storage_factory(storage_type: str) -> type[BaseStorage]:

View File

@ -1,29 +1,27 @@
from collections.abc import Generator from collections.abc import Generator
import oss2 as aliyun_s3 import oss2 as aliyun_s3
from flask import Flask
from configs import dify_config
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
class AliyunOssStorage(BaseStorage): class AliyunOssStorage(BaseStorage):
"""Implementation for Aliyun OSS storage.""" """Implementation for Aliyun OSS storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
self.bucket_name = dify_config.ALIYUN_OSS_BUCKET_NAME
app_config = self.app.config self.folder = dify_config.ALIYUN_OSS_PATH
self.bucket_name = app_config.get("ALIYUN_OSS_BUCKET_NAME")
self.folder = app.config.get("ALIYUN_OSS_PATH")
oss_auth_method = aliyun_s3.Auth oss_auth_method = aliyun_s3.Auth
region = None region = None
if app_config.get("ALIYUN_OSS_AUTH_VERSION") == "v4": if dify_config.ALIYUN_OSS_AUTH_VERSION == "v4":
oss_auth_method = aliyun_s3.AuthV4 oss_auth_method = aliyun_s3.AuthV4
region = app_config.get("ALIYUN_OSS_REGION") region = dify_config.ALIYUN_OSS_REGION
oss_auth = oss_auth_method(app_config.get("ALIYUN_OSS_ACCESS_KEY"), app_config.get("ALIYUN_OSS_SECRET_KEY")) oss_auth = oss_auth_method(dify_config.ALIYUN_OSS_ACCESS_KEY, dify_config.ALIYUN_OSS_SECRET_KEY)
self.client = aliyun_s3.Bucket( self.client = aliyun_s3.Bucket(
oss_auth, oss_auth,
app_config.get("ALIYUN_OSS_ENDPOINT"), dify_config.ALIYUN_OSS_ENDPOINT,
self.bucket_name, self.bucket_name,
connect_timeout=30, connect_timeout=30,
region=region, region=region,

View File

@ -4,8 +4,8 @@ from collections.abc import Generator
import boto3 import boto3
from botocore.client import Config from botocore.client import Config
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
from flask import Flask
from configs import dify_config
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -14,26 +14,25 @@ logger = logging.getLogger(__name__)
class AwsS3Storage(BaseStorage): class AwsS3Storage(BaseStorage):
"""Implementation for Amazon Web Services S3 storage.""" """Implementation for Amazon Web Services S3 storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
app_config = self.app.config self.bucket_name = dify_config.S3_BUCKET_NAME
self.bucket_name = app_config.get("S3_BUCKET_NAME") if dify_config.S3_USE_AWS_MANAGED_IAM:
if app_config.get("S3_USE_AWS_MANAGED_IAM"):
logger.info("Using AWS managed IAM role for S3") logger.info("Using AWS managed IAM role for S3")
session = boto3.Session() session = boto3.Session()
region_name = app_config.get("S3_REGION") region_name = dify_config.S3_REGION
self.client = session.client(service_name="s3", region_name=region_name) self.client = session.client(service_name="s3", region_name=region_name)
else: else:
logger.info("Using ak and sk for S3") logger.info("Using ak and sk for S3")
self.client = boto3.client( self.client = boto3.client(
"s3", "s3",
aws_secret_access_key=app_config.get("S3_SECRET_KEY"), aws_secret_access_key=dify_config.S3_SECRET_KEY,
aws_access_key_id=app_config.get("S3_ACCESS_KEY"), aws_access_key_id=dify_config.S3_ACCESS_KEY,
endpoint_url=app_config.get("S3_ENDPOINT"), endpoint_url=dify_config.S3_ENDPOINT,
region_name=app_config.get("S3_REGION"), region_name=dify_config.S3_REGION,
config=Config(s3={"addressing_style": app_config.get("S3_ADDRESS_STYLE")}), config=Config(s3={"addressing_style": dify_config.S3_ADDRESS_STYLE}),
) )
# create bucket # create bucket
try: try:

View File

@ -2,8 +2,8 @@ from collections.abc import Generator
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta, timezone
from azure.storage.blob import AccountSasPermissions, BlobServiceClient, ResourceTypes, generate_account_sas from azure.storage.blob import AccountSasPermissions, BlobServiceClient, ResourceTypes, generate_account_sas
from flask import Flask
from configs import dify_config
from extensions.ext_redis import redis_client from extensions.ext_redis import redis_client
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
@ -11,13 +11,12 @@ from extensions.storage.base_storage import BaseStorage
class AzureBlobStorage(BaseStorage): class AzureBlobStorage(BaseStorage):
"""Implementation for Azure Blob storage.""" """Implementation for Azure Blob storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
app_config = self.app.config self.bucket_name = dify_config.AZURE_BLOB_CONTAINER_NAME
self.bucket_name = app_config.get("AZURE_BLOB_CONTAINER_NAME") self.account_url = dify_config.AZURE_BLOB_ACCOUNT_URL
self.account_url = app_config.get("AZURE_BLOB_ACCOUNT_URL") self.account_name = dify_config.AZURE_BLOB_ACCOUNT_NAME
self.account_name = app_config.get("AZURE_BLOB_ACCOUNT_NAME") self.account_key = dify_config.AZURE_BLOB_ACCOUNT_KEY
self.account_key = app_config.get("AZURE_BLOB_ACCOUNT_KEY")
def save(self, filename, data): def save(self, filename, data):
client = self._sync_client() client = self._sync_client()

View File

@ -5,24 +5,23 @@ from collections.abc import Generator
from baidubce.auth.bce_credentials import BceCredentials from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration from baidubce.bce_client_configuration import BceClientConfiguration
from baidubce.services.bos.bos_client import BosClient from baidubce.services.bos.bos_client import BosClient
from flask import Flask
from configs import dify_config
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
class BaiduObsStorage(BaseStorage): class BaiduObsStorage(BaseStorage):
"""Implementation for Baidu OBS storage.""" """Implementation for Baidu OBS storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
app_config = self.app.config self.bucket_name = dify_config.BAIDU_OBS_BUCKET_NAME
self.bucket_name = app_config.get("BAIDU_OBS_BUCKET_NAME")
client_config = BceClientConfiguration( client_config = BceClientConfiguration(
credentials=BceCredentials( credentials=BceCredentials(
access_key_id=app_config.get("BAIDU_OBS_ACCESS_KEY"), access_key_id=dify_config.BAIDU_OBS_ACCESS_KEY,
secret_access_key=app_config.get("BAIDU_OBS_SECRET_KEY"), secret_access_key=dify_config.BAIDU_OBS_SECRET_KEY,
), ),
endpoint=app_config.get("BAIDU_OBS_ENDPOINT"), endpoint=dify_config.BAIDU_OBS_ENDPOINT,
) )
self.client = BosClient(config=client_config) self.client = BosClient(config=client_config)

View File

@ -3,16 +3,12 @@
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from collections.abc import Generator from collections.abc import Generator
from flask import Flask
class BaseStorage(ABC): class BaseStorage(ABC):
"""Interface for file storage.""" """Interface for file storage."""
app = None def __init__(self): # noqa: B027
pass
def __init__(self, app: Flask):
self.app = app
@abstractmethod @abstractmethod
def save(self, filename, data): def save(self, filename, data):

View File

@ -3,20 +3,20 @@ import io
import json import json
from collections.abc import Generator from collections.abc import Generator
from flask import Flask
from google.cloud import storage as google_cloud_storage from google.cloud import storage as google_cloud_storage
from configs import dify_config
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
class GoogleCloudStorage(BaseStorage): class GoogleCloudStorage(BaseStorage):
"""Implementation for Google Cloud storage.""" """Implementation for Google Cloud storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
app_config = self.app.config
self.bucket_name = app_config.get("GOOGLE_STORAGE_BUCKET_NAME") self.bucket_name = dify_config.GOOGLE_STORAGE_BUCKET_NAME
service_account_json_str = app_config.get("GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64") service_account_json_str = dify_config.GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64
# if service_account_json_str is empty, use Application Default Credentials # if service_account_json_str is empty, use Application Default Credentials
if service_account_json_str: if service_account_json_str:
service_account_json = base64.b64decode(service_account_json_str).decode("utf-8") service_account_json = base64.b64decode(service_account_json_str).decode("utf-8")

View File

@ -1,22 +1,22 @@
from collections.abc import Generator from collections.abc import Generator
from flask import Flask
from obs import ObsClient from obs import ObsClient
from configs import dify_config
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
class HuaweiObsStorage(BaseStorage): class HuaweiObsStorage(BaseStorage):
"""Implementation for Huawei OBS storage.""" """Implementation for Huawei OBS storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
app_config = self.app.config
self.bucket_name = app_config.get("HUAWEI_OBS_BUCKET_NAME") self.bucket_name = dify_config.HUAWEI_OBS_BUCKET_NAME
self.client = ObsClient( self.client = ObsClient(
access_key_id=app_config.get("HUAWEI_OBS_ACCESS_KEY"), access_key_id=dify_config.HUAWEI_OBS_ACCESS_KEY,
secret_access_key=app_config.get("HUAWEI_OBS_SECRET_KEY"), secret_access_key=dify_config.HUAWEI_OBS_SECRET_KEY,
server=app_config.get("HUAWEI_OBS_SERVER"), server=dify_config.HUAWEI_OBS_SERVER,
) )
def save(self, filename, data): def save(self, filename, data):

View File

@ -3,19 +3,20 @@ import shutil
from collections.abc import Generator from collections.abc import Generator
from pathlib import Path from pathlib import Path
from flask import Flask from flask import current_app
from configs import dify_config
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
class LocalFsStorage(BaseStorage): class LocalFsStorage(BaseStorage):
"""Implementation for local filesystem storage.""" """Implementation for local filesystem storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
folder = self.app.config.get("STORAGE_LOCAL_PATH") folder = dify_config.STORAGE_LOCAL_PATH
if not os.path.isabs(folder): if not os.path.isabs(folder):
folder = os.path.join(app.root_path, folder) folder = os.path.join(current_app.root_path, folder)
self.folder = folder self.folder = folder
def save(self, filename, data): def save(self, filename, data):

View File

@ -2,24 +2,24 @@ from collections.abc import Generator
import boto3 import boto3
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
from flask import Flask
from configs import dify_config
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
class OracleOCIStorage(BaseStorage): class OracleOCIStorage(BaseStorage):
"""Implementation for Oracle OCI storage.""" """Implementation for Oracle OCI storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
app_config = self.app.config
self.bucket_name = app_config.get("OCI_BUCKET_NAME") self.bucket_name = dify_config.OCI_BUCKET_NAME
self.client = boto3.client( self.client = boto3.client(
"s3", "s3",
aws_secret_access_key=app_config.get("OCI_SECRET_KEY"), aws_secret_access_key=dify_config.OCI_SECRET_KEY,
aws_access_key_id=app_config.get("OCI_ACCESS_KEY"), aws_access_key_id=dify_config.OCI_ACCESS_KEY,
endpoint_url=app_config.get("OCI_ENDPOINT"), endpoint_url=dify_config.OCI_ENDPOINT,
region_name=app_config.get("OCI_REGION"), region_name=dify_config.OCI_REGION,
) )
def save(self, filename, data): def save(self, filename, data):

View File

@ -2,25 +2,27 @@ import io
from collections.abc import Generator from collections.abc import Generator
from pathlib import Path from pathlib import Path
from flask import Flask
from supabase import Client from supabase import Client
from configs import dify_config
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
class SupabaseStorage(BaseStorage): class SupabaseStorage(BaseStorage):
"""Implementation for supabase obs storage.""" """Implementation for supabase obs storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
app_config = self.app.config if dify_config.SUPABASE_URL is None:
self.bucket_name = app_config.get("SUPABASE_BUCKET_NAME") raise ValueError("SUPABASE_URL is not set")
self.client = Client( if dify_config.SUPABASE_API_KEY is None:
supabase_url=app_config.get("SUPABASE_URL"), supabase_key=app_config.get("SUPABASE_API_KEY") raise ValueError("SUPABASE_API_KEY is not set")
) if dify_config.SUPABASE_BUCKET_NAME is None:
self.create_bucket( raise ValueError("SUPABASE_BUCKET_NAME is not set")
id=app_config.get("SUPABASE_BUCKET_NAME"), bucket_name=app_config.get("SUPABASE_BUCKET_NAME")
) self.bucket_name = dify_config.SUPABASE_BUCKET_NAME
self.client = Client(supabase_url=dify_config.SUPABASE_URL, supabase_key=dify_config.SUPABASE_API_KEY)
self.create_bucket(id=dify_config.SUPABASE_BUCKET_NAME, bucket_name=dify_config.SUPABASE_BUCKET_NAME)
def create_bucket(self, id, bucket_name): def create_bucket(self, id, bucket_name):
if not self.bucket_exists(): if not self.bucket_exists():

View File

@ -1,23 +1,23 @@
from collections.abc import Generator from collections.abc import Generator
from flask import Flask
from qcloud_cos import CosConfig, CosS3Client from qcloud_cos import CosConfig, CosS3Client
from configs import dify_config
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
class TencentCosStorage(BaseStorage): class TencentCosStorage(BaseStorage):
"""Implementation for Tencent Cloud COS storage.""" """Implementation for Tencent Cloud COS storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
app_config = self.app.config
self.bucket_name = app_config.get("TENCENT_COS_BUCKET_NAME") self.bucket_name = dify_config.TENCENT_COS_BUCKET_NAME
config = CosConfig( config = CosConfig(
Region=app_config.get("TENCENT_COS_REGION"), Region=dify_config.TENCENT_COS_REGION,
SecretId=app_config.get("TENCENT_COS_SECRET_ID"), SecretId=dify_config.TENCENT_COS_SECRET_ID,
SecretKey=app_config.get("TENCENT_COS_SECRET_KEY"), SecretKey=dify_config.TENCENT_COS_SECRET_KEY,
Scheme=app_config.get("TENCENT_COS_SCHEME"), Scheme=dify_config.TENCENT_COS_SCHEME,
) )
self.client = CosS3Client(config) self.client = CosS3Client(config)

View File

@ -1,23 +1,22 @@
from collections.abc import Generator from collections.abc import Generator
import tos import tos
from flask import Flask
from configs import dify_config
from extensions.storage.base_storage import BaseStorage from extensions.storage.base_storage import BaseStorage
class VolcengineTosStorage(BaseStorage): class VolcengineTosStorage(BaseStorage):
"""Implementation for Volcengine TOS storage.""" """Implementation for Volcengine TOS storage."""
def __init__(self, app: Flask): def __init__(self):
super().__init__(app) super().__init__()
app_config = self.app.config self.bucket_name = dify_config.VOLCENGINE_TOS_BUCKET_NAME
self.bucket_name = app_config.get("VOLCENGINE_TOS_BUCKET_NAME")
self.client = tos.TosClientV2( self.client = tos.TosClientV2(
ak=app_config.get("VOLCENGINE_TOS_ACCESS_KEY"), ak=dify_config.VOLCENGINE_TOS_ACCESS_KEY,
sk=app_config.get("VOLCENGINE_TOS_SECRET_KEY"), sk=dify_config.VOLCENGINE_TOS_SECRET_KEY,
endpoint=app_config.get("VOLCENGINE_TOS_ENDPOINT"), endpoint=dify_config.VOLCENGINE_TOS_ENDPOINT,
region=app_config.get("VOLCENGINE_TOS_REGION"), region=dify_config.VOLCENGINE_TOS_REGION,
) )
def save(self, filename, data): def save(self, filename, data):

View File

@ -2,6 +2,7 @@ import mimetypes
from collections.abc import Mapping, Sequence from collections.abc import Mapping, Sequence
from typing import Any from typing import Any
import httpx
from sqlalchemy import select from sqlalchemy import select
from constants import AUDIO_EXTENSIONS, DOCUMENT_EXTENSIONS, IMAGE_EXTENSIONS, VIDEO_EXTENSIONS from constants import AUDIO_EXTENSIONS, DOCUMENT_EXTENSIONS, IMAGE_EXTENSIONS, VIDEO_EXTENSIONS
@ -154,7 +155,7 @@ def _build_from_local_file(
file = File( file = File(
id=mapping.get("id"), id=mapping.get("id"),
filename=row.name, filename=row.name,
extension=row.extension, extension="." + row.extension,
mime_type=row.mime_type, mime_type=row.mime_type,
tenant_id=tenant_id, tenant_id=tenant_id,
type=file_type, type=file_type,
@ -177,25 +178,29 @@ def _build_from_remote_url(
url = mapping.get("url") url = mapping.get("url")
if not url: if not url:
raise ValueError("Invalid file url") raise ValueError("Invalid file url")
resp = ssrf_proxy.head(url, follow_redirects=True)
resp.raise_for_status()
resp = ssrf_proxy.head(url, follow_redirects=True)
if resp.status_code == httpx.codes.OK:
# Try to extract filename from response headers or URL # Try to extract filename from response headers or URL
content_disposition = resp.headers.get("Content-Disposition") content_disposition = resp.headers.get("Content-Disposition")
if content_disposition: if content_disposition:
filename = content_disposition.split("filename=")[-1].strip('"') filename = content_disposition.split("filename=")[-1].strip('"')
else: else:
filename = url.split("/")[-1].split("?")[0] filename = url.split("/")[-1].split("?")[0]
# If filename is empty, set a default one
if not filename:
filename = "unknown_file"
# Determine file extension
extension = "." + filename.split(".")[-1] if "." in filename else ".bin"
# Create the File object # Create the File object
file_size = int(resp.headers.get("Content-Length", -1)) file_size = int(resp.headers.get("Content-Length", -1))
mime_type = str(resp.headers.get("Content-Type", "")) mime_type = str(resp.headers.get("Content-Type", ""))
else:
filename = ""
file_size = -1
mime_type = ""
# If filename is empty, set a default one
if not filename:
filename = "unknown_file"
# Determine file extension
extension = "." + filename.split(".")[-1] if "." in filename else ".bin"
if not mime_type: if not mime_type:
mime_type, _ = mimetypes.guess_type(url) mime_type, _ = mimetypes.guess_type(url)
file = File( file = File(

View File

@ -12,9 +12,10 @@ from hashlib import sha256
from typing import Any, Optional, Union from typing import Any, Optional, Union
from zoneinfo import available_timezones from zoneinfo import available_timezones
from flask import Response, current_app, stream_with_context from flask import Response, stream_with_context
from flask_restful import fields from flask_restful import fields
from configs import dify_config
from core.app.features.rate_limiting.rate_limit import RateLimitGenerator from core.app.features.rate_limiting.rate_limit import RateLimitGenerator
from core.file import helpers as file_helpers from core.file import helpers as file_helpers
from extensions.ext_redis import redis_client from extensions.ext_redis import redis_client
@ -214,7 +215,7 @@ class TokenManager:
if additional_data: if additional_data:
token_data.update(additional_data) token_data.update(additional_data)
expiry_minutes = current_app.config[f"{token_type.upper()}_TOKEN_EXPIRY_MINUTES"] expiry_minutes = dify_config.model_dump().get(f"{token_type.upper()}_TOKEN_EXPIRY_MINUTES")
token_key = cls._get_token_key(token, token_type) token_key = cls._get_token_key(token, token_type)
expiry_time = int(expiry_minutes * 60) expiry_time = int(expiry_minutes * 60)
redis_client.setex(token_key, expiry_time, json.dumps(token_data)) redis_client.setex(token_key, expiry_time, json.dumps(token_data))

View File

@ -1,4 +1,3 @@
import os
from functools import wraps from functools import wraps
from flask import current_app, g, has_request_context, request from flask import current_app, g, has_request_context, request
@ -7,6 +6,7 @@ from flask_login.config import EXEMPT_METHODS
from werkzeug.exceptions import Unauthorized from werkzeug.exceptions import Unauthorized
from werkzeug.local import LocalProxy from werkzeug.local import LocalProxy
from configs import dify_config
from extensions.ext_database import db from extensions.ext_database import db
from models.account import Account, Tenant, TenantAccountJoin from models.account import Account, Tenant, TenantAccountJoin
from models.model import EndUser from models.model import EndUser
@ -53,8 +53,7 @@ def login_required(func):
@wraps(func) @wraps(func)
def decorated_view(*args, **kwargs): def decorated_view(*args, **kwargs):
auth_header = request.headers.get("Authorization") auth_header = request.headers.get("Authorization")
admin_api_key_enable = os.getenv("ADMIN_API_KEY_ENABLE", default="False") if dify_config.ADMIN_API_KEY_ENABLE:
if admin_api_key_enable.lower() == "true":
if auth_header: if auth_header:
if " " not in auth_header: if " " not in auth_header:
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.") raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
@ -62,10 +61,10 @@ def login_required(func):
auth_scheme = auth_scheme.lower() auth_scheme = auth_scheme.lower()
if auth_scheme != "bearer": if auth_scheme != "bearer":
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.") raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
admin_api_key = os.getenv("ADMIN_API_KEY")
admin_api_key = dify_config.ADMIN_API_KEY
if admin_api_key: if admin_api_key:
if os.getenv("ADMIN_API_KEY") == auth_token: if admin_api_key == auth_token:
workspace_id = request.headers.get("X-WORKSPACE-ID") workspace_id = request.headers.get("X-WORKSPACE-ID")
if workspace_id: if workspace_id:
tenant_account_join = ( tenant_account_join = (
@ -83,7 +82,7 @@ def login_required(func):
account.current_tenant = tenant account.current_tenant = tenant
current_app.login_manager._update_request_context_with_user(account) current_app.login_manager._update_request_context_with_user(account)
user_logged_in.send(current_app._get_current_object(), user=_get_user()) user_logged_in.send(current_app._get_current_object(), user=_get_user())
if request.method in EXEMPT_METHODS or current_app.config.get("LOGIN_DISABLED"): if request.method in EXEMPT_METHODS or dify_config.LOGIN_DISABLED:
pass pass
elif not current_user.is_authenticated: elif not current_user.is_authenticated:
return current_app.login_manager.unauthorized() return current_app.login_manager.unauthorized()

View File

@ -1404,7 +1404,7 @@ class UploadFile(Base):
size: int, size: int,
extension: str, extension: str,
mime_type: str, mime_type: str,
created_by_role: str, created_by_role: CreatedByRole,
created_by: str, created_by: str,
created_at: datetime, created_at: datetime,
used: bool, used: bool,
@ -1419,7 +1419,7 @@ class UploadFile(Base):
self.size = size self.size = size
self.extension = extension self.extension = extension
self.mime_type = mime_type self.mime_type = mime_type
self.created_by_role = created_by_role self.created_by_role = created_by_role.value
self.created_by = created_by self.created_by = created_by
self.created_at = created_at self.created_at = created_at
self.used = used self.used = used

199
api/poetry.lock generated
View File

@ -6921,119 +6921,120 @@ files = [
[[package]] [[package]]
name = "pydantic" name = "pydantic"
version = "2.8.2" version = "2.9.2"
description = "Data validation using Python type hints" description = "Data validation using Python type hints"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"},
{file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"},
] ]
[package.dependencies] [package.dependencies]
annotated-types = ">=0.4.0" annotated-types = ">=0.6.0"
pydantic-core = "2.20.1" pydantic-core = "2.23.4"
typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""}
[package.extras] [package.extras]
email = ["email-validator (>=2.0.0)"] email = ["email-validator (>=2.0.0)"]
timezone = ["tzdata"]
[[package]] [[package]]
name = "pydantic-core" name = "pydantic-core"
version = "2.20.1" version = "2.23.4"
description = "Core functionality for Pydantic validation and serialization" description = "Core functionality for Pydantic validation and serialization"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"},
{file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"},
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"},
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"},
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"},
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"},
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"},
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"},
{file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"},
{file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"},
{file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"},
{file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"},
{file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"},
{file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"},
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"},
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"},
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"},
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"},
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"},
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"},
{file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"},
{file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"},
{file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"},
{file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"},
{file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"},
{file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"},
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"},
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"},
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"},
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"},
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"},
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"},
{file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"},
{file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"},
{file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"},
{file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"},
{file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"},
{file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"},
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"},
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"},
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"},
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"},
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"},
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"},
{file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"},
{file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"},
{file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"},
{file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"},
{file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"},
{file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"},
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"},
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"},
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"},
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"},
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"},
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"},
{file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"},
{file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"},
{file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"},
{file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"},
{file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"},
{file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"},
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"},
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"},
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"},
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"},
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"},
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"},
{file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"},
{file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"},
{file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"},
{file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"},
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"},
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"},
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"},
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"},
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"},
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"},
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"},
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"},
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"},
{file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"},
] ]
[package.dependencies] [package.dependencies]
@ -7063,13 +7064,13 @@ semver = ["semver (>=3.0.2)"]
[[package]] [[package]]
name = "pydantic-settings" name = "pydantic-settings"
version = "2.4.0" version = "2.6.0"
description = "Settings management using Pydantic" description = "Settings management using Pydantic"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, {file = "pydantic_settings-2.6.0-py3-none-any.whl", hash = "sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0"},
{file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, {file = "pydantic_settings-2.6.0.tar.gz", hash = "sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188"},
] ]
[package.dependencies] [package.dependencies]
@ -10795,4 +10796,4 @@ cffi = ["cffi (>=1.11)"]
[metadata] [metadata]
lock-version = "2.0" lock-version = "2.0"
python-versions = ">=3.10,<3.13" python-versions = ">=3.10,<3.13"
content-hash = "642b2dae9e18ee6671d3d2c7129cb9a77327b69dacba996d00de2a9475d5bad3" content-hash = "5b102e3bc077ed730e9fb7be9015541111ffe7787888372d50a757aecb1d9eff"

View File

@ -87,14 +87,6 @@ ignore = [
"tests/*" = [ "tests/*" = [
"F811", # redefined-while-unused "F811", # redefined-while-unused
"F401", # unused-import "F401", # unused-import
"PT001", # missing-function-docstring
"PT004", # missing-parameter-docstring
]
"core/rag/extractor/word_extractor.py" = [
"RUF100", # Unused `noqa` directive
]
"core/tools/provider/builtin/gitlab/tools/gitlab_commits.py" = [
"PLR1714", # Consider merging multiple comparisons
] ]
[tool.ruff.lint.pyflakes] [tool.ruff.lint.pyflakes]
@ -163,8 +155,8 @@ openpyxl = "~3.1.5"
pandas = { version = "~2.2.2", extras = ["performance", "excel"] } pandas = { version = "~2.2.2", extras = ["performance", "excel"] }
psycopg2-binary = "~2.9.6" psycopg2-binary = "~2.9.6"
pycryptodome = "3.19.1" pycryptodome = "3.19.1"
pydantic = "~2.8.2" pydantic = "~2.9.2"
pydantic-settings = "~2.4.0" pydantic-settings = "~2.6.0"
pydantic_extra_types = "~2.9.0" pydantic_extra_types = "~2.9.0"
pyjwt = "~2.8.0" pyjwt = "~2.8.0"
pypdfium2 = "~4.17.0" pypdfium2 = "~4.17.0"

View File

@ -15,8 +15,6 @@ class EnterpriseRequest:
@classmethod @classmethod
def send_request(cls, method, endpoint, json=None, params=None): def send_request(cls, method, endpoint, json=None, params=None):
headers = {"Content-Type": "application/json", "Enterprise-Api-Secret-Key": cls.secret_key} headers = {"Content-Type": "application/json", "Enterprise-Api-Secret-Key": cls.secret_key}
url = f"{cls.base_url}{endpoint}" url = f"{cls.base_url}{endpoint}"
response = requests.request(method, url, json=json, params=params, headers=headers, proxies=cls.proxies) response = requests.request(method, url, json=json, params=params, headers=headers, proxies=cls.proxies)
return response.json() return response.json()

View File

@ -20,6 +20,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor
from extensions.ext_database import db from extensions.ext_database import db
from extensions.ext_storage import storage from extensions.ext_storage import storage
from models.account import Account from models.account import Account
from models.enums import CreatedByRole
from models.model import EndUser, UploadFile from models.model import EndUser, UploadFile
from services.errors.file import FileNotExistsError, FileTooLargeError, UnsupportedFileTypeError from services.errors.file import FileNotExistsError, FileTooLargeError, UnsupportedFileTypeError
@ -85,7 +86,7 @@ class FileService:
size=file_size, size=file_size,
extension=extension, extension=extension,
mime_type=file.mimetype, mime_type=file.mimetype,
created_by_role=("account" if isinstance(user, Account) else "end_user"), created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER),
created_by=user.id, created_by=user.id,
created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None), created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
used=False, used=False,
@ -118,6 +119,7 @@ class FileService:
extension="txt", extension="txt",
mime_type="text/plain", mime_type="text/plain",
created_by=current_user.id, created_by=current_user.id,
created_by_role=CreatedByRole.ACCOUNT,
created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None), created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
used=True, used=True,
used_by=current_user.id, used_by=current_user.id,

View File

@ -1,6 +1,7 @@
import pytest import pytest
from app_factory import create_app from app_factory import create_app
from configs import dify_config
mock_user = type( mock_user = type(
"MockUser", "MockUser",
@ -20,5 +21,5 @@ mock_user = type(
@pytest.fixture @pytest.fixture
def app(): def app():
app = create_app() app = create_app()
app.config["LOGIN_DISABLED"] = True dify_config.LOGIN_DISABLED = True
return app return app

View File

@ -102,6 +102,8 @@ def test_execute_code(setup_code_executor_mock):
} }
node = init_code_node(code_config) node = init_code_node(code_config)
node.graph_runtime_state.variable_pool.add(["1", "123", "args1"], 1)
node.graph_runtime_state.variable_pool.add(["1", "123", "args2"], 2)
# execute node # execute node
result = node._run() result = node._run()
@ -146,6 +148,8 @@ def test_execute_code_output_validator(setup_code_executor_mock):
} }
node = init_code_node(code_config) node = init_code_node(code_config)
node.graph_runtime_state.variable_pool.add(["1", "123", "args1"], 1)
node.graph_runtime_state.variable_pool.add(["1", "123", "args2"], 2)
# execute node # execute node
result = node._run() result = node._run()

View File

@ -1,3 +1,5 @@
import json
import httpx import httpx
from core.app.entities.app_invoke_entities import InvokeFrom from core.app.entities.app_invoke_entities import InvokeFrom
@ -14,7 +16,8 @@ from core.workflow.nodes.http_request import (
HttpRequestNodeBody, HttpRequestNodeBody,
HttpRequestNodeData, HttpRequestNodeData,
) )
from core.workflow.nodes.http_request.executor import _plain_text_to_dict from core.workflow.nodes.http_request.entities import HttpRequestNodeTimeout
from core.workflow.nodes.http_request.executor import Executor, _plain_text_to_dict
from models.enums import UserFrom from models.enums import UserFrom
from models.workflow import WorkflowNodeExecutionStatus, WorkflowType from models.workflow import WorkflowNodeExecutionStatus, WorkflowType
@ -200,3 +203,167 @@ def test_http_request_node_form_with_file(monkeypatch):
assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED
assert result.outputs is not None assert result.outputs is not None
assert result.outputs["body"] == "" assert result.outputs["body"] == ""
def test_executor_with_json_body_and_number_variable():
    """Executor should substitute a numeric upstream variable into a JSON body template."""
    # Variable pool holding a single numeric output from an upstream node.
    pool = VariablePool(system_variables={}, user_inputs={})
    pool.add(["pre_node_id", "number"], 42)

    # HTTP node configured to POST a JSON body that references that variable.
    config = HttpRequestNodeData(
        title="Test JSON Body with Number Variable",
        method="post",
        url="https://api.example.com/data",
        authorization=HttpRequestNodeAuthorization(type="no-auth"),
        headers="Content-Type: application/json",
        params="",
        body=HttpRequestNodeBody(
            type="json",
            data=[
                BodyData(
                    key="",
                    type="text",
                    value='{"number": {{#pre_node_id.number#}}}',
                )
            ],
        ),
    )

    executor = Executor(
        node_data=config,
        timeout=HttpRequestNodeTimeout(connect=10, read=30, write=30),
        variable_pool=pool,
    )

    # Parsed request parts: the placeholder must be replaced by the raw number,
    # and no other body representation should be populated.
    assert executor.method == "post"
    assert executor.url == "https://api.example.com/data"
    assert executor.headers == {"Content-Type": "application/json"}
    assert executor.params == {}
    assert executor.json == {"number": 42}
    assert executor.data is None
    assert executor.files is None
    assert executor.content is None

    # The loggable raw request should reflect the same substitution.
    log = executor.to_log()
    assert "POST /data HTTP/1.1" in log
    assert "Host: api.example.com" in log
    assert "Content-Type: application/json" in log
    assert '{"number": 42}' in log
def test_executor_with_json_body_and_object_variable():
    """Executor should expand an object variable used as the entire JSON body."""
    # Upstream node output: a whole object, referenced directly as the body.
    pool = VariablePool(system_variables={}, user_inputs={})
    pool.add(["pre_node_id", "object"], {"name": "John Doe", "age": 30, "email": "john@example.com"})

    config = HttpRequestNodeData(
        title="Test JSON Body with Object Variable",
        method="post",
        url="https://api.example.com/data",
        authorization=HttpRequestNodeAuthorization(type="no-auth"),
        headers="Content-Type: application/json",
        params="",
        body=HttpRequestNodeBody(
            type="json",
            data=[
                BodyData(
                    key="",
                    type="text",
                    value="{{#pre_node_id.object#}}",
                )
            ],
        ),
    )

    executor = Executor(
        node_data=config,
        timeout=HttpRequestNodeTimeout(connect=10, read=30, write=30),
        variable_pool=pool,
    )

    # The object variable becomes the JSON payload itself.
    assert executor.method == "post"
    assert executor.url == "https://api.example.com/data"
    assert executor.headers == {"Content-Type": "application/json"}
    assert executor.params == {}
    assert executor.json == {"name": "John Doe", "age": 30, "email": "john@example.com"}
    assert executor.data is None
    assert executor.files is None
    assert executor.content is None

    # Every serialized field must show up in the loggable raw request.
    log = executor.to_log()
    assert "POST /data HTTP/1.1" in log
    assert "Host: api.example.com" in log
    assert "Content-Type: application/json" in log
    for fragment in ('"name": "John Doe"', '"age": 30', '"email": "john@example.com"'):
        assert fragment in log
def test_executor_with_json_body_and_nested_object_variable():
    """Executor should splice an object variable into a nested position of a JSON template."""
    # Upstream node output: an object nested under a literal "object" key in the template.
    pool = VariablePool(system_variables={}, user_inputs={})
    pool.add(["pre_node_id", "object"], {"name": "John Doe", "age": 30, "email": "john@example.com"})

    config = HttpRequestNodeData(
        title="Test JSON Body with Nested Object Variable",
        method="post",
        url="https://api.example.com/data",
        authorization=HttpRequestNodeAuthorization(type="no-auth"),
        headers="Content-Type: application/json",
        params="",
        body=HttpRequestNodeBody(
            type="json",
            data=[
                BodyData(
                    key="",
                    type="text",
                    value='{"object": {{#pre_node_id.object#}}}',
                )
            ],
        ),
    )

    executor = Executor(
        node_data=config,
        timeout=HttpRequestNodeTimeout(connect=10, read=30, write=30),
        variable_pool=pool,
    )

    # The object variable is embedded as the value of the "object" key.
    assert executor.method == "post"
    assert executor.url == "https://api.example.com/data"
    assert executor.headers == {"Content-Type": "application/json"}
    assert executor.params == {}
    assert executor.json == {"object": {"name": "John Doe", "age": 30, "email": "john@example.com"}}
    assert executor.data is None
    assert executor.files is None
    assert executor.content is None

    # The nesting and every serialized field must appear in the raw request log.
    log = executor.to_log()
    assert "POST /data HTTP/1.1" in log
    assert "Host: api.example.com" in log
    assert "Content-Type: application/json" in log
    for fragment in ('"object": {', '"name": "John Doe"', '"age": 30', '"email": "john@example.com"'):
        assert fragment in log

View File

@ -25,7 +25,7 @@ class VolcengineTosTest:
return cls._instance return cls._instance
def __init__(self): def __init__(self):
self.storage = VolcengineTosStorage(app=Flask(__name__)) self.storage = VolcengineTosStorage()
self.storage.bucket_name = get_example_bucket() self.storage.bucket_name = get_example_bucket()
self.storage.client = TosClientV2( self.storage.client = TosClientV2(
ak="dify", ak="dify",

View File

@ -23,10 +23,6 @@ const titleClassName = `
const descriptionClassName = ` const descriptionClassName = `
mt-1 text-xs font-normal text-gray-500 mt-1 text-xs font-normal text-gray-500
` `
const inputClassName = `
mt-2 w-full px-3 py-2 bg-gray-100 rounded
text-sm font-normal text-gray-800
`
const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/ const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/

View File

@ -1,10 +1,9 @@
'use client' 'use client'
import { useRef, useState } from 'react' import { useState } from 'react'
import { t } from 'i18next' import { t } from 'i18next'
import { useParams, usePathname } from 'next/navigation' import { useParams, usePathname } from 'next/navigation'
import s from './style.module.css' import s from './style.module.css'
import Tooltip from '@/app/components/base/tooltip' import Tooltip from '@/app/components/base/tooltip'
import { randomString } from '@/utils'
import Loading from '@/app/components/base/loading' import Loading from '@/app/components/base/loading'
import { AudioPlayerManager } from '@/app/components/base/audio-btn/audio.player.manager' import { AudioPlayerManager } from '@/app/components/base/audio-btn/audio.player.manager'
@ -28,7 +27,6 @@ const AudioBtn = ({
}: AudioBtnProps) => { }: AudioBtnProps) => {
const [audioState, setAudioState] = useState<AudioState>('initial') const [audioState, setAudioState] = useState<AudioState>('initial')
const selector = useRef(`play-tooltip-${randomString(4)}`)
const params = useParams() const params = useParams()
const pathname = usePathname() const pathname = usePathname()
const audio_finished_call = (event: string): any => { const audio_finished_call = (event: string): any => {

View File

@ -1,119 +0,0 @@
import {
  memo,
  useMemo,
} from 'react'
import { useTranslation } from 'react-i18next'
import type { OnFeaturesChange } from '../types'
import { useFeatures } from '../hooks'
import FileUpload from './file-upload'
import OpeningStatement from './opening-statement'
import type { OpeningStatementProps } from './opening-statement'
import SuggestedQuestionsAfterAnswer from './suggested-questions-after-answer'
import TextToSpeech from './text-to-speech'
import SpeechToText from './speech-to-text'
import Citation from './citation'
import Moderation from './moderation'
import type { InputVar } from '@/app/components/workflow/types'

export type FeaturePanelProps = {
  // Propagates any feature-config change up to the owner.
  onChange?: OnFeaturesChange
  // Props forwarded verbatim to the OpeningStatement editor.
  openingStatementProps: OpeningStatementProps
  // When true, child feature controls render in read-only/disabled mode.
  disabled?: boolean
  // Workflow input variables exposed to the opening-statement editor.
  workflowVariables: InputVar[]
}

// Panel listing the app's optional chat features (file upload, opening
// statement, suggested questions, text-to-speech, speech-to-text, citation,
// moderation). Each grouped section renders only when at least one of its
// features is enabled in the shared features store.
const FeaturePanel = ({
  onChange,
  openingStatementProps,
  disabled,
  workflowVariables,
}: FeaturePanelProps) => {
  const { t } = useTranslation()
  const features = useFeatures(s => s.features)

  // True when any conversation-group feature is enabled — gates the first section.
  const showAdvanceFeature = useMemo(() => {
    return features.opening?.enabled || features.suggested?.enabled || features.speech2text?.enabled || features.text2speech?.enabled || features.citation?.enabled
  }, [features])

  // True when any tool-group feature is enabled — currently only moderation.
  const showToolFeature = useMemo(() => {
    return features.moderation?.enabled
  }, [features])

  // NOTE(review): both section headers below use the same i18n key
  // t('appDebug.feature.groupChat.title'); the tool section was presumably
  // meant to use a different key — confirm against the i18n catalog.
  return (
    <div className='space-y-3'>
      <FileUpload
        onChange={onChange}
        disabled={disabled}
      />
      {
        showAdvanceFeature && (
          <div>
            <div className='flex items-center'>
              <div className='shrink-0 text-xs font-semibold text-gray-500'>
                {t('appDebug.feature.groupChat.title')}
              </div>
              <div
                className='grow ml-3 h-[1px]'
                style={{ background: 'linear-gradient(270deg, rgba(243, 244, 246, 0) 0%, #F3F4F6 100%)' }}
              ></div>
            </div>
            <div className='py-2 space-y-2'>
              {
                features.opening?.enabled && (
                  <OpeningStatement
                    {...openingStatementProps}
                    onChange={onChange}
                    readonly={disabled}
                    workflowVariables={workflowVariables}
                  />
                )
              }
              {
                features.suggested?.enabled && (
                  <SuggestedQuestionsAfterAnswer />
                )
              }
              {
                features.text2speech?.enabled && (
                  <TextToSpeech onChange={onChange} disabled={disabled} />
                )
              }
              {
                features.speech2text?.enabled && (
                  <SpeechToText />
                )
              }
              {
                features.citation?.enabled && (
                  <Citation />
                )
              }
            </div>
          </div>
        )
      }
      {
        showToolFeature && (
          <div>
            <div className='flex items-center'>
              <div className='shrink-0 text-xs font-semibold text-gray-500'>
                {t('appDebug.feature.groupChat.title')}
              </div>
              <div
                className='grow ml-3 h-[1px]'
                style={{ background: 'linear-gradient(270deg, rgba(243, 244, 246, 0) 0%, #F3F4F6 100%)' }}
              ></div>
            </div>
            <div className='py-2 space-y-2'>
              {
                features.moderation?.enabled && (
                  <Moderation onChange={onChange} disabled={disabled} />
                )
              }
            </div>
          </div>
        )
      }
    </div>
  )
}

export default memo(FeaturePanel)

View File

@ -1,328 +0,0 @@
/* eslint-disable multiline-ternary */
'use client'
import type { FC } from 'react'
import React, { useEffect, useRef, useState } from 'react'
import produce from 'immer'
import {
  RiAddLine,
  RiDeleteBinLine,
} from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import { useBoolean } from 'ahooks'
import { ReactSortable } from 'react-sortablejs'
import {
  useFeatures,
  useFeaturesStore,
} from '../../hooks'
import type { OnFeaturesChange } from '../../types'
import cn from '@/utils/classnames'
import Panel from '@/app/components/app/configuration/base/feature-panel'
import Button from '@/app/components/base/button'
import OperationBtn from '@/app/components/app/configuration/base/operation-btn'
import { getInputKeys } from '@/app/components/base/block-input'
import ConfirmAddVar from '@/app/components/app/configuration/config-prompt/confirm-add-var'
import { getNewVar } from '@/utils/var'
import { varHighlightHTML } from '@/app/components/app/configuration/base/var-highlight'
import type { PromptVariable } from '@/models/debug'
import type { InputVar } from '@/app/components/workflow/types'

// Upper bound on the number of suggested opening questions a user may add.
const MAX_QUESTION_NUM = 5

export type OpeningStatementProps = {
  // Propagates the updated features object to the owner after a save.
  onChange?: OnFeaturesChange
  // When true, the editor cannot be focused/edited.
  readonly?: boolean
  // Prompt variables already declared for the app; used to detect
  // {{placeholders}} in the statement that are not yet declared.
  promptVariables?: PromptVariable[]
  // Called with the new variables the user agreed to auto-create.
  onAutoAddPromptVariable: (variable: PromptVariable[]) => void
  // Workflow input variables; when present they take precedence over
  // promptVariables for the undeclared-key check in handleConfirm.
  workflowVariables?: InputVar[]
}

// regex to match the {{}} and replace it with a span
const regex = /\{\{([^}]+)\}\}/g

// Editor for the app's "opening statement": a greeting text with
// {{variable}} placeholders plus an orderable list of suggested opening
// questions. Edits are staged in local temp state and only written to the
// shared features store on save (handleConfirm / autoAddVar / cancelAutoAddVar).
const OpeningStatement: FC<OpeningStatementProps> = ({
  onChange,
  readonly,
  promptVariables = [],
  onAutoAddPromptVariable,
  workflowVariables = [],
}) => {
  const { t } = useTranslation()
  const featureStore = useFeaturesStore()
  const openingStatement = useFeatures(s => s.features.opening)
  // Current saved values from the store (fall back to empty).
  const value = openingStatement?.opening_statement || ''
  const suggestedQuestions = openingStatement?.suggested_questions || []
  // Placeholder keys found in the draft that are not declared variables;
  // drives the ConfirmAddVar dialog.
  const [notIncludeKeys, setNotIncludeKeys] = useState<string[]>([])

  const hasValue = !!(value || '').trim()
  const inputRef = useRef<HTMLTextAreaElement>(null)

  const [isFocus, { setTrue: didSetFocus, setFalse: setBlur }] = useBoolean(false)

  // Enter edit mode and move the caret to the end of the textarea.
  // The setTimeout defers focus until after the textarea is rendered.
  const setFocus = () => {
    didSetFocus()
    setTimeout(() => {
      const input = inputRef.current
      if (input) {
        input.focus()
        input.setSelectionRange(input.value.length, input.value.length)
      }
    }, 0)
  }

  // Draft statement text, re-synced whenever the stored value changes.
  const [tempValue, setTempValue] = useState(value)
  useEffect(() => {
    setTempValue(value || '')
  }, [value])

  // Draft question list; blank entries are hidden in the read-only view.
  const [tempSuggestedQuestions, setTempSuggestedQuestions] = useState(suggestedQuestions || [])
  const notEmptyQuestions = tempSuggestedQuestions.filter(question => !!question && question.trim())
  // HTML preview of the draft: escape <>, highlight {{vars}}, keep line breaks.
  const coloredContent = (tempValue || '')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(regex, varHighlightHTML({ name: '$1' })) // `<span class="${highLightClassName}">{{$1}}</span>`
    .replace(/\n/g, '<br />')

  const handleEdit = () => {
    if (readonly)
      return
    setFocus()
  }

  const [isShowConfirmAddVar, { setTrue: showConfirmAddVar, setFalse: hideConfirmAddVar }] = useBoolean(false)

  // Discard the draft and restore the last saved statement/questions.
  const handleCancel = () => {
    setBlur()
    setTempValue(value)
    setTempSuggestedQuestions(suggestedQuestions)
  }

  // Save the draft. If the statement references undeclared variable keys,
  // open the confirm-add-variable dialog instead of saving immediately.
  const handleConfirm = () => {
    const keys = getInputKeys(tempValue)
    const promptKeys = promptVariables.map(item => item.key)
    const workflowVariableKeys = workflowVariables.map(item => item.variable)
    let notIncludeKeys: string[] = []

    // No declared variables at all: every placeholder key is undeclared.
    if (promptKeys.length === 0 && workflowVariables.length === 0) {
      if (keys.length > 0)
        notIncludeKeys = keys
    }
    else {
      // Workflow variables take precedence over prompt variables.
      if (workflowVariables.length > 0)
        notIncludeKeys = keys.filter(key => !workflowVariableKeys.includes(key))
      else notIncludeKeys = keys.filter(key => !promptKeys.includes(key))
    }

    if (notIncludeKeys.length > 0) {
      setNotIncludeKeys(notIncludeKeys)
      showConfirmAddVar()
      return
    }
    setBlur()
    const { getState } = featureStore!
    const {
      features,
      setFeatures,
    } = getState()
    const newFeatures = produce(features, (draft) => {
      if (draft.opening) {
        draft.opening.opening_statement = tempValue
        draft.opening.suggested_questions = tempSuggestedQuestions
      }
    })
    setFeatures(newFeatures)
    if (onChange)
      onChange(newFeatures)
  }

  // User declined auto-creating variables: save only the statement text.
  // NOTE(review): unlike handleConfirm, this path does not persist
  // tempSuggestedQuestions — presumably intentional, but confirm.
  const cancelAutoAddVar = () => {
    const { getState } = featureStore!
    const {
      features,
      setFeatures,
    } = getState()
    const newFeatures = produce(features, (draft) => {
      if (draft.opening)
        draft.opening.opening_statement = tempValue
    })
    setFeatures(newFeatures)
    if (onChange)
      onChange(newFeatures)
    hideConfirmAddVar()
    setBlur()
  }

  // User accepted: save the statement and ask the owner to create a string
  // variable for each undeclared key.
  const autoAddVar = () => {
    const { getState } = featureStore!
    const {
      features,
      setFeatures,
    } = getState()
    const newFeatures = produce(features, (draft) => {
      if (draft.opening)
        draft.opening.opening_statement = tempValue
    })
    setFeatures(newFeatures)
    if (onChange)
      onChange(newFeatures)
    onAutoAddPromptVariable([...notIncludeKeys.map(key => getNewVar(key, 'string'))])
    hideConfirmAddVar()
    setBlur()
  }

  // Header action: Save/Cancel while editing, otherwise an Edit button.
  const headerRight = !readonly ? (
    isFocus ? (
      <div className='flex items-center space-x-1'>
        <Button
          variant='ghost'
          size='small'
          onClick={handleCancel}
        >
          {t('common.operation.cancel')}
        </Button>
        <Button size='small' onClick={handleConfirm} variant="primary">{t('common.operation.save')}</Button>
      </div>
    ) : (
      <OperationBtn type='edit' actionName={hasValue ? '' : t('appDebug.openingStatement.writeOpener') as string} onClick={handleEdit} />
    )
  ) : null

  // Editing: a sortable, editable question list (drag handle + delete + add).
  // Read-only: the non-empty questions rendered as static chips.
  // NOTE(review): 'bg-[#243, 244, 246]' below is not a valid color value —
  // looks like a mangled rgb(243, 244, 246); confirm intended divider color.
  // NOTE(review): type="input" is not a standard <input> type; browsers fall
  // back to "text" — confirm that was the intent.
  const renderQuestions = () => {
    return isFocus ? (
      <div>
        <div className='flex items-center py-2'>
          <div className='shrink-0 flex space-x-0.5 leading-[18px] text-xs font-medium text-gray-500'>
            <div className='uppercase'>{t('appDebug.openingStatement.openingQuestion')}</div>
            <div>·</div>
            <div>{tempSuggestedQuestions.length}/{MAX_QUESTION_NUM}</div>
          </div>
          <div className='ml-3 grow w-0 h-px bg-[#243, 244, 246]'></div>
        </div>
        <ReactSortable
          className="space-y-1"
          list={tempSuggestedQuestions.map((name, index) => {
            return {
              id: index,
              name,
            }
          })}
          setList={list => setTempSuggestedQuestions(list.map(item => item.name))}
          handle='.handle'
          ghostClass="opacity-50"
          animation={150}
        >
          {tempSuggestedQuestions.map((question, index) => {
            return (
              <div className='group relative rounded-lg border border-gray-200 flex items-center pl-2.5 hover:border-gray-300 hover:bg-white' key={index}>
                <div className='handle flex items-center justify-center w-4 h-4 cursor-grab'>
                  <svg width="6" height="10" viewBox="0 0 6 10" fill="none" xmlns="http://www.w3.org/2000/svg">
                    <path fillRule="evenodd" clipRule="evenodd" d="M1 2C1.55228 2 2 1.55228 2 1C2 0.447715 1.55228 0 1 0C0.447715 0 0 0.447715 0 1C0 1.55228 0.447715 2 1 2ZM1 6C1.55228 6 2 5.55228 2 5C2 4.44772 1.55228 4 1 4C0.447715 4 0 4.44772 0 5C0 5.55228 0.447715 6 1 6ZM6 1C6 1.55228 5.55228 2 5 2C4.44772 2 4 1.55228 4 1C4 0.447715 4.44772 0 5 0C5.55228 0 6 0.447715 6 1ZM5 6C5.55228 6 6 5.55228 6 5C6 4.44772 5.55228 4 5 4C4.44772 4 4 4.44772 4 5C4 5.55228 4.44772 6 5 6ZM2 9C2 9.55229 1.55228 10 1 10C0.447715 10 0 9.55229 0 9C0 8.44771 0.447715 8 1 8C1.55228 8 2 8.44771 2 9ZM5 10C5.55228 10 6 9.55229 6 9C6 8.44771 5.55228 8 5 8C4.44772 8 4 8.44771 4 9C4 9.55229 4.44772 10 5 10Z" fill="#98A2B3" />
                  </svg>
                </div>
                <input
                  type="input"
                  value={question || ''}
                  onChange={(e) => {
                    const value = e.target.value
                    setTempSuggestedQuestions(tempSuggestedQuestions.map((item, i) => {
                      if (index === i)
                        return value
                      return item
                    }))
                  }}
                  className={'w-full overflow-x-auto pl-1.5 pr-8 text-sm leading-9 text-gray-900 border-0 grow h-9 bg-transparent focus:outline-none cursor-pointer rounded-lg'}
                />
                <div
                  className='block absolute top-1/2 translate-y-[-50%] right-1.5 p-1 rounded-md cursor-pointer hover:bg-[#FEE4E2] hover:text-[#D92D20]'
                  onClick={() => {
                    setTempSuggestedQuestions(tempSuggestedQuestions.filter((_, i) => index !== i))
                  }}
                >
                  <RiDeleteBinLine className='w-3.5 h-3.5' />
                </div>
              </div>
            )
          })}</ReactSortable>
        {tempSuggestedQuestions.length < MAX_QUESTION_NUM && (
          <div
            onClick={() => { setTempSuggestedQuestions([...tempSuggestedQuestions, '']) }}
            className='mt-1 flex items-center h-9 px-3 gap-2 rounded-lg cursor-pointer text-gray-400 bg-gray-100 hover:bg-gray-200'>
            <RiAddLine className='w-4 h-4' />
            <div className='text-gray-500 text-[13px]'>{t('appDebug.variableConfig.addOption')}</div>
          </div>
        )}
      </div>
    ) : (
      <div className='mt-1.5 flex flex-wrap'>
        {notEmptyQuestions.map((question, index) => {
          return (
            <div key={index} className='mt-1 mr-1 max-w-full truncate last:mr-0 shrink-0 leading-8 items-center px-2.5 rounded-lg border border-gray-200 shadow-xs bg-white text-[13px] font-normal text-gray-900 cursor-pointer'>
              {question}
            </div>
          )
        })}
      </div>
    )
  }

  return (
    <Panel
      className={cn(isShowConfirmAddVar && 'h-[220px]', 'relative !bg-gray-25')}
      title={t('appDebug.openingStatement.title')}
      headerIcon={
        <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
          <path fillRule="evenodd" clipRule="evenodd" d="M8.33353 1.33301C4.83572 1.33301 2.00019 4.16854 2.00019 7.66634C2.00019 8.37301 2.11619 9.05395 2.3307 9.69036C2.36843 9.80229 2.39063 9.86853 2.40507 9.91738L2.40979 9.93383L2.40729 9.93903C2.39015 9.97437 2.36469 10.0218 2.31705 10.11L1.2158 12.1484C1.14755 12.2746 1.07633 12.4064 1.02735 12.5209C0.978668 12.6348 0.899813 12.8437 0.938613 13.0914C0.984094 13.3817 1.15495 13.6373 1.40581 13.7903C1.61981 13.9208 1.843 13.9279 1.96683 13.9264C2.09141 13.925 2.24036 13.9095 2.38314 13.8947L5.81978 13.5395C5.87482 13.5338 5.9036 13.5309 5.92468 13.5292L5.92739 13.529L5.93564 13.532C5.96154 13.5413 5.99666 13.5548 6.0573 13.5781C6.76459 13.8506 7.53244 13.9997 8.33353 13.9997C11.8313 13.9997 14.6669 11.1641 14.6669 7.66634C14.6669 4.16854 11.8313 1.33301 8.33353 1.33301ZM5.9799 5.72116C6.73142 5.08698 7.73164 5.27327 8.33144 5.96584C8.93125 5.27327 9.91854 5.09365 10.683 5.72116C11.4474 6.34867 11.5403 7.41567 10.9501 8.16572C10.5845 8.6304 9.6668 9.47911 9.02142 10.0576C8.78435 10.2702 8.66582 10.3764 8.52357 10.4192C8.40154 10.456 8.26134 10.456 8.13931 10.4192C7.99706 10.3764 7.87853 10.2702 7.64147 10.0576C6.99609 9.47911 6.07839 8.6304 5.71276 8.16572C5.12259 7.41567 5.22839 6.35534 5.9799 5.72116Z" fill="#E74694" />
        </svg>
      }
      headerRight={headerRight}
      hasHeaderBottomBorder={!hasValue}
      isFocus={isFocus}
    >
      <div className='text-gray-700 text-sm'>
        {(hasValue || (!hasValue && isFocus)) ? (
          <>
            {isFocus
              ? (
                <div>
                  <textarea
                    ref={inputRef}
                    value={tempValue}
                    rows={3}
                    onChange={e => setTempValue(e.target.value)}
                    className="w-full px-0 text-sm border-0 bg-transparent focus:outline-none "
                    placeholder={t('appDebug.openingStatement.placeholder') as string}
                  >
                  </textarea>
                </div>
              )
              : (
                <div dangerouslySetInnerHTML={{
                  __html: coloredContent,
                }}></div>
              )}
            {renderQuestions()}
          </>) : (
          <div className='pt-2 pb-1 text-xs text-gray-500'>{t('appDebug.openingStatement.noDataPlaceHolder')}</div>
        )}
        {isShowConfirmAddVar && (
          <ConfirmAddVar
            varNameArr={notIncludeKeys}
            onConfirm={autoAddVar}
            onCancel={cancelAutoAddVar}
            onHide={hideConfirmAddVar}
          />
        )}
      </div>
    </Panel>
  )
}
export default React.memo(OpeningStatement)

View File

@ -45,7 +45,7 @@ const DialogWrapper = ({
leaveFrom="opacity-100 scale-100" leaveFrom="opacity-100 scale-100"
leaveTo="opacity-0 scale-95" leaveTo="opacity-0 scale-95"
> >
<Dialog.Panel className={cn('grow flex relative w-[420px] h-0 p-0 overflow-hidden text-left align-middle transition-all transform bg-components-panel-bg-alt border-components-panel-border shadow-xl', inWorkflow ? 'border-t-[0.5px] border-l-[0.5px] border-b-[0.5px] rounded-l-2xl' : 'border-[0.5px] rounded-2xl', className)}> <Dialog.Panel className={cn('grow flex flex-col relative w-[420px] h-0 p-0 overflow-hidden text-left align-middle transition-all transform bg-components-panel-bg-alt border-components-panel-border shadow-xl', inWorkflow ? 'border-t-[0.5px] border-l-[0.5px] border-b-[0.5px] rounded-l-2xl' : 'border-[0.5px] rounded-2xl', className)}>
{children} {children}
</Dialog.Panel> </Dialog.Panel>
</Transition.Child> </Transition.Child>

View File

@ -80,7 +80,7 @@ const NewFeaturePanel = ({
<span>{isChatMode ? t('workflow.common.fileUploadTip') : t('workflow.common.ImageUploadLegacyTip')}</span> <span>{isChatMode ? t('workflow.common.fileUploadTip') : t('workflow.common.ImageUploadLegacyTip')}</span>
<a <a
className='text-text-accent' className='text-text-accent'
href={`https://docs.dify.ai/${locale === LanguagesSupported[1] && 'v/zh-hans/'}'guides/workflow/bulletin`} href={`https://docs.dify.ai/${locale === LanguagesSupported[1] ? 'v/zh-hans/' : ''}guides/workflow/bulletin`}
target='_blank' rel='noopener noreferrer' target='_blank' rel='noopener noreferrer'
>{t('workflow.common.featuresDocLink')}</a> >{t('workflow.common.featuresDocLink')}</a>
</div> </div>

View File

@ -52,7 +52,7 @@ export const getInputVars = (text: string): ValueSelector[] => {
export const FILE_EXTS: Record<string, string[]> = { export const FILE_EXTS: Record<string, string[]> = {
[SupportUploadFileTypes.image]: ['JPG', 'JPEG', 'PNG', 'GIF', 'WEBP', 'SVG'], [SupportUploadFileTypes.image]: ['JPG', 'JPEG', 'PNG', 'GIF', 'WEBP', 'SVG'],
[SupportUploadFileTypes.document]: ['TXT', 'MARKDOWN', 'PDF', 'HTML', 'XLSX', 'XLS', 'DOCX', 'CSV', 'EML', 'MSG', 'PPTX', 'PPT', 'XML', 'EPUB'], [SupportUploadFileTypes.document]: ['TXT', 'MD', 'MARKDOWN', 'PDF', 'HTML', 'XLSX', 'XLS', 'DOCX', 'CSV', 'EML', 'MSG', 'PPTX', 'PPT', 'XML', 'EPUB'],
[SupportUploadFileTypes.audio]: ['MP3', 'M4A', 'WAV', 'WEBM', 'AMR'], [SupportUploadFileTypes.audio]: ['MP3', 'M4A', 'WAV', 'WEBM', 'AMR'],
[SupportUploadFileTypes.video]: ['MP4', 'MOV', 'MPEG', 'MPGA'], [SupportUploadFileTypes.video]: ['MP4', 'MOV', 'MPEG', 'MPGA'],
} }

View File

@ -24,7 +24,7 @@ const LanguageSelect: FC<ILanguageSelectProps> = ({
disabled={disabled} disabled={disabled}
htmlContent={ htmlContent={
<div className='w-full py-1'> <div className='w-full py-1'>
{languages.filter(language => language.supported).map(({ prompt_name, name }) => ( {languages.filter(language => language.supported).map(({ prompt_name }) => (
<div <div
key={prompt_name} key={prompt_name}
className='py-2 px-3 mx-1 flex items-center gap-2 hover:bg-gray-100 rounded-lg cursor-pointer text-gray-700 text-sm' className='py-2 px-3 mx-1 flex items-center gap-2 hover:bg-gray-100 rounded-lg cursor-pointer text-gray-700 text-sm'