Mirror of https://github.com/langgenius/dify.git (synced 2024-11-16 03:32:23 +08:00)

Commit dea2962a79: Merge main into feat/plugin
@@ -168,7 +168,7 @@ Star Dify on GitHub and be instantly notified of new releases.
 > Before installing Dify, make sure your machine meets the following minimum system requirements:
 >
 >- CPU >= 2 Core
->- RAM >= 4GB
+>- RAM >= 4 GiB

 </br>
@@ -174,7 +174,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
 在安装 Dify 之前,请确保您的机器满足以下最低系统要求:

 - CPU >= 2 Core
-- RAM >= 4GB
+- RAM >= 4 GiB

 ### 快速启动
api/app.py (16 changed lines)
@@ -1,5 +1,7 @@
 import os
 
+from configs import dify_config
+
 if os.environ.get("DEBUG", "false").lower() != "true":
     from gevent import monkey
@@ -36,17 +38,11 @@ if hasattr(time, "tzset"):
     time.tzset()
 
-# -------------
-# Configuration
-# -------------
-config_type = os.getenv("EDITION", default="SELF_HOSTED")  # ce edition first
-
 # create app
 app = create_app()
 celery = app.extensions["celery"]
 
-if app.config.get("TESTING"):
+if dify_config.TESTING:
     print("App is running in TESTING mode")
@@ -54,15 +50,15 @@ if app.config.get("TESTING"):
 def after_request(response):
     """Add Version headers to the response."""
     response.set_cookie("remember_token", "", expires=0)
-    response.headers.add("X-Version", app.config["CURRENT_VERSION"])
-    response.headers.add("X-Env", app.config["DEPLOY_ENV"])
+    response.headers.add("X-Version", dify_config.CURRENT_VERSION)
+    response.headers.add("X-Env", dify_config.DEPLOY_ENV)
     return response
 
 
 @app.route("/health")
 def health():
     return Response(
-        json.dumps({"pid": os.getpid(), "status": "ok", "version": app.config["CURRENT_VERSION"]}),
+        json.dumps({"pid": os.getpid(), "status": "ok", "version": dify_config.CURRENT_VERSION}),
         status=200,
         content_type="application/json",
     )
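For context, the pattern this file converges on — reading values from a typed settings object instead of string-keyed `app.config` lookups — can be sketched in isolation as follows. The class and attribute names here are illustrative, not the project's actual code:

```python
# Minimal sketch: a typed settings object replaces ad-hoc app.config lookups
# when stamping version headers on every response.
from flask import Flask, Response
from pydantic_settings import BaseSettings


class AppSettings(BaseSettings):
    CURRENT_VERSION: str = "0.0.0"   # hypothetical defaults for illustration
    DEPLOY_ENV: str = "DEVELOPMENT"


settings = AppSettings()
app = Flask(__name__)


@app.after_request
def add_version_headers(response: Response) -> Response:
    # Attribute access on the settings object is validated at startup,
    # unlike string-keyed app.config lookups that fail only at request time.
    response.headers.add("X-Version", settings.CURRENT_VERSION)
    response.headers.add("X-Env", settings.DEPLOY_ENV)
    return response
```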
@@ -10,9 +10,6 @@ if os.environ.get("DEBUG", "false").lower() != "true":
     grpc.experimental.gevent.init_gevent()
 
 import json
-import logging
-import sys
-from logging.handlers import RotatingFileHandler
 
 from flask import Flask, Response, request
 from flask_cors import CORS
@@ -27,6 +24,7 @@ from extensions import (
     ext_compress,
     ext_database,
     ext_hosting_provider,
+    ext_logging,
     ext_login,
     ext_mail,
     ext_migrate,
@@ -70,43 +68,7 @@ def create_flask_app_with_configs() -> Flask:
 
 def create_app() -> Flask:
     app = create_flask_app_with_configs()
-
-    app.secret_key = app.config["SECRET_KEY"]
-
-    log_handlers = None
-    log_file = app.config.get("LOG_FILE")
-    if log_file:
-        log_dir = os.path.dirname(log_file)
-        os.makedirs(log_dir, exist_ok=True)
-        log_handlers = [
-            RotatingFileHandler(
-                filename=log_file,
-                maxBytes=1024 * 1024 * 1024,
-                backupCount=5,
-            ),
-            logging.StreamHandler(sys.stdout),
-        ]
-
-    logging.basicConfig(
-        level=app.config.get("LOG_LEVEL"),
-        format=app.config.get("LOG_FORMAT"),
-        datefmt=app.config.get("LOG_DATEFORMAT"),
-        handlers=log_handlers,
-        force=True,
-    )
-    log_tz = app.config.get("LOG_TZ")
-    if log_tz:
-        from datetime import datetime
-
-        import pytz
-
-        timezone = pytz.timezone(log_tz)
-
-        def time_converter(seconds):
-            return datetime.utcfromtimestamp(seconds).astimezone(timezone).timetuple()
-
-        for handler in logging.root.handlers:
-            handler.formatter.converter = time_converter
-
+    app.secret_key = dify_config.SECRET_KEY
     initialize_extensions(app)
     register_blueprints(app)
     register_commands(app)
@@ -117,6 +79,7 @@ def create_app() -> Flask:
 def initialize_extensions(app):
     # Since the application instance is now created, pass it to each Flask
     # extension instance to bind it to the Flask application instance (app)
+    ext_logging.init_app(app)
     ext_compress.init_app(app)
     ext_code_based_extension.init()
     ext_database.init_app(app)
@@ -187,7 +150,7 @@ def register_blueprints(app):
 
     CORS(
         web_bp,
-        resources={r"/*": {"origins": app.config["WEB_API_CORS_ALLOW_ORIGINS"]}},
+        resources={r"/*": {"origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS}},
         supports_credentials=True,
         allow_headers=["Content-Type", "Authorization", "X-App-Code"],
         methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
@@ -198,7 +161,7 @@ def register_blueprints(app):
 
     CORS(
         console_app_bp,
-        resources={r"/*": {"origins": app.config["CONSOLE_CORS_ALLOW_ORIGINS"]}},
+        resources={r"/*": {"origins": dify_config.CONSOLE_CORS_ALLOW_ORIGINS}},
        supports_credentials=True,
         allow_headers=["Content-Type", "Authorization"],
         methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
@@ -32,6 +32,21 @@ class SecurityConfig(BaseSettings):
         default=5,
     )
 
+    LOGIN_DISABLED: bool = Field(
+        description="Whether to disable login checks",
+        default=False,
+    )
+
+    ADMIN_API_KEY_ENABLE: bool = Field(
+        description="Whether to enable admin api key for authentication",
+        default=False,
+    )
+
+    ADMIN_API_KEY: Optional[str] = Field(
+        description="admin api key for authentication",
+        default=None,
+    )
+
 
 class AppExecutionConfig(BaseSettings):
     """
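These `Field(...)` declarations are plain pydantic-settings fields populated from environment variables. A self-contained sketch of the pattern (the class name here is invented for illustration):

```python
# Illustrative pydantic-settings sketch of the configuration style used above.
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings


class AdminAuthConfig(BaseSettings):
    ADMIN_API_KEY_ENABLE: bool = Field(
        description="Whether to enable admin api key for authentication",
        default=False,
    )
    ADMIN_API_KEY: Optional[str] = Field(
        description="admin api key for authentication",
        default=None,
    )


# Values are read from the environment (e.g. ADMIN_API_KEY=secret) at instantiation.
config = AdminAuthConfig()
print(config.ADMIN_API_KEY_ENABLE)
```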
@@ -1,4 +1,3 @@
-import os
 from functools import wraps
 
 from flask import request
@@ -7,6 +6,7 @@ from sqlalchemy import select
 from sqlalchemy.orm import Session
 from werkzeug.exceptions import NotFound, Unauthorized
 
+from configs import dify_config
 from constants.languages import supported_language
 from controllers.console import api
 from controllers.console.wraps import only_edition_cloud
@@ -17,7 +17,7 @@ from models.model import App, InstalledApp, RecommendedApp
 def admin_required(view):
     @wraps(view)
     def decorated(*args, **kwargs):
-        if not os.getenv("ADMIN_API_KEY"):
+        if not dify_config.ADMIN_API_KEY:
             raise Unauthorized("API key is invalid.")
 
         auth_header = request.headers.get("Authorization")
@@ -33,7 +33,7 @@ def admin_required(view):
         if auth_scheme != "bearer":
             raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
 
-        if os.getenv("ADMIN_API_KEY") != auth_token:
+        if dify_config.ADMIN_API_KEY != auth_token:
             raise Unauthorized("API key is invalid.")
 
         return view(*args, **kwargs)
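The decorator touched by this hunk is a standard bearer-token guard. A rough, self-contained sketch of the same shape, with the key hard-coded purely for illustration (in practice it would come from a settings object):

```python
# Sketch of a bearer-token admin guard; the key source is an assumption.
from functools import wraps

from flask import request
from werkzeug.exceptions import Unauthorized

ADMIN_API_KEY = "change-me"  # assumed to come from configuration in real code


def admin_required(view):
    @wraps(view)
    def decorated(*args, **kwargs):
        if not ADMIN_API_KEY:
            raise Unauthorized("API key is invalid.")

        auth_header = request.headers.get("Authorization", "")
        scheme, _, token = auth_header.partition(" ")
        if scheme.lower() != "bearer" or token != ADMIN_API_KEY:
            raise Unauthorized("API key is invalid.")
        return view(*args, **kwargs)

    return decorated
```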
@@ -30,13 +30,12 @@ class FileApi(Resource):
     @account_initialization_required
     @marshal_with(upload_config_fields)
     def get(self):
-        file_size_limit = dify_config.UPLOAD_FILE_SIZE_LIMIT
-        batch_count_limit = dify_config.UPLOAD_FILE_BATCH_LIMIT
-        image_file_size_limit = dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT
         return {
-            "file_size_limit": file_size_limit,
-            "batch_count_limit": batch_count_limit,
-            "image_file_size_limit": image_file_size_limit,
+            "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT,
+            "batch_count_limit": dify_config.UPLOAD_FILE_BATCH_LIMIT,
+            "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT,
+            "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT,
+            "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT,
         }, 200
 
     @setup_required
@@ -42,10 +42,10 @@ class ToolFilePreviewApi(Resource):
             stream,
             mimetype=tool_file.mimetype,
             direct_passthrough=True,
-            headers={
-                "Content-Length": str(tool_file.size),
-            },
+            headers={},
         )
+        if tool_file.size > 0:
+            response.headers["Content-Length"] = str(tool_file.size)
         if args["as_attachment"]:
             response.headers["Content-Disposition"] = f"attachment; filename={tool_file.name}"
@@ -48,7 +48,7 @@ class MessageListApi(Resource):
         "tool_input": fields.String,
         "created_at": TimestampField,
         "observation": fields.String,
-        "message_files": fields.List(fields.String),
+        "message_files": fields.List(fields.Nested(message_file_fields)),
     }
 
     message_fields = {
@@ -46,7 +46,7 @@ class RemoteFileInfoApi(WebApiResource):
             response = ssrf_proxy.head(decoded_url)
             return {
                 "file_type": response.headers.get("Content-Type", "application/octet-stream"),
-                "file_length": int(response.headers.get("Content-Length", 0)),
+                "file_length": int(response.headers.get("Content-Length", -1)),
             }
         except Exception as e:
             return {"error": str(e)}, 400
@@ -53,11 +53,11 @@ class BasicVariablesConfigManager:
                 VariableEntity(
                     type=variable_type,
                     variable=variable.get("variable"),
-                    description=variable.get("description", ""),
+                    description=variable.get("description") or "",
                     label=variable.get("label"),
                     required=variable.get("required", False),
                     max_length=variable.get("max_length"),
-                    options=variable.get("options", []),
+                    options=variable.get("options") or [],
                 )
             )
@@ -2,7 +2,7 @@ from collections.abc import Sequence
 from enum import Enum
 from typing import Any, Optional
 
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, field_validator
 
 from core.file import FileExtraConfig, FileTransferMethod, FileType
 from core.model_runtime.entities.message_entities import PromptMessageRole
@@ -114,6 +114,16 @@ class VariableEntity(BaseModel):
     allowed_file_extensions: Sequence[str] = Field(default_factory=list)
     allowed_file_upload_methods: Sequence[FileTransferMethod] = Field(default_factory=list)
 
+    @field_validator("description", mode="before")
+    @classmethod
+    def convert_none_description(cls, v: Any) -> str:
+        return v or ""
+
+    @field_validator("options", mode="before")
+    @classmethod
+    def convert_none_options(cls, v: Any) -> Sequence[str]:
+        return v or []
+
 
 class ExternalDataVariableEntity(BaseModel):
     """
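The two validators added above follow the standard pydantic v2 `mode="before"` pattern for coercing `None` into a sensible default before field validation runs. A stand-alone sketch of the same idea (the model below is illustrative, not the real entity):

```python
# Minimal pydantic v2 sketch of "before" validators that coerce None to defaults.
from collections.abc import Sequence
from typing import Any

from pydantic import BaseModel, field_validator


class VariableSketch(BaseModel):
    description: str = ""
    options: Sequence[str] = ()

    @field_validator("description", mode="before")
    @classmethod
    def convert_none_description(cls, v: Any) -> str:
        return v or ""

    @field_validator("options", mode="before")
    @classmethod
    def convert_none_options(cls, v: Any) -> Sequence[str]:
        return v or []


# None values coming from loosely-typed YAML/JSON no longer fail validation:
print(VariableSketch(description=None, options=None))
```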
@@ -17,10 +17,13 @@ class FileUploadConfigManager:
         file_upload_dict = config.get("file_upload")
         if file_upload_dict:
             if file_upload_dict.get("enabled"):
+                transform_methods = file_upload_dict.get("allowed_file_upload_methods") or file_upload_dict.get(
+                    "allowed_upload_methods", []
+                )
                 data = {
                     "image_config": {
                         "number_limits": file_upload_dict["number_limits"],
-                        "transfer_methods": file_upload_dict["allowed_file_upload_methods"],
+                        "transfer_methods": transform_methods,
                     }
                 }
@@ -53,7 +53,7 @@ class BasedGenerateTaskPipeline:
         self._output_moderation_handler = self._init_output_moderation()
         self._stream = stream
 
-    def _handle_error(self, event: QueueErrorEvent, message: Optional[Message] = None) -> Exception:
+    def _handle_error(self, event: QueueErrorEvent, message: Optional[Message] = None):
         """
         Handle error event.
         :param event: event
@@ -100,7 +100,7 @@ class BasedGenerateTaskPipeline:
 
         return message
 
-    def _error_to_stream_response(self, e: Exception) -> ErrorStreamResponse:
+    def _error_to_stream_response(self, e: Exception):
         """
         Error to stream response.
         :param e: exception
@@ -4,6 +4,8 @@ from collections.abc import Mapping, Sequence
 from datetime import datetime, timezone
 from typing import Any, Optional, Union, cast
 
+from sqlalchemy.orm import Session
+
 from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity
 from core.app.entities.queue_entities import (
     QueueIterationCompletedEvent,
@@ -232,6 +234,8 @@ class WorkflowCycleManage:
         self, workflow_run: WorkflowRun, event: QueueNodeStartedEvent
     ) -> WorkflowNodeExecution:
         # init workflow node execution
+
+        with Session(db.engine, expire_on_commit=False) as session:
             workflow_node_execution = WorkflowNodeExecution()
             workflow_node_execution.tenant_id = workflow_run.tenant_id
             workflow_node_execution.app_id = workflow_run.app_id
@@ -249,13 +253,11 @@ class WorkflowCycleManage:
             workflow_node_execution.created_by = workflow_run.created_by
             workflow_node_execution.created_at = datetime.now(timezone.utc).replace(tzinfo=None)
 
-        db.session.add(workflow_node_execution)
-        db.session.commit()
-        db.session.refresh(workflow_node_execution)
-        db.session.close()
+            session.add(workflow_node_execution)
+            session.commit()
+            session.refresh(workflow_node_execution)
 
         self._wip_workflow_node_executions[workflow_node_execution.node_execution_id] = workflow_node_execution
 
         return workflow_node_execution
 
     def _handle_workflow_node_execution_success(self, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution:
@@ -1,8 +1,9 @@
 from typing import Optional
 
-from flask import Config, Flask
+from flask import Flask
 from pydantic import BaseModel
 
+from configs import dify_config
 from core.entities.provider_entities import ProviderQuotaType, QuotaUnit, RestrictModel
 from core.model_runtime.entities.model_entities import ModelType
@@ -43,32 +44,30 @@ class HostingConfiguration:
     moderation_config: HostedModerationConfig = None
 
     def init_app(self, app: Flask) -> None:
-        config = app.config
-
-        if config.get("EDITION") != "CLOUD":
+        if dify_config.EDITION != "CLOUD":
             return
 
-        self.provider_map["azure_openai"] = self.init_azure_openai(config)
-        self.provider_map["openai"] = self.init_openai(config)
-        self.provider_map["anthropic"] = self.init_anthropic(config)
-        self.provider_map["minimax"] = self.init_minimax(config)
-        self.provider_map["spark"] = self.init_spark(config)
-        self.provider_map["zhipuai"] = self.init_zhipuai(config)
+        self.provider_map["azure_openai"] = self.init_azure_openai()
+        self.provider_map["openai"] = self.init_openai()
+        self.provider_map["anthropic"] = self.init_anthropic()
+        self.provider_map["minimax"] = self.init_minimax()
+        self.provider_map["spark"] = self.init_spark()
+        self.provider_map["zhipuai"] = self.init_zhipuai()
 
-        self.moderation_config = self.init_moderation_config(config)
+        self.moderation_config = self.init_moderation_config()
 
     @staticmethod
-    def init_azure_openai(app_config: Config) -> HostingProvider:
+    def init_azure_openai() -> HostingProvider:
         quota_unit = QuotaUnit.TIMES
-        if app_config.get("HOSTED_AZURE_OPENAI_ENABLED"):
+        if dify_config.HOSTED_AZURE_OPENAI_ENABLED:
             credentials = {
-                "openai_api_key": app_config.get("HOSTED_AZURE_OPENAI_API_KEY"),
-                "openai_api_base": app_config.get("HOSTED_AZURE_OPENAI_API_BASE"),
+                "openai_api_key": dify_config.HOSTED_AZURE_OPENAI_API_KEY,
+                "openai_api_base": dify_config.HOSTED_AZURE_OPENAI_API_BASE,
                 "base_model_name": "gpt-35-turbo",
             }
 
             quotas = []
-            hosted_quota_limit = int(app_config.get("HOSTED_AZURE_OPENAI_QUOTA_LIMIT", "1000"))
+            hosted_quota_limit = dify_config.HOSTED_AZURE_OPENAI_QUOTA_LIMIT
             trial_quota = TrialHostingQuota(
                 quota_limit=hosted_quota_limit,
                 restrict_models=[
@@ -121,31 +120,31 @@ class HostingConfiguration:
             quota_unit=quota_unit,
         )
 
-    def init_openai(self, app_config: Config) -> HostingProvider:
+    def init_openai(self) -> HostingProvider:
         quota_unit = QuotaUnit.CREDITS
         quotas = []
 
-        if app_config.get("HOSTED_OPENAI_TRIAL_ENABLED"):
-            hosted_quota_limit = int(app_config.get("HOSTED_OPENAI_QUOTA_LIMIT", "200"))
-            trial_models = self.parse_restrict_models_from_env(app_config, "HOSTED_OPENAI_TRIAL_MODELS")
+        if dify_config.HOSTED_OPENAI_TRIAL_ENABLED:
+            hosted_quota_limit = dify_config.HOSTED_OPENAI_QUOTA_LIMIT
+            trial_models = self.parse_restrict_models_from_env("HOSTED_OPENAI_TRIAL_MODELS")
             trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit, restrict_models=trial_models)
             quotas.append(trial_quota)
 
-        if app_config.get("HOSTED_OPENAI_PAID_ENABLED"):
-            paid_models = self.parse_restrict_models_from_env(app_config, "HOSTED_OPENAI_PAID_MODELS")
+        if dify_config.HOSTED_OPENAI_PAID_ENABLED:
+            paid_models = self.parse_restrict_models_from_env("HOSTED_OPENAI_PAID_MODELS")
             paid_quota = PaidHostingQuota(restrict_models=paid_models)
             quotas.append(paid_quota)
 
         if len(quotas) > 0:
             credentials = {
-                "openai_api_key": app_config.get("HOSTED_OPENAI_API_KEY"),
+                "openai_api_key": dify_config.HOSTED_OPENAI_API_KEY,
             }
 
-            if app_config.get("HOSTED_OPENAI_API_BASE"):
-                credentials["openai_api_base"] = app_config.get("HOSTED_OPENAI_API_BASE")
+            if dify_config.HOSTED_OPENAI_API_BASE:
+                credentials["openai_api_base"] = dify_config.HOSTED_OPENAI_API_BASE
 
-            if app_config.get("HOSTED_OPENAI_API_ORGANIZATION"):
-                credentials["openai_organization"] = app_config.get("HOSTED_OPENAI_API_ORGANIZATION")
+            if dify_config.HOSTED_OPENAI_API_ORGANIZATION:
+                credentials["openai_organization"] = dify_config.HOSTED_OPENAI_API_ORGANIZATION
 
             return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas)
@@ -155,26 +154,26 @@ class HostingConfiguration:
         )
 
     @staticmethod
-    def init_anthropic(app_config: Config) -> HostingProvider:
+    def init_anthropic() -> HostingProvider:
         quota_unit = QuotaUnit.TOKENS
         quotas = []
 
-        if app_config.get("HOSTED_ANTHROPIC_TRIAL_ENABLED"):
-            hosted_quota_limit = int(app_config.get("HOSTED_ANTHROPIC_QUOTA_LIMIT", "0"))
+        if dify_config.HOSTED_ANTHROPIC_TRIAL_ENABLED:
+            hosted_quota_limit = dify_config.HOSTED_ANTHROPIC_QUOTA_LIMIT
             trial_quota = TrialHostingQuota(quota_limit=hosted_quota_limit)
             quotas.append(trial_quota)
 
-        if app_config.get("HOSTED_ANTHROPIC_PAID_ENABLED"):
+        if dify_config.HOSTED_ANTHROPIC_PAID_ENABLED:
             paid_quota = PaidHostingQuota()
             quotas.append(paid_quota)
 
         if len(quotas) > 0:
             credentials = {
-                "anthropic_api_key": app_config.get("HOSTED_ANTHROPIC_API_KEY"),
+                "anthropic_api_key": dify_config.HOSTED_ANTHROPIC_API_KEY,
             }
 
-            if app_config.get("HOSTED_ANTHROPIC_API_BASE"):
-                credentials["anthropic_api_url"] = app_config.get("HOSTED_ANTHROPIC_API_BASE")
+            if dify_config.HOSTED_ANTHROPIC_API_BASE:
+                credentials["anthropic_api_url"] = dify_config.HOSTED_ANTHROPIC_API_BASE
 
             return HostingProvider(enabled=True, credentials=credentials, quota_unit=quota_unit, quotas=quotas)
@@ -184,9 +183,9 @@ class HostingConfiguration:
         )
 
     @staticmethod
-    def init_minimax(app_config: Config) -> HostingProvider:
+    def init_minimax() -> HostingProvider:
         quota_unit = QuotaUnit.TOKENS
-        if app_config.get("HOSTED_MINIMAX_ENABLED"):
+        if dify_config.HOSTED_MINIMAX_ENABLED:
             quotas = [FreeHostingQuota()]
 
             return HostingProvider(
@@ -202,9 +201,9 @@ class HostingConfiguration:
         )
 
     @staticmethod
-    def init_spark(app_config: Config) -> HostingProvider:
+    def init_spark() -> HostingProvider:
         quota_unit = QuotaUnit.TOKENS
-        if app_config.get("HOSTED_SPARK_ENABLED"):
+        if dify_config.HOSTED_SPARK_ENABLED:
             quotas = [FreeHostingQuota()]
 
             return HostingProvider(
@@ -220,9 +219,9 @@ class HostingConfiguration:
         )
 
     @staticmethod
-    def init_zhipuai(app_config: Config) -> HostingProvider:
+    def init_zhipuai() -> HostingProvider:
         quota_unit = QuotaUnit.TOKENS
-        if app_config.get("HOSTED_ZHIPUAI_ENABLED"):
+        if dify_config.HOSTED_ZHIPUAI_ENABLED:
             quotas = [FreeHostingQuota()]
 
             return HostingProvider(
@@ -238,17 +237,15 @@ class HostingConfiguration:
         )
 
     @staticmethod
-    def init_moderation_config(app_config: Config) -> HostedModerationConfig:
-        if app_config.get("HOSTED_MODERATION_ENABLED") and app_config.get("HOSTED_MODERATION_PROVIDERS"):
-            return HostedModerationConfig(
-                enabled=True, providers=app_config.get("HOSTED_MODERATION_PROVIDERS").split(",")
-            )
+    def init_moderation_config() -> HostedModerationConfig:
+        if dify_config.HOSTED_MODERATION_ENABLED and dify_config.HOSTED_MODERATION_PROVIDERS:
+            return HostedModerationConfig(enabled=True, providers=dify_config.HOSTED_MODERATION_PROVIDERS.split(","))
 
         return HostedModerationConfig(enabled=False)
 
     @staticmethod
-    def parse_restrict_models_from_env(app_config: Config, env_var: str) -> list[RestrictModel]:
-        models_str = app_config.get(env_var)
+    def parse_restrict_models_from_env(env_var: str) -> list[RestrictModel]:
+        models_str = dify_config.model_dump().get(env_var)
         models_list = models_str.split(",") if models_str else []
         return [
             RestrictModel(model=model_name.strip(), model_type=ModelType.LLM)
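The last hunk swaps direct env-var reads for a lookup in the dumped settings model, so callers can still pass the field name as a string. A rough illustration of that lookup pattern (the settings class and field below are invented):

```python
# Rough sketch of reading a comma-separated model list from a dumped settings object.
from pydantic_settings import BaseSettings


class HostingSettings(BaseSettings):
    HOSTED_OPENAI_TRIAL_MODELS: str = "gpt-3.5-turbo,gpt-4o-mini"  # illustrative default


settings = HostingSettings()


def parse_restrict_models(env_var: str) -> list[str]:
    # model_dump() turns the settings into a plain dict, so the field name can
    # be passed as a string, mirroring the old os.environ-style API.
    models_str = settings.model_dump().get(env_var)
    if not models_str:
        return []
    return [name.strip() for name in models_str.split(",") if name.strip()]


print(parse_restrict_models("HOSTED_OPENAI_TRIAL_MODELS"))
```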
@@ -2,6 +2,7 @@ from typing import Optional
 
 from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
 from core.file import file_manager
+from core.file.models import FileType
 from core.model_manager import ModelInstance
 from core.model_runtime.entities import (
     AssistantPromptMessage,
@@ -98,6 +99,7 @@ class TokenBufferMemory:
                 prompt_message_contents: list[PromptMessageContent] = []
                 prompt_message_contents.append(TextPromptMessageContent(data=message.query))
                 for file_obj in file_objs:
+                    if file_obj.type in {FileType.IMAGE, FileType.AUDIO}:
                         prompt_message = file_manager.to_prompt_message_content(file_obj)
                         prompt_message_contents.append(prompt_message)
@@ -218,7 +218,7 @@ For instance, Xinference supports `max_tokens`, `temperature`, and `top_p` parameters.
 However, some vendors may support different parameters for different models. For example, the `OpenLLM` vendor supports `top_k`, but not all models provided by this vendor support `top_k`. Let's say model A supports `top_k` but model B does not. In such cases, we need to dynamically generate the model parameter schema, as illustrated below:
 
 ```python
-    def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+    def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
         """
             used to define customizable model schema
         """
@@ -205,7 +205,7 @@ provider_credential_schema:
 但是有的供应商根据不同的模型支持不同的参数,如供应商`OpenLLM`支持`top_k`,但是并不是这个供应商提供的所有模型都支持`top_k`,我们这里举例A模型支持`top_k`,B模型不支持`top_k`,那么我们需要在这里动态生成模型参数的Schema,如下所示:
 
 ```python
-    def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+    def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
         """
             used to define customizable model schema
         """
@@ -33,7 +33,7 @@ class PromptTemplateParser:
             key = match.group(1)
             value = inputs.get(key, match.group(0))  # return original matched string if key not found
 
-            if remove_template_variables:
+            if remove_template_variables and isinstance(value, str):
                 return PromptTemplateParser.remove_template_variables(value, self.with_variable_tmpl)
             return value
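The guarded branch above is the usual regex-substitution template pattern: replace each `{{key}}` marker with the matching input, but only post-process values that are actually strings. A tiny standalone sketch of the idea (simplified, not the project's parser):

```python
# Simplified sketch of template variable substitution with a non-string guard.
import re

VARIABLE_PATTERN = re.compile(r"\{\{([a-zA-Z0-9_]+)\}\}")


def format_template(template: str, inputs: dict) -> str:
    def replacer(match: re.Match) -> str:
        key = match.group(1)
        value = inputs.get(key, match.group(0))  # keep the original token if the key is missing
        # Only strings can be scanned further; other types are rendered as-is.
        return value if isinstance(value, str) else str(value)

    return VARIABLE_PATTERN.sub(replacer, template)


print(format_template("Hello {{name}}, you have {{count}} messages", {"name": "Ada", "count": 3}))
```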
@@ -428,14 +428,13 @@ class QdrantVectorFactory(AbstractVectorFactory):
         if not dataset.index_struct_dict:
             dataset.index_struct = json.dumps(self.gen_index_struct_dict(VectorType.QDRANT, collection_name))
 
-        config = current_app.config
         return QdrantVector(
             collection_name=collection_name,
             group_id=dataset.id,
             config=QdrantConfig(
                 endpoint=dify_config.QDRANT_URL,
                 api_key=dify_config.QDRANT_API_KEY,
-                root_path=config.root_path,
+                root_path=current_app.config.root_path,
                 timeout=dify_config.QDRANT_CLIENT_TIMEOUT,
                 grpc_port=dify_config.QDRANT_GRPC_PORT,
                 prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,
@@ -18,6 +18,7 @@ from core.rag.extractor.extractor_base import BaseExtractor
 from core.rag.models.document import Document
 from extensions.ext_database import db
 from extensions.ext_storage import storage
+from models.enums import CreatedByRole
 from models.model import UploadFile
 
 logger = logging.getLogger(__name__)
@@ -109,9 +110,10 @@ class WordExtractor(BaseExtractor):
                     key=file_key,
                     name=file_key,
                     size=0,
-                    extension=image_ext,
-                    mime_type=mime_type,
+                    extension=str(image_ext),
+                    mime_type=mime_type or "",
                     created_by=self.user_id,
+                    created_by_role=CreatedByRole.ACCOUNT,
                     created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
                     used=True,
                     used_by=self.user_id,
@@ -22,7 +22,7 @@ class WorkflowToolConfigurationUtils:
         if not start_node:
             return []
 
-        return [VariableEntity(**variable) for variable in start_node.get("data", {}).get("variables", [])]
+        return [VariableEntity.model_validate(variable) for variable in start_node.get("data", {}).get("variables", [])]
 
     @classmethod
     def check_is_synced(
@@ -4,7 +4,6 @@ from collections.abc import Mapping, Sequence
 from typing import Any, Union
 
 from pydantic import BaseModel, Field
-from typing_extensions import deprecated
 
 from core.file import File, FileAttribute, file_manager
 from core.variables import Segment, SegmentGroup, Variable
@@ -96,9 +95,6 @@ class VariablePool(BaseModel):
         if len(selector) < 2:
             raise ValueError("Invalid selector")
 
-        if value is None:
-            return
-
         if isinstance(value, Segment):
             v = value
         else:
@@ -136,26 +132,6 @@ class VariablePool(BaseModel):
 
         return value
 
-    @deprecated("This method is deprecated, use `get` instead.")
-    def get_any(self, selector: Sequence[str], /) -> Any | None:
-        """
-        Retrieves the value from the variable pool based on the given selector.
-
-        Args:
-            selector (Sequence[str]): The selector used to identify the variable.
-
-        Returns:
-            Any: The value associated with the given selector.
-
-        Raises:
-            ValueError: If the selector is invalid.
-        """
-        if len(selector) < 2:
-            raise ValueError("Invalid selector")
-        hash_key = hash(tuple(selector[1:]))
-        value = self.variable_dictionary[selector[0]].get(hash_key)
-        return value.to_object() if value else None
-
     def remove(self, selector: Sequence[str], /):
         """
         Remove variables from the variable pool based on the given selector.
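The removed `get_any` shows the pool's lookup strategy: the first selector part names a node and the remaining parts are hashed into a per-node dictionary key. A condensed sketch of that data structure (simplified types, not the real implementation):

```python
# Condensed sketch of a selector-based variable pool.
from collections import defaultdict
from collections.abc import Sequence
from typing import Any


class VariablePoolSketch:
    def __init__(self) -> None:
        self._store: dict[str, dict[int, Any]] = defaultdict(dict)

    def add(self, selector: Sequence[str], value: Any) -> None:
        if len(selector) < 2:
            raise ValueError("Invalid selector")
        self._store[selector[0]][hash(tuple(selector[1:]))] = value

    def get(self, selector: Sequence[str]) -> Any | None:
        if len(selector) < 2:
            raise ValueError("Invalid selector")
        return self._store[selector[0]].get(hash(tuple(selector[1:])))


pool = VariablePoolSketch()
pool.add(["node_1", "index"], 3)
print(pool.get(["node_1", "index"]))  # 3
```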
@@ -41,10 +41,15 @@ class CodeNode(BaseNode[CodeNodeData]):
         # Get variables
         variables = {}
         for variable_selector in self.node_data.variables:
-            variable = variable_selector.variable
-            value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector)
-
-            variables[variable] = value
+            variable_name = variable_selector.variable
+            variable = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector)
+            if variable is None:
+                return NodeRunResult(
+                    status=WorkflowNodeExecutionStatus.FAILED,
+                    inputs=variables,
+                    error=f"Variable `{variable_selector.value_selector}` not found",
+                )
+            variables[variable_name] = variable.to_object()
         # Run code
         try:
             result = CodeExecutor.execute_workflow_code_template(
@@ -1,5 +1,5 @@
 from collections.abc import Sequence
-from typing import Literal, Optional
+from typing import Any, Literal, Optional
 
 import httpx
 from pydantic import BaseModel, Field, ValidationInfo, field_validator
@@ -52,6 +52,16 @@ class HttpRequestNodeBody(BaseModel):
     type: Literal["none", "form-data", "x-www-form-urlencoded", "raw-text", "json", "binary"]
     data: Sequence[BodyData] = Field(default_factory=list)
 
+    @field_validator("data", mode="before")
+    @classmethod
+    def check_data(cls, v: Any):
+        """For compatibility, if body is not set, return empty list."""
+        if not v:
+            return []
+        if isinstance(v, str):
+            return [BodyData(key="", type="text", value=v)]
+        return v
+
 
 class HttpRequestNodeTimeout(BaseModel):
     connect: int = dify_config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT
@@ -1,5 +1,5 @@
 import json
-from collections.abc import Mapping, Sequence
+from collections.abc import Mapping
 from copy import deepcopy
 from random import randint
 from typing import Any, Literal
@@ -60,7 +60,7 @@ class Executor:
         self.method = node_data.method
         self.auth = node_data.authorization
         self.timeout = timeout
-        self.params = None
+        self.params = {}
         self.headers = {}
         self.content = None
         self.files = None
@@ -108,8 +108,10 @@ class Executor:
             case "raw-text":
                 self.content = self.variable_pool.convert_template(data[0].value).text
             case "json":
-                json_object = json.loads(data[0].value)
-                self.json = self._parse_object_contains_variables(json_object)
+                json_string = self.variable_pool.convert_template(data[0].value).text
+                json_object = json.loads(json_string)
+                self.json = json_object
+                # self.json = self._parse_object_contains_variables(json_object)
             case "binary":
                 file_selector = data[0].file
                 file_variable = self.variable_pool.get_file(file_selector)
@@ -274,14 +276,6 @@ class Executor:
 
         return raw
 
-    def _parse_object_contains_variables(self, obj: str | dict | list, /) -> Mapping[str, Any] | Sequence[Any] | str:
-        if isinstance(obj, dict):
-            return {k: self._parse_object_contains_variables(v) for k, v in obj.items()}
-        elif isinstance(obj, list):
-            return [self._parse_object_contains_variables(v) for v in obj]
-        elif isinstance(obj, str):
-            return self.variable_pool.convert_template(obj).text
-
 
 def _plain_text_to_dict(text: str, /) -> dict[str, str]:
     """
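The `json` branch now renders the whole body template first and only then parses the result, instead of parsing and recursively walking the object. A rough sketch of that order of operations, with a stubbed stand-in for the real template converter:

```python
# Sketch: render the body template first, then parse the result as JSON.
import json
from string import Template


def convert_template(raw: str, variables: dict) -> str:
    # Stand-in for the real converter: substitute ${var}-style markers.
    return Template(raw).safe_substitute(variables)


raw_body = '{"query": "${query}", "top_k": 3}'
json_object = json.loads(convert_template(raw_body, {"query": "hello"}))
print(json_object)  # {'query': 'hello', 'top_k': 3}
```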
@@ -5,6 +5,7 @@ from typing import Any, cast
 
 from configs import dify_config
 from core.model_runtime.utils.encoders import jsonable_encoder
+from core.variables import IntegerSegment
 from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult
 from core.workflow.graph_engine.entities.event import (
     BaseGraphEvent,
@@ -46,6 +47,15 @@ class IterationNode(BaseNode[IterationNodeData]):
         if not iterator_list_segment:
             raise ValueError(f"Iterator variable {self.node_data.iterator_selector} not found")
 
+        if len(iterator_list_segment.value) == 0:
+            yield RunCompletedEvent(
+                run_result=NodeRunResult(
+                    status=WorkflowNodeExecutionStatus.SUCCEEDED,
+                    outputs={"output": []},
+                )
+            )
+            return
+
         iterator_list_value = iterator_list_segment.to_object()
 
         if not isinstance(iterator_list_value, list):
@@ -138,9 +148,16 @@ class IterationNode(BaseNode[IterationNodeData]):
 
                     if NodeRunMetadataKey.ITERATION_ID not in metadata:
                         metadata[NodeRunMetadataKey.ITERATION_ID] = self.node_id
-                        metadata[NodeRunMetadataKey.ITERATION_INDEX] = variable_pool.get_any(
-                            [self.node_id, "index"]
-                        )
+                        index_variable = variable_pool.get([self.node_id, "index"])
+                        if not isinstance(index_variable, IntegerSegment):
+                            yield RunCompletedEvent(
+                                run_result=NodeRunResult(
+                                    status=WorkflowNodeExecutionStatus.FAILED,
+                                    error=f"Invalid index variable type: {type(index_variable)}",
+                                )
+                            )
+                            return
+                        metadata[NodeRunMetadataKey.ITERATION_INDEX] = index_variable.value
                         event.route_node_state.node_run_result.metadata = metadata
 
                 yield event
@@ -172,7 +189,16 @@ class IterationNode(BaseNode[IterationNodeData]):
                 yield event
 
             # append to iteration output variable list
-            current_iteration_output = variable_pool.get_any(self.node_data.output_selector)
+            current_iteration_output_variable = variable_pool.get(self.node_data.output_selector)
+            if current_iteration_output_variable is None:
+                yield RunCompletedEvent(
+                    run_result=NodeRunResult(
+                        status=WorkflowNodeExecutionStatus.FAILED,
+                        error=f"Iteration output variable {self.node_data.output_selector} not found",
+                    )
+                )
+                return
+            current_iteration_output = current_iteration_output_variable.to_object()
             outputs.append(current_iteration_output)
 
             # remove all nodes outputs from variable pool
@@ -180,11 +206,11 @@ class IterationNode(BaseNode[IterationNodeData]):
                 variable_pool.remove([node_id])
 
             # move to next iteration
-            current_index = variable_pool.get([self.node_id, "index"])
-            if current_index is None:
+            current_index_variable = variable_pool.get([self.node_id, "index"])
+            if not isinstance(current_index_variable, IntegerSegment):
                 raise ValueError(f"iteration {self.node_id} current index not found")
 
-            next_index = int(current_index.to_object()) + 1
+            next_index = current_index_variable.value + 1
             variable_pool.add([self.node_id, "index"], next_index)
 
             if next_index < len(iterator_list_value):
@@ -196,9 +222,7 @@ class IterationNode(BaseNode[IterationNodeData]):
                     iteration_node_type=self.node_type,
                     iteration_node_data=self.node_data,
                     index=next_index,
-                    pre_iteration_output=jsonable_encoder(current_iteration_output)
-                    if current_iteration_output
-                    else None,
+                    pre_iteration_output=jsonable_encoder(current_iteration_output),
                 )
 
                 yield IterationRunSucceededEvent(
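Several of these hunks replace untyped `get_any` reads with typed segment checks, so a wrong type becomes an explicit, reportable failure rather than a silent `int(...)` cast. A compact sketch of the idea, with simplified stand-in segment classes:

```python
# Compact sketch: typed segments make "wrong type" an explicit, checkable failure.
from dataclasses import dataclass
from typing import Any


@dataclass
class Segment:
    value: Any

    def to_object(self) -> Any:
        return self.value


@dataclass
class IntegerSegment(Segment):
    value: int


def next_iteration_index(segment: Segment | None) -> int:
    if not isinstance(segment, IntegerSegment):
        raise ValueError(f"Invalid index variable type: {type(segment)}")
    return segment.value + 1


print(next_iteration_index(IntegerSegment(0)))  # 1
```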
@@ -14,6 +14,7 @@ from core.model_runtime.entities.model_entities import ModelFeature, ModelType
 from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
 from core.rag.retrieval.dataset_retrieval import DatasetRetrieval
 from core.rag.retrieval.retrieval_methods import RetrievalMethod
+from core.variables import StringSegment
 from core.workflow.entities.node_entities import NodeRunResult
 from core.workflow.nodes.base import BaseNode
 from core.workflow.nodes.enums import NodeType
@@ -39,8 +40,14 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]):
 
     def _run(self) -> NodeRunResult:
         # extract variables
-        variable = self.graph_runtime_state.variable_pool.get_any(self.node_data.query_variable_selector)
-        query = variable
+        variable = self.graph_runtime_state.variable_pool.get(self.node_data.query_variable_selector)
+        if not isinstance(variable, StringSegment):
+            return NodeRunResult(
+                status=WorkflowNodeExecutionStatus.FAILED,
+                inputs={},
+                error="Query variable is not string type.",
+            )
+        query = variable.value
         variables = {"query": query}
         if not query:
             return NodeRunResult(
@@ -1,7 +1,7 @@
 from collections.abc import Sequence
 from typing import Any, Optional
 
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, field_validator
 
 from core.model_runtime.entities import ImagePromptMessageContent
 from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate, MemoryConfig
@@ -30,6 +30,13 @@ class VisionConfig(BaseModel):
     enabled: bool = False
     configs: VisionConfigOptions = Field(default_factory=VisionConfigOptions)
 
+    @field_validator("configs", mode="before")
+    @classmethod
+    def convert_none_configs(cls, v: Any):
+        if v is None:
+            return VisionConfigOptions()
+        return v
+
 
 class PromptConfig(BaseModel):
     jinja2_variables: Optional[list[VariableSelector]] = None
@@ -22,7 +22,15 @@ from core.model_runtime.utils.encoders import jsonable_encoder
 from core.prompt.advanced_prompt_transform import AdvancedPromptTransform
 from core.prompt.entities.advanced_prompt_entities import CompletionModelPromptTemplate, MemoryConfig
 from core.prompt.utils.prompt_message_util import PromptMessageUtil
-from core.variables import ArrayAnySegment, ArrayFileSegment, FileSegment
+from core.variables import (
+    ArrayAnySegment,
+    ArrayFileSegment,
+    ArraySegment,
+    FileSegment,
+    NoneSegment,
+    ObjectSegment,
+    StringSegment,
+)
 from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID
 from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult
 from core.workflow.enums import SystemVariableKey
@@ -263,50 +271,44 @@ class LLMNode(BaseNode[LLMNodeData]):
             return variables
 
         for variable_selector in node_data.prompt_config.jinja2_variables or []:
-            variable = variable_selector.variable
-            value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector)
+            variable_name = variable_selector.variable
+            variable = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector)
+            if variable is None:
+                raise ValueError(f"Variable {variable_selector.variable} not found")
 
-            def parse_dict(d: dict) -> str:
+            def parse_dict(input_dict: Mapping[str, Any]) -> str:
                 """
                 Parse dict into string
                 """
                 # check if it's a context structure
-                if "metadata" in d and "_source" in d["metadata"] and "content" in d:
-                    return d["content"]
+                if "metadata" in input_dict and "_source" in input_dict["metadata"] and "content" in input_dict:
+                    return input_dict["content"]
 
                 # else, parse the dict
                 try:
-                    return json.dumps(d, ensure_ascii=False)
+                    return json.dumps(input_dict, ensure_ascii=False)
                 except Exception:
-                    return str(d)
+                    return str(input_dict)
 
-            if isinstance(value, str):
-                value = value
-            elif isinstance(value, list):
+            if isinstance(variable, ArraySegment):
                 result = ""
-                for item in value:
+                for item in variable.value:
                     if isinstance(item, dict):
                         result += parse_dict(item)
-                    elif isinstance(item, str):
-                        result += item
-                    elif isinstance(item, int | float):
-                        result += str(item)
                     else:
                         result += str(item)
                     result += "\n"
                 value = result.strip()
-            elif isinstance(value, dict):
-                value = parse_dict(value)
-            elif isinstance(value, int | float):
-                value = str(value)
+            elif isinstance(variable, ObjectSegment):
+                value = parse_dict(variable.value)
            else:
-                value = str(value)
+                value = variable.text
 
-            variables[variable] = value
+            variables[variable_name] = value
 
         return variables
 
-    def _fetch_inputs(self, node_data: LLMNodeData) -> dict[str, str]:
+    def _fetch_inputs(self, node_data: LLMNodeData) -> dict[str, Any]:
         inputs = {}
         prompt_template = node_data.prompt_template
@@ -320,11 +322,12 @@ class LLMNode(BaseNode[LLMNodeData]):
         variable_selectors = variable_template_parser.extract_variable_selectors()
 
         for variable_selector in variable_selectors:
-            variable_value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector)
-            if variable_value is None:
+            variable = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector)
+            if variable is None:
                 raise ValueError(f"Variable {variable_selector.variable} not found")
-
-            inputs[variable_selector.variable] = variable_value
+            if isinstance(variable, NoneSegment):
+                continue
+            inputs[variable_selector.variable] = variable.to_object()
 
         memory = node_data.memory
         if memory and memory.query_prompt_template:
@@ -332,11 +335,12 @@ class LLMNode(BaseNode[LLMNodeData]):
                 template=memory.query_prompt_template
             ).extract_variable_selectors()
             for variable_selector in query_variable_selectors:
-                variable_value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector)
-                if variable_value is None:
+                variable = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector)
+                if variable is None:
                     raise ValueError(f"Variable {variable_selector.variable} not found")
-
-                inputs[variable_selector.variable] = variable_value
+                if isinstance(variable, NoneSegment):
+                    continue
+                inputs[variable_selector.variable] = variable.to_object()
 
         return inputs
 
@@ -361,14 +365,14 @@ class LLMNode(BaseNode[LLMNodeData]):
         if not node_data.context.variable_selector:
             return
 
-        context_value = self.graph_runtime_state.variable_pool.get_any(node_data.context.variable_selector)
-        if context_value:
-            if isinstance(context_value, str):
-                yield RunRetrieverResourceEvent(retriever_resources=[], context=context_value)
-            elif isinstance(context_value, list):
+        context_value_variable = self.graph_runtime_state.variable_pool.get(node_data.context.variable_selector)
+        if context_value_variable:
+            if isinstance(context_value_variable, StringSegment):
+                yield RunRetrieverResourceEvent(retriever_resources=[], context=context_value_variable.value)
+            elif isinstance(context_value_variable, ArraySegment):
                 context_str = ""
                 original_retriever_resource = []
-                for item in context_value:
+                for item in context_value_variable.value:
                     if isinstance(item, str):
                         context_str += item + "\n"
                     else:
@@ -482,11 +486,12 @@ class LLMNode(BaseNode[LLMNodeData]):
             return None
 
         # get conversation id
-        conversation_id = self.graph_runtime_state.variable_pool.get_any(
+        conversation_id_variable = self.graph_runtime_state.variable_pool.get(
             ["sys", SystemVariableKey.CONVERSATION_ID.value]
         )
-        if conversation_id is None:
+        if not isinstance(conversation_id_variable, StringSegment):
            return None
+        conversation_id = conversation_id_variable.value
 
         # get conversation
         conversation = (
@@ -33,8 +33,13 @@ class TemplateTransformNode(BaseNode[TemplateTransformNodeData]):
         variables = {}
         for variable_selector in self.node_data.variables:
             variable_name = variable_selector.variable
-            value = self.graph_runtime_state.variable_pool.get_any(variable_selector.value_selector)
-            variables[variable_name] = value
+            value = self.graph_runtime_state.variable_pool.get(variable_selector.value_selector)
+            if value is None:
+                return NodeRunResult(
+                    status=WorkflowNodeExecutionStatus.FAILED,
+                    error=f"Variable {variable_name} not found in variable pool",
+                )
+            variables[variable_name] = value.to_object()
         # Run code
         try:
             result = CodeExecutor.execute_workflow_code_template(
@@ -56,7 +61,7 @@ class TemplateTransformNode(BaseNode[TemplateTransformNodeData]):
 
     @classmethod
     def _extract_variable_selector_to_variable_mapping(
-        cls, graph_config: Mapping[str, Any], node_id: str, node_data: TemplateTransformNodeData
+        cls, *, graph_config: Mapping[str, Any], node_id: str, node_data: TemplateTransformNodeData
     ) -> Mapping[str, Sequence[str]]:
         """
         Extract variable selector to variable mapping
@@ -19,27 +19,27 @@ class VariableAggregatorNode(BaseNode[VariableAssignerNodeData]):
 
         if not self.node_data.advanced_settings or not self.node_data.advanced_settings.group_enabled:
             for selector in self.node_data.variables:
-                variable = self.graph_runtime_state.variable_pool.get_any(selector)
+                variable = self.graph_runtime_state.variable_pool.get(selector)
                 if variable is not None:
-                    outputs = {"output": variable}
+                    outputs = {"output": variable.to_object()}
 
-                    inputs = {".".join(selector[1:]): variable}
+                    inputs = {".".join(selector[1:]): variable.to_object()}
                     break
         else:
             for group in self.node_data.advanced_settings.groups:
                 for selector in group.variables:
-                    variable = self.graph_runtime_state.variable_pool.get_any(selector)
+                    variable = self.graph_runtime_state.variable_pool.get(selector)
 
                     if variable is not None:
-                        outputs[group.group_name] = {"output": variable}
-                        inputs[".".join(selector[1:])] = variable
+                        outputs[group.group_name] = {"output": variable.to_object()}
+                        inputs[".".join(selector[1:])] = variable.to_object()
                         break
 
         return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, outputs=outputs, inputs=inputs)
 
     @classmethod
     def _extract_variable_selector_to_variable_mapping(
-        cls, graph_config: Mapping[str, Any], node_id: str, node_data: VariableAssignerNodeData
+        cls, *, graph_config: Mapping[str, Any], node_id: str, node_data: VariableAssignerNodeData
     ) -> Mapping[str, Sequence[str]]:
         """
         Extract variable selector to variable mapping
@@ -3,6 +3,8 @@ from datetime import timedelta
 from celery import Celery, Task
 from flask import Flask
 
+from configs import dify_config
+
 
 def init_app(app: Flask) -> Celery:
     class FlaskTask(Task):
@@ -12,19 +14,19 @@ def init_app(app: Flask) -> Celery:
 
     broker_transport_options = {}
 
-    if app.config.get("CELERY_USE_SENTINEL"):
+    if dify_config.CELERY_USE_SENTINEL:
         broker_transport_options = {
-            "master_name": app.config.get("CELERY_SENTINEL_MASTER_NAME"),
+            "master_name": dify_config.CELERY_SENTINEL_MASTER_NAME,
             "sentinel_kwargs": {
-                "socket_timeout": app.config.get("CELERY_SENTINEL_SOCKET_TIMEOUT", 0.1),
+                "socket_timeout": dify_config.CELERY_SENTINEL_SOCKET_TIMEOUT,
             },
         }
 
     celery_app = Celery(
         app.name,
         task_cls=FlaskTask,
-        broker=app.config.get("CELERY_BROKER_URL"),
-        backend=app.config.get("CELERY_BACKEND"),
+        broker=dify_config.CELERY_BROKER_URL,
+        backend=dify_config.CELERY_BACKEND,
         task_ignore_result=True,
     )
@@ -37,12 +39,12 @@ def init_app(app: Flask) -> Celery:
     }
 
     celery_app.conf.update(
-        result_backend=app.config.get("CELERY_RESULT_BACKEND"),
+        result_backend=dify_config.CELERY_RESULT_BACKEND,
         broker_transport_options=broker_transport_options,
         broker_connection_retry_on_startup=True,
     )
 
-    if app.config.get("BROKER_USE_SSL"):
+    if dify_config.BROKER_USE_SSL:
         celery_app.conf.update(
             broker_use_ssl=ssl_options,  # Add the SSL options to the broker configuration
         )
@@ -54,7 +56,7 @@ def init_app(app: Flask) -> Celery:
         "schedule.clean_embedding_cache_task",
         "schedule.clean_unused_datasets_task",
     ]
-    day = app.config.get("CELERY_BEAT_SCHEDULER_TIME")
+    day = dify_config.CELERY_BEAT_SCHEDULER_TIME
     beat_schedule = {
         "clean_embedding_cache_task": {
             "task": "schedule.clean_embedding_cache_task.clean_embedding_cache_task",
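The `FlaskTask` class referenced by this extension is the standard pattern (documented in the Flask/Celery guides) for running every Celery task inside the Flask application context. A minimal, generic sketch of that wiring, where the broker URL is just a placeholder:

```python
# Generic sketch of binding Celery tasks to a Flask application context.
from celery import Celery, Task
from flask import Flask


def init_celery(app: Flask) -> Celery:
    class FlaskTask(Task):
        def __call__(self, *args, **kwargs):
            # Every task runs inside the app context, so extensions such as
            # the database session are available to task code.
            with app.app_context():
                return self.run(*args, **kwargs)

    celery_app = Celery(app.name, task_cls=FlaskTask, broker="redis://localhost:6379/0")
    celery_app.set_default()
    app.extensions["celery"] = celery_app
    return celery_app
```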
@@ -1,8 +1,10 @@
 from flask import Flask
 
+from configs import dify_config
+
 
 def init_app(app: Flask):
-    if app.config.get("API_COMPRESSION_ENABLED"):
+    if dify_config.API_COMPRESSION_ENABLED:
         from flask_compress import Compress
 
         app.config["COMPRESS_MIMETYPES"] = [
api/extensions/ext_logging.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+import logging
+import os
+import sys
+from logging.handlers import RotatingFileHandler
+
+from flask import Flask
+
+from configs import dify_config
+
+
+def init_app(app: Flask):
+    log_handlers = None
+    log_file = dify_config.LOG_FILE
+    if log_file:
+        log_dir = os.path.dirname(log_file)
+        os.makedirs(log_dir, exist_ok=True)
+        log_handlers = [
+            RotatingFileHandler(
+                filename=log_file,
+                maxBytes=1024 * 1024 * 1024,
+                backupCount=5,
+            ),
+            logging.StreamHandler(sys.stdout),
+        ]
+
+    logging.basicConfig(
+        level=dify_config.LOG_LEVEL,
+        format=dify_config.LOG_FORMAT,
+        datefmt=dify_config.LOG_DATEFORMAT,
+        handlers=log_handlers,
+        force=True,
+    )
+    log_tz = dify_config.LOG_TZ
+    if log_tz:
+        from datetime import datetime
+
+        import pytz
+
+        timezone = pytz.timezone(log_tz)
+
+        def time_converter(seconds):
+            return datetime.utcfromtimestamp(seconds).astimezone(timezone).timetuple()
+
+        for handler in logging.root.handlers:
+            handler.formatter.converter = time_converter
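The timezone handling at the end of this new file relies on `logging.Formatter.converter`, a callable that maps a record's timestamp to a `time.struct_time`. A tiny standalone example of overriding it (using UTC rather than a configured zone, purely for illustration):

```python
# Standalone example of the formatter.converter override used above:
# render log timestamps in UTC instead of local time.
import logging
import sys
from datetime import datetime, timezone

logging.basicConfig(stream=sys.stdout, level=logging.INFO, format="%(asctime)s %(message)s")


def utc_converter(seconds):
    # Formatter.converter must return a time.struct_time.
    return datetime.fromtimestamp(seconds, tz=timezone.utc).timetuple()


for handler in logging.root.handlers:
    handler.formatter.converter = utc_converter

logging.info("timestamps above are now in UTC")
```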
@@ -4,6 +4,8 @@ from typing import Optional
 import resend
 from flask import Flask
 
+from configs import dify_config
+
 
 class Mail:
     def __init__(self):
@@ -14,41 +16,44 @@ class Mail:
         return self._client is not None
 
     def init_app(self, app: Flask):
-        if app.config.get("MAIL_TYPE"):
-            if app.config.get("MAIL_DEFAULT_SEND_FROM"):
-                self._default_send_from = app.config.get("MAIL_DEFAULT_SEND_FROM")
+        mail_type = dify_config.MAIL_TYPE
+        if not mail_type:
+            logging.warning("MAIL_TYPE is not set")
+            return
 
-            if app.config.get("MAIL_TYPE") == "resend":
-                api_key = app.config.get("RESEND_API_KEY")
+        if dify_config.MAIL_DEFAULT_SEND_FROM:
+            self._default_send_from = dify_config.MAIL_DEFAULT_SEND_FROM
+
+        match mail_type:
+            case "resend":
+                api_key = dify_config.RESEND_API_KEY
                 if not api_key:
                     raise ValueError("RESEND_API_KEY is not set")
 
-                api_url = app.config.get("RESEND_API_URL")
+                api_url = dify_config.RESEND_API_URL
                 if api_url:
                     resend.api_url = api_url
 
                 resend.api_key = api_key
                 self._client = resend.Emails
-            elif app.config.get("MAIL_TYPE") == "smtp":
+            case "smtp":
                 from libs.smtp import SMTPClient
 
-                if not app.config.get("SMTP_SERVER") or not app.config.get("SMTP_PORT"):
+                if not dify_config.SMTP_SERVER or not dify_config.SMTP_PORT:
                     raise ValueError("SMTP_SERVER and SMTP_PORT are required for smtp mail type")
-                if not app.config.get("SMTP_USE_TLS") and app.config.get("SMTP_OPPORTUNISTIC_TLS"):
+                if not dify_config.SMTP_USE_TLS and dify_config.SMTP_OPPORTUNISTIC_TLS:
                     raise ValueError("SMTP_OPPORTUNISTIC_TLS is not supported without enabling SMTP_USE_TLS")
                 self._client = SMTPClient(
-                    server=app.config.get("SMTP_SERVER"),
-                    port=app.config.get("SMTP_PORT"),
-                    username=app.config.get("SMTP_USERNAME"),
-                    password=app.config.get("SMTP_PASSWORD"),
-                    _from=app.config.get("MAIL_DEFAULT_SEND_FROM"),
-                    use_tls=app.config.get("SMTP_USE_TLS"),
-                    opportunistic_tls=app.config.get("SMTP_OPPORTUNISTIC_TLS"),
+                    server=dify_config.SMTP_SERVER,
+                    port=dify_config.SMTP_PORT,
+                    username=dify_config.SMTP_USERNAME,
+                    password=dify_config.SMTP_PASSWORD,
+                    _from=dify_config.MAIL_DEFAULT_SEND_FROM,
+                    use_tls=dify_config.SMTP_USE_TLS,
+                    opportunistic_tls=dify_config.SMTP_OPPORTUNISTIC_TLS,
                )
-            else:
-                raise ValueError("Unsupported mail type {}".format(app.config.get("MAIL_TYPE")))
-        else:
-            logging.warning("MAIL_TYPE is not set")
+            case _:
+                raise ValueError("Unsupported mail type {}".format(mail_type))
 
     def send(self, to: str, subject: str, html: str, from_: Optional[str] = None):
         if not self._client:
@@ -2,6 +2,8 @@ import redis
 from redis.connection import Connection, SSLConnection
 from redis.sentinel import Sentinel
 
+from configs import dify_config
+
 
 class RedisClientWrapper(redis.Redis):
     """
@@ -43,37 +45,37 @@ redis_client = RedisClientWrapper()
 def init_app(app):
     global redis_client
     connection_class = Connection
-    if app.config.get("REDIS_USE_SSL"):
+    if dify_config.REDIS_USE_SSL:
         connection_class = SSLConnection
 
     redis_params = {
-        "username": app.config.get("REDIS_USERNAME"),
-        "password": app.config.get("REDIS_PASSWORD"),
-        "db": app.config.get("REDIS_DB"),
+        "username": dify_config.REDIS_USERNAME,
+        "password": dify_config.REDIS_PASSWORD,
+        "db": dify_config.REDIS_DB,
         "encoding": "utf-8",
         "encoding_errors": "strict",
         "decode_responses": False,
     }
 
-    if app.config.get("REDIS_USE_SENTINEL"):
+    if dify_config.REDIS_USE_SENTINEL:
         sentinel_hosts = [
-            (node.split(":")[0], int(node.split(":")[1])) for node in app.config.get("REDIS_SENTINELS").split(",")
+            (node.split(":")[0], int(node.split(":")[1])) for node in dify_config.REDIS_SENTINELS.split(",")
         ]
         sentinel = Sentinel(
             sentinel_hosts,
             sentinel_kwargs={
-                "socket_timeout": app.config.get("REDIS_SENTINEL_SOCKET_TIMEOUT", 0.1),
-                "username": app.config.get("REDIS_SENTINEL_USERNAME"),
-                "password": app.config.get("REDIS_SENTINEL_PASSWORD"),
+                "socket_timeout": dify_config.REDIS_SENTINEL_SOCKET_TIMEOUT,
+                "username": dify_config.REDIS_SENTINEL_USERNAME,
+                "password": dify_config.REDIS_SENTINEL_PASSWORD,
             },
         )
-        master = sentinel.master_for(app.config.get("REDIS_SENTINEL_SERVICE_NAME"), **redis_params)
+        master = sentinel.master_for(dify_config.REDIS_SENTINEL_SERVICE_NAME, **redis_params)
         redis_client.initialize(master)
     else:
         redis_params.update(
             {
-                "host": app.config.get("REDIS_HOST"),
-                "port": app.config.get("REDIS_PORT"),
+                "host": dify_config.REDIS_HOST,
+                "port": dify_config.REDIS_PORT,
                 "connection_class": connection_class,
             }
        )
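For readers unfamiliar with the Sentinel branch: `Sentinel.master_for` returns a client that re-resolves the current master on failover. A minimal redis-py usage sketch, where the host names and service name are placeholders:

```python
# Minimal redis-py Sentinel sketch; hosts and service name are placeholders.
from redis.sentinel import Sentinel

sentinel = Sentinel(
    [("sentinel-1", 26379), ("sentinel-2", 26379)],
    sentinel_kwargs={"socket_timeout": 0.1},
)

# master_for returns a Redis client that follows the master across failovers.
master = sentinel.master_for("mymaster", db=0, socket_timeout=0.1)
master.set("key", "value")
print(master.get("key"))
```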
@@ -5,6 +5,7 @@ from sentry_sdk.integrations.celery import CeleryIntegration
 from sentry_sdk.integrations.flask import FlaskIntegration
 from werkzeug.exceptions import HTTPException
 
+from configs import dify_config
 from core.model_runtime.errors.invoke import InvokeRateLimitError
 
 
@@ -18,9 +19,9 @@ def before_send(event, hint):
 
 
 def init_app(app):
-    if app.config.get("SENTRY_DSN"):
+    if dify_config.SENTRY_DSN:
         sentry_sdk.init(
-            dsn=app.config.get("SENTRY_DSN"),
+            dsn=dify_config.SENTRY_DSN,
             integrations=[FlaskIntegration(), CeleryIntegration()],
             ignore_errors=[
                 HTTPException,
@@ -29,9 +30,9 @@ def init_app(app):
                 InvokeRateLimitError,
                 parse_error.defaultErrorResponse,
             ],
-            traces_sample_rate=app.config.get("SENTRY_TRACES_SAMPLE_RATE", 1.0),
-            profiles_sample_rate=app.config.get("SENTRY_PROFILES_SAMPLE_RATE", 1.0),
-            environment=app.config.get("DEPLOY_ENV"),
-            release=f"dify-{app.config.get('CURRENT_VERSION')}-{app.config.get('COMMIT_SHA')}",
+            traces_sample_rate=dify_config.SENTRY_TRACES_SAMPLE_RATE,
+            profiles_sample_rate=dify_config.SENTRY_PROFILES_SAMPLE_RATE,
+            environment=dify_config.DEPLOY_ENV,
+            release=f"dify-{dify_config.CURRENT_VERSION}-{dify_config.COMMIT_SHA}",
             before_send=before_send,
         )
@@ -15,7 +15,8 @@ class Storage:

def init_app(self, app: Flask):
storage_factory = self.get_storage_factory(dify_config.STORAGE_TYPE)
self.storage_runner = storage_factory(app=app)
with app.app_context():
self.storage_runner = storage_factory()

@staticmethod
def get_storage_factory(storage_type: str) -> type[BaseStorage]:

@@ -1,29 +1,27 @@
from collections.abc import Generator

import oss2 as aliyun_s3
from flask import Flask

from configs import dify_config
from extensions.storage.base_storage import BaseStorage


class AliyunOssStorage(BaseStorage):
"""Implementation for Aliyun OSS storage."""

def __init__(self, app: Flask):
super().__init__(app)

app_config = self.app.config
self.bucket_name = app_config.get("ALIYUN_OSS_BUCKET_NAME")
self.folder = app.config.get("ALIYUN_OSS_PATH")
def __init__(self):
super().__init__()
self.bucket_name = dify_config.ALIYUN_OSS_BUCKET_NAME
self.folder = dify_config.ALIYUN_OSS_PATH
oss_auth_method = aliyun_s3.Auth
region = None
if app_config.get("ALIYUN_OSS_AUTH_VERSION") == "v4":
if dify_config.ALIYUN_OSS_AUTH_VERSION == "v4":
oss_auth_method = aliyun_s3.AuthV4
region = app_config.get("ALIYUN_OSS_REGION")
oss_auth = oss_auth_method(app_config.get("ALIYUN_OSS_ACCESS_KEY"), app_config.get("ALIYUN_OSS_SECRET_KEY"))
region = dify_config.ALIYUN_OSS_REGION
oss_auth = oss_auth_method(dify_config.ALIYUN_OSS_ACCESS_KEY, dify_config.ALIYUN_OSS_SECRET_KEY)
self.client = aliyun_s3.Bucket(
oss_auth,
app_config.get("ALIYUN_OSS_ENDPOINT"),
dify_config.ALIYUN_OSS_ENDPOINT,
self.bucket_name,
connect_timeout=30,
region=region,

@@ -4,8 +4,8 @@ from collections.abc import Generator
import boto3
from botocore.client import Config
from botocore.exceptions import ClientError
from flask import Flask

from configs import dify_config
from extensions.storage.base_storage import BaseStorage

logger = logging.getLogger(__name__)

@@ -14,26 +14,25 @@ logger = logging.getLogger(__name__)
class AwsS3Storage(BaseStorage):
"""Implementation for Amazon Web Services S3 storage."""

def __init__(self, app: Flask):
super().__init__(app)
app_config = self.app.config
self.bucket_name = app_config.get("S3_BUCKET_NAME")
if app_config.get("S3_USE_AWS_MANAGED_IAM"):
def __init__(self):
super().__init__()
self.bucket_name = dify_config.S3_BUCKET_NAME
if dify_config.S3_USE_AWS_MANAGED_IAM:
logger.info("Using AWS managed IAM role for S3")

session = boto3.Session()
region_name = app_config.get("S3_REGION")
region_name = dify_config.S3_REGION
self.client = session.client(service_name="s3", region_name=region_name)
else:
logger.info("Using ak and sk for S3")

self.client = boto3.client(
"s3",
aws_secret_access_key=app_config.get("S3_SECRET_KEY"),
aws_access_key_id=app_config.get("S3_ACCESS_KEY"),
endpoint_url=app_config.get("S3_ENDPOINT"),
region_name=app_config.get("S3_REGION"),
config=Config(s3={"addressing_style": app_config.get("S3_ADDRESS_STYLE")}),
aws_secret_access_key=dify_config.S3_SECRET_KEY,
aws_access_key_id=dify_config.S3_ACCESS_KEY,
endpoint_url=dify_config.S3_ENDPOINT,
region_name=dify_config.S3_REGION,
config=Config(s3={"addressing_style": dify_config.S3_ADDRESS_STYLE}),
)
# create bucket
try:

@@ -2,8 +2,8 @@ from collections.abc import Generator
from datetime import datetime, timedelta, timezone

from azure.storage.blob import AccountSasPermissions, BlobServiceClient, ResourceTypes, generate_account_sas
from flask import Flask

from configs import dify_config
from extensions.ext_redis import redis_client
from extensions.storage.base_storage import BaseStorage

@@ -11,13 +11,12 @@ from extensions.storage.base_storage import BaseStorage
class AzureBlobStorage(BaseStorage):
"""Implementation for Azure Blob storage."""

def __init__(self, app: Flask):
super().__init__(app)
app_config = self.app.config
self.bucket_name = app_config.get("AZURE_BLOB_CONTAINER_NAME")
self.account_url = app_config.get("AZURE_BLOB_ACCOUNT_URL")
self.account_name = app_config.get("AZURE_BLOB_ACCOUNT_NAME")
self.account_key = app_config.get("AZURE_BLOB_ACCOUNT_KEY")
def __init__(self):
super().__init__()
self.bucket_name = dify_config.AZURE_BLOB_CONTAINER_NAME
self.account_url = dify_config.AZURE_BLOB_ACCOUNT_URL
self.account_name = dify_config.AZURE_BLOB_ACCOUNT_NAME
self.account_key = dify_config.AZURE_BLOB_ACCOUNT_KEY

def save(self, filename, data):
client = self._sync_client()

@@ -5,24 +5,23 @@ from collections.abc import Generator
from baidubce.auth.bce_credentials import BceCredentials
from baidubce.bce_client_configuration import BceClientConfiguration
from baidubce.services.bos.bos_client import BosClient
from flask import Flask

from configs import dify_config
from extensions.storage.base_storage import BaseStorage


class BaiduObsStorage(BaseStorage):
"""Implementation for Baidu OBS storage."""

def __init__(self, app: Flask):
super().__init__(app)
app_config = self.app.config
self.bucket_name = app_config.get("BAIDU_OBS_BUCKET_NAME")
def __init__(self):
super().__init__()
self.bucket_name = dify_config.BAIDU_OBS_BUCKET_NAME
client_config = BceClientConfiguration(
credentials=BceCredentials(
access_key_id=app_config.get("BAIDU_OBS_ACCESS_KEY"),
secret_access_key=app_config.get("BAIDU_OBS_SECRET_KEY"),
access_key_id=dify_config.BAIDU_OBS_ACCESS_KEY,
secret_access_key=dify_config.BAIDU_OBS_SECRET_KEY,
),
endpoint=app_config.get("BAIDU_OBS_ENDPOINT"),
endpoint=dify_config.BAIDU_OBS_ENDPOINT,
)

self.client = BosClient(config=client_config)

@@ -3,16 +3,12 @@
from abc import ABC, abstractmethod
from collections.abc import Generator

from flask import Flask


class BaseStorage(ABC):
"""Interface for file storage."""

app = None

def __init__(self, app: Flask):
self.app = app
def __init__(self):  # noqa: B027
pass

@abstractmethod
def save(self, filename, data):

@@ -3,20 +3,20 @@ import io
import json
from collections.abc import Generator

from flask import Flask
from google.cloud import storage as google_cloud_storage

from configs import dify_config
from extensions.storage.base_storage import BaseStorage


class GoogleCloudStorage(BaseStorage):
"""Implementation for Google Cloud storage."""

def __init__(self, app: Flask):
super().__init__(app)
app_config = self.app.config
self.bucket_name = app_config.get("GOOGLE_STORAGE_BUCKET_NAME")
service_account_json_str = app_config.get("GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64")
def __init__(self):
super().__init__()

self.bucket_name = dify_config.GOOGLE_STORAGE_BUCKET_NAME
service_account_json_str = dify_config.GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64
# if service_account_json_str is empty, use Application Default Credentials
if service_account_json_str:
service_account_json = base64.b64decode(service_account_json_str).decode("utf-8")

@@ -1,22 +1,22 @@
from collections.abc import Generator

from flask import Flask
from obs import ObsClient

from configs import dify_config
from extensions.storage.base_storage import BaseStorage


class HuaweiObsStorage(BaseStorage):
"""Implementation for Huawei OBS storage."""

def __init__(self, app: Flask):
super().__init__(app)
app_config = self.app.config
self.bucket_name = app_config.get("HUAWEI_OBS_BUCKET_NAME")
def __init__(self):
super().__init__()

self.bucket_name = dify_config.HUAWEI_OBS_BUCKET_NAME
self.client = ObsClient(
access_key_id=app_config.get("HUAWEI_OBS_ACCESS_KEY"),
secret_access_key=app_config.get("HUAWEI_OBS_SECRET_KEY"),
server=app_config.get("HUAWEI_OBS_SERVER"),
access_key_id=dify_config.HUAWEI_OBS_ACCESS_KEY,
secret_access_key=dify_config.HUAWEI_OBS_SECRET_KEY,
server=dify_config.HUAWEI_OBS_SERVER,
)

def save(self, filename, data):

@@ -3,19 +3,20 @@ import shutil
from collections.abc import Generator
from pathlib import Path

from flask import Flask
from flask import current_app

from configs import dify_config
from extensions.storage.base_storage import BaseStorage


class LocalFsStorage(BaseStorage):
"""Implementation for local filesystem storage."""

def __init__(self, app: Flask):
super().__init__(app)
folder = self.app.config.get("STORAGE_LOCAL_PATH")
def __init__(self):
super().__init__()
folder = dify_config.STORAGE_LOCAL_PATH
if not os.path.isabs(folder):
folder = os.path.join(app.root_path, folder)
folder = os.path.join(current_app.root_path, folder)
self.folder = folder

def save(self, filename, data):

@@ -2,24 +2,24 @@ from collections.abc import Generator

import boto3
from botocore.exceptions import ClientError
from flask import Flask

from configs import dify_config
from extensions.storage.base_storage import BaseStorage


class OracleOCIStorage(BaseStorage):
"""Implementation for Oracle OCI storage."""

def __init__(self, app: Flask):
super().__init__(app)
app_config = self.app.config
self.bucket_name = app_config.get("OCI_BUCKET_NAME")
def __init__(self):
super().__init__()

self.bucket_name = dify_config.OCI_BUCKET_NAME
self.client = boto3.client(
"s3",
aws_secret_access_key=app_config.get("OCI_SECRET_KEY"),
aws_access_key_id=app_config.get("OCI_ACCESS_KEY"),
endpoint_url=app_config.get("OCI_ENDPOINT"),
region_name=app_config.get("OCI_REGION"),
aws_secret_access_key=dify_config.OCI_SECRET_KEY,
aws_access_key_id=dify_config.OCI_ACCESS_KEY,
endpoint_url=dify_config.OCI_ENDPOINT,
region_name=dify_config.OCI_REGION,
)

def save(self, filename, data):

@@ -2,25 +2,27 @@ import io
from collections.abc import Generator
from pathlib import Path

from flask import Flask
from supabase import Client

from configs import dify_config
from extensions.storage.base_storage import BaseStorage


class SupabaseStorage(BaseStorage):
"""Implementation for supabase obs storage."""

def __init__(self, app: Flask):
super().__init__(app)
app_config = self.app.config
self.bucket_name = app_config.get("SUPABASE_BUCKET_NAME")
self.client = Client(
supabase_url=app_config.get("SUPABASE_URL"), supabase_key=app_config.get("SUPABASE_API_KEY")
)
self.create_bucket(
id=app_config.get("SUPABASE_BUCKET_NAME"), bucket_name=app_config.get("SUPABASE_BUCKET_NAME")
)
def __init__(self):
super().__init__()
if dify_config.SUPABASE_URL is None:
raise ValueError("SUPABASE_URL is not set")
if dify_config.SUPABASE_API_KEY is None:
raise ValueError("SUPABASE_API_KEY is not set")
if dify_config.SUPABASE_BUCKET_NAME is None:
raise ValueError("SUPABASE_BUCKET_NAME is not set")

self.bucket_name = dify_config.SUPABASE_BUCKET_NAME
self.client = Client(supabase_url=dify_config.SUPABASE_URL, supabase_key=dify_config.SUPABASE_API_KEY)
self.create_bucket(id=dify_config.SUPABASE_BUCKET_NAME, bucket_name=dify_config.SUPABASE_BUCKET_NAME)

def create_bucket(self, id, bucket_name):
if not self.bucket_exists():

@@ -1,23 +1,23 @@
from collections.abc import Generator

from flask import Flask
from qcloud_cos import CosConfig, CosS3Client

from configs import dify_config
from extensions.storage.base_storage import BaseStorage


class TencentCosStorage(BaseStorage):
"""Implementation for Tencent Cloud COS storage."""

def __init__(self, app: Flask):
super().__init__(app)
app_config = self.app.config
self.bucket_name = app_config.get("TENCENT_COS_BUCKET_NAME")
def __init__(self):
super().__init__()

self.bucket_name = dify_config.TENCENT_COS_BUCKET_NAME
config = CosConfig(
Region=app_config.get("TENCENT_COS_REGION"),
SecretId=app_config.get("TENCENT_COS_SECRET_ID"),
SecretKey=app_config.get("TENCENT_COS_SECRET_KEY"),
Scheme=app_config.get("TENCENT_COS_SCHEME"),
Region=dify_config.TENCENT_COS_REGION,
SecretId=dify_config.TENCENT_COS_SECRET_ID,
SecretKey=dify_config.TENCENT_COS_SECRET_KEY,
Scheme=dify_config.TENCENT_COS_SCHEME,
)
self.client = CosS3Client(config)

@@ -1,23 +1,22 @@
from collections.abc import Generator

import tos
from flask import Flask

from configs import dify_config
from extensions.storage.base_storage import BaseStorage


class VolcengineTosStorage(BaseStorage):
"""Implementation for Volcengine TOS storage."""

def __init__(self, app: Flask):
super().__init__(app)
app_config = self.app.config
self.bucket_name = app_config.get("VOLCENGINE_TOS_BUCKET_NAME")
def __init__(self):
super().__init__()
self.bucket_name = dify_config.VOLCENGINE_TOS_BUCKET_NAME
self.client = tos.TosClientV2(
ak=app_config.get("VOLCENGINE_TOS_ACCESS_KEY"),
sk=app_config.get("VOLCENGINE_TOS_SECRET_KEY"),
endpoint=app_config.get("VOLCENGINE_TOS_ENDPOINT"),
region=app_config.get("VOLCENGINE_TOS_REGION"),
ak=dify_config.VOLCENGINE_TOS_ACCESS_KEY,
sk=dify_config.VOLCENGINE_TOS_SECRET_KEY,
endpoint=dify_config.VOLCENGINE_TOS_ENDPOINT,
region=dify_config.VOLCENGINE_TOS_REGION,
)

def save(self, filename, data):

@@ -2,6 +2,7 @@ import mimetypes
from collections.abc import Mapping, Sequence
from typing import Any

import httpx
from sqlalchemy import select

from constants import AUDIO_EXTENSIONS, DOCUMENT_EXTENSIONS, IMAGE_EXTENSIONS, VIDEO_EXTENSIONS

@@ -154,7 +155,7 @@ def _build_from_local_file(
file = File(
id=mapping.get("id"),
filename=row.name,
extension=row.extension,
extension="." + row.extension,
mime_type=row.mime_type,
tenant_id=tenant_id,
type=file_type,

@@ -177,25 +178,29 @@ def _build_from_remote_url(
url = mapping.get("url")
if not url:
raise ValueError("Invalid file url")
resp = ssrf_proxy.head(url, follow_redirects=True)
resp.raise_for_status()

resp = ssrf_proxy.head(url, follow_redirects=True)
if resp.status_code == httpx.codes.OK:
# Try to extract filename from response headers or URL
content_disposition = resp.headers.get("Content-Disposition")
if content_disposition:
filename = content_disposition.split("filename=")[-1].strip('"')
else:
filename = url.split("/")[-1].split("?")[0]
# If filename is empty, set a default one
if not filename:
filename = "unknown_file"

# Determine file extension
extension = "." + filename.split(".")[-1] if "." in filename else ".bin"

# Create the File object
file_size = int(resp.headers.get("Content-Length", -1))
mime_type = str(resp.headers.get("Content-Type", ""))
else:
filename = ""
file_size = -1
mime_type = ""

# If filename is empty, set a default one
if not filename:
filename = "unknown_file"
# Determine file extension
extension = "." + filename.split(".")[-1] if "." in filename else ".bin"

if not mime_type:
mime_type, _ = mimetypes.guess_type(url)
file = File(

@@ -12,9 +12,10 @@ from hashlib import sha256
from typing import Any, Optional, Union
from zoneinfo import available_timezones

from flask import Response, current_app, stream_with_context
from flask import Response, stream_with_context
from flask_restful import fields

from configs import dify_config
from core.app.features.rate_limiting.rate_limit import RateLimitGenerator
from core.file import helpers as file_helpers
from extensions.ext_redis import redis_client

@@ -214,7 +215,7 @@ class TokenManager:
if additional_data:
token_data.update(additional_data)

expiry_minutes = current_app.config[f"{token_type.upper()}_TOKEN_EXPIRY_MINUTES"]
expiry_minutes = dify_config.model_dump().get(f"{token_type.upper()}_TOKEN_EXPIRY_MINUTES")
token_key = cls._get_token_key(token, token_type)
expiry_time = int(expiry_minutes * 60)
redis_client.setex(token_key, expiry_time, json.dumps(token_data))

@@ -1,4 +1,3 @@
import os
from functools import wraps

from flask import current_app, g, has_request_context, request

@@ -7,6 +6,7 @@ from flask_login.config import EXEMPT_METHODS
from werkzeug.exceptions import Unauthorized
from werkzeug.local import LocalProxy

from configs import dify_config
from extensions.ext_database import db
from models.account import Account, Tenant, TenantAccountJoin
from models.model import EndUser

@@ -53,8 +53,7 @@ def login_required(func):
@wraps(func)
def decorated_view(*args, **kwargs):
auth_header = request.headers.get("Authorization")
admin_api_key_enable = os.getenv("ADMIN_API_KEY_ENABLE", default="False")
if admin_api_key_enable.lower() == "true":
if dify_config.ADMIN_API_KEY_ENABLE:
if auth_header:
if " " not in auth_header:
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")

@@ -62,10 +61,10 @@ def login_required(func):
auth_scheme = auth_scheme.lower()
if auth_scheme != "bearer":
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
admin_api_key = os.getenv("ADMIN_API_KEY")

admin_api_key = dify_config.ADMIN_API_KEY
if admin_api_key:
if os.getenv("ADMIN_API_KEY") == auth_token:
if admin_api_key == auth_token:
workspace_id = request.headers.get("X-WORKSPACE-ID")
if workspace_id:
tenant_account_join = (

@@ -83,7 +82,7 @@ def login_required(func):
account.current_tenant = tenant
current_app.login_manager._update_request_context_with_user(account)
user_logged_in.send(current_app._get_current_object(), user=_get_user())
if request.method in EXEMPT_METHODS or current_app.config.get("LOGIN_DISABLED"):
if request.method in EXEMPT_METHODS or dify_config.LOGIN_DISABLED:
pass
elif not current_user.is_authenticated:
return current_app.login_manager.unauthorized()

@@ -1404,7 +1404,7 @@ class UploadFile(Base):
size: int,
extension: str,
mime_type: str,
created_by_role: str,
created_by_role: CreatedByRole,
created_by: str,
created_at: datetime,
used: bool,

@@ -1419,7 +1419,7 @@ class UploadFile(Base):
self.size = size
self.extension = extension
self.mime_type = mime_type
self.created_by_role = created_by_role
self.created_by_role = created_by_role.value
self.created_by = created_by
self.created_at = created_at
self.used = used

api/poetry.lock (generated, 199 changes)
@@ -6921,119 +6921,120 @@ files = [

[[package]]
name = "pydantic"
version = "2.8.2"
version = "2.9.2"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"},
{file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"},
{file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"},
{file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"},
]

[package.dependencies]
annotated-types = ">=0.4.0"
pydantic-core = "2.20.1"
annotated-types = ">=0.6.0"
pydantic-core = "2.23.4"
typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""}

[package.extras]
email = ["email-validator (>=2.0.0)"]
timezone = ["tzdata"]

[[package]]
name = "pydantic-core"
version = "2.20.1"
version = "2.23.4"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
files = [
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"},
|
||||
{file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"},
|
||||
{file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"},
|
||||
{file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"},
|
||||
{file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"},
|
||||
{file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"},
|
||||
{file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"},
|
||||
{file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"},
|
||||
{file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"},
|
||||
{file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"},
|
||||
{file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"},
|
||||
{file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"},
|
||||
{file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"},
|
||||
{file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"},
|
||||
{file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"},
|
||||
{file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"},
|
||||
{file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"},
|
||||
{file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"},
|
||||
{file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"},
|
||||
{file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"},
|
||||
{file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"},
|
||||
{file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"},
|
||||
{file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"},
|
||||
{file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"},
|
||||
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"},
|
||||
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"},
|
||||
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"},
|
||||
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"},
|
||||
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"},
|
||||
{file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"},
|
||||
{file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"},
|
||||
{file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"},
|
||||
{file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"},
|
||||
{file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"},
|
||||
{file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"},
|
||||
{file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"},
|
||||
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"},
|
||||
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"},
|
||||
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"},
|
||||
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"},
|
||||
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"},
|
||||
{file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"},
|
||||
{file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"},
|
||||
{file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"},
|
||||
{file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"},
|
||||
{file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"},
|
||||
{file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"},
|
||||
{file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"},
|
||||
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"},
|
||||
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"},
|
||||
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"},
|
||||
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"},
|
||||
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"},
|
||||
{file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"},
|
||||
{file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"},
|
||||
{file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"},
|
||||
{file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"},
|
||||
{file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"},
|
||||
{file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"},
|
||||
{file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"},
|
||||
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"},
|
||||
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"},
|
||||
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"},
|
||||
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"},
|
||||
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"},
|
||||
{file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"},
|
||||
{file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"},
|
||||
{file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"},
|
||||
{file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"},
|
||||
{file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"},
|
||||
{file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"},
|
||||
{file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"},
|
||||
{file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"},
|
||||
{file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"},
|
||||
{file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"},
|
||||
{file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"},
|
||||
{file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"},
|
||||
{file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"},
|
||||
{file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"},
|
||||
{file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"},
|
||||
{file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"},
|
||||
{file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"},
|
||||
{file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"},
|
||||
{file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"},
|
||||
{file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"},
|
||||
{file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"},
|
||||
{file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"},
|
||||
{file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"},
|
||||
{file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"},
|
||||
{file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"},
|
||||
{file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"},
|
||||
{file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"},
|
||||
{file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"},
|
||||
{file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"},
|
||||
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"},
|
||||
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"},
|
||||
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"},
|
||||
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"},
|
||||
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"},
|
||||
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"},
|
||||
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"},
|
||||
{file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"},
|
||||
{file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"},
|
||||
{file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"},
|
||||
{file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"},
|
||||
{file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"},
|
||||
{file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"},
|
||||
{file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"},
|
||||
{file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"},
|
||||
{file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"},
|
||||
{file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"},
|
||||
]

[package.dependencies]

@@ -7063,13 +7064,13 @@ semver = ["semver (>=3.0.2)"]

[[package]]
name = "pydantic-settings"
version = "2.4.0"
version = "2.6.0"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
files = [
{file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"},
{file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"},
{file = "pydantic_settings-2.6.0-py3-none-any.whl", hash = "sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0"},
{file = "pydantic_settings-2.6.0.tar.gz", hash = "sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188"},
]

[package.dependencies]

@@ -10795,4 +10796,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
content-hash = "642b2dae9e18ee6671d3d2c7129cb9a77327b69dacba996d00de2a9475d5bad3"
content-hash = "5b102e3bc077ed730e9fb7be9015541111ffe7787888372d50a757aecb1d9eff"

@@ -87,14 +87,6 @@ ignore = [
"tests/*" = [
"F811", # redefined-while-unused
"F401", # unused-import
"PT001", # missing-function-docstring
"PT004", # missing-parameter-docstring
]
"core/rag/extractor/word_extractor.py" = [
"RUF100", # Unused `noqa` directive
]
"core/tools/provider/builtin/gitlab/tools/gitlab_commits.py" = [
"PLR1714", # Consider merging multiple comparisons
]

[tool.ruff.lint.pyflakes]

@@ -163,8 +155,8 @@ openpyxl = "~3.1.5"
pandas = { version = "~2.2.2", extras = ["performance", "excel"] }
psycopg2-binary = "~2.9.6"
pycryptodome = "3.19.1"
pydantic = "~2.8.2"
pydantic-settings = "~2.4.0"
pydantic = "~2.9.2"
pydantic-settings = "~2.6.0"
pydantic_extra_types = "~2.9.0"
pyjwt = "~2.8.0"
pypdfium2 = "~4.17.0"

@@ -15,8 +15,6 @@ class EnterpriseRequest:
@classmethod
def send_request(cls, method, endpoint, json=None, params=None):
headers = {"Content-Type": "application/json", "Enterprise-Api-Secret-Key": cls.secret_key}

url = f"{cls.base_url}{endpoint}"
response = requests.request(method, url, json=json, params=params, headers=headers, proxies=cls.proxies)

return response.json()

@@ -20,6 +20,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.account import Account
from models.enums import CreatedByRole
from models.model import EndUser, UploadFile
from services.errors.file import FileNotExistsError, FileTooLargeError, UnsupportedFileTypeError

@@ -85,7 +86,7 @@ class FileService:
            size=file_size,
            extension=extension,
            mime_type=file.mimetype,
            created_by_role=("account" if isinstance(user, Account) else "end_user"),
            created_by_role=(CreatedByRole.ACCOUNT if isinstance(user, Account) else CreatedByRole.END_USER),
            created_by=user.id,
            created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
            used=False,

@@ -118,6 +119,7 @@ class FileService:
            extension="txt",
            mime_type="text/plain",
            created_by=current_user.id,
            created_by_role=CreatedByRole.ACCOUNT,
            created_at=datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None),
            used=True,
            used_by=current_user.id,

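Note: the hunks above replace the bare "account" / "end_user" literals with CreatedByRole members. A sketch of how such an enum can keep the persisted values identical to the old strings; this is an assumption about the shape of models/enums.py, not a copy of it:

from enum import Enum


class CreatedByRole(str, Enum):
    # Assumed shape: members serialize to the same strings the code used before.
    ACCOUNT = "account"
    END_USER = "end_user"


assert CreatedByRole.ACCOUNT == "account"  # the str mixin keeps equality with the old literal
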
@@ -1,6 +1,7 @@
import pytest

from app_factory import create_app
from configs import dify_config

mock_user = type(
    "MockUser",

@@ -20,5 +21,5 @@ mock_user = type(
@pytest.fixture
def app():
    app = create_app()
    app.config["LOGIN_DISABLED"] = True
    dify_config.LOGIN_DISABLED = True
    return app

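Note: the fixture above now toggles LOGIN_DISABLED on the shared dify_config object instead of app.config. A rough sketch of the same pattern inside an individual test, assuming plain attribute assignment on the settings object is allowed (as the fixture implies):

def test_something_without_login():
    # Save and restore the flag so other tests are unaffected.
    original = dify_config.LOGIN_DISABLED
    dify_config.LOGIN_DISABLED = True
    try:
        ...  # exercise code that would otherwise require an authenticated user
    finally:
        dify_config.LOGIN_DISABLED = original
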
@@ -102,6 +102,8 @@ def test_execute_code(setup_code_executor_mock):
    }

    node = init_code_node(code_config)
    node.graph_runtime_state.variable_pool.add(["1", "123", "args1"], 1)
    node.graph_runtime_state.variable_pool.add(["1", "123", "args2"], 2)

    # execute node
    result = node._run()

@@ -146,6 +148,8 @@ def test_execute_code_output_validator(setup_code_executor_mock):
    }

    node = init_code_node(code_config)
    node.graph_runtime_state.variable_pool.add(["1", "123", "args1"], 1)
    node.graph_runtime_state.variable_pool.add(["1", "123", "args2"], 2)

    # execute node
    result = node._run()

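Note: the added variable_pool.add calls seed the node's inputs (args1, args2) under the upstream node id before _run() executes. Conceptually the pool behaves like a mapping keyed by a selector path; a simplified stand-in, not the real VariablePool implementation:

pool: dict[tuple[str, ...], object] = {}


def add(selector: list[str], value: object) -> None:
    # Store the value under the full selector path, e.g. [node_id, variable_name].
    pool[tuple(selector)] = value


add(["1", "123", "args1"], 1)
add(["1", "123", "args2"], 2)
assert pool[("1", "123", "args2")] == 2
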
@@ -1,3 +1,5 @@
import json

import httpx

from core.app.entities.app_invoke_entities import InvokeFrom

@@ -14,7 +16,8 @@ from core.workflow.nodes.http_request import (
    HttpRequestNodeBody,
    HttpRequestNodeData,
)
from core.workflow.nodes.http_request.executor import _plain_text_to_dict
from core.workflow.nodes.http_request.entities import HttpRequestNodeTimeout
from core.workflow.nodes.http_request.executor import Executor, _plain_text_to_dict
from models.enums import UserFrom
from models.workflow import WorkflowNodeExecutionStatus, WorkflowType

@@ -200,3 +203,167 @@ def test_http_request_node_form_with_file(monkeypatch):
    assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED
    assert result.outputs is not None
    assert result.outputs["body"] == ""


def test_executor_with_json_body_and_number_variable():
    # Prepare the variable pool
    variable_pool = VariablePool(
        system_variables={},
        user_inputs={},
    )
    variable_pool.add(["pre_node_id", "number"], 42)

    # Prepare the node data
    node_data = HttpRequestNodeData(
        title="Test JSON Body with Number Variable",
        method="post",
        url="https://api.example.com/data",
        authorization=HttpRequestNodeAuthorization(type="no-auth"),
        headers="Content-Type: application/json",
        params="",
        body=HttpRequestNodeBody(
            type="json",
            data=[
                BodyData(
                    key="",
                    type="text",
                    value='{"number": {{#pre_node_id.number#}}}',
                )
            ],
        ),
    )

    # Initialize the Executor
    executor = Executor(
        node_data=node_data,
        timeout=HttpRequestNodeTimeout(connect=10, read=30, write=30),
        variable_pool=variable_pool,
    )

    # Check the executor's data
    assert executor.method == "post"
    assert executor.url == "https://api.example.com/data"
    assert executor.headers == {"Content-Type": "application/json"}
    assert executor.params == {}
    assert executor.json == {"number": 42}
    assert executor.data is None
    assert executor.files is None
    assert executor.content is None

    # Check the raw request (to_log method)
    raw_request = executor.to_log()
    assert "POST /data HTTP/1.1" in raw_request
    assert "Host: api.example.com" in raw_request
    assert "Content-Type: application/json" in raw_request
    assert '{"number": 42}' in raw_request


def test_executor_with_json_body_and_object_variable():
    # Prepare the variable pool
    variable_pool = VariablePool(
        system_variables={},
        user_inputs={},
    )
    variable_pool.add(["pre_node_id", "object"], {"name": "John Doe", "age": 30, "email": "john@example.com"})

    # Prepare the node data
    node_data = HttpRequestNodeData(
        title="Test JSON Body with Object Variable",
        method="post",
        url="https://api.example.com/data",
        authorization=HttpRequestNodeAuthorization(type="no-auth"),
        headers="Content-Type: application/json",
        params="",
        body=HttpRequestNodeBody(
            type="json",
            data=[
                BodyData(
                    key="",
                    type="text",
                    value="{{#pre_node_id.object#}}",
                )
            ],
        ),
    )

    # Initialize the Executor
    executor = Executor(
        node_data=node_data,
        timeout=HttpRequestNodeTimeout(connect=10, read=30, write=30),
        variable_pool=variable_pool,
    )

    # Check the executor's data
    assert executor.method == "post"
    assert executor.url == "https://api.example.com/data"
    assert executor.headers == {"Content-Type": "application/json"}
    assert executor.params == {}
    assert executor.json == {"name": "John Doe", "age": 30, "email": "john@example.com"}
    assert executor.data is None
    assert executor.files is None
    assert executor.content is None

    # Check the raw request (to_log method)
    raw_request = executor.to_log()
    assert "POST /data HTTP/1.1" in raw_request
    assert "Host: api.example.com" in raw_request
    assert "Content-Type: application/json" in raw_request
    assert '"name": "John Doe"' in raw_request
    assert '"age": 30' in raw_request
    assert '"email": "john@example.com"' in raw_request


def test_executor_with_json_body_and_nested_object_variable():
    # Prepare the variable pool
    variable_pool = VariablePool(
        system_variables={},
        user_inputs={},
    )
    variable_pool.add(["pre_node_id", "object"], {"name": "John Doe", "age": 30, "email": "john@example.com"})

    # Prepare the node data
    node_data = HttpRequestNodeData(
        title="Test JSON Body with Nested Object Variable",
        method="post",
        url="https://api.example.com/data",
        authorization=HttpRequestNodeAuthorization(type="no-auth"),
        headers="Content-Type: application/json",
        params="",
        body=HttpRequestNodeBody(
            type="json",
            data=[
                BodyData(
                    key="",
                    type="text",
                    value='{"object": {{#pre_node_id.object#}}}',
                )
            ],
        ),
    )

    # Initialize the Executor
    executor = Executor(
        node_data=node_data,
        timeout=HttpRequestNodeTimeout(connect=10, read=30, write=30),
        variable_pool=variable_pool,
    )

    # Check the executor's data
    assert executor.method == "post"
    assert executor.url == "https://api.example.com/data"
    assert executor.headers == {"Content-Type": "application/json"}
    assert executor.params == {}
    assert executor.json == {"object": {"name": "John Doe", "age": 30, "email": "john@example.com"}}
    assert executor.data is None
    assert executor.files is None
    assert executor.content is None

    # Check the raw request (to_log method)
    raw_request = executor.to_log()
    assert "POST /data HTTP/1.1" in raw_request
    assert "Host: api.example.com" in raw_request
    assert "Content-Type: application/json" in raw_request
    assert '"object": {' in raw_request
    assert '"name": "John Doe"' in raw_request
    assert '"age": 30' in raw_request
    assert '"email": "john@example.com"' in raw_request

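Note: the new tests above verify that {{#node_id.variable#}} markers inside a JSON body are replaced with pooled values — a plain number, a whole object, and an object nested under a key — before the request is built. A rough sketch of the idea being tested, assuming values are JSON-serialized into the template and then parsed; this is not the Executor's actual code:

import json
import re

SELECTOR = re.compile(r"\{\{#([^#]+)#\}\}")


def render_json_body(template: str, variables: dict[str, object]) -> dict:
    # Replace each {{#node.var#}} marker with a JSON-serialized value, then parse the result.
    def repl(match: re.Match) -> str:
        return json.dumps(variables[match.group(1)])

    return json.loads(SELECTOR.sub(repl, template))


body = render_json_body('{"number": {{#pre_node_id.number#}}}', {"pre_node_id.number": 42})
assert body == {"number": 42}
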
@@ -25,7 +25,7 @@ class VolcengineTosTest:
        return cls._instance

    def __init__(self):
        self.storage = VolcengineTosStorage(app=Flask(__name__))
        self.storage = VolcengineTosStorage()
        self.storage.bucket_name = get_example_bucket()
        self.storage.client = TosClientV2(
            ak="dify",

@@ -23,10 +23,6 @@ const titleClassName = `
const descriptionClassName = `
  mt-1 text-xs font-normal text-gray-500
`
const inputClassName = `
  mt-2 w-full px-3 py-2 bg-gray-100 rounded
  text-sm font-normal text-gray-800
`

const validPassword = /^(?=.*[a-zA-Z])(?=.*\d).{8,}$/

@@ -1,10 +1,9 @@
'use client'
import { useRef, useState } from 'react'
import { useState } from 'react'
import { t } from 'i18next'
import { useParams, usePathname } from 'next/navigation'
import s from './style.module.css'
import Tooltip from '@/app/components/base/tooltip'
import { randomString } from '@/utils'
import Loading from '@/app/components/base/loading'
import { AudioPlayerManager } from '@/app/components/base/audio-btn/audio.player.manager'

@@ -28,7 +27,6 @@ const AudioBtn = ({
}: AudioBtnProps) => {
  const [audioState, setAudioState] = useState<AudioState>('initial')

  const selector = useRef(`play-tooltip-${randomString(4)}`)
  const params = useParams()
  const pathname = usePathname()
  const audio_finished_call = (event: string): any => {

@@ -1,119 +0,0 @@
import {
  memo,
  useMemo,
} from 'react'
import { useTranslation } from 'react-i18next'
import type { OnFeaturesChange } from '../types'
import { useFeatures } from '../hooks'
import FileUpload from './file-upload'
import OpeningStatement from './opening-statement'
import type { OpeningStatementProps } from './opening-statement'
import SuggestedQuestionsAfterAnswer from './suggested-questions-after-answer'
import TextToSpeech from './text-to-speech'
import SpeechToText from './speech-to-text'
import Citation from './citation'
import Moderation from './moderation'
import type { InputVar } from '@/app/components/workflow/types'

export type FeaturePanelProps = {
  onChange?: OnFeaturesChange
  openingStatementProps: OpeningStatementProps
  disabled?: boolean
  workflowVariables: InputVar[]
}
const FeaturePanel = ({
  onChange,
  openingStatementProps,
  disabled,
  workflowVariables,
}: FeaturePanelProps) => {
  const { t } = useTranslation()
  const features = useFeatures(s => s.features)

  const showAdvanceFeature = useMemo(() => {
    return features.opening?.enabled || features.suggested?.enabled || features.speech2text?.enabled || features.text2speech?.enabled || features.citation?.enabled
  }, [features])

  const showToolFeature = useMemo(() => {
    return features.moderation?.enabled
  }, [features])

  return (
    <div className='space-y-3'>
      <FileUpload
        onChange={onChange}
        disabled={disabled}
      />
      {
        showAdvanceFeature && (
          <div>
            <div className='flex items-center'>
              <div className='shrink-0 text-xs font-semibold text-gray-500'>
                {t('appDebug.feature.groupChat.title')}
              </div>
              <div
                className='grow ml-3 h-[1px]'
                style={{ background: 'linear-gradient(270deg, rgba(243, 244, 246, 0) 0%, #F3F4F6 100%)' }}
              ></div>
            </div>
            <div className='py-2 space-y-2'>
              {
                features.opening?.enabled && (
                  <OpeningStatement
                    {...openingStatementProps}
                    onChange={onChange}
                    readonly={disabled}
                    workflowVariables={workflowVariables}
                  />
                )
              }
              {
                features.suggested?.enabled && (
                  <SuggestedQuestionsAfterAnswer />
                )
              }
              {
                features.text2speech?.enabled && (
                  <TextToSpeech onChange={onChange} disabled={disabled} />
                )
              }
              {
                features.speech2text?.enabled && (
                  <SpeechToText />
                )
              }
              {
                features.citation?.enabled && (
                  <Citation />
                )
              }
            </div>
          </div>
        )
      }
      {
        showToolFeature && (
          <div>
            <div className='flex items-center'>
              <div className='shrink-0 text-xs font-semibold text-gray-500'>
                {t('appDebug.feature.groupChat.title')}
              </div>
              <div
                className='grow ml-3 h-[1px]'
                style={{ background: 'linear-gradient(270deg, rgba(243, 244, 246, 0) 0%, #F3F4F6 100%)' }}
              ></div>
            </div>
            <div className='py-2 space-y-2'>
              {
                features.moderation?.enabled && (
                  <Moderation onChange={onChange} disabled={disabled} />
                )
              }
            </div>
          </div>
        )
      }
    </div>
  )
}
export default memo(FeaturePanel)

@@ -1,328 +0,0 @@
/* eslint-disable multiline-ternary */
'use client'
import type { FC } from 'react'
import React, { useEffect, useRef, useState } from 'react'
import produce from 'immer'
import {
  RiAddLine,
  RiDeleteBinLine,
} from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import { useBoolean } from 'ahooks'
import { ReactSortable } from 'react-sortablejs'
import {
  useFeatures,
  useFeaturesStore,
} from '../../hooks'
import type { OnFeaturesChange } from '../../types'
import cn from '@/utils/classnames'
import Panel from '@/app/components/app/configuration/base/feature-panel'
import Button from '@/app/components/base/button'
import OperationBtn from '@/app/components/app/configuration/base/operation-btn'
import { getInputKeys } from '@/app/components/base/block-input'
import ConfirmAddVar from '@/app/components/app/configuration/config-prompt/confirm-add-var'
import { getNewVar } from '@/utils/var'
import { varHighlightHTML } from '@/app/components/app/configuration/base/var-highlight'
import type { PromptVariable } from '@/models/debug'
import type { InputVar } from '@/app/components/workflow/types'

const MAX_QUESTION_NUM = 5

export type OpeningStatementProps = {
  onChange?: OnFeaturesChange
  readonly?: boolean
  promptVariables?: PromptVariable[]
  onAutoAddPromptVariable: (variable: PromptVariable[]) => void
  workflowVariables?: InputVar[]
}

// regex to match the {{}} and replace it with a span
const regex = /\{\{([^}]+)\}\}/g

const OpeningStatement: FC<OpeningStatementProps> = ({
  onChange,
  readonly,
  promptVariables = [],
  onAutoAddPromptVariable,
  workflowVariables = [],
}) => {
  const { t } = useTranslation()
  const featureStore = useFeaturesStore()
  const openingStatement = useFeatures(s => s.features.opening)
  const value = openingStatement?.opening_statement || ''
  const suggestedQuestions = openingStatement?.suggested_questions || []
  const [notIncludeKeys, setNotIncludeKeys] = useState<string[]>([])

  const hasValue = !!(value || '').trim()
  const inputRef = useRef<HTMLTextAreaElement>(null)

  const [isFocus, { setTrue: didSetFocus, setFalse: setBlur }] = useBoolean(false)

  const setFocus = () => {
    didSetFocus()
    setTimeout(() => {
      const input = inputRef.current
      if (input) {
        input.focus()
        input.setSelectionRange(input.value.length, input.value.length)
      }
    }, 0)
  }

  const [tempValue, setTempValue] = useState(value)
  useEffect(() => {
    setTempValue(value || '')
  }, [value])

  const [tempSuggestedQuestions, setTempSuggestedQuestions] = useState(suggestedQuestions || [])
  const notEmptyQuestions = tempSuggestedQuestions.filter(question => !!question && question.trim())
  const coloredContent = (tempValue || '')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(regex, varHighlightHTML({ name: '$1' })) // `<span class="${highLightClassName}">{{$1}}</span>`
    .replace(/\n/g, '<br />')

  const handleEdit = () => {
    if (readonly)
      return
    setFocus()
  }

  const [isShowConfirmAddVar, { setTrue: showConfirmAddVar, setFalse: hideConfirmAddVar }] = useBoolean(false)

  const handleCancel = () => {
    setBlur()
    setTempValue(value)
    setTempSuggestedQuestions(suggestedQuestions)
  }

  const handleConfirm = () => {
    const keys = getInputKeys(tempValue)
    const promptKeys = promptVariables.map(item => item.key)
    const workflowVariableKeys = workflowVariables.map(item => item.variable)
    let notIncludeKeys: string[] = []

    if (promptKeys.length === 0 && workflowVariables.length === 0) {
      if (keys.length > 0)
        notIncludeKeys = keys
    }
    else {
      if (workflowVariables.length > 0)
        notIncludeKeys = keys.filter(key => !workflowVariableKeys.includes(key))

      else notIncludeKeys = keys.filter(key => !promptKeys.includes(key))
    }

    if (notIncludeKeys.length > 0) {
      setNotIncludeKeys(notIncludeKeys)
      showConfirmAddVar()
      return
    }
    setBlur()
    const { getState } = featureStore!
    const {
      features,
      setFeatures,
    } = getState()

    const newFeatures = produce(features, (draft) => {
      if (draft.opening) {
        draft.opening.opening_statement = tempValue
        draft.opening.suggested_questions = tempSuggestedQuestions
      }
    })
    setFeatures(newFeatures)

    if (onChange)
      onChange(newFeatures)
  }

  const cancelAutoAddVar = () => {
    const { getState } = featureStore!
    const {
      features,
      setFeatures,
    } = getState()

    const newFeatures = produce(features, (draft) => {
      if (draft.opening)
        draft.opening.opening_statement = tempValue
    })
    setFeatures(newFeatures)

    if (onChange)
      onChange(newFeatures)
    hideConfirmAddVar()
    setBlur()
  }

  const autoAddVar = () => {
    const { getState } = featureStore!
    const {
      features,
      setFeatures,
    } = getState()

    const newFeatures = produce(features, (draft) => {
      if (draft.opening)
        draft.opening.opening_statement = tempValue
    })
    setFeatures(newFeatures)
    if (onChange)
      onChange(newFeatures)
    onAutoAddPromptVariable([...notIncludeKeys.map(key => getNewVar(key, 'string'))])
    hideConfirmAddVar()
    setBlur()
  }

  const headerRight = !readonly ? (
    isFocus ? (
      <div className='flex items-center space-x-1'>
        <Button
          variant='ghost'
          size='small'
          onClick={handleCancel}
        >
          {t('common.operation.cancel')}
        </Button>
        <Button size='small' onClick={handleConfirm} variant="primary">{t('common.operation.save')}</Button>
      </div>
    ) : (
      <OperationBtn type='edit' actionName={hasValue ? '' : t('appDebug.openingStatement.writeOpener') as string} onClick={handleEdit} />
    )
  ) : null

  const renderQuestions = () => {
    return isFocus ? (
      <div>
        <div className='flex items-center py-2'>
          <div className='shrink-0 flex space-x-0.5 leading-[18px] text-xs font-medium text-gray-500'>
            <div className='uppercase'>{t('appDebug.openingStatement.openingQuestion')}</div>
            <div>·</div>
            <div>{tempSuggestedQuestions.length}/{MAX_QUESTION_NUM}</div>
          </div>
          <div className='ml-3 grow w-0 h-px bg-[#243, 244, 246]'></div>
        </div>
        <ReactSortable
          className="space-y-1"
          list={tempSuggestedQuestions.map((name, index) => {
            return {
              id: index,
              name,
            }
          })}
          setList={list => setTempSuggestedQuestions(list.map(item => item.name))}
          handle='.handle'
          ghostClass="opacity-50"
          animation={150}
        >
          {tempSuggestedQuestions.map((question, index) => {
            return (
              <div className='group relative rounded-lg border border-gray-200 flex items-center pl-2.5 hover:border-gray-300 hover:bg-white' key={index}>
                <div className='handle flex items-center justify-center w-4 h-4 cursor-grab'>
                  <svg width="6" height="10" viewBox="0 0 6 10" fill="none" xmlns="http://www.w3.org/2000/svg">
                    <path fillRule="evenodd" clipRule="evenodd" d="M1 2C1.55228 2 2 1.55228 2 1C2 0.447715 1.55228 0 1 0C0.447715 0 0 0.447715 0 1C0 1.55228 0.447715 2 1 2ZM1 6C1.55228 6 2 5.55228 2 5C2 4.44772 1.55228 4 1 4C0.447715 4 0 4.44772 0 5C0 5.55228 0.447715 6 1 6ZM6 1C6 1.55228 5.55228 2 5 2C4.44772 2 4 1.55228 4 1C4 0.447715 4.44772 0 5 0C5.55228 0 6 0.447715 6 1ZM5 6C5.55228 6 6 5.55228 6 5C6 4.44772 5.55228 4 5 4C4.44772 4 4 4.44772 4 5C4 5.55228 4.44772 6 5 6ZM2 9C2 9.55229 1.55228 10 1 10C0.447715 10 0 9.55229 0 9C0 8.44771 0.447715 8 1 8C1.55228 8 2 8.44771 2 9ZM5 10C5.55228 10 6 9.55229 6 9C6 8.44771 5.55228 8 5 8C4.44772 8 4 8.44771 4 9C4 9.55229 4.44772 10 5 10Z" fill="#98A2B3" />
                  </svg>
                </div>
                <input
                  type="input"
                  value={question || ''}
                  onChange={(e) => {
                    const value = e.target.value
                    setTempSuggestedQuestions(tempSuggestedQuestions.map((item, i) => {
                      if (index === i)
                        return value

                      return item
                    }))
                  }}
                  className={'w-full overflow-x-auto pl-1.5 pr-8 text-sm leading-9 text-gray-900 border-0 grow h-9 bg-transparent focus:outline-none cursor-pointer rounded-lg'}
                />

                <div
                  className='block absolute top-1/2 translate-y-[-50%] right-1.5 p-1 rounded-md cursor-pointer hover:bg-[#FEE4E2] hover:text-[#D92D20]'
                  onClick={() => {
                    setTempSuggestedQuestions(tempSuggestedQuestions.filter((_, i) => index !== i))
                  }}
                >
                  <RiDeleteBinLine className='w-3.5 h-3.5' />
                </div>
              </div>
            )
          })}</ReactSortable>
        {tempSuggestedQuestions.length < MAX_QUESTION_NUM && (
          <div
            onClick={() => { setTempSuggestedQuestions([...tempSuggestedQuestions, '']) }}
            className='mt-1 flex items-center h-9 px-3 gap-2 rounded-lg cursor-pointer text-gray-400 bg-gray-100 hover:bg-gray-200'>
            <RiAddLine className='w-4 h-4' />
            <div className='text-gray-500 text-[13px]'>{t('appDebug.variableConfig.addOption')}</div>
          </div>
        )}
      </div>
    ) : (
      <div className='mt-1.5 flex flex-wrap'>
        {notEmptyQuestions.map((question, index) => {
          return (
            <div key={index} className='mt-1 mr-1 max-w-full truncate last:mr-0 shrink-0 leading-8 items-center px-2.5 rounded-lg border border-gray-200 shadow-xs bg-white text-[13px] font-normal text-gray-900 cursor-pointer'>
              {question}
            </div>
          )
        })}
      </div>
    )
  }

  return (
    <Panel
      className={cn(isShowConfirmAddVar && 'h-[220px]', 'relative !bg-gray-25')}
      title={t('appDebug.openingStatement.title')}
      headerIcon={
        <svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
          <path fillRule="evenodd" clipRule="evenodd" d="M8.33353 1.33301C4.83572 1.33301 2.00019 4.16854 2.00019 7.66634C2.00019 8.37301 2.11619 9.05395 2.3307 9.69036C2.36843 9.80229 2.39063 9.86853 2.40507 9.91738L2.40979 9.93383L2.40729 9.93903C2.39015 9.97437 2.36469 10.0218 2.31705 10.11L1.2158 12.1484C1.14755 12.2746 1.07633 12.4064 1.02735 12.5209C0.978668 12.6348 0.899813 12.8437 0.938613 13.0914C0.984094 13.3817 1.15495 13.6373 1.40581 13.7903C1.61981 13.9208 1.843 13.9279 1.96683 13.9264C2.09141 13.925 2.24036 13.9095 2.38314 13.8947L5.81978 13.5395C5.87482 13.5338 5.9036 13.5309 5.92468 13.5292L5.92739 13.529L5.93564 13.532C5.96154 13.5413 5.99666 13.5548 6.0573 13.5781C6.76459 13.8506 7.53244 13.9997 8.33353 13.9997C11.8313 13.9997 14.6669 11.1641 14.6669 7.66634C14.6669 4.16854 11.8313 1.33301 8.33353 1.33301ZM5.9799 5.72116C6.73142 5.08698 7.73164 5.27327 8.33144 5.96584C8.93125 5.27327 9.91854 5.09365 10.683 5.72116C11.4474 6.34867 11.5403 7.41567 10.9501 8.16572C10.5845 8.6304 9.6668 9.47911 9.02142 10.0576C8.78435 10.2702 8.66582 10.3764 8.52357 10.4192C8.40154 10.456 8.26134 10.456 8.13931 10.4192C7.99706 10.3764 7.87853 10.2702 7.64147 10.0576C6.99609 9.47911 6.07839 8.6304 5.71276 8.16572C5.12259 7.41567 5.22839 6.35534 5.9799 5.72116Z" fill="#E74694" />
        </svg>
      }
      headerRight={headerRight}
      hasHeaderBottomBorder={!hasValue}
      isFocus={isFocus}
    >
      <div className='text-gray-700 text-sm'>
        {(hasValue || (!hasValue && isFocus)) ? (
          <>
            {isFocus
              ? (
                <div>
                  <textarea
                    ref={inputRef}
                    value={tempValue}
                    rows={3}
                    onChange={e => setTempValue(e.target.value)}
                    className="w-full px-0 text-sm border-0 bg-transparent focus:outline-none "
                    placeholder={t('appDebug.openingStatement.placeholder') as string}
                  >
                  </textarea>
                </div>
              )
              : (
                <div dangerouslySetInnerHTML={{
                  __html: coloredContent,
                }}></div>
              )}
            {renderQuestions()}
          </>) : (
          <div className='pt-2 pb-1 text-xs text-gray-500'>{t('appDebug.openingStatement.noDataPlaceHolder')}</div>
        )}

        {isShowConfirmAddVar && (
          <ConfirmAddVar
            varNameArr={notIncludeKeys}
            onConfirm={autoAddVar}
            onCancel={cancelAutoAddVar}
            onHide={hideConfirmAddVar}
          />
        )}

      </div>
    </Panel>
  )
}
export default React.memo(OpeningStatement)

@@ -45,7 +45,7 @@ const DialogWrapper = ({
          leaveFrom="opacity-100 scale-100"
          leaveTo="opacity-0 scale-95"
        >
          <Dialog.Panel className={cn('grow flex relative w-[420px] h-0 p-0 overflow-hidden text-left align-middle transition-all transform bg-components-panel-bg-alt border-components-panel-border shadow-xl', inWorkflow ? 'border-t-[0.5px] border-l-[0.5px] border-b-[0.5px] rounded-l-2xl' : 'border-[0.5px] rounded-2xl', className)}>
          <Dialog.Panel className={cn('grow flex flex-col relative w-[420px] h-0 p-0 overflow-hidden text-left align-middle transition-all transform bg-components-panel-bg-alt border-components-panel-border shadow-xl', inWorkflow ? 'border-t-[0.5px] border-l-[0.5px] border-b-[0.5px] rounded-l-2xl' : 'border-[0.5px] rounded-2xl', className)}>
            {children}
          </Dialog.Panel>
        </Transition.Child>

@@ -80,7 +80,7 @@ const NewFeaturePanel = ({
            <span>{isChatMode ? t('workflow.common.fileUploadTip') : t('workflow.common.ImageUploadLegacyTip')}</span>
            <a
              className='text-text-accent'
              href={`https://docs.dify.ai/${locale === LanguagesSupported[1] && 'v/zh-hans/'}'guides/workflow/bulletin`}
              href={`https://docs.dify.ai/${locale === LanguagesSupported[1] ? 'v/zh-hans/' : ''}guides/workflow/bulletin`}
              target='_blank' rel='noopener noreferrer'
            >{t('workflow.common.featuresDocLink')}</a>
          </div>

@@ -52,7 +52,7 @@ export const getInputVars = (text: string): ValueSelector[] => {

export const FILE_EXTS: Record<string, string[]> = {
  [SupportUploadFileTypes.image]: ['JPG', 'JPEG', 'PNG', 'GIF', 'WEBP', 'SVG'],
  [SupportUploadFileTypes.document]: ['TXT', 'MARKDOWN', 'PDF', 'HTML', 'XLSX', 'XLS', 'DOCX', 'CSV', 'EML', 'MSG', 'PPTX', 'PPT', 'XML', 'EPUB'],
  [SupportUploadFileTypes.document]: ['TXT', 'MD', 'MARKDOWN', 'PDF', 'HTML', 'XLSX', 'XLS', 'DOCX', 'CSV', 'EML', 'MSG', 'PPTX', 'PPT', 'XML', 'EPUB'],
  [SupportUploadFileTypes.audio]: ['MP3', 'M4A', 'WAV', 'WEBM', 'AMR'],
  [SupportUploadFileTypes.video]: ['MP4', 'MOV', 'MPEG', 'MPGA'],
}

@@ -24,7 +24,7 @@ const LanguageSelect: FC<ILanguageSelectProps> = ({
      disabled={disabled}
      htmlContent={
        <div className='w-full py-1'>
          {languages.filter(language => language.supported).map(({ prompt_name, name }) => (
          {languages.filter(language => language.supported).map(({ prompt_name }) => (
            <div
              key={prompt_name}
              className='py-2 px-3 mx-1 flex items-center gap-2 hover:bg-gray-100 rounded-lg cursor-pointer text-gray-700 text-sm'