Merge branch 'deploy/dev' of github.com:langgenius/dify into deploy/dev
Some checks failed
Build and Push API & Web / build (api, DIFY_API_IMAGE_NAME, linux/amd64, build-api-amd64) (push) Has been cancelled
Build and Push API & Web / build (api, DIFY_API_IMAGE_NAME, linux/arm64, build-api-arm64) (push) Has been cancelled
Build and Push API & Web / build (web, DIFY_WEB_IMAGE_NAME, linux/amd64, build-web-amd64) (push) Has been cancelled
Build and Push API & Web / build (web, DIFY_WEB_IMAGE_NAME, linux/arm64, build-web-arm64) (push) Has been cancelled
Build and Push API & Web / create-manifest (api, DIFY_API_IMAGE_NAME, merge-api-images) (push) Has been cancelled
Build and Push API & Web / create-manifest (web, DIFY_WEB_IMAGE_NAME, merge-web-images) (push) Has been cancelled

This commit is contained in:
NFish 2024-11-12 14:47:55 +08:00
commit 8cc63ffd65
22 changed files with 121 additions and 43 deletions

View File

@ -1,4 +1,5 @@
import os
import sys
from configs import dify_config
@ -29,6 +30,9 @@ from models import account, dataset, model, source, task, tool, tools, web # no
# DO NOT REMOVE ABOVE
if sys.version_info[:2] == (3, 10):
print("Warning: Python 3.10 will not be supported in the next version.")
warnings.simplefilter("ignore", ResourceWarning)

View File

@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
CURRENT_VERSION: str = Field(
description="Dify version",
default="0.11.0",
default="0.11.1",
)
COMMIT_SHA: str = Field(

View File

@ -617,6 +617,10 @@ class OpenAILargeLanguageModel(_CommonOpenAI, LargeLanguageModel):
# o1 compatibility
block_as_stream = False
if model.startswith("o1"):
if "max_tokens" in model_parameters:
model_parameters["max_completion_tokens"] = model_parameters["max_tokens"]
del model_parameters["max_tokens"]
if stream:
block_as_stream = True
stream = False

View File

@ -50,7 +50,7 @@ class WordExtractor(BaseExtractor):
self.web_path = self.file_path
# TODO: use a better way to handle the file
with tempfile.NamedTemporaryFile(delete=False) as self.temp_file:
self.temp_file = tempfile.NamedTemporaryFile() # noqa: SIM115
self.temp_file.write(r.content)
self.file_path = self.temp_file.name
elif not os.path.isfile(self.file_path):

View File

@ -1,6 +1,6 @@
from typing import Literal, Optional
from pydantic import BaseModel
from pydantic import BaseModel, Field, field_validator
from core.model_runtime.utils.encoders import jsonable_encoder
from core.tools.entities.common_entities import I18nObject
@ -32,9 +32,14 @@ class UserToolProvider(BaseModel):
original_credentials: Optional[dict] = None
is_team_authorization: bool = False
allow_delete: bool = True
tools: list[UserTool] | None = None
tools: list[UserTool] = Field(default_factory=list)
labels: list[str] | None = None
@field_validator("tools", mode="before")
@classmethod
def convert_none_to_empty_list(cls, v):
    # Pre-validation coercion: callers/serialized data may supply `tools`
    # as None; normalize it to an empty list so the `list[UserTool]` field
    # (with its default_factory=list) always validates.
    return v if v is not None else []
def to_dict(self) -> dict:
# -------------
# overwrite tool parameter types for temp fix

View File

@ -5,7 +5,7 @@ identity:
en_US: Gitee AI
zh_Hans: Gitee AI
description:
en_US: 快速体验大模型,领先探索 AI 开源世界
en_US: Quickly experience large models and explore the leading AI open source world
zh_Hans: 快速体验大模型,领先探索 AI 开源世界
icon: icon.svg
tags:

View File

@ -32,7 +32,7 @@ parameters:
en_US: RAG Model for your database DDL
zh_Hans: 存储数据库训练数据的RAG模型
llm_description: RAG Model for generating SQL
form: form
form: llm
- name: db_type
type: select
required: true
@ -136,7 +136,7 @@ parameters:
human_description:
en_US: DDL statements for training data
zh_Hans: 用于训练RAG Model的建表语句
form: form
form: llm
- name: question
type: string
required: false
@ -146,7 +146,7 @@ parameters:
human_description:
en_US: Question-SQL Pairs
zh_Hans: Question-SQL中的问题
form: form
form: llm
- name: sql
type: string
required: false
@ -156,7 +156,7 @@ parameters:
human_description:
en_US: SQL queries to your training data
zh_Hans: 用于训练RAG Model的SQL语句
form: form
form: llm
- name: memos
type: string
required: false
@ -166,7 +166,7 @@ parameters:
human_description:
en_US: Sometimes you may want to add documentation about your business terminology or definitions
zh_Hans: 添加更多关于数据库的业务说明
form: form
form: llm
- name: enable_training
type: boolean
required: false

View File

@ -555,6 +555,7 @@ class ToolManager:
"""
get tool provider
"""
provider_name = provider
provider: ApiToolProvider = (
db.session.query(ApiToolProvider)
.filter(
@ -565,7 +566,7 @@ class ToolManager:
)
if provider is None:
raise ValueError(f"you have not added provider {provider}")
raise ValueError(f"you have not added provider {provider_name}")
try:
credentials = json.loads(provider.credentials_str) or {}

View File

@ -356,3 +356,19 @@ def content_digest(element):
digest.update(child.encode("utf-8"))
digest = digest.hexdigest()
return digest
def get_image_upload_file_ids(content):
    """Extract upload-file IDs from markdown image links embedded in *content*.

    Scans for ``![image](<url>)`` links whose URL ends with ``file-preview``
    or ``image-preview`` and returns the ``files/<id>/...`` path-segment IDs,
    in order of appearance.

    :param content: text (e.g. a document segment body) to scan
    :return: list of upload-file ID strings (empty when none are found)
    """
    # `https?` matches both http and https links; the previous `http?`
    # pattern (optional "p") could never match an https:// URL.
    pattern = r"!\[image\]\((https?://.*?(file-preview|image-preview))\)"
    matches = re.findall(pattern, content)
    image_upload_file_ids = []
    for url, preview_type in matches:
        # The file ID sits between `files/` and the preview suffix.
        content_pattern = r"files/([^/]+)/" + preview_type
        content_match = re.search(content_pattern, url)
        if content_match:
            image_upload_file_ids.append(content_match.group(1))
    return image_upload_file_ids

View File

@ -489,7 +489,10 @@ class IterationNode(BaseNode[IterationNodeData]):
)
yield metadata_event
current_iteration_output = variable_pool.get(self.node_data.output_selector).value
current_output_segment = variable_pool.get(self.node_data.output_selector)
if current_output_segment is None:
raise IterationNodeError("iteration output selector not found")
current_iteration_output = current_output_segment.value
outputs[current_index] = current_iteration_output
# remove all nodes outputs from variable pool
for node_id in iteration_graph.node_ids:

View File

@ -59,4 +59,4 @@ class ListOperatorNodeData(BaseNodeData):
filter_by: FilterBy
order_by: OrderBy
limit: Limit
extract_by: ExtractConfig
extract_by: ExtractConfig = Field(default_factory=ExtractConfig)

View File

@ -46,7 +46,6 @@ def init_app(app: Flask) -> Celery:
broker_connection_retry_on_startup=True,
worker_log_format=dify_config.LOG_FORMAT,
worker_task_log_format=dify_config.LOG_FORMAT,
worker_logfile=dify_config.LOG_FILE,
worker_hijack_root_logger=False,
timezone=pytz.timezone(dify_config.LOG_TZ),
)
@ -56,6 +55,11 @@ def init_app(app: Flask) -> Celery:
broker_use_ssl=ssl_options, # Add the SSL options to the broker configuration
)
if dify_config.LOG_FILE:
celery_app.conf.update(
worker_logfile=dify_config.LOG_FILE,
)
celery_app.set_default()
app.extensions["celery"] = celery_app

View File

@ -9,19 +9,21 @@ from configs import dify_config
def init_app(app: Flask):
log_handlers = None
log_handlers = []
log_file = dify_config.LOG_FILE
if log_file:
log_dir = os.path.dirname(log_file)
os.makedirs(log_dir, exist_ok=True)
log_handlers = [
log_handlers.append(
RotatingFileHandler(
filename=log_file,
maxBytes=dify_config.LOG_FILE_MAX_SIZE * 1024 * 1024,
backupCount=dify_config.LOG_FILE_BACKUP_COUNT,
),
logging.StreamHandler(sys.stdout),
]
)
)
# Always add StreamHandler to log to console
log_handlers.append(logging.StreamHandler(sys.stdout))
logging.basicConfig(
level=dify_config.LOG_LEVEL,

View File

@ -113,8 +113,10 @@ class ApiToolManageService:
if schema_type not in [member.value for member in ApiProviderSchemaType]:
raise ValueError(f"invalid schema type {schema}")
provider_name = provider_name.strip()
# check if the provider exists
provider: ApiToolProvider = (
provider = (
db.session.query(ApiToolProvider)
.filter(
ApiToolProvider.tenant_id == tenant_id,
@ -199,21 +201,21 @@ class ApiToolManageService:
return {"schema": schema}
@staticmethod
def list_api_tool_provider_tools(user_id: str, tenant_id: str, provider: str) -> list[UserTool]:
def list_api_tool_provider_tools(user_id: str, tenant_id: str, provider_name: str) -> list[UserTool]:
"""
list api tool provider tools
"""
provider: ApiToolProvider = (
provider = (
db.session.query(ApiToolProvider)
.filter(
ApiToolProvider.tenant_id == tenant_id,
ApiToolProvider.name == provider,
ApiToolProvider.name == provider_name,
)
.first()
)
if provider is None:
raise ValueError(f"you have not added provider {provider}")
raise ValueError(f"you have not added provider {provider_name}")
controller = ToolTransformService.api_provider_to_controller(db_provider=provider)
labels = ToolLabelManager.get_tool_labels(controller)
@ -246,8 +248,10 @@ class ApiToolManageService:
if schema_type not in [member.value for member in ApiProviderSchemaType]:
raise ValueError(f"invalid schema type {schema}")
provider_name = provider_name.strip()
# check if the provider exists
provider: ApiToolProvider = (
provider = (
db.session.query(ApiToolProvider)
.filter(
ApiToolProvider.tenant_id == tenant_id,
@ -314,7 +318,7 @@ class ApiToolManageService:
"""
delete tool provider
"""
provider: ApiToolProvider = (
provider = (
db.session.query(ApiToolProvider)
.filter(
ApiToolProvider.tenant_id == tenant_id,
@ -364,7 +368,7 @@ class ApiToolManageService:
if tool_bundle is None:
raise ValueError(f"invalid tool name {tool_name}")
db_provider: ApiToolProvider = (
db_provider = (
db.session.query(ApiToolProvider)
.filter(
ApiToolProvider.tenant_id == tenant_id,

View File

@ -5,6 +5,7 @@ import click
from celery import shared_task
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.tools.utils.web_reader_tool import get_image_upload_file_ids
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.dataset import (
@ -67,6 +68,16 @@ def clean_dataset_task(
db.session.delete(document)
for segment in segments:
image_upload_file_ids = get_image_upload_file_ids(segment.content)
for upload_file_id in image_upload_file_ids:
image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
try:
storage.delete(image_file.key)
except Exception:
logging.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_is: {}".format(upload_file_id)
)
db.session.delete(segment)
db.session.query(DatasetProcessRule).filter(DatasetProcessRule.dataset_id == dataset_id).delete()

View File

@ -6,6 +6,7 @@ import click
from celery import shared_task
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from core.tools.utils.web_reader_tool import get_image_upload_file_ids
from extensions.ext_database import db
from extensions.ext_storage import storage
from models.dataset import Dataset, DocumentSegment
@ -40,6 +41,16 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
index_processor.clean(dataset, index_node_ids)
for segment in segments:
image_upload_file_ids = get_image_upload_file_ids(segment.content)
for upload_file_id in image_upload_file_ids:
image_file = db.session.query(UploadFile).filter(UploadFile.id == upload_file_id).first()
try:
storage.delete(image_file.key)
except Exception:
logging.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_is: {}".format(upload_file_id)
)
db.session.delete(segment)
db.session.commit()

View File

@ -2,7 +2,7 @@ version: '3'
services:
# API service
api:
image: langgenius/dify-api:0.11.0
image: langgenius/dify-api:0.11.1
restart: always
environment:
# Startup mode, 'api' starts the API server.
@ -227,7 +227,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.11.0
image: langgenius/dify-api:0.11.1
restart: always
environment:
CONSOLE_WEB_URL: ''
@ -397,7 +397,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.11.0
image: langgenius/dify-web:0.11.1
restart: always
environment:
# The base URL of console application api server, refers to the Console base URL of WEB service if console domain is

View File

@ -280,7 +280,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:0.11.0
image: langgenius/dify-api:0.11.1
restart: always
environment:
# Use the shared environment variables.
@ -300,7 +300,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.11.0
image: langgenius/dify-api:0.11.1
restart: always
environment:
# Use the shared environment variables.
@ -319,7 +319,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.11.0
image: langgenius/dify-web:0.11.1
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@ -261,6 +261,10 @@ const SettingsModal: FC<ISettingsModalProps> = ({
onChange={onChange('chatColorTheme')}
placeholder='E.g #A020F0'
/>
<div className="mt-1 flex justify-between items-center">
<p className={`ml-2 ${s.settingsTip} text-gray-500`}>{t(`${prefixSettings}.chatColorThemeInverted`)}</p>
<Switch defaultValue={inputInfo.chatColorThemeInverted} onChange={v => setInputInfo({ ...inputInfo, chatColorThemeInverted: v })}></Switch>
</div>
</>}
{systemFeatures.enable_web_sso_switch_component && <div className='w-full mt-8'>
<p className='system-xs-medium text-gray-500'>{t(`${prefixSettings}.sso.label`)}</p>

View File

@ -94,6 +94,7 @@ const TextGeneration: FC<IMainProps> = ({
const [isCallBatchAPI, setIsCallBatchAPI] = useState(false)
const isInBatchTab = currentTab === 'batch'
const [inputs, setInputs] = useState<Record<string, any>>({})
const inputsRef = useRef(inputs)
const [appId, setAppId] = useState<string>('')
const [siteInfo, setSiteInfo] = useState<SiteInfo | null>(null)
const [canReplaceLogo, setCanReplaceLogo] = useState<boolean>(false)
@ -604,6 +605,7 @@ const TextGeneration: FC<IMainProps> = ({
<RunOnce
siteInfo={siteInfo}
inputs={inputs}
inputsRef={inputsRef}
onInputsChange={setInputs}
promptConfig={promptConfig}
onSend={handleSend}

View File

@ -1,5 +1,5 @@
import type { FC, FormEvent } from 'react'
import React from 'react'
import React, { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import {
PlayIcon,
@ -19,6 +19,7 @@ export type IRunOnceProps = {
siteInfo: SiteInfo
promptConfig: PromptConfig
inputs: Record<string, any>
inputsRef: React.MutableRefObject<Record<string, any>>
onInputsChange: (inputs: Record<string, any>) => void
onSend: () => void
visionConfig: VisionSettings
@ -27,6 +28,7 @@ export type IRunOnceProps = {
const RunOnce: FC<IRunOnceProps> = ({
promptConfig,
inputs,
inputsRef,
onInputsChange,
onSend,
visionConfig,
@ -47,6 +49,11 @@ const RunOnce: FC<IRunOnceProps> = ({
onSend()
}
const handleInputsChange = useCallback((newInputs: Record<string, any>) => {
onInputsChange(newInputs)
inputsRef.current = newInputs
}, [onInputsChange, inputsRef])
return (
<div className="">
<section>
@ -60,7 +67,7 @@ const RunOnce: FC<IRunOnceProps> = ({
<Select
className='w-full'
defaultValue={inputs[item.key]}
onSelect={(i) => { onInputsChange({ ...inputs, [item.key]: i.value }) }}
onSelect={(i) => { handleInputsChange({ ...inputsRef.current, [item.key]: i.value }) }}
items={(item.options || []).map(i => ({ name: i, value: i }))}
allowSearch={false}
bgClassName='bg-gray-50'
@ -72,7 +79,7 @@ const RunOnce: FC<IRunOnceProps> = ({
className="block w-full p-2 text-gray-900 border border-gray-300 rounded-lg bg-gray-50 sm:text-xs focus:ring-blue-500 focus:border-blue-500 "
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs[item.key]}
onChange={(e) => { onInputsChange({ ...inputs, [item.key]: e.target.value }) }}
onChange={(e) => { handleInputsChange({ ...inputsRef.current, [item.key]: e.target.value }) }}
maxLength={item.max_length || DEFAULT_VALUE_MAX_LEN}
/>
)}
@ -81,7 +88,7 @@ const RunOnce: FC<IRunOnceProps> = ({
className='h-[104px] sm:text-xs'
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs[item.key]}
onChange={(e) => { onInputsChange({ ...inputs, [item.key]: e.target.value }) }}
onChange={(e) => { handleInputsChange({ ...inputsRef.current, [item.key]: e.target.value }) }}
/>
)}
{item.type === 'number' && (
@ -90,12 +97,12 @@ const RunOnce: FC<IRunOnceProps> = ({
className="block w-full p-2 text-gray-900 border border-gray-300 rounded-lg bg-gray-50 sm:text-xs focus:ring-blue-500 focus:border-blue-500 "
placeholder={`${item.name}${!item.required ? `(${t('appDebug.variableTable.optional')})` : ''}`}
value={inputs[item.key]}
onChange={(e) => { onInputsChange({ ...inputs, [item.key]: e.target.value }) }}
onChange={(e) => { handleInputsChange({ ...inputsRef.current, [item.key]: e.target.value }) }}
/>
)}
{item.type === 'file' && (
<FileUploaderInAttachmentWrapper
onChange={(files) => { onInputsChange({ ...inputs, [item.key]: getProcessedFiles(files)[0] }) }}
onChange={(files) => { handleInputsChange({ ...inputsRef.current, [item.key]: getProcessedFiles(files)[0] }) }}
fileConfig={{
...item.config,
fileUploadConfig: (visionConfig as any).fileUploadConfig,
@ -104,7 +111,7 @@ const RunOnce: FC<IRunOnceProps> = ({
)}
{item.type === 'file-list' && (
<FileUploaderInAttachmentWrapper
onChange={(files) => { onInputsChange({ ...inputs, [item.key]: getProcessedFiles(files) }) }}
onChange={(files) => { handleInputsChange({ ...inputsRef.current, [item.key]: getProcessedFiles(files) }) }}
fileConfig={{
...item.config,
fileUploadConfig: (visionConfig as any).fileUploadConfig,

View File

@ -1,6 +1,6 @@
{
"name": "dify-web",
"version": "0.11.0",
"version": "0.11.1",
"private": true,
"engines": {
"node": ">=18.17.0"