Merge branch 'main' into fix/chore-fix

Yeuoly 2024-10-17 13:46:43 +08:00
commit fa7215cfea
No known key found for this signature in database
GPG Key ID: A66E7E320FB19F61
97 changed files with 7121 additions and 1570 deletions

View File

@ -27,18 +27,17 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Install Poetry
uses: abatilo/actions-poetry@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'
cache-dependency-path: |
api/pyproject.toml
api/poetry.lock
- name: Install Poetry
uses: abatilo/actions-poetry@v3
- name: Check Poetry lockfile
run: |
poetry check -C api --lock

View File

@ -23,18 +23,17 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Install Poetry
uses: abatilo/actions-poetry@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'
cache-dependency-path: |
api/pyproject.toml
api/poetry.lock
- name: Install Poetry
uses: abatilo/actions-poetry@v3
- name: Install dependencies
run: poetry install -C api

View File

@ -24,15 +24,16 @@ jobs:
with:
files: api/**
- name: Install Poetry
uses: abatilo/actions-poetry@v3
- name: Set up Python
uses: actions/setup-python@v5
if: steps.changed-files.outputs.any_changed == 'true'
with:
python-version: '3.10'
- name: Install Poetry
if: steps.changed-files.outputs.any_changed == 'true'
uses: abatilo/actions-poetry@v3
- name: Python dependencies
if: steps.changed-files.outputs.any_changed == 'true'
run: poetry install -C api --only lint

View File

@ -85,3 +85,4 @@
cd ../
poetry run -C api bash dev/pytest/pytest_all_tests.sh
```

View File

@ -554,11 +554,16 @@ class DataSetConfig(BaseSettings):
Configuration for dataset management
"""
CLEAN_DAY_SETTING: PositiveInt = Field(
description="Interval in days for dataset cleanup operations",
PLAN_SANDBOX_CLEAN_DAY_SETTING: PositiveInt = Field(
description="Interval in days for dataset cleanup operations - plan: sandbox",
default=30,
)
PLAN_PRO_CLEAN_DAY_SETTING: PositiveInt = Field(
description="Interval in days for dataset cleanup operations - plan: pro and team",
default=7,
)
DATASET_OPERATOR_ENABLED: bool = Field(
description="Enable or disable dataset operator functionality",
default=False,
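
The hunk above replaces the single CLEAN_DAY_SETTING with per-plan fields. As a minimal sketch of how such pydantic-settings fields resolve, assuming pydantic v2 with pydantic-settings installed (the env override below is invented for illustration):

```python
# Minimal sketch: pydantic-settings reads each field from an env var of the
# same name, falling back to the declared default. Field names mirror the
# diff above; the override value is made up.
import os

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class DataSetConfig(BaseSettings):
    PLAN_SANDBOX_CLEAN_DAY_SETTING: PositiveInt = Field(
        description="Interval in days for dataset cleanup operations - plan: sandbox",
        default=30,
    )
    PLAN_PRO_CLEAN_DAY_SETTING: PositiveInt = Field(
        description="Interval in days for dataset cleanup operations - plan: pro and team",
        default=7,
    )


os.environ["PLAN_PRO_CLEAN_DAY_SETTING"] = "3"  # simulate a deployment override
config = DataSetConfig()
print(config.PLAN_SANDBOX_CLEAN_DAY_SETTING)  # 30 (default)
print(config.PLAN_PRO_CLEAN_DAY_SETTING)      # 3  (from the environment)
```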

View File

@ -14,7 +14,7 @@ class OracleConfig(BaseSettings):
default=None,
)
ORACLE_PORT: Optional[PositiveInt] = Field(
ORACLE_PORT: PositiveInt = Field(
description="Port number on which the Oracle database server is listening (default is 1521)",
default=1521,
)

View File

@ -14,7 +14,7 @@ class PGVectorConfig(BaseSettings):
default=None,
)
PGVECTOR_PORT: Optional[PositiveInt] = Field(
PGVECTOR_PORT: PositiveInt = Field(
description="Port number on which the PostgreSQL server is listening (default is 5433)",
default=5433,
)

View File

@ -14,7 +14,7 @@ class PGVectoRSConfig(BaseSettings):
default=None,
)
PGVECTO_RS_PORT: Optional[PositiveInt] = Field(
PGVECTO_RS_PORT: PositiveInt = Field(
description="Port number on which the PostgreSQL server with PGVecto.RS is listening (default is 5431)",
default=5431,
)

View File

@ -11,27 +11,39 @@ class VikingDBConfig(BaseModel):
"""
VIKINGDB_ACCESS_KEY: Optional[str] = Field(
default=None, description="The Access Key provided by Volcengine VikingDB for API authentication."
description="The Access Key provided by Volcengine VikingDB for API authentication."
"Refer to the following documentation for details on obtaining credentials:"
"https://www.volcengine.com/docs/6291/65568",
default=None,
)
VIKINGDB_SECRET_KEY: Optional[str] = Field(
default=None, description="The Secret Key provided by Volcengine VikingDB for API authentication."
description="The Secret Key provided by Volcengine VikingDB for API authentication.",
default=None,
)
VIKINGDB_REGION: Optional[str] = Field(
default="cn-shanghai",
VIKINGDB_REGION: str = Field(
description="The region of the Volcengine VikingDB service.(e.g., 'cn-shanghai', 'cn-beijing').",
default="cn-shanghai",
)
VIKINGDB_HOST: Optional[str] = Field(
default="api-vikingdb.mlp.cn-shanghai.volces.com",
VIKINGDB_HOST: str = Field(
description="The host of the Volcengine VikingDB service.(e.g., 'api-vikingdb.volces.com', \
'api-vikingdb.mlp.cn-shanghai.volces.com')",
default="api-vikingdb.mlp.cn-shanghai.volces.com",
)
VIKINGDB_SCHEME: Optional[str] = Field(
default="http",
VIKINGDB_SCHEME: str = Field(
description="The scheme of the Volcengine VikingDB service.(e.g., 'http', 'https').",
default="http",
)
VIKINGDB_CONNECTION_TIMEOUT: Optional[int] = Field(
default=30, description="The connection timeout of the Volcengine VikingDB service."
VIKINGDB_CONNECTION_TIMEOUT: int = Field(
description="The connection timeout of the Volcengine VikingDB service.",
default=30,
)
VIKINGDB_SOCKET_TIMEOUT: Optional[int] = Field(
default=30, description="The socket timeout of the Volcengine VikingDB service."
VIKINGDB_SOCKET_TIMEOUT: int = Field(
description="The socket timeout of the Volcengine VikingDB service.",
default=30,
)
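
The port and VikingDB hunks share one pattern: fields that always carry a non-None default stop being declared Optional. A small illustrative Pydantic v2 model (not Dify code) showing what that changes:

```python
# Sketch of why Optional[...] with a non-None default is misleading: the
# field is never None in practice, but the annotation still admits None.
# Dropping Optional keeps the default and rejects an explicit None.
from typing import Optional

from pydantic import BaseModel, PositiveInt, ValidationError


class Loose(BaseModel):
    port: Optional[PositiveInt] = 1521   # old style: None is accepted


class Strict(BaseModel):
    port: PositiveInt = 1521             # new style: default kept, None rejected


print(Loose(port=None).port)   # None sneaks through
print(Strict().port)           # 1521
try:
    Strict(port=None)
except ValidationError as e:
    print("None rejected:", e.error_count(), "error")
```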

View File

@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
CURRENT_VERSION: str = Field(
description="Dify version",
default="0.9.1",
default="0.9.2",
)
COMMIT_SHA: str = Field(

View File

@ -1,88 +1,24 @@
import logging
from flask_restful import Resource
from flask_login import current_user
from flask_restful import Resource, marshal, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from controllers.console import api
from controllers.console.app.error import (
CompletionRequestError,
ProviderModelCurrentlyNotSupportError,
ProviderNotInitializeError,
ProviderQuotaExceededError,
)
from controllers.console.datasets.error import DatasetNotInitializedError
from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from core.errors.error import (
LLMBadRequestError,
ModelCurrentlyNotSupportError,
ProviderTokenNotInitError,
QuotaExceededError,
)
from core.model_runtime.errors.invoke import InvokeError
from fields.hit_testing_fields import hit_testing_record_fields
from libs.login import login_required
from services.dataset_service import DatasetService
from services.hit_testing_service import HitTestingService
class HitTestingApi(Resource):
class HitTestingApi(Resource, DatasetsHitTestingBase):
@setup_required
@login_required
@account_initialization_required
def post(self, dataset_id):
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
raise NotFound("Dataset not found.")
dataset = self.get_and_validate_dataset(dataset_id_str)
args = self.parse_args()
self.hit_testing_args_check(args)
try:
DatasetService.check_dataset_permission(dataset, current_user)
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
parser = reqparse.RequestParser()
parser.add_argument("query", type=str, location="json")
parser.add_argument("retrieval_model", type=dict, required=False, location="json")
parser.add_argument("external_retrieval_model", type=dict, required=False, location="json")
args = parser.parse_args()
HitTestingService.hit_testing_args_check(args)
try:
response = HitTestingService.retrieve(
dataset=dataset,
query=args["query"],
account=current_user,
retrieval_model=args["retrieval_model"],
external_retrieval_model=args["external_retrieval_model"],
limit=10,
)
return {"query": response["query"], "records": marshal(response["records"], hit_testing_record_fields)}
except services.errors.index.IndexNotInitializedError:
raise DatasetNotInitializedError()
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except LLMBadRequestError:
raise ProviderNotInitializeError(
"No Embedding Model or Reranking Model available. Please configure a valid provider "
"in the Settings -> Model Provider."
)
except InvokeError as e:
raise CompletionRequestError(e.description)
except ValueError as e:
raise ValueError(str(e))
except Exception as e:
logging.exception("Hit testing failed.")
raise InternalServerError(str(e))
return self.perform_hit_testing(dataset, args)
api.add_resource(HitTestingApi, "/datasets/<uuid:dataset_id>/hit-testing")

View File

@ -0,0 +1,85 @@
import logging
from flask_login import current_user
from flask_restful import marshal, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services.dataset_service
from controllers.console.app.error import (
CompletionRequestError,
ProviderModelCurrentlyNotSupportError,
ProviderNotInitializeError,
ProviderQuotaExceededError,
)
from controllers.console.datasets.error import DatasetNotInitializedError
from core.errors.error import (
LLMBadRequestError,
ModelCurrentlyNotSupportError,
ProviderTokenNotInitError,
QuotaExceededError,
)
from core.model_runtime.errors.invoke import InvokeError
from fields.hit_testing_fields import hit_testing_record_fields
from services.dataset_service import DatasetService
from services.hit_testing_service import HitTestingService
class DatasetsHitTestingBase:
@staticmethod
def get_and_validate_dataset(dataset_id: str):
dataset = DatasetService.get_dataset(dataset_id)
if dataset is None:
raise NotFound("Dataset not found.")
try:
DatasetService.check_dataset_permission(dataset, current_user)
except services.errors.account.NoPermissionError as e:
raise Forbidden(str(e))
return dataset
@staticmethod
def hit_testing_args_check(args):
HitTestingService.hit_testing_args_check(args)
@staticmethod
def parse_args():
parser = reqparse.RequestParser()
parser.add_argument("query", type=str, location="json")
parser.add_argument("retrieval_model", type=dict, required=False, location="json")
parser.add_argument("external_retrieval_model", type=dict, required=False, location="json")
return parser.parse_args()
@staticmethod
def perform_hit_testing(dataset, args):
try:
response = HitTestingService.retrieve(
dataset=dataset,
query=args["query"],
account=current_user,
retrieval_model=args["retrieval_model"],
external_retrieval_model=args["external_retrieval_model"],
limit=10,
)
return {"query": response["query"], "records": marshal(response["records"], hit_testing_record_fields)}
except services.errors.index.IndexNotInitializedError:
raise DatasetNotInitializedError()
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except LLMBadRequestError:
raise ProviderNotInitializeError(
"No Embedding Model or Reranking Model available. Please configure a valid provider "
"in the Settings -> Model Provider."
)
except InvokeError as e:
raise CompletionRequestError(e.description)
except ValueError as e:
raise ValueError(str(e))
except Exception as e:
logging.exception("Hit testing failed.")
raise InternalServerError(str(e))

View File

@ -5,7 +5,6 @@ from libs.external_api import ExternalApi
bp = Blueprint("service_api", __name__, url_prefix="/v1")
api = ExternalApi(bp)
from . import index
from .app import app, audio, completion, conversation, file, message, workflow
from .dataset import dataset, document, segment
from .dataset import dataset, document, hit_testing, segment

View File

@ -4,7 +4,6 @@ from flask_restful import Resource, reqparse
from werkzeug.exceptions import InternalServerError, NotFound
import services
from constants import UUID_NIL
from controllers.service_api import api
from controllers.service_api.app.error import (
AppUnavailableError,
@ -108,7 +107,6 @@ class ChatApi(Resource):
parser.add_argument("conversation_id", type=uuid_value, location="json")
parser.add_argument("retriever_from", type=str, required=False, default="dev", location="json")
parser.add_argument("auto_generate_name", type=bool, required=False, default=True, location="json")
parser.add_argument("parent_message_id", type=uuid_value, required=False, default=UUID_NIL, location="json")
args = parser.parse_args()

View File

@ -0,0 +1,17 @@
from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
from controllers.service_api import api
from controllers.service_api.wraps import DatasetApiResource
class HitTestingApi(DatasetApiResource, DatasetsHitTestingBase):
def post(self, tenant_id, dataset_id):
dataset_id_str = str(dataset_id)
dataset = self.get_and_validate_dataset(dataset_id_str)
args = self.parse_args()
self.hit_testing_args_check(args)
return self.perform_hit_testing(dataset, args)
api.add_resource(HitTestingApi, "/datasets/<uuid:dataset_id>/hit-testing")
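
With both controllers delegating to DatasetsHitTestingBase, validation, argument parsing, and error mapping live in one place. A self-contained sketch of the mixin shape, with stand-in stubs instead of Flask-RESTful (class and method bodies are shortened, illustrative versions of the ones in the diff):

```python
# Self-contained sketch of the refactor: shared steps live on a mixin of
# static methods; each API surface subclasses its own Resource base plus
# the mixin. The Resource classes are stand-ins, not Flask-RESTful itself.
class ConsoleResource:            # stand-in for flask_restful.Resource
    pass


class DatasetApiResource:         # stand-in for the service-API base class
    pass


class HitTestingBase:
    @staticmethod
    def parse_args(raw: dict) -> dict:
        return {"query": raw.get("query"), "retrieval_model": raw.get("retrieval_model")}

    @staticmethod
    def perform_hit_testing(dataset: str, args: dict) -> dict:
        return {"dataset": dataset, "query": args["query"], "records": []}


class ConsoleHitTestingApi(ConsoleResource, HitTestingBase):
    def post(self, dataset_id: str, raw: dict) -> dict:
        return self.perform_hit_testing(dataset_id, self.parse_args(raw))


class ServiceHitTestingApi(DatasetApiResource, HitTestingBase):
    def post(self, tenant_id: str, dataset_id: str, raw: dict) -> dict:
        return self.perform_hit_testing(dataset_id, self.parse_args(raw))


print(ConsoleHitTestingApi().post("ds-1", {"query": "hello"}))
```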

View File

@ -62,6 +62,8 @@ class CotAgentOutputParser:
thought_str = "thought:"
thought_idx = 0
last_character = ""
for response in llm_response:
if response.delta.usage:
usage_dict["usage"] = response.delta.usage
@ -74,35 +76,38 @@ class CotAgentOutputParser:
while index < len(response):
steps = 1
delta = response[index : index + steps]
last_character = response[index - 1] if index > 0 else ""
yield_delta = False
if delta == "`":
last_character = delta
code_block_cache += delta
code_block_delimiter_count += 1
else:
if not in_code_block:
if code_block_delimiter_count > 0:
last_character = delta
yield code_block_cache
code_block_cache = ""
else:
last_character = delta
code_block_cache += delta
code_block_delimiter_count = 0
if not in_code_block and not in_json:
if delta.lower() == action_str[action_idx] and action_idx == 0:
if last_character not in {"\n", " ", ""}:
yield_delta = True
else:
last_character = delta
action_cache += delta
action_idx += 1
if action_idx == len(action_str):
action_cache = ""
action_idx = 0
index += steps
yield delta
continue
action_cache += delta
action_idx += 1
if action_idx == len(action_str):
action_cache = ""
action_idx = 0
index += steps
continue
elif delta.lower() == action_str[action_idx] and action_idx > 0:
last_character = delta
action_cache += delta
action_idx += 1
if action_idx == len(action_str):
@ -112,24 +117,25 @@ class CotAgentOutputParser:
continue
else:
if action_cache:
last_character = delta
yield action_cache
action_cache = ""
action_idx = 0
if delta.lower() == thought_str[thought_idx] and thought_idx == 0:
if last_character not in {"\n", " ", ""}:
yield_delta = True
else:
last_character = delta
thought_cache += delta
thought_idx += 1
if thought_idx == len(thought_str):
thought_cache = ""
thought_idx = 0
index += steps
yield delta
continue
thought_cache += delta
thought_idx += 1
if thought_idx == len(thought_str):
thought_cache = ""
thought_idx = 0
index += steps
continue
elif delta.lower() == thought_str[thought_idx] and thought_idx > 0:
last_character = delta
thought_cache += delta
thought_idx += 1
if thought_idx == len(thought_str):
@ -139,12 +145,20 @@ class CotAgentOutputParser:
continue
else:
if thought_cache:
last_character = delta
yield thought_cache
thought_cache = ""
thought_idx = 0
if yield_delta:
index += steps
last_character = delta
yield delta
continue
if code_block_delimiter_count == 3:
if in_code_block:
last_character = delta
yield from extra_json_from_code_block(code_block_cache)
code_block_cache = ""
@ -156,8 +170,10 @@ class CotAgentOutputParser:
if delta == "{":
json_quote_count += 1
in_json = True
last_character = delta
json_cache += delta
elif delta == "}":
last_character = delta
json_cache += delta
if json_quote_count > 0:
json_quote_count -= 1
@ -168,16 +184,19 @@ class CotAgentOutputParser:
continue
else:
if in_json:
last_character = delta
json_cache += delta
if got_json:
got_json = False
last_character = delta
yield parse_action(json_cache)
json_cache = ""
json_quote_count = 0
in_json = False
if not in_code_block and not in_json:
last_character = delta
yield delta.replace("`", "")
index += steps
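
The parser now threads last_character through every branch so that the action: and thought: markers are only recognized after a newline, a space, or the start of the stream; a mid-word match is yielded as ordinary text. A reduced sketch of that boundary check, far simpler than the real class:

```python
# Reduced sketch: scan a character stream and flag a keyword only when the
# character before its first letter is a boundary ("\n", " ", or start of
# stream). This mirrors the last_character bookkeeping in the diff.
def find_marker(text: str, marker: str = "action:") -> list[int]:
    hits, idx, last_character = [], 0, ""
    for pos, ch in enumerate(text):
        if ch.lower() == marker[idx]:
            if idx == 0 and last_character not in {"\n", " ", ""}:
                last_character = ch      # mid-word: not a real marker start
                continue
            idx += 1
            if idx == len(marker):
                hits.append(pos - len(marker) + 1)
                idx = 0
        else:
            idx = 0
        last_character = ch
    return hits


# [0, 27]: both line-initial markers hit; the one inside "reaction:" is skipped
print(find_marker("action: do\nreaction: skip\n action: do"))
```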

View File

@ -10,6 +10,7 @@ from flask import Flask, current_app
from pydantic import ValidationError
import contexts
from constants import UUID_NIL
from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.advanced_chat.app_runner import AdvancedChatAppRunner
@ -133,7 +134,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
inputs=conversation.inputs if conversation else self._get_cleaned_inputs(inputs, app_config),
query=query,
files=file_objs,
parent_message_id=args.get("parent_message_id"),
parent_message_id=args.get("parent_message_id") if invoke_from != InvokeFrom.SERVICE_API else UUID_NIL,
user_id=user.id,
stream=stream,
invoke_from=invoke_from,

View File

@ -8,6 +8,7 @@ from typing import Any, Literal, Union, overload
from flask import Flask, current_app
from pydantic import ValidationError
from constants import UUID_NIL
from core.app.app_config.easy_ui_based_app.model_config.converter import ModelConfigConverter
from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
from core.app.apps.agent_chat.app_config_manager import AgentChatAppConfigManager
@ -137,7 +138,7 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
inputs=conversation.inputs if conversation else self._get_cleaned_inputs(inputs, app_config),
query=query,
files=file_objs,
parent_message_id=args.get("parent_message_id"),
parent_message_id=args.get("parent_message_id") if invoke_from != InvokeFrom.SERVICE_API else UUID_NIL,
user_id=user.id,
stream=stream,
invoke_from=invoke_from,

View File

@ -8,6 +8,7 @@ from typing import Any, Literal, Union, overload
from flask import Flask, current_app
from pydantic import ValidationError
from constants import UUID_NIL
from core.app.app_config.easy_ui_based_app.model_config.converter import ModelConfigConverter
from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
from core.app.apps.base_app_queue_manager import AppQueueManager, GenerateTaskStoppedError, PublishFrom
@ -138,7 +139,7 @@ class ChatAppGenerator(MessageBasedAppGenerator):
inputs=conversation.inputs if conversation else self._get_cleaned_inputs(inputs, app_config),
query=query,
files=file_objs,
parent_message_id=args.get("parent_message_id"),
parent_message_id=args.get("parent_message_id") if invoke_from != InvokeFrom.SERVICE_API else UUID_NIL,
user_id=user.id,
stream=stream,
invoke_from=invoke_from,

View File

@ -2,8 +2,9 @@ from collections.abc import Mapping
from enum import Enum
from typing import Any, Optional
from pydantic import BaseModel, ConfigDict
from pydantic import BaseModel, ConfigDict, Field, ValidationInfo, field_validator
from constants import UUID_NIL
from core.app.app_config.entities import AppConfig, EasyUIBasedAppConfig, WorkflowUIBasedAppConfig
from core.entities.provider_configuration import ProviderModelBundle
from core.file.file_obj import FileVar
@ -116,13 +117,36 @@ class EasyUIBasedAppGenerateEntity(AppGenerateEntity):
model_config = ConfigDict(protected_namespaces=())
class ChatAppGenerateEntity(EasyUIBasedAppGenerateEntity):
class ConversationAppGenerateEntity(AppGenerateEntity):
"""
Base entity for conversation-based app generation.
"""
conversation_id: Optional[str] = None
parent_message_id: Optional[str] = Field(
default=None,
description=(
"Starting from v0.9.0, parent_message_id is used to support message regeneration for internal chat API."
"For service API, we need to ensure its forward compatibility, "
"so passing in the parent_message_id as request arg is not supported for now. "
"It needs to be set to UUID_NIL so that the subsequent processing will treat it as legacy messages."
),
)
@field_validator("parent_message_id")
@classmethod
def validate_parent_message_id(cls, v, info: ValidationInfo):
if info.data.get("invoke_from") == InvokeFrom.SERVICE_API and v != UUID_NIL:
raise ValueError("parent_message_id should be UUID_NIL for service API")
return v
class ChatAppGenerateEntity(ConversationAppGenerateEntity, EasyUIBasedAppGenerateEntity):
"""
Chat Application Generate Entity.
"""
conversation_id: Optional[str] = None
parent_message_id: Optional[str] = None
pass
class CompletionAppGenerateEntity(EasyUIBasedAppGenerateEntity):
@ -133,16 +157,15 @@ class CompletionAppGenerateEntity(EasyUIBasedAppGenerateEntity):
pass
class AgentChatAppGenerateEntity(EasyUIBasedAppGenerateEntity):
class AgentChatAppGenerateEntity(ConversationAppGenerateEntity, EasyUIBasedAppGenerateEntity):
"""
Agent Chat Application Generate Entity.
"""
conversation_id: Optional[str] = None
parent_message_id: Optional[str] = None
pass
class AdvancedChatAppGenerateEntity(AppGenerateEntity):
class AdvancedChatAppGenerateEntity(ConversationAppGenerateEntity):
"""
Advanced Chat Application Generate Entity.
"""
@ -150,8 +173,6 @@ class AdvancedChatAppGenerateEntity(AppGenerateEntity):
# app config
app_config: WorkflowUIBasedAppConfig
conversation_id: Optional[str] = None
parent_message_id: Optional[str] = None
workflow_run_id: Optional[str] = None
query: str
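
The generators force parent_message_id to UUID_NIL for service-API calls, and the new ConversationAppGenerateEntity validator enforces the same invariant at construction time. A compact sketch of the cross-field check with Pydantic v2 (the model and helper are illustrative; UUID_NIL is assumed to be the nil-UUID constant):

```python
# Sketch: a field_validator can read previously-validated fields through
# info.data, so parent_message_id can be checked against invoke_from.
# UUID_NIL mirrors the constant in the diff; the model itself is a stub.
from enum import Enum
from typing import Optional

from pydantic import BaseModel, ValidationError, ValidationInfo, field_validator

UUID_NIL = "00000000-0000-0000-0000-000000000000"


class InvokeFrom(str, Enum):
    SERVICE_API = "service-api"
    WEB_APP = "web-app"


class ConversationEntity(BaseModel):
    invoke_from: InvokeFrom          # declared before parent_message_id on purpose
    parent_message_id: Optional[str] = None

    @field_validator("parent_message_id")
    @classmethod
    def validate_parent_message_id(cls, v, info: ValidationInfo):
        if info.data.get("invoke_from") == InvokeFrom.SERVICE_API and v != UUID_NIL:
            raise ValueError("parent_message_id should be UUID_NIL for service API")
        return v


def resolve_parent_message_id(args: dict, invoke_from: InvokeFrom) -> Optional[str]:
    # The generator-side guard from the diff, extracted as a helper.
    return args.get("parent_message_id") if invoke_from != InvokeFrom.SERVICE_API else UUID_NIL


args = {"parent_message_id": "abc"}
ok = ConversationEntity(
    invoke_from=InvokeFrom.SERVICE_API,
    parent_message_id=resolve_parent_message_id(args, InvokeFrom.SERVICE_API),
)
print(ok.parent_message_id)  # UUID_NIL: the guard satisfies the validator
try:
    ConversationEntity(invoke_from=InvokeFrom.SERVICE_API, parent_message_id="abc")
except ValidationError:
    print("rejected: raw parent_message_id over the service API")
```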

View File

@ -1,168 +0,0 @@
from collections.abc import Mapping
from typing import Optional
import openai
from httpx import Timeout
from openai import OpenAI
from openai.types import ModerationCreateResponse
from core.model_runtime.entities.model_entities import ModelPropertyKey
from core.model_runtime.errors.invoke import (
InvokeAuthorizationError,
InvokeBadRequestError,
InvokeConnectionError,
InvokeError,
InvokeRateLimitError,
InvokeServerUnavailableError,
)
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.moderation_model import ModerationModel
class OpenAIModerationModel(ModerationModel):
"""
Model class for OpenAI text moderation model.
"""
def _invoke(self, model: str, credentials: dict, text: str, user: Optional[str] = None) -> bool:
"""
Invoke moderation model
:param model: model name
:param credentials: model credentials
:param text: text to moderate
:param user: unique user id
:return: false if text is safe, true otherwise
"""
# transform credentials to kwargs for model instance
credentials_kwargs = self._to_credential_kwargs(credentials)
# init model client
client = OpenAI(**credentials_kwargs)
# chars per chunk
length = self._get_max_characters_per_chunk(model, credentials)
text_chunks = [text[i : i + length] for i in range(0, len(text), length)]
max_text_chunks = self._get_max_chunks(model, credentials)
chunks = [text_chunks[i : i + max_text_chunks] for i in range(0, len(text_chunks), max_text_chunks)]
for text_chunk in chunks:
moderation_result = self._moderation_invoke(model=model, client=client, texts=text_chunk)
for result in moderation_result.results:
if result.flagged is True:
return True
return False
def validate_credentials(self, model: str, credentials: dict) -> None:
"""
Validate model credentials
:param model: model name
:param credentials: model credentials
:return:
"""
try:
# transform credentials to kwargs for model instance
credentials_kwargs = self._to_credential_kwargs(credentials)
client = OpenAI(**credentials_kwargs)
# call moderation model
self._moderation_invoke(
model=model,
client=client,
texts=["ping"],
)
except Exception as ex:
raise CredentialsValidateFailedError(str(ex))
def _moderation_invoke(self, model: str, client: OpenAI, texts: list[str]) -> ModerationCreateResponse:
"""
Invoke moderation model
:param model: model name
:param client: model client
:param texts: texts to moderate
:return: false if text is safe, true otherwise
"""
# call moderation model
moderation_result = client.moderations.create(model=model, input=texts)
return moderation_result
def _get_max_characters_per_chunk(self, model: str, credentials: dict) -> int:
"""
Get max characters per chunk
:param model: model name
:param credentials: model credentials
:return: max characters per chunk
"""
model_schema = self.get_model_schema(model, credentials)
if model_schema and ModelPropertyKey.MAX_CHARACTERS_PER_CHUNK in model_schema.model_properties:
return model_schema.model_properties[ModelPropertyKey.MAX_CHARACTERS_PER_CHUNK]
return 2000
def _get_max_chunks(self, model: str, credentials: dict) -> int:
"""
Get max chunks for given embedding model
:param model: model name
:param credentials: model credentials
:return: max chunks
"""
model_schema = self.get_model_schema(model, credentials)
if model_schema and ModelPropertyKey.MAX_CHUNKS in model_schema.model_properties:
return model_schema.model_properties[ModelPropertyKey.MAX_CHUNKS]
return 1
def _to_credential_kwargs(self, credentials: Mapping) -> dict:
"""
Transform credentials to kwargs for model instance
:param credentials:
:return:
"""
credentials_kwargs = {
"api_key": credentials["openai_api_key"],
"timeout": Timeout(315.0, read=300.0, write=10.0, connect=5.0),
"max_retries": 1,
}
if credentials.get("openai_api_base"):
openai_api_base = credentials["openai_api_base"].rstrip("/")
credentials_kwargs["base_url"] = openai_api_base + "/v1"
if "openai_organization" in credentials:
credentials_kwargs["organization"] = credentials["openai_organization"]
return credentials_kwargs
@property
def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]:
"""
Map model invoke error to unified error
The key is the error type thrown to the caller
The value is the error type thrown by the model,
which needs to be converted into a unified error type for the caller.
:return: Invoke error mapping
"""
return {
InvokeConnectionError: [openai.APIConnectionError, openai.APITimeoutError],
InvokeServerUnavailableError: [openai.InternalServerError],
InvokeRateLimitError: [openai.RateLimitError],
InvokeAuthorizationError: [openai.AuthenticationError, openai.PermissionDeniedError],
InvokeBadRequestError: [
openai.BadRequestError,
openai.NotFoundError,
openai.UnprocessableEntityError,
openai.APIError,
],
}

View File

@ -1,8 +1,8 @@
from typing import Any
from configs import dify_config
from core.rag.datasource.keyword.jieba.jieba import Jieba
from core.rag.datasource.keyword.keyword_base import BaseKeyword
from core.rag.datasource.keyword.keyword_type import KeyWordType
from core.rag.models.document import Document
from models.dataset import Dataset
@ -13,16 +13,19 @@ class Keyword:
self._keyword_processor = self._init_keyword()
def _init_keyword(self) -> BaseKeyword:
config = dify_config
keyword_type = config.KEYWORD_STORE
keyword_type = dify_config.KEYWORD_STORE
keyword_factory = self.get_keyword_factory(keyword_type)
return keyword_factory(self._dataset)
if not keyword_type:
raise ValueError("Keyword store must be specified.")
@staticmethod
def get_keyword_factory(keyword_type: str) -> type[BaseKeyword]:
match keyword_type:
case KeyWordType.JIEBA:
from core.rag.datasource.keyword.jieba.jieba import Jieba
if keyword_type == "jieba":
return Jieba(dataset=self._dataset)
else:
raise ValueError(f"Keyword store {keyword_type} is not supported.")
return Jieba
case _:
raise ValueError(f"Keyword store {keyword_type} is not supported.")
def create(self, texts: list[Document], **kwargs):
self._keyword_processor.create(texts, **kwargs)
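
The factory now returns a class rather than an instance and dispatches on the KeyWordType enum with match. A minimal sketch of that shape, with stubs standing in for the real Jieba keyword store:

```python
# Sketch of the class-returning factory: match on a str Enum, return the
# implementation type, and let the caller instantiate it. BaseKeyword and
# Jieba are stubs, not the real keyword stores.
from enum import Enum


class KeyWordType(str, Enum):
    JIEBA = "jieba"


class BaseKeyword:
    def __init__(self, dataset: str):
        self.dataset = dataset


class Jieba(BaseKeyword):
    pass


def get_keyword_factory(keyword_type: str) -> type[BaseKeyword]:
    match keyword_type:
        case KeyWordType.JIEBA:
            return Jieba
        case _:
            raise ValueError(f"Keyword store {keyword_type} is not supported.")


processor = get_keyword_factory("jieba")("my-dataset")  # plain str matches the str Enum
print(type(processor).__name__, processor.dataset)
```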

View File

@ -0,0 +1,5 @@
from enum import Enum
class KeyWordType(str, Enum):
JIEBA = "jieba"

View File

@ -112,7 +112,7 @@ class ElasticSearchVector(BaseVector):
self._client.indices.delete(index=self._collection_name)
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
top_k = kwargs.get("top_k", 10)
top_k = kwargs.get("top_k", 4)
num_candidates = math.ceil(top_k * 1.5)
knn = {"field": Field.VECTOR.value, "query_vector": query_vector, "k": top_k, "num_candidates": num_candidates}

View File

@ -121,7 +121,7 @@ class MyScaleVector(BaseVector):
return self._search(f"TextSearch('enable_nlq=false')(text, '{query}')", SortOrder.DESC, **kwargs)
def _search(self, dist: str, order: SortOrder, **kwargs: Any) -> list[Document]:
top_k = kwargs.get("top_k", 5)
top_k = kwargs.get("top_k", 4)
score_threshold = float(kwargs.get("score_threshold") or 0.0)
where_str = (
f"WHERE dist < {1 - score_threshold}"

View File

@ -168,14 +168,6 @@ class OracleVector(BaseVector):
docs.append(Document(page_content=record[1], metadata=record[0]))
return docs
# def get_ids_by_metadata_field(self, key: str, value: str):
# with self._get_cursor() as cur:
# cur.execute(f"SELECT id FROM {self.table_name} d WHERE d.meta.{key}='{value}'" )
# idss = []
# for record in cur:
# idss.append(record[0])
# return idss
def delete_by_ids(self, ids: list[str]) -> None:
with self._get_cursor() as cur:
cur.execute(f"DELETE FROM {self.table_name} WHERE id IN %s" % (tuple(ids),))
@ -192,7 +184,7 @@ class OracleVector(BaseVector):
:param top_k: The number of nearest neighbors to return, default is 5.
:return: List of Documents that are nearest to the query vector.
"""
top_k = kwargs.get("top_k", 5)
top_k = kwargs.get("top_k", 4)
with self._get_cursor() as cur:
cur.execute(
f"SELECT meta, text, vector_distance(embedding,:1) AS distance FROM {self.table_name}"

View File

@ -186,7 +186,7 @@ class PGVectoRS(BaseVector):
query_vector,
).label("distance"),
)
.limit(kwargs.get("top_k", 2))
.limit(kwargs.get("top_k", 4))
.order_by("distance")
)
res = session.execute(stmt)
@ -205,18 +205,6 @@ class PGVectoRS(BaseVector):
return docs
def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]:
# with Session(self._client) as session:
# select_statement = sql_text(
# f"SELECT text, meta FROM {self._collection_name} WHERE to_tsvector(text) @@ '{query}'::tsquery"
# )
# results = session.execute(select_statement).fetchall()
# if results:
# docs = []
# for result in results:
# doc = Document(page_content=result[0],
# metadata=result[1])
# docs.append(doc)
# return docs
return []

View File

@ -143,7 +143,7 @@ class PGVector(BaseVector):
:param top_k: The number of nearest neighbors to return, default is 5.
:return: List of Documents that are nearest to the query vector.
"""
top_k = kwargs.get("top_k", 5)
top_k = kwargs.get("top_k", 4)
with self._get_cursor() as cur:
cur.execute(

View File

@ -224,7 +224,7 @@ class RelytVector(BaseVector):
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
results = self.similarity_search_with_score_by_vector(
k=int(kwargs.get("top_k")), embedding=query_vector, filter=kwargs.get("filter")
k=int(kwargs.get("top_k", 4)), embedding=query_vector, filter=kwargs.get("filter")
)
# Organize results.

View File

@ -184,7 +184,7 @@ class TiDBVector(BaseVector):
self._delete_by_ids(ids)
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
top_k = kwargs.get("top_k", 5)
top_k = kwargs.get("top_k", 4)
score_threshold = float(kwargs.get("score_threshold") or 0.0)
filter = kwargs.get("filter")
distance = 1 - score_threshold

View File

@ -173,7 +173,7 @@ class VikingDBVector(BaseVector):
def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]:
results = self._client.get_index(self._collection_name, self._index_name).search_by_vector(
query_vector, limit=kwargs.get("top_k", 50)
query_vector, limit=kwargs.get("top_k", 4)
)
score_threshold = float(kwargs.get("score_threshold") or 0.0)
return self._get_search_res(results, score_threshold)

View File

@ -235,7 +235,7 @@ class WeaviateVector(BaseVector):
query_obj = query_obj.with_where(kwargs.get("where_filter"))
query_obj = query_obj.with_additional(["vector"])
properties = ["text"]
result = query_obj.with_bm25(query=query, properties=properties).with_limit(kwargs.get("top_k", 2)).do()
result = query_obj.with_bm25(query=query, properties=properties).with_limit(kwargs.get("top_k", 4)).do()
if "errors" in result:
raise ValueError(f"Error during query: {result['errors']}")
docs = []
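
These vector-store hunks unify the top_k fallback at 4; the RelytVector change also removes a crash, since int(kwargs.get("top_k")) raises TypeError when the key is absent. A tiny sketch of both points (function names invented):

```python
# Sketch: kwargs.get("top_k") returns None when absent, and int(None) raises
# TypeError; supplying the default inside .get() makes the call total.
import math
from typing import Any


def search_old(**kwargs: Any) -> int:
    return int(kwargs.get("top_k"))          # TypeError if top_k is missing


def search_new(**kwargs: Any) -> int:
    top_k = kwargs.get("top_k", 4)           # unified default from the diff
    num_candidates = math.ceil(top_k * 1.5)  # Elasticsearch-style oversampling
    return num_candidates


try:
    search_old()
except TypeError as e:
    print("old:", e)
print("new:", search_new())         # 6 candidates for the default top_k of 4
print("new:", search_new(top_k=10))
```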

View File

@ -215,7 +215,7 @@ class DatasetRetrieval:
"data_source_type": document.data_source_type,
"segment_id": segment.id,
"retriever_from": invoke_from.to_source(),
"score": document_score_list.get(segment.index_node_id, None),
"score": document_score_list.get(segment.index_node_id, 0.0),
}
if invoke_from.to_source() == "dev":
@ -229,12 +229,12 @@ class DatasetRetrieval:
source["content"] = segment.content
retrieval_resource_list.append(source)
if hit_callback and retrieval_resource_list:
retrieval_resource_list = sorted(retrieval_resource_list, key=lambda x: x.get("score"), reverse=True)
retrieval_resource_list = sorted(retrieval_resource_list, key=lambda x: x.get("score") or 0.0, reverse=True)
for position, item in enumerate(retrieval_resource_list, start=1):
item["position"] = position
hit_callback.return_retriever_resource_info(retrieval_resource_list)
if document_context_list:
document_context_list = sorted(document_context_list, key=lambda x: x.score, reverse=True)
document_context_list = sorted(document_context_list, key=lambda x: x.score or 0.0, reverse=True)
return str("\n".join([document_context.content for document_context in document_context_list]))
return ""

View File

@ -0,0 +1,48 @@
from datetime import datetime
from typing import Any, Union
import pytz
from core.tools.builtin_tool.tool import BuiltinTool
from core.tools.entities.tool_entities import ToolInvokeMessage
from core.tools.errors import ToolInvokeError
class TimezoneConversionTool(BuiltinTool):
def _invoke(
self,
user_id: str,
tool_parameters: dict[str, Any],
) -> Union[ToolInvokeMessage, list[ToolInvokeMessage]]:
"""
Convert time to equivalent time zone
"""
current_time = tool_parameters.get("current_time")
current_timezone = tool_parameters.get("current_timezone", "Asia/Shanghai")
target_timezone = tool_parameters.get("target_timezone", "Asia/Tokyo")
target_time = self.timezone_convert(current_time, current_timezone, target_timezone)
if not target_time:
return self.create_text_message(
f"Invalid datatime and timezone: {current_time},{current_timezone},{target_timezone}"
)
return self.create_text_message(f"{target_time}")
@staticmethod
def timezone_convert(current_time: str, source_timezone: str, target_timezone: str) -> str:
"""
Convert a time string from source timezone to target timezone.
"""
time_format = "%Y-%m-%d %H:%M:%S"
try:
# get source timezone
input_timezone = pytz.timezone(source_timezone)
# get target timezone
output_timezone = pytz.timezone(target_timezone)
local_time = datetime.strptime(current_time, time_format)
datetime_with_tz = input_timezone.localize(local_time)
# timezone convert
converted_datetime = datetime_with_tz.astimezone(output_timezone)
return converted_datetime.strftime(format=time_format)
except Exception as e:
raise ToolInvokeError(str(e))
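
The tool parses a naive timestamp, localizes it to the source zone with pytz, then shifts it with astimezone. A standalone sketch of the same steps (requires pytz; sample values invented):

```python
# Sketch of the conversion pipeline used by the tool: parse a naive string,
# attach the source zone via pytz.localize (not datetime(tzinfo=...) with
# pytz), then shift with astimezone. Input values are made up.
from datetime import datetime

import pytz

TIME_FORMAT = "%Y-%m-%d %H:%M:%S"


def timezone_convert(current_time: str, source_tz: str, target_tz: str) -> str:
    local_time = datetime.strptime(current_time, TIME_FORMAT)
    localized = pytz.timezone(source_tz).localize(local_time)
    return localized.astimezone(pytz.timezone(target_tz)).strftime(TIME_FORMAT)


print(timezone_convert("2024-01-01 12:00:00", "Asia/Shanghai", "Asia/Tokyo"))
# 2024-01-01 13:00:00 -- Tokyo is one hour ahead of Shanghai
```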

View File

@ -0,0 +1,44 @@
identity:
name: timezone_conversion
author: zhuhao
label:
en_US: convert time to equivalent time zone
zh_Hans: 时区转换
description:
human:
en_US: A tool to convert time to equivalent time zone
zh_Hans: 时区转换
llm: A tool to convert time to equivalent time zone
parameters:
- name: current_time
type: string
required: true
form: llm
label:
en_US: current time
zh_Hans: 当前时间
human_description:
en_US: current time, such as 2024-1-1 0:0:0
zh_Hans: 当前时间, 比如2024-1-1 0:0:0
- name: current_timezone
type: string
required: true
form: llm
label:
en_US: Current Timezone
zh_Hans: 当前时区
human_description:
en_US: Current Timezone, such as Asia/Shanghai
zh_Hans: 当前时区, 比如Asia/Shanghai
default: Asia/Shanghai
- name: target_timezone
type: string
required: true
form: llm
label:
en_US: Target Timezone
zh_Hans: 目标时区
human_description:
en_US: Target Timezone, such as Asia/Tokyo
zh_Hans: 目标时区, 比如Asia/Tokyo
default: Asia/Tokyo

View File

@ -234,7 +234,7 @@ class KnowledgeRetrievalNode(BaseNode):
retrieval_resource_list.append(source)
if retrieval_resource_list:
retrieval_resource_list = sorted(
retrieval_resource_list, key=lambda x: x.get("metadata").get("score"), reverse=True
retrieval_resource_list, key=lambda x: x.get("metadata").get("score") or 0.0, reverse=True
)
position = 1
for item in retrieval_resource_list:

View File

@ -1,5 +1,4 @@
from collections.abc import Generator
from contextlib import closing
import oss2 as aliyun_s3
from flask import Flask
@ -34,15 +33,15 @@ class AliyunOssStorage(BaseStorage):
self.client.put_object(self.__wrapper_folder_filename(filename), data)
def load_once(self, filename: str) -> bytes:
with closing(self.client.get_object(self.__wrapper_folder_filename(filename))) as obj:
data = obj.read()
obj = self.client.get_object(self.__wrapper_folder_filename(filename))
data = obj.read()
return data
def load_stream(self, filename: str) -> Generator:
def generate(filename: str = filename) -> Generator:
with closing(self.client.get_object(self.__wrapper_folder_filename(filename))) as obj:
while chunk := obj.read(4096):
yield chunk
obj = self.client.get_object(self.__wrapper_folder_filename(filename))
while chunk := obj.read(4096):
yield chunk
return generate()

View File

@ -1,6 +1,5 @@
import logging
from collections.abc import Generator
from contextlib import closing
import boto3
from botocore.client import Config
@ -55,8 +54,7 @@ class AwsS3Storage(BaseStorage):
def load_once(self, filename: str) -> bytes:
try:
with closing(self.client) as client:
data = client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read()
data = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read()
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
raise FileNotFoundError("File not found")
@ -67,9 +65,8 @@ class AwsS3Storage(BaseStorage):
def load_stream(self, filename: str) -> Generator:
def generate(filename: str = filename) -> Generator:
try:
with closing(self.client) as client:
response = client.get_object(Bucket=self.bucket_name, Key=filename)
yield from response["Body"].iter_chunks()
response = self.client.get_object(Bucket=self.bucket_name, Key=filename)
yield from response["Body"].iter_chunks()
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
raise FileNotFoundError("File not found")
@ -79,16 +76,14 @@ class AwsS3Storage(BaseStorage):
return generate()
def download(self, filename, target_filepath):
with closing(self.client) as client:
client.download_file(self.bucket_name, filename, target_filepath)
self.client.download_file(self.bucket_name, filename, target_filepath)
def exists(self, filename):
with closing(self.client) as client:
try:
client.head_object(Bucket=self.bucket_name, Key=filename)
return True
except:
return False
try:
self.client.head_object(Bucket=self.bucket_name, Key=filename)
return True
except:
return False
def delete(self, filename):
self.client.delete_object(Bucket=self.bucket_name, Key=filename)
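
The storage classes stop wrapping self.client in contextlib.closing: the boto3 client is created once and meant to be reused, and closing it after a single call tears down the connections that later calls need. A sketch of the reuse pattern, with placeholder bucket and key names:

```python
# Sketch: create the boto3 client once in __init__ and reuse it for every
# call, instead of closing it per request. Bucket/key values are placeholders.
from collections.abc import Generator

import boto3
from botocore.exceptions import ClientError


class S3Storage:
    def __init__(self, bucket_name: str = "example-bucket"):
        self.bucket_name = bucket_name
        self.client = boto3.client("s3")  # long-lived; shared across calls

    def load_once(self, filename: str) -> bytes:
        try:
            return self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read()
        except ClientError as ex:
            if ex.response["Error"]["Code"] == "NoSuchKey":
                raise FileNotFoundError("File not found")
            raise

    def load_stream(self, filename: str) -> Generator[bytes, None, None]:
        response = self.client.get_object(Bucket=self.bucket_name, Key=filename)
        yield from response["Body"].iter_chunks()
```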

View File

@ -2,7 +2,6 @@ import base64
import io
import json
from collections.abc import Generator
from contextlib import closing
from flask import Flask
from google.cloud import storage as google_cloud_storage
@ -43,7 +42,7 @@ class GoogleCloudStorage(BaseStorage):
def generate(filename: str = filename) -> Generator:
bucket = self.client.get_bucket(self.bucket_name)
blob = bucket.get_blob(filename)
with closing(blob.open(mode="rb")) as blob_stream:
with blob.open(mode="rb") as blob_stream:
while chunk := blob_stream.read(4096):
yield chunk

View File

@ -1,5 +1,4 @@
from collections.abc import Generator
from contextlib import closing
import boto3
from botocore.exceptions import ClientError
@ -28,8 +27,7 @@ class OracleOCIStorage(BaseStorage):
def load_once(self, filename: str) -> bytes:
try:
with closing(self.client) as client:
data = client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read()
data = self.client.get_object(Bucket=self.bucket_name, Key=filename)["Body"].read()
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
raise FileNotFoundError("File not found")
@ -40,9 +38,8 @@ class OracleOCIStorage(BaseStorage):
def load_stream(self, filename: str) -> Generator:
def generate(filename: str = filename) -> Generator:
try:
with closing(self.client) as client:
response = client.get_object(Bucket=self.bucket_name, Key=filename)
yield from response["Body"].iter_chunks()
response = self.client.get_object(Bucket=self.bucket_name, Key=filename)
yield from response["Body"].iter_chunks()
except ClientError as ex:
if ex.response["Error"]["Code"] == "NoSuchKey":
raise FileNotFoundError("File not found")
@ -52,16 +49,14 @@ class OracleOCIStorage(BaseStorage):
return generate()
def download(self, filename, target_filepath):
with closing(self.client) as client:
client.download_file(self.bucket_name, filename, target_filepath)
self.client.download_file(self.bucket_name, filename, target_filepath)
def exists(self, filename):
with closing(self.client) as client:
try:
client.head_object(Bucket=self.bucket_name, Key=filename)
return True
except:
return False
try:
self.client.head_object(Bucket=self.bucket_name, Key=filename)
return True
except:
return False
def delete(self, filename):
self.client.delete_object(Bucket=self.bucket_name, Key=filename)

api/poetry.lock (generated, 463 changed lines)
View File

@ -455,13 +455,13 @@ vertex = ["google-auth (>=2,<3)"]
[[package]]
name = "anyio"
version = "4.6.2"
version = "4.6.2.post1"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
files = [
{file = "anyio-4.6.2-py3-none-any.whl", hash = "sha256:6caec6b1391f6f6d7b2ef2258d2902d36753149f67478f7df4be8e54d03a8f54"},
{file = "anyio-4.6.2.tar.gz", hash = "sha256:f72a7bb3dd0752b3bd8b17a844a019d7fbf6ae218c588f4f9ba1b2f600b12347"},
{file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"},
{file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"},
]
[package.dependencies]
@ -844,13 +844,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.35.39"
version = "1.35.40"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">=3.8"
files = [
{file = "botocore-1.35.39-py3-none-any.whl", hash = "sha256:781c547eb6a79c0e4b0bedd87b81fbfed957816b4841d33e20c8f1989c7c19ce"},
{file = "botocore-1.35.39.tar.gz", hash = "sha256:cb7f851933b5ccc2fba4f0a8b846252410aa0efac5bfbe93b82d10801f5f8e90"},
{file = "botocore-1.35.40-py3-none-any.whl", hash = "sha256:072cc47f29cb1de4fa77ce6632e4f0480af29b70816973ff415fbaa3f50bd1db"},
{file = "botocore-1.35.40.tar.gz", hash = "sha256:547e0a983856c7d7aeaa30fca2a283873c57c07366cd806d2d639856341b3c31"},
]
[package.dependencies]
@ -2153,63 +2153,63 @@ typing_extensions = ">=4.0,<5.0"
[[package]]
name = "duckdb"
version = "1.1.1"
version = "1.1.2"
description = "DuckDB in-process database"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "duckdb-1.1.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e310610b692d30aa7f1f40d7878b26978a5b191f23fa8fa082bd17092c67c2fd"},
{file = "duckdb-1.1.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7acc97c3cc995850a4fa59dfa6ce713d7ea187c9696632161aa09d898f001a2b"},
{file = "duckdb-1.1.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:c0a09d78daea0de7ddf3d6d1113e80ceed8c15537e93f8efaad53024ffbde245"},
{file = "duckdb-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50c3b1667b0c73cb076b1b1f8fa0fd88fcef5c2bbb2b9acdef79e2eae429c248"},
{file = "duckdb-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1499a9b159d4675ea46786b7ebdbabd8287c62b6b116ccfd529112318d47184e"},
{file = "duckdb-1.1.1-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:876deda2ce97f4a9005a9ac862f0ebee9e5956d51d589a24955802ca91726d49"},
{file = "duckdb-1.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:40be901b38c709076f699b0c2f42a0c5663a496647eba350530e3a77f46a239b"},
{file = "duckdb-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb7642c5b21b8165b60029c274fc931c7c29cae3124b9a95ed73d050dd23584"},
{file = "duckdb-1.1.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:959716b65cf1c94fc117ac9c9692eea0bd64ae53bc8ab6538d459087b474dbeb"},
{file = "duckdb-1.1.1-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:6ff3c52ce0f8d25478155eb01de043ad0a25badbd10e684a2cd74363f1b86cde"},
{file = "duckdb-1.1.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:430294cf11ce866d3b726cf4530462316e20b773fed3cf2de3cf63eb89650da6"},
{file = "duckdb-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc9d48f772fafeea52568a0568cd11314cd79a10214069f3700dbcb31ebdf511"},
{file = "duckdb-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:572095739024d9a5aa2dd8336c289af6a624c203004213e49b7e2469275e940f"},
{file = "duckdb-1.1.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:660d9baf637b9a15e1ba74bbe02d3b4a20d82e8cbbd7d0712e0d59e3e9d6efea"},
{file = "duckdb-1.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b91973605c8a30a38c4381a27895e7768cb3caa6700b2534ab76cc6b72cac390"},
{file = "duckdb-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:f57c9e070cecf42d379145a75f325ec57fb1d410d6ff6592b5a28c2ff2b5792c"},
{file = "duckdb-1.1.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:926a99b81c50b9a4a43ca26dcb781f934d35e773d22913548396601ab8d44c12"},
{file = "duckdb-1.1.1-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:55a2632d27b5a965f1d9fc74b03383e80a3f8e3dc9596807dfb02c8db08cfcb7"},
{file = "duckdb-1.1.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:8d8174fe47caf48d830dc477a45cedc8c970722df09dc1456bddc760ff6ccf68"},
{file = "duckdb-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ad84023399002222fa8d5264a8dc2083053027910df728da92cabb07494a489"},
{file = "duckdb-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c8adbc8b37444424c72043288f1521c860555a4f151ee4b744e6125f5d05729"},
{file = "duckdb-1.1.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:550524c1b423eeb7ca0fdf1c2e6d29e723d7ec7cfab3050b9feb55a620ae927f"},
{file = "duckdb-1.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4064243e4d3f445975b78773677de0ccbe924f9c7058a7c2cfedb24bba2ba939"},
{file = "duckdb-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:4f64516dc62dd0fcbb9785c5bc7532a4fca3e6016bbcc92a2b235aa972c631f6"},
{file = "duckdb-1.1.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4bf75a64c927470b6618496adcfbf0f316ef09d46a44cfe8e38b78e9ff40c8a0"},
{file = "duckdb-1.1.1-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:5c8cd6fd7107299b9a243836cd8163e4c08d6228f18cbee4ed9f535f53300096"},
{file = "duckdb-1.1.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:fc81c02b4d73533a438a9bbae19499531d85b752233c905facc4df41bbde043c"},
{file = "duckdb-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baff4014caf6553b624a296e4db2926602670bd9be6e0fc75f3e970b085631b0"},
{file = "duckdb-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e21b75a9a60f10b5b5033138c317d929018c92f355fadae5949b310a9179e0a7"},
{file = "duckdb-1.1.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8220f039c5ea06dc126232464ab9b77197f80ae53d4611b0a41f73c54f6f3931"},
{file = "duckdb-1.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:07384414ceae585d4106a7dc154331ae42f45390ed675ec81e3d01f2252a6b01"},
{file = "duckdb-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:82776b3999e71a962db0bdc3f0258407ef41453f63eb47c33da29b644f8eb530"},
{file = "duckdb-1.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35d4323655be4053fb90d47e85222c93fd56aea0e8ab0ac44bd8f7249ba85697"},
{file = "duckdb-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:990d0799e0f543a4369413dc6caf7782cbbab49955c08c28ac56d5dab5ccef11"},
{file = "duckdb-1.1.1-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ef3ba36b317abe000f502702eaaefdd8c3651a25aa0ad409f9487b286e2fb28"},
{file = "duckdb-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c6e513a572967cd2bab0f20ce265f8eaf95ea7b554eecf1c233717c38569abc"},
{file = "duckdb-1.1.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:567471cb964a0e54a7874c578e81af7b6ab474676ae6469ae1c33c2353f76fb1"},
{file = "duckdb-1.1.1-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:a41d8eb4dc538d17660b78f2f4ecd0ba29666a396453bb71d6f4972bf2b3959e"},
{file = "duckdb-1.1.1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:31be0b9bc1909fb60abda7cd30615fe0224d1e451160d79e8e0313d6205417b0"},
{file = "duckdb-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:541fb49da108e080d4f2984d2fdabaee36d65967a33642f8bce03373b29952f0"},
{file = "duckdb-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c54f836dac5eddbe369fa654811e979bb07688638a52d1c006172feb5b75a5"},
{file = "duckdb-1.1.1-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:afb97970ee72e554b507c6f2e40b356bdbf8fc1f466e7c4d1797183eb66c0809"},
{file = "duckdb-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:a2cdcb68247f02017a35a0b617ceb1d36a02a7c0588d7e2ed91c9a4e9f14c3f6"},
{file = "duckdb-1.1.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:36d71969cb98d10dc2391d8755921258d197995cc8c69e6c82fc377c2f71940a"},
{file = "duckdb-1.1.1-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:3693f464409379a21aff4e35b5f67eb6c96fc402649d9ffddbda4ee9ee9ba9b6"},
{file = "duckdb-1.1.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:06ca7f4ca785cc86e9f9aa23d16b67b82dc454b14c396b2e0ff4c09698c7838e"},
{file = "duckdb-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ed92f3229bf70897a742e7648f648aa8b0c81a7489072aec5515c5635f3303c"},
{file = "duckdb-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a80ebf52c03f81265b67720abc06a5c7770d08df82b30cabbe266012bd526229"},
{file = "duckdb-1.1.1-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:402a42b992227ebb371a48681ce71b6d1c0661385454b269e6aa379f77a8a83a"},
{file = "duckdb-1.1.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a182d3cbf2e352aaddf392887331bbac460c473cbd55c65d6b6121ef7b43f174"},
{file = "duckdb-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:fafc7d1ec4401787597a5f983d4ef8a9b0638f31e1674a458c57383911166f27"},
{file = "duckdb-1.1.1.tar.gz", hash = "sha256:74fb07c1334a73e0ead1b0a03646d349921dac655762d916c8e45194c8218d30"},
{file = "duckdb-1.1.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:91e7f99cf5cab1d26f92cb014429153497d805e79689baa44f4c4585a8cb243f"},
{file = "duckdb-1.1.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:0107de622fe208142a1108263a03c43956048dcc99be3702d8e5d2aeaf99554c"},
{file = "duckdb-1.1.2-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:8a09610f780857677725897856f8cdf3cafd8a991f871e6cb8ba88b2dbc8d737"},
{file = "duckdb-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f0ddac0482f0f3fece54d720d13819e82ae26c01a939ffa66a87be53f7f665"},
{file = "duckdb-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84103373e818758dfa361d27781d0f096553843c5ffb9193260a0786c5248270"},
{file = "duckdb-1.1.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bfdfd23e2bf58014ad0673973bd0ed88cd048dfe8e82420814a71d7d52ef2288"},
{file = "duckdb-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:25889e6e29b87047b1dd56385ac08156e4713c59326cc6fff89657d01b2c417b"},
{file = "duckdb-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:312570fa5277c3079de18388b86c2d87cbe1044838bb152b235c0227581d5d42"},
{file = "duckdb-1.1.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:568439ea4fce8cb72ec1f767cd510686a9e7e29a011fc7c56d990059a6e94e48"},
{file = "duckdb-1.1.2-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:74974f2d7210623a5d61b1fb0cb589c6e5ffcbf7dbb757a04c5ba24adcfc8cac"},
{file = "duckdb-1.1.2-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:e26422a3358c816d764639070945b73eef55d1b4df990989e3492c85ef725c21"},
{file = "duckdb-1.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87e972bd452eeeab197fe39dcaeecdb7c264b1f75a0ee67e532e235fe45b84df"},
{file = "duckdb-1.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a6b73e70b73c8df85da383f6e557c03cad5c877868b9a7e41715761e8166c1e"},
{file = "duckdb-1.1.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:623cb1952466aae5907af84107bcdec25a5ca021a8b6441e961f41edc724f6f2"},
{file = "duckdb-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9fc0b550f96901fa7e76dc70a13f6477ad3e18ef1cb21d414c3a5569de3f27e"},
{file = "duckdb-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:181edb1973bd8f493bcb6ecfa035f1a592dff4667758592f300619012ba251c0"},
{file = "duckdb-1.1.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:83372b1b411086cac01ab2071122772fa66170b1b41ddbc37527464066083668"},
{file = "duckdb-1.1.2-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:db37441deddfee6ac35a0c742d2f9e90e4e50b9e76d586a060d122b8fc56dada"},
{file = "duckdb-1.1.2-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:19142a77e72874aeaa6fda30aeb13612c6de5e8c60fbcc3392cea6ef0694eeaf"},
{file = "duckdb-1.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:099d99dd48d6e4682a3dd6233ceab73d977ebe1a87afaac54cf77c844e24514a"},
{file = "duckdb-1.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be86e586ca7af7e807f72479a2b8d0983565360b19dbda4ef8a9d7b3909b8e2c"},
{file = "duckdb-1.1.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:578e0953e4d8ba8da0cd69fb2930c45f51ce47d213b77d8a4cd461f9c0960b87"},
{file = "duckdb-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:72b5eb5762c1a5e68849c7143f3b3747a9f15c040e34e41559f233a1569ad16f"},
{file = "duckdb-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:9b4c6b6a08180261d98330d97355503961a25ca31cd9ef296e0681f7895b4a2c"},
{file = "duckdb-1.1.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:695dcbc561374b126e86659709feadf883c9969ed718e94713edd4ba15d16619"},
{file = "duckdb-1.1.2-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:ada29be1e889f486c6cf1f6dffd15463e748faf361f33996f2e862779edc24a9"},
{file = "duckdb-1.1.2-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:6ca722738fa9eb6218619740631de29acfdd132de6f6a6350fee5e291c2f6117"},
{file = "duckdb-1.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c796d33f1e5a0c8c570d22da0c0b1db8578687e427029e1ce2c8ce3f9fffa6a3"},
{file = "duckdb-1.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5c0996988a70dd3bc8111d9b9aeab7e38ed1999a52607c5f1b528e362b4dd1c"},
{file = "duckdb-1.1.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c37b039f6d6fed14d89450f5ccf54922b3304192d7412e12d6cc8d9e757f7a2"},
{file = "duckdb-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e8c766b87f675c76d6d17103bf6fb9fb1a9e2fcb3d9b25c28bbc634bde31223e"},
{file = "duckdb-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:e3e6300b7ccaf64b609f4f0780a6e1d25ab8cf34cceed46e62c35b6c4c5cb63b"},
{file = "duckdb-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a01fae9604a54ecbc26e7503c522311f15afbd2870e6d8f6fbef4545dfae550"},
{file = "duckdb-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:492b1d86a696428bd3f14dc1c7c3230e2dbca8978f288be64b04a26e0e00fad5"},
{file = "duckdb-1.1.2-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bba58459ad897a78c4e478a097626fc266459a40338cecc68a49a8d5dc72fb7"},
{file = "duckdb-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d395a3bf510bf24686821eec15802624797dcb33e8f14f8a7cc8e17d909474af"},
{file = "duckdb-1.1.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:fd800f75728727fe699ed1eb22b636867cf48c9dd105ee88b977e20c89df4509"},
{file = "duckdb-1.1.2-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:d8caaf43909e49537e26df51d80d075ae2b25a610d28ed8bd31d6ccebeaf3c65"},
{file = "duckdb-1.1.2-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:564166811c68d9c7f9911eb707ad32ec9c2507b98336d894fbe658b85bf1c697"},
{file = "duckdb-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19386aa09f0d6f97634ba2972096d1c80d880176dfb0e949eadc91c98262a663"},
{file = "duckdb-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9e8387bcc9a591ad14011ddfec0d408d1d9b1889c6c9b495a04c7016a24b9b3"},
{file = "duckdb-1.1.2-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8c5ff4970403ed3ff0ac71fe0ce1e6be3199df9d542afc84c424b444ba4ffe8"},
{file = "duckdb-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:9283dcca87c3260eb631a99d738fa72b8545ed45b475bc72ad254f7310e14284"},
{file = "duckdb-1.1.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f87edaf20001530e63a4f7bda13b55dc3152d7171226915f2bf34e0813c8759e"},
{file = "duckdb-1.1.2-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:efec169b3fe0b821e3207ba3e445f227d42dd62b4440ff79c37fa168a4fc5a71"},
{file = "duckdb-1.1.2-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:89164a2d29d56605a95ee5032aa415dd487028c4fd3e06d971497840e74c56e7"},
{file = "duckdb-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6858e10c60ff7e70e61d3dd53d2545c8b2609942e45fd6de38cd0dee52932de3"},
{file = "duckdb-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca967c5a57b1d0cb0fd5e539ab24110e5a59dcbedd365bb2dc80533d6e44a8d"},
{file = "duckdb-1.1.2-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4ce949f1d7999aa6a046eb64067eee41d4c5c2872ba4fa408c9947742d0c7231"},
{file = "duckdb-1.1.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ba6d1f918e6ca47a368a0c32806016405cb9beb2c245806b0ca998f569d2bdf"},
{file = "duckdb-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:7111fd3e7b334a7be383313ce29918b7c643e4f6ef44d6d63c3ab3fa6716c114"},
{file = "duckdb-1.1.2.tar.gz", hash = "sha256:c8232861dc8ec6daa29067056d5a0e5789919f2ab22ab792787616d7cd52f02a"},
]
[[package]]
@ -3870,51 +3870,58 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0
[[package]]
name = "httptools"
version = "0.6.1"
version = "0.6.2"
description = "A collection of framework independent HTTP protocol utils."
optional = false
python-versions = ">=3.8.0"
files = [
{file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"},
{file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"},
{file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"},
{file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"},
{file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"},
{file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"},
{file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"},
{file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"},
{file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"},
{file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"},
{file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"},
{file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"},
{file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"},
{file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"},
{file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"},
{file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"},
{file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"},
{file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"},
{file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"},
{file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"},
{file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"},
{file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"},
{file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"},
{file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"},
{file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"},
{file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"},
{file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"},
{file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"},
{file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"},
{file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"},
{file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"},
{file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"},
{file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"},
{file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"},
{file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"},
{file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"},
{file = "httptools-0.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0238f07780782c018e9801d8f5f5aea3a4680a1af132034b444f677718c6fe88"},
{file = "httptools-0.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10d28e5597d4349390c640232c9366ddc15568114f56724fe30a53de9686b6ab"},
{file = "httptools-0.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ddaf99e362ae4169f6a8b3508f3487264e0a1b1e58c0b07b86407bc9ecee831"},
{file = "httptools-0.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc9d039b6b8a36b182bc60774bb5d456b8ff9ec44cf97719f2f38bb1dcdd546"},
{file = "httptools-0.6.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b57cb8a4a8a8ffdaf0395326ef3b9c1aba36e58a421438fc04c002a1f511db63"},
{file = "httptools-0.6.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b73cda1326738eab5d60640ca0b87ac4e4db09a099423c41b59a5681917e8d1d"},
{file = "httptools-0.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:352a496244360deb1c1d108391d76cd6f3dd9f53ccf975a082e74c6761af30c9"},
{file = "httptools-0.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2e9d225b178a6cc700c23cf2f5daf85a10f93f1db7c34e9ee4ee0bbc29ad458a"},
{file = "httptools-0.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49b14fcc9b12a52da8667587efa124a18e1a3eb63bbbcabf9882f4008d171d6"},
{file = "httptools-0.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d5c33d98b2311ddbe06e92b12b14de334dcfbe64ebcbb2c7a34b5c6036db512"},
{file = "httptools-0.6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53cd2d776700bf0ed0e6fb203d716b041712ea4906479031cc5ac5421ecaa7d2"},
{file = "httptools-0.6.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7da016a0dab1fcced89dfff8537033c5dc200015e14023368f3f4a69e39b8716"},
{file = "httptools-0.6.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d6e0ba155a1b3159551ac6b4551eb20028617e2e4bb71f2c61efed0756e6825"},
{file = "httptools-0.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:ad44569b0f508e046ffe85b4a547d5b68d1548fd90767df69449cc28021ee709"},
{file = "httptools-0.6.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c92d2b7c1a914ab2f66454961eeaf904f4fe7529b93ff537619d22c18b82d070"},
{file = "httptools-0.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f920a75c1dbcb5a48a495f384d73ceb41e437a966c318eb7e56f1c1ad1df3e"},
{file = "httptools-0.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56bcd9ba0adf16edb4e3e45b8b9346f5b3b2372402e953d54c84b345d0f691e0"},
{file = "httptools-0.6.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e350a887adb38ac65c93c2f395b60cf482baca61fd396ed8d6fd313dbcce6fac"},
{file = "httptools-0.6.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ddc328c2a2daf2cf4bdc7bbc8a458dc4c840637223d4b8e01bce2168cc79fd23"},
{file = "httptools-0.6.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ddaf38943dbb32333a182c894b6092a68b56c5e36d0c54ba3761d28119b15447"},
{file = "httptools-0.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:052f7f50e4a38f069478143878371ed17937f268349bcd68f6f7a9de9fcfce21"},
{file = "httptools-0.6.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:406f7dc5d9db68cd9ac638d14c74d077085f76b45f704d3ec38d43b842b3cb44"},
{file = "httptools-0.6.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:77e22c33123ce11231ff2773d8905e20b45d77a69459def7481283b72a583955"},
{file = "httptools-0.6.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41965586b02715c3d83dd9153001f654e5b621de0c5255f5ef0635485212d0c0"},
{file = "httptools-0.6.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93b1839d54b80a06a51a31b90d024a1770e250d00de57e7ae069bafba932f398"},
{file = "httptools-0.6.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8fdb4634040d1dbde7e0b373e19668cdb61c0ee8690d3b4064ac748d85365bca"},
{file = "httptools-0.6.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c30902f9b9da0d74668b6f71d7b57081a4879d9a5ea93d5922dbe15b15b3b24a"},
{file = "httptools-0.6.2-cp313-cp313-win_amd64.whl", hash = "sha256:cf61238811a75335751b4b17f8b221a35f93f2d57489296742adf98412d2a568"},
{file = "httptools-0.6.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8d80878cb40ebf88a48839ff7206ceb62e4b54327e0c2f9f15ee12edbd8b907e"},
{file = "httptools-0.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5141ccc9dbd8cdc59d1e93e318d405477a940dc6ebadcb8d9f8da17d2812d353"},
{file = "httptools-0.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb67d47f045f56e9a5da4deccf710bdde21212e4b1f4776b7a542449f6a7682"},
{file = "httptools-0.6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76dcb8f5c866f1537ccbaad01ebb3611890d281ef8d25e050d1cc3d90fba6b3d"},
{file = "httptools-0.6.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1b7bc59362143dc2d02896dde94004ef54ff1989ceedf4b389ad3b530f312364"},
{file = "httptools-0.6.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c7a5715b1f46e9852442f496c0df2f8c393cc8f293f5396d2c8d95cac852fb51"},
{file = "httptools-0.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:3f0246ca7f78fa8e3902ddb985b9f55509d417a862f4634a8fa63a7a496266c8"},
{file = "httptools-0.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1099f73952e18c718ccaaf7a97ae58c94a91839c3d247c6184326f85a2eda7b4"},
{file = "httptools-0.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3e45d004531330030f7d07abe4865bc17963b9989bc1941cebbf7224010fb82"},
{file = "httptools-0.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f2fea370361a90cb9330610a95303587eda9d1e69930dbbee9978eac1d5946"},
{file = "httptools-0.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0481154c91725f7e7b729a535190388be6c7cbae3bbf0e793343ca386282312"},
{file = "httptools-0.6.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d25f8fdbc6cc6561353c7a384d76295e6a85a4945115b8bc347855db150e8c77"},
{file = "httptools-0.6.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:054bdee08e4f7c15c186f6e7dbc8f0cf974b8dd1832b5f17f988faf8b12815c9"},
{file = "httptools-0.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:4502620722b453c2c6306fad392c515dcb804dfa9c6d3b90d8926a07a7a01109"},
{file = "httptools-0.6.2.tar.gz", hash = "sha256:ae694efefcb61317c79b2fa1caebc122060992408e389bb00889567e463a47f1"},
]
[package.extras]
test = ["Cython (>=0.29.24,<0.30.0)"]
test = ["Cython (>=0.29.24)"]
[[package]]
name = "httpx"
@ -4122,6 +4129,88 @@ MarkupSafe = ">=2.0"
[package.extras]
i18n = ["Babel (>=2.7)"]
[[package]]
name = "jiter"
version = "0.6.1"
description = "Fast iterable JSON parser."
optional = false
python-versions = ">=3.8"
files = [
{file = "jiter-0.6.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d08510593cb57296851080018006dfc394070178d238b767b1879dc1013b106c"},
{file = "jiter-0.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adef59d5e2394ebbad13b7ed5e0306cceb1df92e2de688824232a91588e77aa7"},
{file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3e02f7a27f2bcc15b7d455c9df05df8ffffcc596a2a541eeda9a3110326e7a3"},
{file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed69a7971d67b08f152c17c638f0e8c2aa207e9dd3a5fcd3cba294d39b5a8d2d"},
{file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2019d966e98f7c6df24b3b8363998575f47d26471bfb14aade37630fae836a1"},
{file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36c0b51a285b68311e207a76c385650322734c8717d16c2eb8af75c9d69506e7"},
{file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:220e0963b4fb507c525c8f58cde3da6b1be0bfddb7ffd6798fb8f2531226cdb1"},
{file = "jiter-0.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa25c7a9bf7875a141182b9c95aed487add635da01942ef7ca726e42a0c09058"},
{file = "jiter-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e90552109ca8ccd07f47ca99c8a1509ced93920d271bb81780a973279974c5ab"},
{file = "jiter-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:67723a011964971864e0b484b0ecfee6a14de1533cff7ffd71189e92103b38a8"},
{file = "jiter-0.6.1-cp310-none-win32.whl", hash = "sha256:33af2b7d2bf310fdfec2da0177eab2fedab8679d1538d5b86a633ebfbbac4edd"},
{file = "jiter-0.6.1-cp310-none-win_amd64.whl", hash = "sha256:7cea41c4c673353799906d940eee8f2d8fd1d9561d734aa921ae0f75cb9732f4"},
{file = "jiter-0.6.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b03c24e7da7e75b170c7b2b172d9c5e463aa4b5c95696a368d52c295b3f6847f"},
{file = "jiter-0.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:47fee1be677b25d0ef79d687e238dc6ac91a8e553e1a68d0839f38c69e0ee491"},
{file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0d2f6e01a8a0fb0eab6d0e469058dab2be46ff3139ed2d1543475b5a1d8e7"},
{file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b809e39e342c346df454b29bfcc7bca3d957f5d7b60e33dae42b0e5ec13e027"},
{file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e9ac7c2f092f231f5620bef23ce2e530bd218fc046098747cc390b21b8738a7a"},
{file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e51a2d80d5fe0ffb10ed2c82b6004458be4a3f2b9c7d09ed85baa2fbf033f54b"},
{file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3343d4706a2b7140e8bd49b6c8b0a82abf9194b3f0f5925a78fc69359f8fc33c"},
{file = "jiter-0.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82521000d18c71e41c96960cb36e915a357bc83d63a8bed63154b89d95d05ad1"},
{file = "jiter-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c843e7c1633470708a3987e8ce617ee2979ee18542d6eb25ae92861af3f1d62"},
{file = "jiter-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a2e861658c3fe849efc39b06ebb98d042e4a4c51a8d7d1c3ddc3b1ea091d0784"},
{file = "jiter-0.6.1-cp311-none-win32.whl", hash = "sha256:7d72fc86474862c9c6d1f87b921b70c362f2b7e8b2e3c798bb7d58e419a6bc0f"},
{file = "jiter-0.6.1-cp311-none-win_amd64.whl", hash = "sha256:3e36a320634f33a07794bb15b8da995dccb94f944d298c8cfe2bd99b1b8a574a"},
{file = "jiter-0.6.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1fad93654d5a7dcce0809aff66e883c98e2618b86656aeb2129db2cd6f26f867"},
{file = "jiter-0.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4e6e340e8cd92edab7f6a3a904dbbc8137e7f4b347c49a27da9814015cc0420c"},
{file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:691352e5653af84ed71763c3c427cff05e4d658c508172e01e9c956dfe004aba"},
{file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:defee3949313c1f5b55e18be45089970cdb936eb2a0063f5020c4185db1b63c9"},
{file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26d2bdd5da097e624081c6b5d416d3ee73e5b13f1703bcdadbb1881f0caa1933"},
{file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18aa9d1626b61c0734b973ed7088f8a3d690d0b7f5384a5270cd04f4d9f26c86"},
{file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a3567c8228afa5ddcce950631c6b17397ed178003dc9ee7e567c4c4dcae9fa0"},
{file = "jiter-0.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5c0507131c922defe3f04c527d6838932fcdfd69facebafd7d3574fa3395314"},
{file = "jiter-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:540fcb224d7dc1bcf82f90f2ffb652df96f2851c031adca3c8741cb91877143b"},
{file = "jiter-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e7b75436d4fa2032b2530ad989e4cb0ca74c655975e3ff49f91a1a3d7f4e1df2"},
{file = "jiter-0.6.1-cp312-none-win32.whl", hash = "sha256:883d2ced7c21bf06874fdeecab15014c1c6d82216765ca6deef08e335fa719e0"},
{file = "jiter-0.6.1-cp312-none-win_amd64.whl", hash = "sha256:91e63273563401aadc6c52cca64a7921c50b29372441adc104127b910e98a5b6"},
{file = "jiter-0.6.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:852508a54fe3228432e56019da8b69208ea622a3069458252f725d634e955b31"},
{file = "jiter-0.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f491cc69ff44e5a1e8bc6bf2b94c1f98d179e1aaf4a554493c171a5b2316b701"},
{file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc56c8f0b2a28ad4d8047f3ae62d25d0e9ae01b99940ec0283263a04724de1f3"},
{file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51b58f7a0d9e084a43b28b23da2b09fc5e8df6aa2b6a27de43f991293cab85fd"},
{file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f79ce15099154c90ef900d69c6b4c686b64dfe23b0114e0971f2fecd306ec6c"},
{file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:03a025b52009f47e53ea619175d17e4ded7c035c6fbd44935cb3ada11e1fd592"},
{file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c74a8d93718137c021d9295248a87c2f9fdc0dcafead12d2930bc459ad40f885"},
{file = "jiter-0.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40b03b75f903975f68199fc4ec73d546150919cb7e534f3b51e727c4d6ccca5a"},
{file = "jiter-0.6.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:825651a3f04cf92a661d22cad61fc913400e33aa89b3e3ad9a6aa9dc8a1f5a71"},
{file = "jiter-0.6.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:928bf25eb69ddb292ab8177fe69d3fbf76c7feab5fce1c09265a7dccf25d3991"},
{file = "jiter-0.6.1-cp313-none-win32.whl", hash = "sha256:352cd24121e80d3d053fab1cc9806258cad27c53cad99b7a3cac57cf934b12e4"},
{file = "jiter-0.6.1-cp313-none-win_amd64.whl", hash = "sha256:be7503dd6f4bf02c2a9bacb5cc9335bc59132e7eee9d3e931b13d76fd80d7fda"},
{file = "jiter-0.6.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:31d8e00e1fb4c277df8ab6f31a671f509ebc791a80e5c61fdc6bc8696aaa297c"},
{file = "jiter-0.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77c296d65003cd7ee5d7b0965f6acbe6cffaf9d1fa420ea751f60ef24e85fed5"},
{file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeeb0c0325ef96c12a48ea7e23e2e86fe4838e6e0a995f464cf4c79fa791ceeb"},
{file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a31c6fcbe7d6c25d6f1cc6bb1cba576251d32795d09c09961174fe461a1fb5bd"},
{file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59e2b37f3b9401fc9e619f4d4badcab2e8643a721838bcf695c2318a0475ae42"},
{file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bae5ae4853cb9644144e9d0755854ce5108d470d31541d83f70ca7ecdc2d1637"},
{file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9df588e9c830b72d8db1dd7d0175af6706b0904f682ea9b1ca8b46028e54d6e9"},
{file = "jiter-0.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15f8395e835cf561c85c1adee72d899abf2733d9df72e9798e6d667c9b5c1f30"},
{file = "jiter-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a99d4e0b5fc3b05ea732d67eb2092fe894e95a90e6e413f2ea91387e228a307"},
{file = "jiter-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a311df1fa6be0ccd64c12abcd85458383d96e542531bafbfc0a16ff6feda588f"},
{file = "jiter-0.6.1-cp38-none-win32.whl", hash = "sha256:81116a6c272a11347b199f0e16b6bd63f4c9d9b52bc108991397dd80d3c78aba"},
{file = "jiter-0.6.1-cp38-none-win_amd64.whl", hash = "sha256:13f9084e3e871a7c0b6e710db54444088b1dd9fbefa54d449b630d5e73bb95d0"},
{file = "jiter-0.6.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f1c53615fcfec3b11527c08d19cff6bc870da567ce4e57676c059a3102d3a082"},
{file = "jiter-0.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f791b6a4da23238c17a81f44f5b55d08a420c5692c1fda84e301a4b036744eb1"},
{file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c97e90fec2da1d5f68ef121444c2c4fa72eabf3240829ad95cf6bbeca42a301"},
{file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3cbc1a66b4e41511209e97a2866898733c0110b7245791ac604117b7fb3fedb7"},
{file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4e85f9e12cd8418ab10e1fcf0e335ae5bb3da26c4d13a0fd9e6a17a674783b6"},
{file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08be33db6dcc374c9cc19d3633af5e47961a7b10d4c61710bd39e48d52a35824"},
{file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:677be9550004f5e010d673d3b2a2b815a8ea07a71484a57d3f85dde7f14cf132"},
{file = "jiter-0.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8bd065be46c2eecc328e419d6557bbc37844c88bb07b7a8d2d6c91c7c4dedc9"},
{file = "jiter-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bd95375ce3609ec079a97c5d165afdd25693302c071ca60c7ae1cf826eb32022"},
{file = "jiter-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db459ed22d0208940d87f614e1f0ea5a946d29a3cfef71f7e1aab59b6c6b2afb"},
{file = "jiter-0.6.1-cp39-none-win32.whl", hash = "sha256:d71c962f0971347bd552940ab96aa42ceefcd51b88c4ced8a27398182efa8d80"},
{file = "jiter-0.6.1-cp39-none-win_amd64.whl", hash = "sha256:d465db62d2d10b489b7e7a33027c4ae3a64374425d757e963f86df5b5f2e7fc5"},
{file = "jiter-0.6.1.tar.gz", hash = "sha256:e19cd21221fc139fb032e4112986656cb2739e9fe6d84c13956ab30ccc7d4449"},
]
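
jiter is a new entry here because the openai bump below pins jiter >=0.4.0,<1 as a runtime dependency. A minimal standalone check of the resolved wheel (from_json is jiter's bytes-parsing entry point; the payload is illustrative):

import jiter

data = jiter.from_json(b'{"model": "gpt-4o", "ok": true}')
assert data["ok"] is True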
[[package]]
name = "jmespath"
version = "0.10.0"
@ -4446,13 +4535,13 @@ openai = ["openai (>=0.27.8)"]
[[package]]
name = "langsmith"
version = "0.1.134"
version = "0.1.135"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = "<4.0,>=3.8.1"
files = [
{file = "langsmith-0.1.134-py3-none-any.whl", hash = "sha256:ada98ad80ef38807725f32441a472da3dd28394010877751f48f458d3289da04"},
{file = "langsmith-0.1.134.tar.gz", hash = "sha256:23abee3b508875a0e63c602afafffc02442a19cfd88f9daae05b3e9054fd6b61"},
{file = "langsmith-0.1.135-py3-none-any.whl", hash = "sha256:b1d1ca3bad483a4239745c57e9b9157b4d099fbf3149be21e3d112c94ede06ac"},
{file = "langsmith-0.1.135.tar.gz", hash = "sha256:7abed7e141386af99a2177f0b3600b124ae3ad1b482879ba0724ce92ef998a11"},
]
[package.dependencies]
@ -5673,23 +5762,24 @@ sympy = "*"
[[package]]
name = "openai"
version = "1.29.0"
version = "1.51.2"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.7.1"
files = [
{file = "openai-1.29.0-py3-none-any.whl", hash = "sha256:c61cd12376c84362d406341f9e2f9a9d6b81c082b133b44484dc0f43954496b1"},
{file = "openai-1.29.0.tar.gz", hash = "sha256:d5a769f485610cff8bae14343fa45a8b1d346be3d541fa5b28ccd040dbc8baf8"},
{file = "openai-1.51.2-py3-none-any.whl", hash = "sha256:5c5954711cba931423e471c37ff22ae0fd3892be9b083eee36459865fbbb83fa"},
{file = "openai-1.51.2.tar.gz", hash = "sha256:c6a51fac62a1ca9df85a522e462918f6bb6bc51a8897032217e453a0730123a6"},
]
[package.dependencies]
anyio = ">=3.5.0,<5"
distro = ">=1.7.0,<2"
httpx = ">=0.23.0,<1"
jiter = ">=0.4.0,<1"
pydantic = ">=1.9.0,<3"
sniffio = "*"
tqdm = ">4"
typing-extensions = ">=4.7,<5"
typing-extensions = ">=4.11,<5"
[package.extras]
datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
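
The jump from openai 1.29.0 to 1.51.2 stays on the v1 client surface, so existing OpenAI-class call sites should keep working; a hedged smoke test (model name and API key are placeholders, not values from this repo):

from openai import OpenAI

client = OpenAI(api_key="sk-placeholder")  # placeholder credential
resp = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[{"role": "user", "content": "ping"}],
)
print(resp.choices[0].message.content)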
@ -8468,6 +8558,11 @@ files = [
{file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"},
{file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"},
{file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"},
{file = "scikit_learn-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5"},
{file = "scikit_learn-1.5.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908"},
{file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3"},
{file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12"},
{file = "scikit_learn-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f"},
{file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"},
{file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"},
{file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"},
@ -9036,13 +9131,13 @@ test = ["pytest", "tornado (>=4.5)", "typeguard"]
[[package]]
name = "tencentcloud-sdk-python-common"
version = "3.0.1249"
version = "3.0.1250"
description = "Tencent Cloud Common SDK for Python"
optional = false
python-versions = "*"
files = [
{file = "tencentcloud-sdk-python-common-3.0.1249.tar.gz", hash = "sha256:4e5750c57d3b885555da1662b2005fb4d7d6f9eb37a5a4cb5440b48c5e51ec8d"},
{file = "tencentcloud_sdk_python_common-3.0.1249-py2.py3-none-any.whl", hash = "sha256:f3a646c380062f41503be8b6e70ed242035e799176d73dcb51d804dd1c1eca2a"},
{file = "tencentcloud-sdk-python-common-3.0.1250.tar.gz", hash = "sha256:97c15c3f2ffbde60550656eab3e9337d9e0ec8958a533f223c5d5caa2762b6e9"},
{file = "tencentcloud_sdk_python_common-3.0.1250-py2.py3-none-any.whl", hash = "sha256:e369dee2d920ee365a8e2d314d563d243f2e73f5bc6bd2886f96534c9d00c3a7"},
]
[package.dependencies]
@ -9050,17 +9145,17 @@ requests = ">=2.16.0"
[[package]]
name = "tencentcloud-sdk-python-hunyuan"
version = "3.0.1249"
version = "3.0.1250"
description = "Tencent Cloud Hunyuan SDK for Python"
optional = false
python-versions = "*"
files = [
{file = "tencentcloud-sdk-python-hunyuan-3.0.1249.tar.gz", hash = "sha256:6fb2f2a0386566d662c614d2631235ab6b4d7cc7fe91afe3f92292ad51648793"},
{file = "tencentcloud_sdk_python_hunyuan-3.0.1249-py2.py3-none-any.whl", hash = "sha256:105806bfab50a6f9cd759bb0ccb5244bbc3f97b5a7d5af0d53badbc0d24f59ee"},
{file = "tencentcloud-sdk-python-hunyuan-3.0.1250.tar.gz", hash = "sha256:ac95085edee2a95c69326b2fd6a0f61116fc5d214d5c8cf14a1b42bbb262dba8"},
{file = "tencentcloud_sdk_python_hunyuan-3.0.1250-py2.py3-none-any.whl", hash = "sha256:caac95c47348639452a78d39cdcb87257f97cec3b52398e3be97a5b8c4c5e496"},
]
[package.dependencies]
tencentcloud-sdk-python-common = "3.0.1249"
tencentcloud-sdk-python-common = "3.0.1250"
[[package]]
name = "threadpoolctl"
@ -9092,47 +9187,42 @@ client = ["SQLAlchemy (>=1.4,<3)"]
[[package]]
name = "tiktoken"
version = "0.7.0"
version = "0.8.0"
description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
files = [
{file = "tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f"},
{file = "tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225"},
{file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590"},
{file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c"},
{file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311"},
{file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5"},
{file = "tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702"},
{file = "tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f"},
{file = "tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f"},
{file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b"},
{file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992"},
{file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1"},
{file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89"},
{file = "tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb"},
{file = "tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908"},
{file = "tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410"},
{file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704"},
{file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350"},
{file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4"},
{file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97"},
{file = "tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f"},
{file = "tiktoken-0.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2398fecd38c921bcd68418675a6d155fad5f5e14c2e92fcf5fe566fa5485a858"},
{file = "tiktoken-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f5f6afb52fb8a7ea1c811e435e4188f2bef81b5e0f7a8635cc79b0eef0193d6"},
{file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:861f9ee616766d736be4147abac500732b505bf7013cfaf019b85892637f235e"},
{file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54031f95c6939f6b78122c0aa03a93273a96365103793a22e1793ee86da31685"},
{file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fffdcb319b614cf14f04d02a52e26b1d1ae14a570f90e9b55461a72672f7b13d"},
{file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c72baaeaefa03ff9ba9688624143c858d1f6b755bb85d456d59e529e17234769"},
{file = "tiktoken-0.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:131b8aeb043a8f112aad9f46011dced25d62629091e51d9dc1adbf4a1cc6aa98"},
{file = "tiktoken-0.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cabc6dc77460df44ec5b879e68692c63551ae4fae7460dd4ff17181df75f1db7"},
{file = "tiktoken-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8d57f29171255f74c0aeacd0651e29aa47dff6f070cb9f35ebc14c82278f3b25"},
{file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ee92776fdbb3efa02a83f968c19d4997a55c8e9ce7be821ceee04a1d1ee149c"},
{file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e215292e99cb41fbc96988ef62ea63bb0ce1e15f2c147a61acc319f8b4cbe5bf"},
{file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a81bac94769cab437dd3ab0b8a4bc4e0f9cf6835bcaa88de71f39af1791727a"},
{file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d6d73ea93e91d5ca771256dfc9d1d29f5a554b83821a1dc0891987636e0ae226"},
{file = "tiktoken-0.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:2bcb28ddf79ffa424f171dfeef9a4daff61a94c631ca6813f43967cb263b83b9"},
{file = "tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6"},
{file = "tiktoken-0.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b07e33283463089c81ef1467180e3e00ab00d46c2c4bbcef0acab5f771d6695e"},
{file = "tiktoken-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9269348cb650726f44dd3bbb3f9110ac19a8dcc8f54949ad3ef652ca22a38e21"},
{file = "tiktoken-0.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e13f37bc4ef2d012731e93e0fef21dc3b7aea5bb9009618de9a4026844e560"},
{file = "tiktoken-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f13d13c981511331eac0d01a59b5df7c0d4060a8be1e378672822213da51e0a2"},
{file = "tiktoken-0.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6b2ddbc79a22621ce8b1166afa9f9a888a664a579350dc7c09346a3b5de837d9"},
{file = "tiktoken-0.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d8c2d0e5ba6453a290b86cd65fc51fedf247e1ba170191715b049dac1f628005"},
{file = "tiktoken-0.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d622d8011e6d6f239297efa42a2657043aaed06c4f68833550cac9e9bc723ef1"},
{file = "tiktoken-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2efaf6199717b4485031b4d6edb94075e4d79177a172f38dd934d911b588d54a"},
{file = "tiktoken-0.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5637e425ce1fc49cf716d88df3092048359a4b3bbb7da762840426e937ada06d"},
{file = "tiktoken-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fb0e352d1dbe15aba082883058b3cce9e48d33101bdaac1eccf66424feb5b47"},
{file = "tiktoken-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56edfefe896c8f10aba372ab5706b9e3558e78db39dd497c940b47bf228bc419"},
{file = "tiktoken-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:326624128590def898775b722ccc327e90b073714227175ea8febbc920ac0a99"},
{file = "tiktoken-0.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:881839cfeae051b3628d9823b2e56b5cc93a9e2efb435f4cf15f17dc45f21586"},
{file = "tiktoken-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fe9399bdc3f29d428f16a2f86c3c8ec20be3eac5f53693ce4980371c3245729b"},
{file = "tiktoken-0.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a58deb7075d5b69237a3ff4bb51a726670419db6ea62bdcd8bd80c78497d7ab"},
{file = "tiktoken-0.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2908c0d043a7d03ebd80347266b0e58440bdef5564f84f4d29fb235b5df3b04"},
{file = "tiktoken-0.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:294440d21a2a51e12d4238e68a5972095534fe9878be57d905c476017bff99fc"},
{file = "tiktoken-0.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:d8f3192733ac4d77977432947d563d7e1b310b96497acd3c196c9bddb36ed9db"},
{file = "tiktoken-0.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:02be1666096aff7da6cbd7cdaa8e7917bfed3467cd64b38b1f112e96d3b06a24"},
{file = "tiktoken-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c94ff53c5c74b535b2cbf431d907fc13c678bbd009ee633a2aca269a04389f9a"},
{file = "tiktoken-0.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b231f5e8982c245ee3065cd84a4712d64692348bc609d84467c57b4b72dcbc5"},
{file = "tiktoken-0.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4177faa809bd55f699e88c96d9bb4635d22e3f59d635ba6fd9ffedf7150b9953"},
{file = "tiktoken-0.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5376b6f8dc4753cd81ead935c5f518fa0fbe7e133d9e25f648d8c4dabdd4bad7"},
{file = "tiktoken-0.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:18228d624807d66c87acd8f25fc135665617cab220671eb65b50f5d70fa51f69"},
{file = "tiktoken-0.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17807445f0cf1f25771c9d86496bd8b5c376f7419912519699f3cc4dc5c12e"},
{file = "tiktoken-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:886f80bd339578bbdba6ed6d0567a0d5c6cfe198d9e587ba6c447654c65b8edc"},
{file = "tiktoken-0.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6adc8323016d7758d6de7313527f755b0fc6c72985b7d9291be5d96d73ecd1e1"},
{file = "tiktoken-0.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b591fb2b30d6a72121a80be24ec7a0e9eb51c5500ddc7e4c2496516dd5e3816b"},
{file = "tiktoken-0.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:845287b9798e476b4d762c3ebda5102be87ca26e5d2c9854002825d60cdb815d"},
{file = "tiktoken-0.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:1473cfe584252dc3fa62adceb5b1c763c1874e04511b197da4e6de51d6ce5a02"},
{file = "tiktoken-0.8.0.tar.gz", hash = "sha256:9ccbb2740f24542534369c5635cfd9b2b3c2490754a78ac8831d99f89f94eeb2"},
]
[package.dependencies]
@ -9738,47 +9828,54 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)",
[[package]]
name = "uvloop"
version = "0.20.0"
version = "0.21.0"
description = "Fast implementation of asyncio event loop on top of libuv"
optional = false
python-versions = ">=3.8.0"
files = [
{file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996"},
{file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b"},
{file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10"},
{file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae"},
{file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006"},
{file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73"},
{file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037"},
{file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9"},
{file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e"},
{file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756"},
{file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0"},
{file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf"},
{file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"},
{file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"},
{file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"},
{file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"},
{file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"},
{file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"},
{file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba"},
{file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf"},
{file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847"},
{file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b"},
{file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6"},
{file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2"},
{file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95"},
{file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7"},
{file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a"},
{file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541"},
{file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315"},
{file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66"},
{file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"},
{file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"},
{file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"},
{file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"},
{file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"},
{file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"},
{file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"},
{file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"},
{file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"},
{file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"},
{file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"},
{file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"},
{file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"},
{file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"},
{file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"},
{file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"},
{file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"},
{file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"},
{file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"},
{file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"},
{file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"},
{file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"},
{file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"},
{file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"},
{file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"},
{file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"},
{file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"},
{file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"},
{file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"},
{file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"},
{file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"},
{file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"},
{file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"},
{file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"},
{file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"},
{file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"},
{file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"},
{file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"},
]
[package.extras]
dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"]
docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"]
test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"]
[[package]]
name = "validators"
@ -10687,4 +10784,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
content-hash = "75a7e7eab36b9386c11a3e9808da28102ad20a43a0e8ae08c37594ecf50da02b"
content-hash = "51f048197baebf9ffdc393e5990b9a90185bc5ff515b8b5d2d9b72de900cf6e2"

View File

@ -86,10 +86,19 @@ ignore = [
]
"tests/*" = [
"F811", # redefined-while-unused
"F401", # unused-import
"PT001", # missing-function-docstring
"PT004", # missing-parameter-docstring
]
"core/rag/extractor/word_extractor.py" = [
"RUF100", # Unused `noqa` directive
]
"core/tools/provider/builtin/gitlab/tools/gitlab_commits.py" = [
"PLR1714", # Consider merging multiple comparisons
]
[tool.ruff.lint.pyflakes]
allowed-unused-imports=[
extend-generics=[
"_pytest.monkeypatch",
"tests.integration_tests",
]
@ -149,7 +158,7 @@ nomic = "~3.1.2"
novita-client = "~0.5.7"
numpy = "~1.26.4"
oci = "~2.135.1"
openai = "~1.29.0"
openai = "~1.51.2"
openpyxl = "~3.1.5"
pandas = { version = "~2.2.2", extras = ["performance", "excel"] }
psycopg2-binary = "~2.9.6"
@ -172,7 +181,7 @@ scikit-learn = "~1.5.1"
sentry-sdk = { version = "~1.44.1", extras = ["flask"] }
sqlalchemy = "~2.0.29"
tencentcloud-sdk-python-hunyuan = "~3.0.1158"
tiktoken = "~0.7.0"
tiktoken = "~0.8.0"
tokenizers = "~0.15.0"
transformers = "~4.35.0"
unstructured = { version = "~0.10.27", extras = ["docx", "epub", "md", "msg", "ppt", "pptx"] }
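
The tiktoken constraint moves from ~0.7.0 to ~0.8.0, which (per the lock entry above) drops Python 3.8 wheels but keeps the encoding API; a quick round-trip check under that assumption:

import tiktoken

enc = tiktoken.get_encoding("cl100k_base")
tokens = enc.encode("hello world")
assert enc.decode(tokens) == "hello world"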

View File

@ -9,15 +9,19 @@ import app
from configs import dify_config
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import Dataset, DatasetQuery, Document
from services.feature_service import FeatureService
@app.celery.task(queue="dataset")
def clean_unused_datasets_task():
click.echo(click.style("Start clean unused datasets indexes.", fg="green"))
clean_days = dify_config.CLEAN_DAY_SETTING
plan_sandbox_clean_day_setting = dify_config.PLAN_SANDBOX_CLEAN_DAY_SETTING
plan_pro_clean_day_setting = dify_config.PLAN_PRO_CLEAN_DAY_SETTING
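    # Two cleanup cutoffs are derived below: one window for sandbox-plan datasets, another for pro/team plans.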
start_at = time.perf_counter()
thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=clean_days)
plan_sandbox_clean_day = datetime.datetime.now() - datetime.timedelta(days=plan_sandbox_clean_day_setting)
plan_pro_clean_day = datetime.datetime.now() - datetime.timedelta(days=plan_pro_clean_day_setting)
page = 1
while True:
try:
@ -28,7 +32,7 @@ def clean_unused_datasets_task():
Document.indexing_status == "completed",
Document.enabled == True,
Document.archived == False,
Document.updated_at > thirty_days_ago,
Document.updated_at > plan_sandbox_clean_day,
)
.group_by(Document.dataset_id)
.subquery()
@ -41,7 +45,7 @@ def clean_unused_datasets_task():
Document.indexing_status == "completed",
Document.enabled == True,
Document.archived == False,
Document.updated_at < thirty_days_ago,
Document.updated_at < plan_sandbox_clean_day,
)
.group_by(Document.dataset_id)
.subquery()
@ -53,7 +57,7 @@ def clean_unused_datasets_task():
.outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id)
.outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id)
.filter(
Dataset.created_at < thirty_days_ago,
Dataset.created_at < plan_sandbox_clean_day,
func.coalesce(document_subquery_new.c.document_count, 0) == 0,
func.coalesce(document_subquery_old.c.document_count, 0) > 0,
)
@ -69,7 +73,7 @@ def clean_unused_datasets_task():
for dataset in datasets:
dataset_query = (
db.session.query(DatasetQuery)
.filter(DatasetQuery.created_at > thirty_days_ago, DatasetQuery.dataset_id == dataset.id)
.filter(DatasetQuery.created_at > plan_sandbox_clean_day, DatasetQuery.dataset_id == dataset.id)
.all()
)
if not dataset_query or len(dataset_query) == 0:
@ -88,5 +92,84 @@ def clean_unused_datasets_task():
click.echo(
click.style("clean dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red")
)
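    # Second pass: repeat the sweep against the pro/team cutoff (plan_pro_clean_day).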
page = 1
while True:
try:
# Subquery for counting new documents
document_subquery_new = (
db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
.filter(
Document.indexing_status == "completed",
Document.enabled == True,
Document.archived == False,
Document.updated_at > plan_pro_clean_day,
)
.group_by(Document.dataset_id)
.subquery()
)
# Subquery for counting old documents
document_subquery_old = (
db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
.filter(
Document.indexing_status == "completed",
Document.enabled == True,
Document.archived == False,
Document.updated_at < plan_pro_clean_day,
)
.group_by(Document.dataset_id)
.subquery()
)
# Main query with join and filter
datasets = (
db.session.query(Dataset)
.outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id)
.outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id)
.filter(
Dataset.created_at < plan_pro_clean_day,
func.coalesce(document_subquery_new.c.document_count, 0) == 0,
func.coalesce(document_subquery_old.c.document_count, 0) > 0,
)
.order_by(Dataset.created_at.desc())
.paginate(page=page, per_page=50)
)
except NotFound:
break
if datasets.items is None or len(datasets.items) == 0:
break
page += 1
for dataset in datasets:
dataset_query = (
db.session.query(DatasetQuery)
.filter(DatasetQuery.created_at > plan_pro_clean_day, DatasetQuery.dataset_id == dataset.id)
.all()
)
if not dataset_query or len(dataset_query) == 0:
try:
features_cache_key = f"features:{dataset.tenant_id}"
plan = redis_client.get(features_cache_key)
if plan is None:
features = FeatureService.get_features(dataset.tenant_id)
redis_client.setex(features_cache_key, 600, features.billing.subscription.plan)
plan = features.billing.subscription.plan
if plan == "sandbox":
# remove index
index_processor = IndexProcessorFactory(dataset.doc_form).init_index_processor()
index_processor.clean(dataset, None)
# update document
update_params = {Document.enabled: False}
Document.query.filter_by(dataset_id=dataset.id).update(update_params)
db.session.commit()
click.echo(
click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green")
)
except Exception as e:
click.echo(
click.style("clean dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red")
)
end_at = time.perf_counter()
click.echo(click.style("Cleaned unused dataset from db success latency: {}".format(end_at - start_at), fg="green"))

View File

@ -1,92 +0,0 @@
import datetime
import time
import click
from sqlalchemy import func
from werkzeug.exceptions import NotFound
import app
from configs import dify_config
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from models.dataset import Dataset, DatasetQuery, Document
@app.celery.task(queue="dataset")
def clean_unused_message_task():
click.echo(click.style("Start clean unused messages .", fg="green"))
clean_days = int(dify_config.CLEAN_DAY_SETTING)
start_at = time.perf_counter()
thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=clean_days)
page = 1
while True:
try:
# Subquery for counting new documents
document_subquery_new = (
db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
.filter(
Document.indexing_status == "completed",
Document.enabled == True,
Document.archived == False,
Document.updated_at > thirty_days_ago,
)
.group_by(Document.dataset_id)
.subquery()
)
# Subquery for counting old documents
document_subquery_old = (
db.session.query(Document.dataset_id, func.count(Document.id).label("document_count"))
.filter(
Document.indexing_status == "completed",
Document.enabled == True,
Document.archived == False,
Document.updated_at < thirty_days_ago,
)
.group_by(Document.dataset_id)
.subquery()
)
# Main query with join and filter
datasets = (
db.session.query(Dataset)
.outerjoin(document_subquery_new, Dataset.id == document_subquery_new.c.dataset_id)
.outerjoin(document_subquery_old, Dataset.id == document_subquery_old.c.dataset_id)
.filter(
Dataset.created_at < thirty_days_ago,
func.coalesce(document_subquery_new.c.document_count, 0) == 0,
func.coalesce(document_subquery_old.c.document_count, 0) > 0,
)
.order_by(Dataset.created_at.desc())
.paginate(page=page, per_page=50)
)
except NotFound:
break
if datasets.items is None or len(datasets.items) == 0:
break
page += 1
for dataset in datasets:
dataset_query = (
db.session.query(DatasetQuery)
.filter(DatasetQuery.created_at > thirty_days_ago, DatasetQuery.dataset_id == dataset.id)
.all()
)
if not dataset_query or len(dataset_query) == 0:
try:
# remove index
index_processor = IndexProcessorFactory(dataset.doc_form).init_index_processor()
index_processor.clean(dataset, None)
# update document
update_params = {Document.enabled: False}
Document.query.filter_by(dataset_id=dataset.id).update(update_params)
db.session.commit()
click.echo(click.style("Cleaned unused dataset {} from db success!".format(dataset.id), fg="green"))
except Exception as e:
click.echo(
click.style("clean dataset index error: {} {}".format(e.__class__.__name__, str(e)), fg="red")
)
end_at = time.perf_counter()
click.echo(click.style("Cleaned unused dataset from db success latency: {}".format(end_at - start_at), fg="green"))

View File

@ -1,15 +1,25 @@
from services.auth.firecrawl import FirecrawlAuth
from services.auth.jina import JinaAuth
from services.auth.api_key_auth_base import ApiKeyAuthBase
from services.auth.auth_type import AuthType
class ApiKeyAuthFactory:
def __init__(self, provider: str, credentials: dict):
if provider == "firecrawl":
self.auth = FirecrawlAuth(credentials)
elif provider == "jinareader":
self.auth = JinaAuth(credentials)
else:
raise ValueError("Invalid provider")
auth_factory = self.get_apikey_auth_factory(provider)
self.auth = auth_factory(credentials)
def validate_credentials(self):
return self.auth.validate_credentials()
@staticmethod
def get_apikey_auth_factory(provider: str) -> type[ApiKeyAuthBase]:
match provider:
case AuthType.FIRECRAWL:
from services.auth.firecrawl.firecrawl import FirecrawlAuth
return FirecrawlAuth
case AuthType.JINA:
from services.auth.jina.jina import JinaAuth
return JinaAuth
case _:
raise ValueError("Invalid provider")
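A minimal usage sketch of the refactored factory (the module path and credentials shape are assumptions; the provider strings come from the AuthType enum below):

```python
from services.auth.api_key_auth_factory import ApiKeyAuthFactory  # assumed module path

# "firecrawl" and "jinareader" are the values AuthType defines
auth = ApiKeyAuthFactory("firecrawl", {"api_key": "your-api-key"})  # assumed credentials shape
is_valid = auth.validate_credentials()
```

The lazy imports inside the match arms keep provider-specific dependencies out of the module import path until a provider is actually requested.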

View File

@ -0,0 +1,6 @@
from enum import Enum
class AuthType(str, Enum):
FIRECRAWL = "firecrawl"
JINA = "jinareader"

View File

@ -0,0 +1,64 @@
import json
from os import path
from pathlib import Path
from typing import Optional
from flask import current_app
from services.recommend_app.recommend_app_base import RecommendAppRetrievalBase
from services.recommend_app.recommend_app_type import RecommendAppType
class BuildInRecommendAppRetrieval(RecommendAppRetrievalBase):
"""
Retrieve recommended apps from the built-in source; the data lives at constants/recommended_apps.json
"""
builtin_data: Optional[dict] = None
def get_type(self) -> str:
return RecommendAppType.BUILDIN
def get_recommended_apps_and_categories(self, language: str) -> dict:
result = self.fetch_recommended_apps_from_builtin(language)
return result
def get_recommend_app_detail(self, app_id: str):
result = self.fetch_recommended_app_detail_from_builtin(app_id)
return result
@classmethod
def _get_builtin_data(cls) -> dict:
"""
Get builtin data.
:return:
"""
if cls.builtin_data:
return cls.builtin_data
root_path = current_app.root_path
cls.builtin_data = json.loads(
Path(path.join(root_path, "constants", "recommended_apps.json")).read_text(encoding="utf-8")
)
return cls.builtin_data
@classmethod
def fetch_recommended_apps_from_builtin(cls, language: str) -> dict:
"""
Fetch recommended apps from builtin.
:param language: language
:return:
"""
builtin_data = cls._get_builtin_data()
return builtin_data.get("recommended_apps", {}).get(language)
@classmethod
def fetch_recommended_app_detail_from_builtin(cls, app_id: str) -> Optional[dict]:
"""
Fetch recommended app detail from builtin.
:param app_id: App ID
:return:
"""
builtin_data = cls._get_builtin_data()
return builtin_data.get("app_details", {}).get(app_id)

View File

@ -0,0 +1,111 @@
from typing import Optional
from constants.languages import languages
from extensions.ext_database import db
from models.model import App, RecommendedApp
from services.app_dsl_service import AppDslService
from services.recommend_app.recommend_app_base import RecommendAppRetrievalBase
from services.recommend_app.recommend_app_type import RecommendAppType
class DatabaseRecommendAppRetrieval(RecommendAppRetrievalBase):
"""
Retrieve recommended apps from the database
"""
def get_recommended_apps_and_categories(self, language: str) -> dict:
result = self.fetch_recommended_apps_from_db(language)
return result
def get_recommend_app_detail(self, app_id: str):
result = self.fetch_recommended_app_detail_from_db(app_id)
return result
def get_type(self) -> str:
return RecommendAppType.DATABASE
@classmethod
def fetch_recommended_apps_from_db(cls, language: str) -> dict:
"""
Fetch recommended apps from db.
:param language: language
:return:
"""
recommended_apps = (
db.session.query(RecommendedApp)
.filter(RecommendedApp.is_listed == True, RecommendedApp.language == language)
.all()
)
if len(recommended_apps) == 0:
recommended_apps = (
db.session.query(RecommendedApp)
.filter(RecommendedApp.is_listed == True, RecommendedApp.language == languages[0])
.all()
)
categories = set()
recommended_apps_result = []
for recommended_app in recommended_apps:
app = recommended_app.app
if not app or not app.is_public:
continue
site = app.site
if not site:
continue
recommended_app_result = {
"id": recommended_app.id,
"app": {
"id": app.id,
"name": app.name,
"mode": app.mode,
"icon": app.icon,
"icon_background": app.icon_background,
},
"app_id": recommended_app.app_id,
"description": site.description,
"copyright": site.copyright,
"privacy_policy": site.privacy_policy,
"custom_disclaimer": site.custom_disclaimer,
"category": recommended_app.category,
"position": recommended_app.position,
"is_listed": recommended_app.is_listed,
}
recommended_apps_result.append(recommended_app_result)
categories.add(recommended_app.category)
return {"recommended_apps": recommended_apps_result, "categories": sorted(categories)}
@classmethod
def fetch_recommended_app_detail_from_db(cls, app_id: str) -> Optional[dict]:
"""
Fetch recommended app detail from db.
:param app_id: App ID
:return:
"""
# is in public recommended list
recommended_app = (
db.session.query(RecommendedApp)
.filter(RecommendedApp.is_listed == True, RecommendedApp.app_id == app_id)
.first()
)
if not recommended_app:
return None
# get app detail
app_model = db.session.query(App).filter(App.id == app_id).first()
if not app_model or not app_model.is_public:
return None
return {
"id": app_model.id,
"name": app_model.name,
"icon": app_model.icon,
"icon_background": app_model.icon_background,
"mode": app_model.mode,
"export_data": AppDslService.export_dsl(app_model=app_model),
}

View File

@ -0,0 +1,17 @@
from abc import ABC, abstractmethod
class RecommendAppRetrievalBase(ABC):
"""Interface for recommend app retrieval."""
@abstractmethod
def get_recommended_apps_and_categories(self, language: str) -> dict:
raise NotImplementedError
@abstractmethod
def get_recommend_app_detail(self, app_id: str):
raise NotImplementedError
@abstractmethod
def get_type(self) -> str:
raise NotImplementedError

View File

@ -0,0 +1,23 @@
from services.recommend_app.buildin.buildin_retrieval import BuildInRecommendAppRetrieval
from services.recommend_app.database.database_retrieval import DatabaseRecommendAppRetrieval
from services.recommend_app.recommend_app_base import RecommendAppRetrievalBase
from services.recommend_app.recommend_app_type import RecommendAppType
from services.recommend_app.remote.remote_retrieval import RemoteRecommendAppRetrieval
class RecommendAppRetrievalFactory:
@staticmethod
def get_recommend_app_factory(mode: str) -> type[RecommendAppRetrievalBase]:
match mode:
case RecommendAppType.REMOTE:
return RemoteRecommendAppRetrieval
case RecommendAppType.DATABASE:
return DatabaseRecommendAppRetrieval
case RecommendAppType.BUILDIN:
return BuildInRecommendAppRetrieval
case _:
raise ValueError(f"invalid fetch recommended apps mode: {mode}")
@staticmethod
def get_buildin_recommend_app_retrieval():
return BuildInRecommendAppRetrieval
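A minimal usage sketch, mirroring how the service below resolves a retrieval class from the configured mode (mode values are the RecommendAppType members):

```python
from configs import dify_config
from services.recommend_app.recommend_app_factory import RecommendAppRetrievalFactory

mode = dify_config.HOSTED_FETCH_APP_TEMPLATES_MODE  # "remote", "db", or "builtin"
retrieval = RecommendAppRetrievalFactory.get_recommend_app_factory(mode)()
result = retrieval.get_recommended_apps_and_categories("en-US")
```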

View File

@ -0,0 +1,7 @@
from enum import Enum
class RecommendAppType(str, Enum):
REMOTE = "remote"
BUILDIN = "builtin"
DATABASE = "db"

View File

@ -0,0 +1,71 @@
import logging
from typing import Optional
import requests
from configs import dify_config
from services.recommend_app.buildin.buildin_retrieval import BuildInRecommendAppRetrieval
from services.recommend_app.recommend_app_base import RecommendAppRetrievalBase
from services.recommend_app.recommend_app_type import RecommendAppType
logger = logging.getLogger(__name__)
class RemoteRecommendAppRetrieval(RecommendAppRetrievalBase):
"""
Retrieve recommended apps from the Dify official source
"""
def get_recommend_app_detail(self, app_id: str):
try:
result = self.fetch_recommended_app_detail_from_dify_official(app_id)
except Exception as e:
logger.warning(f"fetch recommended app detail from dify official failed: {e}, switch to built-in.")
result = BuildInRecommendAppRetrieval.fetch_recommended_app_detail_from_builtin(app_id)
return result
def get_recommended_apps_and_categories(self, language: str) -> dict:
try:
result = self.fetch_recommended_apps_from_dify_official(language)
except Exception as e:
logger.warning(f"fetch recommended apps from dify official failed: {e}, switch to built-in.")
result = BuildInRecommendAppRetrieval.fetch_recommended_apps_from_builtin(language)
return result
def get_type(self) -> str:
return RecommendAppType.REMOTE
@classmethod
def fetch_recommended_app_detail_from_dify_official(cls, app_id: str) -> Optional[dict]:
"""
Fetch recommended app detail from dify official.
:param app_id: App ID
:return:
"""
domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN
url = f"{domain}/apps/{app_id}"
response = requests.get(url, timeout=(3, 10))
if response.status_code != 200:
return None
return response.json()
@classmethod
def fetch_recommended_apps_from_dify_official(cls, language: str) -> dict:
"""
Fetch recommended apps from dify official.
:param language: language
:return:
"""
domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN
url = f"{domain}/apps?language={language}"
response = requests.get(url, timeout=(3, 10))
if response.status_code != 200:
raise ValueError(f"fetch recommended apps failed, status code: {response.status_code}")
result = response.json()
if "categories" in result:
result["categories"] = sorted(result["categories"])
return result

View File

@ -1,24 +1,10 @@
import json
import logging
from os import path
from pathlib import Path
from typing import Optional
import requests
from flask import current_app
from configs import dify_config
from constants.languages import languages
from extensions.ext_database import db
from models.model import App, RecommendedApp
from services.app_dsl_service import AppDslService
logger = logging.getLogger(__name__)
from services.recommend_app.recommend_app_factory import RecommendAppRetrievalFactory
class RecommendedAppService:
builtin_data: Optional[dict] = None
@classmethod
def get_recommended_apps_and_categories(cls, language: str) -> dict:
"""
@ -27,109 +13,17 @@ class RecommendedAppService:
:return:
"""
mode = dify_config.HOSTED_FETCH_APP_TEMPLATES_MODE
if mode == "remote":
try:
result = cls._fetch_recommended_apps_from_dify_official(language)
except Exception as e:
logger.warning(f"fetch recommended apps from dify official failed: {e}, switch to built-in.")
result = cls._fetch_recommended_apps_from_builtin(language)
elif mode == "db":
result = cls._fetch_recommended_apps_from_db(language)
elif mode == "builtin":
result = cls._fetch_recommended_apps_from_builtin(language)
else:
raise ValueError(f"invalid fetch recommended apps mode: {mode}")
retrieval_instance = RecommendAppRetrievalFactory.get_recommend_app_factory(mode)()
result = retrieval_instance.get_recommended_apps_and_categories(language)
if not result.get("recommended_apps") and language != "en-US":
result = cls._fetch_recommended_apps_from_builtin("en-US")
return result
@classmethod
def _fetch_recommended_apps_from_db(cls, language: str) -> dict:
"""
Fetch recommended apps from db.
:param language: language
:return:
"""
recommended_apps = (
db.session.query(RecommendedApp)
.filter(RecommendedApp.is_listed == True, RecommendedApp.language == language)
.all()
)
if len(recommended_apps) == 0:
recommended_apps = (
db.session.query(RecommendedApp)
.filter(RecommendedApp.is_listed == True, RecommendedApp.language == languages[0])
.all()
result = (
RecommendAppRetrievalFactory.get_buildin_recommend_app_retrieval().fetch_recommended_apps_from_builtin(
"en-US"
)
)
categories = set()
recommended_apps_result = []
for recommended_app in recommended_apps:
app = recommended_app.app
if not app or not app.is_public:
continue
site = app.site
if not site:
continue
recommended_app_result = {
"id": recommended_app.id,
"app": {
"id": app.id,
"name": app.name,
"mode": app.mode,
"icon": app.icon,
"icon_background": app.icon_background,
},
"app_id": recommended_app.app_id,
"description": site.description,
"copyright": site.copyright,
"privacy_policy": site.privacy_policy,
"custom_disclaimer": site.custom_disclaimer,
"category": recommended_app.category,
"position": recommended_app.position,
"is_listed": recommended_app.is_listed,
}
recommended_apps_result.append(recommended_app_result)
categories.add(recommended_app.category) # add category to categories
return {"recommended_apps": recommended_apps_result, "categories": sorted(categories)}
@classmethod
def _fetch_recommended_apps_from_dify_official(cls, language: str) -> dict:
"""
Fetch recommended apps from dify official.
:param language: language
:return:
"""
domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN
url = f"{domain}/apps?language={language}"
response = requests.get(url, timeout=(3, 10))
if response.status_code != 200:
raise ValueError(f"fetch recommended apps failed, status code: {response.status_code}")
result = response.json()
if "categories" in result:
result["categories"] = sorted(result["categories"])
return result
@classmethod
def _fetch_recommended_apps_from_builtin(cls, language: str) -> dict:
"""
Fetch recommended apps from builtin.
:param language: language
:return:
"""
builtin_data = cls._get_builtin_data()
return builtin_data.get("recommended_apps", {}).get(language)
@classmethod
def get_recommend_app_detail(cls, app_id: str) -> Optional[dict]:
"""
@ -138,117 +32,6 @@ class RecommendedAppService:
:return:
"""
mode = dify_config.HOSTED_FETCH_APP_TEMPLATES_MODE
if mode == "remote":
try:
result = cls._fetch_recommended_app_detail_from_dify_official(app_id)
except Exception as e:
logger.warning(f"fetch recommended app detail from dify official failed: {e}, switch to built-in.")
result = cls._fetch_recommended_app_detail_from_builtin(app_id)
elif mode == "db":
result = cls._fetch_recommended_app_detail_from_db(app_id)
elif mode == "builtin":
result = cls._fetch_recommended_app_detail_from_builtin(app_id)
else:
raise ValueError(f"invalid fetch recommended app detail mode: {mode}")
retrieval_instance = RecommendAppRetrievalFactory.get_recommend_app_factory(mode)()
result = retrieval_instance.get_recommend_app_detail(app_id)
return result
@classmethod
def _fetch_recommended_app_detail_from_dify_official(cls, app_id: str) -> Optional[dict]:
"""
Fetch recommended app detail from dify official.
:param app_id: App ID
:return:
"""
domain = dify_config.HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN
url = f"{domain}/apps/{app_id}"
response = requests.get(url, timeout=(3, 10))
if response.status_code != 200:
return None
return response.json()
@classmethod
def _fetch_recommended_app_detail_from_db(cls, app_id: str) -> Optional[dict]:
"""
Fetch recommended app detail from db.
:param app_id: App ID
:return:
"""
# is in public recommended list
recommended_app = (
db.session.query(RecommendedApp)
.filter(RecommendedApp.is_listed == True, RecommendedApp.app_id == app_id)
.first()
)
if not recommended_app:
return None
# get app detail
app_model = db.session.query(App).filter(App.id == app_id).first()
if not app_model or not app_model.is_public:
return None
return {
"id": app_model.id,
"name": app_model.name,
"icon": app_model.icon,
"icon_background": app_model.icon_background,
"mode": app_model.mode,
"export_data": AppDslService.export_dsl(app_model=app_model),
}
@classmethod
def _fetch_recommended_app_detail_from_builtin(cls, app_id: str) -> Optional[dict]:
"""
Fetch recommended app detail from builtin.
:param app_id: App ID
:return:
"""
builtin_data = cls._get_builtin_data()
return builtin_data.get("app_details", {}).get(app_id)
@classmethod
def _get_builtin_data(cls) -> dict:
"""
Get builtin data.
:return:
"""
if cls.builtin_data:
return cls.builtin_data
root_path = current_app.root_path
cls.builtin_data = json.loads(
Path(path.join(root_path, "constants", "recommended_apps.json")).read_text(encoding="utf-8")
)
return cls.builtin_data
@classmethod
def fetch_all_recommended_apps_and_export_datas(cls):
"""
Fetch all recommended apps and export datas
:return:
"""
templates = {"recommended_apps": {}, "app_details": {}}
for language in languages:
try:
result = cls._fetch_recommended_apps_from_dify_official(language)
except Exception as e:
logger.warning(f"fetch recommended apps from dify official failed: {e}, skip.")
continue
templates["recommended_apps"][language] = result
for recommended_app in result.get("recommended_apps"):
app_id = recommended_app.get("app_id")
# get app detail
app_detail = cls._fetch_recommended_app_detail_from_dify_official(app_id)
if not app_detail:
continue
templates["app_details"][app_id] = app_detail
return templates

View File

@ -1,4 +1,5 @@
import os
from unittest.mock import MagicMock
import pytest
from _pytest.monkeypatch import MonkeyPatch
@ -10,26 +11,31 @@ from pymochow.model.table import Table
from requests.adapters import HTTPAdapter
class AttrDict(dict):
def __getattr__(self, item):
return self.get(item)
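AttrDict lets the mock return plain dicts while test code reads fields as attributes, mimicking pymochow response objects; for example:

```python
row = AttrDict({"code": 0, "msg": "Success"})
assert row.code == 0
assert row.missing is None  # absent keys resolve to None via dict.get
```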
class MockBaiduVectorDBClass:
def mock_vector_db_client(
self,
config=None,
adapter: HTTPAdapter = None,
):
self._conn = None
self._config = None
self.conn = MagicMock()
self._config = MagicMock()
def list_databases(self, config=None) -> list[Database]:
return [
Database(
conn=self._conn,
conn=self.conn,
database_name="dify",
config=self._config,
)
]
def create_database(self, database_name: str, config=None) -> Database:
return Database(conn=self._conn, database_name=database_name, config=config)
return Database(conn=self.conn, database_name=database_name, config=config)
def list_table(self, config=None) -> list[Table]:
return []
@ -88,16 +94,18 @@ class MockBaiduVectorDBClass:
read_consistency=ReadConsistency.EVENTUAL,
config=None,
):
return {
"row": {
"id": "doc_id_001",
"vector": [0.23432432, 0.8923744, 0.89238432],
"text": "text",
"metadata": {"doc_id": "doc_id_001"},
},
"code": 0,
"msg": "Success",
}
return AttrDict(
{
"row": {
"id": primary_key.get("id"),
"vector": [0.23432432, 0.8923744, 0.89238432],
"text": "text",
"metadata": '{"doc_id": "doc_id_001"}',
},
"code": 0,
"msg": "Success",
}
)
def delete(self, primary_key=None, partition_key=None, filter=None, config=None):
return {"code": 0, "msg": "Success"}
@ -111,22 +119,24 @@ class MockBaiduVectorDBClass:
read_consistency=ReadConsistency.EVENTUAL,
config=None,
):
return {
"rows": [
{
"row": {
"id": "doc_id_001",
"vector": [0.23432432, 0.8923744, 0.89238432],
"text": "text",
"metadata": {"doc_id": "doc_id_001"},
},
"distance": 0.1,
"score": 0.5,
}
],
"code": 0,
"msg": "Success",
}
return AttrDict(
{
"rows": [
{
"row": {
"id": "doc_id_001",
"vector": [0.23432432, 0.8923744, 0.89238432],
"text": "text",
"metadata": '{"doc_id": "doc_id_001"}',
},
"distance": 0.1,
"score": 0.5,
}
],
"code": 0,
"msg": "Success",
}
)
MOCK = os.getenv("MOCK_SWITCH", "false").lower() == "true"
@ -146,6 +156,7 @@ def setup_baiduvectordb_mock(request, monkeypatch: MonkeyPatch):
monkeypatch.setattr(Table, "rebuild_index", MockBaiduVectorDBClass.rebuild_index)
monkeypatch.setattr(Table, "describe_index", MockBaiduVectorDBClass.describe_index)
monkeypatch.setattr(Table, "delete", MockBaiduVectorDBClass.delete)
monkeypatch.setattr(Table, "query", MockBaiduVectorDBClass.query)
monkeypatch.setattr(Table, "search", MockBaiduVectorDBClass.search)
yield

View File

@ -4,9 +4,6 @@ from core.rag.datasource.vdb.baidu.baidu_vector import BaiduConfig, BaiduVector
from tests.integration_tests.vdb.__mock.baiduvectordb import setup_baiduvectordb_mock
from tests.integration_tests.vdb.test_vector_store import AbstractVectorTest, get_example_text, setup_mock_redis
mock_client = MagicMock()
mock_client.list_databases.return_value = [{"name": "test"}]
class BaiduVectorTest(AbstractVectorTest):
def __init__(self):

View File

@ -8,4 +8,5 @@ pytest api/tests/integration_tests/vdb/chroma \
api/tests/integration_tests/vdb/qdrant \
api/tests/integration_tests/vdb/weaviate \
api/tests/integration_tests/vdb/elasticsearch \
api/tests/integration_tests/vdb/vikingdb
api/tests/integration_tests/vdb/vikingdb \
api/tests/integration_tests/vdb/baidu

View File

@ -2,7 +2,7 @@ version: '3'
services:
# API service
api:
image: langgenius/dify-api:0.9.1
image: langgenius/dify-api:0.9.2
restart: always
environment:
# Startup mode, 'api' starts the API server.
@ -227,7 +227,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.9.1
image: langgenius/dify-api:0.9.2
restart: always
environment:
CONSOLE_WEB_URL: ''
@ -396,7 +396,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.9.1
image: langgenius/dify-web:0.9.2
restart: always
environment:
# The base URL of console application api server, refers to the Console base URL of WEB service if console domain is

View File

@ -58,7 +58,7 @@ DEBUG=false
# which is convenient for debugging.
FLASK_DEBUG=false
# A secret key that is used for securely signing the session cookie
# and encrypting sensitive information on the database.
# You can generate a strong key using `openssl rand -base64 42`.
SECRET_KEY=sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U
@ -366,8 +366,8 @@ SUPABASE_URL=your-server-url
# Vector Database Configuration
# ------------------------------
# The type of vector store to use.
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, ``chroma`, `opensearch`, `tidb_vector`, `oracle`, `tencent`, `elasticsearch`, `analyticdb`.
# The type of vector store to use.
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `tidb_vector`, `oracle`, `tencent`, `elasticsearch`, `analyticdb`, `vikingdb`.
VECTOR_STORE=weaviate
# The Weaviate endpoint URL. Only available when VECTOR_STORE is `weaviate`.
@ -492,6 +492,15 @@ BAIDU_VECTOR_DB_DATABASE=dify
BAIDU_VECTOR_DB_SHARD=1
BAIDU_VECTOR_DB_REPLICAS=3
# VikingDB configurations, only available when VECTOR_STORE is `vikingdb`
VIKINGDB_ACCESS_KEY=your-ak
VIKINGDB_SECRET_KEY=your-sk
VIKINGDB_REGION=cn-shanghai
VIKINGDB_HOST=api-vikingdb.xxx.volces.com
VIKINGDB_SCHEMA=http
VIKINGDB_CONNECTION_TIMEOUT=30
VIKINGDB_SOCKET_TIMEOUT=30
# ------------------------------
# Knowledge Configuration
# ------------------------------
@ -668,7 +677,7 @@ SANDBOX_PORT=8194
# Environment Variables for weaviate Service
# (only used when VECTOR_STORE is weaviate)
# ------------------------------
WEAVIATE_PERSISTENCE_DATA_PATH='/var/lib/weaviate'
WEAVIATE_PERSISTENCE_DATA_PATH=/var/lib/weaviate
WEAVIATE_QUERY_DEFAULTS_LIMIT=25
WEAVIATE_AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED=true
WEAVIATE_DEFAULT_VECTORIZER_MODULE=none
@ -829,4 +838,4 @@ POSITION_PROVIDER_PINS=
POSITION_PROVIDER_INCLUDES=
POSITION_PROVIDER_EXCLUDES=
# CSP https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP
CSP_WHITELIST=

View File

@ -41,7 +41,7 @@ services:
# The DifySandbox
sandbox:
image: langgenius/dify-sandbox:0.2.9
image: langgenius/dify-sandbox:0.2.10
restart: always
environment:
# The DifySandbox configurations

View File

@ -227,7 +227,7 @@ x-shared-env: &shared-api-worker-env
services:
# API service
api:
image: langgenius/dify-api:0.9.1
image: langgenius/dify-api:0.9.2
restart: always
environment:
# Use the shared environment variables.
@ -247,7 +247,7 @@ services:
# worker service
# The Celery worker for processing the queue.
worker:
image: langgenius/dify-api:0.9.1
image: langgenius/dify-api:0.9.2
restart: always
environment:
# Use the shared environment variables.
@ -266,7 +266,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:0.9.1
image: langgenius/dify-web:0.9.2
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -313,7 +313,7 @@ services:
# The DifySandbox
sandbox:
image: langgenius/dify-sandbox:0.2.9
image: langgenius/dify-sandbox:0.2.10
restart: always
environment:
# The DifySandbox configurations

View File

@ -1 +1 @@
from dify_client.client import ChatClient, CompletionClient, DifyClient

View File

@ -9,10 +9,15 @@ class DifyClient:
self.base_url = base_url
def _send_request(self, method, endpoint, json=None, params=None, stream=False):
headers = {"Authorization": f"Bearer {self.api_key}", "Content-Type": "application/json"}
headers = {
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json",
}
url = f"{self.base_url}{endpoint}"
response = requests.request(method, url, json=json, params=params, headers=headers, stream=stream)
response = requests.request(
method, url, json=json, params=params, headers=headers, stream=stream
)
return response
@ -20,7 +25,9 @@ class DifyClient:
headers = {"Authorization": f"Bearer {self.api_key}"}
url = f"{self.base_url}{endpoint}"
response = requests.request(method, url, data=data, headers=headers, files=files)
response = requests.request(
method, url, data=data, headers=headers, files=files
)
return response
@ -34,7 +41,9 @@ class DifyClient:
def file_upload(self, user, files):
data = {"user": user}
return self._send_request_with_files("POST", "/files/upload", data=data, files=files)
return self._send_request_with_files(
"POST", "/files/upload", data=data, files=files
)
def text_to_audio(self, text: str, user: str, streaming: bool = False):
data = {"text": text, "user": user, "streaming": streaming}
@ -47,25 +56,52 @@ class DifyClient:
class CompletionClient(DifyClient):
def create_completion_message(self, inputs, response_mode, user, files=None):
data = {"inputs": inputs, "response_mode": response_mode, "user": user, "files": files}
data = {
"inputs": inputs,
"response_mode": response_mode,
"user": user,
"files": files,
}
return self._send_request(
"POST", "/completion-messages", data, stream=True if response_mode == "streaming" else False
"POST",
"/completion-messages",
data,
stream=True if response_mode == "streaming" else False,
)
class ChatClient(DifyClient):
def create_chat_message(self, inputs, query, user, response_mode="blocking", conversation_id=None, files=None):
data = {"inputs": inputs, "query": query, "user": user, "response_mode": response_mode, "files": files}
def create_chat_message(
self,
inputs,
query,
user,
response_mode="blocking",
conversation_id=None,
files=None,
):
data = {
"inputs": inputs,
"query": query,
"user": user,
"response_mode": response_mode,
"files": files,
}
if conversation_id:
data["conversation_id"] = conversation_id
return self._send_request(
"POST", "/chat-messages", data, stream=True if response_mode == "streaming" else False
"POST",
"/chat-messages",
data,
stream=True if response_mode == "streaming" else False,
)
def get_suggested(self, message_id, user: str):
params = {"user": user}
return self._send_request("GET", f"/messages/{message_id}/suggested", params=params)
return self._send_request(
"GET", f"/messages/{message_id}/suggested", params=params
)
def stop_message(self, task_id, user):
data = {"user": user}
@ -75,7 +111,9 @@ class ChatClient(DifyClient):
params = {"user": user, "last_id": last_id, "limit": limit, "pinned": pinned}
return self._send_request("GET", "/conversations", params=params)
def get_conversation_messages(self, user, conversation_id=None, first_id=None, limit=None):
def get_conversation_messages(
self, user, conversation_id=None, first_id=None, limit=None
):
params = {"user": user}
if conversation_id:
@ -87,9 +125,13 @@ class ChatClient(DifyClient):
return self._send_request("GET", "/messages", params=params)
def rename_conversation(self, conversation_id, name, auto_generate: bool, user: str):
def rename_conversation(
self, conversation_id, name, auto_generate: bool, user: str
):
data = {"name": name, "auto_generate": auto_generate, "user": user}
return self._send_request("POST", f"/conversations/{conversation_id}/name", data)
return self._send_request(
"POST", f"/conversations/{conversation_id}/name", data
)
def delete_conversation(self, conversation_id, user):
data = {"user": user}
@ -102,7 +144,9 @@ class ChatClient(DifyClient):
class WorkflowClient(DifyClient):
def run(self, inputs: dict, response_mode: str = "streaming", user: str = "abc-123"):
def run(
self, inputs: dict, response_mode: str = "streaming", user: str = "abc-123"
):
data = {"inputs": inputs, "response_mode": response_mode, "user": user}
return self._send_request("POST", "/workflows/run", data)
@ -115,7 +159,9 @@ class WorkflowClient(DifyClient):
class KnowledgeBaseClient(DifyClient):
def __init__(self, api_key, base_url: str = "https://api.dify.ai/v1", dataset_id: str = None):
def __init__(
self, api_key, base_url: str = "https://api.dify.ai/v1", dataset_id: str = None
):
"""
Construct a KnowledgeBaseClient object.
@ -137,7 +183,9 @@ class KnowledgeBaseClient(DifyClient):
return self._send_request("POST", "/datasets", {"name": name}, **kwargs)
def list_datasets(self, page: int = 1, page_size: int = 20, **kwargs):
return self._send_request("GET", f"/datasets?page={page}&limit={page_size}", **kwargs)
return self._send_request(
"GET", f"/datasets?page={page}&limit={page_size}", **kwargs
)
def create_document_by_text(self, name, text, extra_params: dict = None, **kwargs):
"""
@ -165,13 +213,20 @@ class KnowledgeBaseClient(DifyClient):
}
:return: Response from the API
"""
data = {"indexing_technique": "high_quality", "process_rule": {"mode": "automatic"}, "name": name, "text": text}
data = {
"indexing_technique": "high_quality",
"process_rule": {"mode": "automatic"},
"name": name,
"text": text,
}
if extra_params is not None and isinstance(extra_params, dict):
data.update(extra_params)
url = f"/datasets/{self._get_dataset_id()}/document/create_by_text"
return self._send_request("POST", url, json=data, **kwargs)
def update_document_by_text(self, document_id, name, text, extra_params: dict = None, **kwargs):
def update_document_by_text(
self, document_id, name, text, extra_params: dict = None, **kwargs
):
"""
Update a document by text.
@ -201,10 +256,14 @@ class KnowledgeBaseClient(DifyClient):
data = {"name": name, "text": text}
if extra_params is not None and isinstance(extra_params, dict):
data.update(extra_params)
url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_text"
url = (
f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_text"
)
return self._send_request("POST", url, json=data, **kwargs)
def create_document_by_file(self, file_path, original_document_id=None, extra_params: dict = None):
def create_document_by_file(
self, file_path, original_document_id=None, extra_params: dict = None
):
"""
Create a document by file.
@ -231,15 +290,22 @@ class KnowledgeBaseClient(DifyClient):
:return: Response from the API
"""
files = {"file": open(file_path, "rb")}
data = {"process_rule": {"mode": "automatic"}, "indexing_technique": "high_quality"}
data = {
"process_rule": {"mode": "automatic"},
"indexing_technique": "high_quality",
}
if extra_params is not None and isinstance(extra_params, dict):
data.update(extra_params)
if original_document_id is not None:
data["original_document_id"] = original_document_id
url = f"/datasets/{self._get_dataset_id()}/document/create_by_file"
return self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files)
return self._send_request_with_files(
"POST", url, {"data": json.dumps(data)}, files
)
def update_document_by_file(self, document_id, file_path, extra_params: dict = None):
def update_document_by_file(
self, document_id, file_path, extra_params: dict = None
):
"""
Update a document by file.
@ -269,8 +335,12 @@ class KnowledgeBaseClient(DifyClient):
data = {}
if extra_params is not None and isinstance(extra_params, dict):
data.update(extra_params)
url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_file"
return self._send_request_with_files("POST", url, {"data": json.dumps(data)}, files)
url = (
f"/datasets/{self._get_dataset_id()}/documents/{document_id}/update_by_file"
)
return self._send_request_with_files(
"POST", url, {"data": json.dumps(data)}, files
)
def batch_indexing_status(self, batch_id: str, **kwargs):
"""
@ -301,7 +371,9 @@ class KnowledgeBaseClient(DifyClient):
url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}"
return self._send_request("DELETE", url)
def list_documents(self, page: int = None, page_size: int = None, keyword: str = None, **kwargs):
def list_documents(
self, page: int = None, page_size: int = None, keyword: str = None, **kwargs
):
"""
Get a list of documents in this dataset.
@ -329,7 +401,9 @@ class KnowledgeBaseClient(DifyClient):
url = f"/datasets/{self._get_dataset_id()}/documents/{document_id}/segments"
return self._send_request("POST", url, json=data, **kwargs)
def query_segments(self, document_id, keyword: str = None, status: str = None, **kwargs):
def query_segments(
self, document_id, keyword: str = None, status: str = None, **kwargs
):
"""
Query segments in this document.

View File

@ -12,17 +12,15 @@ setup(
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/langgenius/dify",
license='MIT',
packages=['dify_client'],
license="MIT",
packages=["dify_client"],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires=">=3.6",
install_requires=[
"requests"
],
keywords='dify nlp ai language-processing',
install_requires=["requests"],
keywords="dify nlp ai language-processing",
include_package_data=True,
)

View File

@ -2,7 +2,12 @@ import os
import time
import unittest
from dify_client.client import ChatClient, CompletionClient, DifyClient, KnowledgeBaseClient
from dify_client.client import (
ChatClient,
CompletionClient,
DifyClient,
KnowledgeBaseClient,
)
API_KEY = os.environ.get("API_KEY")
APP_ID = os.environ.get("APP_ID")
@ -13,7 +18,9 @@ FILE_PATH_BASE = os.path.dirname(__file__)
class TestKnowledgeBaseClient(unittest.TestCase):
def setUp(self):
self.knowledge_base_client = KnowledgeBaseClient(API_KEY, base_url=API_BASE_URL)
self.README_FILE_PATH = os.path.abspath(os.path.join(FILE_PATH_BASE, "../README.md"))
self.README_FILE_PATH = os.path.abspath(
os.path.join(FILE_PATH_BASE, "../README.md")
)
self.dataset_id = None
self.document_id = None
self.segment_id = None
@ -21,7 +28,9 @@ class TestKnowledgeBaseClient(unittest.TestCase):
def _get_dataset_kb_client(self):
self.assertIsNotNone(self.dataset_id)
return KnowledgeBaseClient(API_KEY, base_url=API_BASE_URL, dataset_id=self.dataset_id)
return KnowledgeBaseClient(
API_KEY, base_url=API_BASE_URL, dataset_id=self.dataset_id
)
def test_001_create_dataset(self):
response = self.knowledge_base_client.create_dataset(name="test_dataset")
@ -67,7 +76,9 @@ class TestKnowledgeBaseClient(unittest.TestCase):
def _test_004_update_document_by_text(self):
client = self._get_dataset_kb_client()
self.assertIsNotNone(self.document_id)
response = client.update_document_by_text(self.document_id, "test_document_updated", "test_text_updated")
response = client.update_document_by_text(
self.document_id, "test_document_updated", "test_text_updated"
)
data = response.json()
self.assertIn("document", data)
self.assertIn("batch", data)
@ -82,7 +93,9 @@ class TestKnowledgeBaseClient(unittest.TestCase):
def _test_006_update_document_by_file(self):
client = self._get_dataset_kb_client()
self.assertIsNotNone(self.document_id)
response = client.update_document_by_file(self.document_id, self.README_FILE_PATH)
response = client.update_document_by_file(
self.document_id, self.README_FILE_PATH
)
data = response.json()
self.assertIn("document", data)
self.assertIn("batch", data)
@ -112,9 +125,9 @@ class TestKnowledgeBaseClient(unittest.TestCase):
def _test_010_add_segments(self):
client = self._get_dataset_kb_client()
response = client.add_segments(self.document_id, [
{"content": "test text segment 1"}
])
response = client.add_segments(
self.document_id, [{"content": "test text segment 1"}]
)
data = response.json()
self.assertIn("data", data)
self.assertGreater(len(data["data"]), 0)
@ -131,9 +144,11 @@ class TestKnowledgeBaseClient(unittest.TestCase):
def _test_012_update_document_segment(self):
client = self._get_dataset_kb_client()
self.assertIsNotNone(self.segment_id)
response = client.update_document_segment(self.document_id, self.segment_id,
{"content": "test text segment 1 updated"}
)
response = client.update_document_segment(
self.document_id,
self.segment_id,
{"content": "test text segment 1 updated"},
)
data = response.json()
self.assertIn("data", data)
self.assertGreater(len(data["data"]), 0)
@ -159,29 +174,37 @@ class TestChatClient(unittest.TestCase):
self.chat_client = ChatClient(API_KEY)
def test_create_chat_message(self):
response = self.chat_client.create_chat_message({}, "Hello, World!", "test_user")
response = self.chat_client.create_chat_message(
{}, "Hello, World!", "test_user"
)
self.assertIn("answer", response.text)
def test_create_chat_message_with_vision_model_by_remote_url(self):
files = [{
"type": "image",
"transfer_method": "remote_url",
"url": "your_image_url"
}]
response = self.chat_client.create_chat_message({}, "Describe the picture.", "test_user", files=files)
files = [
{"type": "image", "transfer_method": "remote_url", "url": "your_image_url"}
]
response = self.chat_client.create_chat_message(
{}, "Describe the picture.", "test_user", files=files
)
self.assertIn("answer", response.text)
def test_create_chat_message_with_vision_model_by_local_file(self):
files = [{
"type": "image",
"transfer_method": "local_file",
"upload_file_id": "your_file_id"
}]
response = self.chat_client.create_chat_message({}, "Describe the picture.", "test_user", files=files)
files = [
{
"type": "image",
"transfer_method": "local_file",
"upload_file_id": "your_file_id",
}
]
response = self.chat_client.create_chat_message(
{}, "Describe the picture.", "test_user", files=files
)
self.assertIn("answer", response.text)
def test_get_conversation_messages(self):
response = self.chat_client.get_conversation_messages("test_user", "your_conversation_id")
response = self.chat_client.get_conversation_messages(
"test_user", "your_conversation_id"
)
self.assertIn("answer", response.text)
def test_get_conversations(self):
@ -194,28 +217,31 @@ class TestCompletionClient(unittest.TestCase):
self.completion_client = CompletionClient(API_KEY)
def test_create_completion_message(self):
response = self.completion_client.create_completion_message({"query": "What's the weather like today?"},
"blocking", "test_user")
response = self.completion_client.create_completion_message(
{"query": "What's the weather like today?"}, "blocking", "test_user"
)
self.assertIn("answer", response.text)
def test_create_completion_message_with_vision_model_by_remote_url(self):
files = [{
"type": "image",
"transfer_method": "remote_url",
"url": "your_image_url"
}]
files = [
{"type": "image", "transfer_method": "remote_url", "url": "your_image_url"}
]
response = self.completion_client.create_completion_message(
{"query": "Describe the picture."}, "blocking", "test_user", files)
{"query": "Describe the picture."}, "blocking", "test_user", files
)
self.assertIn("answer", response.text)
def test_create_completion_message_with_vision_model_by_local_file(self):
files = [{
"type": "image",
"transfer_method": "local_file",
"upload_file_id": "your_file_id"
}]
files = [
{
"type": "image",
"transfer_method": "local_file",
"upload_file_id": "your_file_id",
}
]
response = self.completion_client.create_completion_message(
{"query": "Describe the picture."}, "blocking", "test_user", files)
{"query": "Describe the picture."}, "blocking", "test_user", files
)
self.assertIn("answer", response.text)
@ -224,7 +250,9 @@ class TestDifyClient(unittest.TestCase):
self.dify_client = DifyClient(API_KEY)
def test_message_feedback(self):
response = self.dify_client.message_feedback("your_message_id", 'like', "test_user")
response = self.dify_client.message_feedback(
"your_message_id", "like", "test_user"
)
self.assertIn("success", response.text)
def test_get_application_parameters(self):
@ -237,9 +265,7 @@ class TestDifyClient(unittest.TestCase):
mime_type = "image/jpeg"
with open(file_path, "rb") as file:
files = {
"file": (file_name, file, mime_type)
}
files = {"file": (file_name, file, mime_type)}
response = self.dify_client.file_upload("test_user", files)
self.assertIn("name", response.text)

View File

@ -1,7 +1,8 @@
{
"extends": [
"next",
"@antfu"
"@antfu",
"plugin:storybook/recommended"
],
"rules": {
"@typescript-eslint/consistent-type-definitions": [

web/.gitignore vendored
View File

@ -49,4 +49,5 @@ package-lock.json
# pnpm
pnpm-lock.yaml
.favorites.json
*storybook.log

web/.storybook/main.ts Normal file
View File

@ -0,0 +1,19 @@
import type { StorybookConfig } from '@storybook/nextjs'
const config: StorybookConfig = {
// stories: ['../stories/**/*.mdx', '../stories/**/*.stories.@(js|jsx|mjs|ts|tsx)'],
stories: ['../app/components/**/*.stories.@(js|jsx|mjs|ts|tsx)'],
addons: [
'@storybook/addon-onboarding',
'@storybook/addon-links',
'@storybook/addon-essentials',
'@chromatic-com/storybook',
'@storybook/addon-interactions',
],
framework: {
name: '@storybook/nextjs',
options: {},
},
staticDirs: ['../public'],
}
export default config

View File

@ -0,0 +1,37 @@
import React from 'react'
import type { Preview } from '@storybook/react'
import { withThemeByDataAttribute } from '@storybook/addon-themes';
import I18nServer from '../app/components/i18n-server'
import '../app/styles/globals.css'
import '../app/styles/markdown.scss'
import './storybook.css'
export const decorators = [
withThemeByDataAttribute({
themes: {
light: 'light',
dark: 'dark',
},
defaultTheme: 'light',
attributeName: 'data-theme',
}),
Story => {
return <I18nServer>
<Story />
</I18nServer>
}
];
const preview: Preview = {
parameters: {
controls: {
matchers: {
color: /(background|color)$/i,
date: /Date$/i,
},
},
},
}
export default preview

View File

@ -0,0 +1,6 @@
html,
body {
max-width: unset;
overflow: auto;
user-select: text;
}

View File

@ -74,6 +74,18 @@ If you want to customize the host and port:
npm run start --port=3001 --host=0.0.0.0
```
## Storybook
This project uses [Storybook](https://storybook.js.org/) for UI component development.
To start the Storybook server, run:
```bash
yarn storybook
```
Open [http://localhost:6006](http://localhost:6006) with your browser to see the result.
## Lint Code
If your IDE is VSCode, rename `web/.vscode/settings.example.json` to `web/.vscode/settings.json` to enable the lint settings.

View File

@ -1050,6 +1050,151 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
---
<Heading
url='/datasets/{dataset_id}/hit_testing'
method='POST'
title='Dataset hit testing'
name='#dataset_hit_testing'
/>
<Row>
<Col>
### Path
<Properties>
<Property name='dataset_id' type='string' key='dataset_id'>
Dataset ID
</Property>
</Properties>
### Request Body
<Properties>
<Property name='query' type='string' key='query'>
Retrieval keyword
</Property>
<Property name='retrieval_model' type='object' key='retrieval_model'>
Retrieval parameters (Optional; if not provided, retrieval falls back to the default method)
- <code>search_method</code> (text) Search method: One of the following four keywords is required
- <code>keyword_search</code> Keyword search
- <code>semantic_search</code> Semantic search
- <code>full_text_search</code> Full-text search
- <code>hybrid_search</code> Hybrid search
- <code>reranking_enable</code> (bool) Whether to enable reranking, optional, required if the search mode is semantic_search or hybrid_search
- <code>reranking_mode</code> (object) Rerank model configuration, optional, required if reranking is enabled
- <code>reranking_provider_name</code> (string) Rerank model provider
- <code>reranking_model_name</code> (string) Rerank model name
- <code>weights</code> (double) Semantic search weight setting in hybrid search mode
- <code>top_k</code> (integer) Number of results to return, optional
- <code>score_threshold_enabled</code> (bool) Whether to enable score threshold
- <code>score_threshold</code> (double) Score threshold
</Property>
<Property name='external_retrieval_model' type='object' key='external_retrieval_model'>
Unused field
</Property>
</Properties>
</Col>
<Col sticky>
<CodeGroup
title="Request"
tag="POST"
label="/datasets/{dataset_id}/hit_testing"
targetCode={`curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/hit_testing' \\\n--header 'Authorization: Bearer {api_key}'\\\n--header 'Content-Type: application/json'\\\n--data-raw '{
"query": "test",
"retrieval_model": {
"search_method": "keyword_search",
"reranking_enable": false,
"reranking_mode": null,
"reranking_model": {
"reranking_provider_name": "",
"reranking_model_name": ""
},
"weights": null,
"top_k": 1,
"score_threshold_enabled": false,
"score_threshold": null
}
}'`}
>
```bash {{ title: 'cURL' }}
curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/hit_testing' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json' \
--data-raw '{
"query": "test",
"retrieval_model": {
"search_method": "keyword_search",
"reranking_enable": false,
"reranking_mode": null,
"reranking_model": {
"reranking_provider_name": "",
"reranking_model_name": ""
},
"weights": null,
"top_k": 2,
"score_threshold_enabled": false,
"score_threshold": null
}
}'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"query": {
"content": "test"
},
"records": [
{
"segment": {
"id": "7fa6f24f-8679-48b3-bc9d-bdf28d73f218",
"position": 1,
"document_id": "a8c6c36f-9f5d-4d7a-8472-f5d7b75d71d2",
"content": "Operation guide",
"answer": null,
"word_count": 847,
"tokens": 280,
"keywords": [
"install",
"java",
"base",
"scripts",
"jdk",
"manual",
"internal",
"opens",
"add",
"vmoptions"
],
"index_node_id": "39dd8443-d960-45a8-bb46-7275ad7fbc8e",
"index_node_hash": "0189157697b3c6a418ccf8264a09699f25858975578f3467c76d6bfc94df1d73",
"hit_count": 0,
"enabled": true,
"disabled_at": null,
"disabled_by": null,
"status": "completed",
"created_by": "dbcb1ab5-90c8-41a7-8b78-73b235eb6f6f",
"created_at": 1728734540,
"indexing_at": 1728734552,
"completed_at": 1728734584,
"error": null,
"stopped_at": null,
"document": {
"id": "a8c6c36f-9f5d-4d7a-8472-f5d7b75d71d2",
"data_source_type": "upload_file",
"name": "readme.txt",
"doc_type": null
}
},
"score": 3.730463140527718e-05,
"tsne_position": null
}
]
}
```
</CodeGroup>
</Col>
</Row>
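For readers scripting against this endpoint, the cURL call above translates directly; a minimal sketch with Python `requests` (base URL, API key, and dataset ID are placeholders):

```python
import requests

DATASET_ID = "your-dataset-id"  # placeholder
API_KEY = "your-api-key"        # placeholder

resp = requests.post(
    f"https://api.dify.ai/v1/datasets/{DATASET_ID}/hit_testing",
    headers={"Authorization": f"Bearer {API_KEY}", "Content-Type": "application/json"},
    json={
        "query": "test",
        "retrieval_model": {
            "search_method": "keyword_search",
            "reranking_enable": False,
            "top_k": 2,
            "score_threshold_enabled": False,
        },
    },
    timeout=(3, 10),
)
resp.raise_for_status()
for record in resp.json()["records"]:
    print(record["segment"]["content"], record["score"])
```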
---
<Row>
<Col>
### Error message

View File

@ -1049,6 +1049,152 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
</Col>
</Row>
---
<Heading
url='/datasets/{dataset_id}/hit_testing'
method='POST'
title='Dataset hit testing'
name='#dataset_hit_testing'
/>
<Row>
<Col>
### Path
<Properties>
<Property name='dataset_id' type='string' key='dataset_id'>
Dataset ID
</Property>
</Properties>
### Request Body
<Properties>
<Property name='query' type='string' key='query'>
Retrieval keyword
</Property>
<Property name='retrieval_model' type='object' key='retrieval_model'>
Retrieval parameters (Optional; if not provided, retrieval falls back to the default method)
- <code>search_method</code> (text) Search method: One of the following four keywords is required
- <code>keyword_search</code> Keyword search
- <code>semantic_search</code> Semantic search
- <code>full_text_search</code> Full-text search
- <code>hybrid_search</code> Hybrid search
- <code>reranking_enable</code> (bool) Whether to enable reranking, optional, required if the search mode is semantic_search or hybrid_search
- <code>reranking_mode</code> (object) Rerank model configuration, optional, required if reranking is enabled
- <code>reranking_provider_name</code> (string) Rerank model provider
- <code>reranking_model_name</code> (string) Rerank model name
- <code>weights</code> (double) Semantic search weight setting in hybrid search mode
- <code>top_k</code> (integer) Number of results to return, optional
- <code>score_threshold_enabled</code> (bool) Whether to enable score threshold
- <code>score_threshold</code> (double) Score threshold
</Property>
<Property name='external_retrieval_model' type='object' key='external_retrieval_model'>
Unused field
</Property>
</Properties>
</Col>
<Col sticky>
<CodeGroup
title="Request"
tag="POST"
label="/datasets/{dataset_id}/hit_testing"
targetCode={`curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/hit_testing' \\\n--header 'Authorization: Bearer {api_key}'\\\n--header 'Content-Type: application/json'\\\n--data-raw '{
"query": "test",
"retrieval_model": {
"search_method": "keyword_search",
"reranking_enable": false,
"reranking_mode": null,
"reranking_model": {
"reranking_provider_name": "",
"reranking_model_name": ""
},
"weights": null,
"top_k": 1,
"score_threshold_enabled": false,
"score_threshold": null
}
}'`}
>
```bash {{ title: 'cURL' }}
curl --location --request POST '${props.apiBaseUrl}/datasets/{dataset_id}/hit_testing' \
--header 'Authorization: Bearer {api_key}' \
--header 'Content-Type: application/json' \
--data-raw '{
"query": "test",
"retrieval_model": {
"search_method": "keyword_search",
"reranking_enable": false,
"reranking_mode": null,
"reranking_model": {
"reranking_provider_name": "",
"reranking_model_name": ""
},
"weights": null,
"top_k": 2,
"score_threshold_enabled": false,
"score_threshold": null
}
}'
```
</CodeGroup>
<CodeGroup title="Response">
```json {{ title: 'Response' }}
{
"query": {
"content": "test"
},
"records": [
{
"segment": {
"id": "7fa6f24f-8679-48b3-bc9d-bdf28d73f218",
"position": 1,
"document_id": "a8c6c36f-9f5d-4d7a-8472-f5d7b75d71d2",
"content": "Operation guide",
"answer": null,
"word_count": 847,
"tokens": 280,
"keywords": [
"install",
"java",
"base",
"scripts",
"jdk",
"manual",
"internal",
"opens",
"add",
"vmoptions"
],
"index_node_id": "39dd8443-d960-45a8-bb46-7275ad7fbc8e",
"index_node_hash": "0189157697b3c6a418ccf8264a09699f25858975578f3467c76d6bfc94df1d73",
"hit_count": 0,
"enabled": true,
"disabled_at": null,
"disabled_by": null,
"status": "completed",
"created_by": "dbcb1ab5-90c8-41a7-8b78-73b235eb6f6f",
"created_at": 1728734540,
"indexing_at": 1728734552,
"completed_at": 1728734584,
"error": null,
"stopped_at": null,
"document": {
"id": "a8c6c36f-9f5d-4d7a-8472-f5d7b75d71d2",
"data_source_type": "upload_file",
"name": "readme.txt",
"doc_type": null
}
},
"score": 3.730463140527718e-05,
"tsne_position": null
}
]
}
```
</CodeGroup>
</Col>
</Row>
---
<Row>

View File

@ -0,0 +1,107 @@
import type { Meta, StoryObj } from '@storybook/react'
import { fn } from '@storybook/test'
import { RocketLaunchIcon } from '@heroicons/react/20/solid'
import { Button } from '.'
const meta = {
title: 'Base/Button',
component: Button,
parameters: {
layout: 'centered',
},
tags: ['autodocs'],
argTypes: {
loading: { control: 'boolean' },
variant: {
control: 'select',
options: ['primary', 'warning', 'secondary', 'secondary-accent', 'ghost', 'ghost-accent', 'tertiary'],
},
},
args: {
variant: 'ghost',
onClick: fn(),
children: 'adsf',
},
} satisfies Meta<typeof Button>
export default meta
type Story = StoryObj<typeof meta>
export const Default: Story = {
args: {
variant: 'primary',
loading: false,
children: 'Primary Button',
},
}
export const Secondary: Story = {
args: {
variant: 'secondary',
children: 'Secondary Button',
},
}
export const SecondaryAccent: Story = {
args: {
variant: 'secondary-accent',
children: 'Secondary Accent Button',
},
}
export const Ghost: Story = {
args: {
variant: 'ghost',
children: 'Ghost Button',
},
}
export const GhostAccent: Story = {
args: {
variant: 'ghost-accent',
children: 'Ghost Accent Button',
},
}
export const Tertiary: Story = {
args: {
variant: 'tertiary',
children: 'Tertiary Button',
},
}
export const Warning: Story = {
args: {
variant: 'warning',
children: 'Warning Button',
},
}
export const Disabled: Story = {
args: {
variant: 'primary',
disabled: true,
children: 'Disabled Button',
},
}
export const Loading: Story = {
args: {
variant: 'primary',
loading: true,
children: 'Loading Button',
},
}
export const WithIcon: Story = {
args: {
variant: 'primary',
children: (
<>
<RocketLaunchIcon className="h-4 w-4 mr-1.5 stroke-[1.8px]" />
Launch
</>
),
},
}

View File

@ -28,6 +28,8 @@ const HeaderInMobile = () => {
className='mr-2'
size='tiny'
icon={appData?.site.icon}
iconType={appData?.site.icon_type}
imageUrl={appData?.site.icon_url}
background={appData?.site.icon_background}
/>
<div className='py-1 text-base font-semibold text-gray-800 truncate'>

View File

@ -0,0 +1,61 @@
export const markdownContent = `
# Heading 1
## Heading 2
### Heading 3
#### Heading 4
##### Heading 5
###### Heading 6
# Basic markdown content.
Should support **bold**, *italic*, and ~~strikethrough~~.
Should support [links](https://www.google.com).
Should support inline \`code\` blocks.
# Number list
1. First item
2. Second item
3. Third item
# Bullet list
- First item
- Second item
- Third item
# Link
[Google](https://www.google.com)
# Image
![Alt text](https://picsum.photos/200/300)
# Table
| Column 1 | Column 2 | Column 3 |
| -------- | -------- | -------- |
| Cell 1 | Cell 2 | Cell 3 |
| Cell 4 | Cell 5 | Cell 6 |
| Cell 7 | Cell 8 | Cell 9 |
# Code
\`\`\`JavaScript
const code = "code"
\`\`\`
# Blockquote
> This is a blockquote.
# Horizontal rule
---
`

View File

@ -0,0 +1,27 @@
export const markdownContentSVG = `
\`\`\`svg
<svg width="400" height="600" xmlns="http://www.w3.org/2000/svg">
<rect width="100%" height="100%" fill="#F0F8FF"/>
<text x="50%" y="60" font-family="楷体" font-size="32" fill="#4682B4" text-anchor="middle">Logo设计</text>
<line x1="50" y1="80" x2="350" y2="80" stroke="#B0C4DE" stroke-width="2"/>
<text x="50%" y="120" font-family="Arial" font-size="24" fill="#708090" text-anchor="middle"></text>
<text x="50%" y="150" font-family="MS Mincho" font-size="20" fill="#778899" text-anchor="middle"></text>
<text x="50%" y="200" font-family="汇文明朝体" font-size="18" fill="#696969" text-anchor="middle">
<tspan x="50%" dy="25"></tspan>
<tspan x="50%" dy="25"></tspan>
<tspan x="50%" dy="25"></tspan>
<tspan x="50%" dy="25"></tspan>
</text>
<circle cx="200" cy="400" r="80" fill="none" stroke="#4169E1" stroke-width="3"/>
<line x1="200" y1="320" x2="200" y2="480" stroke="#4169E1" stroke-width="3"/>
<line x1="120" y1="400" x2="280" y2="400" stroke="#4169E1" stroke-width="3"/>
<text x="50%" y="550" font-family="微软雅黑" font-size="16" fill="#1E90FF" text-anchor="middle"> </text>
</svg>
\`\`\`
`

View File

@ -0,0 +1,136 @@
import type { WorkflowProcess } from '@/app/components/base/chat/types'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
export const mockedWorkflowProcess = {
status: WorkflowRunningStatus.Succeeded,
resultText: 'Hello, how can I assist you today?',
tracing: [
{
extras: {},
id: 'f6337dc9-e280-4915-965f-10b0552dd917',
node_id: '1724232060789',
node_type: 'start',
title: 'Start',
index: 1,
predecessor_node_id: null,
inputs: {
'sys.query': 'hi',
'sys.files': [],
'sys.conversation_id': '92ce0a3e-8f15-43d1-b31d-32716c4b10a7',
'sys.user_id': 'fbff43f9-d5a4-4e85-b63b-d3a91d806c6f',
'sys.dialogue_count': 1,
'sys.app_id': 'b2e8906a-aad3-43a0-9ace-0e44cc7315e1',
'sys.workflow_id': '70004abe-561f-418b-b9e8-8c957ce55140',
'sys.workflow_run_id': '69db9267-aaee-42e1-9581-dbfb67e8eeb5',
},
process_data: null,
outputs: {
'sys.query': 'hi',
'sys.files': [],
'sys.conversation_id': '92ce0a3e-8f15-43d1-b31d-32716c4b10a7',
'sys.user_id': 'fbff43f9-d5a4-4e85-b63b-d3a91d806c6f',
'sys.dialogue_count': 1,
'sys.app_id': 'b2e8906a-aad3-43a0-9ace-0e44cc7315e1',
'sys.workflow_id': '70004abe-561f-418b-b9e8-8c957ce55140',
'sys.workflow_run_id': '69db9267-aaee-42e1-9581-dbfb67e8eeb5',
},
status: 'succeeded',
error: null,
elapsed_time: 0.035744,
execution_metadata: null,
created_at: 1728980002,
finished_at: 1728980002,
files: [],
parallel_id: null,
parallel_start_node_id: null,
parent_parallel_id: null,
parent_parallel_start_node_id: null,
iteration_id: null,
},
{
extras: {},
id: '92204d8d-4198-4c46-aa02-c2754b11dec9',
node_id: 'llm',
node_type: 'llm',
title: 'LLM',
index: 2,
predecessor_node_id: '1724232060789',
inputs: null,
process_data: {
model_mode: 'chat',
prompts: [
{
role: 'system',
text: 'hi',
files: [],
},
{
role: 'user',
text: 'hi',
files: [],
},
],
model_provider: 'openai',
model_name: 'gpt-4o-mini',
},
outputs: {
text: 'Hello! How can I assist you today?',
usage: {
prompt_tokens: 13,
prompt_unit_price: '0.15',
prompt_price_unit: '0.000001',
prompt_price: '0.0000020',
completion_tokens: 9,
completion_unit_price: '0.60',
completion_price_unit: '0.000001',
completion_price: '0.0000054',
total_tokens: 22,
total_price: '0.0000074',
currency: 'USD',
latency: 1.8902503330027685,
},
finish_reason: 'stop',
},
status: 'succeeded',
error: null,
elapsed_time: 5.089409,
execution_metadata: {
total_tokens: 22,
total_price: '0.0000074',
currency: 'USD',
},
created_at: 1728980002,
finished_at: 1728980007,
files: [],
parallel_id: null,
parallel_start_node_id: null,
parent_parallel_id: null,
parent_parallel_start_node_id: null,
iteration_id: null,
},
{
extras: {},
id: '7149bac6-60f9-4e06-a5ed-1d9d3764c06b',
node_id: 'answer',
node_type: 'answer',
title: 'Answer',
index: 3,
predecessor_node_id: 'llm',
inputs: null,
process_data: null,
outputs: {
answer: 'Hello! How can I assist you today?',
},
status: 'succeeded',
error: null,
elapsed_time: 0.015339,
execution_metadata: null,
created_at: 1728980007,
finished_at: 1728980007,
parallel_id: null,
parallel_start_node_id: null,
parent_parallel_id: null,
parent_parallel_start_node_id: null,
},
],
} as unknown as WorkflowProcess

View File

@ -0,0 +1,96 @@
import type { Meta, StoryObj } from '@storybook/react'
import type { ChatItem } from '../../types'
import { mockedWorkflowProcess } from './__mocks__/workflowProcess'
import { markdownContent } from './__mocks__/markdownContent'
import { markdownContentSVG } from './__mocks__/markdownContentSVG'
import Answer from '.'
const meta = {
title: 'Base/Chat Answer',
component: Answer,
parameters: {
layout: 'fullscreen',
},
tags: ['autodocs'],
argTypes: {
noChatInput: { control: 'boolean', description: 'If set to true, some buttons that are supposed to be shown on hover will not be displayed.' },
responding: { control: 'boolean', description: 'Indicates if the answer is being generated.' },
showPromptLog: { control: 'boolean', description: 'If set to true, the prompt log button will be shown on hover.' },
},
args: {
noChatInput: false,
responding: false,
showPromptLog: false,
},
} satisfies Meta<typeof Answer>
export default meta
type Story = StoryObj<typeof meta>
const mockedBaseChatItem = {
id: '1',
isAnswer: true,
content: 'Hello, how can I assist you today?',
} satisfies ChatItem
export const Basic: Story = {
args: {
item: mockedBaseChatItem,
question: mockedBaseChatItem.content,
index: 0,
},
render: (args) => {
return <div className="w-full px-10 py-5">
<Answer {...args} />
</div>
},
}
export const WithWorkflowProcess: Story = {
args: {
item: {
...mockedBaseChatItem,
workflowProcess: mockedWorkflowProcess,
},
question: mockedBaseChatItem.content,
index: 0,
},
render: (args) => {
return <div className="w-full px-10 py-5">
<Answer {...args} />
</div>
},
}
export const WithMarkdownContent: Story = {
args: {
item: {
...mockedBaseChatItem,
content: markdownContent,
},
question: mockedBaseChatItem.content,
index: 0,
},
render: (args) => {
return <div className="w-full px-10 py-5">
<Answer {...args} />
</div>
},
}
export const WithMarkdownSVG: Story = {
args: {
item: {
...mockedBaseChatItem,
content: markdownContentSVG,
},
question: mockedBaseChatItem.content,
index: 0,
},
render: (args) => {
return <div className="w-full px-10 py-5">
<Answer {...args} />
</div>
},
}

View File

@ -0,0 +1,33 @@
import type { Meta, StoryObj } from '@storybook/react'
import type { ChatItem } from '../types'
import Question from './question'
import { User } from '@/app/components/base/icons/src/public/avatar'
const meta = {
title: 'Base/Chat Question',
component: Question,
parameters: {
layout: 'centered',
},
tags: ['autodocs'],
argTypes: {},
args: {},
} satisfies Meta<typeof Question>
export default meta
type Story = StoryObj<typeof meta>
export const Default: Story = {
args: {
item: {
id: '1',
isAnswer: false,
content: 'You are a helpful assistant.',
} satisfies ChatItem,
theme: undefined,
questionIcon: <div className='w-full h-full rounded-full border-[0.5px] border-black/5'>
<User className='w-full h-full' />
</div>,
},
}

View File

@ -17,6 +17,7 @@ type IPopover = {
btnElement?: string | React.ReactNode
btnClassName?: string | ((open: boolean) => string)
manualClose?: boolean
disabled?: boolean
}
const timeoutDuration = 100
@ -30,6 +31,7 @@ export default function CustomPopover({
className,
btnClassName,
manualClose,
disabled = false,
}: IPopover) {
const buttonRef = useRef<HTMLButtonElement>(null)
const timeOutRef = useRef<NodeJS.Timeout | null>(null)
@ -60,6 +62,7 @@ export default function CustomPopover({
>
<Popover.Button
ref={buttonRef}
disabled={disabled}
className={`group ${s.popupBtn} ${open ? '' : 'bg-gray-100'} ${!btnClassName
? ''
: typeof btnClassName === 'string'

View File

@ -132,6 +132,7 @@ const StepTwo = ({
? IndexingType.QUALIFIED
: IndexingType.ECONOMICAL,
)
const [isLanguageSelectDisabled, setIsLanguageSelectDisabled] = useState(false)
const [docForm, setDocForm] = useState<DocForm | string>(
(datasetId && documentDetail) ? documentDetail.doc_form : DocForm.TEXT,
)
@ -200,9 +201,9 @@ const StepTwo = ({
}
}
const fetchFileIndexingEstimate = async (docForm = DocForm.TEXT) => {
const fetchFileIndexingEstimate = async (docForm = DocForm.TEXT, language?: string) => {
// eslint-disable-next-line @typescript-eslint/no-use-before-define
const res = await didFetchFileIndexingEstimate(getFileIndexingEstimateParams(docForm)!)
const res = await didFetchFileIndexingEstimate(getFileIndexingEstimateParams(docForm, language)!)
if (segmentationType === SegmentType.CUSTOM)
setCustomFileIndexingEstimate(res)
else
@ -270,7 +271,7 @@ const StepTwo = ({
}
}
const getFileIndexingEstimateParams = (docForm: DocForm): IndexingEstimateParams | undefined => {
const getFileIndexingEstimateParams = (docForm: DocForm, language?: string): IndexingEstimateParams | undefined => {
if (dataSourceType === DataSourceType.FILE) {
return {
info_list: {
@ -282,7 +283,7 @@ const StepTwo = ({
indexing_technique: getIndexing_technique() as string,
process_rule: getProcessRule(),
doc_form: docForm,
doc_language: docLanguage,
doc_language: language || docLanguage,
dataset_id: datasetId as string,
}
}
@ -295,7 +296,7 @@ const StepTwo = ({
indexing_technique: getIndexing_technique() as string,
process_rule: getProcessRule(),
doc_form: docForm,
doc_language: docLanguage,
doc_language: language || docLanguage,
dataset_id: datasetId as string,
}
}
@ -308,7 +309,7 @@ const StepTwo = ({
indexing_technique: getIndexing_technique() as string,
process_rule: getProcessRule(),
doc_form: docForm,
doc_language: docLanguage,
doc_language: language || docLanguage,
dataset_id: datasetId as string,
}
}
@ -483,8 +484,26 @@ const StepTwo = ({
setDocForm(DocForm.TEXT)
}
const previewSwitch = async (language?: string) => {
setPreviewSwitched(true)
setIsLanguageSelectDisabled(true)
if (segmentationType === SegmentType.AUTO)
setAutomaticFileIndexingEstimate(null)
else
setCustomFileIndexingEstimate(null)
try {
await fetchFileIndexingEstimate(DocForm.QA, language)
}
finally {
setIsLanguageSelectDisabled(false)
}
}
const handleSelect = (language: string) => {
setDocLanguage(language)
    // Language switched: re-run the segmentation estimate with the new language
if (docForm === DocForm.QA && previewSwitched)
previewSwitch(language)
}
const changeToEconomicalType = () => {
@ -494,15 +513,6 @@ const StepTwo = ({
}
}
const previewSwitch = async () => {
setPreviewSwitched(true)
if (segmentationType === SegmentType.AUTO)
setAutomaticFileIndexingEstimate(null)
else
setCustomFileIndexingEstimate(null)
await fetchFileIndexingEstimate(DocForm.QA)
}
useEffect(() => {
// fetch rules
if (!isSetting) {
@ -575,7 +585,7 @@ const StepTwo = ({
<div ref={scrollRef} className='relative h-full w-full overflow-y-scroll'>
<div className={cn(s.pageHeader, scrolled && s.fixed, isMobile && '!px-6')}>
<span>{t('datasetCreation.steps.two')}</span>
{isMobile && (
{(isMobile || !showPreview) && (
<Button
className='border-[0.5px] !h-8 hover:outline hover:outline-[0.5px] hover:outline-gray-300 text-gray-700 font-medium bg-white shadow-[0px_1px_2px_0px_rgba(16,24,40,0.05)]'
onClick={setShowPreview}
@ -777,7 +787,7 @@ const StepTwo = ({
<div className='mb-[2px] text-md font-medium text-gray-900'>{t('datasetCreation.stepTwo.QATitle')}</div>
<div className='inline-flex items-center text-[13px] leading-[18px] text-gray-500'>
<span className='pr-1'>{t('datasetCreation.stepTwo.QALanguage')}</span>
<LanguageSelect currentLanguage={docLanguage} onSelect={handleSelect} />
<LanguageSelect currentLanguage={docLanguage} onSelect={handleSelect} disabled={isLanguageSelectDisabled} />
</div>
</div>
<div className='shrink-0'>
@ -948,7 +958,7 @@ const StepTwo = ({
<div className='grow flex items-center'>
<div>{t('datasetCreation.stepTwo.previewTitle')}</div>
{docForm === DocForm.QA && !previewSwitched && (
<Button className='ml-2' variant='secondary-accent' onClick={previewSwitch}>{t('datasetCreation.stepTwo.previewButton')}</Button>
<Button className='ml-2' variant='secondary-accent' onClick={() => previewSwitch()}>{t('datasetCreation.stepTwo.previewButton')}</Button>
)}
</div>
<div className='flex items-center justify-center w-6 h-6 cursor-pointer' onClick={hidePreview}>
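One subtlety in the hunk above: `previewSwitch` now takes an optional `language` string, so it can no longer be passed to `onClick` directly. A small sketch of the pitfall:

```typescript
// Why `onClick={() => previewSwitch()}` replaced `onClick={previewSwitch}`.
// React passes the click event as the first argument, so a direct reference
// would bind `language` to a MouseEvent instead of leaving it undefined.
const previewSwitch = async (language?: string) => {
  console.log(typeof language) // 'undefined' when called as previewSwitch()
}

// Bad: language becomes the MouseEvent object.
// <Button onClick={previewSwitch} />

// Good: explicit zero-argument call keeps language undefined.
// <Button onClick={() => previewSwitch()} />
```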

View File

@ -9,16 +9,19 @@ import { languages } from '@/i18n/language'
export type ILanguageSelectProps = {
currentLanguage: string
onSelect: (language: string) => void
disabled?: boolean
}
const LanguageSelect: FC<ILanguageSelectProps> = ({
currentLanguage,
onSelect,
disabled,
}) => {
return (
<Popover
manualClose
trigger='click'
disabled={disabled}
htmlContent={
<div className='w-full py-1'>
{languages.filter(language => language.supported).map(({ prompt_name, name }) => (

View File

@ -81,7 +81,10 @@ const ApiBasedExtensionSelector: FC<ApiBasedExtensionSelectorProps> = ({
</div>
<div
className='flex items-center text-xs text-primary-600 cursor-pointer'
onClick={() => setShowAccountSettingModal({ payload: 'api-based-extension' })}
onClick={() => {
setOpen(false)
setShowAccountSettingModal({ payload: 'api-based-extension' })
}}
>
{t('common.apiBasedExtension.selector.manage')}
<ArrowUpRight className='ml-0.5 w-3 h-3' />
@ -106,7 +109,10 @@ const ApiBasedExtensionSelector: FC<ApiBasedExtensionSelectorProps> = ({
<div className='p-1'>
<div
className='flex items-center px-3 h-8 text-sm text-primary-600 cursor-pointer'
onClick={() => setShowApiBasedExtensionModal({ payload: {}, onSaveCallback: () => mutate() })}
onClick={() => {
setOpen(false)
setShowApiBasedExtensionModal({ payload: {}, onSaveCallback: () => mutate() })
}}
>
<RiAddLine className='mr-2 w-4 h-4' />
{t('common.operation.add')}

View File

@ -14,12 +14,12 @@ const SwrInitor = ({
}: SwrInitorProps) => {
const router = useRouter()
const searchParams = useSearchParams()
const { getNewAccessToken } = useRefreshToken()
const consoleToken = searchParams.get('access_token')
const refreshToken = searchParams.get('refresh_token')
const consoleTokenFromLocalStorage = localStorage?.getItem('console_token')
const refreshTokenFromLocalStorage = localStorage?.getItem('refresh_token')
const [init, setInit] = useState(false)
const { getNewAccessToken } = useRefreshToken()
useEffect(() => {
if (!(consoleToken || refreshToken || consoleTokenFromLocalStorage || refreshTokenFromLocalStorage)) {
@ -27,12 +27,12 @@ const SwrInitor = ({
return
}
if (consoleTokenFromLocalStorage && refreshTokenFromLocalStorage)
getNewAccessToken(consoleTokenFromLocalStorage, refreshTokenFromLocalStorage)
getNewAccessToken()
if (consoleToken && refreshToken) {
localStorage.setItem('console_token', consoleToken)
localStorage.setItem('refresh_token', refreshToken)
getNewAccessToken(consoleToken, refreshToken).then(() => {
getNewAccessToken().then(() => {
router.replace('/apps', { forceOptimisticNavigation: false } as any)
}).catch(() => {
router.replace('/signin')

View File

@ -99,7 +99,7 @@ const NormalForm = () => {
if (res.result === 'success') {
localStorage.setItem('console_token', res.data.access_token)
localStorage.setItem('refresh_token', res.data.refresh_token)
getNewAccessToken(res.data.access_token, res.data.refresh_token)
getNewAccessToken()
router.replace('/apps')
}
else {

View File

@ -31,7 +31,7 @@ const UserSSOForm: FC<UserSSOFormProps> = ({
if (refreshToken && consoleToken) {
localStorage.setItem('console_token', consoleToken)
localStorage.setItem('refresh_token', refreshToken)
getNewAccessToken(consoleToken, refreshToken)
getNewAccessToken()
router.replace('/apps')
}

View File

@ -14,7 +14,6 @@ const useRefreshToken = () => {
const router = useRouter()
const timer = useRef<NodeJS.Timeout>()
const advanceTime = useRef<number>(5 * 60 * 1000)
const interval = useRef<number>(55 * 60 * 1000)
const getExpireTime = useCallback((token: string) => {
if (!token)
@ -31,18 +30,24 @@ const useRefreshToken = () => {
localStorage?.removeItem('is_refreshing')
localStorage?.removeItem('console_token')
localStorage?.removeItem('refresh_token')
localStorage?.removeItem('last_refresh_time')
router.replace('/signin')
}, [])
const getNewAccessToken = useCallback(async (currentAccessToken: string, currentRefreshToken: string) => {
if (localStorage?.getItem('is_refreshing') === '1')
const getNewAccessToken = useCallback(async () => {
const currentAccessToken = localStorage?.getItem('console_token')
const currentRefreshToken = localStorage?.getItem('refresh_token')
if (!currentAccessToken || !currentRefreshToken) {
handleError()
return new Error('No access token or refresh token found')
}
if (localStorage?.getItem('is_refreshing') === '1') {
timer.current = setTimeout(() => {
getNewAccessToken()
}, 1000)
return null
}
const currentTokenExpireTime = getExpireTime(currentAccessToken)
let lastRefreshTime = parseInt(localStorage?.getItem('last_refresh_time') || '0')
lastRefreshTime = isNaN(lastRefreshTime) ? 0 : lastRefreshTime
if (getCurrentTimeStamp() + advanceTime.current > currentTokenExpireTime
&& lastRefreshTime + interval.current < getCurrentTimeStamp()) {
if (getCurrentTimeStamp() + advanceTime.current > currentTokenExpireTime) {
localStorage?.setItem('is_refreshing', '1')
const [e, res] = await fetchWithRetry(fetchNewToken({
body: { refresh_token: currentRefreshToken },
@ -53,24 +58,17 @@ const useRefreshToken = () => {
}
const { access_token, refresh_token } = res.data
localStorage?.setItem('is_refreshing', '0')
localStorage?.setItem('last_refresh_time', getCurrentTimeStamp().toString())
localStorage?.setItem('console_token', access_token)
localStorage?.setItem('refresh_token', refresh_token)
const newTokenExpireTime = getExpireTime(access_token)
timer.current = setTimeout(() => {
const consoleTokenFromLocalStorage = localStorage?.getItem('console_token')
const refreshTokenFromLocalStorage = localStorage?.getItem('refresh_token')
if (consoleTokenFromLocalStorage && refreshTokenFromLocalStorage)
getNewAccessToken(consoleTokenFromLocalStorage, refreshTokenFromLocalStorage)
getNewAccessToken()
}, newTokenExpireTime - advanceTime.current - getCurrentTimeStamp())
}
else {
const newTokenExpireTime = getExpireTime(currentAccessToken)
timer.current = setTimeout(() => {
const consoleTokenFromLocalStorage = localStorage?.getItem('console_token')
const refreshTokenFromLocalStorage = localStorage?.getItem('refresh_token')
if (consoleTokenFromLocalStorage && refreshTokenFromLocalStorage)
getNewAccessToken(consoleTokenFromLocalStorage, refreshTokenFromLocalStorage)
getNewAccessToken()
}, newTokenExpireTime - advanceTime.current - getCurrentTimeStamp())
}
return null
@ -80,7 +78,6 @@ const useRefreshToken = () => {
return () => {
clearTimeout(timer.current)
localStorage?.removeItem('is_refreshing')
localStorage?.removeItem('last_refresh_time')
}
}, [])
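In the rewritten hook, `getNewAccessToken` takes no arguments: it reads both tokens from localStorage, backs off for one second while another caller holds the `is_refreshing` flag, refreshes whenever expiry is less than five minutes away (the old 55-minute `interval` throttle and `last_refresh_time` bookkeeping are gone), and reschedules itself before the new token expires. A condensed sketch of that timing logic; `getExpireTime` and the token exchange are borrowed assumptions, not reproduced from the hook:

```typescript
// Condensed sketch of the refresh timing above. getExpireTime (JWT exp claim
// in ms) and the token exchange are declared as assumptions.
const ADVANCE_MS = 5 * 60 * 1000 // refresh five minutes before expiry

declare function getExpireTime(token: string): number
declare function exchangeRefreshToken(refreshToken: string):
  Promise<{ access_token: string; refresh_token: string }>

async function getNewAccessToken(): Promise<void> {
  const accessToken = localStorage.getItem('console_token')
  const refreshToken = localStorage.getItem('refresh_token')
  if (!accessToken || !refreshToken)
    return // the real hook clears storage and redirects to /signin here

  if (localStorage.getItem('is_refreshing') === '1') {
    setTimeout(getNewAccessToken, 1000) // another caller is refreshing: retry
    return
  }

  let nextToken = accessToken
  if (Date.now() + ADVANCE_MS > getExpireTime(accessToken)) {
    localStorage.setItem('is_refreshing', '1')
    const pair = await exchangeRefreshToken(refreshToken)
    localStorage.setItem('console_token', pair.access_token)
    localStorage.setItem('refresh_token', pair.refresh_token)
    localStorage.setItem('is_refreshing', '0')
    nextToken = pair.access_token
  }

  // Schedule the next proactive check just before the advance window opens.
  setTimeout(getNewAccessToken, getExpireTime(nextToken) - ADVANCE_MS - Date.now())
}
```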

View File

@ -1,6 +1,6 @@
{
"name": "dify-web",
"version": "0.9.1",
"version": "0.9.2",
"private": true,
"engines": {
"node": ">=18.17.0"
@ -18,7 +18,9 @@
"check-i18n": "node ./i18n/check-i18n.js",
"auto-gen-i18n": "node ./i18n/auto-gen-i18n.js",
"test": "jest",
"test:watch": "jest --watch"
"test:watch": "jest --watch",
"storybook": "storybook dev -p 6006",
"build-storybook": "storybook build"
},
"dependencies": {
"@babel/runtime": "^7.22.3",
@ -106,8 +108,18 @@
},
"devDependencies": {
"@antfu/eslint-config": "^0.36.0",
"@chromatic-com/storybook": "^1.9.0",
"@faker-js/faker": "^7.6.0",
"@rgrove/parse-xml": "^4.1.0",
"@storybook/addon-essentials": "^8.3.5",
"@storybook/addon-interactions": "^8.3.5",
"@storybook/addon-links": "^8.3.5",
"@storybook/addon-onboarding": "^8.3.5",
"@storybook/addon-themes": "^8.3.5",
"@storybook/blocks": "^8.3.5",
"@storybook/nextjs": "^8.3.5",
"@storybook/react": "^8.3.5",
"@storybook/test": "^8.3.5",
"@testing-library/dom": "^10.3.2",
"@testing-library/jest-dom": "^6.4.6",
"@testing-library/react": "^16.0.0",
@ -134,6 +146,7 @@
"cross-env": "^7.0.3",
"eslint": "^8.36.0",
"eslint-config-next": "^14.0.4",
"eslint-plugin-storybook": "^0.9.0",
"husky": "^8.0.3",
"jest": "^29.7.0",
"jest-environment-jsdom": "^29.7.0",
@ -141,6 +154,7 @@
"magicast": "^0.3.4",
"postcss": "^8.4.31",
"sass": "^1.61.0",
"storybook": "^8.3.5",
"tailwindcss": "^3.4.4",
"ts-node": "^10.9.2",
"typescript": "4.9.5",

File diff suppressed because it is too large Load Diff