Merge branch 'fix/ops-trace-celery' into deploy/dev

* fix/ops-trace-celery:
  fix: ops trace celery
  fix(model_runtime): remove vision from features for Claude 3.5 Haiku (#10360)
  fix: remove unsupported vision in OpenRouter Haiku 3.5 (#10364)
Joe 2024-11-06 17:50:09 +08:00
commit e130ce11b1
8 changed files with 44 additions and 14 deletions

@@ -4,7 +4,6 @@ label:
 model_type: llm
 features:
   - agent-thought
-  - vision
   - tool-call
   - stream-tool-call
 model_properties:

@@ -4,7 +4,6 @@ label:
 model_type: llm
 features:
   - agent-thought
-  - vision
   - tool-call
   - stream-tool-call
 model_properties:

@@ -4,7 +4,6 @@ label:
 model_type: llm
 features:
   - agent-thought
-  - vision
   - tool-call
   - stream-tool-call
 model_properties:

@@ -4,7 +4,6 @@ label:
 model_type: llm
 features:
   - agent-thought
-  - vision
   - tool-call
   - stream-tool-call
 model_properties:
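
These four identical hunks drop the vision capability flag from four Claude 3.5 Haiku model definitions (the commit messages point at the Anthropic and OpenRouter providers), since the model does not support image input. The snippet below is a hypothetical illustration of why the flag matters, not Dify's actual prompt-assembly code: a feature gate like this is what decides whether image content is ever offered to the model.

# Hypothetical sketch only; names and structure are illustrative.
features = ["agent-thought", "tool-call", "stream-tool-call"]  # after this commit

def build_user_message(text: str, image_url: str | None = None) -> dict:
    # Attach an image part only when the model declares vision support.
    if image_url and "vision" in features:
        return {
            "role": "user",
            "content": [
                {"type": "text", "text": text},
                {"type": "image_url", "image_url": {"url": image_url}},
            ],
        }
    return {"role": "user", "content": text}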

@@ -54,3 +54,6 @@ class LangSmithConfig(BaseTracingConfig):
             raise ValueError("endpoint must start with https://")
         return v
+OPS_FILE_PATH = "ops_trace/"
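
The new OPS_FILE_PATH constant is the storage prefix under which trace payloads are written. A minimal sketch of the path construction used by the manager and the Celery task further down (the app_id value is made up):

from uuid import uuid4

OPS_FILE_PATH = "ops_trace/"

app_id = "0b7f8c2e"  # illustrative value only
file_id = uuid4().hex
file_path = f"{OPS_FILE_PATH}{app_id}/{file_id}.json"
# e.g. "ops_trace/0b7f8c2e/9d4e1f0a2b3c4d5e....json"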

@@ -6,12 +6,13 @@ import threading
 import time
 from datetime import timedelta
 from typing import Any, Optional, Union
-from uuid import UUID
+from uuid import UUID, uuid4
 from flask import current_app
 from core.helper.encrypter import decrypt_token, encrypt_token, obfuscated_token
 from core.ops.entities.config_entity import (
+    OPS_FILE_PATH,
     LangfuseConfig,
     LangSmithConfig,
     TracingProviderEnum,
@@ -28,8 +29,9 @@ from core.ops.entities.trace_entity import (
 )
 from core.ops.langfuse_trace.langfuse_trace import LangFuseDataTrace
 from core.ops.langsmith_trace.langsmith_trace import LangSmithDataTrace
-from core.ops.utils import get_message_data
+from core.ops.utils import convert_datetime_to_str, get_message_data
 from extensions.ext_database import db
+from extensions.ext_storage import storage
 from models.model import App, AppModelConfig, Conversation, Message, MessageAgentThought, MessageFile, TraceAppConfig
 from models.workflow import WorkflowAppLog, WorkflowRun
 from tasks.ops_trace_task import process_trace_tasks
@@ -740,10 +742,19 @@ class TraceQueueManager:
     def send_to_celery(self, tasks: list[TraceTask]):
         with self.flask_app.app_context():
             for task in tasks:
+                file_id = uuid4().hex
                 trace_info = task.execute()
                 task_data = {
                     "app_id": task.app_id,
                     "trace_info_type": type(trace_info).__name__,
                     "trace_info": trace_info.model_dump() if trace_info else {},
                 }
-                process_trace_tasks.delay(task_data)
+                task_data = convert_datetime_to_str(task_data)
+                json_data = json.dumps(task_data, ensure_ascii=False).encode("utf-8")
+                file_path = f"{OPS_FILE_PATH}{task.app_id}/{file_id}.json"
+                storage.save(file_path, json_data)
+                file_info = {
+                    "file_id": file_id,
+                    "app_id": task.app_id,
+                }
+                process_trace_tasks.delay(file_info)
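
For reference, a sketch of what the producer now writes to storage versus what travels over the Celery broker. The field names come from the diff above; the concrete values and the MessageTraceInfo type name are only illustrative.

import json

# Large payload: persisted to shared storage, not sent through the broker
task_data = {
    "app_id": "0b7f8c2e",                   # illustrative
    "trace_info_type": "MessageTraceInfo",  # type(trace_info).__name__
    "trace_info": {"message_id": "42", "start_time": "2024-11-06T17:50:09"},
}
json_data = json.dumps(task_data, ensure_ascii=False).encode("utf-8")

# Small payload: the only thing enqueued for the worker
file_info = {"file_id": "9d4e1f0a2b3c4d5e", "app_id": "0b7f8c2e"}

Keeping only the file reference in the Celery message avoids pushing large, datetime-laden trace payloads through the broker's serializer, which the added convert_datetime_to_str helper suggests was part of the problem with the previous delay(task_data) call.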

@@ -43,3 +43,17 @@ def replace_text_with_content(data):
         return [replace_text_with_content(item) for item in data]
     else:
         return data
+def convert_datetime_to_str(data):
+    if isinstance(data, dict):
+        for key, value in data.items():
+            if isinstance(value, datetime):
+                data[key] = value.isoformat()
+            elif isinstance(value, dict):
+                data[key] = convert_datetime_to_str(value)
+            elif isinstance(value, list):
+                data[key] = [convert_datetime_to_str(item) if isinstance(item, dict | list) else item for item in value]
+    elif isinstance(data, list):
+        data = [convert_datetime_to_str(item) if isinstance(item, dict | list) else item for item in data]
+    return data
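
A quick usage sketch of the new helper, assuming it is imported from core.ops.utils (the payload values are made up):

from datetime import datetime

payload = {
    "app_id": "0b7f8c2e",
    "trace_info": {
        "start_time": datetime(2024, 11, 6, 17, 50, 9),
        "outputs": [{"created_at": datetime(2024, 11, 6)}],
    },
}
convert_datetime_to_str(payload)
# -> {"app_id": "0b7f8c2e",
#     "trace_info": {"start_time": "2024-11-06T17:50:09",
#                    "outputs": [{"created_at": "2024-11-06T00:00:00"}]}}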

@@ -1,17 +1,19 @@
+import json
 import logging
 import time
 from celery import shared_task
 from flask import current_app
+from core.ops.entities.config_entity import OPS_FILE_PATH
 from core.ops.entities.trace_entity import trace_info_info_map
 from core.rag.models.document import Document
+from extensions.ext_storage import storage
 from models.model import Message
 from models.workflow import WorkflowRun
 @shared_task(queue="ops_trace")
-def process_trace_tasks(tasks_data):
+def process_trace_tasks(file_info):
     """
     Async process trace tasks
     :param tasks_data: List of dictionaries containing task data
@@ -20,9 +22,12 @@ def process_trace_tasks(tasks_data):
     """
     from core.ops.ops_trace_manager import OpsTraceManager
-    trace_info = tasks_data.get("trace_info")
-    app_id = tasks_data.get("app_id")
-    trace_info_type = tasks_data.get("trace_info_type")
+    app_id = file_info.get("app_id")
+    file_id = file_info.get("file_id")
+    file_path = f"{OPS_FILE_PATH}{app_id}/{file_id}.json"
+    file_data = storage.load(file_path)
+    trace_info = json.loads(file_data).get("trace_info")
+    trace_info_type = json.loads(file_data).get("trace_info_type")
     trace_instance = OpsTraceManager.get_ops_trace_instance(app_id)
     if trace_info.get("message_data"):
@@ -39,6 +44,7 @@
                 if trace_type:
                     trace_info = trace_type(**trace_info)
                 trace_instance.trace(trace_info)
         end_at = time.perf_counter()
     except Exception:
-        logging.exception("Processing trace tasks failed")
+        logging.exception(f"Processing trace tasks failed, app_id: {app_id}")
+    finally:
+        storage.delete(file_path)
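
To tie the pieces together, a minimal round-trip sketch of the new flow with an in-memory stand-in for the storage extension; everything except the JSON field names and the ops_trace/ prefix is illustrative:

import json
from uuid import uuid4

_store: dict[str, bytes] = {}  # stand-in for extensions.ext_storage.storage

OPS_FILE_PATH = "ops_trace/"

# Producer side (TraceQueueManager.send_to_celery)
app_id, file_id = "0b7f8c2e", uuid4().hex
task_data = {
    "app_id": app_id,
    "trace_info_type": "MessageTraceInfo",
    "trace_info": {"start_time": "2024-11-06T17:50:09"},
}
_store[f"{OPS_FILE_PATH}{app_id}/{file_id}.json"] = json.dumps(task_data, ensure_ascii=False).encode("utf-8")

# Consumer side (process_trace_tasks), receiving only the small file_info dict
file_info = {"file_id": file_id, "app_id": app_id}
file_path = f"{OPS_FILE_PATH}{file_info['app_id']}/{file_info['file_id']}.json"
file_data = json.loads(_store[file_path])
assert file_data["trace_info_type"] == "MessageTraceInfo"
_store.pop(file_path, None)  # mirrors the storage.delete(file_path) in the finally block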