from datetime import datetime
from decimal import Decimal

import pytz
from flask import jsonify
from flask_login import current_user
from flask_restful import Resource, reqparse

from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.setup import setup_required
from controllers.console.wraps import account_initialization_required
from extensions.ext_database import db
from libs.helper import datetime_string
from libs.login import login_required
from models.model import AppMode
from models.workflow import WorkflowRunTriggeredFrom
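
# Console statistics endpoints for workflow apps: daily run counts, daily distinct
# end users ("terminals"), daily token totals, and average interactions per user.
# Each endpoint accepts optional `start`/`end` query arguments ('%Y-%m-%d %H:%M',
# interpreted in the requesting account's timezone) and groups results by day.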


class WorkflowDailyRunsStatistic(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model
    def get(self, app_model):
        account = current_user

        parser = reqparse.RequestParser()
        parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        args = parser.parse_args()

        sql_query = '''
            SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(id) AS runs
            FROM workflow_runs
            WHERE app_id = :app_id
                AND triggered_from = :triggered_from
        '''
        arg_dict = {'tz': account.timezone, 'app_id': app_model.id, 'triggered_from': WorkflowRunTriggeredFrom.APP_RUN.value}

        # start/end arrive in the account's local timezone; convert them to UTC
        # before comparing with created_at, which is stored in UTC.
        timezone = pytz.timezone(account.timezone)
        utc_timezone = pytz.utc

        if args['start']:
            start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
            start_datetime = start_datetime.replace(second=0)

            start_datetime_timezone = timezone.localize(start_datetime)
            start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and created_at >= :start'
            arg_dict['start'] = start_datetime_utc

        if args['end']:
            end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
            end_datetime = end_datetime.replace(second=0)

            end_datetime_timezone = timezone.localize(end_datetime)
            end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and created_at < :end'
            arg_dict['end'] = end_datetime_utc

        sql_query += ' GROUP BY date order by date'

        response_data = []

        with db.engine.begin() as conn:
            rs = conn.execute(db.text(sql_query), arg_dict)
            for i in rs:
                response_data.append({
                    'date': str(i.date),
                    'runs': i.runs
                })

        return jsonify({
            'data': response_data
        })


class WorkflowDailyTerminalsStatistic(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model
    def get(self, app_model):
        account = current_user

        parser = reqparse.RequestParser()
        parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        args = parser.parse_args()

        sql_query = '''
            SELECT date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date, count(distinct workflow_runs.created_by) AS terminal_count
            FROM workflow_runs
            WHERE app_id = :app_id
                AND triggered_from = :triggered_from
        '''
        arg_dict = {'tz': account.timezone, 'app_id': app_model.id, 'triggered_from': WorkflowRunTriggeredFrom.APP_RUN.value}

        timezone = pytz.timezone(account.timezone)
        utc_timezone = pytz.utc

        if args['start']:
            start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
            start_datetime = start_datetime.replace(second=0)

            start_datetime_timezone = timezone.localize(start_datetime)
            start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and created_at >= :start'
            arg_dict['start'] = start_datetime_utc

        if args['end']:
            end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
            end_datetime = end_datetime.replace(second=0)

            end_datetime_timezone = timezone.localize(end_datetime)
            end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and created_at < :end'
            arg_dict['end'] = end_datetime_utc

        sql_query += ' GROUP BY date order by date'

        response_data = []

        with db.engine.begin() as conn:
            rs = conn.execute(db.text(sql_query), arg_dict)
            for i in rs:
                response_data.append({
                    'date': str(i.date),
                    'terminal_count': i.terminal_count
                })

        return jsonify({
            'data': response_data
        })


class WorkflowDailyTokenCostStatistic(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model
    def get(self, app_model):
        account = current_user

        parser = reqparse.RequestParser()
        parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        args = parser.parse_args()

        sql_query = '''
            SELECT
                date(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
                SUM(workflow_runs.total_tokens) as token_count
            FROM workflow_runs
            WHERE app_id = :app_id
                AND triggered_from = :triggered_from
        '''
        arg_dict = {'tz': account.timezone, 'app_id': app_model.id, 'triggered_from': WorkflowRunTriggeredFrom.APP_RUN.value}

        timezone = pytz.timezone(account.timezone)
        utc_timezone = pytz.utc

        if args['start']:
            start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
            start_datetime = start_datetime.replace(second=0)

            start_datetime_timezone = timezone.localize(start_datetime)
            start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and created_at >= :start'
            arg_dict['start'] = start_datetime_utc

        if args['end']:
            end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
            end_datetime = end_datetime.replace(second=0)

            end_datetime_timezone = timezone.localize(end_datetime)
            end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

            sql_query += ' and created_at < :end'
            arg_dict['end'] = end_datetime_utc

        sql_query += ' GROUP BY date order by date'

        response_data = []

        with db.engine.begin() as conn:
            rs = conn.execute(db.text(sql_query), arg_dict)
            for i in rs:
                response_data.append({
                    'date': str(i.date),
                    'token_count': i.token_count,
                })

        return jsonify({
            'data': response_data
        })


class WorkflowAverageAppInteractionStatistic(Resource):
    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model(mode=[AppMode.WORKFLOW])
    def get(self, app_model):
        account = current_user

        parser = reqparse.RequestParser()
        parser.add_argument('start', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        parser.add_argument('end', type=datetime_string('%Y-%m-%d %H:%M'), location='args')
        args = parser.parse_args()

        # Unlike the other endpoints, the optional date filters belong inside the
        # sub-query (before its GROUP BY), so they are spliced in through the
        # {{start}}/{{end}} placeholders rather than appended to the statement.
        sql_query = """
            SELECT
                AVG(sub.interactions) as interactions,
                sub.date
            FROM
                (SELECT
                    date(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
                    c.created_by,
                    COUNT(c.id) AS interactions
                FROM workflow_runs c
                WHERE c.app_id = :app_id
                    AND c.triggered_from = :triggered_from
                    {{start}}
                    {{end}}
                GROUP BY date, c.created_by) sub
            GROUP BY sub.date
        """
        arg_dict = {'tz': account.timezone, 'app_id': app_model.id, 'triggered_from': WorkflowRunTriggeredFrom.APP_RUN.value}

        timezone = pytz.timezone(account.timezone)
        utc_timezone = pytz.utc

        if args['start']:
            start_datetime = datetime.strptime(args['start'], '%Y-%m-%d %H:%M')
            start_datetime = start_datetime.replace(second=0)

            start_datetime_timezone = timezone.localize(start_datetime)
            start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)

            sql_query = sql_query.replace('{{start}}', ' AND c.created_at >= :start')
            arg_dict['start'] = start_datetime_utc
        else:
            sql_query = sql_query.replace('{{start}}', '')

        if args['end']:
            end_datetime = datetime.strptime(args['end'], '%Y-%m-%d %H:%M')
            end_datetime = end_datetime.replace(second=0)

            end_datetime_timezone = timezone.localize(end_datetime)
            end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)

            sql_query = sql_query.replace('{{end}}', ' and c.created_at < :end')
            arg_dict['end'] = end_datetime_utc
        else:
            sql_query = sql_query.replace('{{end}}', '')

        response_data = []

        with db.engine.begin() as conn:
            rs = conn.execute(db.text(sql_query), arg_dict)
            for i in rs:
                response_data.append({
                    'date': str(i.date),
                    'interactions': float(i.interactions.quantize(Decimal('0.01')))
                })

        return jsonify({
            'data': response_data
        })


api.add_resource(WorkflowDailyRunsStatistic, '/apps/<uuid:app_id>/workflow/statistics/daily-conversations')
api.add_resource(WorkflowDailyTerminalsStatistic, '/apps/<uuid:app_id>/workflow/statistics/daily-terminals')
api.add_resource(WorkflowDailyTokenCostStatistic, '/apps/<uuid:app_id>/workflow/statistics/token-costs')
api.add_resource(WorkflowAverageAppInteractionStatistic, '/apps/<uuid:app_id>/workflow/statistics/average-app-interactions')
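
# Illustrative request/response shapes (not part of the module; values are made up).
# The full URL prefix depends on where the console `api` blueprint is mounted in a
# given deployment; the query parameters mirror the `start`/`end` arguments parsed above.
#
#   GET .../apps/<app_id>/workflow/statistics/daily-conversations?start=2024-01-01 00:00&end=2024-01-31 00:00
#   -> {"data": [{"date": "2024-01-01", "runs": 12}, ...]}
#
#   GET .../apps/<app_id>/workflow/statistics/token-costs
#   -> {"data": [{"date": "2024-01-01", "token_count": 3456}, ...]}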