Feat: time period filter for workflow logs (#14271)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
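Note: the new created_at__before / created_at__after query parameters take ISO 8601 timestamps. An illustrative client-side call against the console endpoint added below (base URL, app id, and token are placeholders, not part of this change):

import requests

BASE_URL = "http://localhost:5001/console/api"  # placeholder deployment
APP_ID = "00000000-0000-0000-0000-000000000000"  # placeholder app id

resp = requests.get(
    f"{BASE_URL}/apps/{APP_ID}/workflow-app-logs",
    headers={"Authorization": "Bearer <console-access-token>"},  # placeholder auth
    params={
        "status": "failed",
        # ISO 8601; a trailing "Z" is accepted and normalized server-side
        "created_at__after": "2025-02-01T00:00:00Z",
        "created_at__before": "2025-02-28T23:59:59Z",
        "page": 1,
        "limit": 20,
    },
)
print(resp.json()["total"])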
@@ -1,13 +1,18 @@
+from datetime import datetime
+
 from flask_restful import Resource, marshal_with, reqparse  # type: ignore
 from flask_restful.inputs import int_range  # type: ignore
+from sqlalchemy.orm import Session

 from controllers.console import api
 from controllers.console.app.wraps import get_app_model
 from controllers.console.wraps import account_initialization_required, setup_required
+from extensions.ext_database import db
 from fields.workflow_app_log_fields import workflow_app_log_pagination_fields
 from libs.login import login_required
 from models import App
 from models.model import AppMode
+from models.workflow import WorkflowRunStatus
 from services.workflow_app_service import WorkflowAppService


@@ -24,17 +29,38 @@ class WorkflowAppLogApi(Resource):
         parser = reqparse.RequestParser()
         parser.add_argument("keyword", type=str, location="args")
         parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args")
+        parser.add_argument(
+            "created_at__before", type=str, location="args", help="Filter logs created before this timestamp"
+        )
+        parser.add_argument(
+            "created_at__after", type=str, location="args", help="Filter logs created after this timestamp"
+        )
         parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
         parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
         args = parser.parse_args()

+        args.status = WorkflowRunStatus(args.status) if args.status else None
+        if args.created_at__before:
+            args.created_at__before = datetime.fromisoformat(args.created_at__before.replace("Z", "+00:00"))
+
+        if args.created_at__after:
+            args.created_at__after = datetime.fromisoformat(args.created_at__after.replace("Z", "+00:00"))
+
         # get paginate workflow app logs
         workflow_app_service = WorkflowAppService()
-        workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_app_logs(
-            app_model=app_model, args=args
-        )
+        with Session(db.engine) as session:
+            workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_app_logs(
+                session=session,
+                app_model=app_model,
+                keyword=args.keyword,
+                status=args.status,
+                created_at_before=args.created_at__before,
+                created_at_after=args.created_at__after,
+                page=args.page,
+                limit=args.limit,
+            )

-        return workflow_app_log_pagination
+            return workflow_app_log_pagination


 api.add_resource(WorkflowAppLogApi, "/apps/<uuid:app_id>/workflow-app-logs")
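Note on the timestamp handling above: datetime.fromisoformat on Python versions before 3.11 does not accept a trailing "Z", so the controller rewrites it to the equivalent "+00:00" offset before parsing. A standalone sketch of the same normalization:

from datetime import datetime, timezone

def parse_iso8601(value: str) -> datetime:
    # Rewrite the UTC designator "Z" to an explicit offset so that
    # datetime.fromisoformat also accepts it on Python < 3.11.
    return datetime.fromisoformat(value.replace("Z", "+00:00"))

assert parse_iso8601("2025-02-01T00:00:00Z") == datetime(2025, 2, 1, tzinfo=timezone.utc)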
@@ -1,7 +1,9 @@
 import logging
+from datetime import datetime

 from flask_restful import Resource, fields, marshal_with, reqparse  # type: ignore
 from flask_restful.inputs import int_range  # type: ignore
+from sqlalchemy.orm import Session
 from werkzeug.exceptions import InternalServerError

 from controllers.service_api import api
@@ -25,7 +27,7 @@ from extensions.ext_database import db
 from fields.workflow_app_log_fields import workflow_app_log_pagination_fields
 from libs import helper
 from models.model import App, AppMode, EndUser
-from models.workflow import WorkflowRun
+from models.workflow import WorkflowRun, WorkflowRunStatus
 from services.app_generate_service import AppGenerateService
 from services.workflow_app_service import WorkflowAppService

@@ -125,17 +127,34 @@ class WorkflowAppLogApi(Resource):
         parser = reqparse.RequestParser()
         parser.add_argument("keyword", type=str, location="args")
         parser.add_argument("status", type=str, choices=["succeeded", "failed", "stopped"], location="args")
+        parser.add_argument("created_at__before", type=str, location="args")
+        parser.add_argument("created_at__after", type=str, location="args")
         parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
         parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
         args = parser.parse_args()

+        args.status = WorkflowRunStatus(args.status) if args.status else None
+        if args.created_at__before:
+            args.created_at__before = datetime.fromisoformat(args.created_at__before.replace("Z", "+00:00"))
+
+        if args.created_at__after:
+            args.created_at__after = datetime.fromisoformat(args.created_at__after.replace("Z", "+00:00"))
+
         # get paginate workflow app logs
         workflow_app_service = WorkflowAppService()
-        workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_app_logs(
-            app_model=app_model, args=args
-        )
+        with Session(db.engine) as session:
+            workflow_app_log_pagination = workflow_app_service.get_paginate_workflow_app_logs(
+                session=session,
+                app_model=app_model,
+                keyword=args.keyword,
+                status=args.status,
+                created_at_before=args.created_at__before,
+                created_at_after=args.created_at__after,
+                page=args.page,
+                limit=args.limit,
+            )

-        return workflow_app_log_pagination
+            return workflow_app_log_pagination


 api.add_resource(WorkflowRunApi, "/workflows/run")
@@ -17,8 +17,8 @@ workflow_app_log_partial_fields = {

 workflow_app_log_pagination_fields = {
     "page": fields.Integer,
-    "limit": fields.Integer(attribute="per_page"),
+    "limit": fields.Integer,
     "total": fields.Integer,
-    "has_more": fields.Boolean(attribute="has_next"),
-    "data": fields.List(fields.Nested(workflow_app_log_partial_fields), attribute="items"),
+    "has_more": fields.Boolean,
+    "data": fields.List(fields.Nested(workflow_app_log_partial_fields)),
 }
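The attribute= overrides removed above were only needed while the service returned a flask-sqlalchemy Pagination object (per_page, has_next, items); the service now returns a plain dict whose keys already match the field names. A minimal sketch of marshalling that dict shape (the field set is simplified here; the real definition nests workflow_app_log_partial_fields for each item):

from flask_restful import fields, marshal

pagination_fields = {
    "page": fields.Integer,
    "limit": fields.Integer,
    "total": fields.Integer,
    "has_more": fields.Boolean,
}

result = {"page": 1, "limit": 20, "total": 3, "has_more": False, "data": []}
print(marshal(result, pagination_fields))
# -> page/limit/total/has_more only; keys not in the field spec are dropped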
@@ -354,19 +354,6 @@ class WorkflowRunStatus(StrEnum):
     STOPPED = "stopped"
     PARTIAL_SUCCESSED = "partial-succeeded"

-    @classmethod
-    def value_of(cls, value: str) -> "WorkflowRunStatus":
-        """
-        Get value of given mode.
-
-        :param value: mode value
-        :return: mode
-        """
-        for mode in cls:
-            if mode.value == value:
-                return mode
-        raise ValueError(f"invalid workflow run status value {value}")
-

 class WorkflowRun(Base):
     """
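The value_of helper removed above duplicated behavior the enum already has: WorkflowRunStatus is a StrEnum, so calling the class performs the same lookup and raises ValueError for an unknown value, which is how the controllers now coerce the status argument. A small sketch (a trimmed copy of the model's members):

from enum import StrEnum

class WorkflowRunStatus(StrEnum):
    RUNNING = "running"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
    STOPPED = "stopped"
    PARTIAL_SUCCESSED = "partial-succeeded"

assert WorkflowRunStatus("failed") is WorkflowRunStatus.FAILED
try:
    WorkflowRunStatus("bogus")
except ValueError as exc:
    print(exc)  # 'bogus' is not a valid WorkflowRunStatus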
@@ -1,30 +1,46 @@
 import uuid
+from datetime import datetime

-from flask_sqlalchemy.pagination import Pagination
-from sqlalchemy import and_, or_
+from sqlalchemy import and_, func, or_, select
+from sqlalchemy.orm import Session

-from extensions.ext_database import db
 from models import App, EndUser, WorkflowAppLog, WorkflowRun
 from models.enums import CreatedByRole
 from models.workflow import WorkflowRunStatus


 class WorkflowAppService:
-    def get_paginate_workflow_app_logs(self, app_model: App, args: dict) -> Pagination:
+    def get_paginate_workflow_app_logs(
+        self,
+        *,
+        session: Session,
+        app_model: App,
+        keyword: str | None = None,
+        status: WorkflowRunStatus | None = None,
+        created_at_before: datetime | None = None,
+        created_at_after: datetime | None = None,
+        page: int = 1,
+        limit: int = 20,
+    ) -> dict:
         """
-        Get paginate workflow app logs
-        :param app: app model
-        :param args: request args
-        :return:
+        Get paginate workflow app logs using SQLAlchemy 2.0 style
+        :param session: SQLAlchemy session
+        :param app_model: app model
+        :param keyword: search keyword
+        :param status: filter by status
+        :param created_at_before: filter logs created before this timestamp
+        :param created_at_after: filter logs created after this timestamp
+        :param page: page number
+        :param limit: items per page
+        :return: Pagination object
         """
-        query = db.select(WorkflowAppLog).where(
+        # Build base statement using SQLAlchemy 2.0 style
+        stmt = select(WorkflowAppLog).where(
             WorkflowAppLog.tenant_id == app_model.tenant_id, WorkflowAppLog.app_id == app_model.id
         )

-        status = WorkflowRunStatus.value_of(args.get("status", "")) if args.get("status") else None
-        keyword = args["keyword"]
         if keyword or status:
-            query = query.join(WorkflowRun, WorkflowRun.id == WorkflowAppLog.workflow_run_id)
+            stmt = stmt.join(WorkflowRun, WorkflowRun.id == WorkflowAppLog.workflow_run_id)

         if keyword:
             keyword_like_val = f"%{keyword[:30].encode('unicode_escape').decode('utf-8')}%".replace(r"\u", r"\\u")
@@ -40,20 +56,40 @@ class WorkflowAppService:
             if keyword_uuid:
                 keyword_conditions.append(WorkflowRun.id == keyword_uuid)

-            query = query.outerjoin(
+            stmt = stmt.outerjoin(
                 EndUser,
                 and_(WorkflowRun.created_by == EndUser.id, WorkflowRun.created_by_role == CreatedByRole.END_USER),
-            ).filter(or_(*keyword_conditions))
+            ).where(or_(*keyword_conditions))

         if status:
-            # join with workflow_run and filter by status
-            query = query.filter(WorkflowRun.status == status.value)
+            stmt = stmt.where(WorkflowRun.status == status)

-        query = query.order_by(WorkflowAppLog.created_at.desc())
+        # Add time-based filtering
+        if created_at_before:
+            stmt = stmt.where(WorkflowAppLog.created_at <= created_at_before)

-        pagination = db.paginate(query, page=args["page"], per_page=args["limit"], error_out=False)
+        if created_at_after:
+            stmt = stmt.where(WorkflowAppLog.created_at >= created_at_after)

-        return pagination
+        stmt = stmt.order_by(WorkflowAppLog.created_at.desc())
+
+        # Get total count using the same filters
+        count_stmt = select(func.count()).select_from(stmt.subquery())
+        total = session.scalar(count_stmt) or 0
+
+        # Apply pagination limits
+        offset_stmt = stmt.offset((page - 1) * limit).limit(limit)
+
+        # Execute query and get items
+        items = list(session.scalars(offset_stmt).all())
+
+        return {
+            "page": page,
+            "limit": limit,
+            "total": total,
+            "has_more": total > page * limit,
+            "data": items,
+        }

     @staticmethod
     def _safe_parse_uuid(value: str):
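With db.paginate gone, the service paginates by hand: it counts over a subquery built from the same filtered statement, applies offset/limit, and derives has_more from total > page * limit. A self-contained sketch of that pattern against an in-memory SQLite database (table and data are illustrative only, not project code):

from datetime import datetime

from sqlalchemy import DateTime, String, create_engine, func, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Log(Base):
    __tablename__ = "logs"
    id: Mapped[int] = mapped_column(primary_key=True)
    status: Mapped[str] = mapped_column(String(32))
    created_at: Mapped[datetime] = mapped_column(DateTime)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Log(status="succeeded", created_at=datetime(2025, 2, d)) for d in range(1, 8)])
    session.commit()

    page, limit = 1, 3
    stmt = (
        select(Log)
        .where(Log.created_at >= datetime(2025, 2, 2))  # time-period filter
        .order_by(Log.created_at.desc())
    )

    # Count over the same filtered statement, then page with offset/limit.
    total = session.scalar(select(func.count()).select_from(stmt.subquery())) or 0
    items = list(session.scalars(stmt.offset((page - 1) * limit).limit(limit)).all())

    print({"page": page, "limit": limit, "total": total, "has_more": total > page * limit})
    # {'page': 1, 'limit': 3, 'total': 6, 'has_more': True}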