From b0876e0ec8de1f3b8a7fe56e47d5f73ae3cedb8b Mon Sep 17 00:00:00 2001 From: Yansong Zhang <916125788@qq.com> Date: Fri, 6 Feb 2026 11:08:09 +0800 Subject: [PATCH] add queue api_token_update --- api/README.md | 2 +- api/docker/entrypoint.sh | 4 ++-- api/schedule/update_api_token_last_used_task.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/README.md b/api/README.md index 9d89b490b0..b89e653410 100644 --- a/api/README.md +++ b/api/README.md @@ -122,7 +122,7 @@ These commands assume you start from the repository root. ```bash cd api - uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention + uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q api_token_update,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention ``` 1. Optional: start Celery Beat (scheduled tasks, in a new terminal). 
diff --git a/api/docker/entrypoint.sh b/api/docker/entrypoint.sh index c0279f893b..9a41085a4b 100755 --- a/api/docker/entrypoint.sh +++ b/api/docker/entrypoint.sh @@ -35,10 +35,10 @@ if [[ "${MODE}" == "worker" ]]; then if [[ -z "${CELERY_QUEUES}" ]]; then if [[ "${EDITION}" == "CLOUD" ]]; then # Cloud edition: separate queues for dataset and trigger tasks - DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention" + DEFAULT_QUEUES="api_token_update,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention" else # Community edition (SELF_HOSTED): dataset, pipeline and workflow have separate queues - DEFAULT_QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention" + DEFAULT_QUEUES="api_token_update,dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention" fi else DEFAULT_QUEUES="${CELERY_QUEUES}" diff --git a/api/schedule/update_api_token_last_used_task.py b/api/schedule/update_api_token_last_used_task.py index 09348b7954..a4b2582a43 100644 --- a/api/schedule/update_api_token_last_used_task.py +++ b/api/schedule/update_api_token_last_used_task.py @@ -25,7 +25,7 @@ logger = logging.getLogger(__name__) ACTIVE_TOKEN_KEY_PREFIX = "api_token_active:" -@app.celery.task(queue="dataset") 
+@app.celery.task(queue="api_token_update") def batch_update_api_token_last_used(): """ Batch update last_used_at for all recently active API tokens.