This commit is contained in:
Iliyan Angelov
2025-09-19 11:58:53 +03:00
parent 306b20e24a
commit 6b247e5b9f
11423 changed files with 1500615 additions and 778 deletions

View File

@@ -0,0 +1,312 @@
"""AMQP 0.9.1 REPL."""
import pprint
import click
from amqp import Connection, Message
from click_repl import register_repl
__all__ = ('amqp',)
from celery.bin.base import handle_preload_options
def dump_message(message):
    """Summarize an AMQP message for terminal display.

    Returns a plain dict of the interesting message attributes, or a
    human-readable hint string when *message* is ``None`` (empty queue).
    """
    if message is not None:
        return {
            'body': message.body,
            'properties': message.properties,
            'delivery_info': message.delivery_info,
        }
    return 'No messages in queue. basic.publish something.'
class AMQPContext:
    """Shared state for the ``celery amqp`` REPL commands.

    Wraps the CLI context and owns the broker connection/channel that the
    individual ``exchange.*`` / ``queue.*`` / ``basic.*`` commands act on.
    """

    def __init__(self, cli_context):
        self.cli_context = cli_context
        self.connection = self.cli_context.app.connection()
        # No channel until the first successful connect (see reconnect()).
        self.channel = None
        self.reconnect()

    @property
    def app(self):
        """The Celery app taken from the CLI context."""
        return self.cli_context.app

    def respond(self, retval):
        """Echo *retval*; non-string values are pretty-printed."""
        if isinstance(retval, str):
            self.cli_context.echo(retval)
        else:
            self.cli_context.echo(pprint.pformat(retval))

    def echo_error(self, exception):
        """Print *exception* prefixed with the styled ERROR marker."""
        self.cli_context.error(f'{self.cli_context.ERROR}: {exception}')

    def echo_ok(self):
        """Print the styled OK marker."""
        self.cli_context.echo(self.cli_context.OK)

    def reconnect(self):
        """(Re-)establish the broker connection and default channel.

        An existing connection is closed first and then reused for the
        reconnect; otherwise a fresh one is created from the app.
        Connection errors are reported rather than raised, leaving
        ``self.channel`` unchanged (``None`` on first use) so commands
        can detect the disconnected state.
        """
        if self.connection:
            self.connection.close()
        else:
            self.connection = self.cli_context.app.connection()
        self.cli_context.echo(f'-> connecting to {self.connection.as_uri()}.')
        try:
            self.connection.connect()
        except (ConnectionRefusedError, ConnectionResetError) as e:
            self.echo_error(e)
        else:
            self.cli_context.secho('-> connected.', fg='green', bold=True)
            self.channel = self.connection.default_channel
@click.group(invoke_without_command=True)
@click.pass_context
@handle_preload_options
def amqp(ctx):
    """AMQP Administration Shell.
    Also works for non-AMQP transports (but not ones that
    store declarations in memory).
    """
    # Wrap the CLI context once so all subcommands share a single
    # broker connection for the lifetime of the REPL session.
    if not isinstance(ctx.obj, AMQPContext):
        ctx.obj = AMQPContext(ctx.obj)
@amqp.command(name='exchange.declare')
@click.argument('exchange',
                type=str)
@click.argument('type',
                type=str)
@click.argument('passive',
                type=bool,
                default=False)
@click.argument('durable',
                type=bool,
                default=False)
@click.argument('auto_delete',
                type=bool,
                default=False)
@click.pass_obj
def exchange_declare(amqp_context, exchange, type, passive, durable,
                     auto_delete):
    # Declare an exchange on the broker (AMQP exchange.declare).
    if amqp_context.channel is None:
        # No usable channel: report and attempt a reconnect for next time.
        amqp_context.echo_error('Not connected to broker. Please retry...')
        amqp_context.reconnect()
    else:
        try:
            amqp_context.channel.exchange_declare(exchange=exchange,
                                                  type=type,
                                                  passive=passive,
                                                  durable=durable,
                                                  auto_delete=auto_delete)
        except Exception as e:
            # Any failure may have invalidated the channel; reconnect.
            amqp_context.echo_error(e)
            amqp_context.reconnect()
        else:
            amqp_context.echo_ok()
@amqp.command(name='exchange.delete')
@click.argument('exchange',
                type=str)
@click.argument('if_unused',
                type=bool)
@click.pass_obj
def exchange_delete(amqp_context, exchange, if_unused):
    # Delete an exchange (AMQP exchange.delete).
    if amqp_context.channel is None:
        # No usable channel: report and attempt a reconnect for next time.
        amqp_context.echo_error('Not connected to broker. Please retry...')
        amqp_context.reconnect()
    else:
        try:
            amqp_context.channel.exchange_delete(exchange=exchange,
                                                 if_unused=if_unused)
        except Exception as e:
            # Any failure may have invalidated the channel; reconnect.
            amqp_context.echo_error(e)
            amqp_context.reconnect()
        else:
            amqp_context.echo_ok()
@amqp.command(name='queue.bind')
@click.argument('queue',
                type=str)
@click.argument('exchange',
                type=str)
@click.argument('routing_key',
                type=str)
@click.pass_obj
def queue_bind(amqp_context, queue, exchange, routing_key):
    # Bind a queue to an exchange with a routing key (AMQP queue.bind).
    if amqp_context.channel is None:
        amqp_context.echo_error('Not connected to broker. Please retry...')
        amqp_context.reconnect()
    else:
        try:
            amqp_context.channel.queue_bind(queue=queue,
                                            exchange=exchange,
                                            routing_key=routing_key)
        except Exception as e:
            # Any failure may have invalidated the channel; reconnect.
            amqp_context.echo_error(e)
            amqp_context.reconnect()
        else:
            amqp_context.echo_ok()
@amqp.command(name='queue.declare')
@click.argument('queue',
                type=str)
@click.argument('passive',
                type=bool,
                default=False)
@click.argument('durable',
                type=bool,
                default=False)
@click.argument('auto_delete',
                type=bool,
                default=False)
@click.pass_obj
def queue_declare(amqp_context, queue, passive, durable, auto_delete):
    # Declare a queue (AMQP queue.declare) and report its stats.
    if amqp_context.channel is None:
        amqp_context.echo_error('Not connected to broker. Please retry...')
        amqp_context.reconnect()
    else:
        try:
            retval = amqp_context.channel.queue_declare(queue=queue,
                                                        passive=passive,
                                                        durable=durable,
                                                        auto_delete=auto_delete)
        except Exception as e:
            amqp_context.echo_error(e)
            amqp_context.reconnect()
        else:
            # queue_declare returns (queue name, message count, consumer count).
            amqp_context.cli_context.secho(
                'queue:{} messages:{} consumers:{}'.format(*retval),
                fg='cyan', bold=True)
            amqp_context.echo_ok()
@amqp.command(name='queue.delete')
@click.argument('queue',
                type=str)
@click.argument('if_unused',
                type=bool,
                default=False)
@click.argument('if_empty',
                type=bool,
                default=False)
@click.pass_obj
def queue_delete(amqp_context, queue, if_unused, if_empty):
    # Delete a queue (AMQP queue.delete) and report how many messages went.
    if amqp_context.channel is None:
        amqp_context.echo_error('Not connected to broker. Please retry...')
        amqp_context.reconnect()
    else:
        try:
            retval = amqp_context.channel.queue_delete(queue=queue,
                                                       if_unused=if_unused,
                                                       if_empty=if_empty)
        except Exception as e:
            amqp_context.echo_error(e)
            amqp_context.reconnect()
        else:
            # queue_delete returns the number of deleted messages.
            amqp_context.cli_context.secho(
                f'{retval} messages deleted.',
                fg='cyan', bold=True)
            amqp_context.echo_ok()
@amqp.command(name='queue.purge')
@click.argument('queue',
                type=str)
@click.pass_obj
def queue_purge(amqp_context, queue):
    # Remove all messages from a queue (AMQP queue.purge).
    if amqp_context.channel is None:
        amqp_context.echo_error('Not connected to broker. Please retry...')
        amqp_context.reconnect()
    else:
        try:
            retval = amqp_context.channel.queue_purge(queue=queue)
        except Exception as e:
            amqp_context.echo_error(e)
            amqp_context.reconnect()
        else:
            # queue_purge returns the number of purged messages.
            amqp_context.cli_context.secho(
                f'{retval} messages deleted.',
                fg='cyan', bold=True)
            amqp_context.echo_ok()
@amqp.command(name='basic.get')
@click.argument('queue',
                type=str)
@click.argument('no_ack',
                type=bool,
                default=False)
@click.pass_obj
def basic_get(amqp_context, queue, no_ack):
    # Fetch a single message from a queue (AMQP basic.get).
    if amqp_context.channel is None:
        amqp_context.echo_error('Not connected to broker. Please retry...')
        amqp_context.reconnect()
    else:
        try:
            message = amqp_context.channel.basic_get(queue, no_ack=no_ack)
        except Exception as e:
            amqp_context.echo_error(e)
            amqp_context.reconnect()
        else:
            # dump_message handles the "queue was empty" (None) case.
            amqp_context.respond(dump_message(message))
            amqp_context.echo_ok()
@amqp.command(name='basic.publish')
@click.argument('msg',
                type=str)
@click.argument('exchange',
                type=str)
@click.argument('routing_key',
                type=str)
@click.argument('mandatory',
                type=bool,
                default=False)
@click.argument('immediate',
                type=bool,
                default=False)
@click.pass_obj
def basic_publish(amqp_context, msg, exchange, routing_key, mandatory,
                  immediate):
    # Publish a message to an exchange (AMQP basic.publish).
    if amqp_context.channel is None:
        amqp_context.echo_error('Not connected to broker. Please retry...')
        amqp_context.reconnect()
    else:
        # XXX Hack to fix Issue #2013
        # Real AMQP transports require a Message object; other (virtual)
        # transports accept the raw string.
        if isinstance(amqp_context.connection.connection, Connection):
            msg = Message(msg)
        try:
            amqp_context.channel.basic_publish(msg,
                                               exchange=exchange,
                                               routing_key=routing_key,
                                               mandatory=mandatory,
                                               immediate=immediate)
        except Exception as e:
            amqp_context.echo_error(e)
            amqp_context.reconnect()
        else:
            amqp_context.echo_ok()
@amqp.command(name='basic.ack')
@click.argument('delivery_tag',
                type=int)
@click.pass_obj
def basic_ack(amqp_context, delivery_tag):
    # Acknowledge a message by delivery tag (AMQP basic.ack).
    if amqp_context.channel is None:
        amqp_context.echo_error('Not connected to broker. Please retry...')
        amqp_context.reconnect()
    else:
        try:
            amqp_context.channel.basic_ack(delivery_tag)
        except Exception as e:
            amqp_context.echo_error(e)
            amqp_context.reconnect()
        else:
            amqp_context.echo_ok()
register_repl(amqp)

View File

@@ -0,0 +1,306 @@
"""Click customizations for Celery."""
import json
import numbers
from collections import OrderedDict
from functools import update_wrapper
from pprint import pformat
from typing import Any
import click
from click import Context, ParamType
from kombu.utils.objects import cached_property
from celery._state import get_current_app
from celery.signals import user_preload_options
from celery.utils import text
from celery.utils.log import mlevel
from celery.utils.time import maybe_iso8601
# Syntax highlighting is optional: fall back to plain text when pygments
# is not installed.
try:
    from pygments import highlight
    from pygments.formatters import Terminal256Formatter
    from pygments.lexers import PythonLexer
except ImportError:
    def highlight(s, *args, **kwargs):
        """Place holder function in case pygments is missing."""
        return s
    LEXER = None
    FORMATTER = None
else:
    LEXER = PythonLexer()
    FORMATTER = Terminal256Formatter()
class CLIContext:
    """Context Object for the CLI."""

    def __init__(self, app, no_color, workdir, quiet=False):
        """Initialize the CLI context."""
        self.app = app or get_current_app()
        self.no_color = no_color
        self.quiet = quiet
        self.workdir = workdir

    @cached_property
    def OK(self):
        # Styled "OK" marker (plain text when --no-color is set).
        return self.style("OK", fg="green", bold=True)

    @cached_property
    def ERROR(self):
        # Styled "ERROR" marker (plain text when --no-color is set).
        return self.style("ERROR", fg="red", bold=True)

    def style(self, message=None, **kwargs):
        """Like :func:`click.style`, but a no-op under --no-color."""
        if self.no_color:
            return message
        else:
            return click.style(message, **kwargs)

    def secho(self, message=None, **kwargs):
        """Like :func:`click.secho`, stripping styling under --no-color."""
        if self.no_color:
            kwargs['color'] = False
            click.echo(message, **kwargs)
        else:
            click.secho(message, **kwargs)

    def echo(self, message=None, **kwargs):
        """Echo *message*, forcing ``color=False`` under --no-color."""
        if self.no_color:
            kwargs['color'] = False
            click.echo(message, **kwargs)
        else:
            click.echo(message, **kwargs)

    def error(self, message=None, **kwargs):
        """Echo *message* to stderr, styled unless --no-color is set."""
        kwargs['err'] = True
        if self.no_color:
            kwargs['color'] = False
            click.echo(message, **kwargs)
        else:
            click.secho(message, **kwargs)

    def pretty(self, n):
        """Return a ``(status_marker, formatted_text)`` pair for *n*."""
        if isinstance(n, list):
            return self.OK, self.pretty_list(n)
        if isinstance(n, dict):
            if 'ok' in n or 'error' in n:
                return self.pretty_dict_ok_error(n)
            else:
                s = json.dumps(n, sort_keys=True, indent=4)
                if not self.no_color:
                    s = highlight(s, LEXER, FORMATTER)
                return self.OK, s
        if isinstance(n, str):
            return self.OK, n
        return self.OK, pformat(n)

    def pretty_list(self, n):
        """Format a list as a bulleted block, one ``*`` item per line."""
        if not n:
            return '- empty -'
        return '\n'.join(
            f'{self.style("*", fg="white")} {item}' for item in n
        )

    def pretty_dict_ok_error(self, n):
        """Format an ``{'ok': ...}`` or ``{'error': ...}`` reply dict."""
        try:
            return (self.OK,
                    text.indent(self.pretty(n['ok'])[1], 4))
        except KeyError:
            pass
        return (self.ERROR,
                text.indent(self.pretty(n['error'])[1], 4))

    def say_chat(self, direction, title, body='', show_body=False):
        """Print a chat-style line; incoming (``<-``) lines honor --quiet."""
        if direction == '<-' and self.quiet:
            return
        # Direction prefix is suppressed entirely in quiet mode.
        dirstr = not self.quiet and f'{self.style(direction, fg="white", bold=True)} ' or ''
        self.echo(f'{dirstr} {title}')
        if body and show_body:
            self.echo(body)
def handle_preload_options(f):
    """Extract preload options and return a wrapped callable.

    The wrapper collects the parsed values of all app-defined *preload*
    options and publishes them through the ``user_preload_options``
    signal before invoking *f* unchanged.
    """
    def caller(ctx, *args, **kwargs):
        app = ctx.obj.app
        preload_names = [option.name for option in app.user_options.get('preload', [])]
        if preload_names:
            selected = {name: kwargs[name] for name in preload_names}
            user_preload_options.send(sender=f, app=app, options=selected)
        return f(ctx, *args, **kwargs)
    return update_wrapper(caller, f)
class CeleryOption(click.Option):
    """Customized option for Celery."""

    def __init__(self, *args, **kwargs):
        """Initialize a Celery option."""
        # Pop the Celery-specific keywords before click sees them.
        self.help_group = kwargs.pop('help_group', None)
        self.default_value_from_context = kwargs.pop('default_value_from_context', None)
        super().__init__(*args, **kwargs)

    def get_default(self, ctx, *args, **kwargs):
        """Resolve the default, optionally pulling it from ``ctx.obj``."""
        if self.default_value_from_context:
            self.default = ctx.obj[self.default_value_from_context]
        return super().get_default(ctx, *args, **kwargs)
class CeleryCommand(click.Command):
    """Customized command for Celery."""

    def format_options(self, ctx, formatter):
        """Write all the options into the formatter if they exist."""
        grouped = OrderedDict()
        for param in self.get_params(ctx):
            record = param.get_help_record(ctx)
            if record is None:
                continue
            # Options carrying a ``help_group`` get their own help section;
            # everything else falls into the generic "Options" bucket.
            group = getattr(param, 'help_group', None)
            key = str(group) if group else 'Options'
            grouped.setdefault(key, []).append(record)
        for section_name, records in grouped.items():
            with formatter.section(section_name):
                formatter.write_dl(records)
class DaemonOption(CeleryOption):
    """Common daemonization option."""

    def __init__(self, *args, **kwargs):
        kwargs.setdefault("help_group", "Daemonization Options")
        kwargs.setdefault("callback", self.daemon_setting)
        # ``args`` is passed as a single sequence on purpose: click treats
        # the first positional argument as the parameter declarations.
        super().__init__(args, **kwargs)

    def daemon_setting(self, ctx: Context, opt: CeleryOption, value: Any) -> Any:
        """
        Try to fetch daemonization option from applications settings.
        Use the daemon command name as prefix (eg. `worker` -> `worker_pidfile`)
        """
        return value or getattr(ctx.obj.app.conf, f"{ctx.command.name}_{self.name}", None)
class CeleryDaemonCommand(CeleryCommand):
    """Daemon commands."""

    def __init__(self, *args, **kwargs):
        """Initialize a Celery command with common daemon options."""
        super().__init__(*args, **kwargs)
        # Every daemonizable command shares the same set of options.
        daemon_params = (
            DaemonOption("--logfile", "-f", help="Log destination; defaults to stderr"),
            DaemonOption("--pidfile", help="PID file path; defaults to no PID file"),
            DaemonOption("--uid", help="Drops privileges to this user ID"),
            DaemonOption("--gid", help="Drops privileges to this group ID"),
            DaemonOption("--umask", help="Create files and directories with this umask"),
            DaemonOption("--executable", help="Override path to the Python executable"),
        )
        self.params.extend(daemon_params)
class CommaSeparatedList(ParamType):
    """Comma separated list argument."""

    name = "comma separated list"

    def convert(self, value, param, ctx):
        # Splitting/stripping is delegated to celery.utils.text.
        return text.str_to_list(value)
class JsonArray(ParamType):
    """JSON formatted array argument."""

    name = "json array"

    def convert(self, value, param, ctx):
        """Parse *value* as a JSON array; lists pass through untouched."""
        if isinstance(value, list):
            return value
        try:
            parsed = json.loads(value)
        except ValueError as e:
            # self.fail() raises, aborting option parsing with a CLI error.
            self.fail(str(e))
        if not isinstance(parsed, list):
            self.fail(f"{value} was not an array")
        return parsed
class JsonObject(ParamType):
    """JSON formatted object argument."""

    name = "json object"

    def convert(self, value, param, ctx):
        """Parse *value* as a JSON object; dicts pass through untouched."""
        if isinstance(value, dict):
            return value
        try:
            parsed = json.loads(value)
        except ValueError as e:
            # self.fail() raises, aborting option parsing with a CLI error.
            self.fail(str(e))
        if not isinstance(parsed, dict):
            self.fail(f"{value} was not an object")
        return parsed
class ISO8601DateTime(ParamType):
    """ISO 8601 Date Time argument."""

    # Fixed typo: was "iso-86091".  The type name is user-visible in
    # ``--help`` output and click's conversion error messages.
    name = "iso-8601"

    def convert(self, value, param, ctx):
        """Parse *value* as an ISO 8601 datetime, failing with a CLI error."""
        try:
            return maybe_iso8601(value)
        except (TypeError, ValueError) as e:
            self.fail(e)
class ISO8601DateTimeOrFloat(ParamType):
    """ISO 8601 Date Time or float argument."""

    # Fixed typo: was "iso-86091 or float".  The type name is user-visible
    # in ``--help`` output and click's conversion error messages.
    name = "iso-8601 or float"

    def convert(self, value, param, ctx):
        """Parse *value* as a float first, falling back to ISO 8601."""
        try:
            return float(value)
        except (TypeError, ValueError):
            pass
        try:
            return maybe_iso8601(value)
        except (TypeError, ValueError) as e:
            self.fail(e)
class LogLevel(click.Choice):
    """Log level option."""

    def __init__(self):
        """Initialize the log level option with the relevant choices."""
        super().__init__(('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL', 'FATAL'))

    def convert(self, value, param, ctx):
        # Numeric levels are passed through unchanged (already resolved).
        if isinstance(value, numbers.Integral):
            return value

        # Validate the (case-insensitive) name, then map it to the
        # numeric logging level via celery.utils.log.mlevel.
        value = value.upper()
        value = super().convert(value, param, ctx)
        return mlevel(value)
# Ready-to-use parameter type singletons shared by the CLI commands.
JSON_ARRAY = JsonArray()
JSON_OBJECT = JsonObject()
ISO8601 = ISO8601DateTime()
ISO8601_OR_FLOAT = ISO8601DateTimeOrFloat()
LOG_LEVEL = LogLevel()
COMMA_SEPARATED_LIST = CommaSeparatedList()

View File

@@ -0,0 +1,72 @@
"""The :program:`celery beat` command."""
from functools import partial
import click
from celery.bin.base import LOG_LEVEL, CeleryDaemonCommand, CeleryOption, handle_preload_options
from celery.platforms import detached, maybe_drop_privileges
@click.command(cls=CeleryDaemonCommand, context_settings={
    'allow_extra_args': True
})
@click.option('--detach',
              cls=CeleryOption,
              is_flag=True,
              default=False,
              help_group="Beat Options",
              help="Detach and run in the background as a daemon.")
@click.option('-s',
              '--schedule',
              cls=CeleryOption,
              callback=lambda ctx, _, value: value or ctx.obj.app.conf.beat_schedule_filename,
              help_group="Beat Options",
              help="Path to the schedule database."
                   " Defaults to `celerybeat-schedule`."
                   "The extension '.db' may be appended to the filename.")
@click.option('-S',
              '--scheduler',
              cls=CeleryOption,
              callback=lambda ctx, _, value: value or ctx.obj.app.conf.beat_scheduler,
              help_group="Beat Options",
              help="Scheduler class to use.")
@click.option('--max-interval',
              cls=CeleryOption,
              type=int,
              help_group="Beat Options",
              help="Max seconds to sleep between schedule iterations.")
@click.option('-l',
              '--loglevel',
              default='WARNING',
              cls=CeleryOption,
              type=LOG_LEVEL,
              help_group="Beat Options",
              help="Logging level.")
@click.pass_context
@handle_preload_options
def beat(ctx, detach=False, logfile=None, pidfile=None, uid=None,
         gid=None, umask=None, workdir=None, **kwargs):
    """Start the beat periodic task scheduler."""
    app = ctx.obj.app

    # Extra command-line args are treated as ad-hoc configuration overrides
    # (allow_extra_args above lets them through click's parser).
    if ctx.args:
        try:
            app.config_from_cmdline(ctx.args)
        except (KeyError, ValueError) as e:
            # TODO: Improve the error messages
            raise click.UsageError("Unable to parse extra configuration"
                                   " from command line.\n"
                                   f"Reason: {e}", ctx=ctx)

    # When not detaching, privileges are dropped in-process;
    # detached() takes care of uid/gid itself in the daemonized case.
    if not detach:
        maybe_drop_privileges(uid=uid, gid=gid)

    beat = partial(app.Beat,
                   logfile=logfile, pidfile=pidfile,
                   quiet=ctx.obj.quiet, **kwargs)

    if detach:
        with detached(logfile, pidfile, uid, gid, umask, workdir):
            return beat().run()
    else:
        return beat().run()

View File

@@ -0,0 +1,71 @@
"""The ``celery call`` program used to send tasks from the command-line."""
import click
from celery.bin.base import (ISO8601, ISO8601_OR_FLOAT, JSON_ARRAY, JSON_OBJECT, CeleryCommand, CeleryOption,
handle_preload_options)
@click.command(cls=CeleryCommand)
@click.argument('name')
@click.option('-a',
              '--args',
              cls=CeleryOption,
              type=JSON_ARRAY,
              default='[]',
              help_group="Calling Options",
              help="Positional arguments.")
@click.option('-k',
              '--kwargs',
              cls=CeleryOption,
              type=JSON_OBJECT,
              default='{}',
              help_group="Calling Options",
              help="Keyword arguments.")
@click.option('--eta',
              cls=CeleryOption,
              type=ISO8601,
              help_group="Calling Options",
              help="scheduled time.")
@click.option('--countdown',
              cls=CeleryOption,
              type=float,
              help_group="Calling Options",
              help="eta in seconds from now.")
@click.option('--expires',
              cls=CeleryOption,
              type=ISO8601_OR_FLOAT,
              help_group="Calling Options",
              help="expiry time.")
@click.option('--serializer',
              cls=CeleryOption,
              default='json',
              help_group="Calling Options",
              help="task serializer.")
@click.option('--queue',
              cls=CeleryOption,
              help_group="Routing Options",
              help="custom queue name.")
@click.option('--exchange',
              cls=CeleryOption,
              help_group="Routing Options",
              help="custom exchange name.")
@click.option('--routing-key',
              cls=CeleryOption,
              help_group="Routing Options",
              help="custom routing key.")
@click.pass_context
@handle_preload_options
def call(ctx, name, args, kwargs, eta, countdown, expires, serializer, queue, exchange, routing_key):
    """Call a task by name."""
    # Fire-and-forget: only the id of the resulting AsyncResult is printed.
    task_id = ctx.obj.app.send_task(
        name,
        args=args, kwargs=kwargs,
        countdown=countdown,
        serializer=serializer,
        queue=queue,
        exchange=exchange,
        routing_key=routing_key,
        eta=eta,
        expires=expires
    ).id
    ctx.obj.echo(task_id)

View File

@@ -0,0 +1,231 @@
"""Celery Command Line Interface."""
import os
import pathlib
import sys
import traceback
try:
from importlib.metadata import entry_points
except ImportError:
from importlib_metadata import entry_points
import click
import click.exceptions
from click_didyoumean import DYMGroup
from click_plugins import with_plugins
from celery import VERSION_BANNER
from celery.app.utils import find_app
from celery.bin.amqp import amqp
from celery.bin.base import CeleryCommand, CeleryOption, CLIContext
from celery.bin.beat import beat
from celery.bin.call import call
from celery.bin.control import control, inspect, status
from celery.bin.events import events
from celery.bin.graph import graph
from celery.bin.list import list_
from celery.bin.logtool import logtool
from celery.bin.migrate import migrate
from celery.bin.multi import multi
from celery.bin.purge import purge
from celery.bin.result import result
from celery.bin.shell import shell
from celery.bin.upgrade import upgrade
from celery.bin.worker import worker
# Error messages shown when -A/--app cannot be resolved to a Celery app.
UNABLE_TO_LOAD_APP_MODULE_NOT_FOUND = click.style("""
Unable to load celery application.
The module {0} was not found.""", fg='red')

UNABLE_TO_LOAD_APP_ERROR_OCCURRED = click.style("""
Unable to load celery application.
While trying to load the module {0} the following error occurred:
{1}""", fg='red')

# Styled red like its two siblings so all app-loading failures look alike.
UNABLE_TO_LOAD_APP_APP_MISSING = click.style("""
Unable to load celery application.
{0}""", fg='red')
# ``entry_points`` grew the ``group=`` keyword in Python 3.10; older
# interpreters (or the importlib_metadata backport, depending on version)
# expose either the legacy dict-like API or the ``select()`` API.
if sys.version_info >= (3, 10):
    _PLUGINS = entry_points(group='celery.commands')
else:
    try:
        _PLUGINS = entry_points().get('celery.commands', [])
    except AttributeError:
        _PLUGINS = entry_points().select(group='celery.commands')
@with_plugins(_PLUGINS)
@click.group(cls=DYMGroup, invoke_without_command=True)
@click.option('-A',
              '--app',
              envvar='APP',
              cls=CeleryOption,
              # May take either: a str when invoked from command line (Click),
              # or a Celery object when invoked from inside Celery; hence the
              # need to prevent Click from "processing" the Celery object and
              # converting it into its str representation.
              type=click.UNPROCESSED,
              help_group="Global Options")
@click.option('-b',
              '--broker',
              envvar='BROKER_URL',
              cls=CeleryOption,
              help_group="Global Options")
@click.option('--result-backend',
              envvar='RESULT_BACKEND',
              cls=CeleryOption,
              help_group="Global Options")
@click.option('--loader',
              envvar='LOADER',
              cls=CeleryOption,
              help_group="Global Options")
@click.option('--config',
              envvar='CONFIG_MODULE',
              cls=CeleryOption,
              help_group="Global Options")
@click.option('--workdir',
              cls=CeleryOption,
              type=pathlib.Path,
              # Eager so the chdir happens before other options are resolved.
              callback=lambda _, __, wd: os.chdir(wd) if wd else None,
              is_eager=True,
              help_group="Global Options")
@click.option('-C',
              '--no-color',
              envvar='NO_COLOR',
              is_flag=True,
              cls=CeleryOption,
              help_group="Global Options")
@click.option('-q',
              '--quiet',
              is_flag=True,
              cls=CeleryOption,
              help_group="Global Options")
@click.option('--version',
              cls=CeleryOption,
              is_flag=True,
              help_group="Global Options")
@click.option('--skip-checks',
              envvar='SKIP_CHECKS',
              cls=CeleryOption,
              is_flag=True,
              help_group="Global Options",
              help="Skip Django core checks on startup. Setting the SKIP_CHECKS environment "
                   "variable to any non-empty string will have the same effect.")
@click.pass_context
def celery(ctx, app, broker, result_backend, loader, config, workdir,
           no_color, quiet, version, skip_checks):
    """Celery command entrypoint."""
    if version:
        click.echo(VERSION_BANNER)
        ctx.exit()
    elif ctx.invoked_subcommand is None:
        click.echo(ctx.get_help())
        ctx.exit()

    # Selected options are exported as environment variables so the
    # default app (and any child processes) pick them up.
    if loader:
        # Default app takes loader from this env (Issue #1066).
        os.environ['CELERY_LOADER'] = loader
    if broker:
        os.environ['CELERY_BROKER_URL'] = broker
    if result_backend:
        os.environ['CELERY_RESULT_BACKEND'] = result_backend
    if config:
        os.environ['CELERY_CONFIG_MODULE'] = config
    if skip_checks:
        os.environ['CELERY_SKIP_CHECKS'] = 'true'

    # -A may be a dotted path (from the CLI) or an already-imported app.
    if isinstance(app, str):
        try:
            app = find_app(app)
        except ModuleNotFoundError as e:
            if e.name != app:
                # The app module itself imported, but something *it*
                # imports is missing: show the full traceback instead.
                exc = traceback.format_exc()
                ctx.fail(
                    UNABLE_TO_LOAD_APP_ERROR_OCCURRED.format(app, exc)
                )
            ctx.fail(UNABLE_TO_LOAD_APP_MODULE_NOT_FOUND.format(e.name))
        except AttributeError as e:
            attribute_name = e.args[0].capitalize()
            ctx.fail(UNABLE_TO_LOAD_APP_APP_MISSING.format(attribute_name))
        except Exception:
            exc = traceback.format_exc()
            ctx.fail(
                UNABLE_TO_LOAD_APP_ERROR_OCCURRED.format(app, exc)
            )

    ctx.obj = CLIContext(app=app, no_color=no_color, workdir=workdir,
                         quiet=quiet)

    # User options
    # Inject app-defined extra options into the relevant sub-commands.
    worker.params.extend(ctx.obj.app.user_options.get('worker', []))
    beat.params.extend(ctx.obj.app.user_options.get('beat', []))
    events.params.extend(ctx.obj.app.user_options.get('events', []))

    for command in celery.commands.values():
        command.params.extend(ctx.obj.app.user_options.get('preload', []))
@celery.command(cls=CeleryCommand)
@click.pass_context
def report(ctx, **kwargs):
    """Shows information useful to include in bug-reports."""
    app = ctx.obj.app
    # Import task modules first so the report reflects the full config.
    app.loader.import_default_modules()
    ctx.obj.echo(app.bugreport())
# Attach every built-in sub-command to the umbrella ``celery`` group.
celery.add_command(purge)
celery.add_command(call)
celery.add_command(beat)
celery.add_command(list_)
celery.add_command(result)
celery.add_command(migrate)
celery.add_command(status)
celery.add_command(worker)
celery.add_command(events)
celery.add_command(inspect)
celery.add_command(control)
celery.add_command(graph)
celery.add_command(upgrade)
celery.add_command(logtool)
celery.add_command(amqp)
celery.add_command(shell)
celery.add_command(multi)
# Monkey-patch click to display a custom error
# when -A or --app are used as sub-command options instead of as options
# of the global command.
previous_show_implementation = click.exceptions.NoSuchOption.show

WRONG_APP_OPTION_USAGE_MESSAGE = """You are using `{option_name}` as an option of the {info_name} sub-command:
celery {info_name} {option_name} celeryapp <...>
The support for this usage was removed in Celery 5.0. Instead you should use `{option_name}` as a global option:
celery {option_name} celeryapp {info_name} <...>"""


def _show(self, file=None):
    # Prepend the migration hint for the common ``celery <cmd> -A app``
    # mistake, then fall through to click's normal error rendering.
    if self.option_name in ('-A', '--app'):
        self.ctx.obj.error(
            WRONG_APP_OPTION_USAGE_MESSAGE.format(
                option_name=self.option_name,
                info_name=self.ctx.info_name),
            fg='red'
        )
    previous_show_implementation(self, file=file)


click.exceptions.NoSuchOption.show = _show
def main() -> int:
    """Start celery umbrella command.

    This function is the main entrypoint for the CLI.

    :return: The exit code of the CLI.
    """
    # auto_envvar_prefix lets every option also be set via CELERY_* env vars.
    return celery(auto_envvar_prefix="CELERY")

View File

@@ -0,0 +1,252 @@
"""The ``celery control``, ``. inspect`` and ``. status`` programs."""
from functools import partial
from typing import Literal
import click
from kombu.utils.json import dumps
from celery.bin.base import COMMA_SEPARATED_LIST, CeleryCommand, CeleryOption, handle_preload_options
from celery.exceptions import CeleryCommandException
from celery.platforms import EX_UNAVAILABLE
from celery.utils import text
from celery.worker.control import Panel
def _say_remote_command_reply(ctx, replies, show_reply=False):
node = next(iter(replies)) # <-- take first.
reply = replies[node]
node = ctx.obj.style(f'{node}: ', fg='cyan', bold=True)
status, preply = ctx.obj.pretty(reply)
ctx.obj.say_chat('->', f'{node}{status}',
text.indent(preply, 4) if show_reply else '',
show_body=show_reply)
def _consume_arguments(meta, method, args):
i = 0
try:
for i, arg in enumerate(args):
try:
name, typ = meta.args[i]
except IndexError:
if meta.variadic:
break
raise click.UsageError(
'Command {!r} takes arguments: {}'.format(
method, meta.signature))
else:
yield name, typ(arg) if typ is not None else arg
finally:
args[:] = args[i:]
def _compile_arguments(command, args):
    """Build the keyword-argument dict for a remote-control *command*."""
    meta = Panel.meta[command]
    compiled = {}
    if meta.args:
        # Convert the leading positional arguments per the command spec.
        compiled.update(dict(_consume_arguments(meta, command, args)))
    if meta.variadic:
        # Whatever remains after consumption belongs to the variadic slot.
        compiled.update({meta.variadic: args})
    return compiled
_RemoteControlType = Literal['inspect', 'control']
def _verify_command_name(type_: _RemoteControlType, command: str) -> None:
    """Raise a click usage error when *command* is unknown for *type_*."""
    choices = _get_commands_of_type(type_)
    if command in choices:
        return
    available = ", ".join(choices)
    raise click.UsageError(
        message=f'Command {command} not recognized. Available {type_} commands: {available}',
    )
def _list_option(type_: _RemoteControlType):
    """Build an eager ``--list`` option printing the available commands."""
    def callback(ctx: click.Context, param, value) -> None:
        if not value:
            return
        choices = _get_commands_of_type(type_)

        formatter = click.HelpFormatter()

        with formatter.section(f'{type_.capitalize()} Commands'):
            command_list = []
            for command_name, info in choices.items():
                # Show the argument signature next to the name when present.
                if info.signature:
                    command_preview = f'{command_name} {info.signature}'
                else:
                    command_preview = command_name
                command_list.append((command_preview, info.help))
            formatter.write_dl(command_list)
        ctx.obj.echo(formatter.getvalue(), nl=False)
        # Listing is terminal: never fall through to the actual command.
        ctx.exit()
    return click.option(
        '--list',
        is_flag=True,
        help=f'List available {type_} commands and exit.',
        # Eager + no value so the callback runs before argument validation.
        expose_value=False,
        is_eager=True,
        callback=callback,
    )
def _get_commands_of_type(type_: _RemoteControlType) -> dict:
    """Return the visible Panel commands of *type_*, sorted by name."""
    matching = (
        (name, info) for name, info in Panel.meta.items()
        if info.type == type_ and info.visible
    )
    return dict(sorted(matching))
@click.command(cls=CeleryCommand)
@click.option('-t',
              '--timeout',
              cls=CeleryOption,
              type=float,
              default=1.0,
              help_group='Remote Control Options',
              help='Timeout in seconds waiting for reply.')
@click.option('-d',
              '--destination',
              cls=CeleryOption,
              type=COMMA_SEPARATED_LIST,
              help_group='Remote Control Options',
              help='Comma separated list of destination node names.')
@click.option('-j',
              '--json',
              cls=CeleryOption,
              is_flag=True,
              help_group='Remote Control Options',
              help='Use json as output format.')
@click.pass_context
@handle_preload_options
def status(ctx, timeout, destination, json, **kwargs):
    """Show list of workers that are online."""
    # In json mode replies are collected silently and dumped at the end;
    # otherwise each reply is echoed as it arrives via the callback.
    callback = None if json else partial(_say_remote_command_reply, ctx)
    replies = ctx.obj.app.control.inspect(timeout=timeout,
                                          destination=destination,
                                          callback=callback).ping()

    if not replies:
        raise CeleryCommandException(
            message='No nodes replied within time constraint',
            exit_code=EX_UNAVAILABLE
        )

    if json:
        ctx.obj.echo(dumps(replies))
    nodecount = len(replies)
    if not kwargs.get('quiet', False):
        ctx.obj.echo('\n{} {} online.'.format(
            nodecount, text.pluralize(nodecount, 'node')))
@click.command(cls=CeleryCommand,
               context_settings={'allow_extra_args': True})
@click.argument('command')
@_list_option('inspect')
@click.option('-t',
              '--timeout',
              cls=CeleryOption,
              type=float,
              default=1.0,
              help_group='Remote Control Options',
              help='Timeout in seconds waiting for reply.')
@click.option('-d',
              '--destination',
              cls=CeleryOption,
              type=COMMA_SEPARATED_LIST,
              help_group='Remote Control Options',
              help='Comma separated list of destination node names.')
@click.option('-j',
              '--json',
              cls=CeleryOption,
              is_flag=True,
              help_group='Remote Control Options',
              help='Use json as output format.')
@click.pass_context
@handle_preload_options
def inspect(ctx, command, timeout, destination, json, **kwargs):
    """Inspect the workers by sending them the COMMAND inspect command.
    Availability: RabbitMQ (AMQP) and Redis transports.
    """
    _verify_command_name('inspect', command)
    # In json mode replies are collected silently and dumped at the end.
    callback = None if json else partial(_say_remote_command_reply, ctx,
                                         show_reply=True)
    # Extra CLI args (allow_extra_args) become the command's arguments.
    arguments = _compile_arguments(command, ctx.args)
    inspect = ctx.obj.app.control.inspect(timeout=timeout,
                                          destination=destination,
                                          callback=callback)
    replies = inspect._request(command, **arguments)

    if not replies:
        raise CeleryCommandException(
            message='No nodes replied within time constraint',
            exit_code=EX_UNAVAILABLE
        )

    if json:
        ctx.obj.echo(dumps(replies))
        return

    nodecount = len(replies)
    if not ctx.obj.quiet:
        ctx.obj.echo('\n{} {} online.'.format(
            nodecount, text.pluralize(nodecount, 'node')))
@click.command(cls=CeleryCommand,
               context_settings={'allow_extra_args': True})
@click.argument('command')
@_list_option('control')
@click.option('-t',
              '--timeout',
              cls=CeleryOption,
              type=float,
              default=1.0,
              help_group='Remote Control Options',
              help='Timeout in seconds waiting for reply.')
@click.option('-d',
              '--destination',
              cls=CeleryOption,
              type=COMMA_SEPARATED_LIST,
              help_group='Remote Control Options',
              help='Comma separated list of destination node names.')
@click.option('-j',
              '--json',
              cls=CeleryOption,
              is_flag=True,
              help_group='Remote Control Options',
              help='Use json as output format.')
@click.pass_context
@handle_preload_options
def control(ctx, command, timeout, destination, json):
    """Send the COMMAND control command to the workers.
    Availability: RabbitMQ (AMQP), Redis, and MongoDB transports.
    """
    _verify_command_name('control', command)
    # In json mode replies are collected silently and dumped at the end.
    callback = None if json else partial(_say_remote_command_reply, ctx,
                                         show_reply=True)
    # Extra CLI args (allow_extra_args) become the command's arguments.
    args = ctx.args
    arguments = _compile_arguments(command, args)
    replies = ctx.obj.app.control.broadcast(command, timeout=timeout,
                                            destination=destination,
                                            callback=callback,
                                            reply=True,
                                            arguments=arguments)

    if not replies:
        raise CeleryCommandException(
            message='No nodes replied within time constraint',
            exit_code=EX_UNAVAILABLE
        )

    if json:
        ctx.obj.echo(dumps(replies))
View File

@@ -0,0 +1,94 @@
"""The ``celery events`` program."""
import sys
from functools import partial
import click
from celery.bin.base import LOG_LEVEL, CeleryDaemonCommand, CeleryOption, handle_preload_options
from celery.platforms import detached, set_process_title, strargv
def _set_process_status(prog, info=''):
    """Set the OS-visible process title for the events program."""
    title = '{}:{}'.format('celery events', prog)
    details = f'{info} {strargv(sys.argv)}'
    return set_process_title(title, info=details)
def _run_evdump(app):
    """Dump the event stream to stdout (``celery events --dump``)."""
    # Imported lazily: only needed in dump mode.
    from celery.events.dumper import evdump
    _set_process_status('dump')
    return evdump(app=app)
def _run_evcam(camera, app, logfile=None, pidfile=None, uid=None,
               gid=None, umask=None, workdir=None,
               detach=False, **kwargs):
    """Run the snapshot camera, optionally daemonized when *detach* is set."""
    from celery.events.snapshot import evcam
    _set_process_status('cam')
    kwargs['app'] = app
    run_camera = partial(evcam, camera,
                         logfile=logfile, pidfile=pidfile, **kwargs)
    if detach:
        # Daemonize before starting the camera loop.
        with detached(logfile, pidfile, uid, gid, umask, workdir):
            return run_camera()
    return run_camera()
def _run_evtop(app):
    """Start the curses-based event monitor (``top`` for tasks).

    Raises:
        click.UsageError: when the interpreter was built without curses.
    """
    try:
        from celery.events.cursesmon import evtop
        _set_process_status('top')
        return evtop(app=app)
    except ModuleNotFoundError as e:
        if e.name == '_curses':
            # TODO: Improve this error message
            raise click.UsageError("The curses module is required for this command.")
        # BUGFIX: unrelated import failures were previously swallowed,
        # making the command silently return None.  Re-raise them.
        raise
@click.command(cls=CeleryDaemonCommand)
@click.option('-d',
              '--dump',
              cls=CeleryOption,
              is_flag=True,
              help_group='Dumper')
@click.option('-c',
              '--camera',
              cls=CeleryOption,
              help_group='Snapshot')
# NOTE(review): the short flag -d is declared twice (--dump above and
# --detach here), so `-d` on the command line is ambiguous -- confirm
# which one click resolves it to before relying on the short form.
@click.option('-d',
              '--detach',
              cls=CeleryOption,
              is_flag=True,
              help_group='Snapshot')
@click.option('-F', '--frequency', '--freq',
              type=float,
              default=1.0,
              cls=CeleryOption,
              help_group='Snapshot')
@click.option('-r', '--maxrate',
              cls=CeleryOption,
              help_group='Snapshot')
@click.option('-l',
              '--loglevel',
              default='WARNING',
              cls=CeleryOption,
              type=LOG_LEVEL,
              help_group="Snapshot",
              help="Logging level.")
@click.pass_context
@handle_preload_options
def events(ctx, dump, camera, detach, frequency, maxrate, loglevel, **kwargs):
    """Event-stream utilities."""
    app = ctx.obj.app
    # Mutually exclusive modes: --dump wins over --camera; with neither,
    # the curses monitor is started.
    if dump:
        return _run_evdump(app)
    if camera:
        return _run_evcam(camera, app=app, freq=frequency, maxrate=maxrate,
                          loglevel=loglevel,
                          detach=detach,
                          **kwargs)
    return _run_evtop(app)

View File

@@ -0,0 +1,197 @@
"""The ``celery graph`` command."""
import sys
from operator import itemgetter
import click
from celery.bin.base import CeleryCommand, handle_preload_options
from celery.utils.graph import DependencyGraph, GraphFormatter
@click.group()
@click.pass_context
@handle_preload_options
def graph(ctx):
    """The ``celery graph`` command."""
    # Container group only -- see the ``bootsteps`` and ``workers``
    # subcommands for the actual graph output.
@graph.command(cls=CeleryCommand, context_settings={'allow_extra_args': True})
@click.pass_context
def bootsteps(ctx):
    """Display bootsteps graph."""
    worker = ctx.obj.app.WorkController()
    # Extra CLI args select which blueprints to include;
    # default is both the worker and the consumer blueprint.
    include = {arg.lower() for arg in ctx.args or ['worker', 'consumer']}
    if 'worker' in include:
        worker_graph = worker.blueprint.graph
        if 'consumer' in include:
            # Merge the consumer blueprint into the worker graph.
            worker.blueprint.connect_with(worker.consumer.blueprint)
    else:
        worker_graph = worker.consumer.blueprint.graph
    worker_graph.to_dot(sys.stdout)
@graph.command(cls=CeleryCommand, context_settings={'allow_extra_args': True})
@click.pass_context
def workers(ctx):
    """Display workers graph.

    Extra ``key:value`` CLI arguments control the output, e.g.
    ``wmax:4 tmax:3 enumerate`` -- see ``simplearg`` below.
    """
    def simplearg(arg):
        # 'key:value' -> (key, value); a bare 'flag' -> ('flag', '').
        return maybe_list(itemgetter(0, 2)(arg.partition(':')))
    def maybe_list(l, sep=','):
        # Split comma-separated values into a list, else keep the string.
        return l[0], l[1].split(sep) if sep in l[1] else l[1]
    args = dict(simplearg(arg) for arg in ctx.args)
    generic = 'generic' in args
    def generic_label(node):
        return '{} ({}://)'.format(type(node).__name__,
                                   node._label.split('://')[0])
    class Node:
        force_label = None
        scheme = {}
        def __init__(self, label, pos=None):
            self._label = label
            self.pos = pos
        def label(self):
            return self._label
        def __str__(self):
            return self.label()
    class Thread(Node):
        scheme = {
            'fillcolor': 'lightcyan4',
            'fontcolor': 'yellow',
            'shape': 'oval',
            'fontsize': 10,
            'width': 0.3,
            'color': 'black',
        }
        def __init__(self, label, **kwargs):
            self.real_label = label
            super().__init__(
                label=f'thr-{next(tids)}',
                pos=0,
            )
    class Formatter(GraphFormatter):
        def label(self, obj):
            return obj and obj.label()
        def node(self, obj):
            scheme = dict(obj.scheme) if obj.pos else obj.scheme
            if isinstance(obj, Thread):
                scheme['label'] = obj.real_label
            return self.draw_node(
                obj, dict(self.node_scheme, **scheme),
            )
        def terminal_node(self, obj):
            return self.draw_node(
                obj, dict(self.term_scheme, **obj.scheme),
            )
        def edge(self, a, b, **attrs):
            if isinstance(a, Thread):
                attrs.update(arrowhead='none', arrowtail='tee')
            return self.draw_edge(a, b, self.edge_scheme, attrs)
    def subscript(n):
        # BUGFIX: the Unicode subscript digits (U+2080..U+2089) had been
        # mangled to empty strings by an encoding round-trip, which made
        # every enumerated label collapse to the bare prefix.  Restored.
        S = {'0': '₀', '1': '₁', '2': '₂', '3': '₃', '4': '₄',
             '5': '₅', '6': '₆', '7': '₇', '8': '₈', '9': '₉'}
        return ''.join([S[i] for i in str(n)])
    class Worker(Node):
        pass
    class Backend(Node):
        scheme = {
            'shape': 'folder',
            'width': 2,
            'height': 1,
            'color': 'black',
            'fillcolor': 'peachpuff3',
        }
        def label(self):
            return generic_label(self) if generic else self._label
    class Broker(Node):
        scheme = {
            'shape': 'circle',
            'fillcolor': 'cadetblue3',
            'color': 'cadetblue4',
            'height': 1,
        }
        def label(self):
            return generic_label(self) if generic else self._label
    from itertools import count
    tids = count(1)
    # wmax/tmax limit how many workers/threads are drawn before
    # abbreviating with an ellipsis node.
    Wmax = int(args.get('wmax', 4) or 0)
    Tmax = int(args.get('tmax', 3) or 0)
    def maybe_abbr(l, name, max=Wmax):
        size = len(l)
        abbr = max and size > max
        if 'enumerate' in args:
            l = [f'{name}{subscript(i + 1)}'
                 for i, obj in enumerate(l)]
        if abbr:
            l = l[0:max - 1] + [l[size - 1]]
            l[max - 2] = '{}⎨…{}'.format(
                name[0], subscript(size - (max - 1)))
        return l
    app = ctx.obj.app
    try:
        workers = args['nodes']
        threads = args.get('threads') or []
    except KeyError:
        # No explicit node list given: ask the live workers instead.
        replies = app.control.inspect().stats() or {}
        workers, threads = [], []
        for worker, reply in replies.items():
            workers.append(worker)
            threads.append(reply['pool']['max-concurrency'])
    wlen = len(workers)
    backend = args.get('backend', app.conf.result_backend)
    threads_for = {}
    workers = maybe_abbr(workers, 'Worker')
    if Wmax and wlen > Wmax:
        threads = threads[0:3] + [threads[-1]]
    for i, threads in enumerate(threads):
        threads_for[workers[i]] = maybe_abbr(
            list(range(int(threads))), 'P', Tmax,
        )
    broker = Broker(args.get(
        'broker', app.connection_for_read().as_uri()))
    backend = Backend(backend) if backend else None
    deps = DependencyGraph(formatter=Formatter())
    deps.add_arc(broker)
    if backend:
        deps.add_arc(backend)
    curworker = [0]
    for i, worker in enumerate(workers):
        worker = Worker(worker, pos=i)
        deps.add_arc(worker)
        deps.add_edge(worker, broker)
        if backend:
            deps.add_edge(worker, backend)
        threads = threads_for.get(worker._label)
        if threads:
            for thread in threads:
                thread = Thread(thread)
                deps.add_arc(thread)
                deps.add_edge(thread, worker)
        curworker[0] += 1
    deps.to_dot(sys.stdout)

View File

@@ -0,0 +1,38 @@
"""The ``celery list bindings`` command, used to inspect queue bindings."""
import click
from celery.bin.base import CeleryCommand, handle_preload_options
@click.group(name="list")
@click.pass_context
@handle_preload_options
def list_(ctx):
    """Get info from broker.
    Note:
        For RabbitMQ the management plugin is required.
    """
    # Container group only -- see the ``bindings`` subcommand.
@list_.command(cls=CeleryCommand)
@click.pass_context
def bindings(ctx):
    """Inspect queue bindings."""
    # TODO: Consider using a table formatter for this command.
    app = ctx.obj.app
    with app.connection() as conn:
        # Declare the task queues first so the bindings exist.
        app.amqp.TaskConsumer(conn).declare()
        try:
            all_bindings = conn.manager.get_bindings()
        except NotImplementedError:
            raise click.UsageError('Your transport cannot list bindings.')
        def emit_row(queue, exchange, routing_key):
            ctx.obj.echo(f'{queue:<28} {exchange:<28} {routing_key}')
        emit_row('Queue', 'Exchange', 'Routing Key')
        emit_row('-' * 16, '-' * 16, '-' * 16)
        for binding in all_bindings:
            emit_row(binding['destination'], binding['source'],
                     binding['routing_key'])

View File

@@ -0,0 +1,157 @@
"""The ``celery logtool`` command."""
import re
from collections import Counter
from fileinput import FileInput
import click
from celery.bin.base import CeleryCommand, handle_preload_options
__all__ = ('logtool',)
RE_LOG_START = re.compile(r'^\[\d\d\d\d\-\d\d-\d\d ')
RE_TASK_RECEIVED = re.compile(r'.+?\] Received')
RE_TASK_READY = re.compile(r'.+?\] Task')
RE_TASK_INFO = re.compile(r'.+?([\w\.]+)\[(.+?)\].+')
RE_TASK_RESULT = re.compile(r'.+?[\w\.]+\[.+?\] (.+)')
REPORT_FORMAT = """
Report
======
Task total: {task[total]}
Task errors: {task[errors]}
Task success: {task[succeeded]}
Task completed: {task[completed]}
Tasks
=====
{task[types].format}
"""
class _task_counts(list):
@property
def format(self):
return '\n'.join('{}: {}'.format(*i) for i in self)
def task_info(line):
    """Extract ``(task_name, task_id)`` from a worker log line.

    NOTE(review): assumes *line* matches RE_TASK_INFO -- raises
    AttributeError on ``m.groups()`` otherwise; callers only gate on
    RE_TASK_RECEIVED/RE_TASK_READY first, so malformed lines may crash.
    """
    m = RE_TASK_INFO.match(line)
    return m.groups()
class Audit:
    """Stateful scanner correlating 'Received'/'Task' worker log lines.

    Feed log lines one at a time via :meth:`feed` (or :meth:`run` for
    whole files) and read the aggregates afterwards via :meth:`report`.
    """
    def __init__(self, on_task_error=None, on_trace=None, on_debug=None):
        # task ids seen as received / marked ready.
        self.ids = set()
        self.names = {}
        self.results = {}
        self.ready = set()
        self.task_types = Counter()
        self.task_errors = 0
        # Optional observer callbacks; all may be None.
        self.on_task_error = on_task_error
        self.on_trace = on_trace
        self.on_debug = on_debug
        self.prev_line = None
    def run(self, files):
        """Feed every line of *files* through the audit; return self."""
        for line in FileInput(files):
            self.feed(line)
        return self
    def task_received(self, line, task_name, task_id):
        """Record a 'Received' line for *task_id*."""
        self.names[task_id] = task_name
        self.ids.add(task_id)
        self.task_types[task_name] += 1
    def task_ready(self, line, task_name, task_id, result):
        """Record a completion line; anything but 'succeeded' is an error."""
        self.ready.add(task_id)
        self.results[task_id] = result
        if 'succeeded' not in result:
            self.task_error(line, task_name, task_id, result)
    def task_error(self, line, task_name, task_id, result):
        """Count an error and notify the on_task_error callback, if any."""
        self.task_errors += 1
        if self.on_task_error:
            self.on_task_error(line, task_name, task_id, result)
    def feed(self, line):
        """Classify one log line and update state.

        Lines not starting with a timestamp are treated as traceback
        continuation and forwarded to on_trace together with the line
        that preceded them.
        """
        if RE_LOG_START.match(line):
            if RE_TASK_RECEIVED.match(line):
                task_name, task_id = task_info(line)
                self.task_received(line, task_name, task_id)
            elif RE_TASK_READY.match(line):
                task_name, task_id = task_info(line)
                result = RE_TASK_RESULT.match(line)
                if result:
                    result, = result.groups()
                # NOTE(review): if RE_TASK_RESULT does not match, *result*
                # stays None and task_ready's `in` test will raise -- confirm
                # whether such lines can occur in real logs.
                self.task_ready(line, task_name, task_id, result)
            else:
                if self.on_debug:
                    self.on_debug(line)
            self.prev_line = line
        else:
            if self.on_trace:
                self.on_trace('\n'.join(filter(None, [self.prev_line, line])))
            self.prev_line = None
    def incomplete_tasks(self):
        # Symmetric difference: received-but-never-ready (and vice versa).
        return self.ids ^ self.ready
    def report(self):
        """Return the aggregate numbers used by REPORT_FORMAT."""
        return {
            'task': {
                'types': _task_counts(self.task_types.most_common()),
                'total': len(self.ids),
                'errors': self.task_errors,
                'completed': len(self.ready),
                'succeeded': len(self.ready) - self.task_errors,
            }
        }
@click.group()
@click.pass_context
@handle_preload_options
def logtool(ctx):
    """The ``celery logtool`` command."""
    # Container group only -- see stats/traces/errors/incomplete/debug.
@logtool.command(cls=CeleryCommand)
@click.argument('files', nargs=-1)
@click.pass_context
def stats(ctx, files):
    """Print an aggregate task report for the given log FILES."""
    ctx.obj.echo(REPORT_FORMAT.format(
        **Audit().run(files).report()
    ))
@logtool.command(cls=CeleryCommand)
@click.argument('files', nargs=-1)
@click.pass_context
def traces(ctx, files):
    """Print the tracebacks found in the given log FILES."""
    Audit(on_trace=ctx.obj.echo).run(files)
@logtool.command(cls=CeleryCommand)
@click.argument('files', nargs=-1)
@click.pass_context
def errors(ctx, files):
    """Print the log line of every failed task in the given FILES."""
    Audit(on_task_error=lambda line, *_: ctx.obj.echo(line)).run(files)
@logtool.command(cls=CeleryCommand)
@click.argument('files', nargs=-1)
@click.pass_context
def incomplete(ctx, files):
    """List ids of tasks received but never marked as ready."""
    audit = Audit()
    audit.run(files)
    for task_id in audit.incomplete_tasks():
        ctx.obj.echo(f'Did not complete: {task_id}')
@logtool.command(cls=CeleryCommand)
@click.argument('files', nargs=-1)
@click.pass_context
def debug(ctx, files):
    """Print unclassified (debug) log lines from the given FILES."""
    Audit(on_debug=ctx.obj.echo).run(files)

View File

@@ -0,0 +1,63 @@
"""The ``celery migrate`` command, used to filter and move messages."""
import click
from kombu import Connection
from celery.bin.base import CeleryCommand, CeleryOption, handle_preload_options
from celery.contrib.migrate import migrate_tasks
@click.command(cls=CeleryCommand)
@click.argument('source')
@click.argument('destination')
@click.option('-n',
              '--limit',
              cls=CeleryOption,
              type=int,
              help_group='Migration Options',
              help='Number of tasks to consume.')
@click.option('-t',
              '--timeout',
              cls=CeleryOption,
              type=float,
              help_group='Migration Options',
              help='Timeout in seconds waiting for tasks.')
@click.option('-a',
              '--ack-messages',
              cls=CeleryOption,
              is_flag=True,
              help_group='Migration Options',
              help='Ack messages from source broker.')
@click.option('-T',
              '--tasks',
              cls=CeleryOption,
              help_group='Migration Options',
              help='List of task names to filter on.')
@click.option('-Q',
              '--queues',
              cls=CeleryOption,
              help_group='Migration Options',
              help='List of queues to migrate.')
@click.option('-F',
              '--forever',
              cls=CeleryOption,
              is_flag=True,
              help_group='Migration Options',
              help='Continually migrate tasks until killed.')
@click.pass_context
@handle_preload_options
def migrate(ctx, source, destination, **kwargs):
    """Migrate tasks from one broker to another.
    Warning:
        This command is experimental, make sure you have a backup of
        the tasks before you continue.

    SOURCE and DESTINATION are broker URLs; all remaining options are
    forwarded to :func:`celery.contrib.migrate.migrate_tasks`.
    """
    # TODO: Use a progress bar
    def on_migrate_task(state, body, message):
        # Progress callback invoked once per migrated message.
        ctx.obj.echo(f"Migrating task {state.count}/{state.strtotal}: {body}")
    migrate_tasks(Connection(source),
                  Connection(destination),
                  callback=on_migrate_task,
                  **kwargs)

View File

@@ -0,0 +1,480 @@
"""Start multiple worker instances from the command-line.
.. program:: celery multi
Examples
========
.. code-block:: console
$ # Single worker with explicit name and events enabled.
$ celery multi start Leslie -E
$ # Pidfiles and logfiles are stored in the current directory
$ # by default. Use --pidfile and --logfile argument to change
$ # this. The abbreviation %n will be expanded to the current
$ # node name.
$ celery multi start Leslie -E --pidfile=/var/run/celery/%n.pid
--logfile=/var/log/celery/%n%I.log
$ # You need to add the same arguments when you restart,
$ # as these aren't persisted anywhere.
$ celery multi restart Leslie -E --pidfile=/var/run/celery/%n.pid
--logfile=/var/log/celery/%n%I.log
$ # To stop the node, you need to specify the same pidfile.
$ celery multi stop Leslie --pidfile=/var/run/celery/%n.pid
$ # 3 workers, with 3 processes each
$ celery multi start 3 -c 3
celery worker -n celery1@myhost -c 3
celery worker -n celery2@myhost -c 3
celery worker -n celery3@myhost -c 3
$ # override name prefix when using range
$ celery multi start 3 --range-prefix=worker -c 3
celery worker -n worker1@myhost -c 3
celery worker -n worker2@myhost -c 3
celery worker -n worker3@myhost -c 3
$ # start 3 named workers
$ celery multi start image video data -c 3
celery worker -n image@myhost -c 3
celery worker -n video@myhost -c 3
celery worker -n data@myhost -c 3
$ # specify custom hostname
$ celery multi start 2 --hostname=worker.example.com -c 3
celery worker -n celery1@worker.example.com -c 3
celery worker -n celery2@worker.example.com -c 3
$ # specify fully qualified nodenames
$ celery multi start foo@worker.example.com bar@worker.example.com -c 3
$ # fully qualified nodenames but using the current hostname
$ celery multi start foo@%h bar@%h
$ # Advanced example starting 10 workers in the background:
$ # * Three of the workers processes the images and video queue
$ # * Two of the workers processes the data queue with loglevel DEBUG
$ # * the rest processes the default' queue.
$ celery multi start 10 -l INFO -Q:1-3 images,video -Q:4,5 data
-Q default -L:4,5 DEBUG
$ # You can show the commands necessary to start the workers with
$ # the 'show' command:
$ celery multi show 10 -l INFO -Q:1-3 images,video -Q:4,5 data
-Q default -L:4,5 DEBUG
$ # Additional options are added to each celery worker's command,
$ # but you can also modify the options for ranges of, or specific workers
$ # 3 workers: Two with 3 processes, and one with 10 processes.
$ celery multi start 3 -c 3 -c:1 10
celery worker -n celery1@myhost -c 10
celery worker -n celery2@myhost -c 3
celery worker -n celery3@myhost -c 3
$ # can also specify options for named workers
$ celery multi start image video data -c 3 -c:image 10
celery worker -n image@myhost -c 10
celery worker -n video@myhost -c 3
celery worker -n data@myhost -c 3
$ # ranges and lists of workers in options is also allowed:
$ # (-c:1-3 can also be written as -c:1,2,3)
$ celery multi start 5 -c 3 -c:1-3 10
celery worker -n celery1@myhost -c 10
celery worker -n celery2@myhost -c 10
celery worker -n celery3@myhost -c 10
celery worker -n celery4@myhost -c 3
celery worker -n celery5@myhost -c 3
$ # lists also works with named workers
$ celery multi start foo bar baz xuzzy -c 3 -c:foo,bar,baz 10
celery worker -n foo@myhost -c 10
celery worker -n bar@myhost -c 10
celery worker -n baz@myhost -c 10
celery worker -n xuzzy@myhost -c 3
"""
import os
import signal
import sys
from functools import wraps
import click
from kombu.utils.objects import cached_property
from celery import VERSION_BANNER
from celery.apps.multi import Cluster, MultiParser, NamespacedOptionParser
from celery.bin.base import CeleryCommand, handle_preload_options
from celery.platforms import EX_FAILURE, EX_OK, signals
from celery.utils import term
from celery.utils.text import pluralize
__all__ = ('MultiTool',)
USAGE = """\
usage: {prog_name} start <node1 node2 nodeN|range> [worker options]
{prog_name} stop <n1 n2 nN|range> [-SIG (default: -TERM)]
{prog_name} restart <n1 n2 nN|range> [-SIG] [worker options]
{prog_name} kill <n1 n2 nN|range>
{prog_name} show <n1 n2 nN|range> [worker options]
{prog_name} get hostname <n1 n2 nN|range> [-qv] [worker options]
{prog_name} names <n1 n2 nN|range>
{prog_name} expand template <n1 n2 nN|range>
{prog_name} help
additional options (must appear after command name):
* --nosplash: Don't display program info.
* --quiet: Don't show as much output.
* --verbose: Show more output.
* --no-color: Don't display colors.
"""
def main():
    """Entry point for the standalone ``celery multi`` program."""
    sys.exit(MultiTool().execute_from_commandline(sys.argv))
def splash(fun):
    """Method decorator: print the version splash before invoking *fun*."""
    @wraps(fun)
    def decorated(self, *args, **kwargs):
        self.splash()
        return fun(self, *args, **kwargs)
    return decorated
def using_cluster(fun):
    """Method decorator: parse argv into a Cluster and pass it to *fun*."""
    @wraps(fun)
    def decorated(self, *argv, **kwargs):
        cluster = self.cluster_from_argv(argv)
        return fun(self, cluster, **kwargs)
    return decorated
def using_cluster_and_sig(fun):
    """Like :func:`using_cluster` but also extracts a trailing -SIG arg."""
    @wraps(fun)
    def decorated(self, *argv, **kwargs):
        parser, cluster = self._cluster_from_argv(argv)
        signum = self._find_sig_argument(parser)
        return fun(self, cluster, signum, **kwargs)
    return decorated
class TermLogger:
    """Terminal output mixin honouring --quiet/--verbose/--no-color."""
    splash_text = 'celery multi v{version}'
    splash_context = {'version': VERSION_BANNER}
    #: Final exit code.
    retcode = 0
    def setup_terminal(self, stdout, stderr,
                       nosplash=False, quiet=False, verbose=False,
                       no_color=False, **kwargs):
        """Bind output streams and verbosity flags (call before output)."""
        self.stdout = stdout or sys.stdout
        self.stderr = stderr or sys.stderr
        self.nosplash = nosplash
        self.quiet = quiet
        self.verbose = verbose
        self.no_color = no_color
    def ok(self, m, newline=True, file=None):
        """Print *m* and return the EX_OK exit code."""
        self.say(m, newline=newline, file=file)
        return EX_OK
    def say(self, m, newline=True, file=None):
        """Unconditionally print *m* (stdout by default)."""
        print(m, file=file or self.stdout, end='\n' if newline else '')
    def carp(self, m, newline=True, file=None):
        """Complain: like say() but defaults to stderr."""
        return self.say(m, newline, file or self.stderr)
    def error(self, msg=None):
        """Print *msg* + usage and return the EX_FAILURE exit code."""
        if msg:
            self.carp(msg)
        self.usage()
        return EX_FAILURE
    def info(self, msg, newline=True):
        """Print only when --verbose is in effect."""
        if self.verbose:
            self.note(msg, newline=newline)
    def note(self, msg, newline=True):
        """Print unless --quiet is in effect."""
        if not self.quiet:
            self.say(str(msg), newline=newline)
    @splash
    def usage(self):
        self.say(USAGE.format(prog_name=self.prog_name))
    def splash(self):
        # Shown once per invocation unless --nosplash was given.
        if not self.nosplash:
            self.note(self.colored.cyan(
                self.splash_text.format(**self.splash_context)))
    @cached_property
    def colored(self):
        return term.colored(enabled=not self.no_color)
class MultiTool(TermLogger):
    """The ``celery multi`` program."""
    MultiParser = MultiParser
    OptionParser = NamespacedOptionParser
    # Options consumed by multi itself; stripped from argv before the
    # remaining arguments are parsed into worker nodes.
    reserved_options = [
        ('--nosplash', 'nosplash'),
        ('--quiet', 'quiet'),
        ('-q', 'quiet'),
        ('--verbose', 'verbose'),
        ('--no-color', 'no_color'),
    ]
    def __init__(self, env=None, cmd=None,
                 fh=None, stdout=None, stderr=None, **kwargs):
        # fh is an old alias to stdout.
        self.env = env
        self.cmd = cmd
        self.setup_terminal(stdout or fh, stderr, **kwargs)
        self.fh = self.stdout
        self.prog_name = 'celery multi'
        # Maps sub-command name -> bound handler method.
        self.commands = {
            'start': self.start,
            'show': self.show,
            'stop': self.stop,
            'stopwait': self.stopwait,
            'stop_verify': self.stopwait,  # compat alias
            'restart': self.restart,
            'kill': self.kill,
            'names': self.names,
            'expand': self.expand,
            'get': self.get,
            'help': self.help,
        }
    def execute_from_commandline(self, argv, cmd=None):
        """Dispatch *argv* to a sub-command; return a shell exit code."""
        # Reserve the --nosplash|--quiet|-q/--verbose options.
        argv = self._handle_reserved_options(argv)
        self.cmd = cmd if cmd is not None else self.cmd
        self.prog_name = os.path.basename(argv.pop(0))
        if not self.validate_arguments(argv):
            return self.error()
        return self.call_command(argv[0], argv[1:])
    def validate_arguments(self, argv):
        # Need at least one positional, and it must not be an option flag.
        return argv and argv[0][0] != '-'
    def call_command(self, command, argv):
        """Look up *command* in the dispatch table and run it."""
        try:
            return self.commands[command](*argv) or EX_OK
        except KeyError:
            return self.error(f'Invalid command: {command}')
    def _handle_reserved_options(self, argv):
        """Pop multi's own flags out of argv, setting matching attributes."""
        argv = list(argv)  # don't modify callers argv.
        for arg, attr in self.reserved_options:
            if arg in argv:
                setattr(self, attr, bool(argv.pop(argv.index(arg))))
        return argv
    @splash
    @using_cluster
    def start(self, cluster):
        self.note('> Starting nodes...')
        # Non-zero exit code if any node failed to start.
        return int(any(cluster.start()))
    @splash
    @using_cluster_and_sig
    def stop(self, cluster, sig, **kwargs):
        return cluster.stop(sig=sig, **kwargs)
    @splash
    @using_cluster_and_sig
    def stopwait(self, cluster, sig, **kwargs):
        # Like stop, but blocks until every node has exited.
        return cluster.stopwait(sig=sig, **kwargs)
    stop_verify = stopwait  # compat
    @splash
    @using_cluster_and_sig
    def restart(self, cluster, sig, **kwargs):
        return int(any(cluster.restart(sig=sig, **kwargs)))
    @using_cluster
    def names(self, cluster):
        self.say('\n'.join(n.name for n in cluster))
    def get(self, wanted, *argv):
        """Print the full argv of the single node named *wanted*."""
        try:
            node = self.cluster_from_argv(argv).find(wanted)
        except KeyError:
            return EX_FAILURE
        else:
            return self.ok(' '.join(node.argv))
    @using_cluster
    def show(self, cluster):
        # Print the worker command line for every node, one per line.
        return self.ok('\n'.join(
            ' '.join(node.argv_with_executable)
            for node in cluster
        ))
    @splash
    @using_cluster
    def kill(self, cluster):
        return cluster.kill()
    def expand(self, template, *argv):
        """Expand %-format *template* for every node in the cluster."""
        return self.ok('\n'.join(
            node.expander(template)
            for node in self.cluster_from_argv(argv)
        ))
    def help(self, *argv):
        self.say(__doc__)
    def _find_sig_argument(self, p, default=signal.SIGTERM):
        """Find a trailing ``-9``/``-TERM``-style signal argument."""
        args = p.args[len(p.values):]
        for arg in reversed(args):
            if len(arg) == 2 and arg[0] == '-':
                try:
                    return int(arg[1])
                except ValueError:
                    pass
            if arg[0] == '-':
                try:
                    return signals.signum(arg[1:])
                except (AttributeError, TypeError):
                    pass
        return default
    def _nodes_from_argv(self, argv, cmd=None):
        """Parse argv into (parser, node iterator)."""
        cmd = cmd if cmd is not None else self.cmd
        p = self.OptionParser(argv)
        p.parse()
        return p, self.MultiParser(cmd=cmd).parse(p)
    def cluster_from_argv(self, argv, cmd=None):
        """Parse argv and return the resulting Cluster (parser discarded)."""
        _, cluster = self._cluster_from_argv(argv, cmd=cmd)
        return cluster
    def _cluster_from_argv(self, argv, cmd=None):
        p, nodes = self._nodes_from_argv(argv, cmd=cmd)
        return p, self.Cluster(list(nodes), cmd=cmd)
    def Cluster(self, nodes, cmd=None):
        """Cluster factory wiring all terminal callbacks to this tool."""
        return Cluster(
            nodes,
            cmd=cmd,
            env=self.env,
            on_stopping_preamble=self.on_stopping_preamble,
            on_send_signal=self.on_send_signal,
            on_still_waiting_for=self.on_still_waiting_for,
            on_still_waiting_progress=self.on_still_waiting_progress,
            on_still_waiting_end=self.on_still_waiting_end,
            on_node_start=self.on_node_start,
            on_node_restart=self.on_node_restart,
            on_node_shutdown_ok=self.on_node_shutdown_ok,
            on_node_status=self.on_node_status,
            on_node_signal_dead=self.on_node_signal_dead,
            on_node_signal=self.on_node_signal,
            on_node_down=self.on_node_down,
            on_child_spawn=self.on_child_spawn,
            on_child_signalled=self.on_child_signalled,
            on_child_failure=self.on_child_failure,
        )
    def on_stopping_preamble(self, nodes):
        self.note(self.colored.blue('> Stopping nodes...'))
    def on_send_signal(self, node, sig):
        self.note('\t> {0.name}: {1} -> {0.pid}'.format(node, sig))
    def on_still_waiting_for(self, nodes):
        num_left = len(nodes)
        if num_left:
            self.note(self.colored.blue(
                '> Waiting for {} {} -> {}...'.format(
                    num_left, pluralize(num_left, 'node'),
                    ', '.join(str(node.pid) for node in nodes)),
            ), newline=False)
    def on_still_waiting_progress(self, nodes):
        self.note('.', newline=False)
    def on_still_waiting_end(self):
        self.note('')
    def on_node_signal_dead(self, node):
        self.note(
            'Could not signal {0.name} ({0.pid}): No such process'.format(
                node))
    def on_node_start(self, node):
        self.note(f'\t> {node.name}: ', newline=False)
    def on_node_restart(self, node):
        self.note(self.colored.blue(
            f'> Restarting node {node.name}: '), newline=False)
    def on_node_down(self, node):
        self.note(f'> {node.name}: {self.DOWN}')
    def on_node_shutdown_ok(self, node):
        self.note(f'\n\t> {node.name}: {self.OK}')
    def on_node_status(self, node, retval):
        self.note(retval and self.FAILED or self.OK)
    def on_node_signal(self, node, sig):
        self.note('Sending {sig} to node {0.name} ({0.pid})'.format(
            node, sig=sig))
    def on_child_spawn(self, node, argstr, env):
        self.info(f'  {argstr}')
    def on_child_signalled(self, node, signum):
        self.note(f'* Child was terminated by signal {signum}')
    def on_child_failure(self, node, retcode):
        self.note(f'* Child terminated with exit code {retcode}')
    @cached_property
    def OK(self):
        return str(self.colored.green('OK'))
    @cached_property
    def FAILED(self):
        return str(self.colored.red('FAILED'))
    @cached_property
    def DOWN(self):
        return str(self.colored.magenta('DOWN'))
@click.command(
    cls=CeleryCommand,
    context_settings={
        'allow_extra_args': True,
        'ignore_unknown_options': True
    }
)
@click.pass_context
@handle_preload_options
def multi(ctx, **kwargs):
    """Start multiple worker instances."""
    cmd = MultiTool(quiet=ctx.obj.quiet, no_color=ctx.obj.no_color)
    # In 4.x, celery multi ignores the global --app option.
    # Since in 5.0 the --app option is global only we
    # rearrange the arguments so that the MultiTool will parse them correctly.
    # NOTE(review): args.index('multi') raises ValueError if argv does not
    # literally contain 'multi' (e.g. when invoked via an alias) -- confirm.
    args = sys.argv[1:]
    args = args[args.index('multi'):] + args[:args.index('multi')]
    return cmd.execute_from_commandline(args)

View File

@@ -0,0 +1,70 @@
"""The ``celery purge`` program, used to delete messages from queues."""
import click
from celery.bin.base import COMMA_SEPARATED_LIST, CeleryCommand, CeleryOption, handle_preload_options
from celery.utils import text
@click.command(cls=CeleryCommand, context_settings={
    'allow_extra_args': True
})
@click.option('-f',
              '--force',
              cls=CeleryOption,
              is_flag=True,
              help_group='Purging Options',
              help="Don't prompt for verification.")
@click.option('-Q',
              '--queues',
              cls=CeleryOption,
              type=COMMA_SEPARATED_LIST,
              help_group='Purging Options',
              help="Comma separated list of queue names to purge.")
@click.option('-X',
              '--exclude-queues',
              cls=CeleryOption,
              type=COMMA_SEPARATED_LIST,
              help_group='Purging Options',
              help="Comma separated list of queues names not to purge.")
@click.pass_context
@handle_preload_options
def purge(ctx, force, queues, exclude_queues, **kwargs):
    """Erase all messages from all known task queues.
    Warning:
        There's no undo operation for this command.
    """
    app = ctx.obj.app
    # Default to every queue the app knows about, minus exclusions.
    queues = set(queues or app.amqp.queues.keys())
    exclude_queues = set(exclude_queues or [])
    names = queues - exclude_queues
    qnum = len(names)
    # Silently does nothing when the resulting set is empty.
    if names:
        queues_headline = text.pluralize(qnum, 'queue')
        if not force:
            queue_names = ', '.join(sorted(names))
            click.confirm(f"{ctx.obj.style('WARNING', fg='red')}:"
                          "This will remove all tasks from "
                          f"{queues_headline}: {queue_names}.\n"
                          " There is no undo for this operation!\n\n"
                          "(to skip this prompt use the -f option)\n"
                          "Are you sure you want to delete all tasks?",
                          abort=True)
        def _purge(conn, queue):
            # Best effort: a queue that cannot be purged counts as 0 messages.
            try:
                return conn.default_channel.queue_purge(queue) or 0
            except conn.channel_errors:
                return 0
        with app.connection_for_write() as conn:
            messages = sum(_purge(conn, queue) for queue in names)
        if messages:
            messages_headline = text.pluralize(messages, 'message')
            ctx.obj.echo(f"Purged {messages} {messages_headline} from "
                         f"{qnum} known task {queues_headline}.")
        else:
            ctx.obj.echo(f"No messages purged from {qnum} {queues_headline}.")

View File

@@ -0,0 +1,30 @@
"""The ``celery result`` program, used to inspect task results."""
import click
from celery.bin.base import CeleryCommand, CeleryOption, handle_preload_options
@click.command(cls=CeleryCommand)
@click.argument('task_id')
@click.option('-t',
              '--task',
              cls=CeleryOption,
              help_group='Result Options',
              help="Name of task (if custom backend).")
@click.option('--traceback',
              cls=CeleryOption,
              is_flag=True,
              help_group='Result Options',
              help="Show traceback instead.")
@click.pass_context
@handle_preload_options
def result(ctx, task_id, task, traceback):
    """Print the return value for a given task id."""
    app = ctx.obj.app
    # Use the task's own result class when --task is given, so that
    # per-task custom backends are honoured.
    result_cls = app.tasks[task].AsyncResult if task else app.AsyncResult
    task_result = result_cls(task_id)
    # NOTE(review): .get() blocks until the result is available -- confirm
    # this is the intended behaviour for pending tasks.
    value = task_result.traceback if traceback else task_result.get()
    # TODO: Prettify result
    ctx.obj.echo(value)

View File

@@ -0,0 +1,173 @@
"""The ``celery shell`` program, used to start a REPL."""
import os
import sys
from importlib import import_module
import click
from celery.bin.base import CeleryCommand, CeleryOption, handle_preload_options
def _invoke_fallback_shell(locals):
    """Start the built-in ``code`` REPL, with tab completion if available."""
    import code
    try:
        import readline
    except ImportError:
        # No readline on this platform: plain REPL without completion.
        pass
    else:
        import rlcompleter
        completer = rlcompleter.Completer(locals)
        readline.set_completer(completer.complete)
        readline.parse_and_bind('tab:complete')
    code.interact(local=locals)
def _invoke_bpython_shell(locals):
    """Embed a bpython REPL (raises ImportError if bpython is missing)."""
    import bpython
    bpython.embed(locals)
def _invoke_ipython_shell(locals):
    """Start IPython, trying each known entry point from newest to oldest.

    The last candidate, ``_no_ipython``, always raises ImportError, which
    propagates to the caller when no IPython API is available at all.
    """
    for ip in (_ipython, _ipython_pre_10,
               _ipython_terminal, _ipython_010,
               _no_ipython):
        try:
            return ip(locals)
        except ImportError:
            pass
def _ipython(locals):
    """Start a modern IPython (>= 1.0) session."""
    from IPython import start_ipython
    start_ipython(argv=[], user_ns=locals)
def _ipython_pre_10(locals):  # pragma: no cover
    """Start IPython < 1.0 via the old frontend terminal app."""
    from IPython.frontend.terminal.ipapp import TerminalIPythonApp
    app = TerminalIPythonApp.instance()
    app.initialize(argv=[])
    app.shell.user_ns.update(locals)
    app.start()
def _ipython_terminal(locals):  # pragma: no cover
    """Start IPython via the terminal embedding API."""
    from IPython.terminal import embed
    embed.TerminalInteractiveShell(user_ns=locals).mainloop()
def _ipython_010(locals):  # pragma: no cover
    """Start the ancient IPython 0.10 shell."""
    from IPython.Shell import IPShell
    IPShell(argv=[], user_ns=locals).mainloop()
def _no_ipython(self):  # pragma: no cover
    """Sentinel fallback: always raises ImportError.

    NOTE(review): the parameter is named ``self`` but receives the locals
    dict from _invoke_ipython_shell -- kept for interface compatibility.
    """
    raise ImportError('no suitable ipython found')
def _invoke_default_shell(locals):
    """Pick the best available REPL: IPython, then bpython, then ``code``."""
    try:
        import IPython  # noqa
    except ImportError:
        pass
    else:
        _invoke_ipython_shell(locals)
        return
    try:
        import bpython  # noqa
    except ImportError:
        _invoke_fallback_shell(locals)
    else:
        _invoke_bpython_shell(locals)
@click.command(cls=CeleryCommand, context_settings={
    'allow_extra_args': True
})
@click.option('-I',
              '--ipython',
              is_flag=True,
              cls=CeleryOption,
              help_group="Shell Options",
              help="Force IPython.")
@click.option('-B',
              '--bpython',
              is_flag=True,
              cls=CeleryOption,
              help_group="Shell Options",
              help="Force bpython.")
@click.option('--python',
              is_flag=True,
              cls=CeleryOption,
              help_group="Shell Options",
              help="Force default Python shell.")
@click.option('-T',
              '--without-tasks',
              is_flag=True,
              cls=CeleryOption,
              help_group="Shell Options",
              help="Don't add tasks to locals.")
@click.option('--eventlet',
              is_flag=True,
              cls=CeleryOption,
              help_group="Shell Options",
              help="Use eventlet.")
@click.option('--gevent',
              is_flag=True,
              cls=CeleryOption,
              help_group="Shell Options",
              help="Use gevent.")
@click.pass_context
@handle_preload_options
def shell(ctx, ipython=False, bpython=False,
          python=False, without_tasks=False, eventlet=False,
          gevent=False, **kwargs):
    """Start shell session with convenient access to celery symbols.
    The following symbols will be added to the main globals:
    - ``celery``: the current application.
    - ``chord``, ``group``, ``chain``, ``chunks``,
      ``xmap``, ``xstarmap`` ``subtask``, ``Task``
    - all registered tasks.
    """
    # Make modules in the current directory importable from the shell.
    sys.path.insert(0, os.getcwd())
    if eventlet:
        import_module('celery.concurrency.eventlet')
    if gevent:
        import_module('celery.concurrency.gevent')
    import celery
    app = ctx.obj.app
    app.loader.import_default_modules()
    # pylint: disable=attribute-defined-outside-init
    locals = {
        'app': app,
        'celery': app,
        'Task': celery.Task,
        'chord': celery.chord,
        'group': celery.group,
        'chain': celery.chain,
        'chunks': celery.chunks,
        'xmap': celery.xmap,
        'xstarmap': celery.xstarmap,
        'subtask': celery.subtask,
        'signature': celery.signature,
    }
    # Registered tasks are exposed by function name, internal celery.*
    # housekeeping tasks excluded.
    if not without_tasks:
        locals.update({
            task.__name__: task for task in app.tasks.values()
            if not task.name.startswith('celery.')
        })
    if python:
        _invoke_fallback_shell(locals)
    elif bpython:
        try:
            _invoke_bpython_shell(locals)
        except ImportError:
            ctx.obj.echo(f'{ctx.obj.ERROR}: bpython is not installed')
    elif ipython:
        try:
            _invoke_ipython_shell(locals)
        except ImportError as e:
            ctx.obj.echo(f'{ctx.obj.ERROR}: {e}')
    else:
        # BUGFIX: this call was unconditional, so the default shell was
        # started a second time after an explicitly requested
        # python/bpython/ipython shell exited.  It now only runs when no
        # specific shell was requested.
        _invoke_default_shell(locals)

View File

@@ -0,0 +1,91 @@
"""The ``celery upgrade`` command, used to upgrade from previous versions."""
import codecs
import sys
import click
from celery.app import defaults
from celery.bin.base import CeleryCommand, CeleryOption, handle_preload_options
from celery.utils.functional import pass1
@click.group()
@click.pass_context
@handle_preload_options
def upgrade(ctx):
    """Perform upgrade between versions."""
    # Empty click group: subcommands (e.g. ``settings``) are registered
    # below via ``@upgrade.command(...)``.
def _slurp(filename):
# TODO: Handle case when file does not exist
with codecs.open(filename, 'r', 'utf-8') as read_fh:
return [line for line in read_fh]
def _compat_key(key, namespace='CELERY'):
key = key.upper()
if not key.startswith(namespace):
key = '_'.join([namespace, key])
return key
def _backup(filename, suffix='.orig'):
lines = []
backup_filename = ''.join([filename, suffix])
print(f'writing backup to {backup_filename}...',
file=sys.stderr)
with codecs.open(filename, 'r', 'utf-8') as read_fh:
with codecs.open(backup_filename, 'w', 'utf-8') as backup_fh:
for line in read_fh:
backup_fh.write(line)
lines.append(line)
return lines
def _to_new_key(line, keyfilter=pass1, source=defaults._TO_NEW_KEY):
    """Rewrite one settings line to the new-style key, if any old key matches.

    Returns a tuple ``(did_change, line_contents)`` where ``did_change``
    is 1 when a replacement was made and 0 otherwise.
    """
    # sort by length to avoid, for example, broker_transport overriding
    # broker_transport_options.
    for old_key in reversed(sorted(source, key=len)):
        new_line = line.replace(old_key, keyfilter(source[old_key]))
        if new_line != line and 'CELERY_CELERY' not in new_line:
            return 1, new_line  # only one match per line.
    return 0, line
@upgrade.command(cls=CeleryCommand)
@click.argument('filename')
@click.option('--django',
              cls=CeleryOption,
              is_flag=True,
              help_group='Upgrading Options',
              help='Upgrade Django project.')
@click.option('--compat',
              cls=CeleryOption,
              is_flag=True,
              help_group='Upgrading Options',
              help='Maintain backwards compatibility.')
@click.option('--no-backup',
              cls=CeleryOption,
              is_flag=True,
              help_group='Upgrading Options',
              help="Don't backup original files.")
def settings(filename, django, compat, no_backup):
    """Migrate settings from Celery 3.x to Celery 4.x.

    Rewrites old-style (upper-case ``CELERY_*``) keys in FILENAME to the
    new lower-case names, backing up the original file unless
    ``--no-backup`` is given.
    """
    lines = _slurp(filename)
    # Django/compat mode keeps the upper-case CELERY_-prefixed form.
    keyfilter = _compat_key if django or compat else pass1
    # Bug fix: the progress message previously printed a literal
    # "(unknown)" instead of interpolating the file being processed.
    print(f'processing {filename}...', file=sys.stderr)
    # gives list of tuples: ``(did_change, line_contents)``
    new_lines = [
        _to_new_key(line, keyfilter) for line in lines
    ]
    if any(n[0] for n in new_lines):  # did have changes
        if not no_backup:
            _backup(filename)
        with codecs.open(filename, 'w', 'utf-8') as write_fh:
            for _, line in new_lines:
                write_fh.write(line)
        print('Changes to your setting have been made!',
              file=sys.stdout)
    else:
        print('Does not seem to require any changes :-)',
              file=sys.stdout)

View File

@@ -0,0 +1,360 @@
"""Program used to start a Celery worker instance."""
import os
import sys
import click
from click import ParamType
from click.types import StringParamType
from celery import concurrency
from celery.bin.base import (COMMA_SEPARATED_LIST, LOG_LEVEL, CeleryDaemonCommand, CeleryOption,
handle_preload_options)
from celery.concurrency.base import BasePool
from celery.exceptions import SecurityError
from celery.platforms import EX_FAILURE, EX_OK, detached, maybe_drop_privileges
from celery.utils.log import get_logger
from celery.utils.nodenames import default_nodename, host_format, node_format
logger = get_logger(__name__)
class CeleryBeat(ParamType):
    """Celery Beat flag."""

    name = "beat"

    def convert(self, value, param, ctx):
        """Reject the -B flag on Windows; pass it through elsewhere."""
        on_windows = ctx.obj.app.IS_WINDOWS
        if on_windows and value:
            self.fail('-B option does not work on Windows. '
                      'Please run celery beat as a separate service.')
        return value
class WorkersPool(click.Choice):
    """Workers pool option."""

    name = "pool"

    def __init__(self):
        """Initialize the workers pool option with the relevant choices."""
        super().__init__(concurrency.get_available_pool_names())

    def convert(self, value, param, ctx):
        # Pools like eventlet/gevent needs to patch libs as early
        # as possible, so the value may already be a resolved pool class.
        if isinstance(value, type) and issubclass(value, BasePool):
            return value

        chosen = super().convert(value, param, ctx)
        configured = ctx.obj.app.conf.worker_pool
        if chosen == 'prefork' and configured:
            # 'prefork' is the CLI default; if the app configuration sets
            # a worker pool, that configured pool wins over the default.
            return concurrency.get_implementation(configured)
        return (concurrency.get_implementation(chosen) or
                concurrency.get_implementation(configured))
class Hostname(StringParamType):
    """Hostname option."""

    name = "hostname"

    def convert(self, value, param, ctx):
        """Fill in the default nodename and expand %-format patterns."""
        nodename = default_nodename(value)
        return host_format(nodename)
class Autoscale(ParamType):
    """Autoscaling parameter.

    Accepts either ``"max,min"`` (two comma separated integers) or a
    single integer ``"max"`` (min defaults to 0) and converts it to a
    ``(max, min)`` tuple.
    """

    name = "<min workers>, <max workers>"

    def convert(self, value, param, ctx):
        value = value.split(',')
        if len(value) > 2:
            # Bug fix: added the missing space between the two adjacent
            # string literals ("...integer.Got N..." -> "...integer. Got N...").
            self.fail("Expected two comma separated integers or one integer. "
                      f"Got {len(value)} instead.")
        if len(value) == 1:
            try:
                # Single value: treat it as the maximum, minimum 0.
                value = (int(value[0]), 0)
            except ValueError:
                self.fail(f"Expected an integer. Got {value} instead.")
        try:
            # Normalize to (max, min) regardless of the input order.
            return tuple(reversed(sorted(map(int, value))))
        except ValueError:
            # Bug fix: `value.join(',')` called .join on a list (which has
            # no such method), so reporting the error itself raised
            # AttributeError; ','.join(value) is the correct form.
            self.fail("Expected two comma separated integers. "
                      f"Got {','.join(value)} instead.")
# Shared singleton instances of the parameter types declared above,
# referenced by the click option declarations below.
CELERY_BEAT = CeleryBeat()
WORKERS_POOL = WorkersPool()
HOSTNAME = Hostname()
AUTOSCALE = Autoscale()
# When set in the environment, `detach` only pretends to daemonize
# (debugging aid for daemonization problems).
C_FAKEFORK = os.environ.get('C_FAKEFORK')
def detach(path, argv, logfile=None, pidfile=None, uid=None,
           gid=None, umask=None, workdir=None, fake=False, app=None,
           executable=None, hostname=None):
    """Detach program by argv.

    Daemonizes via ``detached(...)`` and then replaces the child process
    image with ``path argv...`` using ``os.execv``.  Returns ``EX_FAILURE``
    if the re-exec fails (``EX_OK`` is returned only in fake mode, since a
    successful ``execv`` never returns).
    """
    # C_FAKEFORK in the environment forces fake (no-op) daemonization.
    fake = 1 if C_FAKEFORK else fake
    # `detached()` will attempt to touch the logfile to confirm that error
    # messages won't be lost after detaching stdout/err, but this means we need
    # to pre-format it rather than relying on `setup_logging_subsystem()` like
    # we can elsewhere.
    logfile = node_format(logfile, hostname)
    with detached(logfile, pidfile, uid, gid, umask, workdir, fake,
                  after_forkers=False):
        try:
            if executable is not None:
                # Caller may override the interpreter/binary to exec.
                path = executable
            os.execv(path, [path] + argv)
            return EX_OK
        except Exception:  # pylint: disable=broad-except
            if app is None:
                from celery import current_app
                app = current_app
            # Logging wasn't set up before the fork; do it now so the
            # exec failure is recorded somewhere visible.
            app.log.setup_logging_subsystem(
                'ERROR', logfile, hostname=hostname)
            logger.critical("Can't exec %r", ' '.join([path] + argv),
                            exc_info=True)
        return EX_FAILURE
@click.command(cls=CeleryDaemonCommand,
               context_settings={'allow_extra_args': True})
@click.option('-n',
              '--hostname',
              default=host_format(default_nodename(None)),
              cls=CeleryOption,
              type=HOSTNAME,
              help_group="Worker Options",
              help="Set custom hostname (e.g., 'w1@%%h'). "
                   "Expands: %%h (hostname), %%n (name) and %%d, (domain).")
@click.option('-D',
              '--detach',
              cls=CeleryOption,
              is_flag=True,
              default=False,
              help_group="Worker Options",
              help="Start worker as a background process.")
@click.option('-S',
              '--statedb',
              cls=CeleryOption,
              type=click.Path(),
              callback=lambda ctx, _,
              value: value or ctx.obj.app.conf.worker_state_db,
              help_group="Worker Options",
              help="Path to the state database. The extension '.db' may be "
                   "appended to the filename.")
@click.option('-l',
              '--loglevel',
              default='WARNING',
              cls=CeleryOption,
              type=LOG_LEVEL,
              help_group="Worker Options",
              help="Logging level.")
@click.option('-O',
              '--optimization',
              default='default',
              cls=CeleryOption,
              type=click.Choice(('default', 'fair')),
              help_group="Worker Options",
              help="Apply optimization profile.")
@click.option('--prefetch-multiplier',
              type=int,
              metavar="<prefetch multiplier>",
              callback=lambda ctx, _,
              value: value or ctx.obj.app.conf.worker_prefetch_multiplier,
              cls=CeleryOption,
              help_group="Worker Options",
              help="Set custom prefetch multiplier value "
                   "for this worker instance.")
@click.option('-c',
              '--concurrency',
              type=int,
              metavar="<concurrency>",
              callback=lambda ctx, _,
              value: value or ctx.obj.app.conf.worker_concurrency,
              cls=CeleryOption,
              help_group="Pool Options",
              help="Number of child processes processing the queue. "
                   "The default is the number of CPUs available"
                   " on your system.")
@click.option('-P',
              '--pool',
              default='prefork',
              type=WORKERS_POOL,
              cls=CeleryOption,
              help_group="Pool Options",
              help="Pool implementation.")
@click.option('-E',
              '--task-events',
              '--events',
              is_flag=True,
              default=None,
              cls=CeleryOption,
              help_group="Pool Options",
              help="Send task-related events that can be captured by monitors"
                   " like celery events, celerymon, and others.")
@click.option('--time-limit',
              type=float,
              cls=CeleryOption,
              help_group="Pool Options",
              help="Enables a hard time limit "
                   "(in seconds int/float) for tasks.")
@click.option('--soft-time-limit',
              type=float,
              cls=CeleryOption,
              help_group="Pool Options",
              help="Enables a soft time limit "
                   "(in seconds int/float) for tasks.")
@click.option('--max-tasks-per-child',
              type=int,
              cls=CeleryOption,
              help_group="Pool Options",
              help="Maximum number of tasks a pool worker can execute before "
                   "it's terminated and replaced by a new worker.")
@click.option('--max-memory-per-child',
              type=int,
              cls=CeleryOption,
              help_group="Pool Options",
              help="Maximum amount of resident memory, in KiB, that may be "
                   "consumed by a child process before it will be replaced "
                   "by a new one. If a single task causes a child process "
                   "to exceed this limit, the task will be completed and "
                   "the child process will be replaced afterwards.\n"
                   "Default: no limit.")
@click.option('--purge',
              '--discard',
              is_flag=True,
              cls=CeleryOption,
              help_group="Queue Options")
@click.option('--queues',
              '-Q',
              type=COMMA_SEPARATED_LIST,
              cls=CeleryOption,
              help_group="Queue Options")
@click.option('--exclude-queues',
              '-X',
              type=COMMA_SEPARATED_LIST,
              cls=CeleryOption,
              help_group="Queue Options")
@click.option('--include',
              '-I',
              type=COMMA_SEPARATED_LIST,
              cls=CeleryOption,
              help_group="Queue Options")
@click.option('--without-gossip',
              is_flag=True,
              cls=CeleryOption,
              help_group="Features")
@click.option('--without-mingle',
              is_flag=True,
              cls=CeleryOption,
              help_group="Features")
@click.option('--without-heartbeat',
              is_flag=True,
              cls=CeleryOption,
              help_group="Features", )
@click.option('--heartbeat-interval',
              type=int,
              cls=CeleryOption,
              help_group="Features", )
@click.option('--autoscale',
              type=AUTOSCALE,
              cls=CeleryOption,
              help_group="Features", )
@click.option('-B',
              '--beat',
              type=CELERY_BEAT,
              cls=CeleryOption,
              is_flag=True,
              help_group="Embedded Beat Options")
@click.option('-s',
              '--schedule-filename',
              '--schedule',
              callback=lambda ctx, _,
              value: value or ctx.obj.app.conf.beat_schedule_filename,
              cls=CeleryOption,
              help_group="Embedded Beat Options")
@click.option('--scheduler',
              cls=CeleryOption,
              help_group="Embedded Beat Options")
@click.pass_context
@handle_preload_options
def worker(ctx, hostname=None, pool_cls=None, app=None, uid=None, gid=None,
           loglevel=None, logfile=None, pidfile=None, statedb=None,
           **kwargs):
    """Start worker instance.

    \b
    Examples
    --------

    \b
    $ celery --app=proj worker -l INFO
    $ celery -A proj worker -l INFO -Q hipri,lopri
    $ celery -A proj worker --concurrency=4
    $ celery -A proj worker --concurrency=1000 -P eventlet
    $ celery worker --autoscale=10,0
    """
    try:
        app = ctx.obj.app
        # Extra args are allowed (allow_extra_args=True) and treated as
        # ad-hoc configuration overrides in the 'worker' namespace.
        if ctx.args:
            try:
                app.config_from_cmdline(ctx.args, namespace='worker')
            except (KeyError, ValueError) as e:
                # TODO: Improve the error messages
                raise click.UsageError(
                    "Unable to parse extra configuration from command line.\n"
                    f"Reason: {e}", ctx=ctx)
        if kwargs.get('detach', False):
            # Re-exec the same command line in a detached child, minus the
            # flags that must not be re-applied after daemonization.
            argv = ['-m', 'celery'] + sys.argv[1:]
            if '--detach' in argv:
                argv.remove('--detach')
            if '-D' in argv:
                argv.remove('-D')
            # NOTE(review): only the bare '--uid'/'--gid' flags are removed;
            # a '--uid=N' form or the flag's separate value argument would
            # remain in argv — confirm this is intended.
            if "--uid" in argv:
                argv.remove('--uid')
            if "--gid" in argv:
                argv.remove('--gid')
            return detach(sys.executable,
                          argv,
                          logfile=logfile,
                          pidfile=pidfile,
                          uid=uid, gid=gid,
                          umask=kwargs.get('umask', None),
                          workdir=kwargs.get('workdir', None),
                          app=app,
                          executable=kwargs.get('executable', None),
                          hostname=hostname)
        maybe_drop_privileges(uid=uid, gid=gid)
        worker = app.Worker(
            hostname=hostname, pool_cls=pool_cls, loglevel=loglevel,
            logfile=logfile,  # node format handled by celery.app.log.setup
            pidfile=node_format(pidfile, hostname),
            statedb=node_format(statedb, hostname),
            no_color=ctx.obj.no_color,
            quiet=ctx.obj.quiet,
            **kwargs)
        worker.start()
        ctx.exit(worker.exitcode)
    except SecurityError as e:
        ctx.obj.error(e.args[0])
        ctx.exit(1)