Celery tries to import the broker URL as a module? - python

I recently added Celery to my back end, but I got this weird error below:
[2017-10-25 21:41:37,142: CRITICAL/MainProcess] Unrecoverable error: ImportError('No module named myredisserverip.com',)
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/celery/worker/worker.py", line 203, in start
self.blueprint.start(self)
File "/usr/local/lib/python2.7/dist-packages/celery/bootsteps.py", line 115, in start
self.on_start()
File "/usr/local/lib/python2.7/dist-packages/celery/apps/worker.py", line 143, in on_start
self.emit_banner()
File "/usr/local/lib/python2.7/dist-packages/celery/apps/worker.py", line 158, in emit_banner
' \n', self.startup_info(artlines=not use_image))),
File "/usr/local/lib/python2.7/dist-packages/celery/apps/worker.py", line 221, in startup_info
results=self.app.backend.as_uri(),
File "/usr/local/lib/python2.7/dist-packages/kombu/utils/objects.py", line 44, in __get__
value = obj.__dict__[self.__name__] = self.__get(obj)
File "/usr/local/lib/python2.7/dist-packages/celery/app/base.py", line 1183, in backend
return self._get_backend()
File "/usr/local/lib/python2.7/dist-packages/celery/app/base.py", line 901, in _get_backend
self.loader)
File "/usr/local/lib/python2.7/dist-packages/celery/app/backends.py", line 66, in by_url
return by_name(backend, loader), url
File "/usr/local/lib/python2.7/dist-packages/celery/app/backends.py", line 46, in by_name
cls = symbol_by_name(backend, aliases)
File "/usr/local/lib/python2.7/dist-packages/kombu/utils/imports.py", line 56, in symbol_by_name
module = imp(module_name, package=package, **kwargs)
File "/usr/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
ImportError: No module named myredisserverip.com
My config.py looks like this:
import os


class BaseConfig(object):
    """ A base configuration of the app """
    DEBUG = False
    SERVER_NAME = "my-production-ip"
    SECRET_KEY = os.environ['SECRET']
    BASE_DIR = os.path.abspath(os.path.dirname(__file__))
    SQLALCHEMY_DATABASE_URI = os.environ['SQL_PRODUCTION']
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    CELERY_BROKER_URL = os.environ['CELERY_BROKER_PROD']
    CELERY_RESULT_BACKEND = os.environ['CELERY_RESULT_BACKEND_PROD']
    DATABASE_CONNECT_OPTIONS = {}
    THREADS_PER_PAGE = 2
    CSRF_ENABLED = True
    CSRF_SESSION_KEY = "secret"
    MAIL_SERVER = "smtp.gmail.com"
    MAIL_PORT = 465
    MAIL_USE_SSL = True
    MAIL_USE_TSL = False
    MAIL_USERNAME = "blabla"
    MAIL_PASSWORD = "pwd"
CELERY_BROKER_PROD and CELERY_RESULT_BACKEND_PROD are both the same; they contain the URL of the Redis instance I'm running on Amazon AWS. When I try to run
celery worker -A app.celery
from within my project directory I get this error. What is happening?
The way I set up Celery is this:
celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)

Can you confirm what the values of CELERY_BROKER_PROD and CELERY_RESULT_BACKEND_PROD are?
It seems that you have defined them as myredisserverip.com; however, according to the Celery docs, a Redis result backend should be defined as:
CELERY_RESULT_BACKEND = 'redis://:password@host:port/db'
as per the documentation here: http://docs.celeryproject.org/en/3.1/configuration.html#redis-backend-settings
For the broker_url you also need to define the transport; documentation on this can be found here:
http://docs.celeryproject.org/en/3.1/configuration.html#broker-url
Is the issue that you are missing the transport, i.e. the redis:// prefix, in your environment variables?
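In other words, the environment variables should hold full URLs with the redis:// transport prefix, not a bare hostname. A minimal sketch of what that might look like (the host, password and database number below are placeholders, not values from the question):

import os
from celery import Celery

# Placeholder values -- substitute your own host, password and db number.
# The important part is the redis:// transport prefix.
os.environ.setdefault('CELERY_BROKER_PROD', 'redis://:mypassword@myredisserverip.com:6379/0')
os.environ.setdefault('CELERY_RESULT_BACKEND_PROD', 'redis://:mypassword@myredisserverip.com:6379/0')

celery = Celery('app',
                broker=os.environ['CELERY_BROKER_PROD'],
                backend=os.environ['CELERY_RESULT_BACKEND_PROD'])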

Related

Flask SQLAlchemy KeyError 'False' in create_engine

I have a Flask app that connects to MySQL using SQLAlchemy. A strange error happens on a DB query.
Traceback (most recent call last):
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/sqlalchemy/util/_collections.py", line 1008, in __call__
    return self.registry[key]
KeyError: <greenlet.greenlet object at 0x7f22a0936a90 (otid=0x7f22a08ef100) current active started main>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/Desktop/work/nesting-app/server/users/routes.py", line 28, in register_new_user
    if form.validate_on_submit():
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/flask_wtf/form.py", line 100, in validate_on_submit
    return self.is_submitted() and self.validate()
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/wtforms/form.py", line 318, in validate
    return super(Form, self).validate(extra)
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/wtforms/form.py", line 150, in validate
    if not field.validate(self, extra):
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/wtforms/fields/core.py", line 226, in validate
    stop_validation = self._run_validation_chain(form, chain)
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/wtforms/fields/core.py", line 246, in _run_validation_chain
    validator(form, self)
  File "/home/Desktop/work/nesting-app/server/users/forms.py", line 43, in validate_email
    user = User.query.filter_by(email=email.data).first()
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/flask_sqlalchemy/__init__.py", line 552, in __get__
    return type.query_class(mapper, session=self.sa.session())
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/sqlalchemy/orm/scoping.py", line 47, in __call__
    sess = self.registry()
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/sqlalchemy/util/_collections.py", line 1010, in __call__
    return self.registry.setdefault(key, self.createfunc())
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/sqlalchemy/orm/session.py", line 4171, in __call__
    return self.class_(**local_kw)
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/flask_sqlalchemy/__init__.py", line 176, in __init__
    bind = options.pop('bind', None) or db.engine
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/flask_sqlalchemy/__init__.py", line 998, in engine
    return self.get_engine()
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/flask_sqlalchemy/__init__.py", line 1017, in get_engine
    return connector.get_engine()
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/flask_sqlalchemy/__init__.py", line 594, in get_engine
    self._engine = rv = self._sa.create_engine(sa_url, options)
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/flask_sqlalchemy/__init__.py", line 1027, in create_engine
    return sqlalchemy.create_engine(sa_url, **engine_opts)
  File "<string>", line 2, in create_engine
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/sqlalchemy/util/deprecations.py", line 298, in warned
    return fn(*args, **kwargs)
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/sqlalchemy/engine/create.py", line 661, in create_engine
    engine = engineclass(pool, dialect, u, **engine_args)
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 2758, in __init__
    self.echo = echo
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/sqlalchemy/log.py", line 225, in __set__
    instance_logger(instance, echoflag=value)
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/sqlalchemy/log.py", line 202, in instance_logger
    logger = InstanceLogger(echoflag, name)
  File "/home/Desktop/work/nesting-app/env/lib/python3.8/site-packages/sqlalchemy/log.py", line 103, in __init__
    if self._echo_map[echo] <= logging.INFO and not self.logger.handlers:
KeyError: 'False'
Here is the config.py
from os import environ, path
from dotenv import load_dotenv
from pathlib import Path

BASE_DIR = Path(__file__).resolve().parent.parent
load_dotenv(path.join(BASE_DIR, '.env'))


class Config:
    SECRET_KEY = environ.get('SECRET_KEY')
    SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://user:password@127.0.0.1:3306/dbname'
    # SQLALCHEMY_ECHO: When set to 'True', Flask-SQLAlchemy will log all database
    # activity to Python's stderr for debugging purposes.
    SQLALCHEMY_ECHO = environ.get('SQLALCHEMY_ECHO')
    # To suppress the "this option takes a lot of system resources" warning:
    SQLALCHEMY_TRACK_MODIFICATIONS = environ.get('SQLALCHEMY_TRACK_MODIFICATIONS')
Here is the __init__.py
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_login import LoginManager
from flask_mail import Mail
from flask_wtf.csrf import CSRFProtect

from .config import Config  # adjust this import to wherever Config lives in your project

db = SQLAlchemy()
bcrypt = Bcrypt()
login_manager = LoginManager()
login_manager.login_view = 'users.login'
mail = Mail()
csrf = CSRFProtect()


def create_app(config_class=Config):
    flask_app = Flask(__name__)
    flask_app.config.from_object(config_class)
    db.init_app(flask_app)
    bcrypt.init_app(flask_app)
    login_manager.init_app(flask_app)
    mail.init_app(flask_app)
    csrf.init_app(flask_app)
    return flask_app
I have tried with different connection strings:
SQLALCHEMY_DATABASE_URI=mysql+mysqlconnector://user:pass@localhost:3306/dbname
SQLALCHEMY_DATABASE_URI=mysql+mysqlconnector://user:pass@localhost/dbname
SQLALCHEMY_DATABASE_URI=mysql+mysqlconnector://user:pass@127.0.0.1:3306/dbname
Thank you for any help.
The echo flag for SQLAlchemy's create_engine function accepts a limited set of values: None, False, True, "debug" (source).
The traceback shows that the string "False" is being passed: KeyError: 'False'. In fact, the error can be reproduced by passing any string (except "debug") as the value of create_engine's echo flag:
create_engine('sqlite://', echo='banana')
results in
Traceback (most recent call last):
...
KeyError: 'banana'
At a guess, the problem is that
SQLALCHEMY_ECHO = environ.get('SQLALCHEMY_ECHO')
does not consider that environment variables are always strings. Something like this might be better:
SQLALCHEMY_ECHO = environ.get('SQLALCHEMY_ECHO') in ('1', 'True')
as it will result in a boolean value being assigned.
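If several settings need the same treatment, a small helper keeps the coercion in one place. This is just a sketch, assuming the convention that '1', 'true', 'yes' and 'on' mean enabled; adapt the accepted spellings to your own .env file:

from os import environ


def env_bool(name, default=False):
    """Interpret an environment variable as a boolean.

    Environment variables are always strings, so compare against the
    accepted 'truthy' spellings instead of passing the raw string on.
    """
    value = environ.get(name)
    if value is None:
        return default
    return value.lower() in ('1', 'true', 'yes', 'on')


class Config:
    SQLALCHEMY_ECHO = env_bool('SQLALCHEMY_ECHO')
    SQLALCHEMY_TRACK_MODIFICATIONS = env_bool('SQLALCHEMY_TRACK_MODIFICATIONS')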

Connect Celery (Django) to RabbitMQ using SSL/TLS

This question is related to 49320158; I'll try to provide more details here.
I am trying to follow the tutorial First Steps with Django, but I need to add TLS/SSL to be able to connect to my RabbitMQ server v3.7.4.
I have tested my certificates with pika 11.2 and I am able to connect.
But Celery is not able to connect, and RabbitMQ says 'no peer certificate'.
How do I specify, or make sure, that Celery sends out the certificates?
My settings.py celery settings only (django):
# celery settings
SSL_DIR = os.path.normpath(os.path.join(BASE_DIR, '../../ssl/client'))
CELERY_BROKER_USE_SSL = {
    'keyfile': SSL_DIR + '/user-key.pem',
    'certfile': SSL_DIR + '/user-cert.pem',
    'ca_certs': SSL_DIR + '/default_cacert.pem',
    'cert_reqs': ssl.CERT_REQUIRED
}
CELERY_BROKER_LOGIN_METHOD = "EXTERNAL"
CELERY_BROKER_URL = 'amqps://user@rabbitmqserver/vhost'
My celery.py:
from __future__ import absolute_import, unicode_literals
import os
import ssl

from celery import Celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'webui.settings')

app = Celery('webui')
app.config_from_object('django.conf:settings',
                       silent=False, force=True, namespace='CELERY')

PROJ_DIR = os.path.dirname(os.path.dirname(__file__))
BASE_DIR = os.path.normpath(os.path.join(PROJ_DIR, '../../ssl/client'))

cert_conf = {
    "ca_certs": BASE_DIR + "default-cacert.pem",
    "certfile": BASE_DIR + "user-cert.pem",
    "keyfile": BASE_DIR + "user-key.pem",
    "cert_reqs": ssl.CERT_REQUIRED
}

# try manually setting the BROKER_USE_SSL
app.conf.update(BROKER_USE_SSL=cert_conf)

# try enabling message signing, too
app.conf.update(
    security_key=BASE_DIR + 'user-key.pm',
    security_certificate=BASE_DIR + 'user-cert.pem',
    security_cert_store=BASE_DIR + '*.pem',
)
app.setup_security()

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()


@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
celery stack trace:
[2018-06-22 12:04:07,628: CRITICAL/MainProcess] Unrecoverable error: AccessRefused(403, u'ACCESS_REFUSED - Login was refused using authentication mechanism EXTERNAL. For details see the broker logfile.', (0, 0), u'')
Traceback (most recent call last):
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/celery/worker/worker.py", line 205, in start
self.blueprint.start(self)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/celery/bootsteps.py", line 369, in start
return self.obj.start()
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/celery/worker/consumer/consumer.py", line 322, in start
blueprint.start(self)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/celery/bootsteps.py", line 119, in start
step.start(parent)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/celery/worker/consumer/connection.py", line 23, in start
c.connection = c.connect()
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/celery/worker/consumer/consumer.py", line 409, in connect
conn = self.connection_for_read(heartbeat=self.amqheartbeat)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/celery/worker/consumer/consumer.py", line 416, in connection_for_read
self.app.connection_for_read(heartbeat=heartbeat))
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/celery/worker/consumer/consumer.py", line 440, in ensure_connected
callback=maybe_shutdown,
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/kombu/connection.py", line 405, in ensure_connection
callback)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/kombu/utils/functional.py", line 332, in retry_over_time
return fun(*args, **kwargs)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/kombu/connection.py", line 261, in connect
return self.connection
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/kombu/connection.py", line 802, in connection
self._connection = self._establish_connection()
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/kombu/connection.py", line 757, in _establish_connection
conn = self.transport.establish_connection()
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/kombu/transport/pyamqp.py", line 130, in establish_connection
conn.connect()
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/amqp/connection.py", line 308, in connect
self.drain_events(timeout=self.connect_timeout)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/amqp/connection.py", line 491, in drain_events
while not self.blocking_read(timeout):
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/amqp/connection.py", line 497, in blocking_read
return self.on_inbound_frame(frame)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/amqp/method_framing.py", line 55, in on_frame
callback(channel, method_sig, buf, None)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/amqp/connection.py", line 501, in on_inbound_method
method_sig, payload, content,
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/amqp/abstract_channel.py", line 128, in dispatch_method
listener(*args)
File "/home/username/.virtualenvs/edi/local/lib/python2.7/site-packages/amqp/connection.py", line 623, in _on_close
(class_id, method_id), ConnectionError)
AccessRefused: (0, 0): (403) ACCESS_REFUSED - Login was refused using authentication mechanism EXTERNAL. For details see the broker logfile.
rabbitmq.conf:
listeners.ssl.default = 0.0.0.0:5671
ssl_options.cacertfile = /etc/rabbitmq/ssl/server/default-cacert.pem
ssl_options.certfile = /etc/rabbitmq/ssl/server/rabbitmqserver-cert.pem
ssl_options.keyfile = /etc/rabbitmq/ssl/server/rabbitmqserver-key.pem
ssl_options.verify = verify_peer
ssl_options.fail_if_no_peer_cert = false
ssl_options.depth = 2
ssl_options.versions.1 = tlsv1.2
ssl_options.versions.2 = tlsv1.1
ssl_options.honor_cipher_order = true
ssl_options.honor_ecc_order = true
ssl_options.secure_renegotiate = true
ssl_cert_login_from = common_name
auth_mechanisms.1 = PLAIN
auth_mechanisms.2 = AMQPLAIN
auth_mechanisms.3 = EXTERNAL
log.syslog.level = info
log.file.level = info
RabbitMQ Log:
2018-06-22 20:04:07.604 [info] <0.22240.0> accepting AMQP connection <0.22240.0> (192.168.56.1:43780 -> 192.168.56.252:5671)
2018-06-22 20:04:07.607 [error] <0.22240.0> Error on AMQP connection <0.22240.0> (192.168.56.1:43780 -> 192.168.56.252:5671, state: starting):
EXTERNAL login refused: no peer certificate
2018-06-22 20:04:07.608 [info] <0.22240.0> closing AMQP connection <0.22240.0> (192.168.56.1:43780 -> 192.168.56.252:5671)

ACCESS_REFUSED - Login was refused using authentication mechanism AMQPLAIN. For details see the broker logfile

I'm getting this error when I try to run celery -A draft1 beat in my terminal.
Traceback (most recent call last):
File "/home/james/postr/env/lib/python3.5/site-packages/celery/apps/beat.py", line 107, in start_scheduler
service.start()
File "/home/james/postr/env/lib/python3.5/site-packages/celery/beat.py", line 558, in start
interval = self.scheduler.tick()
File "/home/james/postr/env/lib/python3.5/site-packages/celery/beat.py", line 279, in tick
self.apply_entry(entry, producer=self.producer)
File "/home/james/postr/env/lib/python3.5/site-packages/kombu/utils/objects.py", line 44, in __get__
value = obj.__dict__[self.__name__] = self.__get(obj)
File "/home/james/postr/env/lib/python3.5/site-packages/celery/beat.py", line 411, in producer
return self.Producer(self._ensure_connected(), auto_declare=False)
File "/home/james/postr/env/lib/python3.5/site-packages/celery/beat.py", line 395, in _ensure_connected
_error_handler, self.app.conf.broker_connection_max_retries
File "/home/james/postr/env/lib/python3.5/site-packages/kombu/connection.py", line 405, in ensure_connection
callback)
File "/home/james/postr/env/lib/python3.5/site-packages/kombu/utils/functional.py", line 333, in retry_over_time
return fun(*args, **kwargs)
File "/home/james/postr/env/lib/python3.5/site-packages/kombu/connection.py", line 261, in connect
return self.connection
File "/home/james/postr/env/lib/python3.5/site-packages/kombu/connection.py", line 802, in connection
self._connection = self._establish_connection()
File "/home/james/postr/env/lib/python3.5/site-packages/kombu/connection.py", line 757, in _establish_connection
conn = self.transport.establish_connection()
File "/home/james/postr/env/lib/python3.5/site-packages/kombu/transport/pyamqp.py", line 130, in establish_connection
conn.connect()
File "/home/james/postr/env/lib/python3.5/site-packages/amqp/connection.py", line 288, in connect
self.drain_events(timeout=self.connect_timeout)
File "/home/james/postr/env/lib/python3.5/site-packages/amqp/connection.py", line 471, in drain_events
while not self.blocking_read(timeout):
File "/home/james/postr/env/lib/python3.5/site-packages/amqp/connection.py", line 477, in blocking_read
return self.on_inbound_frame(frame)
File "/home/james/postr/env/lib/python3.5/site-packages/amqp/method_framing.py", line 55, in on_frame
callback(channel, method_sig, buf, None)
File "/home/james/postr/env/lib/python3.5/site-packages/amqp/connection.py", line 481, in on_inbound_method
method_sig, payload, content,
File "/home/james/postr/env/lib/python3.5/site-packages/amqp/abstract_channel.py", line 128, in dispatch_method
listener(*args)
File "/home/james/postr/env/lib/python3.5/site-packages/amqp/connection.py", line 603, in _on_close
(class_id, method_id), ConnectionError)
amqp.exceptions.AccessRefused: (0, 0): (403) ACCESS_REFUSED - Login was refused using authentication mechanism AMQPLAIN. For details see the broker logfile.
I'm running celery on my remote Ubuntu django server.
Any idea what the problem is?
Here's my code:
settings
from celery.schedules import crontab

CELERYBEAT_SCHEDULE = {
    'post_jobs': {
        'task': 'post.tasks.post_jobs',  # the same goes in the task name
        'schedule': crontab(minute=40),
    },
    'test_post': {
        'task': 'post.tasks.test_post',
        'schedule': crontab(minute=40),
    }
}
draft1/celery.py
import os
from celery import Celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'draft1.settings')
app = Celery("draft1", broker=CELERY_BROKER_URL)  # CELERY_BROKER_URL is defined elsewhere in the project
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
post/celery.py
import os
from celery import Celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'draft1.settings')
app = Celery(broker=CELERY_BROKER_URL)  # CELERY_BROKER_URL is defined elsewhere in the project
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
post/tasks.py
from django.http import HttpResponseRedirect


@app.task
def test_post():
    from .models import Post
    for i in Post.objects.all():
        if i.entered_category == "test":
            i.entered_category = "not_test"
            i.save()
    return HttpResponseRedirect('/')
postr-celery.conf
[program:postr-celery]
command=/home/james/postr/env/bin/celery -A post worker --loglevel=INFO
directory=/home/james/postr
user=james
numprocs=1
stdout_logfile=/var/log/supervisor/celery.log
stderr_logfile=/var/log/supervisor/celery.log
autostart=true
autorestart=true
startsecs=10
; Need to wait for currently executing tasks to finish at shutdown.
; Increase this if you have very long running tasks.
stopwaitsecs = 600
stopasgroup=true
; Set Celery priority higher than default (999)
; so, if rabbitmq is supervised, it will start first.
priority=1000
I'm not sure what sparked this error; my Celery setup was working recently. Any idea what the problem is?
There is a good chance that a software update of the SSL components caused the problem. I found my issue in the /var/log/rabbitmq/rabbit@host.log file.
In my case, I found the following, and to resolve it I had to re-install my RabbitMQ / Celery.
/lib/erlang/lib/crypto-4.2/priv/lib/crypto: 'libcrypto.so.1.0.0: cannot open shared object file: No such file
A test with rabbitmqctl list_users and rabbitmqctl add_user can help determine whether rabbitmqctl is installed correctly while you monitor the logs.
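If you also want to check connectivity from the Python side, one option is a quick sanity check with kombu (which ships with Celery), opening a connection with the same broker URL your Celery app uses. This is only a sketch; the URL below is a placeholder:

from kombu import Connection

# Placeholder URL -- substitute the broker URL your Celery app is configured with.
broker_url = 'amqp://guest:guest@localhost:5672//'

with Connection(broker_url, connect_timeout=5) as conn:
    # ensure_connection() raises straight away if the credentials or the broker
    # itself are the problem, mirroring the AccessRefused error above.
    conn.ensure_connection(max_retries=1)
    print('Broker connection OK')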

Django, Celery Supervisor: Secret Key Error: django.core.exceptions.ImproperlyConfigured: The SECRET_KEY setting must not be empty

I have a Django 1.11.4, Python 3.5, Gunicorn 19.7.1 server with Supervisor, Redis 2.10.6 and Celery 4.1.0.
My File structure is like so:
samy_python
├── celery.py
├── __init__.py
├── settings
├── urls.py
├── wsgi.py
website
├── static
├── templates
├── samy
| ├── samy_firebase.py
| ├── tasks.py
├── views.py
I get my secret with:
# settings.py
SECRET_KEY = os.environ.get("secret_KEY")
My supervisor conf file for celery is:
[program:gunicorn]
....
environment = secret_KEY="12345"
.....
[program:celery]
directory=/home/username/Projects/samy/samy_python
command=/home/username/Projects/samy/samy_python/env/bin/celery --app=samy_python.celery.app worker -B -l info
user=username
stdout_logfile=/var/log/celery/celery.log
stderr_logfile=/var/log/celery/celery.log
autostart=false
autorestart=false
startsecs=10
My Celery File:
# celery.py
from __future__ import absolute_import
import os
import logging
logger = logging.getLogger('myapp.celery.py')
from celery import Celery
from django.conf import settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "samy_python.settings")
# Celery App
# TODO
# [START Celery App]
app = Celery('samy_python')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
# [END Celery App]
My init file:
# __init__.py
from __future__ import absolute_import
from .celery import app as celery_app
The Configuration in my settings.py file for celery:
BROKER_URL = 'redis://localhost:6379/1'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/1'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
Where the error could be coming from: in my settings.py file I use an env variable to set the environment (Production, Dev, or Test):
DEBUG = False
DJANGO_TESTING = False
DJANGO_DEVELOPMENT = False

DJANGO_ENV = os.environ.get('DJANGO_ENV')
if DJANGO_ENV == 'DJANGO_TESTING':
    DJANGO_TESTING = True
elif DJANGO_ENV == 'DJANGO_DEVELOPMENT':
    DJANGO_DEVELOPMENT = True
Then I set the Firebase credentials and Google API credentials. I then read the credentials from within, for example, samy_firebase.py with:
from django.conf import settings

if settings.DJANGO_TESTING == True:
    logger.info('Firebase Config: Test DB')
I'm not sure if the last part is relevant or not, but here is the error that I keep receiving:
Traceback (most recent call last):
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/kombu/utils/objects.py", line 42, in __get__
return obj.__dict__[self.__name__]
KeyError: 'data'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/dorian/Projects/samy/samy_python/env/bin/celery", line 11, in <module>
sys.exit(main())
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/__main__.py", line 14, in main
_main()
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/bin/celery.py", line 326, in main
cmd.execute_from_commandline(argv)
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/bin/celery.py", line 488, in execute_from_commandline
super(CeleryCommand, self).execute_from_commandline(argv)))
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/bin/base.py", line 281, in execute_from_commandline
return self.handle_argv(self.prog_name, argv[1:])
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/bin/celery.py", line 480, in handle_argv
return self.execute(command, argv)
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/bin/celery.py", line 412, in execute
).run_from_argv(self.prog_name, argv[1:], command=argv[0])
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/bin/worker.py", line 219, in run_from_argv
*self.parse_options(prog_name, argv, command))
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/bin/base.py", line 403, in parse_options
self.parser = self.create_parser(prog_name, command)
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/bin/base.py", line 419, in create_parser
self.add_arguments(parser)
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/bin/worker.py", line 275, in add_arguments
default=conf.worker_state_db,
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/utils/collections.py", line 130, in __getattr__
return self[k]
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/utils/collections.py", line 431, in __getitem__
return getitem(k)
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/utils/collections.py", line 280, in __getitem__
return mapping[_key]
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/collections/__init__.py", line 982, in __getitem__
if key in self.data:
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/kombu/utils/objects.py", line 44, in __get__
value = obj.__dict__[self.__name__] = self.__get(obj)
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/app/base.py", line 148, in data
return self.callback()
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/app/base.py", line 911, in _finalize_pending_conf
conf = self._conf = self._load_config()
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/app/base.py", line 921, in _load_config
self.loader.config_from_object(self._config_source)
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/loaders/base.py", line 133, in config_from_object
self._conf = force_mapping(obj)
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/celery/utils/collections.py", line 50, in force_mapping
if isinstance(m, (LazyObject, LazySettings)):
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/django/utils/functional.py", line 238, in inner
self._setup()
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/django/conf/__init__.py", line 41, in _setup
self._wrapped = Settings(settings_module)
File "/home/dorian/Projects/samy/samy_python/env/lib/python3.5/site-packages/django/conf/__init__.py", line 129, in __init__
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
django.core.exceptions.ImproperlyConfigured: The SECRET_KEY setting must not be empty.
Also, gunicorn works just fine, so it's receiving the environment variable; it's only the celery program that fails to start. Also, if I set the key to "12345" directly in the settings.py file, it starts.
Set the SECRET_KEY in the environment like this:
export SECRET_KEY="#mysecret_key"
And to check:
echo $SECRET_KEY
And in your Django settings, change to:
SECRET_KEY = os.environ.get("SECRET_KEY")
For detailed reference check: Read and Set Environment Variables
Add the environment variable to the [program:celery] section of the supervisor conf file. At the moment it's only set for gunicorn.
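For illustration, a minimal sketch of what that section could look like, reusing the environment line the [program:gunicorn] section already has (the "12345" value is the placeholder from the question, the variable name matches what settings.py reads, and only the relevant lines are shown):
[program:celery]
directory=/home/username/Projects/samy/samy_python
command=/home/username/Projects/samy/samy_python/env/bin/celery --app=samy_python.celery.app worker -B -l info
environment = secret_KEY="12345"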

Python Flask: RQ Worker raising KeyError because of environment variable

I'm trying to setup a redis queue and a worker to process the queue with my flask app. I'm implementing this to handle a task that sends emails. I'm a little confused because it appears that the stack trace is saying that my 'APP_SETTINGS' environment variable is not set when it is in fact set.
Prior to starting up the app, redis or the worker, I set APP_SETTINGS:
export APP_SETTINGS="project.config.DevelopmentConfig"
However, when an item gets added to the queue, here's the stack trace:
17:00:00 *** Listening on default...
17:00:59 default: project.email.sendMailInBG(<flask_mail.Message object at 0x7fc930e1c3d0>) (aacf9546-5558-4db8-9232-5f36c25d521b)
17:01:00 KeyError: 'APP_SETTINGS'
Traceback (most recent call last):
File "/home/tony/pyp-launch/venv/local/lib/python2.7/site-packages/rq/worker.py", line 588, in perform_job
rv = job.perform()
File "/home/tony/pyp-launch/venv/local/lib/python2.7/site-packages/rq/job.py", line 498, in perform
self._result = self.func(*self.args, **self.kwargs)
File "/home/tony/pyp-launch/venv/local/lib/python2.7/site-packages/rq/job.py", line 206, in func
return import_attribute(self.func_name)
File "/home/tony/pyp-launch/venv/local/lib/python2.7/site-packages/rq/utils.py", line 150, in import_attribute
module = importlib.import_module(module_name)
File "/usr/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/home/tony/pyp-launch/project/__init__.py", line 24, in <module>
app.config.from_object(os.environ['APP_SETTINGS'])
File "/home/tony/pyp-launch/venv/lib/python2.7/UserDict.py", line 40, in __getitem__
raise KeyError(key)
KeyError: 'APP_SETTINGS'
17:01:00 Moving job to u'failed' queue
17:01:00
17:01:00 *** Listening on default...
email.py
from flask.ext.mail import Message

from project import app, mail
from redis import Redis
from rq import use_connection, Queue

q = Queue(connection=Redis())


def send_email(to, subject, template, emailable):
    if emailable == True:
        msg = Message(
            subject,
            recipients=[to],
            html=template,
            sender=app.config['MAIL_DEFAULT_SENDER']
        )
        q.enqueue(sendMailInBG, msg)
    else:
        print("no email sent, emailable set to: " + str(emailable))


def sendMailInBG(msgContent):
    with app.test_request_context():
        mail.send(msgContent)
worker.py
import os

import redis
from rq import Worker, Queue, Connection

listen = ['default']

redis_url = os.getenv('REDISTOGO_URL', 'redis://localhost:6379')
conn = redis.from_url(redis_url)

if __name__ == '__main__':
    with Connection(conn):
        worker = Worker(list(map(Queue, listen)))
        worker.work()
I'd really appreciate another set of eyes on this. I can't for the life of me figure out what's going on here.
Thanks to the prompting of @danidee, I discovered that environment variables need to be defined in each terminal. Hence, APP_SETTINGS was defined for the actual app, but not for the worker.
The solution was to set APP_SETTINGS in the worker's terminal before starting it.
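Alternatively, you can make the app tolerant of a missing variable by giving from_object a default. This is just a sketch around the line shown in the traceback; the default used here is the config path exported in the question:

import os
from flask import Flask

app = Flask(__name__)
# Fall back to a default config module when APP_SETTINGS is not exported
# in the shell that launched this process (e.g. the RQ worker's terminal).
app.config.from_object(os.environ.get('APP_SETTINGS', 'project.config.DevelopmentConfig'))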
