Problem generating a log file at a specific path in Greengrass - Python

I am trying to generate a log file in a specific folder and path in Greengrass v2; however, the log file is created in the current directory.
The current directory in which the log file is generated is
/sim/things/t1_gateway_iotgateway_1234/greengrass/packages/artifacts-unarchived/com.data.iot.RulesEngineCore/2.3.1-pp.38/package
Could you please help me figure out what I am missing?
The following is my program.
import logging
from datetime import datetime
import os, sys
from logging.handlers import RotatingFileHandler


def getStandardStdOutHandler():
    formatter = logging.Formatter(
        fmt="[%(asctime)s][%(levelname)-7s][%(name)s] %(message)s (%(threadName)s[%(thread)d]:%(module)s:%(funcName)s:%(lineno)d)"
    )
    filename = datetime.now().strftime("rule_engine_%Y_%m_%d_%H_%M.log")
    path = "/sim/things/t1_gateway_iotgateway_1234/greengrass/logs/"
    _handler = RotatingFileHandler(path + filename, maxBytes=1000000, backupCount=5)
    _handler.setLevel(logging.DEBUG)
    _handler.setFormatter(formatter)
    return _handler


def getLogger(name: str):
    logger = logging.getLogger(name)
    logger.addHandler(getStandardStdOutHandler())
    return logger

We have figured out this issue. The following is a complete program that addresses the path issue.
import logging
from logging.handlers import RotatingFileHandler
from datetime import datetime
import os
from sys import stdout

# rule engine log file name
filename = datetime.now().strftime("rule_engine_%Y_%m_%d_%H_%M.log")


def configure_log_file_path():
    GATEWAY_ID = 't1_gateway_iotgateway_1234'
    sim_dir = '/sim/things/' + GATEWAY_ID + '/greengrass/logs'
    virtual_gateway = '/opt/greengrass/v2/logs'
    default_directory = os.path.expanduser('~/logs/')
    if os.path.exists(sim_dir):
        print(f"{sim_dir} exists")
        log_directory = '/sim/things/' + GATEWAY_ID + '/greengrass/logs/'
    elif os.path.exists(virtual_gateway):
        print(f"{virtual_gateway} exists")
        log_directory = '/opt/greengrass/v2/logs/'
    else:
        if os.path.exists(default_directory):
            log_directory = default_directory
        else:
            os.makedirs(default_directory)
            print(f"{default_directory} created successfully")
            log_directory = default_directory
    file_path = os.path.join(log_directory, filename)
    return file_path


# handler to catch errors and warnings on stdout
def err_warning_handler():
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    err_warning_handler = logging.StreamHandler(stream=stdout)
    err_warning_handler.setLevel(logging.WARNING)
    err_warning_handler.setFormatter(formatter)
    return err_warning_handler


def getStandardStdOutHandler():
    # create a logging format
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # rotating file handler
    file_handler = RotatingFileHandler(configure_log_file_path(), maxBytes=100000, backupCount=5)
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(formatter)
    return file_handler


# create a logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(getStandardStdOutHandler())
logger.addHandler(err_warning_handler())

# log some messages
logger.info("Application started")
logger.debug("Debug message")
logger.warning("Warning message")
logger.error("Error message")

Getting the path right can be a hassle; I prefer the simple technique of adding r before the path in quotes (a raw string).
Do check this:
path_folder = r"/sim/things/t1_gateway_iotgateway_1234/greengrass/logs/"
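For completeness, here is a tiny sketch of how that raw-string directory could be combined with the dated filename from the question (illustrative only, not from the thread):

import os
from datetime import datetime
from logging.handlers import RotatingFileHandler

log_dir = r"/sim/things/t1_gateway_iotgateway_1234/greengrass/logs/"
log_name = datetime.now().strftime("rule_engine_%Y_%m_%d_%H_%M.log")
# os.path.join avoids worrying about trailing slashes in the directory
handler = RotatingFileHandler(os.path.join(log_dir, log_name), maxBytes=1000000, backupCount=5)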

Related

Python Logger is not working to write to file?

I'm trying to create a logger file inside a directory using the code below:
def createLogger(name, log_path=None):
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    logger.setLevel(logging.ERROR)
    logger.setLevel(logging.INFO)
    logger.setLevel(logging.CRITICAL)
    formatter = logging.Formatter("%(asctime)s - %(levelname)-8s - %(message)s")
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)
    if log_path is not None:
        file_handler = logging.FileHandler(log_path)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    return logger


name = "temp_logs"
file = open(os.path.join(f'{current_date}', f'{name}_{current_timestamp}.txt'), 'w')
log_path = f"{current_date}/{name}_{current_timestamp}.txt"
logger = createLogger(name=name, log_path=log_path)
write_message = logger.debug('This is a test file')
file.writelines(str(write_message))
file.close()
However, it just writes None as the message inside the file.
Am I missing anything here? Please help.
Several issues. setLevel sets a threshold. When called multiple times, the last one wins (i.e. CRITICAL). You will not see any debug messages, warnings or errors.
Also, you are writing to the log file directly. What's the point of setting a logger with a formatter and then bypassing it?
Try this:
Leave only this one setLevel call: logger.setLevel(logging.DEBUG) in createLogger() and then:
name = "temp_logs"
logger = createLogger(name = name ,log_path = './test.log')
logger.debug('This is a debug message')
logger.error('This is an error')
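Applied to the original createLogger, the corrected version might look like the sketch below (my reading of this advice, not code posted in the thread):

import logging

def createLogger(name, log_path=None):
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)  # single threshold; the handlers see everything from DEBUG up
    formatter = logging.Formatter("%(asctime)s - %(levelname)-8s - %(message)s")
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)
    if log_path is not None:
        file_handler = logging.FileHandler(log_path)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    return logger

logger = createLogger(name="temp_logs", log_path='./test.log')
logger.debug('This is a debug message')  # now reaches both the console and test.log
logger.error('This is an error')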

Logging and exporting conf to other modules

Trying to level up my DevOps skills, I add logging where I want/need it in my code.
By reading an environment variable I can choose whether I want DEBUG/INFO logs (dev) on standard output or WARNING and above (prod) in a file.
But in Python I couldn't find how to set up a logger configuration once (in the main file?) and use it across the whole project without having to rewrite everything or pass the logging object around everywhere. I'm pretty sure I'm missing something.
EDIT : I made a log.py file that looks like this
import os
import logging
from dotenv import load_dotenv
from utils import get_timestamp


def get_logger():
    load_dotenv(".env")
    env_dev = os.getenv('ENV_DEV', "development")
    logger = logging.getLogger(__name__)
    log_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    if env_dev == "prod":
        handler = logging.FileHandler(f'log/{get_timestamp("%Y%m%d")}_app.log')
        handler.setLevel(logging.WARNING)
        handler.setFormatter(log_format)
        logger.addHandler(handler)
    else:  # DEV
        handler = logging.StreamHandler()
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(log_format)
        logger.addHandler(handler)
    return logger
And I use it like:
from log.logging import get_logger
# On Dev env
logger = get_logger()
logger.info("Do stuff")
...
But I get no error and no log output in my terminal.
You don't need to pass the logging object around. Once you configure a named logger, it is globally available. So in your main file you set up the logger, and in all other places you just retrieve and use it.
main file
import logging

logger = logging.getLogger('mylog')
if debug:
    logger.setLevel(logging.DEBUG)
logger.addHandler(...)
# do all your setup
logger.info("log that")  # use logger

other file
import logging

logger = logging.getLogger('mylog')
logger.info("log this")  # use logger, it is already configured

Python3 logging

I'm trying to create my own logger in Python 3.6.8 to send output both to stdout and a log file, chosen by date: if the log file for today's date doesn't exist yet it gets created; if there is already a file for the same date, it just appends.
from datetime import date
import logging
import logging.handlers


class Log:
    def __init__(self):
        pass

    def getCleanerLogger(self, moduleName, logFolder, format):
        filename = logFolder + str(date.today()) + '-log.log'
        handler = logging.FileHandler(filename)
        shandler = logging.StreamHandler()
        shandler.setLevel(logging.INFO)
        handler.setLevel(logging.DEBUG)
        formatter = logging.Formatter(format)
        handler.setFormatter(formatter)
        shandler.setFormatter(formatter)
        logger = logging.getLogger(moduleName)
        logger.addHandler(handler)
        logger.addHandler(shandler)
        print("I've been called")
        return logger


import Conf

conf = Conf.configuration()
print(conf['logFolder'] + " " + conf['logFormat'])
logger = Log()
logger = logger.getCleanerLogger("Log", conf['logFolder'], conf['logFormat'])
logger.info('initializing')
logger.debug('initializing debug')
In the JSON conf file, these are the keys I load:
"logFolder": "log/",
"logFormat": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
The log file gets created with the correct logic, but there is no logging in either the console or the log file; only the prints go to stdout, and no error or exception is raised. I really don't understand why this isn't working. I can only log with logging.root.level('msg') after loading a basicConfig.
Every handler has its own logging level, but the logger also has a global logging level which takes priority, so you have to change this level to one that doesn't block the handlers, i.e.:
logger.setLevel(logging.DEBUG)
Minimal working code with a few smaller changes:
It doesn't use a settings file, so everyone can easily copy and run it.
from datetime import date
import os
import logging
import logging.handlers


class Log:

    def get_cleaner_logger(self, module_name, log_folder, format):
        if not os.path.exists(log_folder):
            os.makedirs(log_folder)

        filename = os.path.join(log_folder, date.today().strftime('%Y-%m-%d-log.log'))
        print(filename)

        logger = logging.getLogger(module_name)

        print('before:', logger.level)
        logger.setLevel(logging.DEBUG)
        print('after:', logger.level)

        formatter = logging.Formatter(format)

        handler = logging.FileHandler(filename)
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

        shandler = logging.StreamHandler()
        shandler.setLevel(logging.INFO)
        shandler.setFormatter(formatter)
        logger.addHandler(shandler)

        print("I've been called")
        return logger


conf = {
    "logFolder": "log/",
    "logFormat": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
}

logger = Log()
logger = logger.get_cleaner_logger("Log", conf['logFolder'], conf['logFormat'])
logger.info('initializing')
logger.debug('initializing debug')
BTW: I changed some names based on PEP 8 -- Style Guide for Python Code
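To see the point about the logger-level gate in isolation, here is a tiny self-contained sketch (my own addition, not from the thread):

import logging

demo = logging.getLogger('level_demo')
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)   # the handler would accept DEBUG records...
demo.addHandler(handler)

demo.debug('dropped')             # ...but the logger's inherited WARNING level rejects it first
demo.setLevel(logging.DEBUG)
demo.debug('now it appears')      # only this line is printed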

How to log from PySpark app to HDFS using python logging library

How can I log to HDFS using the standard logging library? More specifically, I am using TimedRotatingFileHandler like this:
import logging
from logging.handlers import TimedRotatingFileHandler

FORMATTER = logging.Formatter("%(asctime)s — %(name)s — %(levelname)s — %(message)s")
LOG_FILE = 'hdfs://node01:8022/tmp/test/run_dev.log'


def get_file_handler():
    file_handler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
    file_handler.setFormatter(FORMATTER)
    return file_handler


def get_logger(logger_name):
    logger = logging.getLogger(logger_name)
    if logger.hasHandlers():
        logger.handlers.clear()
    logger.addHandler(get_file_handler())
    logger.propagate = False
    return logger
However, it seems that I cannot pass a fully qualified HDFS path as I was expecting. I am getting:
FileNotFoundError: [Errno 2] No such file or directory: '/home/bigdatamgr/hdfs:/node01:8022/tmp/test/run_dev.log'
Is there some way to achieve this functionality?
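The thread leaves this one unanswered. As a general note, the standard file handlers only understand local filesystem paths, which is why the URL gets resolved relative to the home directory. One possible workaround, sketched here purely as an assumption and not taken from the thread, is a custom handler that forwards each formatted record to HDFS through the hdfs dfs -appendToFile command (slow, one CLI call per record, and no rotation):

import logging
import subprocess


class HdfsAppendHandler(logging.Handler):
    """Hypothetical handler: pipe each formatted record to `hdfs dfs -appendToFile - <path>`."""

    def __init__(self, hdfs_path):
        super().__init__()
        self.hdfs_path = hdfs_path

    def emit(self, record):
        try:
            line = self.format(record) + "\n"
            # '-' tells the CLI to read the data to append from stdin
            subprocess.run(
                ["hdfs", "dfs", "-appendToFile", "-", self.hdfs_path],
                input=line.encode("utf-8"),
                check=True,
            )
        except Exception:
            self.handleError(record)


logger = logging.getLogger("hdfs_demo")
logger.addHandler(HdfsAppendHandler("/tmp/test/run_dev.log"))
logger.warning("written to HDFS")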

RotatingFileHandler with console logging

I've written an init function to configure the Python logging module to log to a file and the console. I want to limit the log file size using RotatingFileHandler. The code below doesn't cause any errors and does everything I want, except that it doesn't rotate the logs. I set a low file size to test things out.
How can I configure it to use rotating logs and the console with different formats, like below?
import logging, logging.handlers

LOG_LEVEL = logging.DEBUG
CONSOLE_LEVEL = logging.DEBUG


def init_logger(fullpath, console_level=CONSOLE_LEVEL, log_level=LOG_LEVEL):
    """
    Setup the logger object

    Args:
        fullpath (str): full path to the log file
    """
    logging.basicConfig(level=LOG_LEVEL,
                        format='%(asctime)s %(threadName)-10s %(name)-12s %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S',
                        filename=fullpath,
                        filemode='w')
    _logger = logging.getLogger('_root')
    _logger.setLevel(log_level)
    log_handler = logging.handlers.RotatingFileHandler(filename=fullpath,
                                                       maxBytes=50, backupCount=10)
    log_handler.setLevel(log_level)
    _logger.addHandler(log_handler)
    console = logging.StreamHandler()
    console.setLevel(console_level)
    # set a format which is simpler for console use
    formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
    # tell the handler to use this format
    console.setFormatter(formatter)
    # add the handler to the root logger
    logging.getLogger('').addHandler(console)
    logging.debug("Creating log file")
This is your logging's structure:
---root <- I mean the real root, which you get from logging.getLogger("")
------NormalFileHandler <- configured by logging.basicConfig; this handler won't rotate the file
------StreamHandler
------_root <- your own "root" logger, although I don't understand why you want this
---------RotatingFileHandler <- this one will rotate the file
And then you use logging.debug("Creating log file"), which is the same as calling debug on the root logger: logging.getLogger().debug(...). So this record is passed to the StreamHandler and the NormalFileHandler.
That's why the file doesn't get rotated.
The correct configuration should be:
def init_logger(fullpath, console_level=CONSOLE_LEVEL, log_level=LOG_LEVEL):
    """
    Setup the logger object

    Args:
        fullpath (str): full path to the log file
    """
    logger = logging.getLogger('YourLogger')
    logger.setLevel(log_level)
    log_handler = logging.handlers.RotatingFileHandler(filename=fullpath,
                                                       maxBytes=50, backupCount=10)
    log_handler.setLevel(log_level)
    formatter = logging.Formatter('%(asctime)s %(threadName)-10s %(name)-12s %(levelname)-8s %(message)s')
    log_handler.setFormatter(formatter)
    logger.addHandler(log_handler)
    console = logging.StreamHandler()
    console.setLevel(console_level)
    formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
    console.setFormatter(formatter)
    logger.addHandler(console)  # Or you can add it to the root logger, but it is not recommended; use your own logger instead of the root logger, or it will cause some problems.
    logger.debug("Creating log file")
And then when you want to use the logger, you should use:
logger = logging.getLogger('YourLogger')
logger.info(...)
logger.debug(...)
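If you want to verify that rotation really kicks in with the tiny maxBytes=50 test value, a quick check along these lines (my own addition, assuming the LOG_LEVEL/CONSOLE_LEVEL constants from the question are defined in the same module) is:

import glob
import logging

init_logger('./rotate_test.log')   # the corrected function above, logger name 'YourLogger'
logger = logging.getLogger('YourLogger')
for i in range(100):               # each formatted record exceeds maxBytes=50, forcing rollovers
    logger.debug("message %d", i)

# expect rotate_test.log plus backups rotate_test.log.1 ... rotate_test.log.10
print(sorted(glob.glob('./rotate_test.log*')))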
