How to define all environment variables as global variables? - python

I am looking to port a bash script that makes heavy use of environment variables to a python script. For a quick sanity check, I would like to follow a similar approach and define all environment variables (or a selection of them) as python global variables.
Is there a better way of doing this besides defining each variable one by one using VAR = os.getenv('VAR').
Update: I think this will do it. Contents of def_vars.py
#!/usr/bin/env python3
import os
def define_env_vars(env_vars=None):
    """Define environment variables as global variables of *this* module.

    Args:
        env_vars: list of selected environment variables to import, or None,
            in which case all environment variables are imported.

    Raises:
        KeyError: if a requested name is not present in the environment.

    Returns:
        None

    Note:
        The names are injected into the globals() of the module that defines
        this function, not into the caller's module (see Update 3 below for
        a caller-scoped version).
    """
    if env_vars is None:  # PEP 8: compare against None with "is", not "=="
        env_vars = os.environ
    else:
        env_vars = {k: os.environ[k] for k in env_vars}
    for name, value in env_vars.items():
        globals()[name] = value
# Quick smoke test: pull PWD in from the environment and print it.
selected_vars = ["PWD"]
define_env_vars(selected_vars)
print(PWD)
But using this from another python module doesn't seem to work ??
Update 2 It does work now but only if the variables are prepended with the package path. I want to avoid this too. Contents of test.py
#!/usr/bin/env python3
import def_vars
# from def_vars import * #This doesn't work
env_vars = ["PWD"]
def_vars.define_env_vars(env_vars)
print(def_vars.PWD)
Update 3 This does what I needed it, and puts the variables on the caller module's globals()
#!/usr/bin/env python3
import os
import inspect
def define_env_vars(env_vars=None):
    """Define environment variables as global variables of the caller's module.

    Args:
        env_vars: list of selected environment variables to import, or None,
            in which case all environment variables are imported.

    Raises:
        KeyError: if a requested name is not present in the environment.

    Returns:
        None
    """
    if env_vars is None:  # PEP 8: compare against None with "is", not "=="
        env_vars = os.environ
    else:
        env_vars = {k: os.environ[k] for k in env_vars}
    # Frame 0 is this call, frame 1 is the caller.  inspect.stack() walks the
    # whole stack and is expensive, so call it once instead of once per
    # variable as the original did.
    caller_globals = inspect.stack()[1].frame.f_globals
    for name, value in env_vars.items():
        caller_globals[name] = value

Use operator.itemgetter to fetch the values from os.environ.
from operator import itemgetter
import os

import_vars = ['FOO', 'BAR', 'BAZ']

# itemgetter(*names) returns a single value for one name but a tuple for
# several; normalise to a tuple before zipping, otherwise a one-element
# import_vars would silently zip the name against the *characters* of the
# value.  A missing variable still raises KeyError, as before.
values = itemgetter(*import_vars)(os.environ)
if len(import_vars) == 1:
    values = (values,)
globals().update(zip(import_vars, values))
It won't work if import_vars is empty, but then, if it were empty, you wouldn't be worried about bulk creation of global variables.

Related

How to write DRY code when setting variable names from string literals?

In short, is there a pythonic way to write SETTING_A = os.environ['SETTING_A']?
I want to provide a module environment.py from which I can import constants that are read from environment variables.
Approach 1:
# Approach 1: read each required environment variable explicitly.
import os

try:
    # Fail fast at import time if any required setting is missing.
    SETTING_A = os.environ['SETTING_A']
    SETTING_B = os.environ['SETTING_B']
    SETTING_C = os.environ['SETTING_C']
except KeyError as e:
    # Re-raise with a message that names the missing variable.
    raise EnvironmentError(f'env var {e} is not defined')
Approach 2
# Approach 2: drive the lookups from a tuple of names (DRY).
import os

# Names of the settings to import from the environment.
vs = ('SETTING_A', 'SETTING_B', 'SETTING_C')
try:
    for v in vs:
        # NOTE: at module level locals() is the same dict as globals(), so
        # this does create importable module constants; inside a function
        # writing to locals() would not work reliably.
        locals()[v] = os.environ[v]
except KeyError as e:
    raise EnvironmentError(f'env var {e} is not defined')
Approach 1 repeats the names of the variables, approach 2 manipulates locals and it's harder to see what constants will be importable from the module.
Is there a best practice to this problem?
Use python-decouple to set environment variables.
pip install python-decouple
Now you can create a .ini or .env file and store your variables there.
.env
SETTING_A=value_of_setting_a
SETTING_B=value_of_setting_b
SETTING_C=value_of_setting_c
Now in your settings file you can import the env variables and use them.
# python-decouple's config() reads from .env/.ini first, falling back to
# the real process environment.
from decouple import config

setting_a = config('SETTING_A')
setting_b = config('SETTING_B')
setting_c = config('SETTING_C')
Note that when setting your variables in the .env file you shouldn't put spaces around the "=", and you should load each one with config('VARIABLE_NAME').
You can also use pydantic to achieve this
import os

# NOTE(review): BaseSettings lives in pydantic v1; in pydantic v2 it moved
# to the separate pydantic-settings package -- confirm the installed version.
from pydantic import BaseSettings


class _Settings(BaseSettings):
    """Settings populated from the environment (and optionally a .env file)."""

    SETTING_A: str
    SETTING_B: str
    SETTING_C: str


class SettingsHandler:
    @classmethod  # bug fix: the original wrote "#classmethod", which is a comment, not a decorator
    def generate(cls):
        """Build a _Settings instance, preferring local.env when present.

        Returns:
            _Settings: loaded from local.env next to this file if it
            exists, otherwise from the process environment only.
        """
        root_dir = os.path.dirname(os.path.abspath(__file__))
        env_file_path = f"{root_dir}/local.env"
        # Bug fix: the original tested the undefined name "ini_file_path";
        # use the path actually built above.
        if os.path.exists(env_file_path):
            return _Settings(_env_file=env_file_path, _env_file_encoding="utf-8")
        return _Settings()


SETTINGS = SettingsHandler.generate()
Now you can use this SETTINGS object. Sample usage is written below
print(SETTINGS.SETTING_A)
print(SETTINGS.SETTING_B)
print(SETTINGS.SETTING_C)
Sample env file
SETTING_A="value_of_setting_a"
SETTING_B="value_of_setting_b"
SETTING_C="value_of_setting_c"

Pytest - How to Parameterize tests with multiple scenarios?

I'm using pytest ,boto3 and aws and want to have dynamic assertions with parameterized tests. How to improve this code to only assert on a specific group of subnetids?
import os

import pytest

production_private_ids = ["subnet-08f6d70b65b5cxx38", "subnet-0b6aaaf1ce207xx03", "subnet-0e54fda8f811fxxd8"]
nonproduction_private_ids = ["subnet-11f6xx0b65b5cxx38", "subnet-116aaaf1ce207xx99", "subnet-11xxfda8f811fxx77"]

# Fixes to the original snippet:
#  - the list literals had a stray ")" (syntax error);
#  - "#pytest.mark.parametrize" is a comment -- decorators start with "@",
#    and the decorator's bracket/quote pairing was unbalanced;
#  - two test functions shared one name, so the second silently shadowed
#    the first;
#  - parametrizing with section *names* as strings would assert the string
#    "production_private_ids" is a subnet.
# Select the ID group for the current environment once, then parametrize
# with the IDs themselves so the test only asserts on that group.
_ids = (
    production_private_ids
    if os.environ.get("ENVIRONMENT") == "production"
    else nonproduction_private_ids
)


@pytest.mark.parametrize("subnet", _ids)
def test_sharing_subnets_exist(subnet, accountid):
    """Each expected subnet ID must be present in the team's shared subnets."""
    team_subnet = get_team_subnets(accountid)
    assert subnet in team_subnet
One common practice is to set and read from environment variables to determine which platform you are running from.
For example, in the environment you can have a variable isProduction=1. Then in your code, you can check by os.environ['isProduction'] == '1' (environment variable values are always strings).
You can even save the private ids in the environment for reasons such as safety.
For example in the environment, you can have the following variables on nonproduction
id1="subnet-11f6xx0b65b5cxx38"
id2="subnet-116aaaf1ce207xx99"
id3="subnet-11xxfda8f811fxx77"
And another set on production machine
id1="subnet-08f6d70b65b5cxx38"
id2="subnet-0b6aaaf1ce207xx03"
id3="subnet-0e54fda8f811fxxd8"
in the code you do
import os

# Collect the three subnet IDs exported in this machine's environment;
# a missing one raises KeyError, same as indexing os.environ directly.
private_ids = [os.environ[name] for name in ('id1', 'id2', 'id3')]
So you'll get the configs on each machine. Just make sure in your workflow/testflow the environment variables are correctly sourced.
You can parametrize the tests via metafunc if you need to execute additional logic on parametrization. Example:
import os
import pytest

production_private_ids = [...]
nonproduction_private_ids = [...]


def pytest_generate_tests(metafunc):
    """Parametrize any test that declares a ``subnet`` argument."""
    # Only tests that ask for `subnet` get parametrized here.
    if 'subnet' not in metafunc.fixturenames:
        return
    # Replace with your environment check.
    running_production = os.environ.get('NAME', None) == 'production'
    ids = production_private_ids if running_production else nonproduction_private_ids
    metafunc.parametrize('subnet', ids)


def test_sharing_subnets_exist(subnet, accountid):
    team_subnet = get_team_subnets(accountid)
    assert subnet in team_subnet
Now running pytest ... will check only non-production IDs, while NAME="production" pytest ... will check only production IDs.

How to replace environment variable value in yaml file to be parsed using python script

I need to use environment variable "PATH" in yaml file which needs to be parsed with a script.
This is the environment variable I have set on my terminal:
$ echo $PATH
/Users/abc/Downloads/tbwork
This is my sample.yml:
---
Top: ${PATH}/my.txt
Vars:
- a
- b
When I parse this yaml file with my script, I don't see PATH variables actual value.
This is my script:
import yaml
import os
import sys

# Fixes to the original: Python 2 print statements replaced with the
# Python 3 print() function; the file handle is closed via a context
# manager; and safe_load_all is used instead of load_all, which without an
# explicit Loader is unsafe on untrusted input and warns on modern PyYAML.
with open("sample.yml", "r") as stream:
    for doc in yaml.safe_load_all(stream):
        for k, v in doc.items():
            print(k, "->", v)
        # Blank line between documents (the original's `print "\n",`).
        print()
Output:
Top -> ${PATH}/my.txt
Vars -> ['a', 'b']
Expected output is:
Top -> /Users/abc/Downloads/tbwork/my.txt
Vars -> ['a', 'b']
Can someone help me figuring out the correct way to do it if I am doing it wrong way?
PY-yaml library doesn't resolve environment variables by default. You need to define an implicit resolver that will find the regex that defines an environment variable and execute a function to resolve it.
You can do it through yaml.add_implicit_resolver and yaml.add_constructor. In the code below, you are defining a resolver that will match on ${ env variable } in the YAML value and calling the function path_constructor to look up the environment variable.
import yaml
import re
import os

# Matches ${VAR_NAME} (used at the start of a scalar value).
path_matcher = re.compile(r'\$\{([^}^{]+)\}')


def path_constructor(loader, node):
    """Extract the matched value, expand the env variable, and replace the match."""
    value = node.value
    match = path_matcher.match(value)
    env_var = match.group(1)  # group(1) is the name inside ${...}
    # Bug fix: os.environ.get() returns None for an unset variable, and
    # None + str raises an opaque TypeError.  Fail with a clear message.
    env_value = os.environ.get(env_var)
    if env_value is None:
        raise ValueError(f'environment variable {env_var!r} is not set')
    return env_value + value[match.end():]


# NOTE: registering at module level mutates PyYAML's default loaders
# globally; see the follow-up answer for a dedicated Loader subclass.
yaml.add_implicit_resolver('!path', path_matcher)
yaml.add_constructor('!path', path_constructor)

data = """
env: ${VAR}/file.txt
other: file.txt
"""

if __name__ == '__main__':
    p = yaml.load(data, Loader=yaml.FullLoader)
    print(os.environ.get('VAR'))  ## /home/abc
    print(p['env'])  ## /home/abc/file.txt
Warning: Do not run this if you are not the one specifying the env variables (or any other untrusted input) as there are remote code execution vulnerabilities with FullLoader as of July 2020.
Here is an alternative version which does use a new Loader class if you do not want to modify the global/default yaml Loader.
And more importantly, it correctly replaces interpolated strings that are not just the environment variables, eg path/to/${SOME_VAR}/and/${NEXT_VAR}/foo/bar
# Same idea, but registered on a dedicated Loader subclass so PyYAML's
# global/default loaders are left untouched.  os.path.expandvars (below)
# also handles several ${VAR} occurrences anywhere in the string, e.g.
# path/to/${SOME_VAR}/and/${NEXT_VAR}/foo/bar.
path_matcher = re.compile(r'.*\$\{([^}^{]+)\}.*')


def path_constructor(loader, node):
    # Expand every $VAR / ${VAR} in the scalar; os.path.expandvars leaves
    # unset variables as-is instead of crashing.
    return os.path.expandvars(node.value)


class EnvVarLoader(yaml.SafeLoader):
    pass


# Third argument None: apply the resolver regardless of the scalar's first
# character.
EnvVarLoader.add_implicit_resolver('!path', path_matcher, None)
EnvVarLoader.add_constructor('!path', path_constructor)

# NOTE(review): configPath is a placeholder that must be defined by the
# surrounding code.
with open(configPath) as f:
    c = yaml.load(f, Loader=EnvVarLoader)
There is a nice library envyaml for this.
With it it's very simple:
# envyaml resolves ${VAR} references against the environment while parsing.
from envyaml import EnvYAML

# read file env.yaml and parse config
env = EnvYAML('env.yaml')
You can see a how to here, which lead to the very small library pyaml-env for ease of use so that we don't repeat things in every project.
So, using the library, your sample yaml becomes:
---
Top: !ENV ${PATH}/my.txt
Vars:
- a
- b
and with parse_config
from pyaml_env import parse_config
config = parse_config('path/to/config.yaml')
print(config)
# outputs the following, with the environment variables resolved
{
'Top': '/Users/abc/Downloads/tbwork/my.txt'
'Vars': ['a', 'b']
}
There are also options to use default values if you wish, like this:
---
Top: !ENV ${PATH:'~/data/'}/my.txt
Vars:
- a
- b
About the implementation, in short:
For PyYAML to be able to resolve environment variables, we need three main things:
A regex pattern for the environment variable identification, e.g. pattern = re.compile(r'.*?\$\{(\w+)\}.*?')
A tag that will signify that there’s an environment variable (or more) to be parsed, e.g. !ENV.
And a function that the loader will use to resolve the environment variables
A full example:
import os
import re
import yaml


def parse_config(path=None, data=None, tag='!ENV'):
    """
    Load a yaml configuration file and resolve any environment variables

    The environment variables must have !ENV before them and be in this format
    to be parsed: ${VAR_NAME}.
    E.g.:
    database:
      host: !ENV ${HOST}
      port: !ENV ${PORT}
    app:
      log_path: !ENV '/var/${LOG_PATH}'
      something_else: !ENV '${AWESOME_ENV_VAR}/var/${A_SECOND_AWESOME_VAR}'

    :param str path: the path to the yaml file
    :param str data: the yaml data itself as a stream
    :param str tag: the tag to look for
    :return: the dict configuration
    :rtype: dict[str, T]
    :raises ValueError: if neither path nor data is given
    """
    # pattern for global vars: look for ${word}.  Raw string, so \$ and \w
    # are regex escapes rather than (invalid) string escapes.
    pattern = re.compile(r'.*?\$\{(\w+)\}.*?')

    # Bug fix: the original registered the resolver/constructor directly on
    # yaml.SafeLoader, permanently mutating the shared class for the whole
    # process.  A throw-away subclass keeps the registration local.
    class _EnvLoader(yaml.SafeLoader):
        pass

    # the tag will be used to mark where to start searching for the pattern
    # e.g. somekey: !ENV somestring${MYENVVAR}blah blah blah
    _EnvLoader.add_implicit_resolver(tag, pattern, None)

    def constructor_env_variables(loader, node):
        """
        Extracts the environment variable from the node's value

        :param yaml.Loader loader: the yaml loader
        :param node: the current node in the yaml
        :return: the parsed string that contains the value of the environment
        variable
        """
        value = loader.construct_scalar(node)
        match = pattern.findall(value)  # to find all env variables in line
        if match:
            full_value = value
            for g in match:
                # Unset variables fall back to the variable name itself.
                full_value = full_value.replace(
                    f'${{{g}}}', os.environ.get(g, g)
                )
            return full_value
        return value

    _EnvLoader.add_constructor(tag, constructor_env_variables)

    if path:
        with open(path) as conf_data:
            return yaml.load(conf_data, Loader=_EnvLoader)
    elif data:
        return yaml.load(data, Loader=_EnvLoader)
    else:
        raise ValueError('Either a path or data should be defined as input')
You can run it like this on terminal.
# Set a shell variable; no export needed, since the heredoc below is
# expanded by this same shell.
ENV_NAME=test
# Unquoted EOF delimiter => ${ENV_NAME} is expanded while writing new.yaml.
cat << EOF > new.yaml
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: ${ENV_NAME}
EOF
Then do a cat new.yaml
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: test
Using yaml's add_implicit_resolver and add_constructor works for me, but like this, building on the example above:
import yaml
import re
import os
os.environ['VAR']="you better work"
path_matcher = re.compile(r'\$\{([^}^{]+)\}')
def path_constructor(loader, node):
''' Extract the matched value, expand env variable, and replace the match '''
print("i'm here")
value = node.value
match = path_matcher.match(value)
env_var = match.group()[2:-1]
return os.environ.get(env_var) + value[match.end():]
yaml.add_implicit_resolver('!path', path_matcher, None, yaml.SafeLoader)
yaml.add_constructor('!path', path_constructor, yaml.SafeLoader)
data = """
env: ${VAR}/file.txt
other: file.txt
"""
if __name__ == '__main__':
p = yaml.safe_load(data)
print(os.environ.get('VAR')) ## you better work
print(p['env']) ## you better work/file.txt

How to store environment variables

To set an environment variable using Windows Command Processor ( cmd) :
SET MY_VARIABLE=c:\path\to\filename.txt
MY_VARIABLE now can be accessed by Python application started by same cmd window:
import os

# Read MY_VARIABLE from the inherited environment; returns None if unset.
variable = os.getenv('MY_VARIABLE')
I wonder if there is a way to set an environment variable from inside of Python so it becomes available to other processes running on the same machine?
To set a new environment variable:
# Visible to this process and to child processes spawned afterwards, but
# lost when the Python process exits.
os.environ['NEW_VARIABLE'] = 'NEW VALUE'
But this NEW_VARIABLE is lost as soon Python process and exited.
You can store environment variables persistently in the Windows registry. Variables can be stored for the current user, or for the system:
Code to persistently set an environment variable on Windows:
import win32con
import win32gui
try:
import _winreg as winreg
except ImportError:
# this has been renamed in python 3
import winreg
def set_environment_variable(variable, value, user_env=True):
    """Persistently store an environment variable in the Windows registry.

    Args:
        variable: name of the environment variable.
        value: value to store; if it contains '%', it is written as an
            expandable string (REG_EXPAND_SZ) so references such as
            %SystemRoot% keep working.
        user_env: write under the current user's Environment key when True,
            otherwise under the system-wide key (which normally requires
            administrator rights).
    """
    if user_env:
        # This is for the user's environment variables
        reg_key = winreg.OpenKey(
            winreg.HKEY_CURRENT_USER,
            'Environment', 0, winreg.KEY_SET_VALUE)
    else:
        # This is for the system environment variables
        reg_key = winreg.OpenKey(
            winreg.HKEY_LOCAL_MACHINE,
            r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment',
            0, winreg.KEY_SET_VALUE)
    if '%' in value:
        var_type = winreg.REG_EXPAND_SZ
    else:
        var_type = winreg.REG_SZ
    # `with` closes the registry key even if SetValueEx raises.
    with reg_key:
        winreg.SetValueEx(reg_key, variable, 0, var_type, value)
    # Broadcast WM_SETTINGCHANGE so running applications (e.g. Explorer)
    # re-read the environment; SMTO_ABORTIFHUNG plus the 1000 ms timeout
    # prevents blocking on hung windows.
    win32gui.SendMessageTimeout(
        win32con.HWND_BROADCAST, win32con.WM_SETTINGCHANGE, 0,
        'Environment', win32con.SMTO_ABORTIFHUNG, 1000)
Test code to invoke above:
set_environment_variable('NEW_VARIABLE', 'NEW VALUE')
Would a simple if not perhaps slightly crude way of doing this be simply to use os.system and pass the command through that as if you were running it in CMD?
An example being os.system("SET MY_VARIABLE=c:\path\to\filename.txt")
Hope that helps.

ConfigParser and String interpolation with env variable

It's been a little while since I last used Python, and I have a problem reading an .ini file with interpolated values.
this is my ini file:
[DEFAULT]
home=$HOME
test_home=$home
[test]
test_1=$test_home/foo.csv
test_2=$test_home/bar.csv
Those lines
# Python 2.  SafeConfigParser only interpolates %(name)s references, so the
# $HOME / $home values in the ini file come back verbatim -- which is
# exactly the problem described in the question.
from ConfigParser import SafeConfigParser

parser = SafeConfigParser()
parser.read('config.ini')
print parser.get('test', 'test_1')
does output
$test_home/foo.csv
while I'm expecting
/Users/nkint/foo.csv
EDIT:
I supposed that the $ syntax was implicitly included in the so called string interpolation (referring to the manual):
On top of the core functionality, SafeConfigParser supports
interpolation. This means values can contain format strings which
refer to other values in the same section, or values in a special
DEFAULT section.
But I'm wrong. How to handle this case?
First of all according to the documentation you should use %(test_home)s to interpolate test_home. Moreover the key are case insensitive and you can't use both HOME and home keys. Finally you can use SafeConfigParser(os.environ) to take in account of you environment.
# Python 2.  Seeding the parser with os.environ places every environment
# variable in the DEFAULT section, so %(HOME)s-style references resolve
# from the environment.
from ConfigParser import SafeConfigParser
import os

parser = SafeConfigParser(os.environ)
parser.read('config.ini')
Where config.ini is
[DEFAULT]
test_home=%(HOME)s
[test]
test_1=%(test_home)s/foo.csv
test_2=%(test_home)s/bar.csv
You can write custom interpolation in case of Python 3:
import configparser
import os


class EnvInterpolation(configparser.BasicInterpolation):
    """Interpolation which expands environment variables in values."""

    def before_get(self, parser, section, option, value, defaults):
        # Apply the normal %(...)s interpolation first, then expand any
        # $VAR / ${VAR} references from the process environment (unset
        # variables are left untouched by os.path.expandvars).
        interpolated = super().before_get(parser, section, option, value, defaults)
        return os.path.expandvars(interpolated)


cfg = """
[section1]
key = value
my_path = $PATH
"""

config = configparser.ConfigParser(interpolation=EnvInterpolation())
config.read_string(cfg)
print(config['section1']['my_path'])
If you want to expand some environment variables, you can do so using os.path.expandvars before parsing a StringIO stream:
# Python 2.  Expand $VAR / ${VAR} in the raw ini text up front with
# os.path.expandvars, then hand the result to ConfigParser via a StringIO
# stream.
import ConfigParser
import os
import StringIO

with open('config.ini', 'r') as cfg_file:
    cfg_txt = os.path.expandvars(cfg_file.read())
config = ConfigParser.ConfigParser()
config.readfp(StringIO.StringIO(cfg_txt))
the trick for proper variable substitution from environment is to use the ${} syntax for the environment variables:
[DEFAULT]
test_home=${HOME}
[test]
test_1=%(test_home)s/foo.csv
test_2=%(test_home)s/bar.csv
ConfigParser.get values are strings, even if you set values as integer or True. But ConfigParser has getint, getfloat and getboolean.
settings.ini
[default]
home=/home/user/app
tmp=%(home)s/tmp
log=%(home)s/log
sleep=10
debug=True
config reader
>>> from ConfigParser import SafeConfigParser
>>> parser = SafeConfigParser()
>>> parser.read('/home/user/app/settings.ini')
>>> parser.get('default', 'home')
'/home/user/app'
>>> parser.get('default', 'tmp')
'/home/user/app/tmp'
>>> parser.getint('default', 'sleep')
10
>>> parser.getboolean('default', 'debug')
True
Edit
Indeed you could get name values as environ var if you initialize SafeConfigParser with os.environ. Thanks for the Michele's answer.
Quite late, but maybe it can help someone else looking for the same answers that I had recently. Also, one of the comments was how to fetch Environment variables and values from other sections. Here is how I deal with both converting environment variables and multi-section tags when reading in from an INI file.
INI FILE:
[PKG]
# <VARIABLE_NAME>=<VAR/PATH>
PKG_TAG = Q1_RC1
[DELIVERY_DIRS]
# <DIR_VARIABLE>=<PATH>
NEW_DELIVERY_DIR=${DEL_PATH}\ProjectName_${PKG:PKG_TAG}_DELIVERY
Python Class that uses the ExtendedInterpolation so that you can use the ${PKG:PKG_TAG} type formatting. I add the ability to convert the windows environment vars when I read in INI to a string using the builtin os.path.expandvars() function such as ${DEL_PATH} above.
import os
from datetime import datetime  # bug fix: get_date used datetime without importing it
# Bug fix: this module defines its own class named ConfigParser, which
# shadowed the stdlib class of the same name -- so instantiating
# "ConfigParser(...)" inside __init__ recursed into this wrapper instead
# of building a stdlib parser.  Alias the stdlib import to avoid the clash.
from configparser import ConfigParser as _StdConfigParser, ExtendedInterpolation


class ConfigParser(object):
    """INI reader with ${Section:option} interpolation and environment-variable expansion."""

    def __init__(self):
        """
        initialize the file parser with
        ExtendedInterpolation to use ${Section:option} format
        [Section]
        option=variable
        """
        self.config_parser = _StdConfigParser(interpolation=ExtendedInterpolation())

    def read_ini_file(self, file='./config.ini'):
        """
        Parses in the passed in INI file and converts any Windows environ vars.
        :param file: INI file to parse
        :return: void
        """
        # Expands environment variable paths such as ${DEL_PATH}; names not
        # present in the environment are left intact for ExtendedInterpolation.
        with open(file, 'r') as cfg_file:
            cfg_txt = os.path.expandvars(cfg_file.read())
        # Parses the expanded config string
        self.config_parser.read_string(cfg_txt)

    def get_config_items_by_section(self, section):
        """
        Retrieves the configurations for a particular section
        :param section: INI file section
        :return: a list of name, value pairs for the options in the section
        """
        return self.config_parser.items(section)

    def get_config_val(self, section, option):
        """
        Get an option value for the named section.
        :param section: INI section
        :param option: option tag for desired value
        :return: Value of option tag
        """
        return self.config_parser.get(section, option)

    @staticmethod  # bug fix: the original wrote "#staticmethod", a comment, not a decorator
    def get_date():
        """
        Sets up a date formatted string.
        :return: Date string
        """
        return datetime.now().strftime("%Y%b%d")

    def prepend_date_to_var(self, sect, option):
        """
        Function that allows the ability to append a date to a section
        variable.  (NOTE(review): despite the name, the date is appended,
        not prepended -- kept for backward compatibility.)
        :param sect: INI section to look for variable
        :param option: INI search variable under INI section
        :return: Void - Date is appended to variable string in INI
        """
        if self.config_parser.get(sect, option):
            var = self.config_parser.get(sect, option)
            var_with_date = var + '_' + self.get_date()
            self.config_parser.set(sect, option, var_with_date)
Based on #alex-markov answer (and code) and #srand9 comment, the following solution works with environment variables and cross-section references.
Note that the interpolation is now based on ExtendedInterpolation to allow cross-sections references and on before_read instead of before_get.
#!/usr/bin/env python3
import configparser
import os


class EnvInterpolation(configparser.ExtendedInterpolation):
    """Interpolation which expands environment variables in values."""

    def before_read(self, parser, section, option, value):
        # Expand $VAR / ${VAR} from the environment while the file is being
        # read; names os.path.expandvars cannot resolve are left alone, and
        # ExtendedInterpolation then handles ${section:option} references
        # at get() time.
        partially_expanded = super().before_read(parser, section, option, value)
        return os.path.expandvars(partially_expanded)


cfg = """
[paths]
foo : ${HOME}
[section1]
key = value
my_path = ${paths:foo}/path
"""

config = configparser.ConfigParser(interpolation=EnvInterpolation())
config.read_string(cfg)
print(config['section1']['my_path'])
It seems in the last version 3.5.0, ConfigParser was not reading the env variables, so I end up providing a custom Interpolation based on the BasicInterpolation one.
class EnvInterpolation(BasicInterpolation):
    """Interpolation as implemented in the classic ConfigParser,
    plus it checks if the variable is provided as an environment one in uppercase.
    """

    # NOTE(review): this relies on names imported from configparser's
    # internals -- MAX_INTERPOLATION_DEPTH, InterpolationDepthError,
    # InterpolationSyntaxError, InterpolationMissingOptionError -- plus os;
    # those imports are outside this snippet.  _interpolate_some and
    # _KEYCRE are private configparser APIs and may change between
    # Python versions.
    def _interpolate_some(self, parser, option, accum, rest, section, map,
            depth):
        # `accum` collects the output fragments; `rest` is the unparsed tail.
        rawval = parser.get(section, option, raw=True, fallback=rest)
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rawval)
        while rest:
            p = rest.find("%")
            if p < 0:
                # No more '%' escapes: emit the remainder verbatim.
                accum.append(rest)
                return
            if p > 0:
                # Emit the literal text before the next '%'.
                accum.append(rest[:p])
                rest = rest[p:]
            # p is no longer used
            c = rest[1:2]
            if c == "%":
                # '%%' is an escaped literal percent sign.
                accum.append("%")
                rest = rest[2:]
            elif c == "(":
                # '%(name)s' style reference.
                m = self._KEYCRE.match(rest)
                if m is None:
                    raise InterpolationSyntaxError(option, section,
                        "bad interpolation variable reference %r" % rest)
                var = parser.optionxform(m.group(1))
                rest = rest[m.end():]
                try:
                    # This is the difference from BasicInterpolation: look
                    # for an UPPERCASE environment variable first, and only
                    # fall back to the parser's own map when it is unset.
                    v = os.environ.get(var.upper())
                    if v is None:
                        v = map[var]
                except KeyError:
                    raise InterpolationMissingOptionError(option, section, rawval, var) from None
                if "%" in v:
                    # The substituted value itself contains references:
                    # recurse with an increased depth.
                    self._interpolate_some(parser, option, accum, v,
                        section, map, depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'%%' must be followed by '%%' or '(', "
                    "found: %r" % (rest,))
The difference between the BasicInterpolation and the EnvInterpolation is in:
v = os.environ.get(var.upper())
if v is None:
v = map[var]
where I'm trying to find the var in the enviornment before checking in the map.
Below is a simple solution that
Can use default value if no environment variable is provided
Overrides variables with environment variables (if found)
needs no custom interpolation implementation
Example:
my_config.ini
[DEFAULT]
HOST=http://www.example.com
CONTEXT=${HOST}/auth/
token_url=${CONTEXT}/oauth2/token
ConfigParser:
import os
import configparser

# Read the raw ini text and substitute matching environment variables
# first; names not found in the environment are left untouched, so the
# ini's own defaults survive when no override is set.
ini_file = os.path.join(os.path.dirname(__file__), 'my_config.ini')
# replace variables with environment variables(if exists) before loading ini file
with open(ini_file, 'r') as cfg_file:
    expanded_text = os.path.expandvars(cfg_file.read())

# ${section:option} references between options are resolved by
# ExtendedInterpolation when values are fetched.
config = configparser.ConfigParser(interpolation=configparser.ExtendedInterpolation())
config.read_string(expanded_text)
print(config['DEFAULT']['token_url'])
Output:
If no environment variable $HOST or $CONTEXT is present, this config will take the default value
user can override the default value by creating $HOST, $CONTEXT environment variable
works well with docker container

Categories

Resources