How to validate structure (or schema) of dictionary in Python? - python
I have a dictionary with config info:
# Example configuration to validate: a flat 'version' plus a nested 'info'
# section; 'optional_conf' may or may not be present.
my_conf = {
    'version': 1,
    'info': {
        'conf_one': 2.5,
        'conf_two': 'foo',
        'conf_three': False,
        'optional_conf': 'bar'
    }
}
I want to check if the dictionary follows the structure I need.
I'm looking for something like this:
# Desired schema: maps each required key either to the type its value must
# have, or to a nested dict of the same form.
conf_structure = {
    'version': int,
    'info': {
        'conf_one': float,
        'conf_two': str,
        'conf_three': bool
    }
}
# Hypothetical checker: True when my_conf matches conf_structure.
is_ok = check_structure(conf_structure, my_conf)
Is there an existing solution to this problem, or any library that could make implementing check_structure easier?
You may use schema (PyPi Link)
schema is a library for validating Python data structures, such as those obtained from config-files, forms, external services or command-line parsing, converted from JSON/YAML (or something else) to Python data-types.
from schema import Schema, And, Use, Optional, SchemaError
def check(conf_schema, conf):
    """Return True when `conf` validates against `conf_schema`, else False.

    Wraps Schema.validate(), which raises SchemaError on mismatch, into a
    simple boolean predicate.
    """
    try:
        conf_schema.validate(conf)
    except SchemaError:
        return False
    return True
# Schema mirroring the question's conf_structure.  And(Use(T)) accepts a
# value that T() can convert (coercing validation), and Optional(...) marks
# a key that may be absent.
conf_schema = Schema({
    'version': And(Use(int)),
    'info': {
        'conf_one': And(Use(float)),
        'conf_two': And(Use(str)),
        'conf_three': And(Use(bool)),
        Optional('optional_conf'): And(Use(str))
    }
})
# Sample input; it satisfies the schema above, so this should print True.
conf = {
    'version': 1,
    'info': {
        'conf_one': 2.5,
        'conf_two': 'foo',
        'conf_three': False,
        'optional_conf': 'bar'
    }
}
print(check(conf_schema, conf))
Without using libraries, you could also define a simple recursive function like this:
def check_structure(struct, conf):
    """Recursively check that `conf` matches the template `struct`.

    `struct` maps keys to types, to nested dicts of the same form, or to
    one-element lists ([type] or [dict]) describing homogeneous lists.
    Extra keys in `conf` are allowed; missing keys are not.

    Returns True when `conf` conforms, False otherwise.
    """
    if isinstance(struct, dict) and isinstance(conf, dict):
        # struct is a dict of types or other dicts: every key it requires
        # must exist in conf and recursively match.
        return all(k in conf and check_structure(struct[k], conf[k])
                   for k in struct)
    if isinstance(struct, list) and isinstance(conf, list):
        # struct is a list in the form [element_template]; every element of
        # conf must match that single template.
        return all(check_structure(struct[0], c) for c in conf)
    if isinstance(struct, type):
        # Leaf: struct names the type the value must have.
        # (BUG FIX: the original had the arguments reversed —
        # `isinstance(struct, conf)` with the branch guarded on
        # `isinstance(conf, type)` — which tested the template against the
        # value instead of the value against the template.)
        return isinstance(conf, struct)
    # struct is neither a dict, nor a list, nor a type: malformed template.
    return False
This assumes that the config can have keys that are not in your structure, as in your example.
Update: New version also supports lists, e.g. like 'foo': [{'bar': int}]
Advice for the future: use Pydantic!
Pydantic enforces type hints at runtime, and provides user friendly errors when data is invalid. Define how data should be in pure, canonical python; validate it with pydantic, as simple as that:
from pydantic import BaseModel
class Info(BaseModel):
    # Schema for the nested 'info' section; pydantic enforces these types
    # when the model is instantiated.
    conf_one: float
    conf_two: str
    conf_three: bool

    class Config:
        # Reject any key not declared above (e.g. 'optional_conf').
        extra = 'forbid'
class ConfStructure(BaseModel):
    # Top-level schema: an int version plus a nested Info section.
    version: int
    info: Info
If validation fails pydantic will raise an error with a breakdown of what was wrong:
# Contains 'optional_conf', which the Info model does not declare; with
# extra = 'forbid' this makes validation fail.
my_conf_wrong = {
    'version': 1,
    'info': {
        'conf_one': 2.5,
        'conf_two': 'foo',
        'conf_three': False,
        'optional_conf': 'bar'
    }
}

# Matches the declared fields exactly, so it parses cleanly.
my_conf_right = {
    'version': 10,
    'info': {
        'conf_one': 14.5,
        'conf_two': 'something',
        'conf_three': False
    }
}

model = ConfStructure(**my_conf_right)
print(model.dict())
# {'version': 10, 'info': {'conf_one': 14.5, 'conf_two': 'something', 'conf_three': False}}

# The extra 'optional_conf' key makes this raise:
res = ConfStructure(**my_conf_wrong)
# pydantic.error_wrappers.ValidationError: 1 validation error for ConfStructure
# info -> optional_conf
# extra fields not permitted (type=value_error.extra)
You can build structure using recursion:
def get_type(value):
    """Map a value to its type name string; a dict maps to a dict of
    key -> recursively computed type structure."""
    if not isinstance(value, dict):
        # Leaf value: represent it by the string form of its type.
        return str(type(value))
    # Dict: mirror its shape, replacing each value by its type structure.
    return {key: get_type(inner) for key, inner in value.items()}
And then compare required structure with your dictionary:
get_type(current_conf) == get_type(required_conf)
Example:
# Reference config: get_type() turns it into a nested structure of type
# names that a candidate config's structure can be compared against.
required_conf = {
    'version': 1,
    'info': {
        'conf_one': 2.5,
        'conf_two': 'foo',
        'conf_three': False,
        'optional_conf': 'bar'
    }
}
get_type(required_conf)  # returns the nested dict of type names shown below
{'info': {'conf_two': "<type 'str'>", 'conf_one': "<type 'float'>", 'optional_conf': "<type 'str'>", 'conf_three': "<type 'bool'>"}, 'version': "<type 'int'>"}
Looks like the dict-schema-validator package does exactly what you need:
Here is a simple schema representing a Customer:
{
"_id": "ObjectId",
"created": "date",
"is_active": "bool",
"fullname": "string",
"age": ["int", "null"],
"contact": {
"phone": "string",
"email": "string"
},
"cards": [{
"type": "string",
"expires": "date"
}]
}
Validation:
from datetime import datetime
import json
from dict_schema_validator import validator
# Load the customer schema (shown above) from disk.
with open('models/customer.json', 'r') as j:
    schema = json.loads(j.read())

# Sample document that deliberately violates the schema in several ways:
# wrong '_id' type, an extra 'contact.skype' field, a string where a date
# is expected, and a card missing its 'expires' field.
customer = {
    "_id": 123,
    "created": datetime.now(),
    "is_active": True,
    "fullname": "Jorge York",
    "age": 32,
    "contact": {
        "phone": "559-940-1435",
        "email": "york#example.com",
        "skype": "j.york123"
    },
    "cards": [
        {"type": "visa", "expires": "12/2029"},
        {"type": "visa"},
    ]
}

# validate() collects problems instead of raising; each entry carries a
# human-readable 'msg' (see the output below).
errors = validator.validate(schema, customer)
for err in errors:
    print(err['msg'])
Output:
[*] "_id" has wrong type. Expected: "ObjectId", found: "int"
[+] Extra field: "contact.skype" having type: "str"
[*] "cards[0].expires" has wrong type. Expected: "date", found: "str"
[-] Missing field: "cards[1].expires"
You can also use dataclasses_json library. Here is how I would normally do it
from dataclasses import dataclass
from dataclasses_json import dataclass_json, Undefined
from dataclasses_json.undefined import UndefinedParameterError
from typing import Optional
#### define schema #######
# BUG FIX: Stack Overflow's markdown stripped the '@' from the decorators,
# leaving them as '#' comments — without them the classes are plain classes
# with no from_dict()/to_dict() and no strict undefined-key handling.
# Undefined.RAISE makes unknown keys raise UndefinedParameterError (caught
# in the compliance test below) instead of being silently dropped.
@dataclass_json(undefined=Undefined.RAISE)
@dataclass
class Info:
    conf_one: float
    # conf_two: str   # intentionally omitted to demo the missing-field path
    conf_three: bool
    optional_conf: Optional[str]


@dataclass_json
@dataclass
class ConfStructure:
    version: int
    info: Info
####### test for compliance####
####### test for compliance####
try:
    # Round-trip my_conf through the schema; raises if it doesn't fit.
    ConfStructure.from_dict(my_conf).to_dict()
except KeyError as e:
    # A declared (non-Optional) field is absent from my_conf.
    print('theres a missing parameter')
except UndefinedParameterError as e:
    # my_conf carries a key the schema doesn't declare.
    print('extra parameters')
You can use dictify from https://pypi.org/project/dictify/.
Read docs here https://dictify.readthedocs.io/en/latest/index.html
This is how it can be done.
from dictify import Field, Model
class Info(Model):
    # Each Field chains validators: required=True forbids omitting the key,
    # and .instance(T) only accepts values that are instances of T.
    conf_one = Field(required=True).instance(float)
    conf_two = Field(required=True).instance(str)
    conf_three = Field(required=True).instance(bool)
    optional_conf = Field().instance(str)
class MyConf(Model):
    version = Field(required=True).instance(int)
    # Nested model: the 'info' value is validated against Info.
    info = Field().model(Info)
my_conf = MyConf()  # Invalid without required fields

# Valid: all required fields present with acceptable types.
my_conf = MyConf({
    'version': 1,
    'info': {
        'conf_one': 2.5,
        'conf_two': 'foo',
        'conf_three': False,
        'optional_conf': 'bar'
    }
})

my_conf['info']['conf_one'] = 'hi'  # Invalid, won't be assigned
There is a standard for validating JSON files called JSON Schema.
Validators have been implemented in many languages, including the Python. Read also the documentation for more details. In the following example I will use a Python package jsonschema (docs) that I am familiar with.
Given the config data
# Config document to be validated against the JSON Schema below.
my_conf = {
    'version': 1,
    'info': {
        'conf_one': 2.5,
        'conf_two': 'foo',
        'conf_three': False,
        'optional_conf': 'bar',
    },
}
and the corresponding config schema
# JSON Schema equivalent of the desired structure.  'object'/'integer'/
# 'number'/'string'/'boolean' are JSON Schema type names, and the 'required'
# list inside 'info' names the keys that must be present there.
# NOTE(review): there is no top-level 'required' list, so 'version' itself
# may be omitted — confirm whether that is intended.
conf_structure = {
    'type': 'object',
    'properties': {
        'version': {'type': 'integer'},
        'info': {
            'type': 'object',
            'properties': {
                'conf_one': {'type': 'number'},
                'conf_two': {'type': 'string'},
                'conf_three': {'type': 'boolean'},
                'optional_conf': {'type': 'string'},
            },
            'required': ['conf_one', 'conf_two', 'conf_three'],
        },
    },
}
the actual code to validate this data is then as simple as this:
import jsonschema

# Raises jsonschema.ValidationError if my_conf does not match the schema;
# succeeds silently otherwise.
jsonschema.validate(my_conf, schema=conf_structure)
A big advantage of this approach is that you can store both data and schema as JSON-formatted files.
@tobias_k beat me to it (both in time and quality probably) but here is another recursive function for the task that might be a bit easier for you (and me) to follow:
def check_dict(my_dict, check_against):
    """Return True if `my_dict` matches the template `check_against`.

    `check_against` maps keys either to types (leaf check via isinstance)
    or to nested dicts of the same form.  Keys present in `my_dict` but
    absent from `check_against` are ignored.
    """
    for k, v in check_against.items():
        if k not in my_dict:
            # BUG FIX: the original raised KeyError on a missing key;
            # report structural failure instead.
            return False
        if isinstance(v, dict):
            # BUG FIX: the original did `return check_dict(my_dict[k], v)`
            # here, unconditionally exiting after the FIRST nested dict and
            # never checking the remaining keys.  Only bail out on failure.
            if not check_dict(my_dict[k], v):
                return False
        elif not isinstance(my_dict[k], v):
            return False
    return True
By nature, dictionaries used within Python (rather than exported as JSON) do not guarantee any particular key order; keys are simply looked up to retrieve their values (hence a dictionary).
In either case, these functions should provide you with what you're looking for, given the level of nesting present in the samples you provided.
#assuming identical order of keys is required
# assuming identical order of keys is required
def check_structure(conf_structure, my_conf):
    """Compare the key layout of two dicts, one nesting level deep.

    Only key names are compared, never value types.
    NOTE(review): dict-view equality (`.keys() != .keys()`) is set-like in
    Python 3, so despite the heading this does not actually enforce order.
    """
    if my_conf.keys() != conf_structure.keys():
        return False
    for name in my_conf:
        nested = my_conf[name]
        # Recurse exactly one level: when the value is a plain dict, its
        # key set must match the corresponding dict in conf_structure.
        if type(nested) == dict and nested.keys() != conf_structure[name].keys():
            return False
    return True
#assuming identical order of keys is not required
# assuming identical order of keys is not required
def check_structure(conf_structure, my_conf):
    """Order-insensitively compare the key layout of `my_conf` against
    `conf_structure`, one nesting level deep.

    Only key names are compared, never value types.
    """
    if sorted(my_conf.keys()) != sorted(conf_structure.keys()):
        return False
    for key in my_conf.keys():
        # BUG FIX: the original did `if type(my_conf[key]) != dict: return
        # False`, rejecting EVERY non-dict value — even the question's own
        # sample ('version': 1) failed.  Only nested dicts need their key
        # sets compared; plain values are accepted once their key matched.
        if type(my_conf[key]) == dict:
            if sorted(my_conf[key].keys()) != sorted(conf_structure[key].keys()):
                return False
    return True
This solution would obviously need to be changed if the level of nesting was greater (i.e. it is configured to assess the similarity in structure of dictionaries that have some values as dictionaries, but not dictionaries where some values these latter dictionaries are also dictionaries).
Related
How to create a dataclass that handles nested dicts with different names?
I am using the data from the League of Legends API to learn Python, JSON, and Data Classes. Using dacite, I have created parent and child classes that allow access to the data using this syntax: champs.data['Ahri']['key']. However, I wonder if there is a way to create a class that returns the keys as fields so one could access the data using this syntax: champs.data.Ahri.key. Here is the working code: from dataclasses import dataclass from dacite import from_dict j1 = {'type': 'champion', 'data': {'Aatrox': {'id': 'Aatrox', 'key': '266', 'name': 'Aatrox'}, 'Ahri': {'id': 'Ahri', 'key': '103', 'name': 'Ahri'}}} #dataclass class C: type: str data: dict #dataclass class P: type: str data: dict champs = from_dict(data_class=P, data=j1) champs.data['Ahri']['key']
If it were me, I would probably leave/make champions a dictionary. Then access it like champions['Ahri'].key Something like: import dataclasses #dataclasses.dataclass class Champion: id: str key: str name: str j1 = { 'type': 'champion', 'data': { 'Aatrox': {'id': 'Aatrox', 'key': '266', 'name': 'Aatrox'}, 'Ahri': {'id': 'Ahri', 'key': '103', 'name': 'Ahri'} } } champions = { champion["id"]: Champion(**champion) for champion in j1["data"].values() } print(champions['Ahri'].key) resulting in 103 However if you were really keen on champions.Ahri.key then you can implement Champions as an empty class and use setattr() import dataclasses #dataclasses.dataclass class Champion: id: str key: str name: str #dataclasses.dataclass class Champions: pass j1 = { 'type': 'champion', 'data': { 'Aatrox': {'id': 'Aatrox', 'key': '266', 'name': 'Aatrox'}, 'Ahri': {'id': 'Ahri', 'key': '103', 'name': 'Ahri'} } } champions = Champions() for champion in j1["data"].values(): setattr(champions, champion["id"], Champion(**champion)) print(champions.Ahri.key) again giving you 103 Note: The #dataclass decorator can likely be omitted from Champion().
The closest you can probably get - at least in a safe enough manner - is as #JonSG suggests, using champs.data['Ahri'].key. Here's a straightforward example using the dataclass-wizard. It doesn't do a strict type checking as I know dacite does. Instead, it opts to do implicit type coercision where possible, which is useful in some cases; you can see an example of this below - str to annotated int in this case. Note: This example should work for Python 3.7+ with the included __future__ import. from __future__ import annotations from dataclasses import dataclass from dataclass_wizard import fromdict data = { 'type': 'champion', 'data': { 'Aatrox': {'id': 'Aatrox', 'key': '266', 'name': 'Aatrox'}, 'Ahri': {'id': 'Ahri', 'key': '103', 'name': 'Ahri'}, } } #dataclass class P: type: str data: dict[str, Character] #dataclass class Character: id: str key: int name: str champs = fromdict(P, data) print(champs) print(champs.data['Ahri'].key) Output: P(type='champion', data={'Aatrox': Character(id='Aatrox', key=266, name='Aatrox'), 'Ahri': Character(id='Ahri', key=103, name='Ahri')}) 103
How to do this d = { "type": "champion", "data": { "Aatrox": {"id": "Aatrox", "key": "266", "name": "Aatrox"}, "Ahri": {"id": "Ahri", "key": "103", "name": "Ahri"}, }, } def dict_to_class(d) -> object: if isinstance(d, dict): class C: pass for k, v in d.items(): setattr(C, k, dict_to_class(v)) return C else: return d champ = dict_to_class(d) print(champ.data.Ahri.key) # 103 The key here is the setatter builtin method, which takes an object, a string, and some value, and creates an attribute (field) on that object, named according to the string and containing the value. Don't do this! I must stress that there is almost never a good reason to do this. When dealing with JSON data of an unknown shape, the correct way to represent it is a dict. If you do know the shape of the data, you should create a specialized dataclass, like so: from dataclasses import dataclass d = { "type": "champion", "data": { "Aatrox": {"id": "Aatrox", "key": "266", "name": "Aatrox"}, "Ahri": {"id": "Ahri", "key": "103", "name": "Ahri"}, }, } #dataclass class Champion: id: str key: str name: str champions = {name: Champion(**attributes) for name, attributes in d["data"].items()} print(champions) # {'Aatrox': Champion(id='Aatrox', key='266', name='Aatrox'), 'Ahri': Champion(id='Ahri', key='103', name='Ahri')} print(champions["Aatrox"].key) # 266
The dacite docs have a section about nested structures that is very close to what you want. The example they use, verbatim, is as follows: #dataclass class A: x: str y: int #dataclass class B: a: A data = { 'a': { 'x': 'test', 'y': 1, } } result = from_dict(data_class=B, data=data) assert result == B(a=A(x='test', y=1)) We can access fields at arbitrary depth as e.g. result.a.x == 'test'. The critical difference between this and your data is that the dictionary under the data key has keys with arbitrary values (Aatrox, Ahri, etc.). dacite isn't set up to create new field names on the fly, so the best you're going to get is something like the latter part of #JonSG's answer, which uses setattr to dynamically build new fields. Let's imagine how you would use this data for a moment, though. Probably you'd want a some point to be able to iterate over your champions in order to perform a filter/transform/etc. operation. It's possible to iterate over fields in python, but you have to really dig into python internals, which means your code will be less readable/generally comprehensible. Much better would be one of the following: Preprocess j1 into a shape that fits the structure you want to use, and then use dacite with a dataclass that fits the new structure. For example, maybe it makes sense to pull the values of the data dict out into a list. Process in steps using dacite. 
For example, something like the following: from dataclasses import dataclass from dacite import from_dict #dataclass class TopLevel: type: str data: dict j1 = { "type": "champion", "data": { "Aatrox": {"id": "Aatrox", "key": "266", "name": "Aatrox"}, "Ahri": {"id": "Ahri", "key": "103", "name": "Ahri"}, }, } champions = from_dict(data_class=TopLevel, data=j1) # champions.data is a dict of dicts #dataclass class Champion: id: str key: str name: str # transform champions.data into a dict of Champions for k, v in champions.data.items(): champions.data[k] = from_dict(data_class=Champion, data=v) # now, you can do interesting things like the following filter operation start_with_a = [ champ for champ in champions.data.values() if champ.name.lower().startswith("a") ] print(start_with_a) # [Champion(id='Aatrox', key='266', name='Aatrox'), Champion(id='Ahri', key='103', name='Ahri')]
CDK WAF Python Multiple Statement velues error
I have AWS WAF CDK that is working with rules, and now I'm trying to add a rule in WAF with multiple statements, but I'm getting this error: Resource handler returned message: "Error reason: You have used none or multiple values for a field that requires exactly one value., field: STATEMENT, parameter: Statement (Service: Wafv2, Status Code: 400, Request ID: 6a36bfe2-543c-458a-9571-e929142f5df1, Extended Request ID: null)" (RequestToken: b751ae12-bb60-bb75-86c0-346926687ea4, HandlerErrorCode: InvalidRequest) My Code: { 'name': 'ruleName', 'priority': 3, 'statement': { 'orStatement': { 'statements': [ { 'iPSetReferenceStatement': { 'arn': 'arn:myARN' } }, { 'iPSetReferenceStatement': { 'arn': 'arn:myARN' } } ] } }, 'action': { 'allow': {} }, 'visibilityConfig': { 'sampledRequestsEnabled': True, 'cloudWatchMetricsEnabled': True, 'metricName': 'ruleName' } },
There are two things going on there: Firstly, your capitalization is off. iPSetReferenceStatement cannot be parsed and creates an empty statement reference. The correct key is ipSetReferenceStatement. However, as mentioned here, there is a jsii implementation bug causing some issues with the IPSetReferenceStatementProperty. This causes it not to be parsed properly resulting in a jsii error when synthesizing. You can fix it by using the workaround mentioned in the post. Add to your file containing the construct: import jsii from aws_cdk import aws_wafv2 as wafv2 # just for clarity, you might already have this imported #jsii.implements(wafv2.CfnRuleGroup.IPSetReferenceStatementProperty) class IPSetReferenceStatement: #property def arn(self): return self._arn #arn.setter def arn(self, value): self._arn = value Then define your ip reference statement as follows: ip_set_ref_stmnt = IPSetReferenceStatement() ip_set_ref_stmnt.arn = "arn:aws:..." ip_set_ref_stmnt_2 = IPSetReferenceStatement() ip_set_ref_stmnt_2.arn = "arn:aws:..." Then in the rules section of the webacl, you can use it as follows: ... rules=[ { 'name': 'ruleName', 'priority': 3, 'statement': { 'orStatement': { 'statements': [ wafv2.CfnWebACL.StatementProperty( ip_set_reference_statement=ip_set_ref_stmnt ), wafv2.CfnWebACL.StatementProperty( ip_set_reference_statement=ip_set_ref_stmnt_2 ), ] } }, 'action': { 'allow': {} }, 'visibilityConfig': { 'sampledRequestsEnabled': True, 'cloudWatchMetricsEnabled': True, 'metricName': 'ruleName' } } ] ... This should synthesize your stack as expected.
Best way to specify nested dict with pydantic?
Context I'm trying to validate/parse some data with pydantic. I want to specify that the dict can have a key daytime, or not. If it does, I want the value of daytime to include both sunrise and sunset. e.g. These should be allowed: { 'type': 'solar', 'daytime': { 'sunrise': 4, # 4am 'sunset': 18 # 6pm } } And { 'type': 'wind' # daytime key is omitted } And { 'type': 'wind', 'daytime': None } But I want to fail validation for { 'type': 'solar', 'daytime': { 'sunrise': 4 } } Because this has a daytime value, but no sunset value. MWE I've got some code that does this. If I run this script, it executes successfully. from pydantic import BaseModel, ValidationError from typing import List, Optional, Dict class DayTime(BaseModel): sunrise: int sunset: int class Plant(BaseModel): daytime: Optional[DayTime] = None type: str p = Plant.parse_obj({'type': 'wind'}) p = Plant.parse_obj({'type': 'wind', 'daytime': None}) p = Plant.parse_obj({ 'type': 'solar', 'daytime': { 'sunrise': 5, 'sunset': 18 }}) try: p = Plant.parse_obj({ 'type': 'solar', 'daytime': { 'sunrise': 5 }}) except ValidationError: pass else: raise AssertionError("Should have failed") Question What I'm wondering is, is this how you're supposed to use pydantic for nested data? I have lots of layers of nesting, and this seems a bit verbose. Is there any way to do something more concise, like: class Plant(BaseModel): daytime: Optional[Dict[('sunrise', 'sunset'), int]] = None type: str
Pydantic create_model function is what you need: from pydantic import BaseModel, create_model class Plant(BaseModel): daytime: Optional[create_model('DayTime', sunrise=(int, ...), sunset=(int, ...))] = None type: str
Validating arbitrary dict keys with strict schemas with Cerberus
I am trying to validate JSON, the schema for which specifies a list of dicts with arbitrary string keys, the corresponding values of which are dicts with a strict schema (i.e, the keys of the inner dict are strictly some string, here 'a'). From the Cerberus docs, I think that what I want is the 'keysrules' rule. The example in the docs seems to only show how to use 'keysrules' to validate arbitrary keys, but not their values. I wrote the below code as an example; the best I could do was assume that 'keysrules' would support a 'schema' argument for defining a schema for these values. keysrules = { 'myDict': { 'type': 'dict', 'keysrules': { 'type': 'string', 'schema': { 'type': 'dict', 'schema': { 'a': {'type': 'string'} } } } } } keysRulesTest = { 'myDict': { 'arbitraryStringKey': { 'a': 'arbitraryStringValue' }, 'anotherArbitraryStringKey': { 'shouldNotValidate': 'arbitraryStringValue' } } } def test_rules(): v = Validator(keysrules) if not v.validate(keysRulesTest): print(v.errors) assert(0) This example does validate, and I would like it to not validate on 'shouldNotValidate', because that key should be 'a'. Does the flexibility implied by 'keysrules' (i.e, keys governed by 'keysrules' have no constraint other than {'type': 'string'}) propagate down recursively to all schemas underneath it? Or have I made some different error? How can I achieve my desired outcome?
I didn't want keysrules, I wanted valuesrules: keysrules = { 'myDict': { 'type': 'dict', 'valuesrules': { 'type': 'dict', 'schema': { 'a': {'type': 'string'} } } } } keysRulesTest = { 'myDict': { 'arbitraryStringKey': { 'a': 'arbitraryStringValue' }, 'anotherArbitraryStringKey': { 'shouldNotValidate': 'arbitraryStringValue' } } } def test_rules(): v = Validator(keysrules) if not v.validate(keysRulesTest): print(v.errors) assert(0) This produces my desired outcome.
Flask-Restplus: how to model string or object?
In Flask-Restplus, I need to model an attribute value that maybe either a list of strings or a list of objects. That is it can look like this: { 'my_attribute': [ 'value1', 'value2' ] } or it can look like the following: { 'my_attribute': [ { 'name': 'value1', 'foo': 'something' }, { 'name': 'value2', 'foo': 'something else' } ] } How should I model that in Flask-Restplus’ api.model?
I've just figured this out myself. In short, create a custom field class that emits its own JSON schema. In turn the schema uses the oneOf type to specify that this is either a string or an object. from flask_restplus import fields element_object = api.model('Element_Object', { 'name': fields.String(), 'foo': fields.String() }) class StringOrObjectElement(fields.Nested): __schema_type__ = ['string','object'] def output(self, key, obj): if isinstance(obj, str): if key == 'name': return obj else: return 'default_value' return super().output(key, obj) def schema(self): schema_dict = super().schema() schema_dict.pop('type') nested_ref = schema_dict.pop('$ref') schema_dict['oneOf'] = [ { 'type': 'string' }, { '$ref': nested_ref } ] return schema_dict root_object = api.model('Root_Object', { 'my_attribute': fields.List(fields.StringOrObjectElement(element_object))