sqlalchemy : how can i map an existing class without modifying it? - python

I have a certain number of python classes that i would like to map to tables in a database with python sqlalchemy. I saw examples where the mapped class is derived from an sqlalchemy base class. I don't want to do that. Is there any other way ?
For example, how to map this simple class ?
class Person:
def __init__(self, firstname: str = "x", name: str = "y", age: int = 0):
self.firstname = firstname
self.name = name
self.age = age
def __str__(self) -> str:
return f"[{self.__firstname},{self.__name},{self.__age}]"
#property
def id(self):
return self.__id
#property
def firstname(self) -> str:
return self.__firstname
#property
def name(self) -> str:
return self.__name
#property
def age(self) -> int:
return self.__age
# setters
#id.setter
def id(self, id: int):
if not isinstance(id,int) or id<=0:
raise MyException(f"...")
#firstname.setter
def firstname(self, firstname: str):
if Utils.is_string_ok(firstname):
self.__firstname = firstname.strip()
else:
raise MyException("...")
#name.setter
def name(self, name: str):
if Utils.is_string_ok(name):
self.__name = name.strip()
else:
raise MyException("...")
#age.setter
def age(self, age: int):
error = False
if isinstance(age, int):
if age >= 0:
self.__age = age
else:
error = True
else:
error = True
if error:
raise MyException("...")
I want to map it to a table with columns (col1,col2,col3,col4) for example (arbitrary names different from class properties).

For anyone interested, i finally got it (below i changed the identifiers of my working code to match the original post):
# imports
from sqlalchemy import create_engine, Table, Column, Integer, String, MetaData
from sqlalchemy.orm import mapper, sessionmaker
from Person import Person
# mysql database
engine = create_engine("mysql+mysqlconnector://root#localhost/dbpersonnes")
# metadata
metadata = MetaData()
# table
persons_table = Table("persons", metadata,
Column('col1', Integer, primary_key=True),
Column('col2', String(30), nullable=False),
Column("col3", String(30), nullable=False),
Column("col4", Integer, nullable=False)
)
# mapping
mapper(Person, persons_table, properties={
'id': persons_table.c.col1,
'firstname': persons_table.c.col2,
'name': persons_table.c.col3,
'age': persons_table.c.col4,
})
# session factory
Session = sessionmaker()
Session.configure(bind=engine)
# session
session = Session()
# insert
session.add(Personne(67, "x", "y", 10))
session.commit()
# query
personnes = session.query(Personne).all()
# logs
for personne in personnes:
print(personne)

Related

Resolve "none is not an allower value"

I am currently trying to write an application in which one object (Room) inherits from the other (Building). But when I try to create a room using the Swagger API (create Room) I get the following error:
pydantic.error_wrappers.ValidationError: 1 validation error for Room
response -> content
none is not an allowed value (type=type_error.none.not_allowed).
I have only recently started with SQL and Python and am having a hard time solving this problem. I would be very happy about help and a possible explanation of what I am doing wrong.
Here is my Code:
models.py:
class Building(_database.Base):
__tablename__ = "buildings"
id = _sql.Column(_sql.Integer, primary_key=True, index=True)
title = _sql.Column(_sql.String, index=True)
date_created = _sql.Column(_sql.DateTime, default=_dt.datetime.utcnow)
date_last_updated = _sql.Column(_sql.DateTime, default=_dt.datetime.utcnow)
rooms = _orm.relationship("Room", back_populates="owner")
class Room(_database.Base):
__tablename__ = "rooms"
id = _sql.Column(_sql.Integer, primary_key=True, index=True)
title = _sql.Column(_sql.String, index=True)
content = _sql.Column(_sql.String, index=True)
owner_id = _sql.Column(_sql.Integer, _sql.ForeignKey("buildings.id"))
building_title = _sql.Column(_sql.String, index=True)
date_created = _sql.Column(_sql.DateTime, default=_dt.datetime.utcnow)
date_last_updated = _sql.Column(_sql.DateTime, default=_dt.datetime.utcnow)
owner = _orm.relationship("Building", back_populates="rooms")
schemas.py:
#Rooms
class _RoomBase(_pydantic.BaseModel):
title: str
content: str
class RoomCreate(_RoomBase):
pass
class RoomUpdate(_RoomBase):
pass
class Room(_RoomBase):
id: int
owner_id: int
building_title: str
date_created: _dt.datetime
date_last_updated: _dt.datetime
class Config:
orm_mode = True
#Buildings
class _BuildingBase(_pydantic.BaseModel):
title: str
class BuildingCreate(_BuildingBase):
pass
class BuildingUpdate(_BuildingBase):
pass
class Building(_BuildingBase):
id: int
date_created: _dt.datetime
date_last_updated: _dt.datetime
rooms: List[Room] = []
class Config:
orm_mode = True
services.py
#Buildings
def create_building(db: _orm.Session, building: _schemas.BuildingCreate):
building = _models.Building(title=building.title)
db.add(building)
db.commit()
db.refresh(building)
return building
def get_building(db: _orm.Session, building_id: int ):
return db.query(_models.Building).filter(_models.Building.id == building_id).first()
def get_building_by_title(db: _orm.Session, building_title: str ):
return db.query(_models.Building).filter(_models.Building.title == building_title).first()
def delete_building(db: _orm.Session, building_id: int):
db.query(_models.Building).filter(_models.Building.id == building_id).delete()
db.commit()
def update_building(db: _orm.Session, building_id: int, building: _schemas.BuildingCreate):
db_building = get_building(db=db, building_id=building_id)
db_building.title = building.title
db.commit()
db.refresh(db_building)
return db_building
#Rooms
def create_room(db: _orm.Session, room: _schemas.RoomCreate, building_id:int, building_title: str):
room = _models.Room(title=room.title,owner_id=building_id, building_title=building_title)
db.add(room)
db.commit()
db.refresh(room)
return room
main.py
#Building
#app.post("/buildings/", response_model=_schemas.Building)
def create_building(
building: _schemas.BuildingCreate, db: _orm.Session = _fastapi.Depends(_services.get_db)
):
return _services.create_building(db=db, building=building)
#app.get("/buildings/{building_id}", response_model=_schemas.Building)
def read_building(building_id: int, db: _orm.Session = _fastapi.Depends(_services.get_db)):
building = _services.get_building(db=db, building_id=building_id)
if building is None:
raise _fastapi.HTTPException(
status_code=404, detail="sorry this building does not exist"
)
return building
#app.delete("/buildings/{building_id}")
def delete_building(building_id: int, db: _orm.Session = _fastapi.Depends(_services.get_db)):
_services.delete_building(db=db, building_id=building_id)
return {"message": f"successfully deleted building with id: {building_id}"}
#app.put("/buildings/{building_id}", response_model=_schemas.Building)
def update_building(
building_id: int,
building: _schemas.BuildingCreate,
db: _orm.Session = _fastapi.Depends(_services.get_db),
):
return _services.update_building(db=db, building=building, building_id=building_id)
#Room
#app.post("/rooms/", response_model=_schemas.Room)
def create_room(
building_title: str,
room: _schemas.RoomCreate,
db: _orm.Session = _fastapi.Depends(_services.get_db),
):
db_building = _services.get_building_by_title(db=db, building_title=building_title)
if db_building is None:
raise _fastapi.HTTPException(
status_code=404, detail="sorry this building does not exist"
)
return _services.create_room(db=db, room=room,building_id=db_building.id, building_title=db_building.title)
Thank you for your help!
As MatsLindh points out, RoomCreate has a content field that is not used in services.create_room. Simply changing to
def create_room(db: _orm.Session, room: _schemas.RoomCreate, building_id:int, building_title: str):
room = _models.Room(title=room.title, content=room.content, owner_id=building_id, building_title=building_title)
If content is required, you should probably also define your SQLAlchemy model as content = _sql.Column(_sql.String, nullable=False, index=True).
For next time, please learn how to provide a minimal reproducible example

How do I properly override `__getattr__` in SQLAlchemy to access dict keys as attributes?

I'm trying to override __getattr__ in SQLAlchemy so I can access certain dictionary fields using object.field syntax rather than having to do object.fields["field"].
However, SQLAlchemy isn't recognizing changes to the field if I use the dot notation syntax. Accessing the field using normal object.fields["field"] however works as expected.
I've provided a code sample below.
#!/usr/bin/env python3
import sqlalchemy
from sqlalchemy import Column, Integer, JSON
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import sessionmaker
Session = sessionmaker()
Base = declarative_base()
class Track(Base): # noqa: WPS230
__tablename__ = "track"
id = Column(Integer, primary_key=True)
_fields = Column(MutableDict.as_mutable(JSON), default="{}")
def __init__(self, id):
self.id = id
self._fields = {}
self._fields["custom"] = "field"
def __getattr__(self, name: str):
"""See if ``name`` is a custom field."""
try:
return self.__dict__["_fields"][name]
except KeyError:
raise AttributeError from None
def __setattr__(self, name, value):
"""Set custom fields if a valid key."""
if "_fields" in self.__dict__ and name in self.__dict__["_fields"]:
self._fields[name] = value
else:
super().__setattr__(name, value)
engine = sqlalchemy.create_engine("sqlite:///:memory:")
Session.configure(bind=engine)
Base.metadata.create_all(engine) # creates tables
session = Session()
track = Track(id=1)
session.add(track)
session.commit()
assert track.custom
track.custom = "new"
assert track in session.dirty
assert track.custom == "new"
I believe it has something to do with how sqlalchemy loads __dict__, but not sure how to work around it to get my desired behavior.
With the help of #CaseIIT on github, I came up with the following solution using a list FIELDS of fields that can be set:
class Track(Base): # noqa: WPS230
__tablename__ = "track"
FIELDS = ["custom"]
id = Column(Integer, primary_key=True)
_fields = Column(MutableDict.as_mutable(JSON), default="{}")
def __init__(self):
self._fields = {}
self._fields["custom"] = "field"
def __getattr__(self, name: str):
"""See if ``name`` is a custom field."""
if name in self.FIELDS:
return self._fields[name]
else:
raise AttributeError from None
def __setattr__(self, name, value):
"""Set custom custom_fields if a valid key."""
if name in self.FIELDS:
self._fields[name] = value
else:
super().__setattr__(name, value)
It feels wrong to have a duplicate list of accessible fields between _fields.keys() and FIELDS, but I can't seem to get around the issues that come with trying to inspect _fields.

Insert Nested Objects into mongodb in python3 (Nested object is a class Instance)

I have a nested structure like this:
class Student(object):
def __init__(self, name, age, address):
self.name = name
self.age = age
self.address = address
class Address(object):
def __init__(self, street, pcode, another_obj):
self.street = street
self.postal_code = pcode
self.another_obj = another_obj
class AnotherObject(object):
def __init__(self):
self.someattr = 'some_init_value'
# Now i amm going to save data like this
another_obj = AnotherObject()
address = Address('xyz', 'xyz', another_obj)
obj = Student('abc', 32, address)
The object obj is an instance of a class. I am doing collection.insert_one(obj). Usually, I would do obj.dict to get "dict" of class instance which is compatible with pymongo but it does not convert the nested objects into dict too.
The problem here is that the "address" and "some_other_object" are also some other class instances and that causes bson.errors.InvalidDocument Exception while insertion.
Is there a way to convert nested class instances/documents (address & some_other_object) into dict or any other type acceptable to mongodb.
The package I am using for mongodb communication is pymongo v3.9.0.
Error is TypeError:
document must be an instance of dict, bson.son.SON,
bson.raw_bson.RawBSONDocument, or a type that inherits from
collections.MutableMapping
Add in some to_dict() methods to your classes:
from pymongo import MongoClient
class Student(object):
def __init__(self, name, age, address):
self.name = name
self.age = age
self.address = address
def to_dict(self) -> dict:
record = {
"name": self.name,
"age": self.age,
"address": self.address.to_dict() if self.address is not None else None
}
return record
class Address(object):
def __init__(self, street, pcode, another_obj):
self.street = street
self.postal_code = pcode
self.another_obj = another_obj
def to_dict(self) -> dict:
record = {
"street": self.street,
"postal_code": self.postal_code,
"another_obj": self.another_obj.to_dict() if self.another_obj is not None else None
}
return record
class AnotherObject(object):
def __init__(self):
self.someattr = 'some_init_value'
def to_dict(self) -> dict:
record = {
"someattr": self.someattr
}
return record
# Now i amm going to save data like this
another_obj = AnotherObject()
address = Address('xyz', 'xyz', another_obj)
obj = Student('abc', 32, address)
db = MongoClient()['yourdatabase']
db.collection.insert_one(obj.to_dict())
print(db.collection.find_one({}))

How to select different type of columns dynamically to use in where clause?

I have a following table in sqlalchemy:
class FieldType(enum.Enum):
INT_FIELD = 0
FLOAT_FIELD = 1
STRING_FIELD = 2
class EAVTable(Base):
__tablename__ = 'EAVTable'
field_name = Column(Stirng, primary_key=True)
field_type = Column(Enum(FieldType))
int_field = Column(Integer)
float_field = Column(Float)
string_field = Column(String)
This is to model the EAV model which fits my business purpose.
Now to use it easily in the code I have the following hybrid_property.
#hybrid_propderty
def value(self):
if self.field_type == FieldType.INT_FIELD:
return self.int_field
...
#value.setter
def value(self, value):
if type(value) == int:
self.field_type = FieldType.INT_FIELD
self.int_field = value
...
This works fine when I try to get and set the fields in Python code. But I still have a problem:
session.query(EAVTable).filter(EAVTable.value == 123)
This does not work out of the box but I had an idea of using hybrid.expression where we use a case statement:
#value.expression
def value(cls):
return case(
[
(cls.field_type == FieldType.INT_FIELD, cls.int_field),
(cls.field_type == FieldType.FLOAT_FIELD, cls.float_field),
...
]
)
This in theory works, for example, the SQL generated for query session.query(EAVTable.value = 123 looks like:
select * from where case
when field_type = INT_FIELD then int_field
when field_type = FLOAT_FIELD then float_field
when field_type = STRING_FIELD then string_field
end = 123;
Which semantically looks like what I like, but later I find that the case expression requires all the cases have the same type, or they are cast into the same type.
I understand this is a requirement from the SQL language and has nothing to do with sqlachemy, but for more seasoned sqlalchemy user, is there any easy way to do what I want to achieve? Is there a way to walk around this constraint?
You could move the comparison inside the CASE expression using a custom comparator:
from sqlalchemy.ext.hybrid import Comparator
class PolymorphicComparator(Comparator):
def __init__(self, cls):
self.cls = cls
def __clause_element__(self):
# Since SQL doesn't allow polymorphism here, don't bother trying.
raise NotImplementedError(
f"{type(self).__name__} cannot be used as a clause")
def operate(self, op, other):
cls = self.cls
return case(
[
(cls.field_type == field_type, op(field, other))
for field_type, field in [
(FieldType.INT_FIELD, cls.int_field),
(FieldType.FLOAT_FIELD, cls.float_field),
(FieldType.STRING_FIELD, cls.string_field),
]
],
else_=False
)
class EAVTable(Base):
...
# This replaces #value.expression
#value.comparator
def value(cls):
return PolymorphicComparator(cls)
This way the common type is just boolean.

How to "inspect" custom types in sqlalchemy

I have a very simple ChoiceString custom column/data type:
class ChoiceString(types.TypeDecorator):
impl = types.String
def __init__(self, choices, **kw):
self.choices = dict(choices)
super(ChoiceString, self).__init__(**kw)
def process_bind_param(self, value, dialect):
return [k for k, v in self.choices.iteritems() if v == value][0]
def process_result_value(self, value, dialect):
return self.choices[value]
And I am iterating over the table columns using a mapper:
from sqlalchemy.inspection import inspect
mapper = inspect(SomeTableClass)
for col in mapper.columns:
print col
# how to check the choice values?
print dir(mapper.columns[col]) # does not show the 'choices' attribute
print dir(inspect(mapper.columns[col])) # does not show the 'choices' attribute
print mapper.columns[col].choices # error
But I am can't seem to access the choices custom attribute of the custom type. I also tried "inspecting" the column directly instead of the class, but that doesn't work either.
So how do we access custom attributes of custom types in sqlalchemy, while inspecting?
You're inspecting the Column objects, not their types. Access the type through the type attribute of a Column object:
In [9]: class SomeTableClass(Base):
...: __tablename__ = 'sometableclass'
...: id = Column(Integer, primary_key=True)
...: choices = Column(ChoiceString({ 'asdf': 'qwer'}))
...:
In [10]: mapper = inspect(SomeTableClass)
In [12]: mapper.columns['choices']
Out[12]: Column('choices', ChoiceString(), table=<sometableclass>)
In [13]: mapper.columns['choices'].type.choices
Out[13]: {'asdf': 'qwer'}
The columns can be accessed through the __table__.columns collection. The type and the underlying python type can be accessed through col.type and cole.type.python_type, e.g:
import enum
from sqlalchemy import Column, Enum, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Gender(enum.Enum):
MALE = "male"
FEMALE = "female"
class Person(Base):
__tablename__ = "table_person"
id = Column(Integer, primary_key=True)
name = Column(String(20))
gender = Column(Enum(Gender))
address = Column(Text)
def test_inspect_columns():
col_id = Person.__table__.columns["id"]
assert isinstance(col_id.type, Integer)
assert col_id.type.python_type is int
col_name = Person.__table__.columns["name"]
assert isinstance(col_name.type, String)
assert col_name.type.python_type is str
col_gender = Person.__table__.columns["gender"]
assert isinstance(col_gender.type, Enum)
assert col_gender.type.python_type is Gender
col_address = Person.__table__.columns["address"]
assert isinstance(col_address.type, Text)
assert col_address.type.python_type is str

Categories

Resources