I'm working on a Django app with two models (A and B). B has a field link, which is a foreign key to A:
# models.py
class A(models.Model):
    name = models.CharField(max_length=100)
    description = models.CharField(max_length=15)
    my_bool = models.BooleanField(default=True)

class B(models.Model):
    link = models.ForeignKey(A)
    b_bool = models.BooleanField(default=link.my_bool)  # Error!
I would like the b_bool field to default to the linked A's my_bool value if no B.b_bool is provided via a graphene mutation.
Currently, using link.my_bool as a default raises the following error when making migrations:
AttributeError: 'ForeignKey' object has no attribute 'my_bool'
I don't think it will work like that. Instead, try overriding the save() method:
class B(models.Model):
    link = models.ForeignKey(A)
    b_bool = models.BooleanField(default=False)

    def save(self, *args, **kwargs):
        if not self.b_bool:
            self.b_bool = self.link.my_bool
        super(B, self).save(*args, **kwargs)
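Note that "not self.b_bool" is also true when the caller explicitly sets False, so an explicit False would be overwritten by the linked value. If you need to tell "not provided" apart from an explicit False, one option (a sketch, not part of the original answer) is to make the field nullable and only copy the linked value when it is still None:
from django.db import models

class B(models.Model):
    # A is the model defined above
    link = models.ForeignKey(A, on_delete=models.CASCADE)
    # null=True lets us distinguish "not provided" from an explicit False
    b_bool = models.BooleanField(null=True, blank=True)

    def save(self, *args, **kwargs):
        if self.b_bool is None:
            # copy the default from the linked A instance
            self.b_bool = self.link.my_bool
        super().save(*args, **kwargs)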
I have a Django model with a ForeignKey that references the model itself:
class Foo(models.Model):
    name = models.CharField(max_length=256, verbose_name="Name")
    # ... some other fields
    bar = models.ForeignKey(
        "self", on_delete=models.CASCADE, null=True, blank=True
    )

    def __str__(self):
        return self.name
I want to add a custom method to that class which resolves, on the fly, the related name into a new field, e.g. bar_resolved, when instantiating a QuerySet in a view:
from .models import Foo
foo = Foo.objects.all()
# do stuff
I've tried this:
class Foo(models.Model):
    name = models.CharField(max_length=256, verbose_name="Name")
    # ... some other fields
    bar = models.ForeignKey(
        "self", on_delete=models.CASCADE, null=True, blank=True
    )
    # preparing the resolved bar field which should contain the 'name'
    # value corresponding to the id:
    bar_resolved = models.CharField(
        max_length=256,
        verbose_name="Bar name resolved",
        null=True
    )

    def __str__(self):
        return self.name

    def resolve(self):
        if self.bar:
            self.bar_resolved = self.bar.name
        return super(Foo, self).resolve()
Then in my view:
from .models import Foo
foo = Foo.objects.all()
foo.resolve()
but it raises: 'QuerySet' object has no attribute 'resolve'
How could I achieve that? And do I need to hard-code a 'resolved' field in my model for this (I think that would be overkill)?
I do not understand why you would have a ForeignKey referencing self in the database.
Instead of using resolve, you could probably do it long before, in save() - i.e. when setting the value of bar.
Another idea that comes to mind is setting it in the __init__ method of the model.
Hope this helps.
def save(self, force_insert: bool = False, force_update: bool = False) -> None:
    if self.field is None:
        self.field = "value"
    # and so on...
    return super().save(force_insert, force_update)
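For the __init__ idea, a minimal sketch (not from the original answer) could look like this; note that reading self.bar issues one extra query per instance, which gets expensive over a large queryset:
from django.db import models

class Foo(models.Model):
    name = models.CharField(max_length=256, verbose_name="Name")
    bar = models.ForeignKey("self", on_delete=models.CASCADE, null=True, blank=True)
    bar_resolved = models.CharField(max_length=256, null=True)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # bar_id is already loaded, so this check is free;
        # reading self.bar hits the database once per instance
        if self.bar_id and not self.bar_resolved:
            self.bar_resolved = self.bar.name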
One way is to annotate [Django-doc] your queryset using F expressions [Django-doc] with bar's name field:
from django.db.models import F

foos = Foo.objects.annotate(bar_resolved=F("bar__name")).all()
for foo in foos:
    print(foo.bar_resolved)
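If you don't want to store (or annotate) anything, another option - a sketch, assuming read-only access is enough - is a property on the model instead of a database field:
from django.db import models

class Foo(models.Model):
    name = models.CharField(max_length=256, verbose_name="Name")
    bar = models.ForeignKey("self", on_delete=models.CASCADE, null=True, blank=True)

    @property
    def bar_resolved(self):
        # resolved on access; use .select_related("bar") in the view
        # to avoid one query per instance
        return self.bar.name if self.bar_id else None
In the view you would then fetch Foo.objects.select_related("bar") and read foo.bar_resolved as before.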
I would like to have a form with preselected checkboxes for a ManyToManyField.
models.py
class Store(models.Model):
    ...

class Brand(models.Model):
    stores = models.ManyToManyField(Store, blank=True, related_name="brands")
forms.py
class StoreForm(ModelForm):
    class Meta:
        model = Store
        fields = ('brands',)
I get this exception:
django.core.exceptions.FieldError: Unknown field(s) (brands) specified for Store
I know that I can add the field manually to the class:
brands = forms.ModelMultipleChoiceField(
    queryset=Brand.objects.all(),
    widget=forms.CheckboxSelectMultiple,
)
If I do this, the checkboxes are not preselected.
How is it possible to include the ManyToMany field from "the other side" of the model (from Store)?
@hedgie Changing the field on the other model is not a good option for me because I already use it. But the __init__() hint was good. I came up with this solution and it seems to work:
class StoreForm(ModelForm):
    brands = forms.ModelMultipleChoiceField(
        queryset=Brand.objects.all(),
        widget=forms.CheckboxSelectMultiple,
    )

    def __init__(self, *args, **kwargs):
        if kwargs.get('instance'):
            brand_ids = [t.pk for t in kwargs['instance'].brands.all()]
            kwargs['initial'] = {
                'brands': brand_ids,
            }
        super().__init__(*args, **kwargs)

    # https://stackoverflow.com/questions/49932426/save-many-to-many-field-django-forms
    def save(self, commit=True):
        # Get the unsaved Store instance
        instance = forms.ModelForm.save(self, False)
        # Wrap the form's 'save_m2m' method
        old_save_m2m = self.save_m2m
        def save_m2m():
            old_save_m2m()
            # This is where we actually link the store with its brands
            instance.brands.clear()
            for brand in self.cleaned_data['brands']:
                instance.brands.add(brand)
        self.save_m2m = save_m2m
        # Save the instance and the m2m data immediately
        # (the commit flag is effectively ignored here)
        instance.save()
        self.save_m2m()
        return instance
Though it does not seem very elegant. I wonder why Django does not support a better way.
One possibility is to define the field on the "other" model. So instead of writing this:
class Store(models.Model):
    ...

class Brand(models.Model):
    stores = models.ManyToManyField(Store, blank=True, related_name="brands")
You can write this:
class Brand(models.Model):
    ...

class Store(models.Model):
    brands = models.ManyToManyField(Brand, blank=True, related_name="stores")
Or, if you have manually added the field to the form, you could populate its initial value in the form's __init__() method.
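A minimal sketch of that second option, assuming the manually declared brands field from the question (you would still need a custom save(), as in the workaround above, to write the selection back):
from django import forms
from .models import Store, Brand

class StoreForm(forms.ModelForm):
    brands = forms.ModelMultipleChoiceField(
        queryset=Brand.objects.all(),
        widget=forms.CheckboxSelectMultiple,
        required=False,
    )

    class Meta:
        model = Store
        fields = ()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if self.instance.pk:
            # preselect the brands already linked to this store
            self.fields['brands'].initial = self.instance.brands.all()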
I have two models that are OneToOne related, and a third model that the second one references via a ForeignKey.
models.py
class Legal(TimeStampedModel):
    name = models.CharField('Name', max_length=255, blank=True)

class LegalCard(TimeStampedModel):
    legal = models.OneToOneField('Legal', related_name='legal_card', on_delete=models.CASCADE)
    branch = models.ForeignKey('Branch', related_name='branch', null=True)
    post_address = models.CharField('Post address', max_length=255, blank=True)

class Branch(TimeStampedModel):
    name = models.CharField('Name', max_length=511)
    code = models.CharField('Code', max_length=6)
Using DRF, I made them behave as a single model so I can create or update both:
serializer.py
class LegalSerializer(serializers.ModelSerializer):
    branch = serializers.IntegerField(source='legal_card.branch', allow_null=True, required=False)
    post_address = serializers.CharField(source='legal_card.post_address', allow_blank=True, required=False)

    class Meta:
        model = Legal
        fields = ('id',
                  'name',
                  'branch',
                  'post_address',
                  )
        depth = 2

    def create(self, validated_data):
        legal_card_data = validated_data.pop('legal_card', None)
        legal = super(LegalSerializer, self).create(validated_data)
        self.update_or_create_legal_card(legal, legal_card_data)
        return legal

    def update(self, instance, validated_data):
        legal_card_data = validated_data.pop('legal_card', None)
        self.update_or_create_legal_card(instance, legal_card_data)
        return super(LegalSerializer, self).update(instance, validated_data)

    def update_or_create_legal_card(self, legal, legal_card_data):
        LegalCard.objects.update_or_create(legal=legal, defaults=legal_card_data)
views.py
class LegalDetailView(generics.RetrieveUpdateDestroyAPIView):
    queryset = Legal.objects.all()
    serializer_class = LegalSerializer
I'm trying to save this by sending the FK as an integer (I just want to post the id of the branch), but I receive this error:
ValueError: Cannot assign "2": "LegalCard.branch" must be a "Branch" instance.
Is there any way to pass only the ID of the branch?
Thank you
In Django, if you only have the FK's id, you can assign it directly via the <field>_id attribute that is already on the object, rather than fetching the related object.
Assume you have a Legal and a Branch object, both with id 1. Then you can save a LegalCard object like this:
LegalCard.objects.create(legal_id=1, branch_id=1, post_address="Istanbul Street No:1")
Just use legal_card.branch_id instead of legal_card.branch as the source, so you get just the id and not a related object.
And set depth = 1.
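Putting both hints together, the serializer field might look roughly like this (a sketch based on the models above, not tested against the original project); sourcing branch_id means validated_data carries a plain integer, so update_or_create() no longer tries to assign an int to the FK:
class LegalSerializer(serializers.ModelSerializer):
    branch = serializers.IntegerField(source='legal_card.branch_id',
                                      allow_null=True, required=False)
    post_address = serializers.CharField(source='legal_card.post_address',
                                         allow_blank=True, required=False)

    class Meta:
        model = Legal
        fields = ('id', 'name', 'branch', 'post_address')
        depth = 1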
I'm having some trouble saving related objects with Django Rest Framework. Here are my models:
# models.py
class PowerStatus(models.Model):
    status = models.CharField(max_length=50)

class VirtualMachine(models.Model):
    power_status = models.ForeignKey(PowerStatus, verbose_name='Power status')
My serializers look like this:
# serializers.py
class PowerStatusSerializer(serializers.ModelSerializer):
    status = serializers.CharField(max_length=30)

    class Meta:
        model = PowerStatus

class VMSerializer(serializers.ModelSerializer):
    power_status = PowerStatusSerializer()

    class Meta:
        model = VirtualMachine

    def create(self, validated_data):
        power_status_data = validated_data.pop('power_status')
        vm = VirtualMachine.objects.create(**validated_data)
        PowerStatus.objects.create(vm=vm, **power_status_data)
        return vm
The error that I'm getting is: django.db.utils.IntegrityError: (1048, "Column 'power_status_id' cannot be null")
I was following http://www.django-rest-framework.org/api-guide/serializers/#dealing-with-nested-objects but am doing something wrong.
EDIT:
After dealing with the "Column cannot be null" error, another problem arose:
ValueError: Cannot assign "OrderedDict([('status', 'Running')])": "VirtualMachine.power_status" must be a "PowerStatus" instance.
In the VirtualMachine model, pass null=True to the power_status field.
class VirtualMachine(models.Model):
    power_status = models.ForeignKey(PowerStatus, verbose_name='Power status', null=True)
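That only relaxes the NOT NULL constraint, though. For the second error in the edit, a common pattern (a sketch, not from the original answer) is to create the nested PowerStatus first and hand the instance to the VirtualMachine:
class VMSerializer(serializers.ModelSerializer):
    power_status = PowerStatusSerializer()

    class Meta:
        model = VirtualMachine
        fields = '__all__'  # assumption; newer DRF versions require fields/exclude

    def create(self, validated_data):
        power_status_data = validated_data.pop('power_status')
        # create (or reuse) the related row first, then pass the instance
        power_status, _ = PowerStatus.objects.get_or_create(**power_status_data)
        return VirtualMachine.objects.create(power_status=power_status,
                                             **validated_data)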
I can't find this info in the docs or on the interwebs.
latest django-rest-framework, django 1.6.5
How does one create a ModelSerializer that can handle nested serializers where the nested model is implemented using multi-table inheritance?
e.g.
######## MODELS
class OtherModel(models.Model):
    stuff = models.CharField(max_length=255)

class MyBaseModel(models.Model):
    whaddup = models.CharField(max_length=255)
    other_model = models.ForeignKey(OtherModel)

class ModelA(MyBaseModel):
    attr_a = models.CharField(max_length=255)

class ModelB(MyBaseModel):
    attr_b = models.CharField(max_length=255)

####### SERIALIZERS
class MyBaseModelSerializer(serializers.ModelSerializer):
    class Meta:
        model = MyBaseModel

class OtherModelSerializer(serializers.ModelSerializer):
    mybasemodel_set = MyBaseModelSerializer(many=True)

    class Meta:
        model = OtherModel
This obviously doesn't work, but it illustrates what I'm trying to do here.
In OtherModelSerializer, I'd like mybasemodel_set to serialize specific representations of either ModelA or ModelB depending on what we have.
If it matters, I'm also using django-model-utils and its InheritanceManager, so I can retrieve a queryset where each instance is already an instance of the appropriate subclass.
Thanks
I've solved this issue a slightly different way.
Using:
DRF 3.5.x
django-model-utils 2.5.x
My models.py looks like this:
class Person(models.Model):
    first_name = models.CharField(max_length=40, blank=False, null=False)
    middle_name = models.CharField(max_length=80, blank=True, null=True)
    last_name = models.CharField(max_length=80, blank=False, null=False)
    family = models.ForeignKey(Family, blank=True, null=True)

class Clergy(Person):
    category = models.IntegerField(choices=CATEGORY, blank=True, null=True)
    external = models.NullBooleanField(default=False, null=True)
    clergy_status = models.ForeignKey(ClergyStatus, related_name="%(class)s_status", blank=True, null=True)

class Religious(Person):
    religious_order = models.ForeignKey(ReligiousOrder, blank=True, null=True)
    major_superior = models.ForeignKey(Person, blank=True, null=True, related_name="%(class)s_superior")

class ReligiousOrder(models.Model):
    name = models.CharField(max_length=255, blank=False, null=False)
    initials = models.CharField(max_length=20, blank=False, null=False)

class ClergyStatus(models.Model):
    display_name = models.CharField(max_length=255, blank=True, null=True)
    description = models.CharField(max_length=255, blank=True, null=True)
Basically, the base model is "Person", and a person can either be Clergy, Religious, or neither and simply be a "Person". The models that inherit from Person have special relationships of their own as well.
In my views.py I utilize a mixin to "inject" the subclasses into the queryset like so:
class PersonSubClassFieldsMixin(object):
    def get_queryset(self):
        return Person.objects.select_subclasses()

class RetrievePersonAPIView(PersonSubClassFieldsMixin, generics.RetrieveDestroyAPIView):
    serializer_class = PersonListSerializer
    ...
And then the real "unDRY" part comes in serializers.py, where I declare the "base" PersonListSerializer but override the to_representation method to return a specific serializer based on the instance type, like so:
class PersonListSerializer(serializers.ModelSerializer):
    def to_representation(self, instance):
        if isinstance(instance, Clergy):
            return ClergySerializer(instance=instance).data
        elif isinstance(instance, Religious):
            return ReligiousSerializer(instance=instance).data
        else:
            return LaySerializer(instance=instance).data

    class Meta:
        model = Person
        fields = '__all__'

class ReligiousSerializer(serializers.ModelSerializer):
    class Meta:
        model = Religious
        fields = '__all__'
        depth = 2

class LaySerializer(serializers.ModelSerializer):
    class Meta:
        model = Person
        fields = '__all__'

class ClergySerializer(serializers.ModelSerializer):
    class Meta:
        model = Clergy
        fields = '__all__'
        depth = 2
The "switch" happens in the to_representation method of the main serializer (PersonListSerializer). It looks at the instance type, and then "injects" the needed serializer. Since Clergy, Religious are all inherited from Person getting back a Person that is also a Clergy member, returns all the Person fields and all the Clergy fields. Same goes for Religious. And if the Person is neither Clergy or Religious - the base model fields are only returned.
Not sure if this is the proper approach - but it seems very flexible, and fits my usecase. Note that I save/update/create Person thru different views/serializers - so I don't have to worry about that with this type of setup.
I was able to do this by creating a custom related field:
class MyBaseModelField(serializers.RelatedField):
    def to_native(self, value):
        if isinstance(value, ModelA):
            a_s = ModelASerializer(instance=value)
            return a_s.data
        if isinstance(value, ModelB):
            b_s = ModelBSerializer(instance=value)
            return b_s.data
        raise NotImplementedError

class OtherModelSerializer(serializers.ModelSerializer):
    mybasemodel_set = MyBaseModelField(many=True)

    class Meta:
        model = OtherModel
        # make sure we manually include the reverse relation
        fields = ('mybasemodel_set', )
I do have concerns that instantiating a Serializer for each object in the reverse relation queryset is expensive, so I'm wondering if there is a better way to do this.
Another approach I tried was dynamically changing the model field on MyBaseModelSerializer inside of __init__, but I ran into the issue described here:
django rest framework nested modelserializer
Using Django 3.1, I found that it is possible to override get_serializer instead of get_serializer_class, in which case you can access the instance as well as self.action and more.
By default get_serializer will call get_serializer_class, but this behavior can be adjusted to your needs.
This is cleaner and easier than the solutions proposed above, so I'm adding it to the thread.
Example:
class MySubclassViewSet(viewsets.ModelViewSet):
    # add your normal fields and methods ...

    def get_serializer(self, *args, **kwargs):
        if self.action in ('list', 'destroy'):
            return MyListSerializer(args[0], **kwargs)
        if self.action in ('retrieve', ):
            instance = args[0]
            if "really?" in instance.name:  # or check if instance of a certain Model...
                return MyReallyCoolSerializer(instance)
            else:
                return MyNotCoolSerializer(instance)
        # ...
        return MyListSerializer(*args, **kwargs)  # default
I'm attempting to use a solution that involves different serializer subclasses for the different model subclasses:
class MyBaseModelSerializer(serializers.ModelSerializer):
    @staticmethod
    def _get_alt_class(cls, args, kwargs):
        if cls != MyBaseModelSerializer:
            # we're instantiating a subclass already, use that class
            return cls
        # < logic to choose an alternative class to use >
        # in my case, I'm inspecting kwargs["data"] to make a decision
        # alt_cls = SomeSubClass
        return alt_cls

    def __new__(cls, *args, **kwargs):
        alt_cls = MyBaseModelSerializer._get_alt_class(cls, args, kwargs)
        return super(MyBaseModelSerializer, alt_cls).__new__(alt_cls, *args, **kwargs)

    class Meta:
        model = MyBaseModel

class ModelASerializer(MyBaseModelSerializer):
    class Meta:
        model = ModelA

class ModelBSerializer(MyBaseModelSerializer):
    class Meta:
        model = ModelB
That is, when you try to instantiate an object of type MyBaseModelSerializer, you actually end up with an object of one of the subclasses, which serializes (and, crucially for me, deserializes) correctly.
I've just started using this, so it's possible that there are problems I've not run into yet.
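As a hypothetical usage example (the subclass-picking logic is elided above, so the behaviour here is an assumption), instantiating the base serializer hands back a subclass instance:
# hypothetical: assumes _get_alt_class() returns ModelASerializer
# whenever kwargs["data"] contains an "attr_a" key
serializer = MyBaseModelSerializer(data={"whaddup": "hi", "attr_a": "x"})
print(type(serializer).__name__)  # -> "ModelASerializer"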
I found this post via Google while trying to figure out how to handle multi-table inheritance without having to check the model instance type, and I implemented my own solution.
I created a class factory and a mixin to generate the serializers for the child classes, with the help of InheritanceManager from django-model-utils.
models.py
from django.db import models
from model_utils.managers import InheritanceManager

class Location(models.Model):
    name = models.CharField(max_length=50)
    address = models.CharField(max_length=80)

    # Use the InheritanceManager for select_subclasses()
    objects = InheritanceManager()

class Restaurant(Location):
    serves_hot_dogs = models.BooleanField(default=False)
    serves_pizza = models.BooleanField(default=False)
serializers.py
from rest_framework import serializers
from .models import Location

def modelserializer_factory(model, class_name='ModelFactorySerializer',
                            meta_cls=None, **kwargs):
    """Generate a ModelSerializer based on Model"""
    if meta_cls is None:
        # Create a Meta class with the model passed
        meta_cls = type('Meta', (object,), dict(model=model))
    elif not hasattr(meta_cls, 'model'):
        # If a meta_cls is provided but did not include a model,
        # set it to the model passed into this function
        meta_cls.model = model
    # Create the ModelSerializer class with the Meta subclass
    # we created above; also pass in any additional keyword
    # arguments via kwargs
    ModelFactorySerializer = type(class_name, (serializers.ModelSerializer,),
                                  dict(Meta=meta_cls, **kwargs))
    ModelFactorySerializer.__name__ = class_name
    return ModelFactorySerializer
class InheritedModelSerializerMixin:
    def to_representation(self, instance):
        # Get the model of the instance
        model = instance._meta.model
        # Override the model with the inherited model
        self.Meta.model = model
        # Create the serializer via the modelserializer_factory
        # This will use the name of the class this is mixed with.
        serializer = modelserializer_factory(model, self.__class__.__name__,
                                             meta_cls=self.Meta)
        # Instantiate the Serializer class with the instance
        # and return the data
        return serializer(instance=instance).data

# Mix in the InheritedModelSerializerMixin
class LocationSerializer(InheritedModelSerializerMixin, serializers.ModelSerializer):
    class Meta:
        model = Location  # 'model' is optional since it will use
                          # the instance's model
        exclude = ('serves_pizza',)  # everything else works as well
        depth = 2  # including depth
views.py
from rest_framework import viewsets

from .models import Location
from .serializers import LocationSerializer

# Any view should work.
# This is an example using viewsets.ReadOnlyModelViewSet
# Everything else works as usual. You will need to chain
# ".select_subclasses()" to the queryset to select the
# child classes.
class LocationViewSet(viewsets.ReadOnlyModelViewSet):
    queryset = Location.objects.all().select_subclasses()
    serializer_class = LocationSerializer