Is using an instance method inside another instance method allowed in Python?

I have created a class Rational with multiple methods. If it is initialized as follows
r = Rational(49, 168)
I want this to be true:
print(r.get_num() == 7 and r.get_denom() == 24)
I've written the code below, which crashes with NameError: name 'highest_common_denominator' is not defined.
class Rational:
    def __init__(self, numerator, denominator=1):
        self.numerator = numerator
        self.denominator = denominator

    def cd_numerators(self):
        list_cd_numerators = []
        for i in range(1, self.numerator):
            if self.numerator % i == 0:
                list_cd_numerators.append(i)
        return list_cd_numerators

    def cd_denominators(self):
        list_cd_denominators = []
        for i in range(1, self.denominator):
            if self.denominator % i == 0:
                list_cd_denominators.append(i)
        return list_cd_denominators

    def highest_common_denominator(self, a, b):
        highest_common_denominator1 = 1
        for i in a:
            if i in b:
                highest_common_denominator1 = i
        return highest_common_denominator1

    def get_num(self):
        common_denominator = highest_common_denominator(cd_numerators, cd_denominators)
        numerator = self.numerator / common_denominator
        self.numerator = numerator
        return self.numerator

    def get_denom(self):
        common_denominator = highest_common_denominator(cd_numerators, cd_denominators)
        denominator = self.denominator / common_denominator
        self.denominator == denominator
        return self.denominator
I cannot understand what I'm doing wrong and would be appreciative of some help.
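For reference, here is a minimal sketch of my own (not from the original post) showing how helper methods can be reached through self; it uses math.gcd and a hypothetical _common_divisor method instead of the divisor lists, just to keep the example short:
import math

class Rational:
    def __init__(self, numerator, denominator=1):
        self.numerator = numerator
        self.denominator = denominator

    def _common_divisor(self):
        # Other instance methods and attributes are reached through self.
        return math.gcd(self.numerator, self.denominator)

    def get_num(self):
        # Integer division keeps the result an int (49 // 7 == 7).
        return self.numerator // self._common_divisor()

    def get_denom(self):
        return self.denominator // self._common_divisor()

r = Rational(49, 168)
print(r.get_num() == 7 and r.get_denom() == 24)  # True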

Related

Python: Operation between integer and custom class

I created a class called Rational (which represents a rational number) with a method that allows it to be added to another Rational object, but I am having a hard time figuring out how to allow it to be added to an integer through a method inside the Rational class.
# This already works
a = Rational(1, 2)
b = Rational(1, 1)
# a + b -> 3/2
# This is what I wish also worked
# a + 1 -> 3/2
The code I have:
class Rational:
    def __init__(self, num: int, den: int = 1):
        self.num = num
        self.den = den
        try:
            value = self.num / self.den
        except:
            print("The denominator cannot be 0.")

    def __str__(self):
        if self.den == 1:
            return f"{self.num}"
        elif self.den > 0:
            return f"{self.num}/{self.den}"
        elif self.den < 0:
            return f"{-self.num}/{-self.den}"

    def __add__(self, other):
        if self.den != other.den:
            num_new = (self.num * other.den) + (other.num * self.den)
            den_new = self.den * other.den
        else:
            num_new = self.num + other.num
            den_new = self.den
        return Rational(num_new, den_new)
Is there a simple way to make this happen?
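One common approach (a sketch of my own, not from the original post) is to coerce a plain int into a Rational inside __add__, and to alias __radd__ so that 1 + a works as well:
class Rational:
    def __init__(self, num: int, den: int = 1):
        self.num = num
        self.den = den

    def __str__(self):
        return f"{self.num}/{self.den}"

    def __add__(self, other):
        # Treat a plain int as a Rational with denominator 1.
        if isinstance(other, int):
            other = Rational(other)
        if not isinstance(other, Rational):
            return NotImplemented
        num_new = self.num * other.den + other.num * self.den
        den_new = self.den * other.den
        return Rational(num_new, den_new)

    # Called for int + Rational, after int.__add__ returns NotImplemented.
    __radd__ = __add__

print(Rational(1, 2) + 1)  # 3/2
print(1 + Rational(1, 2))  # 3/2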

Accept only objects in a method and return new object

I want to create a method that only takes objects of the class it is defined in, manipulates them and returns a new object of the same class.
For example:
class fraction():
    def __init__(self, counter, denominator):
        self.counter = counter
        self.denominator = denominator

    def add(self, (object of class fraction)):
        # (add the object the method is called on and the object passed in)
        return (new object of the class fraction)
Does anyone have an idea how to get this done?
best regards,
David
You can do something like this:
class Fraction:
    def __init__(self, counter, denominator):
        self.counter = counter
        self.denominator = denominator

    def add(self, fraction):
        return Fraction(self.counter * fraction.denominator + fraction.counter * self.denominator,
                        self.denominator * fraction.denominator)

a = Fraction(2, 3)
b = Fraction(3, 5)
a_plus_b = a.add(b)
print(a_plus_b.counter, a_plus_b.denominator)
You can add type checking directly in the add function:
def add(self, fraction):
    if not isinstance(fraction, Fraction):
        raise TypeError(f"{fraction} is not a Fraction.")
    return Fraction(self.counter * fraction.denominator + fraction.counter * self.denominator,
                    self.denominator * fraction.denominator)
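With that check in place, passing anything that is not a Fraction fails fast, for example:
a = Fraction(2, 3)
a.add(2)  # raises TypeError: 2 is not a Fraction.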
If you want to integrate this with the standard fractions module, you can make a class inheriting from fractions.Fraction, which will give you a nice irreducible fraction:
import fractions

class MyFraction(fractions.Fraction):
    def add(self, fraction):
        return MyFraction(self.numerator * fraction.denominator + fraction.numerator * self.denominator,
                          self.denominator * fraction.denominator)

a = MyFraction(2, 3)
b = MyFraction(10, 6)
a_plus_b = a.add(b)
print(a_plus_b.numerator, a_plus_b.denominator)
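For what it is worth, that example should print 7 3: fractions.Fraction reduces its arguments on construction, so MyFraction(10, 6) is stored as 5/3 and the sum comes back already reduced to 7/3.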

GCD implementation of fraction addition not working properly

I am following the Runestone Academy Python 3 course and tried to implement an 'addition' feature for fractions using a class, but I am getting an error.
When I am not using the GCD implementation, the code runs fine.
Here is my code:
class Fraction:
    def __init__(self, top, bottom):
        self.num = top
        self.den = bottom

    def show(self):
        print(f'{self.num}/{self.den}')

    def __str__(self):
        return f'{self.num}/{self.den}'

    # adding without GCD implementation
    # def __add__(self, other_fraction):
    #     new_num = self.num * other_fraction.den + self.den * other_fraction.num
    #     new_den = self.den * other_fraction.den
    #     return Fraction(new_num, new_den)

    # adding with GCD implementation
    def gcd(self, m, n):
        while m % n != 0:
            m, n = n, m % n
        return n

    def __add__(self, other_fraction):
        new_num = self.num * other_fraction.den + self.den * other_fraction.num
        new_den = self.den * other_fraction.den
        common = gcd(new_num, new_den)
        return Fraction(new_num // common, new_den // common)

    # my attempt of adding two fractions by creating a method 'add'
    # def add(self, other_fraction):
    #     new_num = self.num * other_fraction.den + self.den * other_fraction.num
    #     new_den = self.den * other_fraction.den
    #     return Fraction(new_num, new_den)

# my_fraction = Fraction(3, 5)
# print(my_fraction)
# print(f'I ate {my_fraction} of my pizza')
# my_fraction.__str__()
# str(my_fraction)

f1 = Fraction(1, 4)
f2 = Fraction(1, 2)
f3 = f1 + f2
# f3 = f1.add(f2)
print(f3)
This is the error I am getting:
Traceback (most recent call last):
File "D:/Python/Mosh_Lec/app.py", line 74, in <module>
f3 = f1 + f2
File "D:/Python/Mosh_Lec/app.py", line 53, in __add__
common = gcd(new_num, new_den)
NameError: name 'gcd' is not defined
I also tried this variation, but got the same error:
def gcd(self, m, n):
    self.num = m
    self.den = n
    while self.num % self.den != 0:
        self.num, self.den = self.den, self.num % self.den
    return self.den
The gcd method should not logically be part of the Fraction class. Indeed, you can call the gcd method with any two numbers; they do not need to be the numerator and denominator of a fraction. Therefore I would move the gcd function outside of the class:
def gcd(m, n):
    ...

class Fraction:
    ...
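Spelled out, that layout could look like the sketch below (my own code, reusing the names from the question; math.gcd from the standard library would do the same job):
def gcd(m, n):
    while m % n != 0:
        m, n = n, m % n
    return n

class Fraction:
    def __init__(self, top, bottom):
        self.num = top
        self.den = bottom

    def __str__(self):
        return f'{self.num}/{self.den}'

    def __add__(self, other_fraction):
        new_num = self.num * other_fraction.den + self.den * other_fraction.num
        new_den = self.den * other_fraction.den
        common = gcd(new_num, new_den)  # plain module-level function, no self needed
        return Fraction(new_num // common, new_den // common)

print(Fraction(1, 4) + Fraction(1, 2))  # 3/4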
Look at this:
NameError: name 'gcd' is not defined
It means that Python cannot find a function (or method) named gcd. Of course! It needs to be called on a Fraction object, so try to change your source code at line 32:
- common = gcd(new_num, new_den)
+ common = self.gcd(new_num, new_den)
self is a Fraction object.
By the way, a method such as gcd that does not use the parameter self should be defined as a static method:
class Fraction:
    ...

    @staticmethod
    def _gcd(m: int, n: int):
        ...

    ...
And call the method as Fraction._gcd (the leading underscore means that it is a private function (or method)).
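Filled in, that suggestion might look like the following rough sketch (my own code, not from the answer):
class Fraction:
    def __init__(self, top, bottom):
        self.num = top
        self.den = bottom

    @staticmethod
    def _gcd(m: int, n: int) -> int:
        while m % n != 0:
            m, n = n, m % n
        return n

    def __add__(self, other_fraction):
        new_num = self.num * other_fraction.den + self.den * other_fraction.num
        new_den = self.den * other_fraction.den
        common = Fraction._gcd(new_num, new_den)  # no instance state involved
        return Fraction(new_num // common, new_den // common)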
It is a simple issue once you look at the error:
File "D:/Python/Mosh_Lec/app.py", line 53, in __add__
    common = gcd(new_num, new_den)
NameError: name 'gcd' is not defined
You did define gcd, so why does Python say it is not defined? Because it is a method, it has to be looked up through the instance:
common = self.gcd(new_num, new_den)
Put it into __add__ like this:
def __add__(self, other_fraction):
    new_num = self.num * other_fraction.den + self.den * other_fraction.num
    new_den = self.den * other_fraction.den
    common = self.gcd(new_num, new_den)
    return Fraction(new_num // common, new_den // common)
and your problem is solved.

Module Won't recognize my function

I have code for a Fraction class:
class Fraction:
    def __init__(self, top, bottom):
        self.num = top
        self.den = bottom

    def __str__(self):
        return str(self.num) + "/" + str(self.den)

    def show(self):
        print(self.num, "/", self.den)

    def gcd(m, n):
        while m % n != 0:
            oldm = m
            oldn = n
            m = oldn
            n = oldm % oldn
        return n

    def __add__(self, otherfraction):
        newnum = self.num * otherfraction.den + \
                 self.den * otherfraction.num
        newden = self.den * otherfraction.den
        common = gcd(newnum, newden)
        return Fraction(newnum // common, newden // common)

    def __eq__(self, other):
        firstnum = self.num * other.den
        secondnum = other.num * self.den
        return firstnum == secondnum
When I run it and try to add two fractions, it pops up saying:
File "/Users/----/Downloads/Listings/listing_1_9.py", line 25,
in __add__
common = gcd(newnum,newden)
NameError: global name 'gcd' is not defined
In your code, gcd is a method of Fraction, so you should use self.gcd when referring to it from inside another method (and, since your gcd does not take self, either add that parameter or mark it @staticmethod so the call matches its signature).
Use self.gcd instead.
Explanation
NameError: global name 'gcd' is not defined
That's because gcd is not a global; it's a method of Fraction.
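For completeness, here is a sketch of my own (not from the answers) showing the two small changes together: gcd gains a self parameter and __add__ looks it up through the instance:
def gcd(self, m, n):
    while m % n != 0:
        oldm = m
        oldn = n
        m = oldn
        n = oldm % oldn
    return n

def __add__(self, otherfraction):
    newnum = self.num * otherfraction.den + \
             self.den * otherfraction.num
    newden = self.den * otherfraction.den
    common = self.gcd(newnum, newden)  # resolved as a method of this Fraction
    return Fraction(newnum // common, newden // common)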

Overloading Operators with MATLAB

I'm currently writing code to perform Gaussian elimination in MATLAB and then write out the code needed to generate a LaTeX file showing all the steps. A lot of the time when I do Gaussian elimination the answers start turning into fractions. So I thought, as a nice learning exercise for classes in MATLAB, that I would write a Fraction class. But I have no clue how to overload operators, and frankly MathWorks' documentation wasn't helpful.
classdef Fraction
    properties
        numer
        denom
    end
    methods
        function a = Fraction(numer,denom)
            a.denom = denom;
            a.numer = numer;
        end
        function r = mtimes(a,b)
            r = Fraction(a.numer*b.numer,a.denom*b.demon);
        end
        function r = plus(a,b)
            c = a.numer*b.denom+a.denom*b.numer;
            d = a.denom*b.denom;
            r = Fraction(c,d);
        function r = minus(a,b)
            c = a.numer*b.denom-a.denom*b.numer;
            d = a.denom*b.denom;
            r = Fraction(c,d);
        end
        function r = mrdivide(a,b)
            r = Fraction(a.numer*b.denom,a.denom*b.numer);
        end
        function b = reduceFrac(a)
            x = a.numer;
            y = b.denom;
            while y ~= 0
                x = y;
                y = mod(x,y);
            end
            b = Fraction(a.numer/x, a.denom/x)
        end
    end
end
The plus operator works but the other three do not. Does anyone have any ideas? Also, how do I call my method reduceFrac?
Fraction.reduceFrac(Fraction(2.4))
I thought that the code above would work, but it didn't. Below is the Python version of what I am trying to achieve.
Fraction.py
class Fraction(object):
    """Fraction class

    Attributes:
        numer: the numerator of the fraction.
        denom: the denominator of the fraction.
    """
    def __init__(self, numer, denom):
        """Initializes the Fraction class

        Sets the initial numer and denom for the
        fraction class.

        Args:
            numer: Top number of the Fraction
            denom: Bottom number of the Fraction
        Returns:
            None
        Raises:
            None
        """
        self.numer = numer
        self.denom = denom

    def __str__(self):
        """function call along with the print command

        Args:
            None
        Returns:
            String: numer / denom.
        Raises:
            None
        """
        return str(self.numer) + '/' + str(self.denom)

    def get_numer(self):
        return self.numer

    def set_numer(self, numer):
        self.numer = numer

    def get_denom(self):
        return self.denom

    def set_denom(self, denom):
        self.denom = denom

    def __add__(self, other):
        numer = self.numer * other.denom + other.numer * self.denom
        denom = self.denom * other.denom
        return Fraction.reduceFrac(Fraction(numer, denom))

    def __div__(self, other):
        numer = self.numer * other.denom
        denom = self.denom * other.numer
        return Fraction.reduceFrac(Fraction(numer, denom))

    def __sub__(self, other):
        numer = self.numer * other.denom - other.numer * self.denom
        denom = self.denom * other.denom
        return Fraction.reduceFrac(Fraction(numer, denom))

    def __mul__(self, other):
        numer = self.numer * other.numer
        denom = self.denom * other.denom
        return Fraction.reduceFrac(Fraction(numer, denom))

    def reduceFrac(self):
        x = self.numer
        y = self.denom
        while y != 0:
            (x, y) = (y, x % y)
        return Fraction(self.numer / x, self.denom / x)
if __name__ == "__main__":
    v = Fraction(4, 3)
    g = Fraction(7, 8)
    r = Fraction(4, 8)
    a = v + g
    print a
    s = v - g
    print s
    d = v / g
    print d
    m = v * g
    print m
    f = Fraction.reduceFrac(r)
    print f
Your plus function is missing an end.
