GCD implementation of fraction addition not working properly - python

I am following the Runestone Academy Python 3 course and tried to implement an 'addition' feature for fractions using a class, but I am getting an error.
When I am not using the GCD implementation, the code runs fine.
Here is my code:
class Fraction:
    def __init__(self, top, bottom):
        self.num = top
        self.den = bottom

    def show(self):
        print(f'{self.num}/{self.den}')

    def __str__(self):
        return f'{self.num}/{self.den}'

    # adding without GCD implementation
    # def __add__(self, other_fraction):
    #     new_num = self.num * other_fraction.den + self.den * other_fraction.num
    #     new_den = self.den * other_fraction.den
    #     return Fraction(new_num, new_den)

    # adding with GCD implementation
    def gcd(self, m, n):
        while m % n != 0:
            m, n = n, m % n
        return n

    def __add__(self, other_fraction):
        new_num = self.num * other_fraction.den + self.den * other_fraction.num
        new_den = self.den * other_fraction.den
        common = gcd(new_num, new_den)
        return Fraction(new_num // common, new_den // common)

    # my attempt of adding two fractions by creating a method 'add'
    # def add(self, other_fraction):
    #     new_num = self.num * other_fraction.den + self.den * other_fraction.num
    #     new_den = self.den * other_fraction.den
    #     return Fraction(new_num, new_den)

# my_fraction = Fraction(3, 5)
# print(my_fraction)
# print(f'I ate {my_fraction} of my pizza')
# my_fraction.__str__()
# str(my_fraction)

f1 = Fraction(1, 4)
f2 = Fraction(1, 2)
f3 = f1 + f2
# f3 = f1.add(f2)
print(f3)
This is the error I am getting:
Traceback (most recent call last):
  File "D:/Python/Mosh_Lec/app.py", line 74, in <module>
    f3 = f1 + f2
  File "D:/Python/Mosh_Lec/app.py", line 53, in __add__
    common = gcd(new_num, new_den)
NameError: name 'gcd' is not defined
I also tried this variation, but I get the same error:
def gcd(self, m, n):
    self.num = m
    self.den = n
    while self.num % self.den != 0:
        self.num, self.den = self.den, self.num % self.den
    return self.den

The gcd method should not logically be part of the Fraction class. Indeed, you can call gcd with any two numbers; they do not need to be the numerator and denominator of a fraction. Therefore I would move the gcd function outside of the class:
def gcd(m, n):
    ...

class Fraction:
    ...
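A minimal runnable sketch of that layout, reusing the attribute names from the question:

def gcd(m, n):
    # Euclid's algorithm: works for any two integers, not just a fraction's parts
    while m % n != 0:
        m, n = n, m % n
    return n


class Fraction:
    def __init__(self, top, bottom):
        self.num = top
        self.den = bottom

    def __str__(self):
        return f'{self.num}/{self.den}'

    def __add__(self, other_fraction):
        new_num = self.num * other_fraction.den + self.den * other_fraction.num
        new_den = self.den * other_fraction.den
        common = gcd(new_num, new_den)  # plain module-level call, no self needed
        return Fraction(new_num // common, new_den // common)


print(Fraction(1, 4) + Fraction(1, 2))  # 3/4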

Look at this:
NameError: name 'gcd' is not defined
It means that Python cannot find a function (or method) named gcd. Of course! It needs to be called on a Fraction object, so change your source code at line 32:
- common = gcd(new_num, new_den)
+ common = self.gcd(new_num, new_den)
self is a Fraction object.
By the way, since the gcd method does not use the parameter self, it should be defined as a static method:
class Fraction:
    ...
    @staticmethod
    def _gcd(m: int, n: int):
        ...
    ...
And call the method as Fraction._gcd (the leading underscore signals that it is a private function or method).
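A short sketch of the static-method variant (the name _gcd and the type hints are just illustrative):

class Fraction:
    def __init__(self, top, bottom):
        self.num = top
        self.den = bottom

    def __str__(self):
        return f'{self.num}/{self.den}'

    @staticmethod
    def _gcd(m: int, n: int) -> int:
        # no self parameter: the computation never touches instance state
        while m % n != 0:
            m, n = n, m % n
        return n

    def __add__(self, other_fraction):
        new_num = self.num * other_fraction.den + self.den * other_fraction.num
        new_den = self.den * other_fraction.den
        common = Fraction._gcd(new_num, new_den)  # self._gcd(...) also works
        return Fraction(new_num // common, new_den // common)


print(Fraction(1, 4) + Fraction(1, 2))  # 3/4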

It's a simple issue if you look at the error:
File "D:/Python/Mosh_Lec/app.py", line 53, in __add__
    common = gcd(new_num, new_den)
NameError: name 'gcd' is not defined
You have defined gcd, so why does it say gcd is not defined? Because the method has to be called through self:
common = self.gcd(new_num, new_den)
It is as simple as that. Just update __add__ like this:
def __add__(self, other_fraction):
    new_num = self.num * other_fraction.den + self.den * other_fraction.num
    new_den = self.den * other_fraction.den
    common = self.gcd(new_num, new_den)
    return Fraction(new_num // common, new_den // common)
and your problem is solved!

Related

Python: Operation between integer and custom class

I created a class called Rational, which represents a rational number, with a method that allows it to be added to another Rational object, but I am having a hard time figuring out a way to allow it to be added to an integer through a method inside the Rational class.
# This already works
a = Rational(1, 2)
b = Rational(1, 1)
# a + b -> 3/2

# This is what I wish also worked
# a + 1 -> 3/2
The code I have:
class Rational:
    def __init__(self, num: int, den: int = 1):
        self.num = num
        self.den = den
        try:
            value = self.num / self.den
        except:
            print("The denominator cannot be 0.")

    def __str__(self):
        if self.den == 1:
            return f"{self.num}"
        elif self.den > 0:
            return f"{self.num}/{self.den}"
        elif self.den < 0:
            return f"{-self.num}/{-self.den}"

    def __add__(self, other):
        if self.den != other.den:
            num_new = (self.num * other.den) + (other.num * self.den)
            den_new = self.den * other.den
        else:
            num_new = self.num + other.num
            den_new = self.den
        return Rational(num_new, den_new)
Is there a simple way to make this happen?
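One common approach (a sketch, not from the original thread) is to coerce integers to Rational inside __add__ and also define __radd__ so that 1 + a works too:

class Rational:
    def __init__(self, num: int, den: int = 1):
        self.num = num
        self.den = den

    def __str__(self):
        return f"{self.num}" if self.den == 1 else f"{self.num}/{self.den}"

    def __add__(self, other):
        if isinstance(other, int):
            other = Rational(other)      # treat 1 as 1/1
        num_new = self.num * other.den + other.num * self.den
        den_new = self.den * other.den
        return Rational(num_new, den_new)

    __radd__ = __add__                   # handles 1 + Rational(1, 2) as well


print(Rational(1, 2) + 1)   # 3/2
print(1 + Rational(1, 2))   # 3/2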

Accept only objects in a method and return new object

I want to create a method that only takes objects of the class it is defined in, manipulates them, and returns a new object of the same class.
For example:
class fraction():
    def __init__(self, counter, denominator):
        self.counter = counter
        self.denominator = denominator

    def add(self, (object of class fraction)):
        # (addition of the object the method is called on and the object passed in)
        return (new object of the class fraction)
Does anyone have an idea how to get this done?
Best regards,
David
You can do something like this:
class Fraction:
    def __init__(self, counter, denominator):
        self.counter = counter
        self.denominator = denominator

    def add(self, fraction):
        return Fraction(self.counter * fraction.denominator + fraction.counter * self.denominator,
                        self.denominator * fraction.denominator)


a = Fraction(2, 3)
b = Fraction(3, 5)
a_plus_b = a.add(b)
print(a_plus_b.counter, a_plus_b.denominator)
You can add type checking directly in the add function:
def add(self, fraction):
    if not isinstance(fraction, Fraction):
        raise TypeError(f"{fraction} is not a Fraction.")
    return Fraction(self.counter * fraction.denominator + fraction.counter * self.denominator,
                    self.denominator * fraction.denominator)
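For example, assuming the Fraction class above with the type-checking add:

a = Fraction(2, 3)
b = Fraction(3, 5)
print(a.add(b).counter, a.add(b).denominator)   # 19 15

try:
    a.add("3/5")              # not a Fraction instance
except TypeError as err:
    print(err)                # 3/5 is not a Fraction.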
If you want to integrate this with the standard fractions module, you can make a class that inherits from fractions.Fraction, which will output a nice irreducible fraction:
import fractions

class MyFraction(fractions.Fraction):
    def add(self, fraction):
        return MyFraction(self.numerator * fraction.denominator + fraction.numerator * self.denominator,
                          self.denominator * fraction.denominator)


a = MyFraction(2, 3)
b = MyFraction(10, 6)
a_plus_b = a.add(b)
print(a_plus_b.numerator, a_plus_b.denominator)

Comparing custom Fractions

I'm working through a book now and I have a question regarding one of the exercises (#6).
So we have a hand-made Fraction class and, besides all other kinds of things, we want to compare two fractions.
class Fraction:
    def __init__(self, num, den):
        if not (isinstance(num, int) and isinstance(den, int)):
            raise ValueError('Got non-int argument')
        if den == 0:
            raise ValueError('Got 0 denominator')
        self.num = num
        self.den = den

    # some class methods

    def __lt__(self, other):
        selfnum = self.num * other.den
        othernum = other.num * self.den
        return selfnum < othernum

    # some class methods

# trying it out
x = Fraction(1, -2)
y = Fraction(1, 3)
However, when we evaluate x < y, the result is False. I thought of adding a new attribute to store the sign, but that messes everything up quite a bit.
For lack of a better alternative, I added an if to the method, and here's what I got:
def __lt__(self, other):
    selfnum = self.num * other.den
    othernum = other.num * self.den
    if self.den * other.den > 0:
        return selfnum < othernum
    else:
        return selfnum > othernum
Although it seems to work, I wonder if there is a more elegant solution.
Update: Storing the sign in the numerator does what I wanted (I can change just 2 lines instead of adding a conditional in each method).
If you assume that both denominators are positive, you can safely do the comparison (since a/b < c/d would imply ad < bc). I would just store the sign in the numerator:
self.num = abs(num) * (1 if num / den > 0 else -1)
self.den = abs(den)
Or:
self.num = num
self.den = den
if self.den < 0:
    self.num = -self.num
    self.den = -self.den
And your __lt__ method can be:
def __lt__(self, other):
    return self.num * other.den < other.num * self.den
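A minimal sketch putting the two pieces together (normalization in __init__ plus the simplified __lt__), reusing the question's class:

class Fraction:
    def __init__(self, num, den):
        if not (isinstance(num, int) and isinstance(den, int)):
            raise ValueError('Got non-int argument')
        if den == 0:
            raise ValueError('Got 0 denominator')
        self.num = num
        self.den = den
        if self.den < 0:          # keep the denominator positive,
            self.num = -self.num  # carrying the sign in the numerator
            self.den = -self.den

    def __lt__(self, other):
        # safe because both denominators are positive after normalization
        return self.num * other.den < other.num * self.den


x = Fraction(1, -2)   # stored as -1/2
y = Fraction(1, 3)
print(x < y)          # True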

Module Won't recognize my function

I have code for a Fraction class:
class Fraction:
    def __init__(self, top, bottom):
        self.num = top
        self.den = bottom

    def __str__(self):
        return str(self.num) + "/" + str(self.den)

    def show(self):
        print(self.num, "/", self.den)

    def gcd(m, n):
        while m % n != 0:
            oldm = m
            oldn = n
            m = oldn
            n = oldm % oldn
        return n

    def __add__(self, otherfraction):
        newnum = self.num * otherfraction.den + \
                 self.den * otherfraction.num
        newden = self.den * otherfraction.den
        common = gcd(newnum, newden)
        return Fraction(newnum // common, newden // common)

    def __eq__(self, other):
        firstnum = self.num * other.den
        secondnum = other.num * self.den
        return firstnum == secondnum
When I run it and try to add two fractions, it pops up saying:
File "/Users/----/Downloads/Listings/listing_1_9.py", line 25, in __add__
    common = gcd(newnum,newden)
NameError: global name 'gcd' is not defined
In your code, gcd is a method of Fraction, so you should use self.gcd when referring to it from inside another method.
Use self.gcd instead.
Explanation
NameError: global name 'gcd' is not defined
That's because gcd is not a global. It's a method of Fraction.
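A short sketch of what that change looks like, with gcd given an explicit self parameter so that self.gcd(newnum, newden) passes m and n correctly:

class Fraction:
    def __init__(self, top, bottom):
        self.num = top
        self.den = bottom

    def __str__(self):
        return str(self.num) + "/" + str(self.den)

    def gcd(self, m, n):
        while m % n != 0:
            m, n = n, m % n
        return n

    def __add__(self, otherfraction):
        newnum = self.num * otherfraction.den + self.den * otherfraction.num
        newden = self.den * otherfraction.den
        common = self.gcd(newnum, newden)   # looked up on the instance
        return Fraction(newnum // common, newden // common)


print(Fraction(1, 4) + Fraction(1, 2))   # 3/4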

Overloading Operators with MATLAB

I'm currently writing code to perform Gaussian elimination in MATLAB and then write out the code needed to generate a LaTeX file showing all the steps. A lot of the time when I do Gaussian elimination the answers start turning into fractions. So I thought that, as a nice learning exercise for classes in MATLAB, I would write a Fraction class. But I have no clue how to overload operators, and frankly the MathWorks documentation wasn't helpful.
classdef Fraction
    properties
        numer
        denom
    end
    methods
        function a = Fraction(numer,denom)
            a.denom = denom;
            a.numer = numer;
        end
        function r = mtimes(a,b)
            r = Fraction(a.numer*b.numer,a.denom*b.demon);
        end
        function r = plus(a,b)
            c = a.numer*b.denom+a.denom*b.numer;
            d = a.denom*b.denom;
            r = Fraction(c,d);
        function r = minus(a,b)
            c = a.numer*b.denom-a.denom*b.numer;
            d = a.denom*b.denom;
            r = Fraction(c,d);
        end
        function r = mrdivide(a,b)
            r = Fraction(a.numer*b.denom,a.denom*b.numer);
        end
        function b = reduceFrac(a)
            x = a.numer;
            y = b.denom;
            while y ~= 0
                x = y;
                y = mod(x,y);
            end
            b = Fraction(a.numer/x, a.denom/x)
        end
    end
end
The plus operator works but the other three do not. Does anyone have any ideas? Also, how do I call my method reduceFrac?
Fraction.reduceFrac(Fraction(2.4))
I thought that the code above would work, but it didn't. Below is the Python version of what I am trying to achieve.
Fraction.py
class Fraction(object):
    """Fraction class

    Attributes:
        numer: the numerator of the fraction.
        denom: the denominator of the fraction.
    """
    def __init__(self, numer, denom):
        """Initializes the Fraction class

        Sets the initial numer and denom for the
        fraction class.

        Args:
            numer: Top number of the Fraction
            denom: Bottom number of the Fraction
        Returns:
            None
        Raises:
            None
        """
        self.numer = numer
        self.denom = denom

    def __str__(self):
        """function call along with the print command

        Args:
            None
        Returns:
            String: numer / denom.
        Raises:
            None
        """
        return str(self.numer) + '/' + str(self.denom)

    def get_numer(self):
        return self.numer

    def set_numer(self, numer):
        self.numer = numer

    def get_denom(self):
        return self.denom

    def set_denom(self, denom):
        self.denom = denom

    def __add__(self, other):
        numer = self.numer*other.denom+other.numer*self.denom
        denom = self.denom*other.denom
        return Fraction.reduceFrac(Fraction(numer,denom))

    def __div__(self, other):
        numer = self.numer*other.denom
        denom = self.denom*other.numer
        return Fraction.reduceFrac(Fraction(numer,denom))

    def __sub__(self, other):
        numer = self.numer*other.denom-other.numer*self.denom
        denom = self.denom*other.denom
        return Fraction.reduceFrac(Fraction(numer,denom))

    def __mul__(self, other):
        numer = self.numer*other.numer
        denom = self.denom*other.denom
        return Fraction.reduceFrac(Fraction(numer,denom))

    def reduceFrac(self):
        x = self.numer
        y = self.denom
        while y != 0:
            (x, y) = (y, x % y)
        return Fraction(self.numer/x, self.denom/x)

if __name__ == "__main__":
    v = Fraction(4,3)
    g = Fraction(7,8)
    r = Fraction(4,8)
    a = v + g
    print a
    s = v - g
    print s
    d = v / g
    print d
    m = v * g
    print m
    f = Fraction.reduceFrac(r)
    print f
Your plus function is missing an end.
