Somewhere in my code, a lambda is called to return a true/false response. I am trying to mock this lambda in my unit tests with no success.
This is my code:
def _test_update_allowed():
    old = ...
    new = ...
    assert(is_update_allowed(old, new) == True)
Internally, is_update_allowed calls the lambda, which is what I want to mock.
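Roughly, is_update_allowed does something like this (a simplified illustration, not the exact code; the payload shape is just a placeholder):
# Hypothetical sketch of the code under test: invoke the Lambda and
# interpret its returned payload as a boolean.
import json
import boto3

LAMBDA_CLIENT = boto3.client('lambda', region_name='us-east-1')

def is_update_allowed(old, new):
    resp = LAMBDA_CLIENT.invoke(
        FunctionName='hello-world-lambda',
        Payload=json.dumps({'old': old, 'new': new}),
    )
    return json.load(resp['Payload']) is True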
I tried adding the following code above my test:
import zipfile
import io
import boto3
import os
@pytest.fixture(scope='function')
def aws_credentials():
    """Mocked AWS Credentials for moto."""
    os.environ['AWS_ACCESS_KEY_ID'] = 'testing'
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing'
    os.environ['AWS_SECURITY_TOKEN'] = 'testing'
    os.environ['AWS_SESSION_TOKEN'] = 'testing'

CLIENT = boto3.client('lambda', region_name='us-east-1')

# Expected response setup and zip file for lambda mock creation
def lambda_event():
    code = '''
def lambda_handler(event, context):
    return event
'''
    zip_output = io.BytesIO()
    zip_file = zipfile.ZipFile(zip_output, 'w', zipfile.ZIP_DEFLATED)
    zip_file.writestr('lambda_function.py', code)
    zip_file.close()
    zip_output.seek(0)
    return zip_output.read()

# create mocked lambda with zip file
def mock_some_lambda(lambda_name, return_event):
    return CLIENT.create_function(
        FunctionName=lambda_name,
        Runtime='python2.7',
        Role='arn:aws:iam::123456789:role/does-not-exist',
        Handler='lambda_function.lambda_handler',
        Code={
            'ZipFile': return_event,
        },
        Publish=True,
        Timeout=30,
        MemorySize=128
    )
and then updated my test to:
@mock_lambda
def _test_update_allowed():
    mock_some_lambda('hello-world-lambda', lambda_event())
    old = ...
    new = ...
    assert(is_update_allowed(old, new) == True)
But I'm getting the following error, which makes me think it's actually trying to talk to AWS
botocore.exceptions.ClientError: An error occurred (UnrecognizedClientException) when calling the CreateFunction operation: The security token included in the request is invalid.
From the error message, I can confirm it is definitely not an AWS issue: it clearly states that the request is using credentials which are not valid. So the problem boils down to the code.
I am assuming you already have the import statements for the necessary libraries, since those are also not visible in the shared code:
import pytest
import moto
from mock import mock, patch
from moto import mock_lambda
So you need to use the aws_credentials fixture while creating the client, because from your code I don't see that you are using it:
@pytest.fixture(scope='function')
def lambda_mock(aws_credentials):
    with mock_lambda():
        yield boto3.client('lambda', region_name='us-east-1')
and eventually your mock fixture:
@pytest.fixture(scope='function')
def mock_some_lambda(lambda_mock):
    lambda_mock.create_function(
        FunctionName=lambda_name,
        Runtime='python2.7',
        Role='arn:aws:iam::123456789:role/does-not-exist',
        Handler='lambda_function.lambda_handler',
        Code={
            'ZipFile': return_event,
        },
        Publish=True,
        Timeout=30,
        MemorySize=128
    )
    yield
then the test function:
def _test_update_allowed(lambda_mock, mock_some_lambda):
    lambda_mock.invoke(...)
    .....
I can't give a working example because I'm not sure what the full logic is. In the meantime, take a look at this post.
The problem seems to be due to the non-existent ARN role. Try mocking it like the moto library tests do:
def get_role_name():
    with mock_iam():
        iam = boto3.client("iam", region_name=_lambda_region)
        try:
            return iam.get_role(RoleName="my-role")["Role"]["Arn"]
        except ClientError:
            return iam.create_role(
                RoleName="my-role",
                AssumeRolePolicyDocument="some policy",
                Path="/my-path/",
            )["Role"]["Arn"]
Related
I am trying to mock a lambda invocation using pytest and moto. Below is my working code.
import zipfile
import os
import boto3
from moto import mock_lambda,mock_iam
import pytest
import io
from botocore.exceptions import ClientError
import json
@pytest.fixture(scope='function')
def aws_credentials():
    """Mocked AWS Credentials for moto."""
    os.environ['AWS_ACCESS_KEY_ID'] = 'testing'
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing'
    os.environ['AWS_SECURITY_TOKEN'] = 'testing'
    os.environ['AWS_SESSION_TOKEN'] = 'testing'
@pytest.fixture(scope='function')
def lambda_mock(aws_credentials):
    with mock_lambda():
        yield boto3.client('lambda', region_name='us-east-1')
def get_role_name():
    with mock_iam():
        iam = boto3.client("iam", region_name='us-east-1')
        try:
            return iam.get_role(RoleName="my-role")["Role"]["Arn"]
        except ClientError:
            return iam.create_role(
                RoleName="my-role",
                AssumeRolePolicyDocument="some policy",
                Path="/my-path/",
            )["Role"]["Arn"]
def _process_lambda(func_str):
    zip_output = io.BytesIO()
    zip_file = zipfile.ZipFile(zip_output, "w", zipfile.ZIP_DEFLATED)
    zip_file.writestr("lambda_function.py", func_str)
    zip_file.close()
    zip_output.seek(0)
    return zip_output.read()

def get_test_zip_file1():
    pfunc = """
def lambda_handler(event, context):
    print("custom log event")
    return "hello world"
"""
    return _process_lambda(pfunc)
# create mocked lambda with zip file
@mock_lambda
def test_mock_some_lambda(lambda_mock):
    lambda_mock.create_function(
        FunctionName='abc',
        Runtime='python3.7',
        Role=get_role_name(),
        Handler='lambda_function.lambda_handler',
        Code={"ZipFile": get_test_zip_file1()},
        Publish=True,
        Timeout=30,
        MemorySize=128
    )
    resp = lambda_mock.invoke(FunctionName='abc')
    response = json.load(resp['Payload'])
    assert response == 'hello world'
However, I want to separate lambda mocking and my testing function. So I created a pytest fixture which creates a lambda function like below.
@pytest.fixture(scope='function')
def mock_some_lambda(lambda_mock):
    yield lambda_mock.create_function(
        FunctionName='abc',
        Runtime='python3.7',
        Role=get_role_name(),
        Handler='lambda_function.lambda_handler',
        Code={"ZipFile": get_test_zip_file1()},
        Publish=True,
        Timeout=30,
        MemorySize=128
    )
And then a new test function which uses this fixture
@mock_lambda
def test_update_allowed(lambda_mock, mock_some_lambda):
    resp = lambda_mock.invoke(FunctionName='abc')
    response = json.load(resp['Payload'])
    assert response == 'hello world'
This throws botocore.exceptions.ClientError: An error occurred (404) when calling the Invoke operation:
Looks like the lambda function creation is not sticking in the test function and when I run invoke(), it's not able to find the function.
How do I make this work ?
I'm having issues writing a unit test for an S3 client; it seems the test is trying to use a real S3 client rather than the one I have created for the test. Here is my example:
@pytest.fixture(autouse=True)
def moto_boto(self):
    # setup: start moto server and create the bucket
    mocks3 = mock_s3()
    mocks3.start()
    res = boto3.resource('s3')
    bucket_name: str = f"{os.environ['BUCKET_NAME']}"
    res.create_bucket(Bucket=bucket_name)
    yield
    # teardown: stop moto server
    mocks3.stop()

def test_with_fixture(self):
    from functions.s3_upload_worker import (
        save_email_in_bucket,
    )
    client = boto3.client('s3')
    bucket_name: str = f"{os.environ['BUCKET_NAME']}"
    client.list_objects(Bucket=bucket_name)
    save_email_in_bucket(
        "123AZT",
        os.environ["BUCKET_FOLDER_NAME"],
        email_byte_code,
    )
This results in the following error
botocore.exceptions.ClientError: An error occurred (ExpiredToken) when calling the PutObject operation: The provided token has expired.
The code I am testing looks like this:
def save_email_in_bucket(message_id, bucket_folder_name, body):
    s3_key = "".join([bucket_folder_name, "/", str(message_id), ".json"])
    s3_client.put_object(
        Bucket=bucket,
        Key=s3_key,
        Body=json.dumps(body),
        ContentType="application-json",
    )
    LOGGER.info(
        f"Saved email with message ID {message_id} in bucket folder {bucket_folder_name}"
    )
Not accepting this as an answer, but useful for anyone who ends up here: I found a workaround where, if I create the S3 client inside the function I am trying to test rather than globally, this approach works. I would prefer to find an actual solution though.
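The workaround most likely helps because a module-level s3_client = boto3.client('s3') gets created at import time, before mock_s3() has started, so it still points at real AWS. A rough sketch of the workaround (module and variable names are illustrative, not the exact project code):
# functions/s3_upload_worker.py -- sketch: build the client lazily inside the
# function so it is created after the moto mock has been started.
import json
import os

import boto3

def save_email_in_bucket(message_id, bucket_folder_name, body):
    s3_client = boto3.client("s3")       # created per call, so moto can intercept it
    bucket = os.environ["BUCKET_NAME"]   # assumes the bucket name comes from the environment
    s3_key = "".join([bucket_folder_name, "/", str(message_id), ".json"])
    s3_client.put_object(
        Bucket=bucket,
        Key=s3_key,
        Body=json.dumps(body),
        ContentType="application-json",
    )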
In my unit test:
def test_my_function_that_publishes_to_sns():
    conn = boto3.client("sns", region_name="us-east-1")
    mock_topic = conn.create_topic(Name="mock-topic")
    topic_arn = mock_topic.get("TopicArn")
    os.environ["SNS_TOPIC"] = topic_arn
    # call my_function
    my_module.my_method()
Then the function being tested:
# inside my_module, my_function...
sns_client.publish(
    TopicArn=os.environ["SNS_TOPIC"], Message="my message",
)
I get the error: botocore.errorfactory.NotFoundException: An error occurred (NotFound) when calling the Publish operation: Endpoint with arn arn:aws:sns:us-east-1:123456789012:mock-topic not found
Doesn't make sense; that's the topic moto is supposed to have created and mocked. Why is it saying it doesn't exist? If I call conn.publish(TopicArn=topic_arn, Message="sdfsdsdf") inside of the unit test itself it seems to mock it, but it doesn't mock it for my_module.my_method(), which the unit test executes. Maybe it's destroying the mocked topic too soon?
EDIT: I tried this every which way and I get the exact same error:
# Using context manager
def test_my_function_that_publishes_to_sns():
    with mock_sns():
        conn = boto3.client("sns", region_name="us-east-1")
        mock_topic = conn.create_topic(Name="mocktopic")
        topic_arn = mock_topic.get("TopicArn")
        os.environ["SNS_TOPIC"] = topic_arn
        # call my_function
        my_module.my_method()

# Using decorator
@mock_sns
def test_my_function_that_publishes_to_sns():
    conn = boto3.client("sns", region_name="us-east-1")
    mock_topic = conn.create_topic(Name="mocktopic")
    topic_arn = mock_topic.get("TopicArn")
    os.environ["SNS_TOPIC"] = topic_arn
    # call my_function
    my_module.my_method()

# Using decorator and context manager
@mock_sns
def test_my_function_that_publishes_to_sns():
    with mock_sns():
        conn = boto3.client("sns", region_name="us-east-1")
        mock_topic = conn.create_topic(Name="mocktopic")
        topic_arn = mock_topic.get("TopicArn")
        os.environ["SNS_TOPIC"] = topic_arn
        # call my_function
        my_module.my_method()
Opened GitHub issue as well: https://github.com/spulec/moto/issues/3027
The issue was that my_module.my_method() wasn't setting a region; it was just doing client = boto3.client("sns").
It could not find the topic because it was defaulting to a different region than us-east-1, which was hard-coded into the unit test.
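In other words, the fix is to make the client in my_module use the same region the test creates the topic in, either by passing region_name explicitly or by exporting AWS_DEFAULT_REGION. A minimal sketch (not the exact module code):
# my_module.py -- sketch: pin the SNS client to the region used in the test,
# so the topic ARN created by moto resolves to the same region.
import boto3

sns_client = boto3.client("sns", region_name="us-east-1")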
Maybe this will help you: keep all the test methods in a single class and put the @mock_sns decorator on the class for mocking SNS, and also put @mock_sns on the function where you initialize your connection to SNS.
Example:
@mock_sns
class TestSnsMock(unittest.TestCase):
    @classmethod
    @mock_sns
    def setUpClass(cls):
        cls.conn = boto3.client("sns", region_name="us-east-1")
        cls.conn.create_topic(Name="some-topic")
        cls.response = cls.conn.list_topics()
        cls.topic_arn = cls.response["Topics"][0]["TopicArn"]

    def test_publish_sns(self):
        message = "here is some message"
        self.conn.publish(TopicArn=self.topic_arn, Message=message)

if __name__ == "__main__":
    unittest.main()
Sample code below. I hope it helps somebody. The suggested fix about setting the Region was not my issue. If you are still stuck, this video is great.
Approach:
Create a mocked Boto3 Resource ( not a Boto3 Client ).
Set mock SNS Topic ARN in this new resource.
Overwrite the SNS Topic ARN environment var for the test.
Get a Boto3 Client that calls Publish to the mocked SNS Topic ARN.
I hit the below error because I set the Topic ARN to mock_topic and not arn:aws:sns:eu-west-1:123456789012:mock_topic:
botocore.errorfactory.NotFoundException: An error occurred (NotFound) when calling the Publish operation: Endpoint does not exist
"""
import os

import main
import boto3
import pytest
import botocore
from moto import mock_sns

# http://docs.getmoto.org/en/latest/docs/getting_started.html

#####################################################################
# test_main.py
#####################################################################
@pytest.fixture()
def mock_message():
    return {
        "foo": "1st wonderful message.",
        "bar": "2nd wonderful message.",
        "baz": "3rd wonderful message.",
    }

@pytest.fixture()
def mock_sns_client():
    return main.get_sns_client()

def test_get_mocked_sns_client(mock_sns_client):
    assert isinstance(mock_sns_client, botocore.client.BaseClient)

mock_topic_name = "mock_topic"

@mock_sns
def test_mock_send_sns(mock_message, monkeypatch, mock_sns_client):
    """
    1. Create a mocked Boto3 Resource ( not a Boto3 Client ).
    2. Set mock SNS Topic ARN in this new resource.
    3. Overwrite the SNS Topic ARN environment var for the test.
    """
    sns_resource = boto3.resource(
        "sns",
        region_name=os.environ.get("AWS_REGION")
    )
    topic = sns_resource.create_topic(
        Name=mock_topic_name
    )
    assert mock_topic_name in topic.arn

    monkeypatch.setenv('SNS_TOPIC_ARN', topic.arn)
    assert os.environ.get("SNS_TOPIC_ARN") == topic.arn

    response = main.send_sns(mock_sns_client, mock_message)
    assert isinstance(response, dict)

    message_id = response.get("MessageId", None)
    assert isinstance(message_id, str)
#####################################################################
# main.py
# split the get Client and Publish for simpler testing
#####################################################################
import boto3
import json
import botocore
import os
from conf.base_logger import logger

# split the get Client and Publish for simpler testing
def get_sns_client():
    return boto3.client("sns", region_name=os.environ.get("AWS_REGION"))

def send_sns(sns_client, message: dict) -> dict:
    if not isinstance(message, dict):
        logger.info("message to send Slack is not in expected format")
        return None
    if not isinstance(sns_client, botocore.client.BaseClient):
        logger.info("something wrong with the SNS client")
        return None

    return sns_client.publish(
        TargetArn=os.environ.get("SNS_TOPIC_ARN"),
        Message=json.dumps({'default': json.dumps(message, indent=4, sort_keys=True)}),
        Subject='Foo\'s stats',
        MessageStructure='json'
    )
I'm trying to follow the docs to do this:
@pytest.fixture()
def aws_credentials():
    """Mocked AWS Credentials for moto."""
    os.environ["AWS_ACCESS_KEY_ID"] = "testing"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
    os.environ["AWS_SECURITY_TOKEN"] = "testing"
    os.environ["AWS_SESSION_TOKEN"] = "testing"

@pytest.fixture()
def sts(aws_credentials):
    with mock_sts():
        yield boto3.client("sts", region_name="us-east-1")

@pytest.fixture
def sns(aws_credentials):
    with mock_sns():
        yield boto3.resource("sns", region_name="us-east-1")

def test_publish(sns):
    resp = sns.create_topic(Name="sdfsdfsdfsd")
I get error:
def test_publish(sns):
> topic_arn = sns.create_topic(Name="sdfsdfsdfsd")
E AttributeError: 'generator' object has no attribute 'create_topic'
OK, I'm not 100% sure why but adding the sts decorator seems to have fixed this:
@mock_sts
def test_publish(sns):
    resp = sns.create_topic(Name="sdfsdfsdfsd")
I figured that out from this article but I'm still unclear on how it works: https://www.serverlessops.io/blog/aws-lambda-serverless-development-workflow-part2-testing-debugging
Is this because boto needs to use STS, so I need to mock that out too? I use a credentials file with profiles to access AWS from my laptop.
Edit
Also, you MUST use yield to return the client. Using return here gave me an STS error. I'd like to understand this better as well. I assume I need to use yield because it's a generator?
@pytest.fixture
def sns(aws_credentials):
    with mock_sns():
        # using return here causes the below error
        return boto3.resource("sns", region_name="us-east-1")
Error when not using yield:
botocore.exceptions.ClientError: An error occurred (InvalidClientTokenId) when calling the CreateTopic operation: The security token included in the request is invalid
I'm trying to mock a single method on the boto3 S3 client object to throw an exception, but I need all other methods for this class to work as normal.
This is so I can test a single exception case for when an error occurs performing an upload_part_copy.
1st Attempt
import boto3
from mock import patch

with patch('botocore.client.S3.upload_part_copy', side_effect=Exception('Error Uploading')) as mock:
    client = boto3.client('s3')

    # Should return actual result
    o = client.get_object(Bucket='my-bucket', Key='my-key')

    # Should return mocked exception
    e = client.upload_part_copy()
However this gives the following error:
ImportError: No module named S3
2nd Attempt
After looking at the botocore/client.py source code, I found that it is doing something clever: the method upload_part_copy does not actually exist. It seems to call BaseClient._make_api_call instead, so I tried to mock that:
import boto3
from mock import patch

with patch('botocore.client.BaseClient._make_api_call', side_effect=Exception('Error Uploading')) as mock:
    client = boto3.client('s3')

    # Should return actual result
    o = client.get_object(Bucket='my-bucket', Key='my-key')

    # Should return mocked exception
    e = client.upload_part_copy()
This throws an exception... but on the get_object which I want to avoid.
Any ideas about how I can only throw the exception on the upload_part_copy method?
Botocore has a client stubber you can use for just this purpose: docs.
Here's an example of putting an error in:
import boto3
from botocore.stub import Stubber
client = boto3.client('s3')
stubber = Stubber(client)
stubber.add_client_error('upload_part_copy')
stubber.activate()
# Will raise a ClientError
client.upload_part_copy()
Here's an example of putting a normal response in. Additionally, the stubber can now be used as a context manager. It's important to note that the stubber will verify, as far as it is able, that your provided response matches what the service will actually return. This isn't perfect, but it will protect you from inserting total nonsense responses.
import boto3
from botocore.stub import Stubber

client = boto3.client('s3')
stubber = Stubber(client)

list_buckets_response = {
    "Owner": {
        "DisplayName": "name",
        "ID": "EXAMPLE123"
    },
    "Buckets": [{
        "CreationDate": "2016-05-25T16:55:48.000Z",
        "Name": "foo"
    }]
}
expected_params = {}
stubber.add_response('list_buckets', list_buckets_response, expected_params)

with stubber:
    response = client.list_buckets()

assert response == list_buckets_response
As soon as I posted on here I managed to come up with a solution. Here it is hope it helps :)
import botocore
from botocore.exceptions import ClientError
from mock import patch
import boto3

orig = botocore.client.BaseClient._make_api_call

def mock_make_api_call(self, operation_name, kwarg):
    if operation_name == 'UploadPartCopy':
        parsed_response = {'Error': {'Code': '500', 'Message': 'Error Uploading'}}
        raise ClientError(parsed_response, operation_name)
    return orig(self, operation_name, kwarg)

with patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call):
    client = boto3.client('s3')

    # Should return actual result
    o = client.get_object(Bucket='my-bucket', Key='my-key')

    # Should return mocked exception
    e = client.upload_part_copy()
Jordan Philips also posted a great solution using the botocore.stub.Stubber class. While it is a cleaner solution, I was unable to mock specific operations with it.
If you don't want to use either moto or the botocore stubber (the stubber does not prevent HTTP requests being made to AWS API endpoints it seems), you can use the more verbose unittest.mock way:
foo/bar.py

import boto3

def my_bar_function():
    client = boto3.client('s3')
    buckets = client.list_buckets()
    ...

bar_test.py

import unittest
from unittest import mock

from foo.bar import my_bar_function

class MyTest(unittest.TestCase):
    @mock.patch('foo.bar.boto3.client')
    def test_that_bar_works(self, mock_s3_client):
        # exercise the code under test so the mocked client is actually used
        my_bar_function()
        self.assertTrue(mock_s3_client.return_value.list_buckets.call_count == 1)
Here's an example of a simple Python unittest that can be used to fake the client = boto3.client('ec2') API call...
import unittest
from unittest import mock

import boto3

class MyAWSModule():
    def __init__(self):
        client = boto3.client('ec2')
        tags = client.describe_tags(DryRun=False)

class TestMyAWSModule(unittest.TestCase):
    @mock.patch("boto3.client.describe_tags")
    @mock.patch("boto3.client")
    def test_open_file_with_existing_file(self, mock_boto_client, mock_describe_tags):
        mock_describe_tags.return_value = mock_get_tags_response
        my_aws_module = MyAWSModule()

        mock_boto_client.assert_call_once('ec2')
        mock_describe_tags.assert_call_once_with(DryRun=False)

mock_get_tags_response = {
    'Tags': [
        {
            'ResourceId': 'string',
            'ResourceType': 'customer-gateway',
            'Key': 'string',
            'Value': 'string'
        },
    ],
    'NextToken': 'string'
}
hopefully that helps.
What about simply using moto?
It comes with a very handy decorator:
from moto import mock_s3

@mock_s3
def test_my_model_save():
    pass
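For completeness, a slightly fuller sketch of the decorator in action (the bucket name and keys here are made up purely for illustration):
import boto3
from moto import mock_s3

@mock_s3
def test_my_model_save():
    # Everything inside this test talks to moto's in-memory S3, not real AWS.
    conn = boto3.client("s3", region_name="us-east-1")
    conn.create_bucket(Bucket="mybucket")
    conn.put_object(Bucket="mybucket", Key="steve", Body=b"is awesome")

    body = conn.get_object(Bucket="mybucket", Key="steve")["Body"].read()
    assert body == b"is awesome"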
I had to mock the boto3 client for some integration testing and it was a bit painful! The problem I had is that moto does not support KMS very well, yet I did not want to write my own mock for the S3 buckets. So I created this morph of all of the answers. It also works globally, which is pretty cool!
I have it set up with 2 files.
First one is aws_mock.py. For the KMS mocking I got some predefined responses that came from live boto3 client.
from unittest.mock import MagicMock

import boto3
from moto import mock_s3

# `create_key` response
create_resp = { ... }

# `generate_data_key` response
generate_resp = { ... }

# `decrypt` response
decrypt_resp = { ... }

def client(*args, **kwargs):
    if args[0] == 's3':
        s3_mock = mock_s3()
        s3_mock.start()
        mock_client = boto3.client(*args, **kwargs)
    else:
        mock_client = boto3.client(*args, **kwargs)
        if args[0] == 'kms':
            mock_client.create_key = MagicMock(return_value=create_resp)
            mock_client.generate_data_key = MagicMock(return_value=generate_resp)
            mock_client.decrypt = MagicMock(return_value=decrypt_resp)
    return mock_client
The second one is the actual test module; let's call it test_my_module.py. I've omitted the code of my_module, as well as the functions under test; let's call those the foo and bar functions.
from unittest.mock import patch

import aws_mock
import my_module

@patch('my_module.boto3')
def test_my_module(boto3):
    # Some prep work for the mock mode
    boto3.client = aws_mock.client
    conn = boto3.client('s3')
    conn.create_bucket(Bucket='my-bucket')

    # Actual testing
    resp = my_module.foo()
    assert(resp == 'Valid')
    resp = my_module.bar()
    assert(resp != 'Not Valid')
    # Etc, etc, etc...
One more thing: I'm not sure if this has been fixed, but I found that moto was not happy unless you set some environment variables, like credentials and region. They don't have to be actual credentials, but they do need to be set. There is a chance it might be fixed by the time you read this! But here is some code in case you do need it, shell code this time:
export AWS_ACCESS_KEY_ID='foo'
export AWS_SECRET_ACCESS_KEY='bar'
export AWS_DEFAULT_REGION='us-east-1'
I know it is probably not the prettiest piece of code but if you are looking for something universal it should work pretty well!
Here is my solution for patching a boto client used in the bowels of my project, with pytest fixtures. I'm only using 'mturk' in my project.
The trick for me was to create my own client, and then patch boto3.client with a function that returns that pre-created client.
import boto3
import pytest
from botocore.stub import Stubber
from unittest.mock import patch

@pytest.fixture(scope='session')
def patched_boto_client():
    my_client = boto3.client('mturk')

    def my_client_func(*args, **kwargs):
        return my_client

    with patch('bowels.of.project.other_module.boto3.client', my_client_func):
        yield my_client_func

def test_create_hit(patched_boto_client):
    client = patched_boto_client()
    stubber = Stubber(client)
    stubber.add_response('create_hit_type', {'my_response': 'is_great'})
    stubber.add_response('create_hit_with_hit_type', {'my_other_response': 'is_greater'})
    stubber.activate()

    import bowels.of.project  # this module imports `other_module`
    bowels.of.project.create_hit_function_that_calls_a_function_in_other_module_which_invokes_boto3_dot_client_at_some_point()
I also define another fixture that sets up dummy aws creds so that boto doesn't accidentally pick up some other set of credentials on the system. I literally set 'foo' and 'bar' as my creds for testing -- that's not a redaction.
It's important that AWS_PROFILE env be unset because otherwise boto will go looking for that profile.
@pytest.fixture(scope='session')
def setup_env():
    os.environ['AWS_ACCESS_KEY_ID'] = 'foo'
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'bar'
    os.environ.pop('AWS_PROFILE', None)
And then I specify setup_env as a pytest usefixtures entry so that it gets used for every test run.
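If the fixture lives in a conftest.py, an alternative wiring to a usefixtures entry (a sketch, not my exact setup) is to mark it autouse so it applies to every test automatically:
# conftest.py -- sketch: autouse applies the dummy-credentials fixture to all tests
# without listing it in a usefixtures entry.
import os

import pytest

@pytest.fixture(scope='session', autouse=True)
def setup_env():
    os.environ['AWS_ACCESS_KEY_ID'] = 'foo'
    os.environ['AWS_SECRET_ACCESS_KEY'] = 'bar'
    os.environ.pop('AWS_PROFILE', None)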
I had a slightly different use case where the client is set up during a setup() method in a Class, as it does a few things such as listing things from the AWS service it's talking to (Connect, in my case). Lots of the above approaches weren't quite working, so here's my working version for future Googlers.
In order to get everything to work properly, I had to do this:
In the class under test (src/flow_manager.py):
import boto3
import botocore

class FlowManager:
    client: botocore.client.BaseClient

    def setup(self):
        self.client = boto3.client('connect')

    def set_instance(self):
        response = self.client.list_instances()
        ... do stuff ....
In the test file (tests/unit/test_flow_manager.py):
@mock.patch('src.flow_manager.boto3.client')
def test_set_instance(self, mock_client):
    expected = 'bar'
    instance_list = {'alias': 'foo', 'id': 'bar'}
    mock_client.list_instances.return_value = instance_list

    actual = flow_manager.FlowManager("", "", "", "", 'foo')
    actual.client = mock_client
    actual.set_instance()

    self.assertEqual(expected, actual.instance_id)
I've truncated the code to the relevant bits for this answer.