Python Acme V2 - Reuse Order/Challenge

I'm using python-acme to write a small script that takes in a domain and does one of two things:
Using the DNS01 challenge, if the challenge doesn't pass, return the DNS entry that needs to be added to the domain.
Using the DNS01 challenge, if the challenge passes, return the certificate information.
I'm running into issues, however, and the documentation is just terrible :P I'm trying to find a way to "reuse" the order/challenge. Basically, every time I run the script, the validation token for the DNS entry changes.
print("Validating Challenge...")
response, validation = challenge.response_and_validation(client_acme.net.key)
print("response %s" % response.to_partial_json())
print("validation %s" % validation)
print("-> Validation Domain: %s" % challenge.chall.validation_domain_name(domain))
print("-> Validation Value: %s" % challenge.chall.validation(client_acme.net.key))
# TODO - We are here, gotta actually get info on the DNS challenge and attempt to validate.
print("Validation Completed!")
Things I've tried:
Register the account with the same RSA key. Runs into validation errors when registering.
Running new_order with the same CSR. Still returns a different key.
Full Code (In Its Prototype Glory)
import json
import josepy as jose
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.serialization import load_pem_private_key
from acme import client
from acme import messages
from acme import challenges
from acme import crypto_util
import OpenSSL
from boto.s3.connection import S3Connection
from boto.s3.key import Key
"""
Generates an SSL certificate through Let's Encrypt provided a domain. If the
domain requires a DNS challenge to be passed, that information is passed back
to the user. Otherwise, the SSL certificate is generated and returned back to
the user.
Documentation for generating SSL via this method:
http://www.gilesthomas.com/2018/11/python-code-to-generate-lets-encrypt-certificates/
ACME Documentation:
https://kite.com/python/docs/acme.client.ClientV2.new_order
"""
print('Loading function')
DEBUG = True
DIRECTORY_URL = 'https://acme-staging-v02.api.letsencrypt.org/directory'
KEY_SIZE = 2048
CERT_PKEY_BITS = 2048
USER_AGENT = 'python-acme-example'
EMAIL_ADDRESS = 'REDACTED'
S3_BUCKET = 'REDACTED'
S3_KEY = 'REDACTED'
S3_SECRET = 'REDACTED'
# TODO - Load These From Event
PASSWORD = "swordfish"
SALT = "yourAppName"
def new_csr_comp(domain_name, pkey_pem=None):
"""Create certificate signing request."""
if pkey_pem is None:
# Create private key.
pkey = OpenSSL.crypto.PKey()
pkey.generate_key(OpenSSL.crypto.TYPE_RSA, CERT_PKEY_BITS)
pkey_pem = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,
pkey)
csr_pem = crypto_util.make_csr(pkey_pem, [domain_name])
return pkey_pem, csr_pem
def save_key(pk, filename):
pem = pk.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption()
)
return pem
def lambda_handler(event, context):
print("Formatting Domain...")
domain = event['domain']
domain = domain.lower()
print("Formatted! Domain is: %s" % domain)
print("Generating User Key...")
conn = S3Connection(S3_KEY, S3_SECRET)
bucket = conn.get_bucket(S3_BUCKET)
key_name = "%s.key" % domain
existing_account = False
print("-> Looking For Existing Key.")
rsa_key = bucket.get_key(key_name)
if rsa_key is None or DEBUG:
print("-> Key Not Found. Creating New One.")
rsa_key = rsa.generate_private_key(
public_exponent=65537,
key_size=KEY_SIZE,
backend=default_backend()
)
pem = save_key(rsa_key, key_name)
print(key_name)
k = Key(bucket)
k.key = key_name
k.set_contents_from_string(pem)
else:
print("-> Key File Found.")
existing_account = True
rsa_key = rsa_key.get_contents_as_string()
rsa_key = load_pem_private_key(rsa_key, password=None, backend=default_backend())
print(rsa_key)
print("-> Converted File To Usable Format.")
acc_key = jose.JWKRSA(
key=rsa_key
)
print("Generated!")
print("Registering With Let's Encrypt...")
print("-> Connecting to Let's Encrypt on {}".format(DIRECTORY_URL))
net = client.ClientNetwork(acc_key, user_agent=USER_AGENT)
directory = messages.Directory.from_json(net.get(DIRECTORY_URL).json())
client_acme = client.ClientV2(directory, net=net)
print("-> Registering")
email = (EMAIL_ADDRESS)
regr = None
# TODO - Use Existing Account
# if existing_account:
# regr = messages.NewRegistration(key=acc_key.public_key(), only_return_existing=True)
# else:
account_created = messages.NewRegistration.from_data(email=email, terms_of_service_agreed=True)
regr = client_acme.new_account(account_created)
print("Registered!")
print("Creating CSR...")
temp_pkey_pem, temp_csr_pem = new_csr_comp(domain)
key_name = "%s.pkey_pem" % domain
pkey_pem = bucket.get_key(key_name)
if pkey_pem is None:
print("-> Creating New PKEY")
k = Key(bucket)
k.key = key_name
k.set_contents_from_string(temp_pkey_pem)
pkey_pem = temp_pkey_pem
else:
print("-> Using Existing PKEY")
pkey_pem = pkey_pem.get_contents_as_string()
key_name = "%s.csr_pem" % domain
csr_pem = bucket.get_key(key_name)
if csr_pem is None:
print("-> Creating New CSR")
k = Key(bucket)
k.key = key_name
k.set_contents_from_string(temp_csr_pem)
csr_pem = temp_csr_pem
else:
print("-> Using Existing CSR")
csr_pem = csr_pem.get_contents_as_string()
print("Created!")
print("Requesting Challenges...")
orderr = client_acme.new_order(csr_pem)
print("Requested!")
print("Selecting DNS Challenge...")
challenge = None
authz_list = orderr.authorizations
for authz in authz_list:
for i in authz.body.challenges:
if isinstance(i.chall, challenges.DNS01):
challenge = i
else:
print("-> Other challenge found: %s" % i.chall)
if challenge is None:
raise Exception("Could not find a DNS challenge!")
print("Selected!")
print("Validating Challenge...")
response, validation = challenge.response_and_validation(client_acme.net.key)
print("response %s" % response.to_partial_json())
print("validation %s" % validation)
print("-> Validation Domain: %s" % challenge.chall.validation_domain_name(domain))
print("-> Validation Value: %s" % challenge.chall.validation(client_acme.net.key))
# TODO - We are here, gotta actually get info on the DNS challenge and attempt to validate.
print("Validation Completed!")
print("Starting Challenge...")
client_acme.answer_challenge(challenge, response)
finalized_orderr = client_acme.poll_and_finalize(orderr)
fullchain_pem = finalized_orderr.fullchain_pem
print("-> PEM: %s" % fullchain_pem)
print("Challenge Completed!")
# TODO - We need to return the DNS challenge if it hasn't been completed yet.
return "done"
#raise Exception('Something went wrong')

Bryant
print("Starting Challenge...")
client_acme.answer_challenge(challenge, response)
finalized_orderr = client_acme.poll_and_finalize(orderr)
fullchain_pem = finalized_orderr.fullchain_pem
print("-> PEM: %s" % fullchain_pem)
print("Challenge Completed!")`enter code here`
Each time you run this code block, it will send a challenge to the Let's Encrypt server. If the challenge succeeds, it will return your certs and everything is OK, but if the challenge is invalid, the Let's Encrypt server will change your key authorization and TXT value.
Hope it helps.
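To keep the TXT token stable between runs, the key is to keep using the same ACME account (same RSA key) so the server can hand back the still-pending authorization instead of minting a new one. A minimal sketch of that idea (not from the answer above; it assumes the errors.ConflictError with a .location attribute that ClientV2.new_account raises for an already-registered key, and the query_registration method, both present in recent python-acme versions, so verify against yours):

from acme import errors, messages

def get_or_reuse_account(client_acme, email):
    """Reuse the ACME account tied to this key so its pending
    authorizations (and their DNS-01 tokens) survive between runs."""
    try:
        return client_acme.new_account(messages.NewRegistration.from_data(
            email=email, terms_of_service_agreed=True))
    except errors.ConflictError as err:
        # The key is already registered; err.location carries the existing
        # account URI. Rebuild the resource and refresh it from the server.
        regr = messages.RegistrationResource(
            body=messages.Registration(), uri=err.location)
        return client_acme.query_registration(regr)

With the account reused, client_acme.new_order for the same domain should be handed back the authorization that is still pending, so challenge.chall.validation(client_acme.net.key) yields the same TXT value until the challenge is answered or the authorization expires.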

Related

Export VM as OVF template using ExportVm() from pyvmomi

I am not able to understand this code. Can anyone explain it?
'''
#!/usr/bin/env python
#
# Written by JM Lopez
# GitHub: https://github.com/jm66
# Email: jm@jmll.me
# Website: http://jose-manuel.me
#
# Note: Example code For testing purposes only
# Based on ExportOvfToLocal.java by Steve Jin
#
# This code has been released under the terms of the Apache-2.0 license
# http://opensource.org/licenses/Apache-2.0
#
import sys
import os
import threading
from time import sleep
import requests
from pyVmomi import vim
from tools import cli, service_instance, pchelper
# disable urllib3 warnings
requests.packages.urllib3.disable_warnings(
requests.packages.urllib3.exceptions.InsecureRequestWarning)
def print_http_nfc_lease_info(info):
""" Prints information about the lease,
such as the entity covered by the lease,
and HTTP URLs for up/downloading file backings.
:param info:
:type info: vim.HttpNfcLease.Info
:return:
"""
print('Lease timeout: {0.leaseTimeout}\n'
'Disk Capacity KB: {0.totalDiskCapacityInKB}'.format(info))
device_number = 1
if info.deviceUrl:
for device_url in info.deviceUrl:
print('HttpNfcLeaseDeviceUrl: {1}\n \
Device URL Import Key: {0.importKey}\n \
Device URL Key: {0.key}\n \
Device URL: {0.url}\n \
Device URL Size: {0.fileSize}\n \
SSL Thumbprint: {0.sslThumbprint}\n'.format(device_url, device_number))
device_number += 1
else:
print('No devices were found.')
def break_down_cookie(cookie):
""" Breaks down vSphere SOAP cookie
:param cookie: vSphere SOAP cookie
:type cookie: str
:return: Dictionary with cookie_name: cookie_value
"""
cookie_a = cookie.split(';')
cookie_name = cookie_a[0].split('=')[0]
cookie_text = ' {0}; ${1}'.format(cookie_a[0].split('=')[1],
cookie_a[1].lstrip())
return {cookie_name: cookie_text}
class LeaseProgressUpdater(threading.Thread):
"""
Lease Progress Updater & keep alive
thread
"""
def __init__(self, http_nfc_lease, update_interval):
threading.Thread.__init__(self)
self._running = True
self.httpNfcLease = http_nfc_lease
self.updateInterval = update_interval
self.progressPercent = 0
def set_progress_pct(self, progress_pct):
self.progressPercent = progress_pct
def stop(self):
self._running = False
def run(self):
while self._running:
try:
if self.httpNfcLease.state == vim.HttpNfcLease.State.done:
return
print('Updating HTTP NFC Lease Progress to {}%'.format(self.progressPercent))
self.httpNfcLease.HttpNfcLeaseProgress(self.progressPercent)
sleep(self.updateInterval)
except Exception as ex:
print(ex)  # exceptions have no .message attribute in Python 3
return
def download_device(headers, cookies, temp_target_disk,
device_url, lease_updater,
total_bytes_written, total_bytes_to_write):
""" Download disk device of HttpNfcLease.info.deviceUrl
list of devices
:param headers: Request headers
:type cookies: dict
:param cookies: Request cookies (session)
:type cookies: dict
:param temp_target_disk: file name to write
:type temp_target_disk: str
:param device_url: deviceUrl.url
:type device_url: str
:param lease_updater:
:type lease_updater: LeaseProgressUpdater
:param total_bytes_written: Bytes written so far
:type total_bytes_to_write: long
:param total_bytes_to_write: VM unshared storage
:type total_bytes_to_write: long
:return:
"""
with open(temp_target_disk, 'wb') as handle:
response = requests.get(device_url, stream=True,
headers=headers,
cookies=cookies, verify=False)
# response other than 200
if not response.ok:
response.raise_for_status()
# keeping track of progress
current_bytes_written = 0
for block in response.iter_content(chunk_size=2048):
# filter out keep-alive new chunks
if block:
handle.write(block)
handle.flush()
os.fsync(handle.fileno())
# getting right progress
current_bytes_written += len(block)
written_pct = ((current_bytes_written +
total_bytes_written) * 100) / total_bytes_to_write
# updating lease
lease_updater.progressPercent = int(written_pct)
return current_bytes_written
def main():
parser = cli.Parser()
parser.add_optional_arguments(cli.Argument.VM_NAME, cli.Argument.UUID)
parser.add_custom_argument('--name', required=False, action='store',
help='The ovf:id to use for the top-level OVF Entity.')
parser.add_custom_argument('--workdir', required=True, action='store',
help='Working directory. Must have write permission.')
args = parser.get_args()
si = service_instance.connect(args)
# Getting VM data
vm_obj = None
if args.uuid:
# if instanceUuid(last argument) is false it will search for VM BIOS UUID instead
vm_obj = si.content.searchIndex.FindByUuid(None, args.uuid, True)
elif args.vm_name:
vm_obj = pchelper.get_obj(si.content, [vim.VirtualMachine], args.vm_name)
# VM does exist
if not vm_obj:
print('VM {} does not exist'.format(args.uuid))
sys.exit(1)
# VM must be powered off to export
if not vm_obj.runtime.powerState == \
vim.VirtualMachine.PowerState.poweredOff:
print('VM {} must be powered off'.format(vm_obj.name))
sys.exit(1)
# Breaking down SOAP Cookie &
# creating Header
soap_cookie = si._stub.cookie
cookies = break_down_cookie(soap_cookie)
headers = {'Accept': 'application/x-vnd.vmware-streamVmdk'} # not required
# checking if working directory exists
print('Working dir: {} '.format(args.workdir))
if not os.path.isdir(args.workdir):
print('Creating working directory {}'.format(args.workdir))
os.mkdir(args.workdir)
# actual target directory for VM
target_directory = os.path.join(args.workdir, vm_obj.config.instanceUuid)
print('Target dir: {}'.format(target_directory))
if not os.path.isdir(target_directory):
print('Creating target dir {}'.format(target_directory))
os.mkdir(target_directory)
# Getting HTTP NFC Lease
http_nfc_lease = vm_obj.ExportVm()
# starting lease updater
lease_updater = LeaseProgressUpdater(http_nfc_lease, 60)
lease_updater.start()
# Creating list for ovf files which will be value of
# ovfFiles parameter in vim.OvfManager.CreateDescriptorParams
ovf_files = list()
total_bytes_written = 0
# http_nfc_lease.info.totalDiskCapacityInKB not real
# download size
total_bytes_to_write = vm_obj.summary.storage.unshared
try:
while True:
if http_nfc_lease.state == vim.HttpNfcLease.State.ready:
print('HTTP NFC Lease Ready')
print_http_nfc_lease_info(http_nfc_lease.info)
for device_url in http_nfc_lease.info.deviceUrl:
if not device_url.targetId:
print("No targetId found for url: {}.".format(device_url.url))
print("Device is not eligible for export. "
"This could be a mounted iso or img of some sort")
print("Skipping...")
continue
temp_target_disk = os.path.join(target_directory,
device_url.targetId)
print('Downloading {} to {}'.format(device_url.url,
temp_target_disk))
current_bytes_written = download_device(
headers=headers, cookies=cookies,
temp_target_disk=temp_target_disk,
device_url=device_url.url,
lease_updater=lease_updater,
total_bytes_written=total_bytes_written,
total_bytes_to_write=total_bytes_to_write)
# Adding up file written bytes to total
total_bytes_written += current_bytes_written
print('Creating OVF file for {}'.format(temp_target_disk))
# Adding Disk to OVF Files list
ovf_file = vim.OvfManager.OvfFile()
ovf_file.deviceId = device_url.key
ovf_file.path = device_url.targetId
ovf_file.size = current_bytes_written
ovf_files.append(ovf_file)
break
if http_nfc_lease.state == vim.HttpNfcLease.State.initializing:
print('HTTP NFC Lease Initializing.')
elif http_nfc_lease.state == vim.HttpNfcLease.State.error:
print("HTTP NFC Lease error: {}".format(
http_nfc_lease.state.error))
sys.exit(1)
sleep(2)
print('Getting OVF Manager')
ovf_manager = si.content.ovfManager
print('Creating OVF Descriptor')
vm_descriptor_name = args.name if args.name else vm_obj.name
ovf_parameters = vim.OvfManager.CreateDescriptorParams()
ovf_parameters.name = vm_descriptor_name
ovf_parameters.ovfFiles = ovf_files
vm_descriptor_result = ovf_manager.CreateDescriptor(obj=vm_obj,
cdp=ovf_parameters)
if vm_descriptor_result.error:
raise vm_descriptor_result.error[0].fault
vm_descriptor = vm_descriptor_result.ovfDescriptor
target_ovf_descriptor_path = os.path.join(target_directory,
vm_descriptor_name +
'.ovf')
print('Writing OVF Descriptor {}'.format(
target_ovf_descriptor_path))
with open(target_ovf_descriptor_path, 'wb') as handle:
handle.write(str.encode(vm_descriptor))
# ending lease
http_nfc_lease.HttpNfcLeaseProgress(100)
http_nfc_lease.HttpNfcLeaseComplete()
# stopping thread
lease_updater.stop()
except Exception as ex:
print(ex)
# Complete lease upon exception
http_nfc_lease.HttpNfcLeaseComplete()
sys.exit(1)
if __name__ == '__main__':
main()
'''

How to mock functionality of boto3 module using pytest

I have a custom module written called sqs.py. The script will do the following:
Get a message from AWS SQS
Get the AWS S3 path to delete
Delete the path
Send a confirmation email to the user
I'm trying to write unit tests for this module that will verify the code executes as expected and that it raises exceptions when they occur.
This means I will need to mock the responses from the Boto3 calls that I make. My problem is that the code will first establish the SQS client to obtain the message and then make a second call to establish the S3 client. I'm not sure how to mock these 2 independent calls and be able to fake a response so I can test my script's functionality. Perhaps my approach is incorrect. In any case, any advice on how to do this properly is appreciated.
Here's what the code looks like:
import boto3
import json
import os
import pprint
import time
import asyncio
import logging
from send_email import send_email
queue_url = 'https://xxxx.queue.amazonaws.com/1234567890/queue'
def shutdown(message):
""" Sends shutdown command to OS """
os.system(f'shutdown +5 "{message}"')
def send_failure_email(email_config: dict, error_message: str):
""" Sends email notification to user with error message attached. """
recipient_name = email_config['recipient_name']
email_config['subject'] = 'Subject: Restore Failed'
email_config['message'] = f'Hello {recipient_name},\n\n' \
+ 'We regret that an error has occurred during the restore process. ' \
+ 'Please try again in a few minutes.\n\n' \
+ f'Error: {error_message}.\n\n'
try:
send_email(email_config)
except RuntimeError as error_message:
logging.error(f'ERROR: cannot send email to user. {error_message}')
async def restore_s3_objects(s3_client: object, p_bucket_name: str, p_prefix: str):
"""Attempts to restore objects specified by p_bucket_name and p_prefix.
Returns True if restore took place, false otherwise.
"""
is_truncated = True
key_marker = None
key = ''
number_of_items_restored = 0
has_restore_occured = False
logging.info(f'performing restore for {p_bucket_name}/{p_prefix}')
try:
while is_truncated == True:
if not key_marker:
version_list = s3_client.list_object_versions(
Bucket = p_bucket_name,
Prefix = p_prefix)
else:
version_list = s3_client.list_object_versions(
Bucket = p_bucket_name,
Prefix = p_prefix,
KeyMarker = key_marker)
if 'DeleteMarkers' in version_list:
logging.info('found delete markers')
delete_markers = version_list['DeleteMarkers']
for d in delete_markers:
if d['IsLatest'] == True:
key = d['Key']
version_id = d['VersionId']
s3_client.delete_object(
Bucket = p_bucket_name,
Key = key,
VersionId = version_id
)
number_of_items_restored = number_of_items_restored + 1
is_truncated = version_list['IsTruncated']
logging.info(f'is_truncated: {is_truncated}')
if 'NextKeyMarker' in version_list:
key_marker = version_list['NextKeyMarker']
if number_of_items_restored > 0:
has_restore_occured = True
return has_restore_occured
except Exception as error_message:
raise RuntimeError(error_message)
async def main():
if 'AWS_ACCESS_KEY_ID' in os.environ \
and 'AWS_SECRET_ACCESS_KEY' in os.environ \
and os.environ['AWS_ACCESS_KEY_ID'] != '' \
and os.environ['AWS_SECRET_ACCESS_KEY'] != '':
sqs_client = boto3.client(
'sqs',
aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'],
verify=False
)
s3_client = boto3.client(
's3',
aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'],
verify=False
)
else:
sqs_client = boto3.client(
'sqs',
verify=False,
)
s3_client = boto3.client(
's3',
verify=False,
)
received_message = sqs_client.receive_message(
QueueUrl=queue_url,
AttributeNames=['All'],
VisibilityTimeout=10,
WaitTimeSeconds=20, # Wait up to 20 seconds for a message to arrive
)
if 'Messages' in received_message \
and len(received_message['Messages']) > 0:
# NOTE: Initialize email configuration
receipient_email = 'support@example.com'
username = receipient_email.split('@')[0]
fullname_length = len(username.split('.'))
fullname = f"{username.split('.')[0]}" # Group name / First name only
if (fullname_length == 2): # First name and last name available
fullname = f"{username.split('.')[0]} {username.split('.')[1]}"
fullname = fullname.title()
email_config = {
'destination': receipient_email,
'recipient_name': fullname,
'subject': 'Subject: Restore Complete',
'message': ''
}
try:
receipt_handle = received_message['Messages'][0]['ReceiptHandle']
except Exception as error_message:
logging.error(error_message)
send_failure_email(email_config, error_message)
shutdown(f'{error_message}')
try:
data = received_message['Messages'][0]['Body']
data = json.loads(data)
logging.info('A SQS message for a restore has been received.')
except Exception as error_message:
message = f'Unable to obtain and parse message body. {error_message}'
logging.error(message)
send_failure_email(email_config, message)
shutdown(f'{error_message}')
try:
bucket = data['bucket']
prefix = data['prefix']
except Exception as error_message:
message = f'Retrieving bucket name and prefix failed. {error_message}'
logging.error(message)
send_failure_email(email_config, message)
shutdown(f'{error_message}')
try:
logging.info(f'Initiating restore for path: {bucket}/{prefix}')
restore_was_performed = await asyncio.create_task(restore_s3_objects(s3_client, bucket, prefix))
if restore_was_performed is True:
email_config['message'] = f'Hello {fullname},\n\n' \
+ f'The files in the path \'{bucket}/{prefix}\' have been restored. '
send_email(email_config)
logging.info('Restore complete. Shutting down.')
else:
logging.info('Path does not require restore. Shutting down.')
shutdown('Restore successful! System will shutdown in 5 mins')  # shutdown() already wraps the message in the shutdown command
except Exception as error_message:
message = f'File restoration failed. {error_message}'
logging.error(message)
send_failure_email(email_config, message)
shutdown(f'{error_message}')
try:
sqs_client.delete_message(
QueueUrl=queue_url,
ReceiptHandle=receipt_handle,
)
except Exception as error_message:
message = f'Deleting restore session from SQS failed. {error_message}'
logging.error(message)
send_failure_email(email_config, message)
shutdown(f'{error_message}')
if __name__ == '__main__':
logging.basicConfig(filename='restore.log',level=logging.INFO)
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
loop.close()
The only way I was able to mock Boto3 was by rebuilding a small class that mirrors the actual method structure. This is because Boto3 uses dynamic methods, and all the resource-level methods are created at runtime.
This might not be industry standard, but I wasn't able to get most of the methods I found on the internet to work, and this approach worked well for me with minimal effort (compared to some of the solutions I found).
import asyncio
import os
import unittest
from unittest import mock

import boto3

from sqs import restore_s3_objects  # the module under test, per the question

class MockClient:
def __init__(self, region_name, aws_access_key_id, aws_secret_access_key):
self.region_name = region_name
self.aws_access_key_id = aws_access_key_id
self.aws_secret_access_key = aws_secret_access_key
self.MockS3 = MockS3()
def client(self, service_name, **kwargs):
return self.MockS3
class MockS3:
def __init__(self):
self.response = None # Test your mock data from S3 here
def list_object_versions(self, **kwargs):
return self.response
class S3TestCase(unittest.TestCase):
def test_restore_s3_objects(self):
# Given
bucket = "testBucket" # Test this to something that somewahat realistic
prefix = "some/prefix" # Test this to something that somewahat realistic
env_vars = mock.patch.dict(os.environ, {"AWS_ACCESS_KEY_ID": "abc",
"AWS_SECRET_ACCESS_KEY": "def"})
env_vars.start()
# Initialising the Session can be tricky since it has to be patched in
# the module/file that creates the session in the actual code rather
# than where the Session class is defined. In that case you might have
# to patch it via your own module rather than boto3.
boto3.session.Session = mock.Mock(side_effect=[
MockClient(region_name='eu-west-1',
aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'])])
s3_client = boto3.client('s3', verify=False)
# When
has_restore_occured = asyncio.run(restore_s3_objects(s3_client, bucket, prefix))  # restore_s3_objects is async, so it must be run, not just called
# Then
self.assertEqual(has_restore_occured, False) # your expected result set
env_vars.stop()
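As an alternative sketch (not part of the answer above): botocore ships a Stubber that can queue fake responses on a real client, which avoids hand-rolled mock classes. A minimal example, assuming the module under test is importable as sqs per the question; the bucket/prefix values are placeholders:

import asyncio
import unittest

import boto3
from botocore.stub import Stubber

from sqs import restore_s3_objects  # module under test, per the question

class S3StubberTestCase(unittest.TestCase):
    def test_no_delete_markers_means_no_restore(self):
        # Dummy credentials/region keep boto3 from looking for real config.
        s3_client = boto3.client(
            's3', region_name='eu-west-1',
            aws_access_key_id='testing', aws_secret_access_key='testing')
        stubber = Stubber(s3_client)
        # Queue a fake, empty, non-truncated version listing and assert on
        # the exact parameters the code under test should send.
        stubber.add_response(
            'list_object_versions',
            {'IsTruncated': False},
            {'Bucket': 'testBucket', 'Prefix': 'some/prefix'})
        with stubber:
            result = asyncio.run(
                restore_s3_objects(s3_client, 'testBucket', 'some/prefix'))
        self.assertFalse(result)

The same pattern covers the SQS side: build a real sqs client, wrap it in its own Stubber, and queue a receive_message response.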

Jasmin SMSC Gateway / source connector in db

I installed Jasmin SMSC Gateway and it's working perfectly.
Now I'm trying to record the SMS messages in a MySQL database.
For that, I'm using the script below to get the messages from the RabbitMQ queues:
# -*- coding: utf-8 -*-
import pickle
from twisted.internet.defer import inlineCallbacks
from twisted.internet import reactor
from twisted.internet.protocol import ClientCreator
from twisted.python import log
from txamqp.protocol import AMQClient
from txamqp.client import TwistedDelegate
import txamqp.spec
#Mysql conn pool handler
import PySQLPool
@inlineCallbacks
def gotConnection(conn, username, password):
#print "Connected to broker."
yield conn.authenticate(username, password,'PLAIN')
print "Authenticated. Ready to receive messages"
chan = yield conn.channel(1)
yield chan.channel_open()
yield chan.queue_declare(queue="someQueueName10")
# Bind to submit.sm.* and submit.sm.resp.* routes
yield chan.queue_bind(queue="someQueueName10", exchange="messaging", routing_key='submit.sm.*')
yield chan.queue_bind(queue="someQueueName10", exchange="messaging", routing_key='deliver.sm.*')
yield chan.queue_bind(queue="someQueueName10", exchange="messaging", routing_key='submit.sm.resp.*')
yield chan.basic_consume(queue='someQueueName10', no_ack=True, consumer_tag="someTag")
queue = yield conn.queue("someTag")
#Build Mysql connection pool
PySQLPool.getNewPool().maxActiveConnections = 20 #Set how many reusable conns to buffer in the pool
print "Pooling 20 connections"
#Connection parameters - Fill this info with your MySQL server connection parameters
mysqlconn = PySQLPool.getNewConnection(
username='jasmin_db',
password='jasmindb',
host='127.0.0.1',
db='jasmin_db')
print "Connected to MySQL"
queryp = PySQLPool.getNewQuery(mysqlconn)
# Wait for messages
# This can be done through a callback ...
while True:
print 'test1'
msg = yield queue.get()
props = msg.content.properties
pdu = pickle.loads(msg.content.body)
print 'test'
print '%s' % (msg.routing_key)
if msg.routing_key[:15] == 'submit.sm.resp.':
print 'SubmitSMResp: status: %s, msgid: %s' % (pdu.status,
props['message-id'])
queryp.Query("UPDATE table_name SET status='%s' WHERE messageid='%s'" % (pdu.status,props['message-id']))
PySQLPool.commitPool()
elif msg.routing_key[:10] == 'submit.sm.':
print 'SubmitSM: from %s to %s, content: %s, msgid: %s supp %s ' % (pdu.params['source_addr'],
pdu.params['destination_addr'],
pdu.params['short_message'],
props['message-id'],
pdu.params['source_addr']
)
queryp.Query("INSERT INTO cdrs (messageid,carrier,date,dst,src,status,accountcode,cost,sale,plan_name,amaflags,content) VALUES ('%s','%s',NOW(),'%s','%s','%s','00000','0.0','0.0','plan_name','some_status','%s') " % (props
['message-id'],msg.routing_key.replace("submit.sm.",""), pdu.params['destination_addr'], pdu.params['source_addr'],pdu.status, pdu.params['short_message']) )
PySQLPool.commitPool()
else:
print 'unknown route'
# A clean way to tear down and stop
yield chan.basic_cancel("someTag")
yield chan.channel_close()
chan0 = yield conn.channel(0)
yield chan0.connection_close()
reactor.stop()
if __name__ == "__main__":
host = '127.0.0.1'
port = 5672
vhost = '/'
username = 'guest'
password = 'guest'
spec_file = '/etc/jasmin/resource/amqp0-9-1.xml'
spec = txamqp.spec.load(spec_file)
# Connect and authenticate
d = ClientCreator(reactor,
AMQClient,
delegate=TwistedDelegate(),
vhost=vhost,
spec=spec).connectTCP(host, port)
d.addCallback(gotConnection, username, password)
def whoops(err):
if reactor.running:
log.err(err)
reactor.stop()
d.addErrback(whoops)
reactor.run()
I'm able to save the messages in the database, yet I need a way to get the source connector or the user that sent the message and save it in the database as well.
Is there a way to achieve this?
There is a script in the Jasmin SMS gateway's GitHub; I think that will solve your question.
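For what it's worth, the script above already recovers a connector id by stripping the routing-key prefix (msg.routing_key.replace("submit.sm.","")). A small helper along the same lines, assuming Jasmin keeps the '<route>.<cid>' routing-key shape implied by the queue bindings above (mapping that cid to a Jasmin user may still require your route/user configuration):

def connector_from_routing_key(routing_key):
    # E.g. 'submit.sm.smppuser1' -> 'smppuser1'. The prefixes mirror the
    # queue_bind calls in the script; adjust if your bindings differ.
    for prefix in ('submit.sm.resp.', 'submit.sm.', 'deliver.sm.'):
        if routing_key.startswith(prefix):
            return routing_key[len(prefix):]
    return None

You could then store that value as an extra column in the INSERT, alongside (or instead of) the carrier field.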

GA management API and Python error with client_secrets.json file

I've recently opened a question about the GA Management API where I explained that the example script here didn't work for me. So, looking at the Google Analytics GitHub, I found a totally different example which doesn't work either, but I get a different error:
The client secrets were invalid:
Missing property "client_secret" in a client type of "installed".
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
client_secrets.json
with information from the APIs Console <https://code.google.com/apis/console>.
However, I did all this correctly and I have my client_secrets.json file. What are the differences between client_secrets authentication and p12 authentication? Is it possible that the Management API works only with client_secrets authentication? Does someone have a working code example?
Thanks.
Here is the script :
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Reference command-line example for Google Analytics Management API v3.
This application demonstrates how to use the python client library to access
all the pieces of data returned by the Google Analytics Management API v3.
The application manages authorization by saving an OAuth2.0 token in a local
file and reusing the token for subsequent requests. It then traverses the
Google Analytics Management hierarchy. It first retrieves and prints all the
authorized user's accounts, next it prints all the web properties for the
first account, then all the profiles for the first web property and finally
all the goals for the first profile. The sample then prints all the
user's advanced segments.
Before You Begin:
Update the client_secrets.json file
You must update the client_secrets.json file with a client id, client
secret, and the redirect uri. You get these values by creating a new project
in the Google APIs console and registering for OAuth2.0 for installed
applications: https://code.google.com/apis/console
Learn more about registering your analytics application here:
https://developers.google.com/analytics/devguides/config/mgmt/v3/mgmtAuthorization
Sample Usage:
$ python management_v3_reference.py
You can also get help on all the command-line flags the program
understands by running:
$ python management_v3_reference.py --help
"""
from __future__ import print_function
__author__ = 'api.nickm@gmail.com (Nick Mihailovski)'
import argparse
import sys
from googleapiclient.errors import HttpError
from googleapiclient import sample_tools
from oauth2client.client import AccessTokenRefreshError
def main(argv):
# Authenticate and construct service.
service, flags = sample_tools.init(
argv, 'analytics', 'v3', __doc__, __file__,
scope='https://www.googleapis.com/auth/analytics.readonly')
# Traverse the Management hierarchy and print results or handle errors.
try:
traverse_hiearchy(service)
except TypeError as error:
# Handle errors in constructing a query.
print(('There was an error in constructing your query : %s' % error))
except HttpError as error:
# Handle API errors.
print(('Arg, there was an API error : %s : %s' %
(error.resp.status, error._get_reason())))
except AccessTokenRefreshError:
print('The credentials have been revoked or expired, please re-run '
'the application to re-authorize')
def traverse_hiearchy(service):
"""Traverses the management API hiearchy and prints results.
This retrieves and prints the authorized user's accounts. It then
retrieves and prints all the web properties for the first account,
retrieves and prints all the profiles for the first web property,
and retrieves and prints all the goals for the first profile.
Args:
service: The service object built by the Google API Python client library.
Raises:
HttpError: If an error occurred when accessing the API.
AccessTokenRefreshError: If the current token was invalid.
"""
accounts = service.management().accounts().list().execute()
print_accounts(accounts)
if accounts.get('items'):
firstAccountId = accounts.get('items')[0].get('id')
webproperties = service.management().webproperties().list(
accountId=firstAccountId).execute()
print_webproperties(webproperties)
if webproperties.get('items'):
firstWebpropertyId = webproperties.get('items')[0].get('id')
profiles = service.management().profiles().list(
accountId=firstAccountId,
webPropertyId=firstWebpropertyId).execute()
print_profiles(profiles)
if profiles.get('items'):
firstProfileId = profiles.get('items')[0].get('id')
goals = service.management().goals().list(
accountId=firstAccountId,
webPropertyId=firstWebpropertyId,
profileId=firstProfileId).execute()
print_goals(goals)
print_segments(service.management().segments().list().execute())
def print_accounts(accounts_response):
"""Prints all the account info in the Accounts Collection.
Args:
accounts_response: The response object returned from querying the Accounts
collection.
"""
print('------ Account Collection -------')
print_pagination_info(accounts_response)
print()
for account in accounts_response.get('items', []):
print('Account ID = %s' % account.get('id'))
print('Kind = %s' % account.get('kind'))
print('Self Link = %s' % account.get('selfLink'))
print('Account Name = %s' % account.get('name'))
print('Created = %s' % account.get('created'))
print('Updated = %s' % account.get('updated'))
child_link = account.get('childLink')
print('Child link href = %s' % child_link.get('href'))
print('Child link type = %s' % child_link.get('type'))
print()
if not accounts_response.get('items'):
print('No accounts found.\n')
def print_webproperties(webproperties_response):
"""Prints all the web property info in the WebProperties collection.
Args:
webproperties_response: The response object returned from querying the
Webproperties collection.
"""
print('------ Web Properties Collection -------')
print_pagination_info(webproperties_response)
print()
for webproperty in webproperties_response.get('items', []):
print('Kind = %s' % webproperty.get('kind'))
print('Account ID = %s' % webproperty.get('accountId'))
print('Web Property ID = %s' % webproperty.get('id'))
print(('Internal Web Property ID = %s' %
webproperty.get('internalWebPropertyId')))
print('Website URL = %s' % webproperty.get('websiteUrl'))
print('Created = %s' % webproperty.get('created'))
print('Updated = %s' % webproperty.get('updated'))
print('Self Link = %s' % webproperty.get('selfLink'))
parent_link = webproperty.get('parentLink')
print('Parent link href = %s' % parent_link.get('href'))
print('Parent link type = %s' % parent_link.get('type'))
child_link = webproperty.get('childLink')
print('Child link href = %s' % child_link.get('href'))
print('Child link type = %s' % child_link.get('type'))
print()
if not webproperties_response.get('items'):
print('No webproperties found.\n')
def print_profiles(profiles_response):
"""Prints all the profile info in the Profiles Collection.
Args:
profiles_response: The response object returned from querying the
Profiles collection.
"""
print('------ Profiles Collection -------')
print_pagination_info(profiles_response)
print()
for profile in profiles_response.get('items', []):
print('Kind = %s' % profile.get('kind'))
print('Account ID = %s' % profile.get('accountId'))
print('Web Property ID = %s' % profile.get('webPropertyId'))
print(('Internal Web Property ID = %s' %
profile.get('internalWebPropertyId')))
print('Profile ID = %s' % profile.get('id'))
print('Profile Name = %s' % profile.get('name'))
print('Currency = %s' % profile.get('currency'))
print('Timezone = %s' % profile.get('timezone'))
print('Default Page = %s' % profile.get('defaultPage'))
print(('Exclude Query Parameters = %s' %
profile.get('excludeQueryParameters')))
print(('Site Search Category Parameters = %s' %
profile.get('siteSearchCategoryParameters')))
print(('Site Search Query Parameters = %s' %
profile.get('siteSearchQueryParameters')))
print('Created = %s' % profile.get('created'))
print('Updated = %s' % profile.get('updated'))
print('Self Link = %s' % profile.get('selfLink'))
parent_link = profile.get('parentLink')
print('Parent link href = %s' % parent_link.get('href'))
print('Parent link type = %s' % parent_link.get('type'))
child_link = profile.get('childLink')
print('Child link href = %s' % child_link.get('href'))
print('Child link type = %s' % child_link.get('type'))
print()
if not profiles_response.get('items'):
print('No profiles found.\n')
def print_goals(goals_response):
"""Prints all the goal info in the Goals collection.
Args:
goals_response: The response object returned from querying the Goals
collection
"""
print('------ Goals Collection -------')
print_pagination_info(goals_response)
print()
for goal in goals_response.get('items', []):
print('Goal ID = %s' % goal.get('id'))
print('Kind = %s' % goal.get('kind'))
print('Self Link = %s' % goal.get('selfLink'))
print('Account ID = %s' % goal.get('accountId'))
print('Web Property ID = %s' % goal.get('webPropertyId'))
print(('Internal Web Property ID = %s' %
goal.get('internalWebPropertyId')))
print('Profile ID = %s' % goal.get('profileId'))
print('Goal Name = %s' % goal.get('name'))
print('Goal Value = %s' % goal.get('value'))
print('Goal Active = %s' % goal.get('active'))
print('Goal Type = %s' % goal.get('type'))
print('Created = %s' % goal.get('created'))
print('Updated = %s' % goal.get('updated'))
parent_link = goal.get('parentLink')
print('Parent link href = %s' % parent_link.get('href'))
print('Parent link type = %s' % parent_link.get('type'))
# Print the goal details depending on the type of goal.
if goal.get('urlDestinationDetails'):
print_url_destination_goal_details(
goal.get('urlDestinationDetails'))
elif goal.get('visitTimeOnSiteDetails'):
print_visit_time_on_site_goal_details(
goal.get('visitTimeOnSiteDetails'))
elif goal.get('visitNumPagesDetails'):
print_visit_num_pages_goal_details(
goal.get('visitNumPagesDetails'))
elif goal.get('eventDetails'):
print_event_goal_details(goal.get('eventDetails'))
print()
if not goals_response.get('items'):
print('No goals found.\n')
def print_url_destination_goal_details(goal_details):
"""Prints all the URL Destination goal type info.
Args:
goal_details: The details portion of the goal response.
"""
print('------ Url Destination Goal -------')
print('Goal URL = %s' % goal_details.get('url'))
print('Case Sensitive = %s' % goal_details.get('caseSensitive'))
print('Match Type = %s' % goal_details.get('matchType'))
print('First Step Required = %s' % goal_details.get('firstStepRequired'))
print('------ Url Destination Goal Steps -------')
for goal_step in goal_details.get('steps', []):
print('Step Number = %s' % goal_step.get('number'))
print('Step Name = %s' % goal_step.get('name'))
print('Step URL = %s' % goal_step.get('url'))
if not goal_details.get('steps'):
print('No Steps Configured')
def print_visit_time_on_site_goal_details(goal_details):
"""Prints all the Visit Time On Site goal type info.
Args:
goal_details: The details portion of the goal response.
"""
print('------ Visit Time On Site Goal -------')
print('Comparison Type = %s' % goal_details.get('comparisonType'))
print('comparison Value = %s' % goal_details.get('comparisonValue'))
def print_visit_num_pages_goal_details(goal_details):
"""Prints all the Visit Num Pages goal type info.
Args:
goal_details: The details portion of the goal response.
"""
print('------ Visit Num Pages Goal -------')
print('Comparison Type = %s' % goal_details.get('comparisonType'))
print('comparison Value = %s' % goal_details.get('comparisonValue'))
def print_event_goal_details(goal_details):
"""Prints all the Event goal type info.
Args:
goal_details: The details portion of the goal response.
"""
print('------ Event Goal -------')
print('Use Event Value = %s' % goal_details.get('useEventValue'))
for event_condition in goal_details.get('eventConditions', []):
event_type = event_condition.get('type')
print('Type = %s' % event_type)
if event_type in ('CATEGORY', 'ACTION', 'LABEL'):
print('Match Type = %s' % event_condition.get('matchType'))
print('Expression = %s' % event_condition.get('expression'))
else: # VALUE type.
print('Comparison Type = %s' % event_condition.get('comparisonType'))
print('Comparison Value = %s' % event_condition.get('comparisonValue'))
def print_segments(segments_response):
"""Prints all the segment info in the Segments collection.
Args:
segments_response: The response object returned from querying the
Segments collection.
"""
print('------ Segments Collection -------')
print_pagination_info(segments_response)
print()
for segment in segments_response.get('items', []):
print('Segment ID = %s' % segment.get('id'))
print('Kind = %s' % segment.get('kind'))
print('Self Link = %s' % segment.get('selfLink'))
print('Name = %s' % segment.get('name'))
print('Definition = %s' % segment.get('definition'))
print('Created = %s' % segment.get('created'))
print('Updated = %s' % segment.get('updated'))
print()
def print_pagination_info(management_response):
"""Prints common pagination details.
Args:
management_response: The common response object for each collection in the
Management API.
"""
print('Items per page = %s' % management_response.get('itemsPerPage'))
print('Total Results = %s' % management_response.get('totalResults'))
print('Start Index = %s' % management_response.get('startIndex'))
# These only have values if other result pages exist.
if management_response.get('previousLink'):
print('Previous Link = %s' % management_response.get('previousLink'))
if management_response.get('nextLink'):
print('Next Link = %s' % management_response.get('nextLink'))
if __name__ == '__main__':
main(sys.argv)
client_secrets.json is OAuth2 authentication, meaning a user is first prompted for access to their account.
A P12 file is service account authentication. Service accounts are like dummy users: they are pre-authorized, which means there will be no prompt for user consent. To pre-authorize a service account for Google Analytics, the service account's email address must be added as a user at the ACCOUNT level in the admin section of the Google Analytics website. Yes, service accounts work with the Management API.
Note: OAuth2 credentials and service account credentials are created in the Google Developer Console when registering your application. It is now possible to get a .json file for service accounts instead of only the p12. However, the code used to authenticate a service account and OAuth2 are different, and these two .json files cannot be interchanged. (Hope that makes sense.)
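A minimal sketch of the service-account flow against the Management API, using the same oauth2client/googleapiclient stack as the sample above ('service-account.json' is a placeholder for the JSON key file downloaded from the Developer Console; ServiceAccountCredentials.from_p12_keyfile exists for the older .p12 format):

import httplib2
from googleapiclient.discovery import build
from oauth2client.service_account import ServiceAccountCredentials

SCOPES = ['https://www.googleapis.com/auth/analytics.readonly']

credentials = ServiceAccountCredentials.from_json_keyfile_name(
    'service-account.json', SCOPES)  # placeholder key file name
http = credentials.authorize(httplib2.Http())
service = build('analytics', 'v3', http=http)

# This only returns data once the service account's email address has been
# added as a user at the ACCOUNT level in the Google Analytics admin UI.
accounts = service.management().accounts().list().execute()
print(accounts.get('items', []))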
Further reading:
My tutorial on Oauth2 Google Developer Console Oauth2 credentials
My tutorial on service accounts Google Developer console service account

Searching for keywords with pycurl: Python is stuck at the shell, returning nothing

I am trying to get tweets related to the keywords in the code, but at the Python shell there is nothing, just the cursor. No traceback, nothing. The code is here:
import time
import pycurl
import urllib
import json
import oauth2 as oauth
API_ENDPOINT_URL = 'https://stream.twitter.com/1.1/statuses/filter.json'
USER_AGENT = 'TwitterStream 1.0' # This can be anything really
# You need to replace these with your own values
OAUTH_KEYS = {'consumer_key': 'ABC',
'consumer_secret': 'ABC',
'access_token_key': 'ABC',
'access_token_secret': 'ABC'}
# These values are posted when setting up the connection
POST_PARAMS = {'include_entities': 0,
'stall_warning': 'true',
'track': 'iphone,ipad,ipod'}
class TwitterStream:
def __init__(self, timeout=False):
self.oauth_token = oauth.Token(key=OAUTH_KEYS['access_token_key'], secret=OAUTH_KEYS['access_token_secret'])
self.oauth_consumer = oauth.Consumer(key=OAUTH_KEYS['consumer_key'], secret=OAUTH_KEYS['consumer_secret'])
self.conn = None
self.buffer = ''
self.timeout = timeout
self.setup_connection()
def setup_connection(self):
""" Create persistant HTTP connection to Streaming API endpoint using cURL.
"""
if self.conn:
self.conn.close()
self.buffer = ''
self.conn = pycurl.Curl()
# Restart connection if less than 1 byte/s is received during "timeout" seconds
if isinstance(self.timeout, int):
self.conn.setopt(pycurl.LOW_SPEED_LIMIT, 1)
self.conn.setopt(pycurl.LOW_SPEED_TIME, self.timeout)
self.conn.setopt(pycurl.URL, API_ENDPOINT_URL)
self.conn.setopt(pycurl.USERAGENT, USER_AGENT)
# Using gzip is optional but saves us bandwidth.
self.conn.setopt(pycurl.ENCODING, 'deflate, gzip')
self.conn.setopt(pycurl.POST, 1)
self.conn.setopt(pycurl.POSTFIELDS, urllib.urlencode(POST_PARAMS))
self.conn.setopt(pycurl.HTTPHEADER, ['Host: stream.twitter.com',
'Authorization: %s' % self.get_oauth_header()])
# self.handle_tweet is the method that are called when new tweets arrive
self.conn.setopt(pycurl.WRITEFUNCTION, self.handle_tweet)
def get_oauth_header(self):
""" Create and return OAuth header.
"""
params = {'oauth_version': '1.0',
'oauth_nonce': oauth.generate_nonce(),
'oauth_timestamp': int(time.time())}
req = oauth.Request(method='POST', parameters=params, url='%s?%s' % (API_ENDPOINT_URL,
urllib.urlencode(POST_PARAMS)))
req.sign_request(oauth.SignatureMethod_HMAC_SHA1(), self.oauth_consumer, self.oauth_token)
return req.to_header()['Authorization'].encode('utf-8')
def start(self):
""" Start listening to Streaming endpoint.
Handle exceptions according to Twitter's recommendations.
"""
backoff_network_error = 0.25
backoff_http_error = 5
backoff_rate_limit = 60
while True:
self.setup_connection()
try:
self.conn.perform()
except:
# Network error, use linear back off up to 16 seconds
print 'Network error: %s' % self.conn.errstr()
print 'Waiting %s seconds before trying again' % backoff_network_error
time.sleep(backoff_network_error)
backoff_network_error = min(backoff_network_error + 1, 16)
continue
# HTTP Error
sc = self.conn.getinfo(pycurl.HTTP_CODE)
if sc == 420:
# Rate limit, use exponential back off starting with 1 minute and double each attempt
print 'Rate limit, waiting %s seconds' % backoff_rate_limit
time.sleep(backoff_rate_limit)
backoff_rate_limit *= 2
else:
# HTTP error, use exponential back off up to 320 seconds
print 'HTTP error %s, %s' % (sc, self.conn.errstr())
print 'Waiting %s seconds' % backoff_http_error
time.sleep(backoff_http_error)
backoff_http_error = min(backoff_http_error * 2, 320)
def handle_tweet(self, data):
""" This method is called when data is received through Streaming endpoint.
"""
self.buffer += data
if data.endswith('\r\n') and self.buffer.strip():
# complete message received
message = json.loads(self.buffer)
self.buffer = ''
msg = ''
if message.get('limit'):
print 'Rate limiting caused us to miss %s tweets' % (message['limit'].get('track'))
elif message.get('disconnect'):
raise Exception('Got disconnect: %s' % message['disconnect'].get('reason'))
elif message.get('warning'):
print 'Got warning: %s' % message['warning'].get('message')
else:
print 'Got tweet with text: %s' % message.get('text')
if __name__ == '__main__':
ts = TwitterStream()
ts.setup_connection()
ts.start()
Please help me resolve the issue with this code.
