Get more details from exception - python

Trying to run Google spreadsheets sample application.
Got exception while uploading credentials:
try:
creds = store.get()
except Exception as e:
print(e)
print("exception end")
Strange is fact that only line _module was printed while executing Exception code block.
How to understand what exactly was wrong?
What does _module means?
Whole code:
from __future__ import print_function
from googleapiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools
# If modifying these scopes, delete the file token.json.
SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly'
# The ID and range of a sample spreadsheet.
SAMPLE_SPREADSHEET_ID = '1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms'
SAMPLE_RANGE_NAME = 'Class Data!A2:E'
def main():
print("starting")
"""Shows basic usage of the Sheets API.
Prints values from a sample spreadsheet.
"""
#store = file.Storage('token.json')
store = file.Storage('D:\pyth_nonsens\workspace_python\PyhonTutorial\google\credentials.json')
#store = file.Storage('My Project aaa-8102e33b9fef.json')
try:
creds = store.get()
except Exception as e:
print(e)
print("exception end")
if not creds or creds.invalid:
flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
creds = tools.run_flow(flow, store)
service = build('sheets', 'v4', http=creds.authorize(Http()))
# Call the Sheets API
SPREADSHEET_ID = '1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms'
RANGE_NAME = 'Class Data!A2:E'
result = service.spreadsheets().values().get(spreadsheetId=SPREADSHEET_ID,
range=RANGE_NAME).execute()
values = result.get('values', [])
if not values:
print('No data found.')
else:
print('Name, Major:')
for row in values:
# Print columns A and E, which correspond to indices 0 and 4.
print('%s, %s' % (row[0], row[4]))
print(__name__)
if __name__ == 'test_spread':
print("true")
main()

Related

Google API Multi-Processing

I'm trying to grab specific information from emails under my Gmail account (Subject, From, Date, Message Body) and was able to do so succesfully using the Google API and relevant libraries, however, I've noticed the more emails you have the longer it takes to parse, so much so that parsing 34 emails takes nearly 15 seconds, which is bad if you tried to scale that to parsing 1000 emails. My aim was to utilise concurrency/multi-processing on the parse_messages() function, however, I've had no luck and keep returning an empty list. The aim is to process all the emails, then append them all to a the combined list.
Apologies for the sloppyness, it's yet to be cleaned up, there's less than 100 lines in total.
from __future__ import print_function
import os.path
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from concurrent.futures import ProcessPoolExecutor
import base64
import re
combined = []
def authenticate():
# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']
creds = None
if os.path.exists('token.json'):
creds = Credentials.from_authorized_user_file('token.json', SCOPES)
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'creds.json', SCOPES)
creds = flow.run_local_server(port=0)
with open('token.json', 'w') as token:
token.write(creds.to_json())
return creds
def get_messages(creds):
# Get the messages
days = 31
service = build('gmail', 'v1', credentials=creds)
results = service.users().messages().list(userId='me', q=f'newer_than:{days}d, in:inbox').execute()
messages = results.get('messages', [])
message_count = len(messages)
print(f"You've received {message_count} email(s) in the last {days} days")
if not messages:
print(f'No Emails found in the last {days} days.')
return messages
def parse_message(msg):
# Call the Gmail API
service = build('gmail', 'v1', credentials=creds)
txt = service.users().messages().get(userId='me', id=msg['id']).execute()
payload = txt['payload']
headers = payload['headers']
#Grab the Subject Line, From and Date from the Email
for d in headers:
if d['name'] == 'Subject':
subject = d['value']
if d['name'] == 'From':
sender = d['value']
try:
match = re.search(r'<(.*)>', sender).group(1)
except:
match = sender
if d['name'] == "Date":
date_received = d['value']
def get_body(payload):
if 'body' in payload and 'data' in payload['body']:
return payload['body']['data']
elif 'parts' in payload:
for part in payload['parts']:
data = get_body(part)
if data:
return data
else:
return None
data = get_body(payload)
data = data.replace("-","+").replace("_","/")
decoded_data = base64.b64decode(data).decode("UTF-8")
decoded_data = (decoded_data.encode('ascii', 'ignore')).decode("UTF-8")
decoded_data = decoded_data.replace('\n','').replace('\r','').replace('\t', '')
# Append parsed message to shared list
return combined.append([date_received, subject, match, decoded_data])
if __name__ == '__main__':
creds = authenticate()
messages = get_messages(creds)
# Create a process pool with 4 worker processes
with ProcessPoolExecutor(max_workers=4) as executor:
# Submit the parse_message function for each message in the messages variable
executor.map(parse_message, messages)
print(f"Combined: {combined}")
When running the script, my output is normally.
You've received 34 email(s) in the last 31 days
combined: []
Thanks to the help of simpleApp, I made their changes along with a few others to get this working.
# Append parsed message to shared list
return [date_received, subject, match, decoded_data]
if __name__ == '__main__':
creds = authenticate()
messages, service = get_messages(creds)
# Create a process pool with default worker processes
with ProcessPoolExecutor() as executor:
combined = []
# Submit the parse_message function for each message in the messages variable
all_pools = executor.map(parse_message, messages, [service]*len(messages))
for e_p in all_pools:
combined.append(e_p)

how make use of multiprocessing in python to make api calls simultaneously

// function to verify user login
def get_authenticated_service(file,client_secret_file):
credentials = None
if os.path.exists(file):
print('Loading Credentials From File...')
with open(file, 'rb') as token:
credentials = pickle.load(token)
return build(API_SERVICE_NAME, API_VERSION, credentials = credentials)
if not credentials or not credentials.valid:
if credentials and credentials.expired and credentials.refresh_token:
print('Refreshing Access Token...')
credentials.refresh(Request())
else:
print('Fetching New Tokens...')
flow = InstalledAppFlow.from_client_secrets_file(
client_secret_file,
scopes=[
'https://www.googleapis.com/auth/youtube.readonly',
'https://www.googleapis.com/auth/youtube.upload',
'https://www.googleapis.com/auth/youtube.force-ssl'
]
)
flow.run_local_server(port=8000, prompt='consent',
authorization_prompt_message='')
credentials = flow.credentials
# Save the credentials for the next run
with open(file, 'wb') as f:
print('Saving Credentials for Future Use...')
pickle.dump(credentials, f)
return build(API_SERVICE_NAME, API_VERSION, credentials = credentials)
//function for verify user and make https body request
def initialize_upload( options, file, tokenFile, client_secret_file):
youtube = get_authenticated_service(tokenFile,client_secret_file)
tags = None
if options.keywords:
tags = options.keywords.split(',')
body=dict(
snippet=dict(
title=options.title,
description=options.description,
tags=tags,
categoryId=options.category
),
status=dict(
privacyStatus=options.privacyStatus
)
)
# Call the API's videos.insert method to create and upload the video.
print("running")
insert_request = youtube.videos().insert(
part=','.join(body.keys()),
body=body,
media_body=MediaFileUpload(file, chunksize=-1, resumable=True)
)
print("multiprocessing start")
resumable_upload(insert_request)
print("multiprocessing ends")
//function for uploading file
def resumable_upload(request):
response = None
error = None
retry = 0
print("running by " + multiprocessing.current_process().name)
while response is None:
try:
print('Uploading file...')
status, response = request.next_chunk()
print('Uploading file done...')
if response is not None:
if 'id' in response:
print('Video id "%s" was successfully uploaded.' % response['id'])
else:
exit('The upload failed with an unexpected response: %s' % response)
except HttpError as e:
if e.resp.status in RETRIABLE_STATUS_CODES:
error = 'A retriable HTTP error %d occurred:\n%s' % (e.resp.status,e.content)
else:
raise
except RETRIABLE_EXCEPTIONS as e:
error = 'A retriable error occurred: %s' % e
if error is not None:
print(error)
retry += 1
if retry > MAX_RETRIES:
exit('No longer attempting to retry.')
max_sleep = 2 ** retry
# sleep_seconds = random.random() * max_sleep
sleep_seconds = 0.5
print('Sleeping %f seconds and then retrying...' % sleep_seconds)
time.sleep(sleep_seconds)
// then title, description etc passing as arguments and making multiprocessing call
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--title', help='Video title', default='Android react native code sample(ANdroid emulator)')
parser.add_argument('--description', help='Android react native code sample description',
default='Android react native code sample description(ANdroid emulator)')
parser.add_argument('--category', default='22',
help='Numeric video category. ' +
'See https://developers.google.com/youtube/v3/docs/videoCategories/list')
parser.add_argument('--keywords', help='react native, react, android, emulator',
default='react native, react')
parser.add_argument('--privacyStatus', choices=VALID_PRIVACY_STATUSES,
default='private', help='Video privacy status.')
args = parser.parse_args()
try:
l = []
start = time.perf_counter()
t1 = multiprocessing.Process(target=initialize_upload, args=(args, "android1.mp4", 'token1.pickle','client_secret.json'))
t2 = multiprocessing.Process(target=initialize_upload, args=(args, "android1.mp4", 'token2.pickle','client_secret.json'))
t1.start()
t2.start()
t1.join()
t2.join()
finish = time.perf_counter()
print(f'Finished in {round(finish-start, 2)} seconds..')
except HttpError as e:
print('An HTTP error %d occurred:\n%s' % (e.resp.status, e.content))
if i try to make this call it takes 12.09 secs and without multiprocessing (i.e calling simultaneously one after another it takes 12.39 secs). after debugging little bit i get to know that till print("uploading file...) (in resumable_upload()) its working properly but after that its like working one at a time. Can anyone suggest how i can make him work with multiprocessing ? below is the output of this file

How do I run and return multiple queries simultaneously using the Youtube Analytics API?

In Code Sample #1 I am running a successful query that returns the the viewer percentage of different age groups from a user's youtube account. My ultimate goal is to run multiple queries at the same time and return the results for those queries. Code Sample #2 is my example of implementation for this method but Google then forces two OAuth login pages instead of using the credentials from just one to address the queries. What is the best implementation to accomplish this?
Code Sample #1
import os
import json
import pandas as pd
from pandas import json_normalize
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from googleapiclient.discovery import build
from collections import OrderedDict
from tabulate import tabulate
import mysql.connector
from mysql.connector import errorcode
creator_ID = input("Creator ID: ")
try:
db2 = mysql.connector.connect(user = 'admin',
password = 'abcdefg',
host = 'database',
database = 'app')
if db2.is_connected():
db_Info = db2.get_server_info()
print("Connected to MySQL Server version ", db_Info)
cursor = db2.cursor()
cursor.execute("select database();")
record = cursor.fetchone()
print("You're connected to database: ", record)
except mysql.connector.Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err)
else:
print("Successfully connect to database!")
cursor = db2.cursor()
try:
cursor.execute("""__""", list(creator_ID))
ig_long_lived_token = cursor.fetchall()[0]
print("Successfully get long lived token!")
except mysql.connector.IntegrityError as err:
print("Error: {}".format(err))
SCOPES=['https://www.googleapis.com/auth/yt-analytics.readonly']
API_SERVICE_NAME = 'youtubeAnalytics'
API_VERSION = 'v2'
CLIENT_SECRETS_FILE = 'client_secrets3.json'
def get_service():
flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRETS_FILE, SCOPES)
credentials = flow.run_local_server(port=8080, prompt="consent", authorization_prompt_message="")
return build(API_SERVICE_NAME, API_VERSION, credentials=credentials)
def execute_api_request(client_library_function, **kwargs):
response = client_library_function(
**kwargs
).execute()
print(response)
return response
def create_table(table, headers=None):
if headers:
headerstring = "\t{}\t" * len(headers)
print(headerstring.format(*headers))
rowstring = "\t{}\t" * len(table[0])
for row in table:
print(rowstring.format(*row))
if __name__ == '__main__':
youtubeAnalytics = get_service()
result = execute_api_request(
youtubeAnalytics.reports().query,
ids='channel==MINE',
startDate='2005-05-01',
endDate='2023-01-01',
dimensions='ageGroup',
metrics='viewerPercentage'
)
headers = ['viewerPercentage']
print(tabulate(result['rows'], headers=headers, tablefmt='pretty'))
-----------------------------------------------------------------------------------------
Code Sample #2
def ageGroup_fetch():
youtubeAnalytics = get_service()
result = execute_api_request(
youtubeAnalytics.reports().query,
ids='channel==MINE',
startDate='2005-05-01',
endDate='2023-01-01',
dimensions='ageGroup',
metrics='viewerPercentage'
)
def gender_fetch():
youtubeAnalytics = get_service()
result = execute_api_request(
youtubeAnalytics.reports().query,
ids='channel==MINE',
startDate='2005-05-01',
endDate='2023-01-01',
dimensions='gender',
metrics='viewerPercentage'
)
if __name__ == '__main__':
ageGroup_fetch()
gender_fetch()

What could be a problem when running a Python script?

In the python I am starting and I have a problem with the use of example scripts, however, there is a problem with the operation even using arguments when calling the script.
I will not say that this script solves a lot of my problems without servicing data writing.
And I will not say that it would be useful to run it :)
I've been giving arguments and nothing in any way
import httplib2
import os
import sys
import csv
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
import argparse
from oauth2client.tools import argparser, run_flow
library/python/guide/aaa_client_secrets
CLIENT_SECRETS_FILE = "client_secret.json"
YOUTUBE_SCOPES = ["https://www.googleapis.com/auth/youtube.readonly",
"https://www.googleapis.com/auth/yt-analytics.readonly"]
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
YOUTUBE_ANALYTICS_API_SERVICE_NAME = "youtubeAnalytics"
YOUTUBE_ANALYTICS_API_VERSION = "v1"
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the Developers Console
https://console.developers.google.com/
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
CLIENT_SECRETS_FILE))
def get_authenticated_services(args):
flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
scope=" ".join(YOUTUBE_SCOPES),
message=MISSING_CLIENT_SECRETS_MESSAGE)
storage = Storage("%s-oauth2.json" % sys.argv[0])
credentials = storage.get()
if credentials is None or credentials.invalid:
credentials = run_flow(flow, storage, args)
http = credentials.authorize(httplib2.Http())
youtube_analytics = build(YOUTUBE_ANALYTICS_API_SERVICE_NAME,
YOUTUBE_ANALYTICS_API_VERSION, http=http)
return youtube_analytics
def run_analytics_report(youtube_analytics, options, count):
# Call the Analytics API to retrieve a report. For a list of available
# reports, see:
# https://developers.google.com/youtube/analytics/v1/channel_reports
analytics_query_response = youtube_analytics.reports().query(
ids="channel==%s" % options.channel_id,
metrics=options.metrics,
dimensions=options.dimensions,
filters=options.filters,
start_date=options.start_date,
end_date=options.end_date,
#max_results=options.max_results,
sort=options.sort
).execute()
print "Analytics Data for Channel %s" % options.channel_id
if count == 0:
with open('results.csv', 'w') as csv_out:
csvWriter=csv.writer(csv_out, delimiter=',', lineterminator = '\n')
headers = [ch["name"] for ch in analytics_query_response.get("columnHeaders", [])]
headers.append("country")
csvWriter.writerow(headers)
else:
with open('results.csv', 'a') as csv_out:
csvWriter=csv.writer(csv_out, delimiter=',', lineterminator = '\n')
for row in analytics_query_response.get("rows", []):
values = []
for value in row:
values.append(str(value))
values.append((options.filters[9]+""+options.filters[10]))
csvWriter.writerow(values)
print "Results exported to csv"
if __name__ == "__main__":
count = 0
now = datetime.now()
one_day_ago = (now - timedelta(days=1)).strftime("%Y-%m-%d")
one_week_ago = (now - timedelta(days=7)).strftime("%Y-%m-%d")
f = open('countries.csv', 'rb')
reader = csv.reader(f)
for row in reader:
argparser = argparse.ArgumentParser()
argparser.add_argument("--channel-id", help="Channel ID",
default="UCJ5v_MCY6GNUBTO8-D3XoAg")
argparser.add_argument("--metrics", help="Report metrics",
default="views,estimatedMinutesWatched")
argparser.add_argument("--dimensions", help="Report dimensions",
default="deviceType")
argparser.add_argument("--filters", help="Report filters",
default="country==" + ''.join(row))
argparser.add_argument("--start-date", default=one_week_ago,
help="Start date, in YYYY-MM-DD format")
argparser.add_argument("--end-date", default=one_day_ago,
help="End date, in YYYY-MM-DD format")
#argparser.add_argument("--max-results", help="Max results", default=10)
argparser.add_argument("--sort", help="Sort order", default="-views")
args = argparser.parse_args()
youtube_analytics = get_authenticated_services(args)
try:
run_analytics_report(youtube_analytics, args, count)
count = count + 1
except HttpError, e:
print "An HTTP error %d occurred:\n%s" % (e.resp.status, e.content)
I do not know, my question is howling.
When running the script even giving arguments shows the following message?
File "yt-mario.py", line 91
print "Analytics Data for Channel %s" % options.channel_id
^
SyntaxError: Missing parentheses in call to 'print'. Did you mean print("Analytics Data for Channel %s" % options.channel_id)?
SyntaxError: Missing parentheses in call to 'print'. Did you mean print("Analytics Data for Channel %s" % options.channel_id)?
This code is probably Python2.7, where you don't need to put parenthesis around the print statement.
Try running this using Python 2.7.

Test a cell on a Google Sheet and write to that row

I've got a Google Sheet with students names (columns A and B) and some value in the column next to it (column C). I'd like to test each value in turn and write to that row's (column D) if the test resolves to True.
My code so far:
from __future__ import print_function
import httplib2
import os
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
# Don't know what this is for
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/sheets.googleapis.com-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Google Sheets API'
# just copied code from Google's Dev guide
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'sheets.googleapis.com-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
# PROBLEM IS IN THIS METHOD
def main():
# Hope Google doesn't change all this API stuff and make us redo:
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?'
'version=v4')
service = discovery.build('sheets', 'v4', http=http,
discoveryServiceUrl=discoveryUrl)
# Fake URI of our sheet
spreadsheetId = '5lksdfusdlfkjkj886kJUNKssdff'
# retrieve data works
rangeName = 'Sheet1!A2:D70'
result = service.spreadsheets().values().get(
spreadsheetId=spreadsheetId, range=rangeName).execute()
values = result.get('values', [])
if not values:
print('No data found.')
else:
for row in values:
print('Checking on: %s %s' % (row[0], row[1]))
# PROBLEM... can't seem
if row[2] == "Some arbitrary condition":
# Even trying to hardcode a range doesn't work
# I'd like to reference the coordinates of this row
result = service.spreadsheets().values().update(
spreadsheetId=spreadsheetId, range='Sheet1!D64',
body="can you see me?").execute()
print(result)
if __name__ == '__main__':
main()

Categories

Resources