Serialize and de-serialize oauth2client.client.OAuth2Credentials - python

So I have an object, which is the credentials from an OAuth2 authorization for a web service. I want to save the user's credentials so I can continue to use them in the future. I'm using Django.
The object is: <oauth2client.client.OAuth2Credentials object at 0x104b47310>
I'm not sure how I can stringify the credentials and then build the credentials object back from a string.
Sample code as requested:
#!/usr/bin/python
import httplib2
from apiclient import errors
from apiclient.discovery import build
from oauth2client.client import OAuth2WebServerFlow
# Copy your credentials from the console
CLIENT_ID = 'YOUR_CLIENT_ID'
CLIENT_SECRET = 'YOUR_CLIENT_SECRET'
# Check https://developers.google.com/webmaster-tools/search-console-api-original/v3/ for all available scopes
OAUTH_SCOPE = 'https://www.googleapis.com/auth/webmasters.readonly'
# Redirect URI for installed apps
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
# Run through the OAuth flow and retrieve credentials
flow = OAuth2WebServerFlow(CLIENT_ID, CLIENT_SECRET, OAUTH_SCOPE, REDIRECT_URI)
authorize_url = flow.step1_get_authorize_url()
print 'Go to the following link in your browser: ' + authorize_url
code = raw_input('Enter verification code: ').strip()
credentials = flow.step2_exchange(code)
# Create an httplib2.Http object and authorize it with our credentials
http = httplib2.Http()
http = credentials.authorize(http)
webmasters_service = build('webmasters', 'v3', http=http)
# Retrieve list of properties in account
site_list = webmasters_service.sites().list().execute()
# Filter for verified websites
verified_sites_urls = [s['siteUrl'] for s in site_list['siteEntry']
                       if s['permissionLevel'] != 'siteUnverifiedUser'
                       and s['siteUrl'][:4] == 'http']
# Printing the URLs of all websites you are verified for.
for site_url in verified_sites_urls:
    print site_url
    # Retrieve list of sitemaps submitted
    sitemaps = webmasters_service.sitemaps().list(siteUrl=site_url).execute()
    if 'sitemap' in sitemaps:
        sitemap_urls = [s['path'] for s in sitemaps['sitemap']]
        print " " + "\n ".join(sitemap_urls)

You can use the pickle module to serialize and de-serialize Python objects. Here is untested code:
import pickle

# Store OAuth2Credentials to a file
with open(FILENAME, 'wb') as credentials_file:
    pickle.dump(credentials, credentials_file)

# Read OAuth2Credentials from file
with open(FILENAME, 'rb') as credentials_file:
    credentials = pickle.load(credentials_file)
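Since the credentials ultimately need to live as a string (you mentioned Django), it may also be worth knowing that oauth2client can serialize the object itself; this is a minimal, untested sketch using its JSON helpers:
from oauth2client.client import OAuth2Credentials

# Stringify: dump the credentials to a JSON string that can be stored in a
# model TextField, the session, a cache, etc.
json_string = credentials.to_json()

# Later: rebuild the OAuth2Credentials object from that string.
credentials = OAuth2Credentials.from_json(json_string)
oauth2client also ships Django helpers (a CredentialsField model field plus a Storage class) that keep the object in a model directly, if your version of the library includes them.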

Related

How to Capture The OAuth Redirect to POST A Response

So my colleague and I have an application where we need to capture the OAuth redirect from Google's OAuth server response; the reason is that we need to send a payload to renew our pickle token, and we need to do it without human intervention. The code looks like this:
#!/usr/bin/env python3
import pickle
import os.path
import pandas as pd
import requests
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import base64
from datetime import datetime, timedelta
from urllib.parse import unquote
from bs4 import BeautifulSoup
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']
def search_message(service, user_id, search_string):
    """
    Search the inbox for emails using standard gmail search parameters
    and return a list of email IDs for each result
    PARAMS:
        service: the google api service object already instantiated
        user_id: user id for google api service ('me' works here if
            already authenticated)
        search_string: search operators you can use with Gmail
            (see https://support.google.com/mail/answer/7190?hl=en for a list)
    RETURNS:
        List containing email IDs of search query
    """
    try:
        # initiate the list for returning
        list_ids = []
        # get the id of all messages that are in the search string
        search_ids = service.users().messages().list(userId=user_id, q=search_string).execute()
        # if there were no results, print warning and return empty string
        try:
            ids = search_ids['messages']
        except KeyError:
            print("WARNING: the search query returned 0 results")
            print("returning an empty string")
            return ""
        if len(ids) > 1:
            for msg_id in ids:
                list_ids.append(msg_id['id'])
            return list_ids
        else:
            list_ids.append(ids['id'])
            return list_ids
    except Exception as error:
        print("An error occurred: %s" % error)
def get_message(service, user_id, msg_id):
    """
    Search the inbox for specific message by ID and return it back as a
    clean string. String may contain Python escape characters for newline
    and return line.
    PARAMS
        service: the google api service object already instantiated
        user_id: user id for google api service ('me' works here if
            already authenticated)
        msg_id: the unique id of the email you need
    RETURNS
        A string of encoded text containing the message body
    """
    try:
        final_list = []
        message = service.users().messages().get(userId=user_id, id=msg_id).execute()  # fetch the message using API
        payld = message['payload']  # get payload of the message
        report_link = ""
        mssg_parts = payld['parts']  # fetching the message parts
        part_one = mssg_parts[1]  # fetching first element of the part
        # part_onee = part_one['parts'][1]
        # print(part_one)
        part_body = part_one['body']  # fetching body of the message
        part_data = part_body['data']  # fetching data from the body
        clean_one = part_data.replace("-", "+")  # decoding from Base64 to UTF-8
        clean_one = clean_one.replace("_", "/")  # decoding from Base64 to UTF-8
        clean_one = clean_one.replace("amp;", "")  # cleaned amp; in links
        clean_two = base64.b64decode(clean_one)  # decoding from Base64 to UTF-8
        soup = BeautifulSoup(clean_two, features="html.parser")
        for link in soup.findAll('a'):
            if "talentReportRedirect?export" in link.get('href'):
                report_link = link.get('href')
                break
        final_list.append(report_link)  # This will create a dictionary item in the final list
    except Exception as error:
        print("An error occurred: %s" % error)
    return final_list
def get_service():
    """
    Authenticate the google api client and return the service object
    to make further calls
    PARAMS
        None
    RETURNS
        service api object from gmail for making calls
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    auth_link = build('gmail', 'v1', credentials=creds)
    parsed_url = unquote(auth_link).split('redirect')[-1]
    return parsed_url
def get_report(link_array):
    for link in link_array:
        df = requests.get(link[0], allow_redirects=True)
        # df.encoding
        # dt = pd.DataFrame(data=df)
        print(link)
        # upload_to_database(df) -- Richard Barret please update this function
        print(df.text)
        ## dt.to_csv(r'C:\Users\user\Desktop\api_gmail.csv', sep='\t', header=True)
if __name__ == "__main__":
    link_list = []
    monday = datetime(2022, 12, 5)    # datetime.now() - timedelta(days=datetime.now().weekday())
    thursday = datetime(2022, 12, 8)  # datetime.now() - timedelta(days=datetime.now().weekday() - 3)
    query = 'from:messages-noreply@linkedin.com ' + 'after:' + monday.strftime('%Y/%m/%d') + ' before:' + thursday.strftime('%Y/%m/%d')
    service = get_service()
    mssg_list = search_message(service, user_id='me', search_string=query)
    for msg in mssg_list:
        link_list.append(get_message(service, user_id='me', msg_id=msg))
    get_report(link_list)
It is assumed that you have a directory structure like this:
├── credentials.json
├── gmail_api_linkedin.py
└── requirements.txt
Obviously, you won't have the credentials.json file, but in essence, the code works and redirects us to a login page to retrieve the new pickle:
The main thing is that we can't interact with that in an autonomous fashion. As such, how can we capture the URL from the server that prints out the following information, which is different every single time?
Please visit this URL to authorize this application: https://accounts.google.com/o/oauth2/auth?response_type=code&client_id=212663976989-96o952s9ujadjgfdp6fm0p462p37opml.apps.googleusercontent.com&redirect_uri=http%3A%2F%2Flocalhost%3A58605%2F&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fgmail.readonly&state=ztJir0haFQlvTP79BRthhmEHlSsqIj&access_type=offline
More succinctly, how can we capture the URL in a pythonic manner to send POST and PUT requests to that redirect?
renew our pickle token
I still do not understand why you feel the need to renew your token pickle.
how it all works.
The following example will spawn the consent screen directly on the machine it's running on. It then stores the token within the token.json file.
token.json
This file contains all the information needed by the script to run. It can automatically request a new access token whenever it needs to.
{
  "token": "[REDACTED]",
  "refresh_token": "[REDACTED]",
  "token_uri": "https://oauth2.googleapis.com/token",
  "client_id": "[REDACTED]",
  "client_secret": "[REDACTED]",
  "scopes": [
    "https://mail.google.com/"
  ],
  "expiry": "2023-01-03T19:06:13.959468Z"
}
gmail quickstart.
# To install the Google client library for Python, run the following command:
# pip install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib
from __future__ import print_function
import os.path
import google.auth.exceptions
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
# If modifying these scopes, delete the file token.json.
SCOPES = ['https://mail.google.com/']
def main():
    """Shows basic usage of the Gmail v1 API.
    Prints a list of user messages.
    """
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.json'):
        try:
            creds = Credentials.from_authorized_user_file('token.json', SCOPES)
            creds.refresh(Request())
        except google.auth.exceptions.RefreshError as error:
            # if refresh token fails, reset creds to none.
            creds = None
            print(f'An error occurred: {error}')
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                r'C:\YouTube\dev\credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())

    try:
        service = build('gmail', 'v1', credentials=creds)

        # Call the Gmail v1 API
        results = service.users().messages().list(userId='me').execute()
        messages = results.get('messages', [])

        if not messages:
            print('No messages found.')
            return
        print('Messages:')
        for message in messages:
            print(u'{0} ({1})'.format(message['id'], message['threadId']))

    except HttpError as error:
        # TODO(developer) - Handle errors from gmail API.
        print(f'An error occurred: {error}')


if __name__ == '__main__':
    main()
expired refresh token.
If your issue is in fact that your refresh tokens are expiring, this is because your app is currently in the testing phase. If you set your app to production, then your refresh tokens will stop expiring.
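That said, if you really do need the authorization URL itself as a string (for example, to hand it to some automated agent instead of scraping what run_local_server() prints), the flow object can produce it before the browser step. A minimal, untested sketch, assuming the same credentials.json and scopes as above:
from google_auth_oauthlib.flow import InstalledAppFlow

SCOPES = ['https://mail.google.com/']

flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)
# run_local_server() normally picks the redirect URI itself; set one explicitly
# so authorization_url() can build the full consent link here.
flow.redirect_uri = 'http://localhost:8080/'
auth_url, state = flow.authorization_url(access_type='offline', prompt='consent')

# auth_url is the same kind of accounts.google.com link the quickstart prints.
print(auth_url)
Someone (or something) still has to complete the consent screen behind that URL; this only removes the need to capture the printed output.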

Upgrading a Python Script from oauth2client to google-auth

I would like to upgrade the following code from oauth2client to google-auth. Yes, this code does work and is a copy paste from the Google demo on their site.
from __future__ import print_function
from googleapiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools

# If modifying these scopes, delete the file token.json.
SCOPES = 'https://www.googleapis.com/auth/presentations.readonly'

# The ID of a sample presentation.
PRESENTATION_ID = '<some id>'


def main():
    """Shows basic usage of the Slides API.
    Prints the number of slides and elements in a sample presentation.
    """
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    # delete the file to force authentication and authorization again
    store = file.Storage('token.json')
    creds = store.get()
    if not creds or creds.invalid:
        # credentials.json is issued by Google for the application
        flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('slides', 'v1', http=creds.authorize(Http()))

    # Call the Slides API
    presentation = service.presentations().get(presentationId=PRESENTATION_ID).execute()
    slides = presentation.get('slides')

    print('The presentation contains {} slides:'.format(len(slides)))
    for i, slide in enumerate(slides):
        print('- Slide #{} contains {} elements.'.format(
            i + 1, len(slide.get('pageElements'))))


if __name__ == '__main__':
    main()
I was able to upgrade most of it (I think) but could not find the equivalent for tools.run_flow using google-auth. Below is my upgraded version, and it has everything (I think) except the mechanism to do tools.run_flow. How is tools.run_flow accomplished using google-auth?
from google.oauth2 import service_account
from google.auth.transport.requests import AuthorizedSession
from googleapiclient.discovery import build
SCOPES = ['https://www.googleapis.com/auth/presentations.readonly']
PRESENTATION_ID = '<some id>'
credentials = service_account.Credentials.from_service_account_file(
"the-json-file.json",
scopes=SCOPES
)
service = build('slides', 'v1', credentials=credentials)
presentation = service.presentations().get(presentationId=PRESENTATION_ID).execute()
slides = presentation.get('slides')
print('The presentation contains {} slides:'.format(len(slides)))
for i, slide in enumerate(slides):
    print('- Slide #{} contains {} elements.'.format(
        i + 1, len(slide.get('pageElements'))))
When I run the above code using the google-auth approach, I get the following result:
googleapiclient.errors.HttpError: <HttpError 403 when requesting https://slides.googleapis.com/v1/presentations/<some id>?alt=json returned "The caller does not have permission">
Additional context, I am trying to build a Python script which accesses Google Slides to perform some processing on slide content. I am reading only, not writing to Google Slides. The code above is my attempt to handle the authentication and authorization part of accessing the Google Slides content for my GSuite account (I am the admin of my organization's GSuite). The first example using the older oauth2client works great, but since oauth2client is deprecated and google-auth is recommended I would like to do it the "right" way using the latest library.
UPDATE
I resolved part of my problem with the following code:
from google.oauth2 import service_account
from google.auth.transport.requests import AuthorizedSession
from googleapiclient.discovery import build
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
import os
import json


def get_credentials():
    CLIENT_SECRET_FILE = '<some file>'
    SCOPES = ['https://www.googleapis.com/auth/presentations.readonly']
    credentials_path = '<some path>/token.json'
    if os.path.exists(credentials_path):
        # expect these to be valid. may expire at some point, but should be refreshed by google api client...
        return Credentials.from_authorized_user_file(credentials_path, scopes=SCOPES)
    else:
        flow = InstalledAppFlow.from_client_secrets_file(
            CLIENT_SECRET_FILE,
            scopes=SCOPES,
            redirect_uri='urn:ietf:wg:oauth:2.0:oob')
        auth_url, _ = flow.authorization_url(prompt='consent')
        print('Please go to this URL and finish the authentication flow: {}'.format(auth_url))
        code = input('Enter the authorization code: ')
        flow.fetch_token(code=code)
        credentials = flow.credentials
        credentials_as_dict = {
            'token': credentials.token,
            'refresh_token': credentials.refresh_token,
            'id_token': credentials.id_token,
            'token_uri': credentials.token_uri,
            'client_id': credentials.client_id,
            'client_secret': credentials.client_secret
        }
        with open(credentials_path, 'w') as file:
            file.write(json.dumps(credentials_as_dict))
        return credentials


# The ID of a sample presentation.
PRESENTATION_ID = '<some id>'

service = build('slides', 'v1', credentials=get_credentials())
presentation = service.presentations().get(presentationId=PRESENTATION_ID).execute()
slides = presentation.get('slides')
print('The presentation contains {} slides:'.format(len(slides)))
for i, slide in enumerate(slides):
    print('- Slide #{} contains {} elements.'.format(
        i + 1, len(slide.get('pageElements'))))
My challenge now is getting the web browser to open automatically. I can copy and paste the link and enter the code manually, and it all works. Ideally, I would like the browser to open automatically and the token to be captured, as it did with the older library.
SOLVED
Update a section of code in the previous code sample to this:
flow = InstalledAppFlow.from_client_secrets_file(
    CLIENT_SECRET_FILE,
    scopes=SCOPES,
    redirect_uri='urn:ietf:wg:oauth:2.0:oob')
credentials = flow.run_local_server()
credentials_as_dict = {
    'token': credentials.token,
    'refresh_token': credentials.refresh_token,
    'id_token': credentials.id_token,
    'token_uri': credentials.token_uri,
    'client_id': credentials.client_id,
    'client_secret': credentials.client_secret
}
Web browser opens automatically and captures the token value. All is good.
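As a side note, instead of building credentials_as_dict by hand, google-auth credentials can serialize themselves. A small, untested sketch of the same save/load step, assuming the same credentials_path as before and that credentials is the object returned by flow.run_local_server():
import os

from google.oauth2.credentials import Credentials

SCOPES = ['https://www.googleapis.com/auth/presentations.readonly']
credentials_path = '<some path>/token.json'

# Save: to_json() emits the authorized-user JSON that
# from_authorized_user_file() can read back later.
with open(credentials_path, 'w') as token_file:
    token_file.write(credentials.to_json())

# Load on the next run.
if os.path.exists(credentials_path):
    credentials = Credentials.from_authorized_user_file(credentials_path, scopes=SCOPES)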

How to access Google Apps API from appengine?

What's the actual task?
I'm trying to access the Google Apps APIs (Drive API, Sheets API) from my Google App Engine standard project. When a form is submitted, it has to create a new Google Sheet, write the contents of the form to the sheet, and store the sheet in the Google Drive associated with the Google service account or authorized email account (i.e., the email account we gave in the authorized email section).
What I actually tried?
I have used the google-auth library to get App Engine's default credentials, and I have enabled both the Drive and Sheets APIs in my GAE project console.
from googleapiclient import discovery
from google.auth import app_engine
from google.auth.transport.requests import AuthorizedSession
import httplib2
def creat_sample_sheet():
    credentials = app_engine.Credentials()
    http = AuthorizedSession(credentials)
    discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?'
                    'version=v4')
    service = discovery.build('sheets', 'v4', http=http,
                              discoveryServiceUrl=discoveryUrl)
    result = service.spreadsheets().values()
    print result
But it's not working. Here is the traceback I got:
File "/base/data/home/apps/vidyalay/1.397393333574060152/alumni_registration_dateycollege/sheet_handler.py" in creat_sample_sheet
  30. discoveryServiceUrl=discoveryUrl)
File "/base/data/home/apps/vidyalay/1.397393333574060152/lib/oauth2client/_helpers.py" in positional_wrapper
  133. return wrapped(*args, **kwargs)
File "/base/data/home/apps/vidyalay/1.397393333574060152/lib/googleapiclient/discovery.py" in build
  222. cache)
File "/base/data/home/apps/vidyalay/1.397393333574060152/lib/googleapiclient/discovery.py" in _retrieve_discovery_doc
  269. resp, content = http.request(actual_url)
Exception Type: TypeError at /alumni_dateycollege/sheet/
Exception Value: request() takes at least 3 arguments (2 given)
I don't know if I'm on the right path.
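(As a hedged aside: that TypeError comes from handing discovery.build an AuthorizedSession where it expects an httplib2-style http object, whose request() is called with different arguments. With reasonably recent versions of google-api-python-client you can skip the http plumbing and pass the google-auth credentials straight in; a minimal, untested sketch:)
from googleapiclient import discovery
from google.auth import app_engine

# App Engine standard default credentials, as in the question.
credentials = app_engine.Credentials()

# build() accepts credentials= and sets up a compatible transport itself,
# so there is no need to wrap the credentials in AuthorizedSession here.
service = discovery.build('sheets', 'v4', credentials=credentials)
result = service.spreadsheets().values()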
Update
Following this link mentioned by Daniel works for me. But I don't know how to view the created spreadsheet.
Here is my attempt to download the created sheet.
service.spreadsheets().get(spreadsheetId=SHEET_ID, alt='media')
But this creates a GET request to the URL https://sheets.googleapis.com/v4/spreadsheets/1awnM7z_aomHx833Z5S_Z-agFusaidmgcCa0FJIFyGE8?alt=json. I actually want to pass media to the alt parameter instead of json. I tried the above, but it won't work.
I managed to solve the problem on my own.
I used my Google service account to create the spreadsheet on the service account's Google Drive (the Sheets API takes care of storing the sheet in Google Drive). Then I gave the writer role permission for that particular file to my own Gmail ID. Now I can view that particular form sheet in my Google Drive.
from googleapiclient import discovery
import cloudstorage as gcs
# from oauth2client import client
# from oauth2client.contrib import appengine
# from google.appengine.api import memcache
import httplib2
from google.appengine.api import memcache
from oauth2client.contrib.appengine import AppAssertionCredentials
import logging
import os
import io
from googleapiclient.http import MediaIoBaseDownload
logger = logging.getLogger(__name__)
credentials = AppAssertionCredentials(scope='https://www.googleapis.com/auth/spreadsheets')
drive_credentials = AppAssertionCredentials(scope='https://www.googleapis.com/auth/drive')
http = credentials.authorize(httplib2.Http(memcache))
drive_http = drive_credentials.authorize(httplib2.Http(memcache))
discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?'
                'version=v4')
service = discovery.build('sheets', 'v4', http=http,
                          discoveryServiceUrl=discoveryUrl)
def callback(request_id, response, exception):
    if exception:
        # Handle error
        print exception
    else:
        print "Permission Id: %s" % response.get('id')


def list_drive_files(drive_service):
    results = drive_service.files().list(
        pageSize=10).execute()
    items = results.get('files', [])
    if not items:
        print('No files found.')
    else:
        print('Files:')
        for item in items:
            print('{0} ({1})'.format(item['name'], item['id']))


def give_file_permission(drive_service, file_id):
    batch = drive_service.new_batch_http_request(callback=callback)
    user_permission = {
        'type': 'user',
        'role': 'writer',
        'emailAddress': 'foobar@gmail.com'  # email address of the user you want to give permission
    }
    batch.add(drive_service.permissions().create(
        fileId=file_id,
        body=user_permission,
        fields='id',
    ))
    batch.execute()


def creat_sample_sheet():
    # data = {'properties': {'title': 'Academic Sheet'}}
    # res = sheet_service.spreadsheets().create(body=data).execute()
    # SHEET_ID = res['spreadsheetId']
    SHEET_ID = '1awnM7z_aomHx833Z5S_Z-agFusaidmgcCa0FJIFyGE8'
    sheet = service.spreadsheets().get(spreadsheetId=SHEET_ID, includeGridData=True)
    drive_service = discovery.build('drive', 'v3', http=drive_http)
    list_drive_files(drive_service)
    # Sharing a file
    # file_id = '1bgvJdXG0eg2JGaNlIcdtde_XJlg2gAUT_DOzHi75zys'


def write_to_sheet(form):
    logger.info('Inside write to sheet')
    first_name = form.cleaned_data.get('first_name', '')
    sur_name = form.cleaned_data.get('sur_name', '')
    email = form.cleaned_data.get('e_mail', '')
    phone_no = form.cleaned_data.get('mobile_phone', '')
    year_of_passing = form.cleaned_data.get('year_of_passing', '')
    present_occupation = form.cleaned_data.get('present_occupation', '')
    present_city = form.cleaned_data.get('present_city', '')
    courses_attended = ', '.join([str(i) for i in form.cleaned_data.get('courses_attended', '')])
    volunteer = form.cleaned_data.get('volunteer', '')
    fields = ['first_name', 'sur_name', 'e_mail', 'mobile_phone', 'year_of_passing', 'present_occupation', 'present_city', 'courses_attended', 'volunteer']
    # data = {'properties': {'title': 'Form Sheet'}}
    # # create sheet
    # res = service.spreadsheets().create(body=data).execute()
    # # sheet_id = res['spreadsheetId']
    sheet_id = '1bgvJdXG0eg2JGaNlIcdtde_XJlg2gAUT_DOzHi75zys'
    # print sheet_id
    # update sheet
    data = {'values': [[first_name, sur_name, email, phone_no, year_of_passing, present_occupation,
                        present_city, courses_attended, volunteer]]}
    service.spreadsheets().values().append(spreadsheetId=sheet_id,
                                           range='A1', body=data, valueInputOption='RAW').execute()
    # Getting rows
    # rows = service.spreadsheets().values().get(spreadsheetId=sheet_id,
    #                                            range='Sheet1').execute().get('values', [])
    # for row in rows:
    #     print row

How to use Google Blogger API with Python?

I'm trying to use the blog functions from the Google gdata API. I tried my best following the documentation but failed very badly. Can anybody tell me how I can use the Google Blogger API? My code is pretty messed up and now I'm out of clues.
EDIT FULL WORKING CODE :) :
from oauth2client.client import OAuth2WebServerFlow
import httplib2
from apiclient.discovery import build
from oauth2client.file import Storage
#flow = OAuth2WebServerFlow(client_id='', #ID
#                           client_secret='', #SECRET ID
#                           scope='https://www.googleapis.com/auth/blogger',
#                           redirect_uri='urn:ietf:wg:oauth:2.0:oob')
#auth_uri = flow.step1_get_authorize_url()
# Redirect the user to auth_uri on your platform.
# Open a file
#fo = open("foo.txt", "wb")
#fo.write( auth_uri +"\n");
#fo.close()
#credentials = flow.step2_exchange( raw_input ( ) )
storage = Storage('a_credentials_file')
#storage.put(credentials)
credentials = storage.get()
http = httplib2.Http()
http = credentials.authorize(http)
service = build('blogger', 'v3', http=http)
users = service.users()
# Retrieve this user's profile information
thisuser = users.get(userId='self').execute()
print('This user\'s display name is: %s' % thisuser['displayName'])
While I was trying to find a solution myself, I found this. After some modifications, the code finally worked. It successfully prints all details about your blog site.
from oauth2client.client import flow_from_clientsecrets
import httplib2
from apiclient.discovery import build
from oauth2client.file import Storage
import webbrowser
def get_credentials():
    scope = 'https://www.googleapis.com/auth/blogger'
    flow = flow_from_clientsecrets(
        'client_secrets.json', scope,
        redirect_uri='urn:ietf:wg:oauth:2.0:oob')
    storage = Storage('credentials.dat')
    credentials = storage.get()

    if not credentials or credentials.invalid:
        auth_uri = flow.step1_get_authorize_url()
        webbrowser.open(auth_uri)
        auth_code = raw_input('Enter the auth code: ')
        credentials = flow.step2_exchange(auth_code)
        storage.put(credentials)
    return credentials


def get_service():
    """Returns an authorised blogger api service."""
    credentials = get_credentials()
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = build('blogger', 'v3', http=http)
    return service


if __name__ == '__main__':
    served = get_service()
    blogs = served.blogs()
    blog_get_obj = blogs.get(blogId='123456789123456')
    details = blog_get_obj.execute()
    print details
The results of print will look like:
{u'description': u'Look far and wide. There are worlds to conquer.',
u'id': u'8087466742945672359',
u'kind': u'blogger#blog',
u'locale': {u'country': u'', u'language': u'en', u'variant': u''},
u'name': u'The World Around us',
u'pages': {u'selfLink': u'https://www.googleapis.com/blogger/v3/blogs/1234567897894569/pages',
u'totalItems': 2},
u'posts': {u'selfLink': u'https://www.googleapis.com/blogger/v3/blogs/1245678992359/posts',
u'totalItems': 26},
u'published': u'2015-11-02T18:47:02+05:30',
u'selfLink': u'https://www.googleapis.com/blogger/v3/blogs/9874652945672359',
u'updated': u'2017-06-29T19:41:00+05:30',
u'url': u'http://www.safarnuma.com/'}
This is an updated and long-term stable implementation, taken from this answer and edited a bit for Blogger API v3.
All methods in the official documentation can be called on the blogger_service variable of this code.
import os
import pickle
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
SCOPES = ['https://www.googleapis.com/auth/blogger', ]
# we check if the file to store the credentials exists
if not os.path.exists('credentials.dat'):
    flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)
    credentials = flow.run_local_server()

    with open('credentials.dat', 'wb') as credentials_dat:
        pickle.dump(credentials, credentials_dat)
else:
    with open('credentials.dat', 'rb') as credentials_dat:
        credentials = pickle.load(credentials_dat)

    if credentials.expired:
        credentials.refresh(Request())

blogger_service = build('blogger', 'v3', credentials=credentials)

users = blogger_service.users()

# Retrieve this user's profile information
thisuser = users.get(userId='self').execute()
print('Your display name is: %s' % thisuser['displayName'])
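For example, listing recent posts from one of your blogs follows the same pattern; the blog ID below is a placeholder:
# Hypothetical usage: list the most recent posts of a blog you own,
# continuing from the blogger_service object built above.
posts = blogger_service.posts().list(blogId='1234567890123456789', maxResults=5).execute()
for post in posts.get('items', []):
    print(post['title'], post['url'])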
You can use goco
pip install goco
then use this code:
from goco import Goco
GoogleApi = Goco("path\\to\\client_secret.json", "path\\to\\credentials.storage")
MyBlog = GoogleApi.connect(scope='Blogger', service_name='blogger', version='v3')
Posts = MyBlog.posts().list(blogId='desired-blog-id').execute()
print(Posts)
You can also connect to any Google service via this module.

Creating a DfpClient with a Service Account in Python

I use the Google DFP API to collect some statistics on ads clicked on our website. The code is written in Python. Currently, I am trying to upgrade the code to use OAuth 2. Since the code runs automatically every day without any user involvement, I created a service account under my Google project and added the account to the DoubleClick for Publishers network of our company. Based on the sample code on the web, I wrote this:
import httplib2
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.discovery import build
from googleads.dfp import DfpClient

GOOGLE_DFP_SCOPE = "https://www.googleapis.com/auth/dfp"
API_VERSION = "v201411"
KEY_FILE = "*******.p12"
ACCT_EMAIL = "************************@developer.gserviceaccount.com"
NETWORK_CODE = "**********"

with open(KEY_FILE) as config_file:
    my_private_key = config_file.read()

credentials = SignedJwtAssertionCredentials(service_account_name=ACCT_EMAIL, private_key=my_private_key, scope=GOOGLE_DFP_SCOPE)
http = httplib2.Http()
http_auth = credentials.authorize(http)
dfp_client = build(serviceName='dfp', version=API_VERSION, http=http_auth)
This code does not seem to be correct, because the network_code has not been passed anywhere in the code. In addition, it fails with the following message:
apiclient.errors.UnknownApiNameOrVersion: name: dfp version: v201411
Also, the line below:
dfp_client = DfpClient.LoadFromStorage()
does not work for my case, because this is based on googleads.yaml, which seems to be formatted only for web-app accounts with a client secret, not a P12 private key.
Any advice? Thanks.
apiclient.discovery uses a default discovery route to look up services, but I did not find a discovery service for DoubleClick for Publishers. I use the following code to access the API with OAuth2, using Flask:
import json
import requests
import flask
from googleads import dfp
from googleads import oauth2

app = flask.Flask(__name__)

CLIENT_ID = ''
CLIENT_SECRET = ''  # Read from a file or environmental variable in a real app
SCOPE = 'https://www.googleapis.com/auth/dfp'
REDIRECT_URI = ''
APPLICATION_NAME = 'DFP API SERVICE'
NETWORK_CODE = ''


@app.route('/')
def index():
    if 'credentials' not in flask.session:
        return flask.redirect(flask.url_for('oauth2callback'))
    credentials = json.loads(flask.session['credentials'])
    if credentials['expires_in'] <= 0:
        return flask.redirect(flask.url_for('oauth2callback'))
    else:
        try:
            refresh_token = credentials['refresh_token']
            oauth2_client = oauth2.GoogleRefreshTokenClient(CLIENT_ID, CLIENT_SECRET, refresh_token)
            dfp_client = dfp.DfpClient(oauth2_client, APPLICATION_NAME, NETWORK_CODE)
            user_service = dfp_client.GetService('UserService', version='v201508')
            user = user_service.getCurrentUser()
            return flask.render_template('index.html', name=user['name'])
        except:
            flask.session.pop('credentials', None)
            return flask.redirect(flask.url_for('oauth2callback'))


@app.route('/oauth2callback')
def oauth2callback():
    if 'code' not in flask.request.args:
        auth_uri = ('https://accounts.google.com/o/oauth2/auth?response_type=code'
                    '&access_type=offline&client_id={}&redirect_uri={}&scope={}&').format(CLIENT_ID, REDIRECT_URI, SCOPE)
        return flask.redirect(auth_uri)
    else:
        auth_code = flask.request.args.get('code')
        data = {'code': auth_code,
                'client_id': CLIENT_ID,
                'client_secret': CLIENT_SECRET,
                'redirect_uri': REDIRECT_URI,
                'grant_type': 'authorization_code'}
        r = requests.post('https://www.googleapis.com/oauth2/v3/token', data=data)
        flask.session['credentials'] = r.text
        return flask.redirect(flask.url_for('index'))


if __name__ == '__main__':
    import uuid
    app.secret_key = str(uuid.uuid4())
    app.debug = False
    app.run()
I hope this helps you.
You are correct. You have to pass the network code when you create a DFP client, and the version is not necessary. Try the following code to create the client in Python.
import os

from googleads import oauth2
from googleads import dfp


def get_dfp_client():
    application_name = "Your application name"  # from google developer console. eg: Web Client
    network_code = ********
    private_key_password = 'notasecret'
    key_file = os.path.join('path/to/p12file')
    service_account_email = '****@***.iam.gserviceaccount.com'

    # create oauth2 client (google login)
    oauth2_client = oauth2.GoogleServiceAccountClient(
        oauth2.GetAPIScope('dfp'), service_account_email, key_file)

    dfp_client = dfp.DfpClient(oauth2_client, application_name, network_code)
    return dfp_client


client = get_dfp_client()
Reference
Please comment if you need more clarification.
Update
googleads renamed the dfp module to ad_manager, docs here – Gocht
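A rough, untested sketch of the same service-account setup after that rename, assuming GoogleServiceAccountClient and GetAPIScope kept the shape used above (the constructor arguments have changed across googleads releases, so check the current docs):
import os

from googleads import ad_manager
from googleads import oauth2

# Placeholder values; fill in your own.
application_name = 'Your application name'
network_code = 'YOUR_NETWORK_CODE'
key_file = os.path.join('path', 'to', 'key.json')

# Assumed equivalents of the dfp-era calls above (JSON key file variant).
oauth2_client = oauth2.GoogleServiceAccountClient(
    key_file, oauth2.GetAPIScope('ad_manager'))
ad_manager_client = ad_manager.AdManagerClient(
    oauth2_client, application_name, network_code)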
