I want to read and write data from a sheet. Reading works fine but writing doesn't. I use all the scopes mentioned in the documentation: https://developers.google.com/sheets/api/reference/rest/v4/spreadsheets.values/append
I call the write function like this: data_writer(1, 1, 1)
Code:
from __future__ import print_function
from apiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools
# Setup the Sheets API
SCOPES = 'https://www.googleapis.com/auth/spreadsheets'+"https://www.googleapis.com/auth/drive.file"+"https://www.googleapis.com/auth/drive"
store = file.Storage('credentials.json')
creds = store.get()
if not creds or creds.invalid:
flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
creds = tools.run_flow(flow, store)
service = build('sheets', 'v4', http=creds.authorize(Http()))
# Call the Sheets API
SPREADSHEET_ID = '1JwVOqtUCWBMm_O6esIb-9J4TgqAmMIdYm9sf5y-A7EM'
RANGE_NAME = 'Jokes!A:C'
# How the input data should be interpreted.
value_input_option = 'USER_ENTERED'
# How the input data should be inserted.
insert_data_option = 'INSERT_ROWS'
def data_reader():
#reading data
read = service.spreadsheets().values().get(spreadsheetId=SPREADSHEET_ID,range=RANGE_NAME).execute()
#reading values
values = read.get('values', [])
if not values:
print('No data found.')
else:
for row in values:
print(row[2])
continue
def data_writer(score,num_comments,mystring):
value_range_body = {
"score":score,
"num_comments":num_comments,
"joke":mystring
}
request = service.spreadsheets().values().append(spreadsheetId=SPREADSHEET_ID, range=RANGE_NAME, valueInputOption=value_input_option, insertDataOption=insert_data_option, body=value_range_body)
response = request.execute()
SCOPES must be of type list
SCOPES = ['https://www.googleapis.com/auth/spreadsheets', "https://www.googleapis.com/auth/drive.file", "https://www.googleapis.com/auth/drive"]
Side note: you have both
.../auth/drive
and
.../auth/drive.file
The /drive.file scope limits access to only the files your app has created or opened, while /drive opens up the whole Drive API. So you should pick the one that fits your needs.
Side note 2:
Based on the link you provided, it mentions that you only need one of these scopes to work with spreadsheets, so you may not need all of them either.
First off, you should only need https://www.googleapis.com/auth/drive, as it gives full access to a user's Drive account, including reading and writing sheets.
See the list of Sheets scopes and the list of Drive scopes in the documentation.
If you have already run your code once and authenticated your user, and then changed the scopes in your code, remember that you will need to run your code again and re-authenticate the user to gain the access granted by the new scopes.
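If you are using the oauth2client storage from the question, a minimal sketch of forcing that re-authentication might look like this (assuming, as in the question's code, that the saved token lives in credentials.json and the OAuth client in client_secret.json):
import os
from apiclient.discovery import build
from httplib2 import Http
from oauth2client import client, file, tools

SCOPES = ['https://www.googleapis.com/auth/spreadsheets']

# Delete the previously saved token so the consent flow runs again
# and the new scopes are granted.
if os.path.exists('credentials.json'):
    os.remove('credentials.json')

store = file.Storage('credentials.json')
flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
creds = tools.run_flow(flow, store)
service = build('sheets', 'v4', http=creds.authorize(Http()))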
Side note:
SCOPES = 'https://www.googleapis.com/auth/spreadsheets'+"https://www.googleapis.com/auth/drive.file"+"https://www.googleapis.com/auth/drive"
is just going to be one long string. You need to separate the scopes with a space or, as the other answer states, use a list:
SCOPES = 'https://www.googleapis.com/auth/spreadsheets ' + "https://www.googleapis.com/auth/drive.file " + "https://www.googleapis.com/auth/drive"
Related
So my colleague and I have an application where we need to capture the OAuth redirect from Google's OAuth server response, because we need to send a payload to renew our pickle token, and we need to do it without human intervention. The code looks like this:
#!/usr/bin/env python3
import pickle
import os.path
import pandas as pd
import requests
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import base64
from datetime import datetime, timedelta
from urllib.parse import unquote
from bs4 import BeautifulSoup
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']
def search_message(service, user_id, search_string):
"""
Search the inbox for emails using standard gmail search parameters
and return a list of email IDs for each result
PARAMS:
service: the google api service object already instantiated
user_id: user id for google api service ('me' works here if
already authenticated)
search_string: search operators you can use with Gmail
(see https://support.google.com/mail/answer/7190?hl=en for a list)
RETURNS:
List containing email IDs of search query
"""
try:
# initiate the list for returning
list_ids = []
# get the id of all messages that are in the search string
search_ids = service.users().messages().list(userId=user_id, q=search_string).execute()
# if there were no results, print warning and return empty string
try:
ids = search_ids['messages']
except KeyError:
print("WARNING: the search queried returned 0 results")
print("returning an empty string")
return ""
if len(ids) > 1:
for msg_id in ids:
list_ids.append(msg_id['id'])
return (list_ids)
else:
list_ids.append(ids['id'])
return list_ids
    except Exception as error:
        print("An error occurred: %s" % error)
def get_message(service, user_id, msg_id):
"""
Search the inbox for specific message by ID and return it back as a
clean string. String may contain Python escape characters for newline
and return line.
PARAMS
service: the google api service object already instantiated
user_id: user id for google api service ('me' works here if
already authenticated)
msg_id: the unique id of the email you need
RETURNS
A string of encoded text containing the message body
"""
try:
final_list = []
message = service.users().messages().get(userId=user_id, id=msg_id).execute() # fetch the message using API
payld = message['payload'] # get payload of the message
report_link = ""
mssg_parts = payld['parts'] # fetching the message parts
part_one = mssg_parts[1] # fetching first element of the part
#part_onee = part_one['parts'][1]
#print(part_one)
part_body = part_one['body'] # fetching body of the message
part_data = part_body['data'] # fetching data from the body
clean_one = part_data.replace("-", "+") # decoding from Base64 to UTF-8
clean_one = clean_one.replace("_", "/") # decoding from Base64 to UTF-8
clean_one = clean_one.replace("amp;", "") # cleaned amp; in links
clean_two = base64.b64decode(clean_one) # decoding from Base64 to UTF-8
soup = BeautifulSoup(clean_two, features="html.parser")
for link in soup.findAll('a'):
if "talentReportRedirect?export" in link.get('href'):
report_link = link.get('href')
break
final_list.append(report_link) # This will create a dictonary item in the final list
    except Exception as error:
        print("An error occurred: %s" % error)
return final_list
def get_service():
"""
Authenticate the google api client and return the service object
to make further calls
PARAMS
None
RETURNS
service api object from gmail for making calls
"""
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
with open('token.pickle', 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open('token.pickle', 'wb') as token:
pickle.dump(creds, token)
auth_link = build('gmail', 'v1', credentials=creds)
parsed_url = unquote(auth_link).split('redirect')[-1]
return parsed_url
def get_report(link_array):
for link in link_array:
df = requests.get(link[0], allow_redirects=True)
# df.encoding
# dt = pd.DataFrame(data=df)
print(link)
# upload_to_database(df) -- Richard Barret please update this function
print(df.text)
## dt.to_csv(r'C:\Users\user\Desktop\api_gmail.csv', sep='\t',header=True)
if __name__ == "__main__":
link_list = []
monday = datetime(2022,12,5)#datetime.now() - timedelta(days=datetime.now().weekday())
thursday = datetime(2022,12,8)#datetime.now() - timedelta(days=datetime.now().weekday() - 3)
    query = 'from:messages-noreply@linkedin.com ' + 'after:' + monday.strftime('%Y/%m/%d') + ' before:' + thursday.strftime('%Y/%m/%d')
service = get_service()
mssg_list = search_message(service, user_id='me', search_string=query)
for msg in mssg_list:
link_list.append(get_message(service, user_id='me', msg_id=msg))
get_report(link_list)
It is assumed that you have a directory structure like this:
├── credentials.json
├── gmail_api_linkedin.py
└── requirements.txt
Obviously, you won't have the credentials.json file, but in essence, the code works and redirects us to a login page to retrieve the new pickle:
The main thing is we can't interact with that in an autonomous fashion. As such, how can we capture the URL from the server, which prints out the following information that is different every single time?
Please visit this URL to authorize this application: https://accounts.google.com/o/oauth2/auth?response_type=code&client_id=212663976989-96o952s9ujadjgfdp6fm0p462p37opml.apps.googleusercontent.com&redirect_uri=http%3A%2F%2Flocalhost%3A58605%2F&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fgmail.readonly&state=ztJir0haFQlvTP79BRthhmEHlSsqIj&access_type=offline
More succinctly, how can we capture the URL in a pythonic manner to send POST and PUT requests to that redirect?
renew our pickle token
I still do not understand why you feel the need to renew your token pickle.
Here is how it all works.
The following example will spawn the consent screen directly on the machine it's running on. It then stores the token within the token.json file.
token.json
This file contains all the information needed by the script to run. It can automatically request a new access token whenever it needs one.
{
"token": "[REDACTED]",
"refresh_token": "[REDACTED]",
"token_uri": "https://oauth2.googleapis.com/token",
"client_id": "[REDACTED]",
"client_secret": "[REDACTED],
"scopes": [
"https://mail.google.com/"
],
"expiry": "2023-01-03T19:06:13.959468Z"
}
Gmail quickstart:
# To install the Google client library for Python, run the following command:
# pip install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib
from __future__ import print_function
import os.path
import google.auth.exceptions
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
# If modifying these scopes, delete the file token.json.
SCOPES = ['https://mail.google.com/']
def main():
"""Shows basic usage of the Gmail v1 API.
Prints a list of user messages.
"""
creds = None
# The file token.json stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.json'):
try:
creds = Credentials.from_authorized_user_file('token.json', SCOPES)
creds.refresh(Request())
except google.auth.exceptions.RefreshError as error:
# if refresh token fails, reset creds to none.
creds = None
print(f'An error occurred: {error}')
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
                r'C:\YouTube\dev\credentials.json', SCOPES)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open('token.json', 'w') as token:
token.write(creds.to_json())
try:
service = build('gmail', 'v1', credentials=creds)
# Call the Gmail v1 API
results = service.users().messages().list(
userId='me').execute()
messages = results.get('messages', [])
if not messages:
print('No messages found.')
return
print('Messages:')
for message in messages:
print(u'{0} ({1})'.format(message['id'], message['threadId']))
except HttpError as error:
# TODO(developer) - Handle errors from gmail API.
print(f'An error occurred: {error}')
if __name__ == '__main__':
main()
expired refresh token.
If your issue is in fact that your refresh tokens are expiring, this is because your app is currently in the testing phase. If you set your app to production, then your refresh tokens will stop expiring.
How to automate the process of getting access to Google spreadsheets?
Right now we use gspread and oauth2client.service_account to get access to a Google spreadsheet. It works fine, but using OAuth2 credentials makes us manually share every single spreadsheet with the "client_email" from the credentials JSON file.
import gspread
from oauth2client.service_account import ServiceAccountCredentials
scope = [
'https://www.googleapis.com/auth/spreadsheets',
'https://www.googleapis.com/auth/drive'
]
credentials = ServiceAccountCredentials.from_json_keyfile_name('path.json', scope)
gs = gspread.authorize(credentials)
That works, but how can we modify it?
So the desired outcome is: somebody shares a spreadsheet with me and I can start to work with it immediately in Python. Is it possible? Maybe we can use some triggers from incoming emails with the information about sharing or something similar?
You can try this script. It has a few sections we can differentiate:
Requesting access to Drive and Gmail. As you see, we use the full drive scope instead of drive.file. This is because there is an existing bug that causes drive.file to crash(1), so meanwhile we have to use this.
from __future__ import print_function
import pickle
import sys
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/drive',
'https://www.googleapis.com/auth/gmail.modify']
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
with open('token.pickle', 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'credentials.json', SCOPES)
creds = flow.run_local_server()
# Save the credentials for the next run
with open('token.pickle', 'wb') as token:
pickle.dump(creds, token)
mail_service = build('gmail', 'v1', credentials=creds)
drive_service = build('drive','v3', credentials=creds)
Declaring some variables. There is no issue here, just declaring the variables where we will keep the mail ids, the file name and the file name formatted for our needs.
mail_ids = []
file_name = []
name_string = []
Get the emails. We will only take the unread emails from drive-shares-noreply. After this we will mark them as "read" so we won't take them the next time we execute the script.
def get_emails(mail_ids):
user_id = 'me' #Or your email
    query = 'from:drive-shares-noreply@google.com, is:UNREAD' #Will search mails from drive shares and unread
response = mail_service.users().messages().list(userId=user_id, q=query).execute()
items = response.get('messages', [])
if not items:
print('No unread mails found')
sys.exit()
else:
for items in items:
mail_ids.append(items['id'])
for mail_ids in mail_ids:
mail_service.users().messages().modify(userId=user_id, id=mail_ids, body={"removeLabelIds":["UNREAD"]}).execute() #Marks the mails as read
Get the file names of the emails. The syntax of the Subject of the sharing sheets email is “Filename - Invitation to edit”, so we will take the subject of each email, and we will format the string later.
def get_filename(mail_ids, file_name):
user_id = 'me'
headers = []
for mail_ids in mail_ids:
response = mail_service.users().messages().get(userId=user_id, id=mail_ids, format="metadata", metadataHeaders="Subject").execute()
items = response.get('payload', [])
headers.append(items['headers'])
length = len(headers)
for i in range(length):
file_name.append(headers[i][0]['value'])
def process_name(file_name, name_string):
for file_name in file_name:
name_string.append(str(file_name).replace(" - Invitation to edit", ""))
Give permissions to the client_email
def give_permissions(name_string):
for name_string in name_string:
body = "'{}'".format(name_string)
results = drive_service.files().list(q="name = "+body).execute()
items = results.get('files', [])
if not items:
print('No files found.')
sys.exit()
else:
print('Files:')
for item in items:
print(u'{0} ({1})'.format(item['name'], item['id']))
file_id = item['id']
user_permission = {
'type': 'user',
'role': 'writer',
'emailAddress': 'your_client_email'
}
drive_service.permissions().create(body=user_permission, fileId=file_id).execute()
And then we just have to call the functions
get_emails(mail_ids)
get_filename(mail_ids, file_name)
process_name(file_name, name_string)
give_permissions(name_string)
There is no way to trigger this script for each new email received, but you can trigger it with a timer or something like that and it will search for new emails.
(1) The drive.file scope only works with certain files, according to the last update of the documentation
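As suggested above, a minimal sketch of running the script on a timer (an illustration only, assuming the four functions above are defined in the same module; the interval is made up):
import time

POLL_INTERVAL_SECONDS = 300  # hypothetical polling interval

while True:
    # Note: get_emails() above calls sys.exit() when there are no unread
    # mails, so for a long-running loop you would want it to return instead.
    mail_ids = []
    file_name = []
    name_string = []
    get_emails(mail_ids)
    get_filename(mail_ids, file_name)
    process_name(file_name, name_string)
    give_permissions(name_string)
    time.sleep(POLL_INTERVAL_SECONDS)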
I would like to upgrade the following code from oauth2client to google-auth. Yes, this code does work and is a copy paste from the Google demo on their site.
from __future__ import print_function
from googleapiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools
# If modifying these scopes, delete the file token.json.
SCOPES = 'https://www.googleapis.com/auth/presentations.readonly'
# The ID of a sample presentation.
PRESENTATION_ID = '<some id>'
def main():
"""Shows basic usage of the Slides API.
    Prints the number of slides and elements in a sample presentation.
"""
# The file token.json stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
    # delete the file to force authentication and authorization again
store = file.Storage('token.json')
creds = store.get()
if not creds or creds.invalid:
# credentials.json is issued by Google for the application
flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
creds = tools.run_flow(flow, store)
service = build('slides', 'v1', http=creds.authorize(Http()))
# Call the Slides API
presentation = service.presentations().get(presentationId=PRESENTATION_ID).execute()
slides = presentation.get('slides')
print('The presentation contains {} slides:'.format(len(slides)))
for i, slide in enumerate(slides):
print('- Slide #{} contains {} elements.'.format(
i + 1, len(slide.get('pageElements'))))
if __name__ == '__main__':
main()
I was able to upgrade most of it (I think) but could not find the equivalent for tools.run_flow using google-auth. Below is my upgraded version, and it has everything (I think) except the mechanism to do tools.run_flow. How is tools.run_flow accomplished using google-auth?
from google.oauth2 import service_account
from google.auth.transport.requests import AuthorizedSession
from googleapiclient.discovery import build
SCOPES = ['https://www.googleapis.com/auth/presentations.readonly']
PRESENTATION_ID = '<some id>'
credentials = service_account.Credentials.from_service_account_file(
"the-json-file.json",
scopes=SCOPES
)
service = build('slides', 'v1', credentials=credentials)
presentation = service.presentations().get(presentationId=PRESENTATION_ID).execute()
slides = presentation.get('slides')
print('The presentation contains {} slides:'.format(len(slides)))
for i, slide in enumerate(slides):
print('- Slide #{} contains {} elements.'.format(
i + 1, len(slide.get('pageElements'))))
When I run the above code using the google-auth approach, I get the following result:
googleapiclient.errors.HttpError: <HttpError 403 when requesting https://slides.googleapis.com/v1/presentations/<some id>?alt=json returned "The caller does not have permission">
Additional context, I am trying to build a Python script which accesses Google Slides to perform some processing on slide content. I am reading only, not writing to Google Slides. The code above is my attempt to handle the authentication and authorization part of accessing the Google Slides content for my GSuite account (I am the admin of my organization's GSuite). The first example using the older oauth2client works great, but since oauth2client is deprecated and google-auth is recommended I would like to do it the "right" way using the latest library.
UPDATE
I resolved part of my problem with the following code:
from google.oauth2 import service_account
from google.auth.transport.requests import AuthorizedSession
from googleapiclient.discovery import build
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
import os
import json
def get_credentials():
CLIENT_SECRET_FILE = '<some file>'
SCOPES = ['https://www.googleapis.com/auth/presentations.readonly']
credentials_path = '<some path>/token.json'
if os.path.exists(credentials_path):
# expect these to be valid. may expire at some point, but should be refreshed by google api client...
return Credentials.from_authorized_user_file(credentials_path, scopes=SCOPES)
else:
flow = InstalledAppFlow.from_client_secrets_file(
CLIENT_SECRET_FILE,
scopes=SCOPES,
redirect_uri='urn:ietf:wg:oauth:2.0:oob')
auth_url, _ = flow.authorization_url(prompt='consent')
print('Please go to this URL and finish the authentication flow: {}'.format(auth_url))
code = input('Enter the authorization code: ')
flow.fetch_token(code=code)
credentials = flow.credentials
credentials_as_dict = {
'token': credentials.token,
'refresh_token': credentials.refresh_token,
'id_token': credentials.id_token,
'token_uri': credentials.token_uri,
'client_id': credentials.client_id,
'client_secret': credentials.client_secret
}
with open(credentials_path, 'w') as file:
file.write(json.dumps(credentials_as_dict))
return credentials
# The ID of a sample presentation.
PRESENTATION_ID = '<some id>'
service = build('slides', 'v1', credentials=get_credentials())
presentation = service.presentations().get(presentationId=PRESENTATION_ID).execute()
slides = presentation.get('slides')
print('The presentation contains {} slides:'.format(len(slides)))
for i, slide in enumerate(slides):
print('- Slide #{} contains {} elements.'.format(
i + 1, len(slide.get('pageElements'))))
My challenge now is causing the web browser to open automatically. I can copy and paste the link and get the code manually, and it all works. Ideally, I would like the web browser to open automatically and the token to be captured, like it did with the older library.
SOLVED
Update the corresponding section of the previous code sample to this:
flow = InstalledAppFlow.from_client_secrets_file(
CLIENT_SECRET_FILE,
scopes=SCOPES,
redirect_uri='urn:ietf:wg:oauth:2.0:oob')
credentials = flow.run_local_server()
credentials_as_dict = {
'token': credentials.token,
'refresh_token': credentials.refresh_token,
'id_token': credentials.id_token,
'token_uri': credentials.token_uri,
'client_id': credentials.client_id,
'client_secret': credentials.client_secret
}
The web browser opens automatically and the token value is captured. All is good.
I've connected the Gmail API to a Django project. I have a problem with storing the credentials of every user. What is the correct way to store them, so that every user can log in? Without storage it just creates one file for one user and that's all. Should I create a database?
This is my "quickstart" file from Google:
from __future__ import print_function
from googleapiclient.discovery import build
from httplib2 import Http
from oauth2client import file, client, tools
from django.contrib.auth.models import User
# user = User
# storage = (CredentialsModel, 'id', user, 'credential') As you see I tried it here but it didn't go well
# credential = storage
#
# storage.__add__(credential)
# Setup the Gmail API
SCOPES = 'https://www.googleapis.com/auth/gmail.readonly'
store = file.Storage('credentials.json')
creds = store.get()
if not creds or creds.invalid:
flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
creds = tools.run_flow(flow, store)
service = build('gmail', 'v1', http=creds.authorize(Http()))
def go():
# Call the Gmail API
results = service.users().labels().list(userId='me').execute()
labels = results.get('labels', [])
# if not labels:
# print('No labels found.')
# else:
# print('Labels:')
# for label in labels:
# print(label['name'])
pass
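One way to read the commented-out attempt above: oauth2client ships Django helpers that keep each user's credential in a model field, so a per-user database row (rather than a single credentials.json file) could look roughly like the sketch below. The CredentialsModel and the view wiring are assumptions for illustration, not code from the question.
# models.py (sketch)
from django.contrib.auth.models import User
from django.db import models
from oauth2client.contrib.django_util.models import CredentialsField

class CredentialsModel(models.Model):
    # One stored credential per Django user.
    id = models.OneToOneField(User, primary_key=True, on_delete=models.CASCADE)
    credential = CredentialsField()

# somewhere in a view (sketch)
from oauth2client.contrib.django_util.storage import DjangoORMStorage

def get_user_storage(request):
    # Reads/writes the credential row belonging to the logged-in user.
    return DjangoORMStorage(CredentialsModel, 'id', request.user, 'credential')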
Upfront: I'm extremely new to Python. : )
I have taken Google's Python Quickstart information and can successfully connect to my Google Sheet with read/write privileges and clear the sheet of any previous information. Also, I was able to follow the pyodbc docs and can successfully connect to the MSSQL server we use and write out an Excel copy of the MSSQL table.
However, I can't seem to figure out how to get the MSSQL table query results appended to the Google Sheet. Within VS Code, the traceback shows the most recent call, and it does appear to be working correctly with no errors. However, the sheet doesn't get updated.
Note: If I change the value for dfListFormat to a text string, it does append that single value in A1 of the target range.
value_range_body = {
"majorDimension": "ROWS",
"values": [
[dfListFormat]
]
}
Below is the full code I currently have. Any help/advice you all could provide would be greatly appreciated.
from __future__ import print_function
import httplib2
import oauth2client
import os
import googleapiclient
import openpyxl
import pandas
import pyodbc
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
from googleapiclient.discovery import build
from openpyxl import Workbook
from pandas import DataFrame, ExcelWriter
""" This is the code to get raw data from a specific Google Sheet"""
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/sheets.googleapis.com-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/spreadsheets'
CLIENT_SECRET_FILE = 'client_secret_noemail.json'
APPLICATION_NAME = 'Google Sheets API Python'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'sheets.googleapis.com-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run_flow(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
def main():
"""Shows basic usage of the Sheets API.
Creates a Sheets API service object and prints the names and majors of
students in a sample spreadsheet:
https://docs.google.com/spreadsheets/d/1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms/edit
"""
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?version=v4')
service = discovery.build(
'sheets', 'v4', http=http, discoveryServiceUrl=discoveryUrl)
# Google Sheet Url Link and Range name. Can use tab names to get full page.
spreadsheetId = '[spreadsheetid]'
rangeName = 'tblActiveEmployees'
# TODO: Add desired entries to the request body if needed
clear_values_request_body = {}
# Building Service to Clear Google Sheet
request = service.spreadsheets().values().clear(spreadsheetId=spreadsheetId,
range=rangeName, body=clear_values_request_body)
response = request.execute()
# Prints response that Google Sheet has been cleared
responseText = '\n'.join(
[str(response), 'The Google Sheet has been cleared!'])
print(responseText)
# SQL Server Connection
server = '[SQLServerIP]'
database = '[SQLServerDB]'
username = '[SQLServerUserID]'
password = '[SQLServerPW]'
cnxn = pyodbc.connect('Driver={ODBC Driver 13 for SQL Server};SERVER=' +
server+';DATABASE='+database+';UID='+username+';PWD='+password)
# Sample SQL Query to get Data
sql = 'select * from tblActiveEmployees'
cursor = cnxn.cursor()
cursor.execute(sql)
list(cursor.fetchall())
# Pandas reading values from SQL query, and building table
sqlData = pandas.read_sql_query(sql, cnxn)
# Pandas building dataframe, and exporting .xlsx copy of table
df = DataFrame(data=sqlData)
df.to_excel('tblActiveEmployees.xlsx',
header=True, index=False)
dfListFormat = df.values.tolist()
# How the input data should be interpreted.
value_input_option = 'USER_ENTERED' # TODO: Update placeholder value.
# How the input data should be inserted.
insert_data_option = 'OVERWRITE' # TODO: Update placeholder value.
value_range_body = {
"majorDimension": "ROWS",
"values": [
[dfListFormat]
]
}
request = service.spreadsheets().values().append(spreadsheetId=spreadsheetId, range=rangeName,
valueInputOption=value_input_option, insertDataOption=insert_data_option, body=value_range_body)
response = request.execute()
if __name__ == '__main__':
main()
Thanks to @tehhowch's input, the following was able to solve my problem. The issue was that my data was already in a list, and using it as "values": [[dfListFormat]] made "values" an array of an array of arrays, rather than just an array of arrays. Simply assigning it to "values" without brackets worked perfectly.
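To illustrate the shape difference with made-up rows (not the actual data from the question):
dfListFormat = [[1, 'Alice'], [2, 'Bob']]  # hypothetical result of df.values.tolist()

# Wrong: wraps the list again, so "values" becomes a three-level array.
wrong_body = {"majorDimension": "ROWS", "values": [[dfListFormat]]}

# Right: dfListFormat is already a list of rows (an array of arrays).
right_body = {"majorDimension": "ROWS", "values": dfListFormat}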
Below is the updated code, and big thanks to tehhowch!
from __future__ import print_function
import httplib2
import oauth2client
import os
import googleapiclient
import openpyxl
import pandas
import pyodbc
from googleapiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
from openpyxl import Workbook
from pandas import DataFrame, ExcelWriter
""" This is the code to get raw data from a specific Google Sheet"""
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/sheets.googleapis.com-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/spreadsheets'
CLIENT_SECRET_FILE = 'client_secret_noemail.json'
APPLICATION_NAME = 'Google Sheets API Python'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir,
'sheets.googleapis.com-python-quickstart.json')
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run_flow(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
def main():
"""Shows basic usage of the Sheets API.
Creates a Sheets API service object and prints the names and majors of
students in a sample spreadsheet:
https://docs.google.com/spreadsheets/d/1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms/edit
"""
credentials = get_credentials()
http = credentials.authorize(httplib2.Http())
discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?version=v4')
service = googleapiclient.discovery.build(
'sheets', 'v4', http=http, discoveryServiceUrl=discoveryUrl)
# Google Sheet Url Link and Range name. Can use tab names to get full page.
    spreadsheetId = '[spreadsheetID]'
rangeName = 'tblActiveEmployees'
# TODO: Add desired entries to the request body if needed
clear_values_request_body = {}
# Building Service to Clear Google Sheet
request = service.spreadsheets().values().clear(spreadsheetId=spreadsheetId,
range=rangeName, body=clear_values_request_body)
response = request.execute()
# Prints response that Google Sheet has been cleared
responseText = '\n'.join(
[str(response), 'The Google Sheet has been cleared!'])
print(responseText)
# SQL Server Connection
server = '[SQLServerIP]'
database = '[SQLServerDB]'
username = '[SQLServerUserID]'
password = '[SQLServerPW]'
cnxn = pyodbc.connect('Driver={ODBC Driver 13 for SQL Server};SERVER=' +
server+';DATABASE='+database+';UID='+username+';PWD='+password)
# Sample SQL Query to get Data
sql = 'select * from tblActiveEmployees'
cursor = cnxn.cursor()
cursor.execute(sql)
list(cursor.fetchall())
# Pandas reading values from SQL query, and building table
sqlData = pandas.read_sql_query(sql, cnxn)
# Pandas building dataframe, and exporting .xlsx copy of table
df = DataFrame(data=sqlData)
df.to_excel('tblActiveEmployees.xlsx',
header=True, index=False)
dfHeaders = df.columns.values.tolist()
dfHeadersArray = [dfHeaders]
dfData = df.values.tolist()
print(dfHeaders)
print(dfData)
# How the input data should be interpreted.
value_input_option = 'USER_ENTERED' # TODO: Update placeholder value.
# How the input data should be inserted.
insert_data_option = 'OVERWRITE' # TODO: Update placeholder value.
value_range_body = {
"majorDimension": "ROWS",
"values": dfHeadersArray + dfData
}
request = service.spreadsheets().values().append(spreadsheetId=spreadsheetId, range=rangeName,
valueInputOption=value_input_option, insertDataOption=insert_data_option, body=value_range_body)
response = request.execute()
if __name__ == '__main__':
main()