InsufficientCredentials on google API - python

I'm trying to understand what this error means:

File "C:\...\googleapiclient\http.py", line 856, in execute
    raise HttpError(resp, content, uri=self.uri)
googleapiclient.errors.HttpError: <HttpError 403 when requesting https://www.googleapis.com/drive/v3/files/19XYXTA2xER982xIHnDqH0cOtjBtSZtgC?alt=media returned "Insufficient Permission: Request had insufficient authentication scopes.">

What does "Insufficient Permission: Request had insufficient authentication scopes." mean?
Below is the whole code; credentials.json and client_secrets.json are both present in my directory.
from __future__ import print_function
import pickle
import os.path
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import os

g_login = GoogleAuth()
g_login.LocalWebserverAuth()  # This sucker might break [NK]
drive = GoogleDrive(g_login)
# end of tempCodeForEaseOfUse [NK]

# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/spreadsheets https://www.googleapis.com/auth/documents']  # https://www.googleapis.com/auth/drive.metadata.readonly https://www.googleapis.com/auth/documents.readonly']

# The ID and range of a sample spreadsheet.
SAMPLE_SPREADSHEET_ID = '1eb0DIJ7QglHqCrvIW94MUbMvFdbGKGXBSwaH49ILCvQ/edit?usp=sharing'
SAMPLE_RANGE_NAME = 'Class Data!A2:E'

creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
    with open('token.pickle', 'rb') as token:
        creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file(
            'credentials.json', SCOPES)
        creds = flow.run_local_server(port=0)
    # Save the credentials for the next run
    with open('token.pickle', 'wb') as token:
        pickle.dump(creds, token)

# TODO: fileID extracted manually, must automate [NK]
fileId = '19XYXTA2xER982xIHnDqH0cOtjBtSZtgC/view?usp=sharing'  # Please set the file ID of the text file. [StackTanaike]
sheets = build('sheets', 'v4', credentials=creds)
drive = build('drive', 'v3', credentials=creds)
# Retrieve data from Google Drive and parse it as an array. [StackTanaike]
data = drive.files().get_media(fileId=fileId).execute()

Your scopes aren't set correctly. Replace your SCOPES = ['https:// ... line with the following (and delete token.pickle so the new scopes take effect, as the comment in your code says):

SCOPES = ['https://www.googleapis.com/auth/spreadsheets', 'https://www.googleapis.com/auth/drive']
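Once the scopes are fixed and you have re-consented, the download itself should work. A minimal sketch, assuming fileId is the bare file ID (the /view?usp=sharing suffix in your code is not part of the ID and must be stripped):

import io
from googleapiclient.http import MediaIoBaseDownload

file_id = '19XYXTA2xER982xIHnDqH0cOtjBtSZtgC'  # bare ID, no URL suffix

request = drive.files().get_media(fileId=file_id)
buffer = io.BytesIO()
downloader = MediaIoBaseDownload(buffer, request)
done = False
while not done:
    # Download the next chunk; status reports overall progress.
    status, done = downloader.next_chunk()
data = buffer.getvalue()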

Related

Google drive api keeps requesting authentication whenever my script is run

I am trying to create a script that uploads pictures to my Google Drive folder. The script works well on my computer, but it is supposed to run automatically every other week; right now, whenever I run it, Google prompts me to log in instead of remembering the session from the first time I logged in.
My script to push the pictures to Google Drive looks like this:
def upload_files():
    upload_files_list = []
    for element in os.listdir("billeder"):
        upload_files_list.append("/root/billeder/" + element)
    for upload_file in upload_files_list:
        gfile = drive.CreateFile({'parents': [{'id': 'folder_id_here'}]})
        # Read file and set it as the content of this instance.
        gfile.SetContentFile(upload_file)
        gfile.Upload()  # Upload the file.
My script to authenticate looks like this:
from __future__ import print_function
import os.path
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError

# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly']

def main():
    """Shows basic usage of the Drive v3 API.
    Prints the names and ids of the first 10 files the user has access to.
    """
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'client_secrets.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())

    try:
        service = build('drive', 'v3', credentials=creds)
        # Call the Drive v3 API
        results = service.files().list(
            pageSize=10, fields="nextPageToken, files(id, name)").execute()
        items = results.get('files', [])
        if not items:
            print('No files found.')
            return
        print('Files:')
        for item in items:
            print(u'{0} ({1})'.format(item['name'], item['id']))
    except HttpError as error:
        # TODO(developer) - Handle errors from drive API.
        print(f'An error occurred: {error}')

if __name__ == '__main__':
    main()
As far as I can see, you are not creating a Drive service object in your upload_files method, which means it isn't authenticated.
# To install the Google client library for Python, run the following command:
# pip install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib
from __future__ import print_function
import os.path
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from google.auth.exceptions import RefreshError
from googleapiclient.http import MediaFileUpload

# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/drive']
CREDENTIALS = r'C:\YouTube\dev\credentials.json'
TOKENJSON = 'tokenDriveUpload.json'

def main():
    """Shows basic usage of the Drive v3 API.
    Uploads a file to the user's Drive.
    """
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists(TOKENJSON):
        creds = Credentials.from_authorized_user_file(TOKENJSON, SCOPES)
        try:
            creds.refresh(Request())
        except RefreshError:
            # If the refresh token has expired then we request authorization again.
            os.remove(TOKENJSON)
            creds.refresh_token = None
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                CREDENTIALS, SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open(TOKENJSON, 'w') as token:
            token.write(creds.to_json())

    upload_file(creds, r'C:\YouTube\dev\logo.png')

def upload_file(creds, filepath):
    try:
        # Create the Drive API client.
        service = build('drive', 'v3', credentials=creds)
        basename = os.path.basename(filepath)
        print(basename)
        file_metadata = {'name': basename}
        media = MediaFileUpload(filepath,
                                mimetype='text/plain')
        file = service.files().create(body=file_metadata, media_body=media,
                                      fields='id').execute()
        print(F'File ID: {file.get("id")}')
    except HttpError as error:
        # TODO(developer) - Handle errors from drive API.
        print(F'An error occurred: {error}')

if __name__ == '__main__':
    main()

In python, is there any way I can store a 'Resource' object so I can use it later?

I am writing a program that uses the YouTube API:
flow = google_auth_oauthlib.flow.InstalledAppFlow.from_client_secrets_file(client_secrets_file, scopes)
credentials = flow.run_console()
youtube_analytics = googleapiclient.discovery.build("youtubeAnalytics", "v2", credentials=credentials)
Finally, it returns a 'Resource' object. Can I store this object so that in the future I can get it back just by referring to a file?
You can store the JSON creds returned by the authorization flow. The library will then be able to load those stored creds the next time it needs access.
The following example is adapted from the official Google drive quickstart
from __future__ import print_function
import os.path
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError

# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/yt-analytics.readonly']

def main():
    """Shows basic usage of the YouTube Analytics v2 API."""
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())

    try:
        service = build('youtubeAnalytics', 'v2', credentials=creds)
        # Call the YouTube Analytics API
        ...
    except HttpError as error:
        # TODO(developer) - Handle errors from API.
        print(f'An error occurred: {error}')

if __name__ == '__main__':
    main()
If you have any issues with this please let me know.
Alternatively, save it to a pickle file, then load that file later (and re-dump whenever you want to update it):

import pickle

# resource = whatever you want to store

# Dump into the pickle file the first time (and on every update):
with open('mypickle.pickle', 'wb') as f:
    pickle.dump(resource, f)

# Open the file at a later time:
with open('mypickle.pickle', 'rb') as f:
    loaded_obj = pickle.load(f)
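Note that whether a googleapiclient Resource object pickles cleanly can depend on the library version; storing the credentials JSON and rebuilding the service with build(), as in the answer above, is generally the more robust approach.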

Update CSV file on Google Drive to be used on Data Studio with PyDrive

I need to update a CSV file located on my Drive because I use it on a dashboard in Google Data Studio.
Until now, I was using this code :
previous_GDS_df = pd.read_excel(path_to_GDS_file)
pd.concat(objs=[previous_GDS_df, df_GDS]).to_excel(path_to_GDS_file, index=False)
f = drive.CreateFile({'id': spreadsheet_id})
f.SetContentFile(path_to_GDS_file)
f.Upload()
On which:
"previous_GDS_df" is the content of the CSV file I'm updating,
"path_to_GDS_file" is the path to my local CSV file, on which I make the modifications,
"df_GDS" is the DataFrame of my modifications, i.e. the elements I'd like to append to my file on Drive.
Basically, my theory was the following: "I extract the previous content of the file, I append the new content to it, then I edit my Drive file with 'SetContentFile' and upload it all."
The problem is that whenever I edit my file on Drive this way, I have to reconnect the file in my GDS dashboard, because I think SetContentFile erases the previous Drive file entirely and writes a new one. In that case I have to reconnect the Drive file to GDS because it was deleted and rewritten.
So I'm looking for a way to update my Drive file such that I will not have to reconnect the file to the dashboard every time, and the modifications simply appear.
Do you have a solution? My theory is surely bad; I'm missing something somewhere.
Thank you for your help; ask me if more information is needed.
--- EDIT ---
I've tested some solutions, but nothing worked. The best one I tested was this (thanks to the help in the comments):
from __future__ import print_function
import os.path
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from googleapiclient.http import MediaFileUpload

SCOPES = ['https://www.googleapis.com/auth/drive']  # If modifying these scopes, delete the file token.json.

def getCreds():  # Authentication
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())
    return creds

def updateFile(service, spreadsheet_id, path_to_GDS_file):  # Call the API
    media = MediaFileUpload(path_to_GDS_file, mimetype='application/vnd.google-apps.spreadsheet', resumable=True)
    res = service.files().update(fileId=spreadsheet_id, media_body=media, fields="*").execute()
    return res

def main(spreadsheet_id, path_to_GDS_file):
    creds = getCreds()
    service = build('drive', 'v3', credentials=creds)
    updateFile(service, spreadsheet_id, path_to_GDS_file)

if __name__ == '__main__':
    main()
But the main() function doesn't just append to my CSV file on Google Drive; it rewrites the whole file, so I still have to reconnect it in Data Studio.
Do you know how I could just append rows to my CSV file located on Google Drive?
Thank you.
I don't know much about PyDrive, but in order to update a file via the Drive API, you have to use Files: update. This allows you either to update just the file metadata, or the file content as well.
Here's a possible sample that uses the official Python library:
from __future__ import print_function
import os.path
import pickle
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from googleapiclient.http import MediaFileUpload

SCOPES = ['https://www.googleapis.com/auth/drive']  # If modifying these scopes, delete the file token.json.
fileId = "DRIVE_FILE_ID"  # Change to yours
filePath = "LOCAL_FILE_PATH"  # Change to yours

def getCreds():  # Authentication
    creds = None
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    return creds

def updateFile(service, fileId):  # Call the API
    media = MediaFileUpload(filePath, mimetype='text/csv', resumable=True)
    res = service.files().update(fileId=fileId, media_body=media, fields="*").execute()
    return res

def main():
    creds = getCreds()
    service = build('drive', 'v3', credentials=creds)
    updateFile(service, fileId)

if __name__ == '__main__':
    main()
Note:
You'll first have to download your credentials file, as explained in the quickstart referenced below.
Reference:
Python Quickstart
service.files().update()
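On the append question specifically: the Drive API has no append operation for file content, so one possible approach (a sketch under that assumption, not tested) is to download the current content with files().get_media, add the new rows locally, and push the merged result back with files().update. Because update keeps the same file ID, the Data Studio connection should survive:

import io
from googleapiclient.http import MediaIoBaseUpload

def append_rows(service, file_id, new_rows):
    # Download the current CSV content (bytes).
    current = service.files().get_media(fileId=file_id).execute()
    # Append the new rows locally (new_rows is a list of CSV-formatted strings).
    updated = current.decode('utf-8').rstrip('\n') + '\n' + '\n'.join(new_rows) + '\n'
    # Upload the merged content back under the same file ID.
    media = MediaIoBaseUpload(io.BytesIO(updated.encode('utf-8')), mimetype='text/csv')
    return service.files().update(fileId=file_id, media_body=media).execute()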

How do I download every Gmail email with Python?

I want to download them with as few API calls as possible. https://github.com/googleapis/google-api-python-client/tree/master/samples doesn't include Gmail samples.
Start by following the Python quickstart; it will give you an idea of how to get the auth part working:
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request

# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']

def main():
    """Shows basic usage of the Gmail API.
    Lists the user's Gmail labels.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('gmail', 'v1', credentials=creds)
    # Call the Gmail API
    results = service.users().labels().list(userId='me').execute()
    labels = results.get('labels', [])
    if not labels:
        print('No labels found.')
    else:
        print('Labels:')
        for label in labels:
            print(label['name'])

if __name__ == '__main__':
    main()
Once you understand what that is doing, you should be able to consult the users.messages.list reference to get an idea of how to alter it to list messages, roughly as sketched below.
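A minimal sketch of what that might look like, reusing the quickstart's service object and assuming the gmail.readonly scope; users.messages.list is paginated, and a batch request keeps the HTTP round trips down when fetching each message (batches are capped at 100 calls):

def fetch_all_messages(service):
    messages = []

    def collect(request_id, response, exception):
        # Callback invoked once per message in the batch; skip individual failures.
        if exception is None:
            messages.append(response)

    page_token = None
    while True:
        # One API call lists up to 100 message IDs (matching the batch cap).
        listing = service.users().messages().list(
            userId='me', maxResults=100, pageToken=page_token).execute()
        batch = service.new_batch_http_request(callback=collect)
        for ref in listing.get('messages', []):
            # format='raw' returns the full RFC 2822 message, base64url-encoded.
            batch.add(service.users().messages().get(
                userId='me', id=ref['id'], format='raw'))
        batch.execute()  # One HTTP round trip for the whole page of messages.
        page_token = listing.get('nextPageToken')
        if not page_token:
            return messages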

How to fix 'No OUs found'

I am using the Google Admin SDK - Directory API, I have a successful connection to the API but no OUs are being stored in my array.
Result:
Getting the OUs in the domain
No OUs found in the domain.
I tried modifying the code to list the first 10 users instead, and that works.
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request

# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/admin.directory.orgunit']

def main():
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)
            creds = flow.run_local_server()
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('admin', 'directory_v1', credentials=creds)
    # Call the Admin SDK Directory API
    print('Getting the OUs in the domain')
    results = service.orgunits().list(customerId='my_customer').execute()
    ou_list = results.get('orgunits', [])
    if not ou_list:
        print('No OUs found in the domain.')
    else:
        print('OU List:')
        # for ou in ou_list:
        #     print(u'{0} ({1})'.format(ou['name'], ou['orgUnitPath']))

if __name__ == '__main__':
    main()
No error messages are displayed, but the OUs are not being stored.
The key is not orgunits, it is organizationUnits:

ou_list = results.get('organizationUnits', [])
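With that change, the commented-out loop in the question should print the OUs. A minimal sketch (field names taken from the orgunits.list response):

results = service.orgunits().list(customerId='my_customer').execute()
ou_list = results.get('organizationUnits', [])
if not ou_list:
    print('No OUs found in the domain.')
else:
    print('OU List:')
    for ou in ou_list:
        # Each entry is an orgUnit resource with name and orgUnitPath fields.
        print(u'{0} ({1})'.format(ou['name'], ou['orgUnitPath']))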
