This question already has answers here:
Google Drive API: How to create a file in appDataFolder?
(2 answers)
Closed 1 year ago.
I have a small script copied from Google; it is supposed to upload a file to my appDataFolder on Google Drive.
When run for the first time, it asks for authentication and I grant permission.
import os.path
from googleapiclient.http import MediaFileUpload
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
# If modifying these scopes, delete the file token.json.
# NOTE: drive.appdata alone returned 403 insufficientFilePermissions when
# creating files in appDataFolder; adding drive.file resolves it (see the
# accepted answer quoted below).
SCOPES = ['https://www.googleapis.com/auth/drive.appdata',
          'https://www.googleapis.com/auth/drive.file']
def main():
    """Upload a local file into the Drive appDataFolder.

    Runs the installed-app OAuth flow on first use (caching the token in
    token.json), then creates 'token_lol.json' inside appDataFolder and
    prints the new file's ID.
    """
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the
    # first time.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run.
        with open('token.json', 'w') as token:
            token.write(creds.to_json())

    # Create the Drive v3 service.
    service = build('drive', 'v3', credentials=creds)

    # Drive v3 metadata: the filename key is 'name' (v2 used 'title', which
    # v3 ignores) and 'parents' is a plain list of folder IDs (v2 used a
    # list of {'id': ...} dicts).
    file_metadata = {
        'name': 'token_lol.json',
        'parents': ['appDataFolder'],
    }
    media = MediaFileUpload('token_lol.json',
                            mimetype='application/json',
                            resumable=True)
    file = service.files().create(body=file_metadata,
                                  media_body=media,
                                  fields='id').execute()
    print('File ID: %s' % file.get('id'))


if __name__ == '__main__':
    main()
Then, when I try to upload the file, I get this error:
HttpError(resp, content, uri=self.uri)
googleapiclient.errors.HttpError:
<HttpError 403 when requesting https://www.googleapis.com/upload/drive/v3/files?fields=id&alt=json&uploadType=resumable returned "The user does not have sufficient permissions for this file.". Details: "[{'domain': 'global', 'reason': 'insufficientFilePermissions', 'message': 'The user does not have sufficient permissions for this file.'}]">
It only works when I use this SCOPE : https://www.googleapis.com/auth/drive
But according to Google documentation here it should work with this SCOPE : https://www.googleapis.com/auth/drive.appdata
So what is the problem?
I found the answer here
You need to add these 2 SCOPES together:
SCOPES = ['https://www.googleapis.com/auth/drive.appdata','https://www.googleapis.com/auth/drive.file']
Related
I am trying to create a script that uploads pictures to my google drive folder. This script works well on my computer, but the problem is that this script is supposed to run itself every other week automatically, but right now, whenever I run the script google prompts me to log in, instead of remembering the session from the first time I logged in.
my script to push the pictures to google drive looks like this:
def upload_files():
    """Upload every file found in the 'billeder' directory to Drive."""
    # Build the absolute path of each picture in the local folder.
    paths = ["/root/billeder/" + name for name in os.listdir("billeder")]
    for path in paths:
        # Create a new Drive file parented under the destination folder.
        gfile = drive.CreateFile({'parents': [{'id': 'folder_id_here'}]})
        # Read file and set it as the content of this instance.
        gfile.SetContentFile(path)
        gfile.Upload()  # Upload the file.
my script to authenticate looks like this:
from __future__ import print_function
import os.path
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
# If modifying these scopes, delete the file token.json.
# Read-only metadata scope: enough for files().list below, but it would NOT
# permit uploads or content changes.
SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly']
def main():
    """List the names and IDs of up to 10 Drive files visible to the user."""
    creds = None
    # Reuse cached OAuth tokens from a previous run, if any exist.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # Without usable credentials, refresh the token or run the login flow.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'client_secrets.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Persist the (possibly refreshed) credentials for the next run.
        with open('token.json', 'w') as token_file:
            token_file.write(creds.to_json())
    try:
        service = build('drive', 'v3', credentials=creds)
        # Call the Drive v3 API for the first page of files.
        response = service.files().list(
            pageSize=10, fields="nextPageToken, files(id, name)").execute()
        items = response.get('files', [])
        if not items:
            print('No files found.')
            return
        print('Files:')
        for item in items:
            print(u'{0} ({1})'.format(item['name'], item['id']))
    except HttpError as error:
        # TODO(developer) - Handle errors from drive API.
        print(f'An error occurred: {error}')


if __name__ == '__main__':
    main()
As far as I can see, you are not creating a Drive service object in your upload_files method, which means it is not authenticated.
# To install the Google client library for Python, run the following command:
# pip install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib
from __future__ import print_function
import os.path
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from google.auth.exceptions import RefreshError
from googleapiclient.http import MediaFileUpload
# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/drive']
# Raw string: '\Y', '\d', '\c' in a normal literal are invalid escape
# sequences (DeprecationWarning today, a SyntaxError in future Pythons).
CREDENTIALS = r'C:\YouTube\dev\credentials.json'
TOKENJSON = 'tokenDriveUpload.json'
def main():
    """Authorize the user (caching tokens in TOKENJSON), then upload a file."""
    creds = None
    # Only attempt to load/refresh a cached token when one actually exists;
    # otherwise creds is still None and creds.refresh() would raise
    # AttributeError instead of the RefreshError being handled here.
    if os.path.exists(TOKENJSON):
        creds = Credentials.from_authorized_user_file(TOKENJSON, SCOPES)
        try:
            creds.refresh(Request())
        except RefreshError:
            # The refresh token has expired: discard the cached token so the
            # flow below requests authorization again.
            os.remove(TOKENJSON)
            creds.refresh_token = None
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                CREDENTIALS, SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run.
        with open(TOKENJSON, 'w') as token:
            token.write(creds.to_json())

    # Raw string: avoids the invalid '\Y'/'\d'/'\l' escape sequences.
    upload_file(creds, r'C:\YouTube\dev\logo.png')
def upload_file(creds, filepath):
    """Upload filepath to the user's Drive and print the new file's ID.

    creds: authorized Google user credentials.
    filepath: path of the local file to upload.
    """
    try:
        # Build the Drive API client.
        drive = build('drive', 'v3', credentials=creds)
        name = os.path.basename(filepath)
        print(name)
        metadata = {'name': name}
        media = MediaFileUpload(filepath,
                                mimetype='text/plain')
        created = drive.files().create(body=metadata, media_body=media,
                                       fields='id').execute()
        print(F'File ID: {created.get("id")}')
    except HttpError as error:
        # TODO(developer) - Handle errors from drive API.
        print(F'An error occurred: {error}')


if __name__ == '__main__':
    main()
I have a Python script that gets data from a CSV file (myFile.csv) and pushes it into a Google Drive folder.
When I run my code, I get an error:
googleapiclient.errors.HttpError: <HttpError 403 when requesting https://www.googleapis.com/upload/drive/v3/files?fields=id&alt=json&uploadType=multipart returned "Insufficient Permission: Request had insufficient authentication scopes.". Details: "[{'domain': 'global', 'reason': 'insufficientPermissions', 'message': 'Insufficient Permission: Request had insufficient authentication scopes.'}]">
What am I missing?
Below is my code
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
import google.auth
# OAuth scopes requested for Application Default Credentials.
# NOTE(review): 'drive.resource' does not appear in the published Drive API
# scope list — confirm it is intentional; the broad 'drive' scope below
# already covers full file access.
scope = [
'https://www.googleapis.com/auth/drive.file',
'https://www.googleapis.com/auth/drive.resource',
'https://spreadsheets.google.com/feeds',
'https://www.googleapis.com/auth/drive',
'https://www.googleapis.com/auth/drive.readonly']
# Application Default Credentials. NOTE(review): for user-based ADC the
# scopes granted at authorization time apply, not necessarily this list —
# verify the credential actually carries a Drive scope.
creds, _ = google.auth.default(scopes=scope)
def push_csv_to_google_drive(creds, folder_id=None, csv_path=None):
    """Upload a CSV file into a Google Drive folder as 'myFile.csv'.

    Args:
        creds: authorized Google credentials.
        folder_id: destination Drive folder ID; defaults to the module-level
            gdrive_destination_folder_id.
        csv_path: path of the CSV file to upload; defaults to the
            module-level source_csv_file_path.

    Returns:
        The created file resource (a dict containing 'id').
    """
    # Fall back to the module-level configuration names used previously,
    # so existing callers keep working.
    if folder_id is None:
        folder_id = gdrive_destination_folder_id
    if csv_path is None:
        csv_path = source_csv_file_path
    service = build('drive', 'v3', credentials=creds)
    file_metadata = {"name": 'myFile.csv', "parents": [folder_id]}
    # 'text/csv' is the registered MIME type for CSV ('file/csv' is not a
    # valid MIME type).
    media = MediaFileUpload(
        csv_path,
        mimetype="text/csv")
    file = service.files().create(
        body=file_metadata,
        media_body=media,
        fields="id").execute()
    return file


if __name__ == '__main__':
    push_csv_to_google_drive(creds=creds)
As far as I can see, you are not authorizing a user at all.
This is my drive upload sample.
# To install the Google client library for Python, run the following command:
# pip install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib
from __future__ import print_function
import os.path
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from googleapiclient.http import MediaFileUpload
# If modifying these scopes, delete the file token.json.
# Full Drive access, so the sample can create files anywhere in the drive.
SCOPES = ['https://www.googleapis.com/auth/drive']
def main():
    """Authorize the user and upload Upload.csv to the root of their Drive."""
    creds = None
    # tokenDriveUpload.json caches the user's access and refresh tokens from
    # a previous authorization, so the browser flow only runs once.
    if os.path.exists('tokenDriveUpload.json'):
        creds = Credentials.from_authorized_user_file('tokenDriveUpload.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            # Raw string: '\Y', '\d', '\c' would otherwise be treated as
            # (invalid) escape sequences.
            flow = InstalledAppFlow.from_client_secrets_file(
                r'C:\YouTube\dev\credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run.
        with open('tokenDriveUpload.json', 'w') as token:
            token.write(creds.to_json())
    try:
        # Create the Drive API client.
        service = build('drive', 'v3', credentials=creds)
        file_metadata = {'name': 'Upload.csv'}
        media = MediaFileUpload('Upload.csv',
                                mimetype='text/plain')
        # pylint: disable=maybe-no-member
        file = service.files().create(body=file_metadata, media_body=media,
                                      fields='id').execute()
        print(F'File ID: {file.get("id")}')
    except HttpError as error:
        # TODO(developer) - Handle errors from drive API.
        print(F'An error occurred: {error}')


if __name__ == '__main__':
    main()
I worked on this question for 2 days and found no direct answers on the web or SO, so I thought it would be prudent to do a Q&A here now that I've solved it.
Essentially, I have Python code running in an AWS Lambda that collects a bunch of data, processes them, and generates an Excel file with the info that my team needs. I need to push this file out to Google Drive (to a shared folder) so that the team can all see the info.
The problem was that I was trying to do it using MediaFileUpload from the Google API. This method takes in a filename as a string, like this (3rd to last line):
def upload_file_to_gdrive(folder_id, filename, FOLDER):
    """Upload FOLDER+filename into Drive folder folder_id as a Google Sheet.

    Returns the created file resource (a dict containing 'id').
    """
    # If modifying these scopes, delete the file token.pickle.
    SCOPES = ['https://www.googleapis.com/auth/drive.file', 'https://www.googleapis.com/auth/drive',
              'https://www.googleapis.com/auth/drive.appdata']
    creds = None
    # The file token.pickle stores the user's access and refresh tokens from
    # a previous run.
    if os.path.exists(FOLDER+'token.pickle'):
        with open(FOLDER+'token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        print('creds didnt exist or were invalid')
        if creds and creds.expired and creds.refresh_token:
            print('creds were expired')
            creds.refresh(gRequest())
        else:
            # from_client_secrets_file() expects a file *path*, not an open
            # file object, so pass the path string directly.
            flow = InstalledAppFlow.from_client_secrets_file(
                FOLDER+'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run.
        with open(FOLDER+'token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    service = build('drive', 'v3', credentials=creds, cache_discovery=False)
    ###filename is something like '2020-07-02_data.xlsx'
    ###path is something like 'c:/datafolder/'
    file_info = MediaFileUpload(FOLDER+filename,
                                mimetype='application/vnd.google-apps.spreadsheet')
    upload_metadata = {'name': filename, 'parents': [folder_id],
                       'mimeType': 'application/vnd.google-apps.spreadsheet'}
    return service.files().create(body=upload_metadata, media_body=file_info,
                                  fields='id').execute()
Well, that works fine for local files on my computer, but how do I do this for s3 files? I cannot pass "s3://mybucket/my_file.xlsx" to the MediaFileUpload() method.
In order to solve this, I just needed to use the MediaIoBaseUpload() method from the Google Drive API, instead. The trick was reading the xlsx file contents in from the s3 file, pulling that into BytesIO, and then pushing it into the Google Drive MediaIoBaseUpload method. Then it all worked.
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request as gRequest
from apiclient.http import MediaFileUpload, BytesIO, MediaIoBaseUpload
def upload_file_to_gdrive(folder_id, filename, CRED_FOLDER, S3_FOLDER):
s3 = s3fs.S3FileSystem(anon=False,key=<AWS_KEY>,secret=<AWS_SECRET>)
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/drive.file', 'https://www.googleapis.com/auth/drive',
'https://www.googleapis.com/auth/drive.appdata']
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists(CRED_FOLDER+'token.pickle'):
with s3.open(CRED_FOLDER+'token.pickle', 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
print('creds didnt exist or were invalid')
if creds and creds.expired and creds.refresh_token:
print('creds were expired')
creds.refresh(gRequest())
else:
with s3.open(CRED_FOLDER+'credentials.json', 'rb') as f:
flow = InstalledAppFlow.from_client_secrets_file(f, SCOPES)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with s3.open(CRED_FOLDER+'token.pickle', 'wb') as token:
pickle.dump(creds, token)
service = build('drive', 'v3', credentials=creds, cache_discovery=False)
### filename is still like '2020-07-02_site_ads_txt.xlsx'
### but S3_FOLDER is a valid s3 bucket path
with s3.open(S3_FOLDER+filename, 'rb') as f:
fbytes = BytesIO(f.read())
media = MediaIoBaseUpload(fbytes, mimetype='application/vnd.google-apps.spreadsheet')
upload_metadata = {'name': filename, 'parents': [folder_id], 'mimeType': 'application/vnd.google-apps.spreadsheet'}
return service.files().create(body=upload_metadata, media_body=media, fields='id').execute()
I'm trying to understand what this error is:
File "C:\...\googleapiclient\http.py", line 856, in execute
raise HttpError(resp, content, uri=self.uri)
googleapiclient.errors.HttpError: <HttpError 403 when requesting https://www.googleapis.com/drive/v3/files/19XYXTA2xER982xIHnDqH0cOtjBtSZtgC?alt=media returned "Insufficient Permission: Request had insufficient authentication scopes.">
What does "Insufficient Permission: Request had insufficient authentication scopes." mean?
Below is the whole code: credentials.json and client_secrets.json are both present in my directory.
from __future__ import print_function
import pickle
import os.path
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import os
# Authenticate with PyDrive for simple upload/download helpers.
g_login = GoogleAuth()
g_login.LocalWebserverAuth() # This sucker might break [NK]
drive = GoogleDrive(g_login)
#end of tempCodeForEaseOfUse [NK]

# If modifying these scopes, delete the file token.pickle.
# Each scope must be its own list element (not one space-joined string), and
# drive.files().get_media below needs Drive access, hence the 'drive' scope.
SCOPES = ['https://www.googleapis.com/auth/spreadsheets',
          'https://www.googleapis.com/auth/documents',
          'https://www.googleapis.com/auth/drive']
# The ID and range of a sample spreadsheet. Use the bare ID only — the
# '/edit?usp=sharing' suffix of a share link is not part of the ID.
SAMPLE_SPREADSHEET_ID = '1eb0DIJ7QglHqCrvIW94MUbMvFdbGKGXBSwaH49ILCvQ'
SAMPLE_RANGE_NAME = 'Class Data!A2:E'

creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
    with open('token.pickle', 'rb') as token:
        creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file(
            'credentials.json', SCOPES)
        creds = flow.run_local_server(port=0)
    # Save the credentials for the next run.
    with open('token.pickle', 'wb') as token:
        pickle.dump(creds, token)

# TODO: fileID extracted manually, must automate [NK]
# Bare file ID only — do not append '/view?usp=sharing' from the share URL.
fileId = '19XYXTA2xER982xIHnDqH0cOtjBtSZtgC' # Please set the file ID of text file. [StackTanaike]
sheets = build('sheets', 'v4', credentials=creds)
drive = build('drive', 'v3', credentials=creds)
# Retrieve data from Google Drive and parse data as an array. [StackTanaike]
data = drive.files().get_media(fileId=fileId).execute()
Your scopes aren't set correctly. Replace your SCOPES = ['https:// ... with the following:
SCOPES = ['https://www.googleapis.com/auth/spreadsheets', 'https://www.googleapis.com/auth/drive']
How do I upload to a shared drive using the Python version of Google Drive API v3?
You just need to add supportsAllDrives=True to the Files: Create request.
# In case you need background on the credential/scoping code, it was copied from
# https://developers.google.com/drive/api/v3/quickstart/python
# Basis for upload code from:
# https://developers.google.com/drive/api/v3/manage-uploads
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from apiclient.http import MediaFileUpload
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/drive']
# OAuth client secrets downloaded from the Cloud console, and the pickle
# file used to cache the resulting user credentials between runs.
credentials_json = 'oauth-credentials.json'
credentials_pickle = 'token.pickle'
def get_creds():
    """Return authorized user credentials, cached in a local pickle file."""
    creds = None
    # Reuse a previously saved OAuth token when one is available.
    if os.path.exists(credentials_pickle):
        with open(credentials_pickle, 'rb') as token:
            creds = pickle.load(token)
    if creds and creds.valid:
        return creds
    # No usable token: refresh an expired one, or run the interactive flow.
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file(
            credentials_json, SCOPES)
        creds = flow.run_local_server(port=0)
    # Cache the credentials for the next run.
    with open(credentials_pickle, 'wb') as token:
        pickle.dump(creds, token)
    return creds
def main():
    """Upload wakeupcat.jpg into the root folder of the first shared drive."""
    creds = get_creds()
    # Build the drive service.
    drive_service = build('drive', 'v3', credentials=creds)
    # Take the first shared drive; introspect 'results' here to choose a
    # different one if needed.
    results = drive_service.drives().list(pageSize=10).execute()
    shared_drive_id = results['drives'][0]['id']
    # A shared drive's root folder has the same ID as the drive itself; any
    # folder ID inside the shared drive would also work as the parent.
    file_metadata = {
        'name': 'wakeupcat.jpg',
        'mimeType': 'image/jpeg',
        'parents': [shared_drive_id],
    }
    # Media upload object pointing at the local image to send.
    media = MediaFileUpload('/path/to/wakeupcat.jpg', mimetype='image/jpeg')
    # supportsAllDrives=True is what enables uploading into shared drives.
    f = drive_service.files().create(
        body=file_metadata, media_body=media, supportsAllDrives=True).execute()
    print("Created file '%s' id '%s'." % (f.get('name'), f.get('id')))


if __name__ == '__main__':
    main()