I want to create directories in Google Drive using a Python script. I spent the whole day looking for a tutorial on this, but all the posts I found were old. I visited the Google Drive developer site, but it only had a short piece of code. When I used it like this,
def createFolder(name):
    file_metadata = {
        'name': name,
        'mimeType': 'application/vnd.google-apps.folder'
    }
    file = drive_service.files().create(body=file_metadata,
                                        fields='id').execute()
    print('Folder ID: %s' % file.get('id'))
it gave me the following error:
NameError: name 'drive_service' is not defined
I didn't import anything because I don't know which library to import; I just used this code as-is. How do I use this (or updated code) to create folders in Google Drive? I am a beginner.
Try this code:
import httplib2
from googleapiclient.discovery import build
from oauth2client.service_account import ServiceAccountCredentials

scope = 'https://www.googleapis.com/auth/drive'
# `client_secrets.json` should be your credentials file, as generated by Google.
credentials = ServiceAccountCredentials.from_json_keyfile_name('client_secrets.json', scope)
http = httplib2.Http()
drive_service = build('drive', 'v3', http=credentials.authorize(http))

def createFolder(name):
    file_metadata = {
        'name': name,
        'mimeType': 'application/vnd.google-apps.folder'
    }
    file = drive_service.files().create(body=file_metadata,
                                        fields='id').execute()
    print('Folder ID: %s' % file.get('id'))

createFolder('folder_name')
You will need to install oauth2client, google-api-python-client and httplib2 via pip.
To list all of the folders:
page_token = None
while True:
    response = drive_service.files().list(q="mimeType='application/vnd.google-apps.folder'",
                                          spaces='drive',
                                          fields='nextPageToken, files(id, name)',
                                          pageToken=page_token).execute()
    for file in response.get('files', []):
        # Process change
        print('Found file: %s (%s)' % (file.get('name'), file.get('id')))
    page_token = response.get('nextPageToken', None)
    if page_token is None:
        break
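If you only want the folders directly under a particular parent folder, the same query syntax also supports an 'in parents' clause. A minimal sketch, assuming a hypothetical PARENT_FOLDER_ID:

PARENT_FOLDER_ID = 'your-parent-folder-id'  # hypothetical: the folder to search under
response = drive_service.files().list(
    q=("mimeType='application/vnd.google-apps.folder' "
       f"and '{PARENT_FOLDER_ID}' in parents and trashed=false"),
    spaces='drive',
    fields='files(id, name)').execute()
for folder in response.get('files', []):
    print('Found folder: %s (%s)' % (folder.get('name'), folder.get('id')))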
By the way:
The user cannot directly access data in the hidden app folders, only
the app can access them. This is designed for configuration or other
hidden data that the user should not directly manipulate. (The user
can choose to delete the data to free up the space used by it.)
The only way the user can get access to it is via some functionality
exposed by the specific app.
According to the documentation
(https://developers.google.com/drive/v3/web/appdata), you can access,
download and manipulate the files if you want to, just not through the
normal Google Drive UI.
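For illustration, a small hedged sketch of reading what the app has stored in its hidden folder, assuming the credentials were authorized with the drive.appdata scope; the only change from a normal listing is the spaces parameter:

response = drive_service.files().list(
    spaces='appDataFolder',
    fields='nextPageToken, files(id, name)',
    pageSize=10).execute()
for f in response.get('files', []):
    print('Found app data file: %s (%s)' % (f.get('name'), f.get('id')))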
Answer
I recommend you follow this guide to start working with the Drive API and Python. Once you have managed to run the sample, replace the lines below # Call the Drive v3 API with this code, which creates a folder in your Drive. Furthermore, you have to modify the scopes in order to create a folder; in this case, you can use https://www.googleapis.com/auth/drive.file. The final result looks like this:
Code
from __future__ import print_function
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials

# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/drive.file']


def main():
    """Shows basic usage of the Drive v3 API."""
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())

    service = build('drive', 'v3', credentials=creds)

    # Call the Drive v3 API
    folder_name = 'folder A'
    file_metadata = {
        'name': folder_name,
        'mimeType': 'application/vnd.google-apps.folder'
    }
    # Use the `service` object built above (the original snippet referenced
    # an undefined `drive_service`).
    file = service.files().create(body=file_metadata,
                                  fields='id').execute()
    print('Folder ID: %s' % file.get('id'))


if __name__ == '__main__':
    main()
References:
Drive API: Python Quickstart
Drive API: Create a folder
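If you need the new folder to live inside an existing folder rather than at the root of My Drive, the create call accepts a parents list. A minimal sketch, assuming a hypothetical PARENT_FOLDER_ID and the service object built in the code above:

PARENT_FOLDER_ID = 'your-parent-folder-id'  # hypothetical id of an existing folder
file_metadata = {
    'name': 'folder B',
    'mimeType': 'application/vnd.google-apps.folder',
    'parents': [PARENT_FOLDER_ID]
}
file = service.files().create(body=file_metadata, fields='id').execute()
print('Nested folder ID: %s' % file.get('id'))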
Related
I installed Google Drive on my computer (Windows 11 x64), mapped to drive G:\.
I want to be able to get a shared link for a specific file/folder that I have a path to.
Google Drive can contain duplicate file/folder names.
How can I do this with Python?
Thanks in advance
Edited:
I managed to get a link for a specific file name, but now I have a problem if there are 2+ files with the same name in Google Drive.
For example, I want the link for this file: G:\RootFolder\Subfolder1\Subfolder2\myfile.txt, but there is another file with the same name at G:\RootFolder\Subfolder3\Subfolder4\Subfolder5\myfile.txt. How can I get the link only for G:\RootFolder\Subfolder1\Subfolder2\myfile.txt?
from Google import Create_Service

CLIENT_SECRET_FILE = 'client-secret.json'
API_NAME = 'drive'
API_VERSION = 'v3'
SCOPES = ['https://www.googleapis.com/auth/drive']

service = Create_Service(CLIENT_SECRET_FILE, API_NAME, API_VERSION, SCOPES)

# Update Sharing Setting
file_id = '<file id>'
request_body = {
    'role': 'reader',
    'type': 'anyone'
}
response_permission = service.permissions().create(
    fileId=file_id,
    body=request_body
).execute()
print(response_permission)

# Print Sharing URL
response_share_link = service.files().get(
    fileId=file_id,
    fields='webViewLink'
).execute()
print(response_share_link)

# Remove Sharing Permission
service.permissions().delete(
    fileId=file_id,
    permissionId='anyoneWithLink'
).execute()
I managed to create a script that works for me.
Packages: pip install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib
from __future__ import print_function

import argparse
import os.path

from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError

# According to the guide https://developers.google.com/drive/api/quickstart/python
# TODO: First time - Create "credentials.json" file https://developers.google.com/workspace/guides/create-credentials#oauth-client-id
# TODO: First time - Enable the Google Drive API https://developers.google.com/drive/api/guides/enable-drive-api


def is_folder_name_in_parents(service, parents, folder_name):
    for parent_id in parents:
        response = service.files().get(fileId=parent_id, fields='name').execute()
        if folder_name == response.get("name"):
            return parent_id
    return None


def is_correct_file_path(service, folder_path, parents, root_folder_name, root_folder_id):
    folder_name = os.path.basename(folder_path)
    if folder_name == root_folder_name and root_folder_id in parents:
        return True
    parent_id = is_folder_name_in_parents(service=service, parents=parents, folder_name=folder_name)
    if not parent_id:
        return False
    response = service.files().get(fileId=parent_id, fields='parents').execute()
    new_parents = response.get("parents")
    return is_correct_file_path(service=service,
                                folder_path=os.path.dirname(folder_path),
                                parents=new_parents,
                                root_folder_name=root_folder_name,
                                root_folder_id=root_folder_id)


def get_sharing_link_by_path(root_folder_name, root_folder_id, filepath):
    """Prints and returns the web view link of the file at the given path, if found."""
    # If modifying these scopes, delete the file token.json.
    SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly']
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first time.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())

    try:
        service = build('drive', 'v3', credentials=creds)
        filename = os.path.basename(filepath)
        folder_path = os.path.dirname(filepath)
        page_token = None
        while True:
            response = service.files().list(
                q=f"name='{filename}'",
                spaces='drive',
                fields='nextPageToken, files(name, webViewLink, parents)',
                pageToken=page_token
            ).execute()
            print(f"There are {len(response.get('files', []))} results in Google Drive for: {filename}")
            for file in response.get('files', []):
                if "parents" in file.keys():
                    if is_correct_file_path(service=service,
                                            folder_path=folder_path,
                                            parents=file["parents"],
                                            root_folder_name=root_folder_name,
                                            root_folder_id=root_folder_id):
                        if 'webViewLink' in file.keys():
                            print(f"File path: {filename}\nWeb View Link: {file['webViewLink']}")
                            return file['webViewLink']
                        print(f"Web view link for this file not found: {filepath}")
                        return None
            page_token = response.get('nextPageToken', None)
            if page_token is None:
                print(f"File not found: {filepath}")
                return None
    except HttpError as error:
        # TODO(developer) - Handle errors from drive API.
        print(f'An error occurred: {error}')
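The script above searches by name and then walks each hit's parents bottom-up. As an alternative, here is a minimal hedged sketch that resolves the path top-down from a known root folder id; quote escaping in names and multiple matches at the same level are not handled:

def get_file_id_by_path(service, root_folder_id, relative_path):
    """Walk relative_path (e.g. 'Subfolder1/Subfolder2/myfile.txt') down from root_folder_id."""
    parent_id = root_folder_id
    parts = relative_path.replace('\\', '/').strip('/').split('/')
    for i, part in enumerate(parts):
        query = f"name='{part}' and '{parent_id}' in parents and trashed=false"
        if i < len(parts) - 1:
            # intermediate components must be folders
            query += " and mimeType='application/vnd.google-apps.folder'"
        response = service.files().list(q=query, spaces='drive',
                                        fields='files(id, name)').execute()
        files = response.get('files', [])
        if not files:
            return None  # this path component was not found under the current parent
        parent_id = files[0]['id']  # descend into the first match
    return parent_id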
This question already has answers here:
Google Drive API: How to create a file in appDataFolder?
(2 answers)
Closed 1 year ago.
I have a small script copied from Google. It is supposed to upload a file to my appDataFolder on Google Drive.
When run for the first time, it asks for authentication and I grant it permission.
import os.path

from googleapiclient.http import MediaFileUpload
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials

# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/drive.appdata']


def main():
    """Shows basic usage of the Drive v3 API.
    Prints the names and ids of the first 10 files the user has access to.
    """
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())

    # Create the service
    service = build('drive', 'v3', credentials=creds)

    file_metadata = {
        'title': 'token_lol.json',
        'parents': [{
            'id': 'appDataFolder'
        }]
    }
    media = MediaFileUpload('token_lol.json',
                            mimetype='application/json',
                            resumable=True)
    service.files().create(body=file_metadata,
                           media_body=media,
                           fields='id').execute()
    #print 'File ID: %s' % file.get('id')


if __name__ == '__main__':
    main()
Then, when I try to upload the file, I get this error:
HttpError(resp, content, uri=self.uri)
googleapiclient.errors.HttpError:
<HttpError 403 when requesting https://www.googleapis.com/upload/drive/v3/files?fields=id&alt=json&uploadType=resumable returned "The user does not have sufficient permissions for this file.". Details: "[{'domain': 'global', 'reason': 'insufficientFilePermissions', 'message': 'The user does not have sufficient permissions for this file.'}]">
It only works when I use this scope: https://www.googleapis.com/auth/drive
But according to the Google documentation here, it should work with this scope: https://www.googleapis.com/auth/drive.appdata
So what is the problem?
I found the answer here
You need to add these 2 SCOPES together:
SCOPES = ['https://www.googleapis.com/auth/drive.appdata','https://www.googleapis.com/auth/drive.file']
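Putting the fix together, a minimal sketch of the upload with both scopes (remember to delete token.json so the new scopes are actually granted). It also uses the v3-style metadata keys, i.e. 'name' and a plain list of parent ids, rather than the v2 'title'/'parents' dict form from the question:

SCOPES = ['https://www.googleapis.com/auth/drive.appdata',
          'https://www.googleapis.com/auth/drive.file']

file_metadata = {
    'name': 'token_lol.json',
    'parents': ['appDataFolder']
}
media = MediaFileUpload('token_lol.json',
                        mimetype='application/json',
                        resumable=True)
file = service.files().create(body=file_metadata,
                              media_body=media,
                              fields='id').execute()
print('File ID: %s' % file.get('id'))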
import pickle
import os.path
import win32api
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from datetime import date

# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly']

today = date.today()
# current date
d1 = today.strftime("%Y/%m/%d")


def main():
    """Shows basic usage of the Drive v3 API.
    Prints the names and ids of the first 10 files the user has access to.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    # if os.path.exists('token.pickle'):
    #     with open('token.pickle', 'rb') as token:
    #         creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    service = build('drive', 'v3', credentials=creds)

    # Call the Drive v3 API
    results = service.files().list(
        q="mimeType='application/vnd.google-apps.folder'", spaces="drive",
        pageSize=10, fields="nextPageToken, files(id, name)").execute()
    items = results.get('files', [])

    file_metadata = {
        'name': d1,
        'mimeType': 'application/vnd.google-apps.folder'
    }
    file = service.files().create(body=file_metadata, fields='id').execute()

    if not items:
        print('No files found.')
    else:  # start things here hehe
        print(items)
        print("d1 =", d1)
        print('Folder ID: %s' % file.get('id'))


if __name__ == '__main__':
    main()
Yes, I tried to delete the pickle file and run the app again. No, commenting out the creation of the pickle file didn't help. When I run the Python file, it runs for about 30 seconds with no response, followed by a pile of random errors that end in the error mentioned above.
After a day, restarting my PC, and trying to delete the pickle file (again), it kind of just worked.
Every day I download many PDF files which are stored on my local PC. To store all of them, I need to create a folder on Google Drive each day and put all the PDF files under the current date's folder.
My challenge here is that I have successfully written the Python code to create the folder using the Drive API v3, but I am stuck on uploading all the files into the folder just created. Below is the code I use to create a folder named after the current date:
import pickle
import os.path, time
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from googleapiclient.http import MediaFileUpload
from datetime import date

SCOPES = ['https://www.googleapis.com/auth/drive.file']
CLIENT_SECRET_FILE = 'e:\\Python Programs\\credentials.json'

creds = None
if os.path.exists('token.pickle'):
    with open('token.pickle', 'rb') as token:
        creds = pickle.load(token)
if not creds or not creds.valid:
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRET_FILE, SCOPES)
        creds = flow.run_local_server(port=0)
    with open('token.pickle', 'wb') as token:
        pickle.dump(creds, token)

# build the Drive service used by the calls below
service = build('drive', 'v3', credentials=creds)

# main folder ID under which we are going to create subfolder
parents_ID = '19nKlHGCypKPr40f3vjaEq22kgVkS7OCE'

# creates sub folder under main folder ID
fldr_name = date.today().strftime('%d%b%Y')
mimetype = 'application/vnd.google-apps.folder'
file_metadata = {'name': fldr_name, 'parents': [parents_ID], 'mimeType': mimetype}

# with the following line I could successfully create the folder without any problem
service.files().create(body=file_metadata, fields='id').execute()

# with the following lines, I tried to get the ID of the folder that was just created
# so that I can start uploading PDF files into it. Here I am stuck.
page_token = None
response = service.files().list(q="mimeType='application/vnd.google-apps.folder' and name = '27Apr2020' and trashed = false",
                                spaces='drive', fields='nextPageToken, files(id, name)',
                                pageToken=page_token).execute()
for file in response.get('files', []):
    print(file.get('name'), file.get('id'))
time.sleep(5)
I only get a blank screen for 5 seconds and then it vanishes. Please help me upload all my files to the folder I just created.
Thanks
In your code above, the following line already returns the id of the folder just created (fields='id'):
service.files().create(body=file_metadata, fields='id').execute()
You can also return other values (name, parents, etc.) using the fields parameter while creating the folder:
service.files().create(body=file_metadata, fields='id, name, parents').execute()
So if you catch the return value in a variable like below,
folder = service.files().create(body=file_metadata, fields='id').execute()
the returned value looks like {'id': '17w2RS1H7S8no6X0oGtkieY'}.
You can then upload a file with the commands below.
file_metadata = {
    'name': "test.pdf",  # name of the file as it should appear in Drive
    'parents': [folder["id"]],
    'mimeType': 'application/pdf'
}
media = MediaFileUpload('<local path to file>', resumable=True)
file = service.files().create(body=file_metadata, media_body=media, fields='id,name').execute()
You need to import the following:
from googleapiclient.http import MediaFileUpload
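Building on that, here is a hedged sketch that uploads every PDF from a local directory into the newly created Drive folder; local_dir is a hypothetical path, and folder is the dict returned by the create() call shown above:

import os
from googleapiclient.http import MediaFileUpload

local_dir = 'e:\\Python Programs\\pdfs'  # hypothetical local folder holding the downloaded PDFs
# `folder` is the dict returned by service.files().create(..., fields='id').execute()
for filename in os.listdir(local_dir):
    if not filename.lower().endswith('.pdf'):
        continue
    pdf_metadata = {
        'name': filename,
        'parents': [folder['id']],
        'mimeType': 'application/pdf'
    }
    media = MediaFileUpload(os.path.join(local_dir, filename),
                            mimetype='application/pdf', resumable=True)
    uploaded = service.files().create(body=pdf_metadata, media_body=media,
                                      fields='id, name').execute()
    print('Uploaded %s (%s)' % (uploaded.get('name'), uploaded.get('id')))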
How do I upload to a shared drive using the Python version of Google Drive API v3?
You just need to add supportsAllDrives=True to the Files: Create request.
# In case you need background on the credential/scoping code, it was copied from
# https://developers.google.com/drive/api/v3/quickstart/python
# Basis for upload code from:
# https://developers.google.com/drive/api/v3/manage-uploads

import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from googleapiclient.http import MediaFileUpload

# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/drive']

credentials_json = 'oauth-credentials.json'
credentials_pickle = 'token.pickle'


def get_creds():
    creds = None
    # Obtain OAuth token / user authorization.
    if os.path.exists(credentials_pickle):
        with open(credentials_pickle, 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                credentials_json, SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open(credentials_pickle, 'wb') as token:
            pickle.dump(creds, token)
    return creds


def main():
    creds = get_creds()

    # Build the drive service.
    drive_service = build('drive', 'v3', credentials=creds)

    # Get the drive ID of the first shared drive. You can introspect the
    # 'results' dict here to get the right shared drive if it's not the first
    # one.
    results = drive_service.drives().list(pageSize=10).execute()
    shared_drive_id = results['drives'][0]['id']

    # Create the request metadata, letting the Drive API know what it's receiving.
    # In this example, we place the image inside the shared drive root folder,
    # which has the same ID as the shared drive itself, but we could instead
    # choose the ID of a folder inside the shared drive.
    file_metadata = {
        'name': 'wakeupcat.jpg',
        'mimeType': 'image/jpeg',
        'parents': [shared_drive_id]}

    # Now create the media file upload object and tell it what file to upload,
    # in this case, "wakeupcat.jpg"
    media = MediaFileUpload('/path/to/wakeupcat.jpg', mimetype='image/jpeg')

    # Upload the file, making sure supportsAllDrives=True to enable uploading
    # to shared drives.
    f = drive_service.files().create(
        body=file_metadata, media_body=media, supportsAllDrives=True).execute()
    print("Created file '%s' id '%s'." % (f.get('name'), f.get('id')))


if __name__ == '__main__':
    main()
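A related note: reading files back out of a shared drive also needs the all-drives flags on files().list, together with corpora and driveId. A minimal sketch, assuming the drive_service and shared_drive_id built inside main() above:

# Assuming `drive_service` and `shared_drive_id` as built in main() above.
results = drive_service.files().list(
    corpora='drive',
    driveId=shared_drive_id,
    includeItemsFromAllDrives=True,
    supportsAllDrives=True,
    fields='files(id, name)').execute()
for f in results.get('files', []):
    print('%s (%s)' % (f.get('name'), f.get('id')))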