List of files in a folder / Drive API PyDrive - Python

I can't change to a folder and view what's inside. This lists the root:
drive = GoogleDrive(gauth)
file_list = drive.ListFile({'q': "'root' in parents and trashed=false"}).GetList()
for file1 in file_list:
    print('File %s\n' % file1)
To list a subfolder I used the following code:
file_list = drive.ListFile({'q': "'/test1' in parents and trashed=false"}).GetList()
but it doesn't work.
Here is the documentation of the function I used: https://developers.google.com/drive/v3/reference/files/list

You have to insert the folder ID instead of its path. You can get the ID in different ways:
Using PyDrive: If you list all folders in root, you can list all folder names with their respective IDs.
Using the Web interface: Navigate into the folder you want to get the ID from. Look at the URL, it has this format: drive.google.com/drive/u/0/folders/<folder ID>
Now insert the folder ID into the request.
file_list = drive.ListFile({'q': "'<folder ID>' in parents and trashed=false"}).GetList()
FYI: Google Drive is a tag-based (also called semantic) file system, which, for example, allows a file to be in several places at the same time (just by adding IDs of folders to the file's parents property).
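For example, a minimal PyDrive sketch of multi-parenting (both IDs are placeholders): it binds to an existing file, fetches its parents, and appends a second folder ID. Note that Drive has since moved to single-parent files in newer API versions, so treat this as v2-era behavior (which is what PyDrive wraps).
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive

gauth = GoogleAuth()
gauth.LocalWebserverAuth()
drive = GoogleDrive(gauth)

# Bind to an existing file by ID (placeholder), fetch its current parents,
# and append another folder ID so the file appears in both folders at once.
file1 = drive.CreateFile({'id': '<file ID>'})
file1.FetchMetadata(fields='parents')
file1['parents'].append({'id': '<second folder ID>'})
file1.Upload()  # patch the metadata server-side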

Below are two full working examples for printing a Google Drive file structure with PyDrive - follow the comments in the code.
Example 1 - basic usage of PyDrive, printing top-level folders
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive

gauth = GoogleAuth()
gauth.LocalWebserverAuth()
drive = GoogleDrive(gauth)

# 1) Choose your starting point by inserting the folder name
folder_title = "your-starting-point-folder"
folder_id = ''

# 2) Retrieve the folder id - start searching from root
file_list = drive.ListFile({'q': "'root' in parents and trashed=false"}).GetList()
for file in file_list:
    if file['title'] == folder_title:
        folder_id = file['id']
        break

# 3) Build the query string dynamically
query = "'" + folder_id + "' in parents and trashed=false"

# 4) Start iterating over the files
file_list = drive.ListFile({'q': query}).GetList()
for file in file_list:
    print('title: %s, id: %s' % (file['title'], file['id']))
Example 2 - recursively printing the whole file structure
I used a tree visualization library named treelib.
from treelib import Node, Tree
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive

gauth = GoogleAuth()
gauth.LocalWebserverAuth()
drive = GoogleDrive(gauth)

### Some basic helper functions ###
def get_children(root_folder_id):
    query = "'" + root_folder_id + "' in parents and trashed=false"
    file_list = drive.ListFile({'q': query}).GetList()
    return file_list

def get_folder_id(root_folder_id, root_folder_title):
    file_list = get_children(root_folder_id)
    for file in file_list:
        if file['title'] == root_folder_title:
            return file['id']

def add_children_to_tree(tree, file_list, parent_id):
    for file in file_list:
        tree.create_node(file['title'], file['id'], parent=parent_id)
        # For debugging
        # print('parent: %s, title: %s, id: %s' % (parent_id, file['title'], file['id']))

### Go down the tree until you reach a leaf ###
def populate_tree_recursively(tree, parent_id):
    children = get_children(parent_id)
    add_children_to_tree(tree, children, parent_id)
    if len(children) > 0:
        for child in children:
            populate_tree_recursively(tree, child['id'])

### Create the tree and the top-level node ###
def main():
    root_folder_title = "my-top-level-root-folder-name"
    root_folder_id = get_folder_id("root", root_folder_title)
    tree = Tree()
    tree.create_node(root_folder_title, root_folder_id)
    populate_tree_recursively(tree, root_folder_id)
    tree.show()

if __name__ == "__main__":
    main()

Please refer to the Google Drive API docs - https://developers.google.com/drive/api/v2/search-files
Replace the search term based on your requirement.
Example: searching for a file/folder by name:
file_list = drive.ListFile({'q': " title='<folder_name>' "}).GetList()
(the single quotes are part of the API request)
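If the name itself contains a single quote, escape it with a backslash, per the Drive query syntax. A small sketch (the folder name is hypothetical):
# Escape single quotes inside the title before building the query
folder_name = "John's data"  # hypothetical folder name
query = "title = '%s' and trashed=false" % folder_name.replace("'", "\\'")
file_list = drive.ListFile({'q': query}).GetList()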

I think https://stackoverflow.com/a/47764444/1003629 solves a similar problem and should help you. The problem here is the permission scope for the required folders, which by default covers only the root.
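If so, one way to request a broader scope in PyDrive is to override the oauth_scope setting before authenticating. A hedged sketch - the settings key mirrors PyDrive's settings.yaml format, so treat it as an assumption and verify against your PyDrive version:
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive

gauth = GoogleAuth()
# Ask for the full Drive scope so folders the app didn't create are visible
# (assumption: 'oauth_scope' is the settings key, as in settings.yaml)
gauth.settings['oauth_scope'] = ['https://www.googleapis.com/auth/drive']
gauth.LocalWebserverAuth()
drive = GoogleDrive(gauth)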

Related

How to skip folder creation if the folder already exists in google drive? (PyDrive)

def CreateGDriveFolder(self):
    today = str(date.today())
    print("Todays Date: " + today)
    gauth = GoogleAuth()
    self.drive = GoogleDrive(gauth)
    folder_name = today
    folder = self.drive.CreateFile({'title': folder_name,
                                    'mimeType': 'application/vnd.google-apps.folder',
                                    'parents': [{'id': '1k1kFXJa1MTlAQrkPE0uKW-kO3Hj1Bjp5'}]})
    time.sleep(2)
    print("Folder Created")
    folder.Upload()
    # Get folder info and print to screen
    self.foldertitle = folder['title']
    self.folderid = folder['id']
    print('title: %s, id: %s' % (self.foldertitle, self.folderid))
This is my code for folder creation when the Python script is run. The issue is that I may need to run it twice a day or so, and it then creates duplicate folders in my Drive with the same date. I would like to check whether the folder has already been created: if it has, skip creation and upload files to that folder; if it doesn't exist, create it and upload files. I've tried googling but could not find anything that suits my issue.
Thanks in advance.
You can use the ListFile() method to check whether a folder with the same name already exists:
today = str(date.today())
print("Todays Date: " + today)
gauth = GoogleAuth()
self.drive = GoogleDrive(gauth)
folder_name = today
# Query for a folder with this title under the parent folder
query = ("title='" + folder_name + "' and "
         "'1k1kFXJa1MTlAQrkPE0uKW-kO3Hj1Bjp5' in parents and trashed=false")
folder_list = self.drive.ListFile({'q': query}).GetList()
if folder_list:
    # Folder exists, skip creation and upload into folder_list[0]
    pass
else:
    # Folder does not exist, create it and upload files
    pass
In your script, how about the following modification?
Modified script:
def CreateGDriveFolder(self):
    today = str(date.today())
    print("Todays Date: " + today)
    gauth = GoogleAuth()
    self.drive = GoogleDrive(gauth)
    parent_folder_id = '###'  # Please set your parent folder ID.
    folder_name = today
    # Search for an existing folder with the same title under the parent.
    file_list = self.drive.ListFile({"q": "title='" + folder_name + "' and '" + parent_folder_id + "' in parents and mimeType='application/vnd.google-apps.folder' and trashed=false"}).GetList()
    if not file_list:
        folder = self.drive.CreateFile({'title': folder_name,
                                        'mimeType': 'application/vnd.google-apps.folder',
                                        'parents': [{'id': parent_folder_id}]})
        time.sleep(2)
        print("Folder Created")
        folder.Upload()
        # Get folder info and print to screen
        self.foldertitle = folder["title"]
        self.folderid = folder["id"]
    else:
        # Get folder info from the existing folder and print to screen
        self.foldertitle = file_list[0]["title"]
        self.folderid = file_list[0]["id"]
    print("title: %s, id: %s" % (self.foldertitle, self.folderid))
In this modification, it first checks whether a folder named folder_name already exists in the Drive. When it does not exist, your original script runs; when it does exist, the information of the existing folder is returned.
References:
Getting list of files
Files: list
Search for files and folders

I can't find my uploaded files in Dropbox

I'm working on uploading some countries' admin data to my Dropbox app. Below is my code that does that:
# importing the required libraries
import dropbox, sys, os
import requests

# Get your app key and secret from the Dropbox developer website
app_key = 'qie********'
app_secret = 'qom**********'
dbx = dropbox.Dropbox('YYPRp-*******************_JzclLe-***************-3Js')

# verify if the account is connected
dbx.users_get_current_account()

# find all the folders present
for entry in dbx.files_list_folder('').entries:
    print(entry.name)

# creating a path to where all the data to be uploaded is
root_dir = "H:/WORK/Upwork/Project 7 - Python School Data Analysis/Planning and Costing Model/Updated Script/Extracted"
print("Attempting to upload...")
z = 1
for dir, dirs, files in os.walk(root_dir):
    # the first dir is the root dir itself so we skip it
    if z == 1:
        z = z + 1
        continue
    # uploading contents of the file path
    elif z > 15:
        # split the path to get the country, which is the very last item after split (-1)
        split_dir = dir.split('\\')
        folder_name = split_dir[-1]  # country name
        # creating a new folder in my Dropbox for each country
        country_name = dbx.files_create_folder('/Data/' + folder_name)
        dropbox_folder = country_name.path_display  # obtaining the name of the folder
        folder_split = dropbox_folder.split('/')  # splitting the path to get root folder and created folder
        folder_created = folder_split[-1]  # created/country folder
        dest_path = os.path.join('/Data/', folder_created)  # joining the two to make a full path
        print(dest_path)
        # looping through the files in each country's folder
        for file in files:
            try:
                # getting the path of each file in the folder
                file_path = os.path.join(dir, file)
                print(f'Uploading to {folder_name} in Dropbox')
                f = open(file_path, 'rb')
                connect = '/'  # will be used to separate the destination path and the file
                # this is where the file will be saved
                d_path = os.path.join(dest_path, connect, file)
                dbx.files_upload(f.read(), d_path, mode=dropbox.files.WriteMode.overwrite)
                print(dest_path)
                print(file_path)
                print(dir)
                print('\n')
            except Exception as err:
                print("Error!", file, err)
    z = z + 1
The code runs successfully with no errors, and it creates the folders for each of the countries. Note that each of my countries' folders holds several files (a maximum of 15). But when I visit my Dropbox app, the folders are there with nothing inside them. There are no files at all, and I get the notification message:
This Folder is Empty
I have given it over an hour but nothing changes. Also note that I configured all the permissions necessary for writing files and folders. Could there be something I may have done wrong? I will appreciate any assistance. Thanks!
After some help from Greg, I was able to find where the issue was. The files_upload function expects a full destination path as a parameter. Because os.path.join discards everything before a component that is absolute, os.path.join(dest_path, connect, file) with connect = '/' collapsed to just '/<file name>', a path that doesn't point inside the country folders. I made it work by prepending the destination folder again: d_path = dest_path + d_path
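A quick demonstration of the path bug (the values are hypothetical):
import os

# os.path.join discards everything before a component that is absolute,
# so inserting '/' resets the joined path to the root.
dest_path = '/Data/Kenya'  # hypothetical country folder
file = 'admin.csv'         # hypothetical file name
print(os.path.join(dest_path, '/', file))  # -> '/admin.csv' (folder lost)
print(dest_path + '/' + file)              # -> '/Data/Kenya/admin.csv'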
Here is the full working code:
# importing the required libraries
import dropbox, sys, os
import requests

# Get your app key and secret from the Dropbox developer website
app_key = 'qie********'
app_secret = 'qom**********'
dbx = dropbox.Dropbox('YYPRp-*******************_JzclLe-***************-3Js')

# verify if the account is connected
dbx.users_get_current_account()

# find all the folders present
for entry in dbx.files_list_folder('').entries:
    print(entry.name)

# creating a path to where all the data to be uploaded is
root_dir = "H:/WORK/Upwork/Project 7 - Python School Data Analysis/Planning and Costing Model/Updated Script/Extracted"
print("Attempting to upload...")
z = 1
for dir, dirs, files in os.walk(root_dir):
    # the first dir is the root dir itself so we skip it
    if z == 1:
        z = z + 1
        continue
    # uploading contents of the file path
    elif z > 15:
        # split the path to get the country, which is the very last item after split (-1)
        split_dir = dir.split('\\')
        folder_name = split_dir[-1]  # country name
        # creating a new folder in my Dropbox for each country
        country_name = dbx.files_create_folder('/Data/' + folder_name)
        dropbox_folder = country_name.path_display  # obtaining the name of the folder
        folder_split = dropbox_folder.split('/')  # splitting the path to get root folder and created folder
        folder_created = folder_split[-1]  # created/country folder
        dest_path = os.path.join('/Data/', folder_created)  # joining the two to make a full path
        print(dest_path)
        # looping through the files in each country's folder
        for file in files:
            try:
                # getting the path of each file in the folder
                file_path = os.path.join(dir, file)
                print(f'Uploading to {folder_name} in Dropbox')
                f = open(file_path, 'rb')
                connect = '/'  # will be used to separate the destination path and the file
                # this is where the file will be saved
                d_path = os.path.join(dest_path, connect, file)
                d_path = dest_path + d_path  # the fix: join() above collapsed the path to '/<file>'
                dbx.files_upload(f.read(), d_path, mode=dropbox.files.WriteMode.overwrite)
                print(dest_path)
                print(file_path)
                print(dir)
                print('\n')
            except Exception as err:
                print("Error!", file, err)
    z = z + 1

List of files in a Google Drive folder with Python

I've got the exact same question as the one asked in this post: List files and folders in a google drive folder
I can't figure out from the Google Drive REST API documentation how to get a list of files in a Google Drive folder.
You can look here for an example of how to list files in Drive: https://developers.google.com/drive/api/v3/search-files . You need to construct a query that lists the files in a folder: use
q = "'1234' in parents"
where 1234 is the ID of the folder that you want to list. You can modify the query to list all the files of a particular type (such as all jpeg files in the folder), etc.
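For instance, a hedged sketch with the v3 Python client ('1234' stands in for the real folder ID, and drive stands for the authenticated service object):
# List only the JPEG files inside the folder
results = drive.files().list(
    q="'1234' in parents and mimeType='image/jpeg' and trashed=false",
    fields="nextPageToken, files(id, name)",
).execute()
for f in results.get('files', []):
    print(f['name'], f['id'])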
Here's a hacky-yet-successful solution. This actually gets all the files from a particular Google Drive folder (in this case, a folder called "thumbnails"). I needed to get (not just list) all the files from a particular folder and perform image adjustments on them, so I used this code:
import io

import cv2
import numpy as np
from googleapiclient.http import MediaIoBaseDownload

# First, get the folder ID by querying by mimeType and name
folderId = drive.files().list(q="mimeType = 'application/vnd.google-apps.folder' and name = 'thumbnails'", pageSize=10, fields="nextPageToken, files(id, name)").execute()
# this gives us a list of all folders with that name
folderIdResult = folderId.get('files', [])
# however, we know there is only 1 folder with that name, so we just get the id of the 1st item in the list
id = folderIdResult[0].get('id')

# Now, using the folder ID gotten above, we get all the files from
# that particular folder
results = drive.files().list(q="'" + id + "' in parents", pageSize=10, fields="nextPageToken, files(id, name)").execute()
items = results.get('files', [])

# Now we can loop through each file in that folder, and do whatever (in this
# case, download them and open them as images in OpenCV)
for f in range(0, len(items)):
    fId = items[f].get('id')
    fileRequest = drive.files().get_media(fileId=fId)
    fh = io.BytesIO()
    downloader = MediaIoBaseDownload(fh, fileRequest)
    done = False
    while done is False:
        status, done = downloader.next_chunk()
    fh.seek(0)
    fhContents = fh.read()
    baseImage = cv2.imdecode(np.fromstring(fhContents, dtype=np.uint8), cv2.IMREAD_COLOR)
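One caveat: pageSize=10 caps each response at ten results, so large folders need paging. A hedged sketch of looping with nextPageToken, reusing the id variable from above:
# Page through every file in the folder instead of stopping at ten results
items = []
page_token = None
while True:
    resp = drive.files().list(
        q="'" + id + "' in parents",
        pageSize=100,
        fields="nextPageToken, files(id, name)",
        pageToken=page_token,
    ).execute()
    items.extend(resp.get('files', []))
    page_token = resp.get('nextPageToken')
    if page_token is None:
        break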
See the API for the available functions...
You can search for files with the Drive API files.list method. You can call files.list without any parameters, which returns all files on the user's drive. By default, files.list only returns a subset of properties for a resource; if you want more properties returned, use the fields parameter. To make your search more specific, you can combine several operators on each query property in the query string q.
# Import PyDrive and associated libraries.
# This only needs to be done once per notebook.
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
from google.colab import auth
from oauth2client.client import GoogleCredentials

# Authenticate and create the PyDrive client.
# This only needs to be done once per notebook.
auth.authenticate_user()
gauth = GoogleAuth()
gauth.credentials = GoogleCredentials.get_application_default()
drive = GoogleDrive(gauth)

# List files whose title contains 'CV'.
#
# Search query reference:
# https://developers.google.com/drive/v2/web/search-parameters
listed = drive.ListFile({'q': "title contains 'CV'"}).GetList()
for file in listed:
    print('title {}, id {}'.format(file['title'], file['id']))
Easiest solution if you are working with Google Colab.
Connect to your Drive in the Colab notebook:
from google.colab import drive
drive.mount('/content/drive')
Use the special '!' prefix with the ls command to list the files in the Drive folder path you specify.
!ls PATH OF YOUR DRIVE FOLDER
Example: !ls drive/MyDrive/Folder1/Folder2/

Python Google Drive API Slow Multi-File Upload

I'm trying to upload a directory and all of its contents to Google Drive. I can accomplish this in Python fine and the files upload, but it goes one file at a time, with a request to the API for each file, and it's very slow. I'm practicing with a small directory for now, but when I have 2000 files in the future it will take for-ev-er. Is there a faster way I can accomplish it, maybe with a single request instead of a request for each file?
Thanks
Here is my main program:
# user wants to upload to Google Drive HOME-SYNC
print("4: upload to HOME-SYNC on Google Drive")
# assuming HOME-SYNC is empty, for now first step is copying directory
# structure on local machine to HOME-SYNC
# in the future need to ask if want to backup HOME-SYNC first, and if
# so back it up
# then need to empty it

# specify the start path
start_path = "/home/geoff/HOME-SYNC"
start_path = ff.abs_path_from_user_input(start_path)
print("START PATH")
print(start_path)

# create a directory object with start path
start_directory = Directory(start_path)

# create a google drive service resource
google_service = ff.create_google_token()

# create the directory tree on google drive
# '1YOTDKowprC2Paq95X-MIKSUG_vpuViQw' is the id of HOME-SYNC on
# Google Drive
start_directory.create_google_drive_tree(
    'HOME-SYNC',
    google_service,
    '1YOTDKowprC2Paq95X-MIKSUG_vpuViQw')
print("FINISHED")
Here is my Directory Class:
class Directory():
    def __init__(self, directory_path):
        """Initialize directory"""
        self.directory_path = directory_path
        # print("__INIT__ DIR PATH=" + self.directory_path)

    def create_google_drive_tree(
            self,
            google_drive_folder="",
            google_service=False,
            parent_dir_id=''):
        """Creates the same tree in google drive that is in the Directory
        object, with 'google_drive_folder' as the ROOT directory
        (== Directory obj)"""
        # google_drive_folder = name of the current directory
        # google_service = Google API resource
        # parent_dir_id = id of the parent dir on Google Drive
        # create the files_and_dirs list in the current directory
        files_and_dirs = \
            [files_and_dirs for files_and_dirs in listdir(self.directory_path)]
        print(files_and_dirs)
        # sort the files and dirs so they're alphabetical and files come first
        files_and_dirs = \
            ff.sort_files_and_dirs(self.directory_path, files_and_dirs)
        # loop through files and directories, checking if each is a file or dir;
        # if it's a dir, make a recursive call by creating a new Directory
        # instance and listing its contents as well
        for fd in files_and_dirs:
            abs_path = ff.abs_path_from_local_dir(self.directory_path, fd)
            if ff.check_file_or_dir(abs_path) == "file":
                # it's a file
                # need to copy the file to Google Drive
                file_metadata = {
                    'name': fd,
                    'parents': [parent_dir_id]
                }
                media = MediaFileUpload(abs_path)
                file = google_service.files().create(body=file_metadata,
                                                     media_body=media,
                                                     fields='id').execute()
            else:
                # it's a directory
                # create the directory in google drive
                file_metadata = {
                    'name': fd,
                    'mimeType': 'application/vnd.google-apps.folder',
                    'parents': [parent_dir_id]
                }
                file = google_service.files().create(body=file_metadata,
                                                     fields='id').execute()
                # create a new Directory obj for the subdirectory of the
                # current Directory
                sub_dir = Directory(abs_path)
                # Recursively build the tree inside the subdirectory
                sub_dir.create_google_drive_tree(
                    fd,
                    google_service,
                    file.get('id'))
and I have utility functions in file_functions.py
def abs_path_from_user_input(start_path):
    if start_path[:1] == '/':
        path_type = "absolute"
    else:
        path_type = "relative"
    if path_type != "absolute":
        start_path = realpath(start_path)
    return start_path

def abs_path_from_local_dir(directory, content):
    abs_path = realpath(join(directory, content))
    return abs_path

def sort_files_and_dirs(curr_path, files_and_dirs):
    files = []
    dirs = []
    for file_dir in files_and_dirs:
        abs_path = abs_path_from_local_dir(curr_path, file_dir)
        if check_file_or_dir(abs_path) == "file":
            files.append(file_dir)
        else:
            dirs.append(file_dir)
    files.sort()
    dirs.sort()
    combined = []
    for f in files:
        combined.append(f)
    for d in dirs:
        combined.append(d)
    return combined

def check_file_or_dir(path):
    if not exists(path):
        print("ERROR: PATH IS NOT VALID: " + path)
        return False
    else:
        if isfile(path):
            return "file"
        else:
            return "dir"

def is_valid_dir(path):
    if exists(path):
        # the path is a valid path
        if not isfile(path):
            # it's a valid directory
            return True
        else:
            # it's a valid file, but we want directories
            return False
    else:
        # the path doesn't exist
        return False

def create_google_token():
    store = file.Storage('credentials.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
        creds = tools.run_flow(flow, store)
    # the service resource is the connection to google drive
    service = build('drive', 'v3', http=creds.authorize(Http()))
    return service
it goes one file at a time with a request to the API for each file, and it's very slow.
Uploading does take time.
Is there a faster way I can accomplish it, maybe with a single request instead of a request for each file?
There is no batch method for uploading files; you will need to upload your files one at a time, as you are doing now. Remember there is a quota limit on this, so you're only going to be able to upload so fast. You could consider multi-threading this and running a version of your script for each of the files to upload. However, I wouldn't advise it: it's the same user, so you're going to end up with quota and flooding errors.
Note: you can batch the metadata upload, but that's really not going to solve your problem.
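If part of the cost is creating the folder tree itself, those metadata-only requests can go through the client library's batch helper. A hedged sketch (the helper function and its names are illustrative, not from the question's code):
def batch_create_folders(service, names, parent_id):
    """Create several empty folders in one HTTP round trip.
    Media uploads cannot be batched, so file contents still go one by one."""
    created = {}

    def callback(request_id, response, exception):
        # request_id is the folder name passed to batch.add below
        if exception is None:
            created[request_id] = response.get('id')

    batch = service.new_batch_http_request(callback=callback)
    for name in names:
        body = {
            'name': name,
            'mimeType': 'application/vnd.google-apps.folder',
            'parents': [parent_id],
        }
        batch.add(service.files().create(body=body, fields='id'),
                  request_id=name)
    batch.execute()
    return created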

Pydrive error: No downloadLink/exportLinks for mimetype found in metadata

I am trying to download a simple text file from google drive automatically with the pydrive module for python. I keep getting the following error:
Traceback (most recent call last):
  File "C:\GIS\AVGOPS\Scripts\GoogleDrive_Test.py", line 20, in <module>
    item.GetContentFile(r'C:\Users\pfilyer\Desktop\googedrive\\' + item['title'])
  File "C:\Python27\ArcGIS10.4\lib\site-packages\pydrive\files.py", line 210, in GetContentFile
    self.FetchContent(mimetype, remove_bom)
  File "C:\Python27\ArcGIS10.4\lib\site-packages\pydrive\files.py", line 43, in _decorated
    return decoratee(self, *args, **kwargs)
  File "C:\Python27\ArcGIS10.4\lib\site-packages\pydrive\files.py", line 265, in FetchContent
    'No downloadLink/exportLinks for mimetype found in metadata')
FileNotDownloadableError: No downloadLink/exportLinks for mimetype found in metadata
Any suggestions?
import pydrive
from pydrive.drive import GoogleDrive
from pydrive.auth import GoogleAuth

gauth = GoogleAuth()
gauth.LoadCredentialsFile(r"C:\Users\XXXXX\.credentials\drive-python-quickstart.json")
drive = GoogleDrive(gauth)
print "Auth Success"

folder_id = '0BxbuUXtrs7adSFFYMG0zS3VZNFE'
lister = drive.ListFile({'q': "'%s' in parents" % folder_id}).GetList()
for item in lister:
    print item['title']
    item.GetContentFile(r'C:\Users\XXXXX\Desktop\googedrive\\' + item['title'])
You are possibly trying to download a Google document, spreadsheet, or something else that is not an ordinary file.
I got exactly the same error a few moments ago, when I tried to download a file of mimeType application/vnd.google-apps.document. The document has to be exported to another format before downloading.
Check this:
import pydrive
from pydrive.drive import GoogleDrive
from pydrive.auth import GoogleAuth

gauth = GoogleAuth()
gauth.LoadCredentialsFile(r"C:\Users\XXXXX\.credentials\drive-python-quickstart.json")
drive = GoogleDrive(gauth)
print "Auth Success"

folder_id = '0BxbuUXtrs7adSFFYMG0zS3VZNFE'
lister = drive.ListFile({'q': "'%s' in parents" % folder_id}).GetList()

mimetypes = {
    # Drive Document files as PDF
    'application/vnd.google-apps.document': 'application/pdf',
    # Drive Sheets files as MS Excel files.
    'application/vnd.google-apps.spreadsheet': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
    # see https://developers.google.com/drive/v3/web/mime-types
}

for item in lister:
    # this tells you which mimeType the file you're trying to download has
    print('title: %s, mimeType: %s' % (item['title'], item['mimeType']))
    download_mimetype = None
    if item['mimeType'] in mimetypes:
        # Google-native files must be exported to a regular format first
        download_mimetype = mimetypes[item['mimeType']]
        item.GetContentFile(r'C:\Users\XXXXX\Desktop\googedrive\\' + item['title'],
                            mimetype=download_mimetype)
    else:
        item.GetContentFile(r'C:\Users\XXXXX\Desktop\googedrive\\' + item['title'])
This should work.
When you upload documents to Google Drive, make sure the 'Convert documents to Google Docs editor format' option is unchecked in the Google Drive settings. This ensures that your uploaded file keeps the same format as the one on your local machine (e.g., .csv, .txt, etc.). For me, this worked.
In my case it was because there were multiple files with the same name and extension but different IDs; I removed one and the code worked.
Built from the examples in this thread, here is a script which walks through all subfolders of a gdrive folder and downloads the folder structure and all files within. It takes care of gdrive docs, spreadsheets and presentations, and logs if a file fails :)
from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
import os

gauth = GoogleAuth()
gauth.LocalWebserverAuth()
drive = GoogleDrive(gauth)

MIMETYPES = {
    # Drive Document files as MS docx
    'application/vnd.google-apps.document': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
    # Drive Sheets files as MS Excel files.
    'application/vnd.google-apps.spreadsheet': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
    # Drive presentation as MS pptx
    'application/vnd.google-apps.presentation': 'application/vnd.openxmlformats-officedocument.presentationml.presentation'
    # see https://developers.google.com/drive/v3/web/mime-types
}

EXTENSIONS = {
    'application/vnd.google-apps.document': '.docx',
    'application/vnd.google-apps.spreadsheet': '.xlsx',
    'application/vnd.google-apps.presentation': '.pptx'
}

f = open("failed.txt", "w+")
folder_id = '<folder_id_from_browser_address_bar>'
root = 'drive_download'
os.mkdir(root)

def escape_fname(name):
    return name.replace('/', '_')

def search_folder(folder_id, root):
    file_list = drive.ListFile({'q': "'%s' in parents and trashed=false" % folder_id}).GetList()
    for file in file_list:
        # print('title: %s, id: %s, kind: %s' % (file['title'], file['id'], file['mimeType']))
        if file['mimeType'].split('.')[-1] == 'folder':
            foldername = escape_fname(file['title'])
            create_folder(root, foldername)
            search_folder(file['id'], '{}{}/'.format(root, foldername))
        else:
            download_mimetype = None
            filename = escape_fname(file['title'])
            filename = '{}{}'.format(root, filename)
            try:
                print('DOWNLOADING:', filename)
                if file['mimeType'] in MIMETYPES:
                    # Google-native files are exported with a matching extension
                    download_mimetype = MIMETYPES[file['mimeType']]
                    file.GetContentFile(filename + EXTENSIONS[file['mimeType']],
                                        mimetype=download_mimetype)
                else:
                    file.GetContentFile(filename)
            except Exception:
                print('FAILED')
                f.write(filename + '\n')

def create_folder(path, name):
    os.mkdir('{}{}'.format(path, escape_fname(name)))

search_folder(folder_id, root + '/')
f.close()
