I am trying to copy files from different folders under a path to my USB drive.
My source directory structure looks like this:
/user/arun/Music/Songs/
Under this I have different subdirectories:
Songs_1
Songs_2
Songs_3
The target file can be under any one of these Songs directories, for example:
Songs_1/Kid Rock/All summer long.mp3
Songs_2/Linkin Park/In the end.mp3
Right now I am constructing the source path and trying each Songs directory in its own try/except block, like this:
for album, song in song_database.iteritems():
    for s in song:
        try:
            src_dir_1 = src_dir + "/" + "Songs_1" + "/" + album + "/" + s + ".mp3"
            shutil.copy2(src_dir_1, dest_dir)
            print src_dir_1
        except IOError:
            pass
        try:
            src_dir_1 = src_dir + "/" + "Songs_2" + "/" + album + "/" + s + ".mp3"
            shutil.copy2(src_dir_1, dest_dir)
            print src_dir_1
        except IOError:
            pass
        try:
            src_dir_1 = src_dir + "/" + "Songs_3" + "/" + album + "/" + s + ".mp3"
            shutil.copy2(src_dir_1, dest_dir)
            print src_dir_1
        except IOError:
            pass
        try:
            src_dir_1 = src_dir + "/" + "Songs_4" + "/" + album + "/" + s + ".mp3"
            shutil.copy2(src_dir_1, dest_dir)
            print src_dir_1
        except IOError:
            pass
Is there a better way to do this?
Seems like a loop would be better:
for album, song in song_database.iteritems():
    for s in song:
        for sdir in 'Songs_1', 'Songs_2', 'Songs_3':
            try:
                src_dir_1 = src_dir + "/" + sdir + "/" + album + "/" + s + ".mp3"
                shutil.copy2(src_dir_1, dest_dir)
                print src_dir_1
            except IOError:
                pass
And, perhaps you would want to add a break statement if you succeed in copying the source to the destination...
As a side note, you might want to use os.path.join instead:
src_dir_1 = os.path.join(src_dir, sdir, album, s + ".mp3")
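Putting both suggestions together, a minimal sketch (assuming the same song_database, src_dir and dest_dir as in the question):

import os
import shutil

for album, songs in song_database.iteritems():
    for s in songs:
        for sdir in ('Songs_1', 'Songs_2', 'Songs_3', 'Songs_4'):
            src_path = os.path.join(src_dir, sdir, album, s + ".mp3")
            try:
                shutil.copy2(src_path, dest_dir)
                print src_path
                break  # copied successfully, no need to check the remaining folders
            except IOError:
                pass   # not in this folder, try the next one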
I'm not able to figure out why I cannot get exit code 1 from this function when item or item.filename (the absolute path to the file) is empty.
import os
import sys
from stat import S_ISDIR

def sftp_get_recursive(path, dest, sftp):
    item_list = sftp.listdir_attr(path)
    dest = str(dest)
    if not os.path.isdir(dest):
        os.makedirs(dest, exist_ok=True)
    for item in item_list:
        mode = item.st_mode
        if not str(item):
            print(item.filename)
            print("No file found")
            sys.exit(1)
        elif S_ISDIR(mode):
            sftp_get_recursive(path + "/" + item.filename, dest + "/" + item.filename, sftp)
        else:
            sftp.get(path + "/" + item.filename, dest + "/" + item.filename)
            print("Sending file from : ", path + "/" + item.filename)
            sftp.remove(path + "/" + item.filename)
If you want to exit with a specific exit code when there is no file anywhere in the directory tree, you will have to count the files recursively.
def sftp_get_recursive(path, dest, sftp):
    count = 0
    item_list = sftp.listdir_attr(path)
    if not os.path.isdir(dest):
        os.makedirs(dest, exist_ok=True)
    for item in item_list:
        source_path = path + "/" + item.filename
        dest_path = os.path.join(dest, item.filename)
        if S_ISDIR(item.st_mode):
            count += sftp_get_recursive(source_path, dest_path, sftp)
        else:
            print("Sending file from : ", source_path)
            sftp.get(source_path, dest_path)
            sftp.remove(source_path)
            count += 1
    return count
(I have refactored your code a bit to avoid repeating code)
And then use the count at the top level:
count = sftp_get_recursive(path, dest, sftp)
if count == 0:
    print("No file found")
    sys.exit(1)
Note that even if no files are found, the code will still recreate the empty remote directory structure locally. I'm not sure if that's what you want.
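If you would rather not create empty local directories, one option (a rough sketch building on the refactored function above, not tested against a real SFTP server) is to create each local directory lazily, only when a file is about to be downloaded into it:

def sftp_get_recursive(path, dest, sftp):
    count = 0
    for item in sftp.listdir_attr(path):
        source_path = path + "/" + item.filename
        dest_path = os.path.join(dest, item.filename)
        if S_ISDIR(item.st_mode):
            count += sftp_get_recursive(source_path, dest_path, sftp)
        else:
            # Create the local directory only when there is actually a file to put in it.
            os.makedirs(dest, exist_ok=True)
            print("Sending file from : ", source_path)
            sftp.get(source_path, dest_path)
            sftp.remove(source_path)
            count += 1
    return count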
I have more than 500 XML files, and each XML file should be processed by FME Workbench individually (one FME Workbench run per XML file).
For this purpose I run a Python script (loop.py) that iterates FME Workbench over each XML file.
The whole process used to work on another PC without any problem. Now, when I run the module, I get the following error:
Traceback (most recent call last):
  File "E:\XML_Data\process\01_XML_Tile_1.py", line 28, in <module>
    if "Translation was SUCCESSFUL" in open(path_log + "\\" + data + ".log").read():
IOError: [Errno 2] No such file or directory: 'E:\XML_Data\data_out\log_01\re_3385-5275.xml.log'
The Python code (loop.py) is attached below.
Any help is greatly appreciated.
import os
import time

# Mainpath and Working Folder:
#path_main = r"E:\XML_Data"
path_main = r"E:\XML_Data"
teil = str("01")

# variables
path_in = path_main + r"\data_in\03_Places\teil_" + teil                # source folder of XML files
path_in_tile10 = path_main + r"\data_in\01_Tiling\10x10.shp"            # source of grid shapefile
path_in_commu = path_main + r"\data_in\02_Communities\Communities.shp"  # source of communities shapefile
path_out = path_main + r"\data_out\teil_" + teil                        # output folder of shapefiles that resulted from XML files (tile_01 folder)
path_log = path_main + r"\data_out\log_" + teil                         # output folder of log files for each run (log_01 folder)
path_fme = r"%FME_EXE_2015%"                                            # "C:\Program Files\FME2015\fme.exe"
path_fme_workbench = path_main + r"\process\PY_FME2015.fmw"             # path of FME workbench

datalists = os.listdir(path_in)
count = 0

# loop over each file individually in FME
for data in datalists:
    if data.find(".xml") != -1:
        count += 1
        print ("Run-No." + str(count) + ": with data " + data)
        os.system(path_fme + " " + path_fme_workbench + " "
                  + "--SourceDataset_XML" + " " + path_in + "\\" + data + " "
                  + "--SourceDataset_SHAPE" + " " + path_in_tile10 + " "
                  + "--SourceDataset_SHAPE_COMU" + " " + path_in_commu + " "
                  + "--DestDataset_SHAPE" + " " + path_out + " "
                  + "LOG_FILENAME" + " " + path_log + "\\" + data + ".log")
        print ("Data processed: " + data)
        shape = str(data[19:28]) + "_POPINT_CENTR_UTM32N.shp"
        print ("ResultsFileName: " + shape)
        if "Translation was SUCCESSFUL" in open(path_log + "\\" + data + ".log").read():
            # Translation was successful and the SHP file exists:
            if os.path.isfile(path_out + "\\" + shape):
                write_log = open(path_out + "\\" + "result_xml.log", "a")
                write_log.write(time.asctime(time.localtime()) + " " + shape + "\n")
                write_log.close()
                print("Everything ok")
            # Translation was successful, but the SHP file does not exist:
            else:
                write_log = open(path_out + "\\" + "error_xml.log", "a")
                write_log.write(time.asctime(time.localtime()) + " Data: " + shape + " unavailable.\n")
                write_log.close()
        # Translation was not successful:
        else:
            write_log = open(path_out + "\\" + "error_xml.log", "a")
            write_log.write(time.asctime(time.localtime()) + " Translation " + data + " not successful.\n")
            write_log.close()

print ("Number of calculated files: " + str(count))
Most likely, the script failed at the os.system line, so the command never created the log file. Since you mentioned a different computer, there could be many causes, such as a different version of FME (so the environment variable %FME_EXE_2015% would not exist).
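To make that kind of failure visible, one option is to check the exit status that os.system returns before trying to read the log file. A minimal sketch that would slot into the loop above (fme_command is a placeholder name for the same command string already passed to os.system):

        # fme_command: the same command string built for os.system above (placeholder name)
        status = os.system(fme_command)
        if status != 0:
            print ("FME run failed for " + data + " with exit status " + str(status))
        elif not os.path.isfile(path_log + "\\" + data + ".log"):
            print ("FME finished but did not write a log file for " + data)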
Use a WorkspaceRunner transformer to do this.
The FME version may be outdated, so first check whether the version is what's causing the problem.
subprocess.call(["C:/Program Files/fme/FMEStarter/FMEStarter.exe", "C:/Program Files/fme/fme20238/fme.exe", "/fmefile.fmw" "LOG_FILENAME","logfile"], stdin=None, stdout=None, stderr=None, shell=True, timeout=None)
I have some code that downloads images from a website. As it currently works, it needs to guess the file extension of the URL it is downloading from. The block of code that does that looks like this:
for imageLink in imageLinks:
    try:
        urllib.request.urlretrieve(imageLink + ".png", str(threadName) + "/" + str(count) + ".png")
    except:
        try:
            urllib.request.urlretrieve(imageLink + ".jpg", str(threadName) + "/" + str(count) + ".png")
        except:
            try:
                urllib.request.urlretrieve(imageLink + ".gif", str(threadName) + "/" + str(count) + ".gif")
            except:
                urllib.request.urlretrieve(imageLink + ".webm", str(threadName) + "/" + str(count) + ".webm")
As it stands, the code relies on a failure in order to try something else.
I wanted to know if there is a way to keep this functionality but make it look better. These calls raise identical errors when they fail, so I just want to go through them sequentially until one works.
for ext in ('.png', '.jpg', '.gif', '.webm'):
    try:
        urllib.request.urlretrieve(imageLink + ext, str(threadName) + "/" + str(count) + ext)
        break
    except:
        pass
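If you also want to know when none of the extensions worked, a for/else clause covers the case where the loop finishes without hitting break. A small sketch building on the loop above (catching OSError, which urllib's network errors derive from in Python 3, instead of a bare except):

for ext in ('.png', '.jpg', '.gif', '.webm'):
    try:
        urllib.request.urlretrieve(imageLink + ext, str(threadName) + "/" + str(count) + ext)
        break  # stop at the first extension that downloads successfully
    except OSError:
        pass   # this extension did not work, try the next one
else:
    # the loop never hit break, so every extension failed
    print("No working extension found for", imageLink)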
You can put the try/except block inside a function and return None if control reaches the except clause. You can then adapt the for loop to your own needs. One example:
def get_url(link1, link2):
    try:
        requestData = urllib.request.urlretrieve(link1, link2)
    except:
        return None
    return requestData

for imageLink in imageLinks:
    data = get_url(imageLink + ".png", str(threadName) + "/" + str(count) + ".png")
    if data == None:
        data = get_url(imageLink + ".jpg", str(threadName) + "/" + str(count) + ".jpg")
    if data == None:
        data = get_url(imageLink + ".gif", str(threadName) + "/" + str(count) + ".gif")
    if data == None:
        get_url(imageLink + ".webm", str(threadName) + "/" + str(count) + ".webm")
I made this script to sort a folder into subfolders by file type, and it worked! Now I would like it to also sort folders within the folder I tell it to sort. I tried recursion but it didn't work; is my syntax wrong? Also, how would you get it to move files of type x up a directory, into the appropriately sorted folder inside the top-level folder I told the script to sort, if that makes sense?
Here's my code:
#!/bin/python
import os

path = raw_input("Enter your folder you would like sorted: ")

def searchFolders(path):
    if os.path.exists(path):
        dirList = os.listdir(path)
        for filename in dirList:
            if ".jpg" in filename:
                if not os.path.exists(path + "Photos"):
                    os.makedirs(path + "Photos")
                os.rename(path + filename, path + "Photos/" + filename)
            elif ".pptx" in filename:
                if not os.path.exists(path + "Powerpoints"):
                    os.makedirs(path + "Powerpoints")
                os.rename(path + filename, path + "Powerpoints/" + filename)
            elif ".zip" in filename:
                if not os.path.exists(path + "Zip Files"):
                    os.makedirs(path + "Zip Files")
                os.rename(path + filename, path + "Zip Files/" + filename)
            elif ".dmg" in filename:
                if not os.path.exists(path + "Disk Images"):
                    os.makedirs(path + "Disk Images")
                os.rename(path + filename, path + "Disk Images/" + filename)
            elif ".mp3" in filename:
                if not os.path.exists(path + "Music"):
                    os.makedirs(path + "Music")
                os.rename(path + filename, path + "Music/" + filename)
            elif ".pdf" in filename:
                if not os.path.exists(path + "Pdf"):
                    os.makedirs(path + "Pdf")
                os.rename(path + filename, path + "Pdf/" + filename)
            elif ".cpp" in filename:
                if not os.path.exists(path + "C++"):
                    os.makedirs(path + "C++")
                os.rename(path + filename, path + "C++/" + filename)
            elif ".psd" in filename:
                if not os.path.exists(path + "Photoshop"):
                    os.makedirs(path + "Photoshop")
                os.rename(path + filename, path + "Photoshop/" + filename)
            elif ".dng" in filename:
                if not os.path.exists(path + "Photos/Raw Photos"):
                    os.makedirs(path + "Photos/Raw Photos")
                os.rename(path + filename, path + "Photos/Raw Photos/" + filename)
            elif not "." in filename:
                folderPath = path + filename
                searchFolders(folderPath)
            else:
                if not os.path.exists(path + "Random"):
                    os.makedirs(path + "Random")
                os.rename(path + filename, path + "Random/" + filename)
        print "Sorting Complete"
    else:
        print "Folder Does not exist"

searchFolders(path)  # kick off the sort on the folder the user entered
shutil.copytree(src, dst)
shutil.rmtree(src)
should get you where you want ...
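For the recursive sorting itself, here is a rough sketch of one way to structure it. The FOLDERS mapping is just an illustrative subset of the extensions from the question, and os.path.join avoids the missing-separator problem in the original concatenations:

import os

# Hypothetical mapping from extension to destination folder name (extend as needed).
FOLDERS = {
    ".jpg": "Photos",
    ".mp3": "Music",
    ".pdf": "Pdf",
}

def sort_folder(path, target_root=None):
    """Sort files under path into subfolders of target_root (the top folder by default)."""
    if target_root is None:
        target_root = path
    for name in os.listdir(path):
        full = os.path.join(path, name)
        if os.path.isdir(full):
            # recurse, but keep moving files into folders under the top-level directory
            sort_folder(full, target_root)
        else:
            ext = os.path.splitext(name)[1].lower()
            dest_dir = os.path.join(target_root, FOLDERS.get(ext, "Random"))
            if not os.path.exists(dest_dir):
                os.makedirs(dest_dir)
            os.rename(full, os.path.join(dest_dir, name))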
I have many subdirectories in my main directory and would like to write a script to unzip and convert all the files within them. If possible, I would also like to combine all the CSV files within a single directory into a single CSV. But more importantly, I need help with my nested loop.
import gzip
import csv
import os

subdirlist = os.listdir('/home/user/Desktop/testloop')
subtotal = len(subdirlist)
subcounter = 0

for dirlist in subdirlist:
    print "Working On " + dirlist
    total = len(dirlist)
    counter = 0
    for dir in dirlist:
        print "Working On " + dir
        f = gzip.open('/' + str(subdirlist) + '/' + dir, 'rb')
        file_content = f.read()
        f.close()
        print "25% Complete"
        filename = '/' + str(subdirlist) + '/temp.txt'
        target = open(filename, 'w')
        target.write(file_content)
        target.close()
        print "50% Complete!"
        csv_file = '/' + str(subdirlist) + '/' + str(dir) + '.csv'
        in_txt = csv.reader(open(filename, "rb"), delimiter='\t')
        out_csv = csv.writer(open(csv_file, 'wb'))
        out_csv.writerows(in_txt)
        os.remove(filename)
        os.remove('/' + str(subdirlist) + '/' + dir)
        counter += 1
        print str(counter) + "/" + str(total) + " " + str(dir) + " Complete!"
    print "SubDirectory Converted!"
    print str(subcounter) + "/" + str(subtotal) + " " + str(subdirlist) + " Complete!"
    subcounter += 1
print "All Files Converted!"
Thanks in advance
To get lists of files and subdirectories, you can use os.walk. Below is an implementation I wrote to get all files (optionally, of certain type(s)) in arbitrarily nested subdirectories:
from os import walk, sep
from functools import reduce  # in Python 3.x only

def get_filelist(root, extensions=None):
    """Return a list of files (path and name) within a supplied root directory.

    To filter by extension(s), provide a list of strings, e.g.
    get_filelist(root, ["zip", "csv"])
    """
    return reduce(lambda x, y: x + y,
                  [[sep.join([item[0], name]) for name in item[2]
                    if (extensions is None or
                        name.split(".")[-1] in extensions)]
                   for item in walk(root)])
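A usage sketch for this question's case (assuming, as the original code suggests, that the gzipped files contain tab-separated text and each should become a CSV next to it; Python 3):

import csv
import gzip

# Convert every .gz file anywhere under the main directory into a .csv alongside it.
for gz_path in get_filelist('/home/user/Desktop/testloop', ["gz"]):
    csv_path = gz_path + '.csv'
    with gzip.open(gz_path, 'rt') as f_in, open(csv_path, 'w', newline='') as f_out:
        reader = csv.reader(f_in, delimiter='\t')
        csv.writer(f_out).writerows(reader)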