Suppose I have the following BUILD file
py_library(
name = "foo",
    srcs = ["foo.py"],
data = ["//bar:data.json"],
)
How should I refer to data.json from foo.py? I want something like the snippet below; what should I use for some_path?
with open(os.path.join(some_path, "bar/data.json"), 'r') as fp:
data = json.load(fp)
I couldn't find much general documentation about *.runfiles online -- any pointers would be appreciated!
Short answer: os.path.dirname(__file__)
Here is the full example:
$ ls
bar/ BUILD foo.py WORKSPACE
$ cat BUILD
py_binary(
name = "foo",
srcs = ["foo.py"],
data = ["//bar:data.json"],
)
$ cat foo.py
import json
import os
ws = os.path.dirname(__file__)
with open(os.path.join(ws, "bar/data.json"), 'r') as fp:
print(json.load(fp))
$ cat bar/BUILD
exports_files(["data.json"])
$ bazel run :foo
Edit: this doesn't work well when your package is in a subdirectory. You may need to walk back up using os.path.dirname.
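For example, a minimal sketch of that workaround (the package depth here is hypothetical; add one os.path.dirname call per directory level between foo.py and the runfiles root):
import json
import os

pkg_dir = os.path.dirname(__file__)        # directory containing foo.py inside the runfiles tree
runfiles_root = os.path.dirname(pkg_dir)   # walk up one level per package directory
with open(os.path.join(runfiles_root, "bar/data.json"), 'r') as fp:
    data = json.load(fp)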
Here is a function that should return the path to the runfiles root for any py_binary in all the cases that I'm aware of:
import os
import re
import sys
def find_runfiles():
"""Find the runfiles tree (useful when _not_ run from a zip file)"""
# Follow symlinks, looking for my module space
stub_filename = os.path.abspath(sys.argv[0])
while True:
# Found it?
module_space = stub_filename + '.runfiles'
if os.path.isdir(module_space):
break
runfiles_pattern = r"(.*\.runfiles)"
matchobj = re.match(runfiles_pattern, os.path.abspath(sys.argv[0]))
if matchobj:
module_space = matchobj.group(1)
break
raise RuntimeError('Cannot find .runfiles directory for %s' %
sys.argv[0])
return module_space
For the example in your question you could use it like so:
with open(os.path.join(find_runfiles(), "name_of_workspace/bar/data.json"), 'r') as fp:
data = json.load(fp)
Note that this function won't help if you build zipped executables of your python apps (using subpar, probably); for those you will need some more code. This next snippet includes get_resource_filename() and get_resource_directory(), which will work for both regular py_binary and .par binaries:
import atexit
import os
import re
import shutil
import sys
import tempfile
import zipfile
def get_resource_filename(path):
zip_path = get_zip_path(sys.modules.get("__main__").__file__)
if zip_path:
tmpdir = tempfile.mkdtemp()
atexit.register(lambda: shutil.rmtree(tmpdir, ignore_errors=True))
zf = BetterZipFile(zip_path)
zf.extract(member=path, path=tmpdir)
return os.path.join(tmpdir, path)
elif os.path.exists(path):
return path
else:
path_in_runfiles = os.path.join(find_runfiles(), path)
if os.path.exists(path_in_runfiles):
return path_in_runfiles
else:
raise ResourceNotFoundError
def get_resource_directory(path):
"""Find or extract an entire subtree and return its location."""
zip_path = get_zip_path(sys.modules.get("__main__").__file__)
if zip_path:
tmpdir = tempfile.mkdtemp()
atexit.register(lambda: shutil.rmtree(tmpdir, ignore_errors=True))
zf = BetterZipFile(zip_path)
members = []
for fn in zf.namelist():
if fn.startswith(path):
members += [fn]
zf.extractall(members=members, path=tmpdir)
return os.path.join(tmpdir, path)
elif os.path.exists(path):
return path
else:
path_in_runfiles = os.path.join(find_runfiles(), path)
if os.path.exists(path_in_runfiles):
return path_in_runfiles
else:
raise ResourceNotFoundError
def get_zip_path(path):
"""If path is inside a zip file, return the zip file's path."""
if path == os.path.sep:
return None
elif zipfile.is_zipfile(path):
return path
return get_zip_path(os.path.dirname(path))
class ResourceNotFoundError(RuntimeError):
pass
def find_runfiles():
"""Find the runfiles tree (useful when _not_ run from a zip file)"""
# Follow symlinks, looking for my module space
stub_filename = os.path.abspath(sys.argv[0])
while True:
# Found it?
module_space = stub_filename + '.runfiles'
if os.path.isdir(module_space):
break
runfiles_pattern = r"(.*\.runfiles)"
matchobj = re.match(runfiles_pattern, os.path.abspath(sys.argv[0]))
if matchobj:
module_space = matchobj.group(1)
break
raise RuntimeError('Cannot find .runfiles directory for %s' %
sys.argv[0])
return module_space
class BetterZipFile(zipfile.ZipFile):
"""Shim around ZipFile that preserves permissions on extract."""
def extract(self, member, path=None, pwd=None):
if not isinstance(member, zipfile.ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
ret_val = self._extract_member(member, path, pwd)
attr = member.external_attr >> 16
os.chmod(ret_val, attr)
return ret_val
Using this second code snippet, your example would look like:
with open(get_resource_filename("name_of_workspace/bar/data.json"), 'r') as fp:
data = json.load(fp)
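If you need a whole directory of data files rather than a single file, the get_resource_directory() helper defined above can be used the same way (same hypothetical workspace name):
data_dir = get_resource_directory("name_of_workspace/bar")
with open(os.path.join(data_dir, "data.json"), 'r') as fp:
    data = json.load(fp)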
Does Python have any built-in functionality to add a number to a filename if it already exists?
My idea is that it would work the way certain OSes do: if a file is output to a directory where a file of that name already exists, it would append a number to the name or increment it.
I.e., if "file.pdf" exists it would create "file2.pdf", and next time "file3.pdf".
I ended up writing my own simple function for this. Primitive, but gets the job done:
import os

def uniquify(path):
filename, extension = os.path.splitext(path)
counter = 1
while os.path.exists(path):
path = filename + " (" + str(counter) + ")" + extension
counter += 1
return path
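A hypothetical call, assuming file.pdf already exists in the current directory but "file (1).pdf" does not:
print(uniquify("file.pdf"))  # -> "file (1).pdf"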
In a way, Python has this functionality built into the tempfile module. Unfortunately, you have to tap into a private global variable, tempfile._name_sequence. This means that officially, tempfile makes no guarantee that in future versions _name_sequence even exists -- it is an implementation detail.
But if you are okay with using it anyway, this shows how you can create uniquely named files of the form file#.pdf in a specified directory such as /tmp:
import tempfile
import itertools as IT
import os
def uniquify(path, sep = ''):
def name_sequence():
count = IT.count()
yield ''
while True:
yield '{s}{n:d}'.format(s = sep, n = next(count))
orig = tempfile._name_sequence
with tempfile._once_lock:
tempfile._name_sequence = name_sequence()
path = os.path.normpath(path)
dirname, basename = os.path.split(path)
filename, ext = os.path.splitext(basename)
fd, filename = tempfile.mkstemp(dir = dirname, prefix = filename, suffix = ext)
tempfile._name_sequence = orig
return filename
print(uniquify('/tmp/file.pdf'))
I was trying to implement the same thing in my project, but @unutbu's answer seemed too 'heavy' for my needs, so I finally came up with the following code:
import os
index = ''
while True:
try:
os.makedirs('../hi'+index)
break
except WindowsError:
if index:
index = '('+str(int(index[1:-1])+1)+')' # Append 1 to number in brackets
else:
index = '(1)'
pass # Go and try create file again
Just in case someone stumbled upon this and requires something simpler.
If all files being numbered isn't a problem, and you know beforehand the name of the file to be written, you could simply do:
import os
counter = 0
filename = "file{}.pdf"
while os.path.isfile(filename.format(counter)):
counter += 1
filename = filename.format(counter)
Recently I encountered the same thing; here is my approach:
import os
file_name = "file_name.txt"
if os.path.isfile(file_name):
expand = 1
while True:
expand += 1
new_file_name = file_name.split(".txt")[0] + str(expand) + ".txt"
if os.path.isfile(new_file_name):
continue
else:
file_name = new_file_name
break
Let's say you already have foo.txt, foo_1.txt, foo_2.txt and bar.txt in the directory.
This function generates the next available, not-yet-existing filename by adding a _1, _2, _3, ... suffix before the extension if necessary:
import os
def nextnonexistent(f):
fnew = f
root, ext = os.path.splitext(f)
i = 0
while os.path.exists(fnew):
i += 1
fnew = '%s_%i%s' % (root, i, ext)
return fnew
print(nextnonexistent('foo.txt')) # foo_3.txt
print(nextnonexistent('bar.txt')) # bar_1.txt
print(nextnonexistent('baz.txt')) # baz.txt
Since the tempfile hack A) is a hack and B) still requires a decent amount of code anyway, I went with a manual implementation. You basically need:
A way to safely create a file if and only if it does not exist (this is what the tempfile hack affords us).
A generator for filenames.
A wrapping function to hide the mess.
I defined a safe_open that can be used just like open:
import errno
import itertools
import os
import platform

def iter_incrementing_file_names(path):
"""
Iterate incrementing file names. Start with path and add " (n)" before the
extension, where n starts at 1 and increases.
:param path: Some path
:return: An iterator.
"""
yield path
prefix, ext = os.path.splitext(path)
for i in itertools.count(start=1, step=1):
yield prefix + ' ({0})'.format(i) + ext
def safe_open(path, mode):
"""
Open path, but if it already exists, add " (n)" before the extension,
where n is the first number found such that the file does not already
exist.
Returns an open file handle. Make sure to close!
:param path: Some file name.
:return: Open file handle... be sure to close!
"""
flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY
if 'b' in mode and platform.system() == 'Windows':
flags |= os.O_BINARY
for filename in iter_incrementing_file_names(path):
try:
file_handle = os.open(filename, flags)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
raise
else:
return os.fdopen(file_handle, mode)
# Example
with safe_open("some_file.txt", "w") as fh:
print("Hello", file=fh)
I haven't tested this yet, but it should work: it iterates over possible filenames until it finds one that does not exist, at which point it breaks.
import itertools
import os

def increment_filename(fn):
    fn, extension = os.path.splitext(fn)
    yield fn + extension
    for n in itertools.count(start=1, step=1):
        yield '%s%d%s' % (fn, n, extension)

for filename in increment_filename(original_filename):
    if not os.path.isfile(filename):
        break
This works for me.
The initial file name is 0.yml; if it exists, the number is incremented by one until an unused name is found.
import os
import itertools
def increment_filename(file_name):
fid, extension = os.path.splitext(file_name)
yield fid + extension
for n in itertools.count(start=1, step=1):
new_id = int(fid) + n
yield "%s%s" % (new_id, extension)
def get_file_path():
target_file_path = None
for file_name in increment_filename("0.yml"):
file_path = os.path.join('/tmp', file_name)
if not os.path.isfile(file_path):
target_file_path = file_path
break
return target_file_path
import os
class Renamer():
def __init__(self, name):
self.extension = name.split('.')[-1]
self.name = name[:-len(self.extension)-1]
self.filename = self.name
def rename(self):
i = 1
if os.path.exists(self.filename+'.'+self.extension):
while os.path.exists(self.filename+'.'+self.extension):
self.filename = '{} ({})'.format(self.name,i)
i += 1
return self.filename+'.'+self.extension
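A hypothetical usage of the class above:
new_name = Renamer('report.pdf').rename()  # 'report.pdf' if free, otherwise 'report (1).pdf', 'report (2).pdf', ...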
I found that the os.path.exists() function did what I needed. I'm using dictionary-to-CSV saving as an example, but the same logic could work for any file type:
import os

def smart_save(filename, data_dict):
    od = filename + '_'  # added underscore before number for clarity
    for i in range(0, 500):  # I set an arbitrary upper limit of 500
        d = od + str(i)
        if os.path.exists(d + '.csv'):
            pass
        else:
            with open(d + '.csv', 'w') as f:  # or any saving operation you need
                for key in data_dict.keys():
                    f.write("%s,%s\n" % (key, data_dict[key]))
            break
Note: this appends a number (starting at 0) to the file name by default, but it's easy to shift that around.
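For example, one way to shift it around so the plain name is tried first and numbering starts at 1 (a sketch only, reusing the same .csv convention; the function name is made up):
import os

def smart_name(filename):
    """Return filename.csv if it is free, otherwise filename_1.csv, filename_2.csv, ..."""
    if not os.path.exists(filename + '.csv'):
        return filename + '.csv'
    i = 1
    while os.path.exists('%s_%d.csv' % (filename, i)):
        i += 1
    return '%s_%d.csv' % (filename, i)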
This function checks whether the file name already exists and, if so, builds a new one, using a regular expression and recursion:
import os
import re

def validate_outfile_name(input_path):
    filename, extension = os.path.splitext(input_path)
    if os.path.exists(input_path):
        output_path = ""
        pattern = r'\([0-9]\)'
        match = re.search(pattern, filename)
        if match:
            version = filename[match.start() + 1]
            try:
                new_version = int(version) + 1
            except ValueError:
                new_version = 1
            output_path = f"{filename[:match.start()]}({new_version}){extension}"
            output_path = validate_outfile_name(output_path)
        else:
            version = 1
            output_path = f"{filename}({version}){extension}"
        return output_path
    else:
        return input_path
I've implemented a similar solution with pathlib:
It creates file names that match the pattern path/<file-name>-\d\d.ext. Perhaps this solution can help...
import pathlib
from toolz import itertoolz as itz
def file_exists_add_number(path_file_name, digits=2):
pfn = pathlib.Path(path_file_name)
parent = pfn.parent # parent-dir of file
stem = pfn.stem # file-name w/o extension
suffix = pfn.suffix # NOTE: extension starts with '.' (dot)!
try:
# search for files ending with '-\d\d.ext'
last_file = itz.last(parent.glob(f"{stem}-{digits * '?'}{suffix}"))
except:
curr_no = 1
else:
curr_no = int(last_file.stem[-digits:]) + 1
# int to string and add leading zeros
    curr_no = str(curr_no).zfill(digits)
path_file_name = parent / f"{stem}-{curr_no}{suffix}"
return str(path_file_name)
Please note: this solution starts at 01 and will only find existing files matching the -\d\d pattern!
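A hypothetical call, assuming reports/summary.txt and reports/summary-01.txt already exist:
print(file_exists_add_number("reports/summary.txt"))  # -> reports/summary-02.txt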
import os

def create_file():
counter = 0
filename = "file"
while os.path.isfile(f"dir/{filename}{counter}.txt"):
counter += 1
print(f"{filename}{counter}.txt")
A little bit late, but something like this should still work properly; maybe it will be useful for someone.
You can use the built-in range as a counter to do this (an image downloader as an example for you):
import requests

def image_downloader():
image_url = 'some_image_url'
for count in range(10):
image_data = requests.get(image_url).content
with open(f'image_{count}.jpg', 'wb') as handler:
handler.write(image_data)
Files will increment properly. Result is:
image.jpg
image_0.jpg
image_1.jpg
image_2.jpg
image_3.jpg
image_4.jpg
image_5.jpg
image_6.jpg
image_7.jpg
image_8.jpg
image_9.jpg
An easy way to create a new file if a file with this name is already in your folder:
import os

# wb is assumed to be a workbook object (e.g. an openpyxl Workbook) created earlier
if 'sample.xlsx' in os.listdir('testdir/'):
i = 2
while os.path.exists(f'testdir/sample ({i}).xlsx'):
i += 1
wb.save(filename=f"testdir/sample ({i}).xlsx")
else:
wb.save(filename=f"testdir/sample.xlsx")
I am trying to validate a dictionary parameter using a decorator.
import logging
import os
# decorator
def file_validator(f):
def wrapped(*args):
"""
Once there is passed values,
file_path = os.path.join(path_info, file_info)
if os.path.exists(file_path):
logging.info('{} exists'.format(file_info))
else:
logging.info('{} does not exist'.format(file_info))
"""
# original function
@file_validator
def original_function(file_dict):
# pass only specific element to file_validator decorator for checking
# for example only "pathA": "/files", "fileA": "bar.csv"
sample_dict = {"pathA": "/files", "fileA": "bar.csv", "fileB": "hello.txt"}
original_function(sample_dict)
Is there a way to do this check using a decorator?
EDIT
This could be equivalent to what I want to do.
def file_validator(filepath, filename):
    file_path = os.path.join(filepath, filename)
if os.path.exists(file_path):
logging.info('{} exists'.format(filename))
else:
logging.info('{} does not exist'.format(filename))
def original_function(file_dict):
file_validator(file_dict['pathA'], file_dict['fileA'])
file_validator(file_dict['pathA'], file_dict['fileB'])
sample_dict = {"pathA": "/files", "fileA": "bar.csv", "fileB": "hello.txt"}
original_function(sample_dict)
Seems like something like this should do the trick:
import os
import logging
def file_validator(func):
def wrapper(file_dict:dict):
# Turn file_dict to two lists:
# paths = ["/files"]
# files = ["bar.csv", "hello.txt"]
paths = [
path
for name, path in file_dict.items()
if name.startswith("path")
]
files = [
file
for name, file in file_dict.items()
if name.startswith("file")
]
# Loop through all the path & file combinations and check if they exist
for path in paths:
for file in files:
full_path = os.path.join(path, file)
if os.path.exists(full_path):
logging.info('{} exists'.format(file))
else:
logging.info('{} does not exist'.format(file))
# Run the actual function
return func(file_dict)
return wrapper
@file_validator
def original_function(file_dict):
...
files = {"pathA": "/files", "fileA": "bar.csv", "fileB": "hello.txt"}
original_function(files)
# Note that with this approach fileB is also checked against pathA;
# from the question it is unclear how path/file pairing should be handled
But there is some code smell here. If possible, I would advise not to store your paths and files in that way, as it's not easy to manipulate and is prone to bugs. It would be much better to store them as a list of full paths, by creating all the combinations with itertools.product, or to just have two lists: paths and files.
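A minimal sketch of that alternative layout (the values are the ones from the question; the variable names are made up):
import itertools
import os

paths = ["/files"]
files = ["bar.csv", "hello.txt"]
# Build every path/file combination explicitly instead of encoding them in dict keys.
full_paths = [os.path.join(p, f) for p, f in itertools.product(paths, files)]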
logging.info replaced by print here for verification.
import logging
import os
def file_validator(f):
def wrapper(args):
for path, file in args.items():
file_path = os.path.join(path, file)
if os.path.exists(file_path):
print('{} exists'.format(file_path))
else:
print('{} does not exist'.format(file_path))
f(args)
return wrapper
@file_validator
def original_function(file_dict):
print(file_dict)
sample_dict = {"pathA": "\\files", "fileA": "bar.csv", "fileB": "hello.txt"}
original_function(sample_dict)
I have multiple .png and .json files in the same directory, and I want to check whether the files in the directory share the same base name or not, e.g. a.png & a.json, b.png & b.json.
You may try this:
import os
_, _, files = next(os.walk('.'))
json = [f[:-5] for f in files if f.endswith('.json')]
png = [f[:-4] for f in files if f.endswith('.png')]
json_only = set(json) - set(png)
png_only = set(png) - set(json)
json_and_png = set(json) & set(png)
... etc...
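To turn those sets into a report, something like this would work (purely illustrative):
for name in sorted(png_only):
    print(name + ".png has no matching .json")
for name in sorted(json_only):
    print(name + ".json has no matching .png")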
from pathlib import Path
print("TEST CASE::NAMING CONVENTIONS SHOULD BE START WITH GAME PREFIX ")
def get_invalid_files_from(directory: str) -> list:
    dir_path = Path(directory)
    parent_folder = dir_path.stem
files = dir_path.rglob('*')
return [str(f) for f in files if is_file_invalid(f, prefix=parent_folder)]
def is_file_invalid(file: Path, prefix: str) -> bool:
return file.suffix.lower() in ['.png', '.json'] and not file.name.startswith(prefix)
testcase = True
invalid_files = get_invalid_files_from(r'E:\abc\\xyz')
#assert not invalid_files, 'Invalid file paths:\n' + '\n'.join(invalid_files)
if invalid_files :
testcase = False
print(r'Below given File found with invalid prefix:')
print('\n'.join(invalid_files))
if not testcase:
print("test case failed ")
else:
print("Test Case Passed ")
@lenik is on the right track; I believe using sets is the easiest way to get what you want. Here's a complete and tested solution using the pathlib module that lists any png/json files that do not have a matching pair in the provided folder and all its sub-folders:
from pathlib import Path

def get_unpaired_files(directory: str) -> list:
    dir_path = Path(directory).resolve()
    json_files = get_files_without_extension(dir_path, pattern='*.json')
    png_files = get_files_without_extension(dir_path, pattern='*.png')
    return [str(f) for f in set(json_files) ^ set(png_files)]

def get_files_without_extension(dir_path: Path, pattern: str) -> list:
    return [f.with_suffix('') for f in dir_path.rglob(pattern)]
Usage:
unpaired_files = get_unpaired_files(r'E:\abc')
if unpaired_files :
print('Unpaired file paths were found:')
print('\n'.join(unpaired_files))
With pyminizip I am able to zip a file with a password in Python:
filepath=r"C:\Users\xxx\Desktop\myFolder\file.txt"
import pyminizip
pyminizip.compress(filepath, None,"output.zip", "password", 0)
But how do I zip the whole folder 'myFolder' into a zip file with a password?
I tried removing the filename from the path, but it gives the error:
OSError: error in opening C:\Users\xxx\Desktop\myFolder for reading
EDIT :
The link below has a function which will zip the directory, but it won't add a password.
https://www.calazan.com/how-to-zip-an-entire-directory-with-python/
If anyone can let me know if it is possible to add a password to an existing zip file, that will solve my problem. Is that possible?
I was finally able to accomplish encrypting the whole directory (including all subfolder structure and files) using a library called 'pyzipper', suggested by Anupam Chaplot.
Here is the solution :
import os
import sys
import zipfile

import pyzipper

def zip_folderPyzipper(folder_path, output_path):
"""Zip the contents of an entire folder (with that folder included
in the archive). Empty subfolders will be included in the archive
as well.
"""
parent_folder = os.path.dirname(folder_path)
# Retrieve the paths of the folder contents.
contents = os.walk(folder_path)
try:
        zip_file = pyzipper.AESZipFile(output_path, 'w', compression=pyzipper.ZIP_DEFLATED, encryption=pyzipper.WZ_AES)
zip_file.pwd=b'PASSWORD'
for root, folders, files in contents:
# Include all subfolders, including empty ones.
for folder_name in folders:
absolute_path = os.path.join(root, folder_name)
relative_path = absolute_path.replace(parent_folder + '\\',
'')
print ("Adding '%s' to archive." % absolute_path)
zip_file.write(absolute_path, relative_path)
for file_name in files:
absolute_path = os.path.join(root, file_name)
relative_path = absolute_path.replace(parent_folder + '\\',
'')
print ("Adding '%s' to archive." % absolute_path)
zip_file.write(absolute_path, relative_path)
print ("'%s' created successfully." % output_path)
except IOError as message:
print (message)
sys.exit(1)
except OSError as message:
print(message)
sys.exit(1)
except zipfile.BadZipfile as message:
print (message)
sys.exit(1)
finally:
zip_file.close()
Since I am new to Python I can't explain the code in detail. Here are the references:
https://pypi.org/project/pyzipper/
https://www.calazan.com/how-to-zip-an-entire-directory-with-python/
To extract the generated ZIP file on Windows:
Right click -> Unzip (Encrypted)
If you use the Extract All option directly, it will give an error.
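If you prefer to extract from Python instead of Explorer, here is a minimal sketch using the same pyzipper library (archive name and password are placeholders; use whatever you passed to the function above):
import pyzipper

with pyzipper.AESZipFile('my_archive.zip') as zf:
    zf.extractall(path='extracted', pwd=b'PASSWORD')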
Try this:
First, please check the pyminizip documentation, then try it.
import pyminizip as pyzip
compression = 8
pyzip.compress("test.txt", None, "test.zip", "Pswrd", compression)
Here is how to copy a whole directory with its subdirectories and files, then compress it into a zip and encrypt it with a password, without needing an associated backup file; here we will also see how to authorize a MAC address to perform the decryption. It's then up to you to change or improve the script, but the essentials work very well.
After a lot of research, testing and thinking, I created this solution.
my setup:
Python 3.8 64-bit on Windows 7 64-bit
Setup steps:
First, we need to install the cryptography module;
check supported platforms and installation details here: https://cryptography.io/en/latest/installation/
command:
pip install cryptography
Then we will use the Fernet object provided by this module:
https://cryptography.io/en/latest/fernet/
with a password:
https://cryptography.io/en/latest/fernet/#using-passwords-with-fernet
and shutil:
https://docs.python.org/3/library/shutil.html
file second.py:
import os
import re, uuid
import string
import shutil
import zlib
from cryptography.fernet import Fernet
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
import base64
import zipfile
class zipy:
def __init__(self, pathDir=None):
"""If pathDir optional is none, this script copy all directory in current execution."""
if pathDir != None:
if os.path.isdir(pathDir):
pathDir = pathDir.replace(os.sep, '/')
if pathDir.endswith('/'):
self.root = pathDir
else:
self.root = pathDir + '/'
else:
self.root = os.getcwd()+os.sep
self.root = self.root.replace(os.sep, '/')
else:
self.root = os.getcwd()+os.sep
self.root = self.root.replace(os.sep, '/')
os.chdir(self.root)
self.name = 'sauvegarde'
self.dirSauvegarde = self.root+self.name
self.dirSauvegarde = self.dirSauvegarde.replace(os.sep, '/')
lectureDossier = os.listdir(self.root)
print(lectureDossier)
self.path_system = {}
for element in lectureDossier:
if os.path.isdir(element):
if element != '__pycache__':
self.path_system[element] = self.root + element + os.sep.replace(os.sep, '/')
self.path_system[element] = self.path_system[element].replace(os.sep, '/')
else:
pass
elif os.path.isfile(element):
self.path_system[element] = self.root + element
self.path_system[element] = self.path_system[element].replace(os.sep, '/')
else:
pass
self.zipi = myZip(self.dirSauvegarde)
def save(self):
"""sauvegarde le fichier"""
self.createDir(self.dirSauvegarde)
chemin_src = ""
        chemin_dest = ""
for element in self.path_system:
if element != self.dirSauvegarde:
chemin_src = self.root+element
chemin_dest = self.dirSauvegarde + os.sep + element
chemin_dest = chemin_dest.replace(os.sep, '/')
if os.path.isdir(chemin_src):
self.copyDir(chemin_src, chemin_dest)
else:
self.copyFile(chemin_src, chemin_dest)
self.zipi.zip(zip_exist=True)
self.delDir(self.dirSauvegarde)
def copyDir(self, src, dest):
try:
shutil.copytree(src, dest, dirs_exist_ok=True)
except:
pass
def copyFile(self, src, dest):
try:
shutil.copyfile(src, dest)
except:
pass
def createDir(self, dirPath):
if os.path.isdir(dirPath):
self.delDir(dirPath)
else:
pass
os.makedirs(dirPath, exist_ok=True)
def delDir(self, dir):
if os.path.isdir(dir):
if len(os.listdir(dir)) > 0:
try:
print('rmtree')
shutil.rmtree(dir, ignore_errors=True)
except:
pass
else:
try:
os.rmdir(dir)
except:
pass
def decrypt(self):
self.zipi.unzip()
class myZip:
def __init__(self, dir):
self.pathDir = dir
self.nom = os.path.basename(dir)
self.pathZip = self.pathDir + '.zip'
self.crypt = Encryptor()
def zip(self, zip_exist=False):
if zip_exist == False:
pass
else:
if os.path.isfile(self.pathZip):
try:
os.remove(self.pathZip)
except:
pass
shutil.make_archive(os.path.splitext(self.pathZip)[0], 'zip', self.pathDir)
key = self.crypt.key_create()
#TEST
self.crypt.file_encrypt(key, self.pathZip, self.pathZip)
self.crypt.key_write(self.pathZip, key)
def unzip(self):
#TEST
if self.crypt.checkPass(self.pathZip):
            #print('OK, authorized MAC address')
key = self.crypt.key_load(self.pathZip)
self.crypt.file_decrypt(key, self.pathZip, self.pathZip)
else:
            print('not OK, wrong MAC address')
class Encryptor:
def __init__(self):
self.salto = None
def key_create(self):
password = self.getMac()
password = bytes(password, encoding="utf-8")
self.salto = os.urandom(16)
print(self.salto)
kdf = PBKDF2HMAC(
algorithm=hashes.SHA256(),
length=32,
salt=self.salto,
iterations=100,
)
key = base64.urlsafe_b64encode(kdf.derive(password))
return key
def key_write(self, pathZip, key):
with zipfile.ZipFile(pathZip, 'a') as zip:
zip.comment = key + bytes(' byMe ', encoding="utf-8") + self.salto
def key_load(self, pathZip):
stri = []
with zipfile.ZipFile(pathZip, 'a') as zip:
stri = zip.comment.split(b' byMe ')
print(stri[0])
print(stri[1])
key = stri[0]
self.salto = stri[1]
return key
def checkPass(self, pathZip):
key = base64.urlsafe_b64decode(self.key_load(pathZip))
salt = self.salto
mdp = self.getMac()
mdp = bytes(mdp, encoding="utf-8")
kdf = PBKDF2HMAC(
algorithm=hashes.SHA256(),
length=32,
salt=salt,
iterations=100,
)
retour = False
try:
kdf.verify(mdp, key)
retour = True
except:
retour = False
return retour
def file_encrypt(self, key, original_file, encrypted_file):
f = Fernet(key)
with open(original_file, 'rb') as file:
original = file.read()
encrypted = f.encrypt(original)
with open (encrypted_file, 'wb') as file:
file.write(encrypted)
def file_decrypt(self, key, encrypted_file, decrypted_file):
f = Fernet(key)
with open(encrypted_file, 'rb') as file:
encrypted = file.read()
decrypted = f.decrypt(encrypted)
with open(decrypted_file, 'wb') as file:
file.write(decrypted)
def getMac(self):
return "".join(re.findall('..', '%012x' % uuid.getnode()))
Use like this:
file : main.py
from second import zipy
# If the argument is empty, the script will back up the directory it is executed from;
# otherwise it will work on, and output the zip in, the location given as the argument.
dd = zipy("E:/path")
# or dd = zipy("E:/path/") or dd = zipy(); if you give an argument, give an absolute path
# Save the zip and encrypt it. Change second.py to give it a password directly as an argument.
dd.save()
#decrypt zip
dd.decrypt()
Here's a snippet using pyminizip: it gathers the list of files recursively and zips the whole thing.
import pyminizip
import os
def get_paths_recursively(src_root_path):
files = []
if src_root_path is not None:
for root, directories, filenames in os.walk(src_root_path):
for filename in filenames:
full_file_name = os.path.join(root, filename)
if os.path.isfile(full_file_name) and not filename.startswith('.'):
files.append(os.path.join(root, filename))
return files
def pyminizip_zipper(folder_path, output_path, password):
paths = get_paths_recursively(folder_path)
roots = []
for path in paths:
roots.append(os.path.dirname(path.replace(os.path.dirname(folder_path), './')))
pyminizip.compress_multiple(paths, roots, output_path, password, 5)
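A hypothetical call, reusing the folder from the question:
pyminizip_zipper(r"C:\Users\xxx\Desktop\myFolder", "myFolder.zip", "password")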
I'm trying to pass arg1 from outside of rename(), and it does not work since dirs is not defined there. If I use rename("dirs", False), the function does not work either.
Any idea?
# Defining the function that renames the target
def rename(arg1, arg2):
for root, dirs, files in os.walk( # Listing
path, topdown=arg2):
for i, name in enumerate(arg1):
output = name.replace(pattern, "") # Taking out pattern
if output != name:
os.rename( # Renaming
os.path.join(root, name),
os.path.join(root, output))
else:
pass
# Run
rename(dirs, False)
Here's the whole program:
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This program batch renames files or folders by taking out a certain pattern
import os
import subprocess
import re
# Defining the function that renames the target
def rename(arg1, arg2):
for root, dirs, files in os.walk( # Listing
path, topdown=arg2):
for i, name in enumerate(arg1):
output = name.replace(pattern, "") # Taking out pattern
if output != name:
os.rename( # Renaming
os.path.join(root, name),
os.path.join(root, output))
else:
pass
# User chooses between file and folder
print "What do you want to rename?"
print "1 - Folders\n2 - Files\n"
valid = False
while not valid:
try:
choice = int(raw_input("Enter number here: "))
if choice > 2:
print "Please enter a valid number\n"
valid = False
else:
valid = True
except ValueError:
print "Please enter a valid number\n"
valid = False
choice = 3 # To have a correct value of choice
# Asking for path & pattern
if choice == 1:
kind = "folders"
elif choice == 2:
kind = "files"
else:
pass
path = raw_input("What is the path to the %s?\n " % (kind))
pattern = raw_input("What is the pattern to remove?\n ")
# CHOICE = 1
# Renaming folders
if choice == 1:
rename(dirs, False)
# CHOICE = 2
# Renaming files
if choice == 2:
rename(files,True)
# Success message
kind = kind.replace("f", "F")
print "%s renamed" % (kind)
Here is the code corrected in a better way:
#!/usr/bin/env python
import os
import sys
# run it like this: python rename dirs /your/path/name/ tst
if __name__ == '__main__':
mode = sys.argv[1] # dirs or files
pathname = sys.argv[2]
pattern = sys.argv[3]
ndict = {'dirs': '', 'files': ''}
topdown = {'dirs': False, 'files': True}
for root, ndict['dirs'], ndict['files'] in os.walk(
pathname, topdown[mode]):
        for name in ndict[mode]:
newname = name.replace(pattern, '')
if newname != name:
os.rename(
os.path.join(root, name),
os.path.join(root, newname))
This is better achieved as a command-line tool using the py library:
import sys
from py.path import local # import local path object/class
def rename_files(root, pattern):
"""
Iterate over all paths starting at root using ``~py.path.local.visit()``
check if it is a file using ``~py.path.local.check(file=True)`` and
rename it with a new basename with ``pattern`` stripped out.
"""
for path in root.visit(rec=True):
if path.check(file=True):
path.rename(path.new(basename=path.basename.replace(pattern, "")))
def rename_dirs(root, pattern):
"""
Iterate over all paths starting at root using ``~py.path.local.visit()``
check if it is a directory using ``~py.path.local.check(dir=True)`` and
rename it with a new basename with ``pattern`` stripped out.
"""
for path in root.visit(rec=True):
if path.check(dir=True):
path.rename(path.new(basename=path.basename.replace(pattern, "")))
def main():
"""Define our main top-level entry point"""
root = local(sys.argv[1]) # 1 to skip the program name
pattern = sys.argv[2]
if local(sys.argv[0]).purebasename == "renamefiles":
rename_files(root, pattern)
else:
rename_dirs(root, pattern)
if __name__ == "__main__":
"""
Python sets ``__name__`` (a global variable) to ``__main__`` when being called
    as a script/application, e.g. python renamefiles or ./renamefiles
"""
main() # Call our main function
Usage:
renamefiles /path/to/dir pattern
or:
renamedirs /path/to/dir pattern
Save this as renamefiles or renamedirs.
A common approach in UNIX is to name the script/tool renamefiles and symlink renamefiles to renamedirs.
Improvement Notes:
Use optparse or argparse to provide command-line options and a --help flag.
Make rename_files() and rename_dirs() generic and merge them into a single function (see the sketch after this list).
Write documentation (docstrings)
Write unit tests.
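For the second note, a minimal sketch of what a single generic function could look like (assuming the same py library API used above):
def rename_paths(root, pattern, **check_kwargs):
    """Rename every path under root whose check() matches, stripping pattern from the basename."""
    for path in root.visit(rec=True):
        if path.check(**check_kwargs):
            path.rename(path.new(basename=path.basename.replace(pattern, "")))

# rename_paths(root, pattern, file=True)  # files
# rename_paths(root, pattern, dir=True)   # directories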