Clone folder from ftp to disk and disk to ftp - python

Hi i have a ftp with a folder named fotos, i need to create a script to use as a cron that get all files from the folder but dont replace if exist and in the end put in the ftp server fotos folder all that dont exist in the server but exist in disk.
I will try to explain a little better
ftp -> folder Fotos -> several folders with subfolders with fotos and videos
disk -> drive j: -> folder Fotos -> several folders with subfolders with fotos and videos
i need to do ftp -> disk and disk -> ftp without replace because i already have 50.000 files in there.
this is what im working on
import os
from ftplib import FTP
ftp = FTP()
server = "ftp.server.com"
user = "user"
passw = "password"
path = "Fotos"
path2 = "J:\FOTOS"
ftp.connect(server, 21, timeout=15)
login_response = ftp.login(user, passw)
ftp.encoding = "utf-8"
ftp.cwd(path)
print(ftp.getwelcome())
for ftp_file in ftp.nlst():
print("checking " , ftp_file)
for root, dirs, files in os.walk(ftp_file):
for name in files:
print(name)
if name !="Thumbs.db":
print(name)
if ftp_file not in os.listdir(path2):
print('Downloading file: %s' , ftp_file)
#ftp.retrbinary('RETR '+ ftp_file ,open(ftp_file,'wb').write,rest=0)
else:
print("existe")
print("end")
It connects to the ftp then print ('checking ', '1. Folder') and nothing more.
Could this be because of the number of files in the folder?
if i just add this
for ftp_file in ftp.nlst():
print("checking " , ftp_file)
it prints all the folders names

Related

Unable to delete files using python file on windows even after giving full-control

I recovered from a fully functioning server(Windows Server R2-2012) from my backup, and instead of overwriting the existing files, it created copies of all the files in my server.
I wrote a python(3.5.2) script to delete the files that has the name "2017-09-09 00-00 Copy of_Filename" but it still doesn't delete few files.
My folders and sub-directories have "full control" for the account I signed in with. I am manually able to delete the files, but my script or batch file couldn't. The error I'm getting is "permission denied"
I have run my batch file in administrative mode as well, but still no effect.
Python(3.5.2) script:
import os
import re
import fnmatch
f = open("log.txt","w+")
count = 1
for root, dirs, files in os.walk("c:\\Website\\",topdown=False):
for fl in files:
if re.match('2017-09-09 00-00 Copy of',fl ):
try:
file_path = os.path.join(root, fl)
os.remove(file_path)
print("Deleting file " + file_path)
except:
file_path = os.path.join(root, fl)
f.write(str(count) + " " + file_path + "\n")
#print(str(count) + " " + file_path + "\n")
count+=1
f.close()
This is an example of the folder contents that I am trying to delete 1.
This is the access status of the folder. I am "Administrator"
2
I feel my server(IIS) is not running due to this error.
Any help is appreciated.

Dropbox API v2 - uploading files

I'm trying to loop through a folder structure in python and upload each file it finds to a specified folder. The problem is that it's uploading a file with the correct name, however there is no content and the file size is only 10 bytes.
import dropbox, sys, os
try:
dbx = dropbox.Dropbox('some_access_token')
user = dbx.users_get_current_account()
except:
print ("Negative, Ghostrider")
sys.exit()
rootdir = os.getcwd()
print ("Attempting to upload...")
for subdir, dirs, files in os.walk(rootdir):
for file in files:
try:
dbx.files_upload("afolder",'/bfolder/' + file, mute=True)
print("Uploaded " + file)
except:
print("Failed to upload " + file)
print("Finished upload.")
Your call to dbx.files_upload("afolder",'/bfolder/' + file, mute=True) says: "Send the text afolder and write it as a file named '/bfolder/' + file".
From doc:
files_upload(f, path, mode=WriteMode('add', None), autorename=False, client_modified=None, mute=False)
Create a new file with the contents provided in the request.
Parameters:
f – A string or file-like obj of data.
path (str) – Path in the user’s Dropbox to save the file.
....
Meaning that f must be the content of the file (and not the filename string).
Here is a working example:
import dropbox, sys, os
dbx = dropbox.Dropbox('token')
rootdir = '/tmp/test'
print ("Attempting to upload...")
# walk return first the current folder that it walk, then tuples of dirs and files not "subdir, dirs, files"
for dir, dirs, files in os.walk(rootdir):
for file in files:
try:
file_path = os.path.join(dir, file)
dest_path = os.path.join('/test', file)
print 'Uploading %s to %s' % (file_path, dest_path)
with open(file_path) as f:
dbx.files_upload(f, dest_path, mute=True)
except Exception as err:
print("Failed to upload %s\n%s" % (file, err))
print("Finished upload.")
EDIT: For Python3 the following should be used:
dbx.files_upload(f.read(), dest_path, mute=True)
For Dropbox Business API below python code helps uploading files to dropbox.
#function code
import dropbox
def dropbox_file_upload(access_token,dropbox_file_path,local_file_name):
'''
The function upload file to dropbox.
Parameters:
access_token(str): Access token to authinticate dropbox
dropbox_file_path(str): dropboth file path along with file name
Eg: '/ab/Input/f_name.xlsx'
local_file_name(str): local file name with path from where file needs to be uploaded
Eg: 'f_name.xlsx' # if working directory
Returns:
Boolean:
True on successful upload
False on unsuccessful upload
'''
try:
dbx = dropbox.DropboxTeam(access_token)
# get the team member id for common user
members = dbx.team_members_list()
for i in range(0,len(members.members)):
if members.members[i].profile.name.display_name == logged_in_user:
member_id = members.members[i].profile.team_member_id
break
# connect to dropbox with member id
dbx = dropbox.DropboxTeam(access_token).as_user(member_id)
# upload local file to dropbox
f = open(local_file_name, 'rb')
dbx.files_upload(f.read(),dropbox_file_path)
return True
except Exception as e:
print(e)
return False

How do I upload full directory on FTP in python?

Ok, so I have to upload a directory, with subdirectories and files inside, on a FTP server. But I can't seem to get it right. I want to upload the directory as it is, with it's subdirectories and files where they were.
ftp = FTP()
ftp.connect('host',port)
ftp.login('user','pass')
filenameCV = "directorypath"
def placeFiles():
for root,dirnames,filenames in os.walk(filenameCV):
for files in filenames:
print(files)
ftp.storbinary('STOR ' + files, open(files,'rb'))
ftp.quit()
placeFiles()
There are multiple problems with your code: First, the filenames array will only contain the actual filenames, not the entire path, so you need to join it with fullpath = os.path.join(root, files) and then use open(fullpath). Secondly, you quit the FTP connection inside the loop, move that ftp.quit() down on the level of the placeFiles() function.
To recursively upload your directory, you have to walk through your root directories and at the same time through your remote directory, uploading files on the go.
Full example code:
import os.path, os
from ftplib import FTP, error_perm
host = 'localhost'
port = 21
ftp = FTP()
ftp.connect(host,port)
ftp.login('user','pass')
filenameCV = "directorypath"
def placeFiles(ftp, path):
for name in os.listdir(path):
localpath = os.path.join(path, name)
if os.path.isfile(localpath):
print("STOR", name, localpath)
ftp.storbinary('STOR ' + name, open(localpath,'rb'))
elif os.path.isdir(localpath):
print("MKD", name)
try:
ftp.mkd(name)
# ignore "directory already exists"
except error_perm as e:
if not e.args[0].startswith('550'):
raise
print("CWD", name)
ftp.cwd(name)
placeFiles(ftp, localpath)
print("CWD", "..")
ftp.cwd("..")
placeFiles(ftp, filenameCV)
ftp.quit()

How to download a file from 3ftp sites

I have a list of ftp sites ( eg:10 ) in text file and i need to download the last created file from ftp sites. Is this possible. This is my code :
import os
from ftplib import FTP
ftp = FTP("xxx.xx.xx.xx1", "USERNAME1", "PASSWORD1")
ftp = FTP("xxx.xx.xx.xx2", "USERNAME2", "PASSWORD2")
ftp = FTP("xxx.xx.xx.xx3", "USERNAME3", "PASSWORD3")
ftp = FTP("xxx.xx.xx.xx4", "USERNAME4", "PASSWORD4")
ftp = FTP("xxx.xx.xx.xx5", "USERNAME5", "PASSWORD5")
ftp.login()
ftp.retrlines("LIST")
ftp.cwd("SmythIN/2014-10-29") --- here i have a folder created by current date ...how can i pass current date folder i change directory.
ftp.cwd("subFolder") # or ftp.cwd("folderOne/subFolder")
listing = []
ftp.retrlines("LIST", listing.append)
words = listing[0].split(None, 8)
filename = words[-1].lstrip()
# download the file
local_filename = os.path.join(r"c:\myfolder", filename)
lf = open(local_filename, "wb")
ftp.retrbinary("RETR " + filename, lf.write, 8*1024)
lf.close()
updated code :
ftp.cwd("SmythIN/2014-10-29")- the directory with today date is already created.
Just looping through the servers and pulling the last file within specified directories (if I understand your question correctly) is straight forward. Remembering what server each file came from should not be problematic either since you can use different local directories on your local machine or edit the filename as the file transfers. Here is my suggestions (to be modified to your application of course):
import os
from ftplib import FTP
# read in text file containing server login information and jam into dictionary
with open('server_file.txt','r') as tmp:
servers = {}
for r in tmp.read().split('\n'):
rs = r.split(',') # split r by comma
servers[rs[0]] = {'uname':rs[1],'pwd':[rs[2]]}
# if you want to create a new directory to save the file to
heute = dt.datetime.strftime(dt.datetime.today(),'%Y%m%d')
if os.path.isdir('my_dir' + heute)==False:
os.mkdir('my_dir' + heute)
for s in servers:
ftp = FTP(s,servers[s]['uname'],servers[s]['pwd'])
ftp.cwd('desired_subdir')
# if you want to download the last file I would us nlst
with open('local_file','wb') as lf:
ftp.retrbinary('RETR' + ftp.nlst()[-1], lf.write, 8*1024)

downloaded files using ftplib don't show up in folder

This may be a stupid question. My code works fine and says that I've downloaded each file from the FTP server, but when I go to the folder on my computer where my code is the downloaded files are not there. It has worked in the past but all of sudden I dont know where my files are. What is the problem? Thanks for any help
#Extract UV files from FTP server
import ftplib
try:
# Connection information
server = 'ftp.ncep.noaa.gov'
username = 'anonymous'
password = 'anything'
# Directory and matching information
directory = '/pub/data/nccf/com/hourly/prod/uv.20130729/'#Format:YearMonthDay Remeber to change date
filematch = '*.grib2'
# Establish the connection
ftp = ftplib.FTP(server)
ftp.login(username, password)
# Change to the proper directory
ftp.cwd(directory)
# Loop through matching files and download each one individually
for filename in ftp.nlst(filematch):
fhandle = open(filename, 'wb')
print 'Getting ' + filename
ftp.retrbinary('RETR ' + filename, fhandle.write)
fhandle.close()
except ftplib.all_errors as err:
print 'Change date on directory'
print err

Categories

Resources