Python Move a File from Remote Machine to Local

I am new to Python and trying different stuff.
Currently I am trying to copy a text file, to_copy.txt, from a remote machine with the local IP 192.168.1.101 to my current machine.
What I tried from googling does not seem to work.
import paramiko
from scp import SCPClient
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect("testme#192.168.1.101", password="look420")
print("Connected")
scp = SCPClient(ssh.get_transport())
scp.get("/home/testme/target_folder/to_copy.txt")
scp.close()
But when I run this, I get the following error:
Traceback (most recent call last):
  File "/home/uc/Python_Projects/MoveFileAndFolder/move_remote.py", line 7, in <module>
    ssh.connect("testme@192.168.1.101", password="look420")
  File "/usr/local/lib/python3.4/dist-packages/paramiko/client.py", line 296, in connect
    to_try = list(self._families_and_addresses(hostname, port))
  File "/usr/local/lib/python3.4/dist-packages/paramiko/client.py", line 200, in _families_and_addresses
    addrinfos = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
  File "/usr/lib/python3.4/socket.py", line 530, in getaddrinfo
    for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
socket.gaierror: [Errno -2] Name or service not known
What am I doing wrong here?
NOTE: Current machine is running Debian Jessie and the remote machine runs Ubuntu 14.04.4 LTS

Did you try
ssh.connect("192.168.1.101", username="testme", password="look420")
SSHClient.connect expects the hostname alone and takes the username as a separate parameter; passing "testme@192.168.1.101" makes getaddrinfo try to resolve that whole string as a hostname, which is why you get socket.gaierror. Please refer to the paramiko documentation.
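For reference, a minimal sketch of the whole flow with the hostname and username separated (same host, credentials, and file path as in the question):
import paramiko
from scp import SCPClient

ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# The username is a separate parameter; the first argument is the hostname only
ssh.connect("192.168.1.101", username="testme", password="look420")

scp = SCPClient(ssh.get_transport())
scp.get("/home/testme/target_folder/to_copy.txt")  # saves into the current working directory
scp.close()
ssh.close()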

The port scp uses (22) is likely not open on the remote machine. Please check with a command-line call to confirm that you can indeed make an ssh or scp connection.
See here for more details:
https://help.ubuntu.com/community/SSH/TransferFiles
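If you would rather verify reachability from Python, a quick sketch (assuming the IP from the question) is to open a plain socket to port 22 and read the SSH banner:
import socket

# If this fails or times out, no ssh/scp connection will work either
sock = socket.create_connection(("192.168.1.101", 22), timeout=5)
print(sock.recv(64))  # an SSH server announces itself, e.g. b'SSH-2.0-OpenSSH_6.6...'
sock.close()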

""
Download file resources from remote server to local through paramiko
author: gxcuizy
time: 2018-08-01
"""
import paramiko
import os
from stat import S_ISDIR as isdir


def down_from_remote(sftp_obj, remote_dir_name, local_dir_name):
    """Download files remotely"""
    remote_file = sftp_obj.stat(remote_dir_name)
    if isdir(remote_file.st_mode):
        # Folder, can't download directly, need to recurse
        check_local_dir(local_dir_name)
        print('Start downloading folder: ' + remote_dir_name)
        for remote_file_name in sftp_obj.listdir(remote_dir_name):
            sub_remote = os.path.join(remote_dir_name, remote_file_name)
            sub_remote = sub_remote.replace('\\', '/')
            sub_local = os.path.join(local_dir_name, remote_file_name)
            sub_local = sub_local.replace('\\', '/')
            down_from_remote(sftp_obj, sub_remote, sub_local)
    else:
        # File, download directly
        print('Start downloading file: ' + remote_dir_name)
        sftp_obj.get(remote_dir_name, local_dir_name)


def check_local_dir(local_dir_name):
    """Create the local folder if it does not exist"""
    if not os.path.exists(local_dir_name):
        os.makedirs(local_dir_name)


if __name__ == "__main__":
    # Server connection information
    host_name = 'ip'
    user_name = 'name'
    password = 'pwd'
    port = 22
    # Remote file path (absolute path required)
    remote_dir = '/data/nfs/zdlh/pdf/2018/07/31'
    # Local file storage path (either absolute or relative)
    local_dir = 'file_download/'
    # Connect to the remote server
    t = paramiko.Transport((host_name, port))
    t.connect(username=user_name, password=password)
    sftp = paramiko.SFTPClient.from_transport(t)
    # Start downloading the remote files
    down_from_remote(sftp, remote_dir, local_dir)
    # Close the connection
    t.close()
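A side note on the connection setup: if you already have a paramiko.SSHClient (as in the question above), you do not need to build a Transport by hand; SSHClient.open_sftp() returns an equivalent SFTPClient. A minimal sketch reusing the question's connection details:
import paramiko

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect("192.168.1.101", username="testme", password="look420")
sftp = ssh.open_sftp()  # same object type as SFTPClient.from_transport(t)
sftp.get("/home/testme/target_folder/to_copy.txt", "to_copy.txt")
ssh.close()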


Related


Downloading CSV inside a .zip with pysftp

I'm trying to load a .csv file stored on an FTP server (SFTP protocol). I'm using Python with the pysftp library. On the server, the CSV file is inside a .zip file. Is there a way to open the zip and retrieve only the csv file inside it?
Thank you in advance.
import pysftp

cnopts = pysftp.CnOpts()
cnopts.hostkeys = None
# Make connection to sFTP
with pysftp.Connection(hostname,
                       username=sftp_username,
                       password=sftp_pw,
                       cnopts=cnopts) as sftp:
    with pysftp.cd(download_directory):
        with sftp.cd('download_directory'):
            print(f'Downloading this file: {filename}')
            sftp.get(filename, preserve_mtime=True)
If you have ssh access to the remote host, know the remote path to the zip file you want, and know which zip utilities are on that host, you can use your ssh client to run the unzip command remotely and capture its output. Here, my target is a Linux machine and the zipfile is under the login user's home directory, so I can use the paramiko ssh client to do the work.
It's a good idea to log into the remote server via ssh first and look around to see what the path structure is like.
import sys
import paramiko
import shutil


def sshclient_exec_command_binary(sshclient, command, bufsize=-1,
                                  timeout=None, get_pty=False):
    """Paramiko SSHClient helper that implements exec_command with binary
    output.
    """
    chan = sshclient._transport.open_session()
    if get_pty:
        chan.get_pty()
    chan.settimeout(timeout)
    chan.exec_command(command)
    stdin = chan.makefile('wb', bufsize)
    stdout = chan.makefile('rb', bufsize)
    stderr = chan.makefile_stderr('rb', bufsize)
    return stdin, stdout, stderr


# example gets user/pw from command line
if len(sys.argv) != 3:
    print("usage: test.py username password")
    exit(1)
username, password = sys.argv[1:3]

# put your host/file info here
hostname = "localhost"
remote_zipfile = "tmp/mytest.zip"
file_to_extract = "myfile"

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname, username=username, password=password)

unzip_cmd = "unzip -p {} {}".format(remote_zipfile, file_to_extract)
print("running", unzip_cmd)
stdin, out, err = sshclient_exec_command_binary(ssh, unzip_cmd)

# if the command worked, out is a file-like object to read.
print("writing", file_to_extract)
with open(file_to_extract, 'wb') as out_fp:
    shutil.copyfileobj(out, out_fp)
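Note that nothing above checks whether the remote unzip actually succeeded; before trusting the extracted bytes, it may be worth reading the err stream (err.read()) and confirming it is empty.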

File upload through SFTP (Paramiko) in Python gives IOError: Failure

Aim: I am trying to use SFTP through Paramiko in Python to upload files to a server PC.
What I've done: To test that functionality, I am using my localhost (127.0.0.1) IP. To achieve that, I created the following code with the help of Stack Overflow suggestions.
Problem: The moment I run this code and enter the file name, I get "IOError: Failure", despite handling that error.
import paramiko as pk
import os

userName = "sk"
ip = "127.0.0.1"
pwd = "1234"
client = ""
try:
    client = pk.SSHClient()
    client.set_missing_host_key_policy(pk.AutoAddPolicy())
    client.connect(hostname=ip, port=22, username=userName, password=pwd)
    print '\nConnection Successful!'
# This exception takes care of authentication errors & exceptions
except pk.AuthenticationException:
    print 'ERROR : Authentication failed because of irrelevant details!'
# This exception will take care of the rest of the errors & exceptions
except:
    print 'ERROR : Could not connect to %s.' % ip
local_path = '/home/sk'
remote_path = '/home/%s/Desktop' % userName
# File upload
file_name = raw_input('Enter the name of the file to upload :')
local_path = os.path.join(local_path, file_name)
ftp_client = client.open_sftp()
try:
    ftp_client.chdir(remote_path)  # Test if remote path exists
except IOError:
    ftp_client.mkdir(remote_path)  # Create remote path
    ftp_client.chdir(remote_path)
ftp_client.put(local_path, '.')  # At this point, you are in remote_path in either case
ftp_client.close()
client.close()
Can you point out where the problem is and how to resolve it?
Thanks in advance!
The second argument of SFTPClient.put (remotepath) is a path to a file, not a folder.
So use file_name instead of '.':
ftp_client.put(local_path, file_name)
... assuming you are already in remote_path, as you call .chdir earlier.
To avoid the need for .chdir, you can use an absolute path:
ftp_client.put(local_path, remote_path + '/' + file_name)

How to copy file from FTP server to another FTP server (platform independently)

I have made a script which copies a file from a local machine to an FTP server. I referred to this link to make the script: Upload folders from local system to FTP using Python script. Now I want to copy a file from one FTP server to another remote FTP machine in a different location using a Python script. How can I do this?
The file copy could be done with the rsync command, but I want to do it with a Python script.
Code:
import ftplib
import os

server = 'host'
username = 'name'
password = 'pass'
ftp = ftplib.FTP(server, username, password)
Path = 'path'  # source
val = "/des/"  # destination


def copy(source, destination):
    print(source)
    print(destination)
    os.chdir(source)
    ftp.cwd(destination)
    if "C:\\" in Path or "c:\\" in Path:
        ftp_path = (source).split("\\")[-1]
    else:
        ftp_path = (source).split("/")[-1]
    directory = destination + ftp_path
    mkdir(directory)
    ftp.cwd(directory)
    read = os.listdir(source)
    for file in read:
        print(file)
        if "C:\\" in Path or "c:\\" in Path:
            Type = source + "\\" + file
        else:
            Type = source + "/" + file
        print(Type)
        print()
        if os.path.isdir(Type):  # If Type is a folder, it will create a new folder
            copy(Type, directory + "/")
        elif os.path.isfile(Type):  # If Type is a file, it will create the file
            print(Type)
            current_dir = ftp.pwd() + "/"
            f = Type
            fh = open(f, 'rb')
            ftp.storbinary('STOR %s' % current_dir + file, fh)
            fh.close()


def mkdir(path):
    # print(path)
    ftp.mkd(path)


copy(Path, val)
ftp.close()
In general, you cannot transfer a file from one remote FTP server to another remote FTP server if the FTP protocol is the only way you can access the machines.
There's the FXP protocol, which allows that, but it's typically not allowed on most FTP servers.
If you have other access to one of the servers, like SSH, you can of course automatically log in to that server and then run an FTP client on it to upload/download to/from the other server.
See also FTP copy a file to another place in same FTP.
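If routing the data through the machine running the script is acceptable, a minimal sketch of that workaround with ftplib (the hostnames, credentials, and paths below are hypothetical placeholders):
import io
import ftplib

# Hypothetical servers and credentials - replace with real ones
src = ftplib.FTP('ftp1.example.com', 'user1', 'pass1')
dst = ftplib.FTP('ftp2.example.com', 'user2', 'pass2')

# Download into an in-memory buffer, then re-upload it
buf = io.BytesIO()
src.retrbinary('RETR /path/on/source/file.txt', buf.write)
buf.seek(0)
dst.storbinary('STOR /path/on/dest/file.txt', buf)

src.quit()
dst.quit()
Note this is not a true server-to-server transfer; every byte passes through the local machine, which is exactly the limitation described above.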

Cannot SCP file to AWS using boto

I'm trying to automate some uploading with Python to my AWS EC2 server. I cannot get ssh_client.put_file() to work. It keeps giving me either IOError: Failure or IOError: [Errno 2] No such file.
Any ideas as to what I'm missing? Can this ssh_client not be used for scp uploads?
import boto
import boto.ec2
from boto.manage.cmdshell import sshclient_from_instance
import argparse

# Parse input
parser = argparse.ArgumentParser(description='Upload and train images for detection')
parser.add_argument('path_to_key', help='Path to Pem key')
parser.add_argument('path_to_tar', help='Path to positives.tar')
args = parser.parse_args()
args_keypath = args.path_to_key
args_tarpath = args.path_to_tar

# Connect to your region of choice
print "Connecting to server..."
access_key = ""
secret_access_key = ""
conn = boto.ec2.connect_to_region('us-east-1', aws_access_key_id=access_key, aws_secret_access_key=secret_access_key)
print "Connecting to instance..."
# Connect to an existing instance
reservations = conn.get_all_instances(['i-c8aab576'])
instance = reservations[0].instances[0]

# Create an SSH client for our instance
# key_path is the path to the SSH private key associated with instance
# user_name is the user to login as on the instance (e.g. ubuntu, ec2-user, etc.)
print "Creating CommandShell..."
key_path = args_keypath
ssh_client = boto.manage.cmdshell.sshclient_from_instance(instance,
                                                          key_path,
                                                          host_key_file='~/.ssh/known_hosts',
                                                          user_name='ubuntu')
status, stdout, stderr = ssh_client.run('ls -al')
print(status)
print(stdout)
print(stderr)

# Upload positives - WELL THIS ISN'T WORKING
print "Uploading file..."
local_filepath = args_tarpath
remote_filepath = "~/Sharing/"
ssh_client.put_file("/home/willem/.ssh/test.txt", "/home/ubuntu/Sharing/")
#ssh_client.put_file(local_filepath, remote_filepath)
If you have ssh login access, you can use the .pem key and run the scp command locally. My solution was:
Create the server by creating the reservation:
reservation = conn.run_instances(my_AMI,
                                 key_name=my_key,
                                 instance_type='c4.xlarge',
                                 security_group_ids=security_group,
                                 placement='us-east-1d')
instance = reservation.instances[0]
print colored("Instance IP: %s" % instance.ip_address, 'yellow')
Then later I could scp the file:
instance_IP = instance.ip_address
os.system('scp -i %s %s ubuntu@%s:~/Sharing' % (key_path, args_tarpath, instance_IP))
One simple solution: specify the file name in the destination path as well. Boto uses the paramiko module underneath, and paramiko's sftp.put requires the remote file name to be specified.
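Applied to the call in the question, that would look like the following sketch (same paths as in the question, with the file name appended):
ssh_client.put_file("/home/willem/.ssh/test.txt", "/home/ubuntu/Sharing/test.txt")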
