Cannot SCP file to AWS using boto - python

I'm trying to automate some uploading with python to my AWS EC2 server. I cannot get the ssh_client.put_file() to work. It keeps giving me either IOError: Failure or IOError: [Errno 2] No such file.
Any ideas as to what I'm missing? Can this ssh_client not be used to scp upload?
import boto
import boto.ec2
from boto.manage.cmdshell import sshclient_from_instance
import argparse
#Parse input
parser = argparse.ArgumentParser(description='Upload and train images for detection')
parser.add_argument('path_to_key', help='Path to Pem key')
parser.add_argument('path_to_tar', help='Path to positives.tar')
args = parser.parse_args()
args_keypath = args.path_to_key
args_tarpath = args.path_to_tar
# Connect to your region of choice
print "Connecting to server..."
access_key = ""
secret_access_key = ""
conn = boto.ec2.connect_to_region('us-east-1', aws_access_key_id=access_key, aws_secret_access_key=secret_access_key)
print "Connecting to instance..."
# Connect to an existing instance
reservations = conn.get_all_instances(['i-c8aab576'])
instance = reservations[0].instances[0]
# Create an SSH client for our instance
# key_path is the path to the SSH private key associated with instance
# user_name is the user to login as on the instance (e.g. ubuntu, ec2-user, etc.)
print "Creating CommandShell..."
key_path = args_keypath
ssh_client = boto.manage.cmdshell.sshclient_from_instance(instance,
key_path,
host_key_file='~/.ssh/known_hosts',
user_name='ubuntu')
status, stdout, stderr = ssh_client.run('ls -al')
print(status)
print(stdout)
print(stderr)
#Upload positives - WELL THIS ISN'T WORKING
print "Uploading file..."
local_filepath = args_tarpath
remote_filepath = "~/Sharing/"
ssh_client.put_file("/home/willem/.ssh/test.txt", "/home/ubuntu/Sharing/")
#ssh_client.put_file(local_filepath, remote_filepath)

If you have ssh login access you can use the .pem and run the command locally, my solution was:
Create the server by creating the reservation:
# Launch a fresh instance (boto 2.x). NOTE(review): `conn`, `my_AMI`,
# `my_key` and `security_group` must be defined earlier -- confirm in context.
reservation = conn.run_instances(my_AMI,
key_name=my_key,
instance_type='c4.xlarge',
security_group_ids=security_group,
placement='us-east-1d')
# run_instances returns a Reservation; the booted instance is its first entry.
instance = reservation.instances[0]
# `colored` presumably comes from termcolor -- verify the import exists.
print colored("Instance IP: %s" % instance.ip_address, 'yellow')
Then later I could scp the file:
import os  # needed for os.system below; missing from the original fragment

instance_IP = instance.ip_address
# FIX: scp targets are 'user@host:path'; the '@' was mangled to '#' by the
# paste. NOTE: string-built shell commands are injection-prone -- acceptable
# only because key_path/args_tarpath are trusted local CLI arguments.
os.system('scp -i %s %s ubuntu@%s:~/Sharing' % (key_path, args_tarpath, instance_IP))

One simple solution: specify the file name in the destination path as well. Boto in turn uses the paramiko module in which sftp.put requires the file name to be specified.

Related

How to create a Docker container that runs a python script that writes file to a location outside the container

I have a python script that I run on a Raspberry Pi that checks Gmail on a given interval and saves any attachments to a specified directory. I did not create this script and the website that I got it from doesn't seem to be active any more. All credit to knight-of-pi.org for the script.
I would like to take this script and run it in a container. I have run existing containers, but I have limited experience creating one. I am not sure I understand how I would get the python script to save the files outside of the container onto the host machine.
Here is the python script:
#!/usr/bin/env python
import os, sys, time, argparse
import email
from imapclient import IMAPClient
# credentials could be stored here permanently, but that's bad security.
# Connection defaults; each is overridable via the CLI flags registered in
# parse_arguments() below.
# FIX: this script connects with IMAPClient, so the host must be Gmail's
# IMAP endpoint (imap.gmail.com), not the SMTP one (smtp.gmail.com).
HOSTNAME = 'imap.gmail.com'
# FIX: '@' was mangled to '#' in the original paste.
USERNAME = 'example@gmail.com'
PASSWORD = 'example'
MESSAGE_SUBJECT = 'example'
MAILBOX = 'Inbox'
TARGET = '/media/USBHDD1/shares/' # this is the path where attachments are stored
CHECK_FREQ = 3600 # in seconds
def parse_arguments(sysargs):
    """Build the command-line parser and parse *sysargs*.

    Every option falls back to the module-level default constants.
    """
    description = '''The script parseIMAPemail.py is looped and repeatedly
checks a mail box if a mail with a specified subject has arrived.
If so, the emails attachment is stored. This script is a part of the tutorial
www.knight-of-pi.org/accessing-and-parsing-emails-with-the-raspberry-pi-and-imapclient'''
    parser = argparse.ArgumentParser(description=description)
    # (flags, default, help) table keeps the four near-identical
    # add_argument calls in one place.
    option_specs = (
        (('-u', '--username'), USERNAME, 'Username of the Email account'),
        (('-s', '--subject'), MESSAGE_SUBJECT, 'The subject new emails should be scanned for'),
        (('--host',), HOSTNAME, 'Name of the IMAP host webserver'),
        (('--pwd',), PASSWORD, 'Password belonging to the username'),
    )
    for flags, default, help_text in option_specs:
        parser.add_argument(*flags, nargs='?', metavar='str', type=str,
                            default=default, help=help_text)
    return parser.parse_args(sysargs)
def store_attachment(part):
""" Store attached files as they are and with the same name. """
filename = part.get_filename()
att_path = os.path.join(TARGET, filename)
if not os.path.isfile(att_path) :
fp = open(att_path, 'wb')
fp.write(part.get_payload(decode=True))
fp.close()
print "Successfully stored attachment!"
def check_attachment(mail):
""" Examine if the email has the requested subject and store the attachment if so. """
print "["+mail["From"]+"] :" + mail["Subject"]
for part in mail.walk():
if part.get_content_maintype() == 'multipart':
continue
if part.get('Content-Disposition') is None:
continue
store_attachment(part)
time.sleep(3)
def scan_emails(args, unread):
""" Scan all unread Emails for the given Subject. """
for msg_id, stuff in unread.iteritems():
new_email = email.message_from_string(unread[msg_id]['RFC822'])
if new_email['subject'] == args.subject:
print "Found subject! Storing the attachment of mail id ", msg_id
check_attachment(new_email)
def loop(args):
    """Log into the IMAP server, hand all unread mails to scan_emails(),
    then sleep for CHECK_FREQ seconds.

    Failures are logged and swallowed so the polling loop keeps running.
    """
    print('Logging into ' + args.username)
    try:
        server = IMAPClient(args.host, use_uid=True, ssl=True)
        server.login(args.username, args.pwd)
        select_info = server.select_folder(MAILBOX)
        messages = server.search(['UNSEEN'])
        all_unread = server.fetch(messages, ['RFC822'])
        scan_emails(args, all_unread)
    except Exception as err:
        # FIX: was a bare "except: print('oops')" which hid the actual error
        # and also trapped KeyboardInterrupt/SystemExit. Still best-effort,
        # but now reports what went wrong and lets Ctrl-C stop the loop.
        print('oops: %s' % err)
    time.sleep(CHECK_FREQ)
if __name__ == '__main__':
    # Parse the CLI once, then poll the mailbox forever (loop() sleeps
    # CHECK_FREQ seconds per iteration). The original wrapped the while
    # in a "try/...finally: pass", which is a no-op and is dropped here.
    args = parse_arguments(sys.argv[1:])
    while True:
        loop(args)
I have found lots of examples of running python scripts in a container and examples of writing files outside of containers, but getting the container to do both is confusing me. From my research I think I need to use volume command which I think is something like "-v $(pwd)/media/USBHDD1/shares/:/media/USBHDD1/shares/" in the run command, but I am not sure how to do that with calling the python script. I would appreciate any suggestion that could point me in the right direction.
Thanks.
This can be achieved with Bind Mounts.
Your Docker run should have the -v parameter like this:
docker run -it --name sample-container -v "$(pwd):/media/USBHDD1/shares" -d python:latest
$(pwd) expands to the current working directory on the host, and /media/USBHDD1/shares is the directory inside the container.
then, when your Python code runs in your container, write the output to the /media/USBHDD1/shares directory:
with open('/media/USBHDD1/shares/[your file name here]', 'w') as f:
f.write('example data')
These directories are synced between host and container. i.e. Write to host and data will be accessible to container. Write to container and data will be accessible to host.

How do I transfer SPECIFICALLY an Image file to from client to server using Python Paramiko [duplicate]

Aim: I am trying to use SFTP through Paramiko in Python to upload files on server pc.
What I've done: To test that functionality, I am using my localhost (127.0.0.1) IP. To achieve that I created the following code with the help of Stack Overflow suggestions.
Problem: The moment I run this code and enter the file name, I get the "IOError : Failure", despite handling that error. Here's a snapshot of the error:
import paramiko as pk
import os
userName = "sk"
ip = "127.0.0.1"
pwd = "1234"
client=""
try:
client = pk.SSHClient()
client.set_missing_host_key_policy(pk.AutoAddPolicy())
client.connect(hostname=ip, port=22, username=userName, password=pwd)
print '\nConnection Successful!'
# This exception takes care of Authentication error& exceptions
except pk.AuthenticationException:
print 'ERROR : Authentication failed because of irrelevant details!'
# This exception will take care of the rest of the error& exceptions
except:
print 'ERROR : Could not connect to %s.'%ip
local_path = '/home/sk'
remote_path = '/home/%s/Desktop'%userName
#File Upload
file_name = raw_input('Enter the name of the file to upload :')
local_path = os.path.join(local_path, file_name)
ftp_client = client.open_sftp()
try:
ftp_client.chdir(remote_path) #Test if remote path exists
except IOError:
ftp_client.mkdir(remote_path) #Create remote path
ftp_client.chdir(remote_path)
ftp_client.put(local_path, '.') #At this point, you are in remote_path in either case
ftp_client.close()
client.close()
Can you point out where's the problem and the method to resolve it?
Thanks in advance!
The second argument of SFTPClient.put (remotepath) is path to a file, not a folder.
So use file_name instead of '.':
ftp_client.put(local_path, file_name)
... assuming you are already in remote_path, as you call .chdir earlier.
To avoid a need for .chdir, you can use an absolute path:
ftp_client.put(local_path, remote_path + '/' + file_name)

How To Read/Write Files From a SFTP Server in Python? [duplicate]

I'm working on a simple tool that transfers files to a hard-coded location with the password also hard-coded. I'm a python novice, but thanks to ftplib, it was easy:
import ftplib
info= ('someuser', 'password') #hard-coded
def putfile(file, site, dir, user=(), verbose=True):
"""
upload a file by ftp to a site/directory
login hard-coded, binary transfer
"""
if verbose: print 'Uploading', file
local = open(file, 'rb')
remote = ftplib.FTP(site)
remote.login(*user)
remote.cwd(dir)
remote.storbinary('STOR ' + file, local, 1024)
remote.quit()
local.close()
if verbose: print 'Upload done.'
if __name__ == '__main__':
# Upload the file named on the command line; credentials come from `info`.
site = 'somewhere.com' #hard-coded
dir = './uploads/' #hard-coded
import sys, getpass
# NOTE(review): getpass is imported but never used here -- likely a leftover.
putfile(sys.argv[1], site, dir, user=info)
The problem is that I can't find any library that supports sFTP. What's the normal way to do something like this securely?
Edit: Thanks to the answers here, I've gotten it working with Paramiko and this was the syntax.
# SFTP upload via a raw paramiko Transport (no host-key verification).
import paramiko
host = "THEHOST.com" #hard-coded
port = 22
transport = paramiko.Transport((host, port))
password = "THEPASSWORD" #hard-coded
username = "THEUSERNAME" #hard-coded
transport.connect(username = username, password = password)
sftp = paramiko.SFTPClient.from_transport(transport)
import sys
# Remote path reuses the local file name taken from argv[1].
path = './THETARGETDIRECTORY/' + sys.argv[1] #hard-coded
localpath = sys.argv[1]
sftp.put(localpath, path)
# NOTE(review): close/close only runs on success; a failed put leaves the
# transport open -- consider try/finally.
sftp.close()
transport.close()
print 'Upload done.'
Thanks again!
Paramiko supports SFTP. I've used it, and I've used Twisted. Both have their place, but you might find it easier to start with Paramiko.
You should check out pysftp https://pypi.python.org/pypi/pysftp it depends on paramiko, but wraps most common use cases to just a few lines of code.
# Same upload via pysftp; the context manager closes the connection
# automatically, even on error.
import pysftp
import sys
path = './THETARGETDIRECTORY/' + sys.argv[1] #hard-coded
localpath = sys.argv[1]
host = "THEHOST.com" #hard-coded
password = "THEPASSWORD" #hard-coded
username = "THEUSERNAME" #hard-coded
with pysftp.Connection(host, username=username, password=password) as sftp:
sftp.put(localpath, path)
print 'Upload done.'
Here is a sample using pysftp and a private key.
import pysftp
def upload_file(file_path):
    """Upload *file_path* into the remote uploads directory over SFTP.

    Authenticates with the private key below; the connection is closed
    even if chdir/put raises.
    """
    private_key = "~/.ssh/your-key.pem" # can use password keyword in Connection instead
    srv = pysftp.Connection(host="your-host", username="user-name", private_key=private_key)
    # FIX: the original only closed on success, leaking the connection
    # whenever the transfer failed.
    try:
        srv.chdir('/var/web/public_files/media/uploads') # change directory on remote server
        srv.put(file_path) # To download a file, replace put with get
    finally:
        srv.close() # Close connection
pysftp is an easy to use sftp module that utilizes paramiko and pycrypto. It provides a simple interface to sftp.. Other things that you can do with pysftp which are quite useful:
data = srv.listdir() # Get the directory and file listing in a list
srv.get(file_path) # Download a file from remote server
srv.execute('pwd') # Execute a command on the server
More commands and about PySFTP here.
If you want easy and simple, you might also want to look at Fabric. It's an automated deployment tool like Ruby's Capistrano, but simpler and of course for Python. It's build on top of Paramiko.
You might not want to do 'automated deployment' but Fabric would suit your use case perfectly none the less. To show you how simple Fabric is: the fab file and command for your script would look like this (not tested, but 99% sure it will work):
fab_putfile.py:
from fabric.api import *
# Fabric 1.x deployment settings -- hard-coded host and credentials.
env.hosts = ['THEHOST.com']
env.user = 'THEUSER'
env.password = 'THEPASSWORD'
def put_file(file):
# Upload `file` into the target directory on every host in env.hosts.
put(file, './THETARGETDIRECTORY/') # it's copied into the target directory
Then run the file with the fab command:
fab -f fab_putfile.py put_file:file=./path/to/my/file
And you're done! :)
fsspec is a great option for this, it offers a filesystem like implementation of sftp.
# fsspec's SFTP backend exposes the remote host behind a filesystem-like API
# (paramiko under the hood). NOTE(review): host/username/password/file_name
# are assumed to be defined earlier -- confirm in context.
from fsspec.implementations.sftp import SFTPFileSystem
fs = SFTPFileSystem(host=host, username=username, password=password)
# list a directory
fs.ls("/")
# open a file
with fs.open(file_name) as file:
content = file.read()
Also worth noting that fsspec uses paramiko in the implementation.
With RSA Key then refer here
Snippet:
import pysftp
import paramiko
from base64 import decodebytes
# Pin the server's RSA host key explicitly instead of relying on
# ~/.ssh/known_hosts. NOTE(review): host/username/password/directory and
# file_to_sent_to_ftp are assumed defined earlier -- confirm in context.
keydata = b"""AAAAB3NzaC1yc2EAAAADAQABAAABAQDl"""
key = paramiko.RSAKey(data=decodebytes(keydata))
cnopts = pysftp.CnOpts()
cnopts.hostkeys.add(host, 'ssh-rsa', key)
with pysftp.Connection(host=host, username=username, password=password, cnopts=cnopts) as sftp:
with sftp.cd(directory):
sftp.put(file_to_sent_to_ftp)
Twisted can help you with what you are doing, check out their documentation, there are plenty of examples. Also it is a mature product with a big developer/user community behind it.
There are a bunch of answers that mention pysftp, so in the event that you want a context manager wrapper around pysftp, here is a solution that is even less code that ends up looking like the following when used
path = "sftp://user:p@ssw0rd@test.com/path/to/file.txt"
# Read a file
with open_sftp(path) as f:
s = f.read()
print s
# Write to a file
with open_sftp(path, mode='w') as f:
f.write("Some content.")
The (fuller) example: http://www.prschmid.com/2016/09/simple-opensftp-context-manager-for.html
This context manager happens to have auto-retry logic baked in in the event you can't connect the first time around (which surprisingly happens more often than you'd expect in a production environment...)
The context manager gist for open_sftp: https://gist.github.com/prschmid/80a19c22012e42d4d6e791c1e4eb8515
Paramiko is so slow. Use subprocess and shell, here is an example:
import subprocess  # both missing from the original fragment
import sys

remote_file_name = "filename"
remotedir = "/remote/dir"
localpath = "/local/file/dir"
# lftp batch script; the {placeholders} are filled by .format() below.
# FIX: the "get" line previously read "get (unknown)" (a paste artifact),
# so the "filename" format argument was never used and no file was fetched.
ftp_cmd_p = """
#!/bin/sh
lftp -u username,password sftp://ip:port <<EOF
cd {remotedir}
lcd {localpath}
get {filename}
EOF
"""
subprocess.call(ftp_cmd_p.format(remotedir=remotedir,
                                 localpath=localpath,
                                 filename=remote_file_name),
                shell=True, stdout=sys.stdout, stderr=sys.stderr)
PyFilesystem with its sshfs is one option. It uses Paramiko under the hood and provides a nicer paltform independent interface on top.
import fs
sf = fs.open_fs("sftp://[user[:password]@]host[:port]/[directory]")
sf.makedir('my_dir')
or
from fs.sshfs import SSHFS
sf = SSHFS(...
Here's a generic function that will download any given sftp url to a specified path
from urllib.parse import urlparse
import paramiko
url = 'sftp://username:password#hostname/filepath.txt'
def sftp_download(url, dest):
    """Download the file addressed by an sftp:// *url* to local path *dest*.

    The URL supplies host, credentials and remote path. Both the Transport
    and the SFTP client are closed by their context managers.
    """
    url = urlparse(url)
    # FIX: honour an explicit port (sftp://host:2222/...) instead of always
    # dialling 22; urlparse exposes it as url.port (None when absent).
    with paramiko.Transport((url.hostname, url.port or 22)) as transport:
        transport.connect(None, url.username, url.password)
        with paramiko.SFTPClient.from_transport(transport) as sftp:
            sftp.get(url.path, dest)
Call it with
sftp_download(url, "/tmp/filepath.txt")

File upload through SFTP (Paramiko) in Python gives IOError: Failure

Aim: I am trying to use SFTP through Paramiko in Python to upload files on server pc.
What I've done: To test that functionality, I am using my localhost (127.0.0.1) IP. To achieve that I created the following code with the help of Stack Overflow suggestions.
Problem: The moment I run this code and enter the file name, I get the "IOError : Failure", despite handling that error. Here's a snapshot of the error:
import paramiko as pk
import os
userName = "sk"
ip = "127.0.0.1"
pwd = "1234"
client=""
try:
client = pk.SSHClient()
client.set_missing_host_key_policy(pk.AutoAddPolicy())
client.connect(hostname=ip, port=22, username=userName, password=pwd)
print '\nConnection Successful!'
# This exception takes care of Authentication error& exceptions
except pk.AuthenticationException:
print 'ERROR : Authentication failed because of irrelevant details!'
# This exception will take care of the rest of the error& exceptions
except:
print 'ERROR : Could not connect to %s.'%ip
local_path = '/home/sk'
remote_path = '/home/%s/Desktop'%userName
#File Upload
file_name = raw_input('Enter the name of the file to upload :')
local_path = os.path.join(local_path, file_name)
ftp_client = client.open_sftp()
try:
ftp_client.chdir(remote_path) #Test if remote path exists
except IOError:
ftp_client.mkdir(remote_path) #Create remote path
ftp_client.chdir(remote_path)
ftp_client.put(local_path, '.') #At this point, you are in remote_path in either case
ftp_client.close()
client.close()
Can you point out where's the problem and the method to resolve it?
Thanks in advance!
The second argument of SFTPClient.put (remotepath) is path to a file, not a folder.
So use file_name instead of '.':
ftp_client.put(local_path, file_name)
... assuming you are already in remote_path, as you call .chdir earlier.
To avoid a need for .chdir, you can use an absolute path:
ftp_client.put(local_path, remote_path + '/' + file_name)

Python Move a File from Remote Machine to Local

I am new in python and trying different stuff.
Currently trying to copy a text file to_copy.txt from a remote machine with local ip 192.168.1.101 to my current machine.
What i tried from googling does not seem to work.
# Copy a file FROM a remote machine using paramiko + the scp module.
import paramiko
from scp import SCPClient

ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# FIX: SSHClient.connect() treats its first argument as the HOSTNAME only.
# Passing "testme@192.168.1.101" made DNS try to resolve that whole string,
# which is exactly the socket.gaierror in the traceback below; the user
# belongs in the username= keyword.
ssh.connect("192.168.1.101", username="testme", password="look420")
print("Connected")
scp = SCPClient(ssh.get_transport())
scp.get("/home/testme/target_folder/to_copy.txt")
scp.close()
But, when i run this i get error;
Traceback (most recent call last):
File "/home/uc/Python_Projects/MoveFileAndFolder/move_remote.py", line 7, in <module>
ssh.connect("testme#192.168.1.101", password="look420")
File "/usr/local/lib/python3.4/dist-packages/paramiko/client.py", line 296, in connect
to_try = list(self._families_and_addresses(hostname, port))
File "/usr/local/lib/python3.4/dist-packages/paramiko/client.py", line 200, in _families_and_addresses
addrinfos = socket.getaddrinfo(hostname, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
File "/usr/lib/python3.4/socket.py", line 530, in getaddrinfo
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
socket.gaierror: [Errno -2] Name or service not known
What i am doing wrong here?
NOTE: Current machine is running Debian Jessie and the remote machine runs Ubuntu 14.04.4 LTS
Did you try
ssh.connect("192.168.1.101", username="testme", password="look420")
Please refer Doc
The port for scp (22) is likely not open on the remote machine. Please check with a command line call to confirm that you can indeed make an ssh or scp connection.
See here for more details
https://help.ubuntu.com/community/SSH/TransferFiles
"""
Download file resources from a remote server to the local machine through paramiko.
author: gxcuizy
time: 2018-08-01
"""
import paramiko
import os
from stat import S_ISDIR as isdir
def down_from_remote(sftp_obj, remote_dir_name, local_dir_name):
    """Recursively download a remote file or directory tree over SFTP.

    sftp_obj        -- an SFTP client exposing stat/listdir/get
    remote_dir_name -- remote path (file or directory)
    local_dir_name  -- matching local destination path
    """
    remote_file = sftp_obj.stat(remote_dir_name)
    if isdir(remote_file.st_mode):
        # Folder: can't be fetched directly, recurse into its entries.
        check_local_dir(local_dir_name)
        print('Start downloading folder: ' + remote_dir_name)
        # FIX: use the sftp_obj parameter here and below; the original
        # referenced a global `sftp`, so the function only worked by accident
        # when a module-level client with that exact name existed.
        for remote_file_name in sftp_obj.listdir(remote_dir_name):
            sub_remote = os.path.join(remote_dir_name, remote_file_name).replace('\\', '/')
            sub_local = os.path.join(local_dir_name, remote_file_name).replace('\\', '/')
            down_from_remote(sftp_obj, sub_remote, sub_local)
    else:
        # Plain file: download directly.
        print('Start downloading file: ' + remote_dir_name)
        sftp_obj.get(remote_dir_name, local_dir_name)
def check_local_dir(local_dir_name):
"""Create the local folder (and parents) if it does not exist yet."""
if not os.path.exists(local_dir_name):
os.makedirs(local_dir_name)
# FIX: the guard read `if name == "main":` -- a paste artifact that raises
# NameError at import time; the dunder form is required for the script to run.
if __name__ == "__main__":
    # Server connection information
    host_name = 'ip'
    user_name = 'name'
    password = 'pwd'
    port = 22
    # Remote file path (absolute path required)
    remote_dir = '/data/nfs/zdlh/pdf/2018/07/31'
    # Local file storage path (either absolute or relative)
    local_dir = 'file_download/'
    # Connect to remote server
    t = paramiko.Transport((host_name, port))
    t.connect(username=user_name, password=password)
    sftp = paramiko.SFTPClient.from_transport(t)
    # Remote file start download
    down_from_remote(sftp, remote_dir, local_dir)
    # Close connection
    t.close()
Download file resources from a remote server to a local through paramiko
import paramiko
import os
from stat import S_ISDIR as isdir
def down_from_remote(sftp_obj, remote_dir_name, local_dir_name):
    """Recursively download a remote file or directory tree over SFTP.

    sftp_obj        -- an SFTP client exposing stat/listdir/get
    remote_dir_name -- remote path (file or directory)
    local_dir_name  -- matching local destination path
    """
    remote_file = sftp_obj.stat(remote_dir_name)
    if isdir(remote_file.st_mode):
        # Folder, can't download directly, need to continue cycling
        check_local_dir(local_dir_name)
        print('Start downloading folder: ' + remote_dir_name)
        # FIX: the original iterated `sftp.listdir(...)` and called
        # `sftp.get(...)` below -- a global, not the sftp_obj parameter --
        # so the function broke unless a module-level `sftp` existed.
        for remote_file_name in sftp_obj.listdir(remote_dir_name):
            sub_remote = os.path.join(remote_dir_name, remote_file_name).replace('\\', '/')
            sub_local = os.path.join(local_dir_name, remote_file_name).replace('\\', '/')
            down_from_remote(sftp_obj, sub_remote, sub_local)
    else:
        # Files, downloading directly
        print('Start downloading file: ' + remote_dir_name)
        sftp_obj.get(remote_dir_name, local_dir_name)
def check_local_dir(local_dir_name):
    """Ensure the local directory *local_dir_name* exists, creating it
    (with any missing parents) when necessary."""
    if os.path.exists(local_dir_name):
        return
    os.makedirs(local_dir_name)
if __name__ == "__main__":
"""program main entry"""
# Server connection information
host_name = 'ipaddress'
user_name = 'username'
password = 'password'
port = 22
# Remote file path (absolute path required)
remote_dir = '/data/nfs/zdlh/pdf/2018/07/31'
# Local file storage path (either absolute or relative)
local_dir = 'file_download/'
# Connect to remote server
t = paramiko.Transport((host_name, port))
t.connect(username=user_name, password=password)
sftp = paramiko.SFTPClient.from_transport(t)
# Remote file start download
down_from_remote(sftp, remote_dir, local_dir)
# Close connection
# NOTE(review): the Transport is not closed when an exception occurs above;
# consider wrapping the download in try/finally.
t.close()

Categories

Resources