Async download of a TLS certificate chain in Python

I want to download the TLS certificate chain for a given website.
I have working code using blocking sockets, based on the code provided in
Getting certificate chain with Python 3.3 SSL module.
from OpenSSL import SSL
import socket

def get_certificates(hostname, port):
    context = SSL.Context(method=SSL.TLSv1_METHOD)
    conn = SSL.Connection(context, socket=socket.socket(family=socket.AF_INET, type=socket.SOCK_STREAM))
    conn.settimeout(1)
    conn.connect((hostname, port))
    conn.setblocking(1)
    conn.set_tlsext_host_name(hostname.encode())  # SNI must be set before the handshake
    conn.do_handshake()
    chain = conn.get_peer_cert_chain()
    conn.close()
    return chain
def main():
    hostname = 'www.google.com'
    port = 443
    chain = get_certificates(hostname, port)
This code runs fine. I want to use asyncio so that processing a large list of hostnames is more performant, but I couldn't find a clear way to do it. What's the best way?
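One possible approach (a sketch, not from the original thread): keep the blocking pyOpenSSL helper above and fan it out over a thread pool from asyncio, so many hostnames are fetched concurrently. This assumes Python 3.9+ for asyncio.to_thread; on older versions loop.run_in_executor can be used instead.

import asyncio

async def get_all_chains(hostnames, port=443):
    # Run the blocking get_certificates() in worker threads and gather the results.
    tasks = [asyncio.to_thread(get_certificates, host, port) for host in hostnames]
    return await asyncio.gather(*tasks, return_exceptions=True)

# Example usage (hypothetical host list):
# chains = asyncio.run(get_all_chains(['www.google.com', 'www.example.com']))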

Related

Python - Paramiko Client over SSL Socket - Non-Blocking Issue

I wrote some Python code which enables the user to update my server with an edited HTML file and upload images. However, I am encountering some issues with the connection.
On the server side I am running an Ubuntu virtual machine in VMware behind a pfSense router running HAProxy. The proxy allows users with the right client certificates to SSH into the right machine. This setup has been working for a long time with no issues.
I think the problem lies in my SFTP code: the socket closes because the SSL-wrapped socket is not handled in a non-blocking way. However, I could not think of or find a way to fix my code, especially since I am no expert with these packages. The Paramiko client and SFTP client have both been tested separately against the remote server over a VPN connection, and they worked fine in that situation. The certificates the SSL socket uses also work correctly.
Versions:
Python: 3.8.3
openssl: 1.1.1f
paramiko: 2.7.2
import socket
import ssl
import paramiko

class GE_SFTP_Client:
    def __init__(self, Username, Password):
        self.Username = Username
        self.Password = Password

        # Wrap a plain TCP socket in TLS with a client certificate, then
        # tunnel the SSH connection through it.
        context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)  # Verify server cert
        context.load_cert_chain(certfile=_client_cert, keyfile=_client_key)  # Load client cert
        context.set_alpn_protocols(['ssh/2.0'])

        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.conn = context.wrap_socket(self.s, server_side=False, server_hostname=_target_host)
        self.conn.connect((_proxy_host, _proxy_port))

        self.client = paramiko.SSHClient()
        self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

        self.connected_status = False
        SSH_Connection_Tries = 0
        while SSH_Connection_Tries < 4:  # Try 4 times to connect.
            try:
                self.client.connect(
                    hostname=_target_host, username=self.Username, password=self.Password,
                    sock=self.conn, timeout=200, banner_timeout=200, auth_timeout=200
                )
                self.connected_status = True
                break
            except Exception:
                SSH_Connection_Tries += 1

        if self.connected_status:
            self.sftp = self.client.open_sftp()

    def GE_SFTP_ClientEnd(self):
        try:
            self.sftp.close()
        except AttributeError:
            pass
        self.client.close()
        self.conn.close()
        self.s.close()
An example of the output when using the Paramiko SFTP client to get() a few images:
succes! /images/fulls/3.jpg
succes! /images/fulls/2.jpg
succes! /images/fulls/9.jpg
Socket exception: A non-blocking socket operation could not be completed immediately (10035)
Thank you, any advice you have would be greatly appreciated!
I abandoned the idea of using the standard ssl wrapper and found a different way of doing things, namely with the asyncssh package combined with asyncio, since together they provide the tools needed to tackle this problem.
The connection is now much more stable, even when downloading larger files. I hope this might help someone else as well! :)
import socket
import ssl
import asyncio
import asyncssh

class SSL_Socket:
    # asyncssh uses this object's create_connection() (via the 'tunnel' argument)
    # to open the TLS-wrapped connection to the proxy, then runs SSH over it.
    async def create_connection(self, protocol_factory, host, port):
        loop = asyncio.get_event_loop()
        context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)  # Verify server cert
        context.load_cert_chain(certfile=_client_cert, keyfile=_client_key)  # Load client cert
        context.set_alpn_protocols(['ssh/2.0'])
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((_proxy_host, _proxy_port))
        return (await loop.create_connection(protocol_factory, sock=s, ssl=context, server_hostname=host))

async def WorkingExample():
    async with asyncssh.connect(_target_host, tunnel=SSL_Socket(), username=Username, password=Password, known_hosts=_known_hosts) as conn:
        async with conn.start_sftp_client() as sftp:
            print(await sftp.stat(f'{remote_dir}/index.html'))
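For completeness, the coroutine can be driven from synchronous code with asyncio.run (Python 3.7+); the credential and host variables above are assumed to be defined elsewhere in the module.

asyncio.run(WorkingExample())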

What details about the host machine does python send to the web server while establishing a connection through sockets?

I am building a passive reconnaissance tool, and one of its functions is to get certificate info about a domain.
What details of my machine are being sent to the web server?
The code below is used to get the certificate info.
from OpenSSL import SSL
from cryptography import x509
from cryptography.x509.oid import NameOID
import idna
from socket import socket
from collections import namedtuple

HostInfo = namedtuple(field_names='cert hostname peername', typename='HostInfo')

HOSTS = [
    ('google.com', 443),
    ('yahoo.com', 443),
    ('yahoo.com', 443),
]

def get_certificate(hostname, port):
    hostname_idna = idna.encode(hostname)
    sock = socket()
    sock.connect((hostname, port))
    peername = sock.getpeername()
    ctx = SSL.Context(SSL.SSLv23_METHOD)  # most compatible
    ctx.check_hostname = False
    ctx.verify_mode = SSL.VERIFY_NONE
    sock_ssl = SSL.Connection(ctx, sock)
    sock_ssl.set_connect_state()
    sock_ssl.set_tlsext_host_name(hostname_idna)  # send SNI for the requested host
    sock_ssl.do_handshake()
    cert = sock_ssl.get_peer_certificate()
    crypto_cert = cert.to_cryptography()
    sock_ssl.close()
    sock.close()
    return HostInfo(cert=crypto_cert, peername=peername, hostname=hostname)
import concurrent.futures

if __name__ == '__main__':
    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as e:
        for hostinfo in e.map(lambda x: get_certificate(x[0], x[1]), HOSTS):
            print_basic_info(hostinfo)
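print_basic_info is not defined in the snippet; a minimal hypothetical version, assuming the cryptography certificate object stored in HostInfo, could look like this:

def print_basic_info(hostinfo):
    # Hypothetical helper (not part of the original post): print a few
    # basic fields from the certificate held in a HostInfo tuple.
    cert = hostinfo.cert
    common_name = cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)
    print(f'{hostinfo.hostname} ({hostinfo.peername[0]}:{hostinfo.peername[1]})')
    print(f'    commonName: {common_name[0].value if common_name else "-"}')
    print(f'    notBefore:  {cert.not_valid_before}')
    print(f'    notAfter:   {cert.not_valid_after}')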
Is my IP address being sent?
Is Python sending any User-Agent, and what other details are sent while establishing a connection?
While Python doesn't explicitly send your IP address, it is always included in any TCP connection; if it weren't, you could not receive the response from the server.
According to http://evanhahn.com/python-requests-library-useragent, the User-Agent is
python-requests/{package version} {runtime}/{runtime version} {uname}/{uname -r} when using python-requests, but you can override it by setting the request header, as sketched below.
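For example (hypothetical URL and User-Agent string), the header can be overridden per request:

import requests

# Hypothetical example: override python-requests' default User-Agent header.
response = requests.get('https://example.com', headers={'User-Agent': 'my-recon-tool/1.0'})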
However, you are not using python-requests but raw sockets, so nothing is transferred except what you explicitly send, plus your IP address, the TCP connection metadata, and the TLS handshake itself (which carries the SNI hostname you set with set_tlsext_host_name and the cipher suites your client offers).

How can I do certificate pinning for TLS sockets in python 2.7?

I have a Python TLS server using a self-signed certificate. That works. The code looks like this for now:
#!/usr/bin/python
import socket, ssl

context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
context.load_cert_chain(certfile="server.crt", keyfile="server.key")

bindsocket = socket.socket()
bindsocket.bind(('127.0.0.1', 8888))
bindsocket.listen(5)

while True:
    newsocket, fromaddr = bindsocket.accept()
    connstream = context.wrap_socket(newsocket, server_side=True)
    try:
        print("Got connection!")
    finally:
        connstream.shutdown(socket.SHUT_RDWR)
        connstream.close()
I am now trying to make a client in Python that connects to this server. On the first connection attempt, I'd like to retrieve either the public key or a hash of the public key, and then verify it on all future connections. How can I do this with Python and the ssl package?
This is the code I'm playing with:
#!/usr/bin/python
import ssl, socket, pprint
context = ssl.SSLContext(ssl.PROTOCOL_TLS)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = False
cnx = context.wrap_socket(socket.socket(socket.AF_INET), certfile="server.crt")
cnx.connect(('127.0.0.1', 8888))
pprint.pprint(cnx.getpeercert())
As it stands right now, it fails because there is no certificate chain to verify the cert. I don't care about that, though. All I care about is that the server I'm talking to has the private key that matches the public key. What do I do?
So I've made this work, though it's not exactly what I hoped for. My biggest complaint is that it requires storing the entire server certificate in a file on disk, whereas really all I care about is the public key. However, here's what I have working:
#!/usr/bin/python
import ssl, socket, pprint, os, sys

# If we haven't retrieved the certificate previously...
if not os.path.isfile('server_cert.pem'):
    # Grab the cert
    cert = ssl.get_server_certificate(('127.0.0.1', 8888))
    # If it worked...
    if cert:
        # Write it to a file
        with open('server_cert.pem', 'w') as f:
            f.write(cert)
    else:
        sys.exit()

# Prepare context, including reference to the server's certificate
context = ssl.SSLContext(ssl.PROTOCOL_TLS)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = False
context.load_verify_locations(cafile='server_cert.pem')

cnx = context.wrap_socket(socket.socket(socket.AF_INET))

# Connect and evaluate
cnx.connect(('127.0.0.1', 8888))
I'll wait a bit to mark this answer accepted to see if someone can find a cleaner way of doing this.
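A rough alternative sketch (not from the original post, and it pins the whole leaf certificate rather than just the public key): hash the DER-encoded certificate returned by getpeercert(binary_form=True) and compare it to a stored digest, which avoids keeping a PEM file on disk. PINNED_SHA256 below is a hypothetical placeholder recorded on the first connection.

import hashlib, ssl, socket

# Hypothetical pinned digest, recorded on the first connection.
PINNED_SHA256 = 'expected-hex-digest-recorded-on-first-connection'

context = ssl.SSLContext(ssl.PROTOCOL_TLS)
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE  # verify by pin, not by chain

cnx = context.wrap_socket(socket.socket(socket.AF_INET))
cnx.connect(('127.0.0.1', 8888))

# binary_form=True returns the raw DER bytes even when the chain isn't verified.
der_cert = cnx.getpeercert(binary_form=True)
if hashlib.sha256(der_cert).hexdigest() != PINNED_SHA256:
    raise ssl.SSLError('certificate pin mismatch')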

How to do TLS renegotiation with a Python SSL socket

I need to simulate TLS renegotiation behaviour (which I understand as a new handshake) in Python. Is that possible?
I tried the code below; the first do_handshake() works, but the second one does nothing.
import socket, ssl, pprint, re, time
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
ssl_sock = ssl.wrap_socket(s,ca_certs="cacert.pem",do_handshake_on_connect=False)
ssl_sock.connect(('172.18.7.162', 443))
time.sleep(3)
ssl_sock.do_handshake()
print repr(ssl_sock.getpeername())
print ssl_sock.cipher()
print pprint.pformat(ssl_sock.getpeercert())
send_content="aaaa"
ssl_sock.write(send_content)
time.sleep(2)
print "do_handshake_again"
ssl_sock.do_handshake()
print "do_handshake_again done"
ssl_sock.write(send_content)
Thanks for helping!
To answer my own question:
I finally implemented this behaviour using the pyOpenSSL library.
from OpenSSL import SSL
import sys, os, select, socket
........
# Initialize context
ctx = SSL.Context(SSL.SSLv23_METHOD)
ctx.set_options(SSL.OP_NO_SSLv2)
#ctx.set_verify(SSL.VERIFY_PEER|SSL.VERIFY_FAIL_IF_NO_PEER_CERT, verify_cb)  # Demand a certificate
ctx.use_privatekey_file(os.path.join(dir, 'server.pkey'))
ctx.use_certificate_file(os.path.join(dir, 'server.cert'))
ctx.load_verify_locations(os.path.join(dir, 'CA.cert'))

# Set up server
server = SSL.Connection(ctx, socket.socket(socket.AF_INET, socket.SOCK_STREAM))
server.bind(('', int(sys.argv[1])))
server.listen(3)
server.setblocking(0)
........
for cli in w:
    try:
        ret = cli.send(writers[cli])
        cli.renegotiate()
        cli.do_handshake()
        ret = cli.send(writers[cli])
        ......
The important things are the last four lines:
1. send something on the socket
2-3. trigger the renegotiation and do the handshake
4. send something again
Because it is a non-blocking socket, I can see this code send out two packets: the first contains only application data (content type 23), while the second carries two payloads, a TLS handshake record (type 22) followed by more application data (type 23).
By the way, this simulates a renegotiation where the handshake and application data share the same packet. For a pure TLS renegotiation, you can use openssl s_client and type "R" to trigger one.
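A rough client-side counterpart (a sketch, not from the original answer; it assumes a server at a hypothetical 127.0.0.1:443 that still permits legacy renegotiation, i.e. TLS 1.2 or earlier) using the same renegotiate()/do_handshake() pattern on a blocking pyOpenSSL connection:

from OpenSSL import SSL
import socket

ctx = SSL.Context(SSL.SSLv23_METHOD)
conn = SSL.Connection(ctx, socket.socket(socket.AF_INET, socket.SOCK_STREAM))
conn.set_connect_state()
conn.connect(('127.0.0.1', 443))  # hypothetical server address
conn.do_handshake()               # initial handshake
conn.send(b'aaaa')                # application data under the first session keys
conn.renegotiate()                # queue a new handshake
conn.do_handshake()               # drive the second handshake to completion
conn.send(b'aaaa')                # application data under the renegotiated keys
conn.close()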

Malformed DNS response packet (python + scapy)

I'm working on creating a proxy server using Python and Scapy. TCP packets seem to be working fine, but I'm running into some issues with UDP, specifically DNS requests. Essentially, when a DNS request comes in, I capture it in my script, perform the DNS lookup, and try to return the response to the person who made the DNS query. The script successfully performs the lookup and returns the DNS response; however, Wireshark tells me it is a "Malformed Packet". Could someone tell me what I need to do in order to correctly return the DNS response?
#!/usr/bin/env python
from tornado.websocket import WebSocketHandler
from tornado.httpserver import HTTPServer
from tornado.web import Application
from tornado.ioloop import IOLoop
from collections import defaultdict
from scapy.all import *
import threading

outbound_udp = defaultdict(int)
connection = None

class PacketSniffer(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        global connection
        while (True):
            pkt = sniff(iface="eth0", count=1)
            if pkt[0].haslayer(DNS):
                print "Returning back has UDP"
                print pkt.summary()
                ipPacket = pkt[0][IP]
                dnsPacket = pkt[0][DNS]
                if outbound_udp[(ipPacket.src, dnsPacket.id)] > 0:
                    outbound_udp[(ipPacket.src, dnsPacket.id)] -= 1
                    print "Found in outbound_udp"
                    # Modify the destination address back to the address of the TUN on the host.
                    ipPacket.dst = "10.0.0.1"
                    # Drop the stale checksums so Scapy recomputes them.
                    try:
                        del ipPacket[TCP].chksum
                        del ipPacket[IP].chksum
                        del ipPacket[UDP].chksum
                    except IndexError:
                        print ""
                    ipPacket.show2()  # Force recompute the checksum
                    if connection:
                        connection.write_message(str(ipPacket).encode('base64'))

sniffingThread = PacketSniffer()
sniffingThread.daemon = True
sniffingThread.start()
Some bugs have been fixed recently in Scapy around DNS (and other complex protocols, but DNS is the one most frequently seen):
https://bitbucket.org/secdev/scapy/issue/913/
https://bitbucket.org/secdev/scapy/issue/5104/
https://bitbucket.org/secdev/scapy/issue/5105/
Trying with the latest Scapy development version from the Mercurial repository (hg clone http://bb.secdev.org/scapy) should fix this.
