I'm trying to capture network traffic with scapy by running sniff in a thread rather than in the main thread itself, so that it doesn't block the app. The problem I'm facing is that wrpcap is not creating a file, or if it does, it creates a 0 KB file.
The other reason I'm using a separate thread is that I want to be able to stop the capture when the user wishes to end it.
from scapy.all import *
from time import gmtime, strftime
import threading
import time

def bomber(stop_event):
    data = []
    pkts = []
    while not stop_event.isSet():
        pkt100 = sniff(count=100)
        data.append(pkt100)
    for pkt in data:
        for x in range(100):
            pkts.append(pkt[x])
    pktsRoll = PacketList(pkts)
    savename = "F:\\%s.pcap" % strftime("%Y-%m-%d,%H:%M", gmtime())
    wrpcap(savename, pktsRoll)
    print " its done "

def main():
    stop_event = threading.Event()
    c_thread = threading.Thread(target=bomber, args=(stop_event,))
    c_thread.start()
    time.sleep(20)
    stop_event.set()
This is the code that I'm trying out. I'm using Python 2.5.
Thanks in advance.
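One thing worth checking (my observation, not part of the original post): on Windows a ':' is not a legal filename character, so the "%H:%M" in the strftime format may be exactly why wrpcap never produces a usable file. A minimal sketch of an alternative, assuming a scapy version whose sniff supports the stop_filter argument:

from scapy.all import sniff, wrpcap
from time import gmtime, strftime
import threading

def bomber(stop_event):
    # sniff until the event is set; stop_filter is evaluated after each packet
    pkts = sniff(stop_filter=lambda p: stop_event.isSet())
    # use '-' instead of ':' so Windows accepts the filename
    savename = "F:\\%s.pcap" % strftime("%Y-%m-%d,%H-%M", gmtime())
    wrpcap(savename, pkts)

stop_event = threading.Event()
c_thread = threading.Thread(target=bomber, args=(stop_event,))
c_thread.start()
# ... later, when the user ends the capture:
stop_event.set()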
I want to monitor the stdout of a program, and whenever it prints something to stdout I want to get a callback in Python to process the gathered data.
The program I want to monitor is not written in Python, but behaves similarly to this dummy_script.py:
import datetime
import random
import time

i = 0
while True:
    line = f"{datetime.datetime.now()} {i}"
    print(line)
    i += 1
    time.sleep(random.uniform(0, 1))
For the main Python script I tried something like this:
from threading import Thread
import os

def do_stuff():
    command = f"python3 dummy_script.py"
    os.system(command)

thread = Thread(target=do_stuff)
thread.daemon = True
thread.start()
So is there a way to create a callback when a new line is printed to stdout?
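One common approach (a sketch of my own, not from the original post) is to launch the program with subprocess.Popen instead of os.system and read its stdout pipe line by line in a thread, invoking a callback for each line:

import subprocess
from threading import Thread

def watch(command, callback):
    # start the child with its stdout connected to a pipe
    proc = subprocess.Popen(command, stdout=subprocess.PIPE,
                            text=True, bufsize=1)
    # iterating over the pipe yields one line each time the child prints
    for line in proc.stdout:
        callback(line.rstrip("\n"))

def on_line(line):
    # the callback: process each captured line here
    print(f"got: {line}")

thread = Thread(target=watch,
                args=(["python3", "dummy_script.py"], on_line),
                daemon=True)
thread.start()

Because the reader thread blocks inside the for loop, the callback fires as soon as each line arrives, without polling. If the monitored program block-buffers its stdout when piped, you may need to run it with python3 -u (or have it flush explicitly) so lines arrive promptly.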
So I've got a Bluetooth connection from an Arduino that reads a joystick and sends the axis readout to my Raspberry Pi (4B running Ubuntu 20.10). I've confirmed the Pi is receiving this data.
Now I'm trying to run this Bluetooth communication in a separate process using the Python multiprocessing module. To access the data from the Arduino, I give the function a queue from the parent process to put the data in. Then, in the main function, I continuously try to read from this queue and process the data.
The queue in the parent process always remains empty, however, so I can't process the data any further.
How can I get the data from the bluetooth process back to the main process?
main.py
#!/usr/bin/env python3
import time
import logging
import multiprocessing as mp

import bluetoothlib

logging.basicConfig(level=logging.DEBUG)

logging.info("creating queue")
global q
q = mp.Queue()

def main():
    try:
        logging.info("starting bluetooth process")
        p = mp.Process(target=bluetoothlib.serlistener, args=(q,))
        p.start()
    except:
        logging.error("unable to start bluetooth listener")
    logging.info("start reading from queue")
    while True:
        #logging.info(q.qsize())
        if not q.empty():
            mss = q.get()
            logging.info(mss)
            #do something with data
        elif q.empty():
            logging.info("queue empty")
            time.sleep(1)

main()
bluetoothlib.py
#!/usr/bin/env python3
import os
import serial
import io

def serlistener(q):
    print("creating connection")
    btConn = serial.Serial("/dev/rfcomm0", 57600, timeout=1)
    btConn.flushInput()
    sio = io.TextIOWrapper(io.BufferedRWPair(btConn, btConn, 1), encoding="utf-8")
    print("connection created, starting listening")
    while btConn.is_open:
        try:
            mss = sio.readline()
            q.put(mss)
        except:
            print("error")
            break
At thelizardking34's suggestion, I took another look at the global statements I'd been messing with; after correcting them, the code as now given in the question works.
Thanks to thelizardking34!
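For reference, a minimal sketch of a structure that avoids module-level globals altogether (my own illustration, not the exact change the poster applied): create the queue inside main() and pass it explicitly to the child process:

#!/usr/bin/env python3
import multiprocessing as mp

import bluetoothlib

def main():
    q = mp.Queue()  # created here and passed explicitly, no global needed
    p = mp.Process(target=bluetoothlib.serlistener, args=(q,))
    p.start()
    while True:
        mss = q.get()  # blocks until the listener puts a line
        print(mss)     # do something with the data

if __name__ == "__main__":
    main()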
I am working on a web app with CherryPy that needs to access a few applications via COM.
Right now I create a new instance of the application with each request, which means each request waits 3 seconds for the application to start and 0.01 seconds for the actual job.
I would like to start each COM application once and keep it alive for a few seconds, reusing it for the following requests, because most of the time it is used by a burst of 5-10 AJAX requests and then nothing for hours.
Is it possible to share a COM object across all the threads of a CherryPy application?
Here is the summary of a few experiments that show how it is working now on each request and how it does not work across threads.
The following code successfully starts and stops Excel:
>>> import pythoncom, win32com.client
>>> def start():
...     global xl
...     xl = win32com.client.Dispatch('Excel.Application')
...
>>> def stop():
...     global xl
...     xl.quit()
...     xl = None
...
>>> start()
>>> stop()
But the following code starts Excel and closes it after 3 seconds.
>>> import pythoncom, win32com.client, threading, time
>>> def start():
...     global xl
...     pythoncom.CoInitialize()
...     xl = win32com.client.Dispatch('Excel.Application')
...     time.sleep(3)
...
>>> threading.Thread(target=start).start()
I added the call to CoInitialize() otherwise the xl object would not work (see this post).
And I added the 3 second pause, so I could see on the task manager that the EXCEL.EXE process starts and is alive for 3 seconds.
Why does it die after the thread that started it ends?
I checked the documentation of CoInitialize(), but I couldn't work out whether it is possible to get it to work in a multithreaded environment.
If you want to use win32com in multiple threads you need to do a little more work, because a COM object cannot be passed to a thread directly. You need to use CoMarshalInterThreadInterfaceInStream() and CoGetInterfaceAndReleaseStream() to pass the instance between threads:
import pythoncom, win32com.client, threading, time

def start():
    # Initialize
    pythoncom.CoInitialize()
    # Get instance
    xl = win32com.client.Dispatch('Excel.Application')
    # Create id
    xl_id = pythoncom.CoMarshalInterThreadInterfaceInStream(pythoncom.IID_IDispatch, xl)
    # Pass the id to the new thread
    thread = threading.Thread(target=run_in_thread, kwargs={'xl_id': xl_id})
    thread.start()
    # Wait for child to finish
    thread.join()

def run_in_thread(xl_id):
    # Initialize
    pythoncom.CoInitialize()
    # Get instance from the id
    xl = win32com.client.Dispatch(
        pythoncom.CoGetInterfaceAndReleaseStream(xl_id, pythoncom.IID_IDispatch)
    )
    time.sleep(5)

if __name__ == '__main__':
    start()
For more info see: https://mail.python.org/pipermail/python-win32/2008-June/007788.html
The answer from @Mauriusz Jamro (https://stackoverflow.com/a/27966218/7733418) was really helpful. Just to add to it: also make sure that you call
pythoncom.CoUninitialize()
at the end, so that there's no memory leak. You can call it somewhere after using CoInitialize() and before your process ends.
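Putting the two answers together, a minimal sketch of the per-thread pairing (my own summary of the advice above, not an additional API requirement):

import pythoncom, win32com.client

def worker():
    pythoncom.CoInitialize()   # every thread that touches COM initializes first
    try:
        xl = win32com.client.Dispatch('Excel.Application')
        # ... use xl here ...
    finally:
        pythoncom.CoUninitialize()   # balance CoInitialize() before the thread exits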
Try using multiprocessing. Worked for me, after a long search.
from multiprocessing import Process

def test():
    ...  # do the COM work here, in its own process

p = Process(target=test, args=())
p.start()
p.join()
Okay, time for another question/post...
So currently I am trying to develop a simple Python program that has a WebKit/webpage view and a serial port interface. Not that it should matter, but this is also running on a Raspberry Pi.
The following code works fine, but it freezes the system as soon as I uncomment the serial port line that you can see commented out.
The day has been long and this one for some reason has my brain fried. Python is not my strongest point, and mind you this is just a quick test script for now. Yes, I have used Google and other resources...
#!/usr/bin/env python
import sys
import serial
import threading
import time
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtWebKit import *

sURL = ""
sURL2 = ""
objSerial = serial.Serial(0)

def SerialLooper():
    global objSerial
    if objSerial.isOpen() == True:
        print("is_responding")
        #objSerial.write("is_responding")
    time.sleep(10)
    SerialLooper()

class TestCLASS(object):
    def __init__(self):
        global sURL
        global sURL2
        global objSerial
        objSerial = serial.Serial(0)
        sURL = "http://localhost/tester"
        app = QApplication(sys.argv)
        webMain = QWebView()
        webMain.loadFinished.connect(self.load_finished)
        webMain.load(QUrl(sURL))
        webMain.show()
        thread = threading.Thread(target=SerialLooper)
        thread.start()
        sys.exit(app.exec_())

    def load_finished(boolNoErrors):
        global sURL
        print("Url - " + sURL)
        #something here
        #something else here

newObjClass = TestCLASS()
EDIT
Further to this, it appears it's not the multithreading but the serial.write().
It has been a while since I used serial, but IIRC it is not threadsafe (on Windows at least). You are opening the port in the main thread and performing a write in another thread. It's a bad practice anyway. You might also consider writing a simple single-threaded program to see if the serial port is actually working.
PS Your program structure could use some work. You only need one of the global statements (global objSerial); the rest do nothing. It would be better to get rid of that one, too.
And the recursive call to SerialLooper() will eventually fail when the recursion depth is exceeded; why not just use a while loop...
def SerialLooper():
    while objSerial.isOpen():               # drop the == True; isOpen() already returns a bool
        print("is_responding")              # print something
        # objSerial.write("is_responding")  # write to the port
        time.sleep(10)                      # sleep or do whatever
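Building on that advice, a minimal sketch (my own, not the poster's final code) that opens the port inside the thread that uses it and stops via a flag instead of recursion:

import serial
import threading
import time

stop_event = threading.Event()

def serial_worker():
    port = serial.Serial(0)  # opened in the same thread that writes to it
    while port.isOpen() and not stop_event.is_set():
        port.write("is_responding")
        time.sleep(10)
    port.close()

thread = threading.Thread(target=serial_worker)
thread.start()
# ... later, to stop cleanly:
stop_event.set()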
I have this Python based service daemon which is doing a lot of multiplexed IO (select).
From another script (also Python) I want to query this service daemon about status/information and/or control the processing (e.g. pause it, shut it down, change some parameters, etc).
What is the best way to send control messages ("from now on you process like this!") and query processed data ("what was the result of that?") using Python?
I read somewhere that named pipes might work, but I don't know that much about named pipes, especially in Python, or whether there are any better alternatives.
Both the background service daemon AND the frontend will be programmed by me, so all options are open :)
I am using Linux.
Pipes and named pipes are a good solution for communicating between different processes.
Pipes work like a shared memory buffer, but with an interface that mimics a simple file on each of the two ends. One process writes data at one end of the pipe, and another reads that data at the other end.
Named pipes are similar, except that the pipe is actually associated with a real file on your computer.
More details at
http://www.softpanorama.org/Scripting/pipes.shtml
In Python, named pipe files are created with the os.mkfifo call (note that os.mkfifo returns None; it just creates the special file):
os.mkfifo(filename)
In the child and the parent, open this pipe as a file:
out = os.open(filename, os.O_WRONLY)
fin = open(filename, 'r')  # 'in' is a reserved word in Python, so pick another name
To write:
os.write(out, 'xxxx')
To read:
line = fin.readline()
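To tie these pieces together, a minimal end-to-end sketch (my own, with a hypothetical FIFO path) in which one process writes lines into the FIFO and another reads them:

import os

FIFO = '/tmp/mydaemon.fifo'   # hypothetical path

if not os.path.exists(FIFO):
    os.mkfifo(FIFO)

pid = os.fork()
if pid == 0:
    # child: the reader; open() blocks until a writer connects
    fin = open(FIFO, 'r')
    for line in fin:
        print('got: ' + line.strip())
    os._exit(0)
else:
    # parent: the writer
    fout = open(FIFO, 'w')
    fout.write('hello\n')
    fout.close()
    os.waitpid(pid, 0)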
Edit: Adding links from SO
Create a temporary FIFO (named pipe) in Python?
https://stackoverflow.com/search?q=python+named+pipes
You may want to read more on "IPC and Python"
http://www.freenetpages.co.uk/hp/alan.gauld/tutipc.htm
The best way to do IPC is to use a message queue in Python, as below.
The server process, server.py (run this before running client.py and interact.py):
from multiprocessing.managers import BaseManager
import Queue
queue1 = Queue.Queue()
queue2 = Queue.Queue()
class QueueManager(BaseManager): pass
QueueManager.register('get_queue1', callable=lambda:queue1)
QueueManager.register('get_queue2', callable=lambda:queue2)
m = QueueManager(address=('', 50000), authkey='abracadabra')
s = m.get_server()
s.serve_forever()
The interactor, which handles the I/O: interact.py
from multiprocessing.managers import BaseManager
import threading
import sys
class QueueManager(BaseManager): pass
QueueManager.register('get_queue1')
QueueManager.register('get_queue2')
m = QueueManager(address=('localhost', 50000),authkey='abracadabra')
m.connect()
queue1 = m.get_queue1()
queue2 = m.get_queue2()
def read():
    while True:
        sys.stdout.write(queue2.get())

def write():
    while True:
        queue1.put(sys.stdin.readline())
threads = []
threadr = threading.Thread(target=read)
threadr.start()
threads.append(threadr)
threadw = threading.Thread(target=write)
threadw.start()
threads.append(threadw)
for thread in threads:
thread.join()
The client program, client.py:
from multiprocessing.managers import BaseManager
import sys
import string
import os
class QueueManager(BaseManager): pass
QueueManager.register('get_queue1')
QueueManager.register('get_queue2')
m = QueueManager(address=('localhost', 50000), authkey='abracadabra')
m.connect()
queue1 = m.get_queue1()
queue2 = m.get_queue2()
class RedirectOutput:
    def __init__(self, stdout):
        self.stdout = stdout

    def write(self, s):
        queue2.put(s)

class RedirectInput:
    def __init__(self, stdin):
        self.stdin = stdin

    def readline(self):
        return queue1.get()
# redirect standard output
sys.stdout = RedirectOutput(sys.stdout)
sys.stdin = RedirectInput(sys.stdin)
# The test program which will take input and produce output
Text=raw_input("Enter Text:")
print "you have entered:",Text
def x():
    while True:
        x = raw_input("Enter 'exit' to end and anything else to continue")
        print x
        if 'exit' in x:
            break

x()
This can be used to communicate between two processes over a network or on the same machine.
Remember that the interactor and server processes will not terminate until you kill them manually.