I wrote a Flask app that is working fine, and I wanted that while it is running, a separate background thread should parallel to it doing some stuff. The problem is, doing this doesn't spawn the thread at all, but I know that my code is right because using the exact same portion of the thread code on a simple python script works as intended.
app.py
# Collector is created at import time but only started from the __main__ guard,
# so `flask run` (which imports this module without executing the guard) never
# starts it — run with `python3 app.py` instead.
weatherCollectorThread = WeatherDataCollectorThread()
...
if __name__ == '__main__':
    try:
        print("Starting Weather Collector Thread...")
        weatherCollectorThread.start()
        print("Starting the WebApp...")
        # use_reloader=False: with debug=True the Werkzeug reloader re-imports
        # the module in a child process, so the collector thread would be
        # started in the wrong process (or twice). Disable it to keep exactly
        # one collector running alongside the server.
        app.run(debug=True, use_reloader=False)
    except KeyboardInterrupt:
        try:
            weatherCollectorThread.stop()
        except Exception:
            # best-effort shutdown; never mask the KeyboardInterrupt exit path
            pass
WeatherDataCollectorThread Class
class WeatherDataCollectorThread:
    """Background collector: polls every working weather station once per
    minute and persists the readings through DBHelper.

    Lifecycle: start() launches a daemon thread running collectWeatherData();
    stop() signals the loop to exit, waits for the thread, then closes the log.
    (The original closed the log *before* stopping the loop, so the thread
    could write to a closed file.)
    """

    def __init__(self):
        self.weatherStations = DBHelper.get_weather_stations()
        self.weatherApiKey = "REDACTED"
        self.baseURL = "SOME URL"
        self.isThreadRunning = False
        self.thread = None  # created by start()
        # appended to after every successful poll; closed in stop()
        self.result_log = open('results.log','a+')

    def storeWeatherData(self, weather):
        """Insert one reading (dict built in collectWeatherData) into the DB."""
        conn = DBHelper.get_connection()
        try:
            cur = conn.cursor()
            cur.execute("INSERT INTO weather_data(city,country,now_unixtime,last_updated_unixtime,temperature,isDay,condition_text,condition_icon,windspeed,winddir,pressure,precipitation,cloud,humidity) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)",[weather['city'],weather['country'],weather['now_unixtime'],weather['last_updated_unixtime'],weather['temperature'],weather['isDay'],weather['condition_text'],weather['condition_icon'],weather['windspeed'],weather['winddir'],weather['pressure'],weather['precipitation'],weather['cloud'],weather['humidity']])
            conn.commit()
        finally:
            # close the connection even when execute/commit raises
            conn.close()

    def collectWeatherData(self):
        """Thread body: poll each working station, store the reading, sleep."""
        self.isThreadRunning = True
        while self.isThreadRunning:
            for each_station in self.weatherStations:
                if not self.isThreadRunning:
                    break  # react to stop() without finishing the whole cycle
                if each_station['isWorking'] != 1:
                    continue
                print("Sending request")
                params = {'q': each_station['location'], 'key': self.weatherApiKey}
                resp = requests.get(url=self.baseURL, params=params)
                print("Request received")
                weatherData = json.loads(resp.text)
                location = weatherData['location']
                current = weatherData['current']
                weather = {
                    'city': location['name'],
                    'country': location['country'],
                    'now_unixtime': location['localtime_epoch'],
                    'last_updated_unixtime': current['last_updated_epoch'],
                    'temperature': current['temp_c'],
                    'isDay': current['is_day'],
                    'condition_text': current['condition']['text'],
                    'condition_icon': current['condition']['icon'],
                    'windspeed': current['wind_kph'],
                    'winddir': current['wind_dir'],
                    'pressure': current['pressure_mb'],
                    'precipitation': current['precip_mm'],
                    'cloud': current['cloud'],
                    'humidity': current['humidity'],
                }
                self.storeWeatherData(weather)
                print("Data stored\n" + '-'*24)
                self.result_log.write(resp.text + '\n')
                # one-minute pause between station polls
                # NOTE(review): the original's flattened indentation is
                # ambiguous about whether this sleeps per station or per full
                # cycle — confirm against the intended polling cadence.
                sleep(60)

    def start(self):
        """Launch the collector in a daemon thread so it cannot block exit."""
        self.thread = Thread(target=self.collectWeatherData, daemon=True)
        self.thread.start()

    def join_instrument(self, session):
        """Mark the given station as working (isWorking=1)."""
        conn = DBHelper.get_connection()
        try:
            cur = conn.cursor()
            cur.execute("UPDATE weather_stations SET isWorking=1 WHERE weatherStationID=?",[session['weatherStationID']])
            conn.commit()
        finally:
            conn.close()

    def detach_instrument(self, session):
        """Mark the given station as not working (isWorking=0)."""
        conn = DBHelper.get_connection()
        try:
            cur = conn.cursor()
            cur.execute("UPDATE weather_stations SET isWorking=0 WHERE weatherStationID=?",[session['weatherStationID']])
            conn.commit()
        finally:
            conn.close()

    def stop(self):
        """Signal the loop to stop, wait for the thread, then close the log."""
        self.isThreadRunning = False
        if self.thread is not None:
            self.thread.join(timeout=65)  # loop sleeps up to 60 s per poll
        # safe to close only once the thread can no longer write
        self.result_log.close()
So I figured out the solution.
You see, when you use flask run to run your web-app, it ignores every single function call in the script and parses through the decorators and starts the app on its own. So, if you do something like:
if __name__ == '__main__':
app.start()
someOtherFunction()
Neither the app.start() nor the someOtherFunction() would start.
So the solution?
Simply use python3 app.py to run the script.
... yes, it's that simple :|
Related
I wanted to develop an app with Python that would run on a Raspberry and be remote controlled.
This app should control (switch on and off) 2 leds based on the current day, time and the user's indication.
The app is to be executed when the Raspberry is booted (auto start) and should be run endlessly. For this purpose, a service is created under /usr/lib/systemd/system. The postgres (Heroku) databases were used for this purpose.
I have read several articles and used several suggestions, but unfortunately without much success. The problem with my app is that it crashes every 24 hours with the following error:
Light_system Error 1: SSL SYSCALL error: Connection timed out
Light_system Error 1: server closed the connection unexpectedly
This probably means the server terminated abnormally
before or while processing the request.
The raspberry is connected to the internet through a sim module (SIM7600E -H 4G HAT). But this is not supposed to be the problem, because I also connect the raspberry to the direct router, but get the same error again.
I am very grateful for any suggestion!
I have already tried the following steps:
# TCP keepalive settings: probe an idle connection so NAT/firewall drops are
# detected early instead of surfacing as a stale-connection error much later.
keepalive_kwargs = {
    "keepalives": 1,           # enable TCP keepalive probes
    "keepalives_idle": 30,     # seconds of idle before the first probe
    "keepalives_interval": 5,  # seconds between probes
    "keepalives_count": 5,     # failed probes before dropping the connection
}
conn = psycopg2.connect(conn_string, **keepalive_kwargs)
and
engine = sqlalchemy.create_engine(DATABASE_URL, pool_pre_ping=True)
Here is my code:
crud.py
# TCP keepalive options forwarded by psycopg2 to libpq.
keepalive_kwargs = {
    "keepalives": 1,           # enable TCP keepalive probes
    "keepalives_idle": 30,     # seconds of idle before the first probe
    "keepalives_interval": 5,  # seconds between probes
    "keepalives_count": 5,     # failed probes before dropping the connection
}
# NOTE(review): placeholder DSN — a real libpq string looks like
# "host=... dbname=... user=... password=..."; confirm before running.
conn_string = "host, dbname, user, password"
# Module-level connection shared by every query; if the server drops it
# (the reported 24 h SSL SYSCALL timeout) nothing here re-establishes it.
conn = psycopg2.connect(conn_string, **keepalive_kwargs)
class CRUDSettings:
    """Read access to the time_settings_tabel table."""

    @staticmethod  # '#staticmethod' in the original was a markdown-mangled decorator
    def read_from_db():
        """Return all rows from time_settings_tabel, or [] on failure.

        Returning [] (the original returned None) keeps callers that iterate
        the result — e.g. `for row in crud.settings.read_from_db()` — from
        crashing with a TypeError after a connection error.
        """
        try:
            cur = conn.cursor()
            cur.execute("SELECT * FROM time_settings_tabel")
            data = cur.fetchall()
            return data
        except Exception as e:
            with open("read_from_db.log", "a") as f:
                # newline so successive log entries don't run together
                f.write(f"ERROR : {str(e)}\n")
            return []
database.py
# NOTE(review): placeholder — real connection URL goes here.
DATABASE_URL = "URL for DB"
database = databases.Database(DATABASE_URL)
# pool_pre_ping=True makes SQLAlchemy test each pooled connection with a
# lightweight ping before use, transparently replacing server-dropped ones.
engine = sqlalchemy.create_engine(DATABASE_URL, pool_pre_ping=True)
SessionLocal = sessionmaker(autocommit=False, autoflush = False, bind = engine)
Base = declarative_base()
metadata = sqlalchemy.MetaData()
# Create all declared tables once at import time (idempotent).
Base.metadata.create_all(bind=engine)
def get_db():
    """Yield a SQLAlchemy session and always close it afterwards.

    Fixes vs. the original:
    - removed the per-call Base.metadata.create_all(): the tables are already
      created once at import time; re-running DDL on every request is waste.
    - removed the stray `engine.connect()`: it checked out a raw pooled
      connection that was never used and never closed — one leaked connection
      per call, which exhausts the pool over time.
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
app.py
class System:
    """Drives two GPIO outputs (pins 20/21) according to schedule rows."""

    async def run(self, app):
        """Poll the schedule and switch the pins while app.state.isRunning.

        Fixes vs. the original:
        - `f.write("ERROR : %s", str(e))` passed two arguments to file.write()
          (a TypeError that replaced the real error message); it now writes a
          single formatted string.
        - the inner `while` loop never awaited, so the event loop was starved
          for the whole ON period; it now yields with asyncio.sleep(1).
        """
        try:
            while app.state.isRunning:
                for row in crud.settings.read_from_db():
                    now = datetime.now()
                    current_time = convert(now.hour, now.minute, now.second)
                    current_day = now.strftime("%A")
                    # row[2]/row[3] are "HH:MM:SS" strings from the DB
                    start_split = row[2].split(":")
                    end_split = row[3].split(":")
                    start = convert(int(start_split[0]), int(start_split[1]), int(start_split[2]))
                    end = convert(int(end_split[0]), int(end_split[1]), int(end_split[2]))
                    current_id = row[0]
                    GPIO.output(20, GPIO.LOW)
                    GPIO.output(21, GPIO.LOW)
                    # NOTE(review): `pumpe` is a name defined elsewhere
                    # (module-level flag?) — confirm it exists at runtime.
                    while ((current_day == row[1].replace(" ", "")) and (current_time >= start)
                           and (current_time <= end) and pumpe and (current_id == row[0])):
                        print("START")
                        GPIO.output(20, GPIO.HIGH)
                        GPIO.output(21, GPIO.HIGH)
                        now = datetime.now()
                        current_time = convert(now.hour, now.minute, now.second)
                        if (current_day == row[1].replace(" ", "") and current_time >= end):
                            print("END")
                            GPIO.output(20, GPIO.LOW)
                            GPIO.output(21, GPIO.LOW)
                            break
                        await asyncio.sleep(1)  # yield while the pins are ON
                    await asyncio.sleep(1)
        except Exception as e:
            with open("watersystem.log", "a") as f:
                f.write(f"ERROR : {e}\n")

system = System()
main.py
@app.on_event("startup")  # '#app.on_event' was a markdown-mangled decorator
async def startup_event():
    """On FastAPI startup: load the settings rows and launch the LED loop."""
    # (removed the original's unused `db = SessionLocal()`: it opened a
    # session that was never used and never closed)
    app.state.isRunning = True
    # get_db() is a generator; iterating it lets its finally-block close the
    # session when exhausted
    for row in get_db():
        app.state.settings = row
    # fire-and-forget background task; stopped by clearing app.state.isRunning
    asyncio.create_task(system.run(app))
This code is a DNS resolver that check from a DB for an entry not older than 5 minutes.
#!/usr/bin/python3
from MySQLdb import _mysql as MySQL
from dnslib import RR, QTYPE, RCODE, A
from dnslib.label import DNSLabel
from dnslib.server import DNSServer, BaseResolver
from time import sleep, time
class MariaResolver(BaseResolver):
    """DNS resolver answering A queries from a MariaDB table.

    Only names containing "iut-" are answered, and only when the DB row is
    fresher than DELTA seconds; everything else gets RCODE REFUSED.
    """

    DELTA = 300  # maximum age (seconds) of a usable DB record

    def __init__(self):
        self.password = "********************"

    def resolve(self, request, handler):
        """Answer one DNS request from the `dns` table.

        Fix: the original only called db.close() on the success path, so every
        exception leaked a MySQL connection — the observed slow leak that
        forced periodic service restarts. The connection is now closed in a
        finally block. The hostname (attacker-controlled: it comes straight
        from the DNS query) is also escaped before being interpolated into SQL.
        """
        reply = request.reply()
        qname = request.q.qname
        fqdn = str(request.q.qname)
        db = None
        try:
            if fqdn.find("iut-") == -1:
                reply.header.rcode = RCODE.REFUSED
            else:
                hostname = fqdn.split(".")[0]
                timestamp = int(time()) - self.DELTA
                query = "SELECT ip FROM dns WHERE record='{}' AND timestamp>{}"
                db = MySQL.connect("localhost", "dns", self.password, "salles")
                # _mysql has no parameter binding; escape the untrusted name
                safe_hostname = db.escape_string(hostname.encode()).decode()
                db.query(query.format(safe_hostname, timestamp))
                result = db.store_result()
                row = result.fetch_row(how=1)
                if row:
                    ip = row[0]["ip"].decode("utf-8")
                    reply.add_answer(RR(qname, QTYPE.A, ttl=0,
                                        rdata=A(ip)))
                else:
                    reply.header.rcode = RCODE.REFUSED
        except Exception as e:
            print(e)
            reply.header.rcode = RCODE.REFUSED
        finally:
            # always release the connection, success or failure
            if db is not None:
                db.close()
        return reply
if __name__ == '__main__':
    resolver = MariaResolver()
    # Serve DNS on UDP port 53 from a background thread.
    udp_server = DNSServer(resolver, port=53)
    udp_server.start_thread()
    # Keep the main thread alive while the server thread runs.
    while udp_server.isAlive():
        sleep(0.1)
This code leaks over time and I do not understand why.
In the Proxmox screenshot, you can see the service being restarted at the end.
My SAP is very old and I can't make API calls with it. So, I have to manipulate the GUI of SAP to do my stuff.
I'm trying to access two SAP transactions at the same time in two different windows using Python.
To do this I'm using the libraries: pywin32, subprocess and multiprocessing.
But I'm getting the following error:
TypeError: cannot pickle 'PyIDispatch' object
and
PermissionError: [WinError 5] Acess denied
What I got until now is to open two windows (create two SAP sessions) and access the transaction in different windows but one after the other, in other words, not at the same time.
This test program constitutes in 3 separated scripts:
One have the class to create a connection, create the first session and login into the account.
The second class is to "manipulate" the SAP
The last one is the main script.
The Scripts:
createconnection.py
from subprocess import Popen
import time
from win32com.client import GetObject
class Sap:
    """Attach to (or launch) SAP GUI and return a logged-in scripting session.

    connectBy selects how the connection is located:
      1 — match an existing connection by Description,
      2 — match by connection string (default),
      3 — pass sap_env on the saplogon.exe command line.
    """

    def __init__(self, sap_env, user_id, user_password, language="EN",
                 newSession=False, connectBy=2):
        self.sap_file = "C:\\Program Files (x86)\\SAP\\FrontEnd\\SapGui" +\
            "\\saplogon.exe"
        self.sap_env = sap_env
        self.user_id = user_id
        self.user_password = user_password
        self.language = language
        self.connectBy = connectBy
        self.newSession = newSession

    def __get_sap_gui__(self, max_tries=120):
        """Poll for the SAPGUI scripting engine while saplogon starts.

        Fix: the original retried forever by *recursing* on every failure,
        which eventually hits Python's recursion limit; this polls in a loop
        and fails loudly after ~60 s instead of hanging indefinitely.
        """
        for _ in range(max_tries):
            try:
                return GetObject('SAPGUI').GetScriptingEngine
            except Exception:  # COM object not registered yet
                time.sleep(0.5)
        raise Exception("SAP GUI scripting engine not reachable")

    def get_sap_connection(self):
        """Return an existing matching connection or open a new one."""
        if self.connectBy == 3:
            # start saplogon with the connection string on the command line
            Popen(self.sap_file + ' ' + self.sap_env)
            sapGui = self.__get_sap_gui__()
            conn = sapGui.Connections(0)
            timeout = 10
            # wait up to 10 s for the first session to appear
            while conn.Sessions.Count == 0 and timeout:
                time.sleep(1)
                timeout -= 1
            if timeout == 0: raise Exception("Fail to connect")
        else:
            Popen(self.sap_file)
            sapGui = self.__get_sap_gui__()
            conn = None
            if self.connectBy == 1:
                # match by human-readable description
                if sapGui.Connections.Count > 0:
                    for conn in sapGui.Connections:
                        if conn.Description == self.sap_env:
                            break
                if not conn:
                    conn = sapGui.OpenConnection(self.sap_env)
            else:
                # match by connection string
                if sapGui.Connections.Count > 0:
                    for conn in sapGui.Connections:
                        if self.sap_env in conn.ConnectionString:
                            break
                if not conn:
                    conn = sapGui.OpenConnectionByConnectionString(self.sap_env)
        return conn

    def get_sap_session(self, conn):
        """Return a session on conn (a new one if newSession) and log it in."""
        if self.newSession:
            numSessions = conn.Sessions.Count + 1
            conn.Sessions(0).createsession()
            # busy-wait until SAP registers the new window
            while conn.Sessions.Count < numSessions: pass
            session = conn.Sessions(numSessions-1)
        else:
            session = conn.Sessions(0)
        if session.findById('wnd[0]/sbar').text.startswith('SNC logon'):
            # SNC single sign-on: only the language needs to be supplied
            session.findById('wnd[0]/usr/txtRSYST-LANGU').text = self.language
            session.findById('wnd[0]').sendVKey(0)
            session.findById('wnd[0]').sendVKey(0)
        elif session.Info.User == '':
            # classic user/password logon
            session.findById('wnd[0]/usr/txtRSYST-BNAME').text = self.user_id
            session.findById('wnd[0]/usr/pwdRSYST-BCODE').text =\
                self.user_password
            session.findById('wnd[0]/usr/txtRSYST-LANGU').text = self.language
            session.findById('wnd[0]').sendVKey(0)
        session.findById('wnd[0]').maximize()
        return session
manipulatesap.py
from createconnection import Sap
class QuerySap(Sap):
    """Sap wrapper holding two GUI sessions for parallel transactions."""

    def __init__(self, sap_env, user_id, user_password, language):
        super().__init__(sap_env, user_id, user_password, language=language)
        self.connection = self.get_sap_connection()
        self.session = self.get_sap_session(self.connection)
        self.new_session = None  # populated by open_new_windows()

    def open_new_windows(self):
        """Create two extra GUI sessions and keep a handle to window #2."""
        self.connection.Sessions(0).createsession()
        self.connection.Sessions(0).createsession()
        self.new_session = self.connection.Sessions(1)

    @property  # '#property' in the original was a markdown-mangled decorator
    def sess1(self):
        """The initial (login) session."""
        return self.session

    @property
    def sess2(self):
        """The second window; None until open_new_windows() has run."""
        return self.new_session
main.py
from manipulatesap import QuerySap
from multiprocessing import Pool, Process
from time import sleep
def goto_trasaction(session, transacion):
    # NOTE(review): the misspelled name/parameter ("trasaction"/"transacion")
    # are kept because callers below use them.
    # Type the transaction code into the command field and press Enter.
    session.findById("wnd[0]/tbar[0]/okcd").text = transacion
    session.findById("wnd[0]").sendVKey(0)
    sleep(5)  # crude wait for the transaction screen to load
def sap_interface_multi_process(usr, pw, env):
    """Run two transactions in two SAP windows in parallel processes.

    Fix: the original called p1.join() twice and never joined p2.

    NOTE(review): passing live COM session objects as Process args fails to
    pickle on Windows ("cannot pickle 'PyIDispatch' object"); workers should
    re-attach to the session by index inside the child process instead.
    """
    sap_nav = QuerySap(sap_env=env, user_id=usr, user_password=pw,
                       language="PT")
    sap_nav.open_new_windows()
    session1 = sap_nav.sess1
    session2 = sap_nav.sess2
    p1 = Process(target=goto_trasaction, args=(session1, "TRANSACION A"))
    p2 = Process(target=goto_trasaction, args=(session2, "TRANSACTION B"))
    p1.start()
    p2.start()
    p1.join()
    p2.join()
def main():
    print(">>> Start")
    # NOTE(review): placeholder — the real SAP connection string goes here.
    sap_env = "string_for_connection"
    sap_interface_multi_process("usr_id", "usr_pw", sap_env)
    print(">>> Finish")
if __name__ == "__main__":
    main()
Could you guys help me find what I'm missing and what I should do?
Thank you very much
Finally I got the solution after some time of vacation.
But I had to refactor a lot of my code.
What I did was to instantiate the sap class to an object and pass this object to a function that will be executed in parallel. Inside of this function I use the sap class method to create a connection and create a session.
Here is my solution. Not pretty but worked:
from Modules.Sap.sapinit import Sap
def create_sap_session(sap_obj, extra_num_sessions):
    """Log in via sap_obj, then spawn extra_num_sessions additional windows."""
    connection = sap_obj.get_sap_connection()
    sap_obj.get_sap_session(connection)
    # range() over a non-positive count is empty, so no explicit guard needed
    for _ in range(extra_num_sessions):
        connection.Sessions(0).createsession()
def parallel_sap_query(sap_obj, sessions_num, transaction):
    """Worker body: attach to session index `sessions_num`, start `transaction`.

    Fix: the original assigned the session to `sap_session` but then used the
    undefined name `session`, raising NameError in every worker process.
    """
    sap_conn = sap_obj.get_sap_connection()
    sap_session = sap_conn.Sessions(sessions_num)
    sap_session.findById("wnd[0]/tbar[0]/okcd").text = transaction
    sap_session.findById("wnd[0]").sendVKey(0)
def execute_cancellations(sap_obj):
    # Create the login session plus two extras (three windows total).
    create_sap_session(sap_obj, 2)
    sleep(3)  # give SAP GUI time to register the new sessions
    # Each worker re-attaches to the connection by index, so only the
    # picklable sap_obj crosses the process boundary (this is what avoids
    # the "cannot pickle 'PyIDispatch' object" error).
    p1 = Process(target=parallel_sap_query, args=(sap_obj, 0, "A", ))
    p2 = Process(target=parallel_sap_query, args=(sap_obj, 1, "B", ))
    p3 = Process(target=parallel_sap_query, args=(sap_obj, 2, "C", ))
    p1.start()
    p2.start()
    p3.start()
    p1.join()
    p2.join()
    p3.join()
    # Close the highest index first so the remaining indices stay valid.
    close_sap_sessions(sap_obj, 2, 1, 0)
def close_sap_sessions(sap_obj, *sessions):
    # Close each given session index; the confirmation popup is acknowledged
    # by pressing its first option button.
    sap_conn = sap_obj.get_sap_connection()
    for session in sessions:
        sap_session = sap_conn.Sessions(session)
        sap_session.findById("wnd[0]").close()
        sap_session.findById("wnd[1]/usr/btnSPOP-OPTION1").press()
def main():
    # NOTE(review): sap_env / sap_id / sap_pw are not defined in this snippet
    # (placeholders or module-level constants elsewhere) — confirm before use.
    sap_obj = Sap(sap_env, sap_id, sap_pw, "PT")
    execute_cancellations(sap_obj)
I am a beginner writing a small twitter tool for scheduled tweets and automatic retweets in python/flask.
I got stuck with issues of processes running in the background.
I want scheduled tweets and retweets to work simultaneously in the background for a given user.
I want to be able to terminate these background processes running retweets/scheduled tweets separately from each other.
How would you change the code below to achieve this?
If you look at the code below now, it works, but user can not run scheduled tweets and retweets simultaneously. Also if user decides to terminate one of the processes, let us say retweets the other process terminates as well (scheduled tweets) and vice versa.
I thought about putting the identification data for a given process into a database and recalling this identification data from the database when there is a need to terminate it, instead of using cookies session, but I do not know how to implement this idea in code.
import ........
# Flask + MySQL application setup.
# NOTE(review): all 'xxx' values are redacted placeholders.
mysql = MySQL()
app = Flask(__name__)
app.secret_key = 'xxx'
app.config['MYSQL_DATABASE_USER'] = 'xxx'
app.config['MYSQL_DATABASE_PASSWORD'] = 'xxx'
app.config['MYSQL_DATABASE_DB'] = 'xxx'
app.config['MYSQL_DATABASE_HOST'] = '0.0.0.0'
mysql.init_app(app)
@app.route('/showSignin')  # '#app.route' was a markdown-mangled decorator
def showSignin():
    """Show the sign-in page, or redirect home if already signed in."""
    if session.get('user'):
        return redirect('/userHome')
    else:
        return render_template('signin.html')
@app.route('/showscheduletweets')  # '#app.route' was a markdown-mangled decorator
def showscheduletweets():
    """Show the schedule-tweets page for signed-in users, else the sign-in page."""
    if session.get('user'):
        return render_template('scheduletweets.html')
    else:
        return render_template('signin.html')
@app.route('/validateLogin',methods=['POST'])  # mangled decorator restored
def validateLogin():
    """Check credentials via sp_validateLogin; on success post a hello tweet."""
    con = None
    cursor = None
    try:
        _username = request.form['inputEmail']
        _password = request.form['inputPassword']
        # connect to mysql
        con = mysql.connect()
        cursor = con.cursor()
        cursor.callproc('sp_validateLogin',(_username,))
        data = cursor.fetchall()
        if len(data) > 0:
            if check_password_hash(str(data[0][3]),_password):
                session['user'] = data[0][0]
                # columns 4-7 hold the user's Twitter API credentials
                consumerkey = data [0][4]
                consumersecret = data [0][5]
                accesstoken = data [0][6]
                tokensecret = data [0][7]
                twitter = Twython(consumerkey, consumersecret, accesstoken, tokensecret)
                twitter.update_status(status="xxx says hello.")
                return render_template('userHome.html')
            else:
                return render_template('error.html',error = 'Wrong Email address or Password.')
        else:
            return render_template('error.html',error = 'Wrong Email address or Password.')
    except Exception as e:
        return render_template('error.html',error = str(e))
    finally:
        # Fix: if mysql.connect() raised, `cursor`/`con` were unbound and this
        # finally block itself raised NameError, masking the real error.
        if cursor is not None:
            cursor.close()
        if con is not None:
            con.close()
# schedule tweets
@app.route('/scheduletweets',methods=['POST'])  # mangled decorator restored
def scheduletweets():
    """Start a background process that tweets random entries every 10 s."""
    if session.get('user'):
        _username = request.form['inputEmail']
        con = mysql.connect()
        cursor = con.cursor()
        cursor.callproc('sp_GetTwitter', (_username,))
        data = cursor.fetchall()
        cursor.close()  # fix: the original never released cursor/connection
        con.close()
        session['user'] = data[0][0]
        consumerkey = data [0][4]
        consumersecret = data [0][5]
        accesstoken = data [0][6]
        tokensecret = data [0][7]
        twitter = Twython(consumerkey, consumersecret, accesstoken, tokensecret)
        # same form field names as the original (inputTweet1..6, inputHash1..4);
        # renamed locals: the original shadowed the builtin `list`
        tweets = [request.form[f'inputTweet{i}'] for i in range(1, 7)]
        hashtags = [request.form[f'inputHash{i}'] for i in range(1, 5)]
        def workit():
            # Tweet a random entry with a random hashtag sample every 10 s.
            while True:
                try:
                    if tweets:
                        k = random.randint(1, len(hashtags))
                        tags = " ".join(str(x) for x in random.sample(hashtags, k))
                        # fix: the original index randint(0, len)-1 mapped both
                        # 0 and len to the last element, doubling its odds;
                        # random.choice is uniform
                        toTweet = random.choice(tweets) + " " + tags
                        twitter.update_status(status=toTweet)
                        time.sleep(10)
                    else:
                        twitter.update_status(status="Oh dear... I'm afraid I'm rather empty =(")
                        break
                except TwythonError as e:
                    print (e)
        # NOTE(review): both this route and /retweet store their process under
        # the same 'work_process' session key, so the two jobs cannot coexist
        # or be stopped independently — use distinct keys to fix the asker's
        # reported limitation.
        if 'work_process' not in session:
            process = Process(target=workit)
            process.start()
            pid = process.pid
            parent_pid = psutil.Process(process.pid).parent().pid
            session['work_process'] = (parent_pid, pid)
        return redirect('/showscheduletweets')
    else:
        # fix: the original returned None (HTTP 500) for unauthenticated users
        return render_template('signin.html')
# retweets
@app.route('/retweet',methods=['POST'])  # mangled decorator restored
def retweet():
    """Start a background process retweeting matching tweets every 60 s."""
    if session.get('user'):
        _username = request.form['inputEmail']
        con = mysql.connect()
        cursor = con.cursor()
        cursor.callproc('sp_GetTwitter', (_username,))
        data = cursor.fetchall()
        session['user'] = data[0][0]
        consumerkey = data [0][4]
        consumersecret = data [0][5]
        accesstoken = data [0][6]
        tokensecret = data [0][7]
        Retweet1 = request.form['inputRetweet1']
        Retweet2 = request.form['inputRetweet2']
        Retweet3 = request.form['inputRetweet3']
        Retweet4 = request.form['inputRetweet4']
        Exclude1 = request.form['inputExclude1']
        Exclude2 = request.form['inputExclude2']
        def work():
            twitter = Twython(consumerkey, consumersecret, accesstoken, tokensecret)
            naughty_words = [Exclude1, Exclude2]
            good_words = [Retweet1, Retweet2, Retweet3, Retweet4]
            # fix: renamed from `filter`, which shadowed the builtin
            include_terms = " OR ".join(good_words)
            blacklist = " -".join(naughty_words)
            keywords = include_terms +" -"+ blacklist
            print(keywords)
            while True:
                search_results = twitter.search(q=keywords, count=10)
                try:
                    for tweet in search_results["statuses"]:
                        try:
                            twitter.retweet(id = tweet["id_str"])
                            time.sleep(60)  # throttle: one retweet per minute
                        except TwythonError as e:
                            print (e)
                except TwythonError as e:
                    print (e)
        # NOTE(review): shares the 'work_process' session key with the
        # schedule-tweets job, so only one of the two can run/be stopped at a
        # time — use a distinct key per job type to run both simultaneously.
        if 'work_process' not in session:
            process = Process(target=work)
            process.start()
            pid = process.pid
            parent_pid = psutil.Process(process.pid).parent().pid
            session['work_process'] = (parent_pid, pid)
        return redirect('/showretweet')
    else:
        # fix: the original returned None (HTTP 500) for unauthenticated users
        return render_template('signin.html')
# terminating scheduled tweets and retweets
@app.route('/stoptweet', methods=['POST'])  # mangled decorator restored
def stoptweet():
    """Terminate the background process recorded in the session, if any."""
    if 'work_process' in session:
        parent_pid, pid = session['work_process']
        try:
            process = psutil.Process(pid)
            # only terminate if it is still the child we started
            # (guards against PID reuse by an unrelated process)
            if process.parent().pid == parent_pid:
                process.terminate()
        except psutil.NoSuchProcess:
            pass
        session.pop('work_process')
    # both branches of the original returned the same template
    return render_template('index.html')
if __name__ == '__main__':
    # Fix: the original default port was the bare placeholder `xxx`, a
    # NameError at runtime; 5000 is Flask's conventional default.
    app.run(host=os.getenv('IP', '0.0.0.0'), port=int(os.getenv('PORT', 5000)))
You might want to use celery python module, and move schedule tweet and retweet as background works.
For further info, see doc: http://flask.pocoo.org/docs/0.11/patterns/celery/
You will decorate those functions related to celery, rather than flask.
As example:
In your script:
import my_schedule_module
and then in my_schedule_module.py:
from celery import Celery, Task
from celery.result import AsyncResult
from celery.task.base import periodic_task
import sqlite3 # Here I use sqlite, can be sql
import redis # Here I am using redis, you can use another db as well > check documentation
from datetime import timedelta # used to schedule your background jobs, see in configuration below
app_schedule = Celery('my_schedule_module')
'''
Celery Configuration
'''
# a mockup configuration of your background jobs, as example use retweet each 60s
app_schedule.conf.update(
    CELERY_ACCEPT_CONTENT = ['application/json'],
    CELERY_TASK_SERIALIZER='json',
    # CELERY_ACCEPT_CONTENT=['json'], # Ignore other content
    CELERY_RESULT_SERIALIZER='json',
    # CELERY_TIMEZONE='Europe/Oslo',
    # CELERY_ENABLE_UTC=True,
    CELERYD_TASK_TIME_LIMIT = 600,        # hard-kill a task after 10 min
    CELERYD_TASK_SOFT_TIME_LIMIT = 600,   # soft limit raised inside the task
    CELERYD_MAX_TASKS_PER_CHILD = 1000,   # recycle workers to bound leaks
    CELERYD_OPTS="--time-limit=600 --concurrency=4",
    BROKER_URL = 'redis://localhost:6379/0',       # message broker (redis)
    CELERY_RESULT_BACKEND = 'redis://localhost',   # result storage
    # celery beat: run my_schedule_module.retweet every 60 seconds
    CELERYBEAT_SCHEDULE = {
        'add-every-60-seconds': {
            'task': 'my_schedule_module.retweet',
            'schedule': timedelta(seconds=60)
        },
    }
)
@app_schedule.task()  # '#app_schedule.task()' was a markdown-mangled decorator
def retweet(tweet):
    """Celery task: retweet a single tweet (implementation left to the reader).

    Fix: the original body was only a comment, which is a syntax error; the
    docstring gives the function a valid body.
    """
@app_schedule.task()  # mangled decorator restored
def scheduletweets():
    """Celery task (pseudo code): fan out one retweet sub-task per tweet."""
    tweets = get_tweets()
    # one task signature (.s) per tweet, executed together as a group
    process_tweet_list = [retweet.s(tweet) for tweet in tweets]
    job = group(process_tweet_list)  # group is celery.group, see documentation
    result = job.apply_async()  # process job list async
    # Fix: the original used Python 2 `print 'result', ...` statement syntax.
    print('result', result.ready(), result.successful())
You can also use callback functions - as example, you might want to update datetime in your db of when your tweet was retweeted.
In this case, you would have a syntax like:
result = my_schedule_module.retweet.apply_async( (tweet,) , link=my_schedule_module.callback_to_store_results_of_retweet.s())
my script is a server that listens to clients requests and send responses. It handles requests by threading:
class Server:
    # NOTE(review): Python 2 code (`except socket.error, (...)`, print
    # statements) — it will not run under Python 3 without porting.
    def __init__(self):
        self.host = ''      # bind on all interfaces
        self.port = 50000
        self.backlog = 5    # declared, but listen() below hard-codes 5
        self.size = 1024
        self.server = None
        self.threads = []   # one Client thread per accepted connection
    def open_socket(self):
        # Create, bind and listen; exit the process if the port is unavailable.
        try:
            self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.server.bind((self.host,self.port))
            self.server.listen(5)
        except socket.error, (value,message):
            if self.server:
                self.server.close()
            print "Could not open socket: " + message
            sys.exit(1)
    def run(self):
        # Accept clients until anything arrives on stdin. This is why the
        # server stops when Enter is pressed: sys.stdin is in the select()
        # set, and any stdin line sets running = 0. Running detached (nohup,
        # service) gives stdin EOF/odd behavior — remove sys.stdin from the
        # set to daemonize.
        self.open_socket()
        input = [self.server,sys.stdin]
        running = 1
        while running:
            inputready,outputready,exceptready = select.select(input,[],[])
            for s in inputready:
                if s == self.server:
                    # handle the server socket
                    c = Client(self.server.accept())
                    c.start()
                    self.threads.append(c)
                elif s == sys.stdin:
                    # handle standard input
                    junk = sys.stdin.readline()
                    running = 0
        # close all threads
        self.server.close()
        for c in self.threads:
            c.join()
class Client(threading.Thread):
    # NOTE(review): Python 2 code (tuple-unpacking parameter, print
    # statements) — will not run under Python 3 without porting.
    def __init__(self,(client,address)):
        threading.Thread.__init__(self)
        self.client = client
        self.address = address
        self.size = 1024    # max bytes per recv()
    def run(self):
        # Serve one connected client: read whitespace-split commands and
        # reply; close the socket and exit when the peer disconnects.
        running = 1
        while running:
            data = self.client.recv(self.size)
            if data:
                data2 = data.split()
                if data2[0] == 'Hello':
                    status = 'Hello'
                    #fetch from database users by location
                    reply= '6'
                if data2[0] == 'Index':
                    status = 'Index'
                    #fetch from database users by location
                    reply='I'
                    # NOTE(review): a fresh DB connection per 'Index' request
                    # and it is never closed — a connection leak. Credentials
                    # are also hard-coded here.
                    db = MySQLdb.connect(host="localhost", # your host, usually localhost
                        user="root", # your username
                        passwd="Rambo_9134", # your password
                        db="secure_login") # name of the data base
                    # you must create a Cursor object. It will let
                    # you execute all the queries you need
                    cur = db.cursor()
                    # Use all the SQL you like
                    cur.execute("SELECT ml.member,m.username FROM locations l JOIN memberlocation ml ON(l.id = ml.location) JOIN members m ON(m.id = ml.member) where l.id = 1;")
                    # print all the first cell of all the rows
                    data = []
                    for row in cur.fetchall() :
                        print row[1]
                        data.append({row[0]:row[1]})
                    print 'JSON', json.dumps(data)
                    reply = data
                # send the JSON-encoded reply back to the client
                self.client.send(json.dumps(reply))
            else:
                # empty recv() means the peer closed the connection
                self.client.close()
                running = 0
if __name__ == "__main__":
    s = Server()
    s.run()  # blocks until a line is read on stdin
This script runs perfectly, but it stops when I press Enter. I have tried many alternatives: daemon, nohup, ... but I couldn't make it run as a service in the background. I think this is a programming issue.
How can I make this script run in the background as a service?
For a quick and easy way in a test/dev environment you can use screen.
screen -S mySessionName
This starts a new screen session with the name mySessionName and attaches to that session. Inside this session you can now run your code.
Use Ctrl+A, D to detach from that session. Your code will continue to run.
To reattach to that session use:
screen -r mySessionName
To show all sessions use:
screen -ls
In a production environment however you should be looking at supervisor. This serverfault question might help.
Make a PHP or HTML script devoted solely to running that python program. Then, run that PHP/HTML script on the server and you're good :).