Close a PythonShell in JS (Electron + Flask)

I've googled around but cannot find a proper answer; I should say I'm pretty much a novice with NodeJS and Electron. My problem is that I've connected my Electron app to Flask using python-shell, but when I close my app, Flask keeps running in the background even after I close the terminal.
This is how I connect my app to Flask:
var pyshell = require('python-shell');

pyshell.PythonShell.run('engine.py', function (err, results) {
  if (err) console.log(err);
});
Is there any way to "un-run" (close, quit, kill) the PythonShell?
I've tried this, but it is not working:
app.on('window-all-closed', () => {
  if (process.platform !== 'darwin') {
    pyshell.kill('engine.py'); // <-- I'm guessing here
    app.quit()
  }
})
This is the full code; it's quite short, so it may help identify the problem:
const {app, BrowserWindow} = require('electron')

function createWindow () {
  window = new BrowserWindow({width: 800, height: 600})
  window.loadFile('index.html')

  var pyshell = require('python-shell');
  pyshell.PythonShell.run('engine.py', function (err, results) {
    if (err) console.log(err);
  });
}

app.on('ready', createWindow)

app.on('window-all-closed', () => {
  if (process.platform !== 'darwin') {
    pyshell.kill('engine.py'); // <-- I'm guessing here
    app.quit()
  }
})

Using pyshell.kill() doesn't actually kill the process; rather, it signals the child process to stop, and sometimes the child fails to stop.
Try using the tree-kill npm package to kill that process; it handles such scenarios nicely.
// Declaring tree-kill
var kill = require('tree-kill');

// Killing the python process
kill(pyshell.pid);
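For context, a minimal sketch of how this could be wired into the Electron example above. It assumes the PythonShell instance is kept in a variable outside createWindow (in the original it is local to that function) and that the underlying child process is reachable via pyshell.childProcess; the exact property can vary between python-shell versions.
const { app, BrowserWindow } = require('electron');
const { PythonShell } = require('python-shell');
const kill = require('tree-kill');

let pyshell = null; // kept in outer scope so the close handler can reach it

function createWindow () {
  const window = new BrowserWindow({ width: 800, height: 600 });
  window.loadFile('index.html');

  pyshell = new PythonShell('engine.py');
}

app.on('ready', createWindow);

app.on('window-all-closed', () => {
  if (process.platform !== 'darwin') {
    if (pyshell && pyshell.childProcess) {
      // tree-kill terminates the Python process and anything it spawned
      // (e.g. Flask's reloader process)
      kill(pyshell.childProcess.pid);
    }
    app.quit();
  }
});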

Related

How can I use node.js to run a python file that runs forever?

I am wondering how to use node.js to run a Python file that keeps printing until it is stopped.
Right now, when I run it, it does not print anything. Is there a way I can make it work properly?
Node.js
let {PythonShell} = require('python-shell')

var options = {
  pythonOptions: ['-u']
};

PythonShell.run('main.py', options, function (err, result) {
  if (err) throw err;
  // result is an array consisting of messages collected during execution of the script
  if (result !== null) {
    console.log(result.toString());
  }
});
A script similar to mine:
main.py
num = 1
while True:
    print(num)
    num += 1
I'm not familiar with the python-shell package, but you can easily spawn a new process to run Python programs using the spawn method from the child_process module that comes with Node.
Here is how you can use it:
const { spawn } = require("child_process");
const cmd = spawn("python3", [__dirname + "/main.py"]);

cmd.stdout.on("data", (data) => {
  console.log(`stdout: ${data}`);
});

cmd.stderr.on("data", (data) => {
  console.error(`stderr: ${data}`);
});

cmd.on("close", (code) => {
  console.log(`child process exited with code ${code}`);
});
Read the documentation for more info [link]
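For completeness, python-shell itself can stream a long-running script's output line by line instead of collecting it at the end. A sketch, assuming the main.py from the question; the -u option keeps Python's stdout unbuffered so lines arrive as they are printed:
const { PythonShell } = require('python-shell');

const shell = new PythonShell('main.py', { pythonOptions: ['-u'] });

// 'message' fires once per line the script writes to stdout
shell.on('message', (message) => {
  console.log(message);
});

// anything the script writes to stderr
shell.on('stderr', (stderr) => {
  console.error(stderr);
});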

Is there a way to increase the time out on a callback function in nodejs?

I am running a simple nodejs web application that executes a Python 2.7 script using the python-shell module. However, since the script takes too long to execute (around 3 minutes), the request fails with an ERR_EMPTY_RESPONSE error.
The script does finish, since Python keeps running it in the background, but the web application crashes.
generatePPage: (req, res) => {
  const ps = require('python-shell');
  let nombre = req.params.id;
  var PythonShell = require('python-shell');
  var options = {
    pythonPath: '/usr/bin/python2.7',
    args: [nombre],
  };
  var users = ps.PythonShell.run('./generateDetail.py', options, function (err, results) {
    if (err) { throw err; }
    console.log('The results: %j', results);
    res.render('success.ejs', {
      title: 'Edit Project'
      ,project: ''
      ,message: 'Success'
    });
  });
},
How could I force it to wait longer?
Yes, you have to use setTimeout.
Check this link for more information: http://www.java2s.com/Tutorials/Javascript/Node.js_Tutorial/0270__Node.js_setTimeout_setInterval.htm
And also, check out this link: https://nodejs.org/ar/docs/guides/timers-in-node/
Solved it. The way to do it is to assign the return value of the listen call to a const and then set a new timeout value in milliseconds.
const server = app.listen(port, () => {
  console.log(`Server running on port: ${port}`);
});
server.timeout = 300000;
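For reference, the same effect can be scoped to just the slow route by extending the timeout on that request's socket inside the handler, rather than server-wide; a sketch, reusing the five-minute value from above:
generatePPage: (req, res) => {
  // extend the socket timeout for this slow request only (5 minutes)
  req.setTimeout(300000);
  res.setTimeout(300000);

  // ...then run the python-shell call exactly as in the question
},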

Can I use the child_process to spawn multiple python outputs on a server?

I am trying to use NodeJS to execute multiple Python scripts and send their output to a local host. I would like not to be tied to one specific Python script, but rather to execute anything that ends in ".py".
I have tried running multiple processes, but the last one overwrote the former on localhost.
Python scripts:
hellothere.py
print("hello there")
helloworld.py
print("Hello World!")
Goodbye.py
print("Goodbye!")
Pythonspawn.js
var express = require('express');
var app = express();

app.get('/name', function callName(req, res) {
  var spawn = require("child_process").spawn;

  var PythonProcess1 = spawn('python', ["./hellothere.py"]);
  var PythonProcess2 = spawn('python', ['./helloworld.py']);
  var PythonProcess3 = spawn('python', ['./Goodbye.py']);

  PythonProcess1.stdout.on('data', function(data) {
    res.send(data.toString());
  })
  PythonProcess2.stdout.on('data', function(data) {
    res.send(data.toString());
  })
  PythonProcess3.stdout.on('data', function(data) {
    res.send(data.toString());
  })
})

app.listen(1820, function() {
  console.log('Server is running on port %d.', this.address().port);
})
I would like to execute any Python script that ends in ".py" rather than specifying the exact script to run. If possible, I would also like to execute scripts that take different numbers of arguments (e.g. if helloworld.py had two sys.argv[i] and Goodbye.py had one sys.argv[i]).
You can make use of exec() here. In this example I am checking for all the .js files in the current working directory, executing them all, adding each result to an array, and finally returning it.
const { exec } = require('child_process');

var result = [];

exec('ls | grep .js', (error, stdout, stderr) => {
  if (error) {
    console.error(`exec error: ${error}`);
    return;
  }
  var s = stdout.split('\n');
  s.pop();
  console.log(s);
  executeFiles(s);
});

function executeFiles(filenames) {
  filenames.forEach((element, index) => {
    exec(`node ${element}`, (error, stdout, stderr) => {
      if (error) {
        console.error(`exec error: ${error}`);
        return;
      }
      console.log(stdout);
      result.push(stdout.toString());
      if (index === filenames.length - 1) {
        console.log(result);
        return result;
      }
    });
  });
}
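Adapting that idea to the question's .py scripts, here is a sketch; the plain python command and the <br> separator are assumptions. It discovers every .py file next to the server script, runs each one, and sends a single combined response so later outputs no longer overwrite earlier ones (extra per-script arguments could be appended where the command string is built):
const express = require('express');
const { exec } = require('child_process');
const fs = require('fs');
const path = require('path');
const app = express();

app.get('/name', (req, res) => {
  // find every .py file in the same directory as this server script
  const scripts = fs.readdirSync(__dirname).filter((f) => f.endsWith('.py'));

  // run each script and collect its stdout
  const runs = scripts.map((script) => new Promise((resolve, reject) => {
    exec(`python ${path.join(__dirname, script)}`, (error, stdout, stderr) => {
      if (error) return reject(error);
      resolve(`${script}: ${stdout}`);
    });
  }));

  // send one response once every script has finished
  Promise.all(runs)
    .then((outputs) => res.send(outputs.join('<br>')))
    .catch((err) => res.status(500).send(err.toString()));
});

app.listen(1820, function() {
  console.log('Server is running on port %d.', this.address().port);
});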

Executing Python from NodeJS (hosted on an external domain)

So I have the following node.js express server running:
const express = require('express')
const app = express()
const PythonShell = require('python-shell');
app.get('/', (req, res) => {
  res.send('Please append a search');
});

app.get('/:card', (req, res) => {
  card = [req.params.card]
  var pyshell = new PythonShell('search_card.py', {args: card});
  pyshell.on('message', (message) => {
    res.send(message);
  });
  pyshell.end((err) => {
    if (err) throw err;
  });
});

app.listen(9000, () => console.log('App listening on port 9000!'));
When I access the app via the server's local IP (the server is hosted within the intranet), everything works fine.
But if I use the external IP, the root of the app ( / ) works, so clearly the connection is possible and the port is open, etc. However, if I use the search function ( /search_query ), it takes a few seconds (as if the Python script is actually running) and then Chrome says ERR_CONNECTION_REFUSED (This site can't be reached).
Can anyone explain this behavior?
EDIT:
I put a console.log inside pyshell.on('message') and the data is being received from the Python program. It's as soon as the script finishes writing that it fails.
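For reference, the handler with that logging in place would look roughly like this; collecting the messages and sending a single response from end() is an assumption about the eventual fix, not part of the original code:
app.get('/:card', (req, res) => {
  const card = [req.params.card];
  const pyshell = new PythonShell('search_card.py', { args: card });
  const messages = [];

  pyshell.on('message', (message) => {
    console.log('received:', message); // this logs fine, so the script's output does arrive
    messages.push(message);
  });

  pyshell.end((err) => {
    if (err) throw err;
    // respond once here instead of once per message
    res.send(messages.join('\n'));
  });
});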

return JSON from python to node via spawn

I have a Python script that takes two arguments: a directory and a file name.
The Python script creates a JSON object from specific files in the provided directory and saves it under the name given as the second argument.
However, if the second argument is equal to the string "stream", the JSON data is output to STDOUT instead.
I wrote a Node script that spawns a child process to call the Python script from the terminal, and it works as intended.
"use strict";
const spawn = require("child_process").spawn;
const command = "(path to python)";
const loc = "(path to .py script)";
const acct = process.argv[2];
const output = process.argv[3];
let callPy = spawn(command, ["erik.py", acct, output], {
cwd: loc,
stdio: "pipe"
});
callPy.stdout.on("data", (data) => {
if (data.toString() === "success") {
console.log(acct, "generated");
} else {
console.log(data.toString());
}
});
EDIT:
I have unmarked this issue as solved: after spending a bit more time trying to implement this, I have not come to a satisfactory solution that allows me to synchronously call a child process from node, signal the python script to emit JSON data, receive the data, and then send the data to the browser. I tried using a promise chain on the child process:
let child = require("child_process").spawn; // or spawnSync

let spawn = () => {
  let spawned = child(command, args, options, (err, stdout, stderr) => {
    if (err) { console.log(err) };
  });
  return spawned
};

let listen = (child) => {
  child.stdout.on("data", (data) => {
    console.log("PID", child.pid);
    console.log("data from listen func: ", data);
    return child
  });
};

let kill = (child) => {
  child.kill("SIGTERM");
}

var p = new Promise((res, e) => {
  res( spawn() )
  e( console.error(e) )
});

p.then(result => {
  return listen(result);
})
p.then(result => {
  return kill(result);
});
Using spawn(), the child terminates before any of the data is returned.
Using spawnSync(), the promise chain tries (and fails) to listen on the child's IO before the child is spawned.
I have yet to try websockets to transmit the data, but I doubt that will solve this; the promise returns an empty object to my router function invocation before the promise chain retrieves the chunks from the Python script.
Any further insight is welcome.
So you need at least two things to do this:
A way to queue commands to execute with spawn.
An async pattern to await execution of a command and join processes when each executable terminates.
A minimalistic example is:
var cmd = new CommandLine({
  debug: true,
  error: true,
  delay: true
});

// commandItemsArray is a list of [command, options, arguments] entries
commandItemsArray = [ ['ls', '-l', './'], ['ls', '-a', './'] ];

cmd.executeCommands(commandItemsArray,
  function(results) {
    console.log(results);
  },
  function(error) {
    console.log(error);
  });
There are several packages on npm that do both (search for node cli, command line, etc.); one is node-commander, which uses a Promise.all pattern to achieve the second task:
function PromiseAll(items, block, done, fail) {
  var self = this;
  var promises = [], index = 0;
  items.forEach(function(item) {
    promises.push(function(item, i) {
      return new Promise(function(resolve, reject) {
        return block.apply(this, [item, index, resolve, reject]);
      });
    }(item, ++index))
  });
  Promise.all(promises).then(function AcceptHandler(results) {
    if (done) done(results);
  }, function ErrorHandler(error) {
    if (fail) fail(error);
  });
} //promiseAll
I was able to resolve this issue relatively simply using websockets:
The client submits the request, which is communicated to the server via socket.IO. The request is received and the spawn event is triggered; when the chunks are finished appending, a termination event is emitted, which triggers killing of the child process and returning the data to the client.
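A rough sketch of that flow, assuming a standalone socket.IO server and the erik.py invocation from the question; the 'generate' and 'result' event names are made up for illustration:
const io = require('socket.io')(3000);
const { spawn } = require('child_process');

io.on('connection', (socket) => {
  socket.on('generate', (acct) => {
    // 'stream' tells the python script to write its JSON to stdout
    const child = spawn('python', ['erik.py', acct, 'stream']);
    let chunks = '';

    // accumulate stdout chunks until the script finishes writing
    child.stdout.on('data', (data) => { chunks += data.toString(); });

    child.stdout.on('end', () => {
      socket.emit('result', JSON.parse(chunks)); // return the JSON to the client
      child.kill('SIGTERM');                     // then terminate the child process
    });
  });
});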
