Executing a Linux command in the background from a Python script

I have an Ubuntu machine where a Kubernetes cluster is running. I am trying to run a Kubernetes command in the background from a Python 3 script, but it's not working.
Below is part of a larger script. I build cmd4 with a formatted string and then pass it to os.system(cmd4). But as soon as I execute the script it starts printing the logs on the command line. I also tried nohup, as shown further down, but then it starts populating the logs in nohup.out.
for podname in pod_names:
    if "smf" in podname or "pcf" in podname or "udm" in podname:
        containername = 'worker'
    else:
        containername = 'cppe'
    cmd4 = f"kubectl logs -f -n core {podname} --container={containername} > {podname}_{containername}_log </dev/null &>/dev/null &&"
    cmd5 = f"echo $! >> pid.txt"
    os.system(cmd4)
    os.system(cmd5)
    pid_file = open('pid.txt', 'a+')
    pid_file.write("\n")
    pid_file.close
=================================
tried with nohup as:
cmd4 = f"nohup kubectl logs -f -n core {podname} --container={containername} > {podname}_{containername}_log </dev/null &>/dev/null &&"
But it gives this output:
updatedReplicas: 1
deployment.apps/core-pcf configured
nohup: ignoring input and appending output to 'nohup.out'

Have you tried using the subprocess library?
I think the solution for your script will be the following.
import subprocess

for podname in pod_names:
    if "smf" in podname or "pcf" in podname or "udm" in podname:
        containername = 'worker'
    else:
        containername = 'cppe'
    # shell redirections ("> file", "&>/dev/null") and "$!" are not understood by
    # Popen when you split the string, so let Popen do the redirection itself
    cmd4 = f"kubectl logs -f -n core {podname} --container={containername}"
    log_file = open(f"{podname}_{containername}_log", "w")  # stays open while kubectl runs
    proc = subprocess.Popen(cmd4.split(),
                            stdout=log_file,
                            stderr=subprocess.DEVNULL,
                            stdin=subprocess.DEVNULL)
    # Popen already knows the pid, so no "echo $!" is needed
    with open('pid.txt', 'a+') as pid_file:
        pid_file.write(f"{proc.pid}\n")
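If the pids collected in pid.txt are meant for stopping the log streams later, a minimal sketch (assuming the file holds one pid per line) could look like this:
import os
import signal

# stop every recorded kubectl process (assumes pid.txt holds one pid per line)
with open('pid.txt') as pid_file:
    for line in pid_file:
        line = line.strip()
        if line:
            os.kill(int(line), signal.SIGTERM)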

Related

Standard input EOF error when creating a Python container using docker-py [duplicate]

Considering this shell example:
echo "hello" | docker run --rm -ti -a stdin busybox \
/bin/sh -c "cat - >/out"
This will execute a busybox container and create a new file /out with the contents hello.
How would I accomplish this with docker-py?
The docker-py equivalent:
container = docker_client.create_container(
    'busybox',
    stdin_open=True,
    command='sh -c "cat - >/out"'
)
docker_client.start(container)
There is stdin_open = True, but where do I write the 'hello'?
Back then it was not possible to attach stdin to a running container. This has changed.
With the current version of docker-py this is now possible, if somewhat hacky (aka slix's workaround). It is taken from a discussion on GitHub that focuses on Python 2.7.
Here is the example in Python 3 with docker-py version 3.1.1:
import docker
import tarfile
from io import BytesIO

def test_send_data_via_stdin_into_container():
    client = docker.APIClient()

    # create container
    container = client.create_container(
        'busybox',
        stdin_open=True,
        command='sh -c "cat - >/received.txt"')
    client.start(container)

    # attach stdin to container and send data
    original_text_to_send = 'hello this is from the other side'
    s = client.attach_socket(container, params={'stdin': 1, 'stream': 1})
    s._sock.send(original_text_to_send.encode('utf-8'))
    s.close()

    # stop container and collect data from the testfile
    client.stop(container)
    client.wait(container)
    raw_stream, status = client.get_archive(container, '/received.txt')
    tar_archive = BytesIO(b"".join(raw_stream))
    t = tarfile.open(mode='r:', fileobj=tar_archive)
    text_from_container_file = t.extractfile('received.txt').read().decode('utf-8')
    client.remove_container(container)

    # check for equality
    assert text_from_container_file == original_text_to_send

if __name__ == '__main__':
    test_send_data_via_stdin_into_container()
Here's an updated solution:
#!/usr/bin/env python
import docker

# connect to docker
client = docker.APIClient()

# create a container (note: the client variable is "client", not "docker_client")
container = client.create_container(
    'busybox',
    stdin_open=True,
    command='sh -c "cat - >/out"')
client.start(container)

# attach to the container stdin socket
s = client.attach_socket(container, params={'stdin': 1, 'stream': 1})

# send text
s.send('hello')

# close, stop and disconnect
s.close()
client.stop(container)
client.wait(container)
client.remove_container(container)
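To check that the text actually arrived in /out, the get_archive/tarfile pattern from the test function above can be reused just before the remove_container call; a sketch, reusing the client and container objects from the snippet above:
import tarfile
from io import BytesIO

# fetch /out from the stopped container and print its contents (expected: hello)
raw_stream, status = client.get_archive(container, '/out')
archive = BytesIO(b"".join(raw_stream))
with tarfile.open(mode='r:', fileobj=archive) as tar:
    print(tar.extractfile('out').read().decode('utf-8'))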

understanding the input to subprocess.Popen()

I want to run a few lines on the command line while using input from a GUI.
I checked Google, and what I understand is that it's best to use subprocess.
I'm having a hard time understanding how to pass my CLI code into subprocess.Popen().
I want to run the following CLI code using the function I created below:
SET PRESTO_PASSWORD=XXXXX
java -jar presto.jar --server https://address.to.server --catalog hive --debug --schema XXXX --user=XXXX --password --execute "SELECT * FROM TABLE;" --output-format CSV_HEADER > test.csv
echo done
I've created the code below, but when I run it I only get this output:
output: b''
error: b''
(so I don't even see 'done' as output, even though I echo it at the end)
I'm sure the CLI code is OK; I've run it manually in CMD and I do get results.
What am I doing wrong? Below is the function I use to execute the commands:
def calculate():
    def subprocess_cmd(command):
        process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        stdout, stderr = process.communicate()
        output = f'output: {stdout}'
        output_error = f'error: {stderr}'
        return output, output_error

    def update_output(output_string):
        output_sqlcode.configure(state='normal')
        output_sqlcode.insert('end', f'{output_string}\n')
        output_sqlcode.configure(state='disabled')
        label_output.grid(column=0, row=6, columnspan=4, pady=0)
        output_sqlcode.grid(column=0, row=7, columnspan=4, padx=10, ipadx=250, ipady=100, pady=10)

    usr_name = input_username.get()
    usr_pass = input_password.get()
    sql_code = input_sqlcode.get()
    file_name = 'test'
    cli_code = f'SET PRESTO_PASSWORD={usr_pass}; java -jar presto.jar --server {server_address} --catalog hive --debug --schema {schema_name}--user={usr_name} --password --execute "{sql_code};" --output-format CSV_HEADER > {file_name}.csv; echo done'
    output, error = subprocess_cmd(cli_code)
    update_output(output)
    update_output(error)
Update
I think I found the reason why it is not working. I was running the script from a folder that lives in the cloud (OneDrive). Since the command runs through CMD, that gave errors. I moved the folder to a different location, but all my locations are UNC paths:
\\xx.xxx.net\xxx\xx\x\xx\xx\07 Personal\xxxx\file.py
Given that all my locations are like that, I need a way to run CMD from a UNC location. I tried using pushd and popd, but that still gave me the UNC location error (probably because I'm trying to run pushd while I'm already in a UNC working directory?):
cli_code = f'Pushd {unc_location};SET PRESTO_PASSWORD={usr_pass}; java -jar presto.jar --server {server_address} --catalog hive --debug --schema {schema_name}--user={usr_name} --password --execute "{sql_code};" --output-format CSV_HEADER > {file_name}.csv; popd; echo done'
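A sketch of something that might help (untested, assumes the cli_code string from above): cmd.exe refuses to use a UNC path as its working directory, so instead of relying on pushd you can hand subprocess a local cwd explicitly, and write the .csv to an absolute path so it still lands where you expect. Also note that cmd.exe separates commands with & rather than ;, so the one-liner itself may need adjusting.
import subprocess
import tempfile

# start cmd.exe from a local directory instead of the UNC share
process = subprocess.Popen(cli_code,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE,
                           shell=True,
                           cwd=tempfile.gettempdir())
stdout, stderr = process.communicate()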

Syntax error when trying to create and print out a series of command-line commands

I'm trying to write a Python 3 script that will print out a series of command-line commands using variables.
Here's an example of the commands I'm trying to replicate:
filemorph cp testBitmap_1 gs://mapper-bitmap/TestBitmaps
filemorph cp gs://mapper-bitmap/TestBitmaps/testBitmap_1.svs /mnt/pixels/bitmaps
mkdir -p /mnt/pixels/1024/testBitmap_1
image_rotate --image_rotate-progress bitsave "/mnt/pixels/bitmaps/testBitmap_1.svs" /mnt/pixels/1024/testBitmap_1/ --pixel-size 1024
filemorph -m rsync -d -r /mnt/pixels/1024/testBitmap_1 gs://mapper-pixels/1024/testBitmap_1
Whenever I run my script, I get this error:
G:\Projects\Python\BitmapBot
λ python bitmapbot.py
File "bitmapbot.py", line 26
commands = """\
^
SyntaxError: invalid syntax
I checked all my indentation and it all seems correct, so I'm not sure why it's giving me an error or what I'm doing wrong.
If anyone sees anything, please let me know.
Thanks!
Oh here's the script:
import os

# define variables
data = dict(
    Bitmap_Name = 'testBitmap_1.svs',
    Bitmap_Title = 'testBitmap_1',
    Bitmap_Folder_Name = 'TestBitmaps',
    Cloud_Bitmap_Directory = 'gs://mapper-bitmap/',
    Pixel_Bitmap_Engine = '/mnt/pixels/bitmaps',
    Local_Bitmap_Directory = '',
    Local_Pixel_Directory = '/mnt/pixels/1024/',
    Cloud_Pixel_Directory = 'gs://mapper-pixels/1024/'

# create commands with Python:
commands = """\
filemorph cp {Bitmap_Name} {Cloud_Bitmap_Directory}/{Bitmap_Folder_Name}
filemorph cp {Cloud_Bitmap_Directory}/{Bitmap_Folder_Name}/{Bitmap_Name} {Pixel_Bitmap_Engine}
mkdir -p {Local_Pixel_Directory}/{Bitmap_Title}
image_rotate --image_rotate-progress bitsave {Pixel_Bitmap_Engine}/{Bitmap_Name} {Local_Pixel_Directory}/{Bitmap_Title}/ --pixel-size 1024
filemorph -m rsync -d -r {Local_Pixel_Directory}/{Bitmap_Title} {Cloud_Pixel_Directory}/{Bitmap_Title}
"""

# loop through commands and print
for command in commands.splitlines():
    command = command.format(**data)  # populate command
    # os.system(command)  # execute command
    print(command)
You never closed the bracket after data = dict(…
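In other words, the dict( call needs its closing parenthesis before the commands string starts, e.g.:
data = dict(
    Bitmap_Name = 'testBitmap_1.svs',
    # ... the other entries from above ...
    Cloud_Pixel_Directory = 'gs://mapper-pixels/1024/'
)  # <-- this closing parenthesis was missing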

Unable to access /var/tmp in subprocess via Django

I created a script that shows the execution result of a shell script on a web page, using Django and Python's subprocess module.
Specifically, I created the following two scripts.
test.py
#!/usr/bin/python
import sys, os
import subprocess
import syslog

command_list = ['/bin/sh', '/var/tmp/test.sh']

proc = subprocess.Popen(args=command_list,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        cwd=os.path.dirname(command_list[0]),
                        shell=False)
result = proc.communicate(input=None)
print str(result)
test.sh
#!/bin/bash
echo "begin"
cat /var/tmp/data.txt
data.txt
data1
data2
Unit tests were performed on the two scripts, and they were confirmed to work properly.
However, when I run test.py via Django, "cat: /var/tmp/data.txt: No such file or directory" is displayed, even though test.sh's cat command and data.txt both exist.
What is the cause?
version
python 2.7.13
Django 1.11.20
When I set PrivateTmp=false in the httpd unit file, httpd can access /var/tmp again:
vi /usr/lib/systemd/system/httpd.service
PrivateTmp=false
systemctl daemon-reload
service httpd restart
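A quick way to see the effect from inside the Django/httpd process (a hypothetical check, not part of the original answer): log whether the file is visible before and after the change.
import os
import syslog

# with PrivateTmp=true, httpd gets its own private (empty) /tmp and /var/tmp,
# so this logs False until the unit file is changed and httpd is restarted
syslog.syslog("httpd sees /var/tmp/data.txt: %s" % os.path.exists("/var/tmp/data.txt"))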

create logical volume using python cgi

I am creating a Python CGI script that accepts an LV size from a client and then creates and mounts the logical volume, sharing it over NFS.
Here is my code:
#!/usr/bin/python
print "Content-type:text/html"
print ""
import cgi, commands, os, socket, time, getpass

form = cgi.FieldStorage()
st = form.getvalue("st")
mount = form.getvalue('mount')
backup = form.getvalue('backup')
ip = os.environ["REMOTE_ADDR"]

s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind(("192.168.1.100", 4444))
a, b = s.recvfrom(100)
print a
s.sendto(mount, b)

if backup == 'Yes':
    os.system("lvcreate --size "+st+" --thin volume/pooL1")
    os.system("lvcreate -V "+st+" --name "+ip+" --thin volume/pooL1")
    os.system("mkfs.ext4 /dev/volume/"+ip)
    os.system("mkdir /mnt/"+ip)
    os.system("mount /dev/volume/"+ip+" /mnt/"+ip)
    os.system("lvcreate -s --name snap"+ip+" /dev/volume/"+ip)
    os.system("mkdir /media/snap"+ip)
    os.system("mount /dev/volume/snap"+ip+" /media/snap"+ip)
else:
    os.system("lvcreate --size "+st+" --thin volume/pooL1")
    os.system("lvcreate -V "+st+" --name "+ip+" --thin volume/pooL1")
    os.system("mkfs.ext4 /dev/volume/"+ip)
    os.system("mkdir /mnt/"+ip)
    os.system("mount /dev/volume/"+ip+" /mnt/"+ip+"/")

f = open('/etc/fstab', 'a+')
f.write("/mnt/"+ip+" /dev/volume/"+ip+" ext4 defaults 0 0")
f.close()

f = open('/etc/exports', 'a+')
f.write("/mnt/"+ip+" "+ip+"(rw,sync,no_root_squash) \n")
f.close()

os.system("exportfs -a")
s.sendto("now you can use your storage", b)
s.close()
st is the storage size.
I have given Apache permission to create logical volumes. The problem is that the LV is not mounting. Also, the client gets an internal server error even when the LV is created on the server.
You need to put sudo before every command, e.g. os.system('sudo ...'), if you are using RHEL 7.2.
For sudo to work for the apache user, configure the /etc/sudoers file and add the line apache ALL=(ALL) NOPASSWD:ALL.
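As a hedged sketch of that suggestion (command names and paths taken from the question's script), switching from os.system to subprocess with a sudo prefix also lets you surface a non-zero exit status, which makes the failing mount and the internal server error easier to track down:
import subprocess

def run(cmd):
    # prefix every command with sudo, as suggested above, and report failures
    rc = subprocess.call(["sudo"] + cmd)
    if rc != 0:
        print "command failed (%d): %s" % (rc, " ".join(cmd))
    return rc

run(["lvcreate", "--size", st, "--thin", "volume/pooL1"])
run(["lvcreate", "-V", st, "--name", ip, "--thin", "volume/pooL1"])
run(["mkfs.ext4", "/dev/volume/" + ip])
run(["mkdir", "/mnt/" + ip])
run(["mount", "/dev/volume/" + ip, "/mnt/" + ip])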
