How to pass a variable from script 1 to script 2 in Python

I need help, please.
I have two scripts. The first script consumes from RabbitMQ, and I need to send the received body to a variable in script 2.
However, the variable remains empty. I think script 1 may be calling script 2 before the value is received from RabbitMQ?
How can I achieve this? Thanks
script 1
import pika
import time
from script2 import strQueue

class ReceiveFromMQ(object):
    def __init__(self):
        credentials = pika.PlainCredentials('xxxx', 'xxxx')
        parameters = pika.ConnectionParameters('xxxx', xxx, 'xxx',
                                               credentials)
        self.connection = pika.BlockingConnection(parameters)
        self.channel = self.connection.channel()
        self.channel.basic_qos(prefetch_count=1)
        self.channel.basic_consume(
            queue='queue',
            on_message_callback=self.on_response,
            auto_ack=True)
        self.response = None
        self.channel.start_consuming()

    def on_response(self, ch, method, props, body):
        self.response = body.decode()
        strQueue = body.decode()
        print(" [x] Received %r" % body.decode())
        # getMsg(body.decode())
        time.sleep(body.count(b'.'))
        print(" [x] Done")
        print(' [*] Waiting for messages. To exit press CTRL+C')
        return self.response

    def call(self):
        self.response = None
        self.connection.process_data_events(time_limit=None)
        print(str(self.response))
        return str(self.response)

receive_mq = ReceiveFromMQ()
response = receive_mq.call()
print(response)
script 2
import requests
import json

strQueue = None

# Function: Authenticate
def httpAuthenticate(in_apiusers, in_apipass, in_Tenant, in_URL):
    try:
        print('retrieve token...')
        url = in_URL
        payload = json.dumps({
            "password": str(in_apipass),
            "usernameOrEmailAddress": str(in_apiusers),
            "tenancyName": str(in_Tenant)
        })
        headers = {
            'Content-Type': 'application/json'
        }
        response = requests.request("POST", url, headers=headers, data=payload)
        json_object = json.loads(response.text)
        print('token code: ' + str(response.status_code))
        return str(json_object["result"])
    except Exception as e:
        return 'Fail:'

# Function: Add Queue Item on UiPath Orchestrator
def httpAddQueueItems(in_URL, in_Token, in_QueueName, in_strjson):
    try:
        print('add queue item...')
        url = in_URL
        payload = json.dumps({
            "itemData": {
                "Priority": "Normal",
                "Name": str(in_QueueName),
                "SpecificContent": {
                    "in_pjsorequest": in_strjson
                },
                "Reference": "ggg"
            }
        })
        headers = {
            'X-UIPATH-OrganizationUnitId': '',
            'Content-Type': 'application/json',
            'Authorization': 'Bearer ' + in_Token
        }
        response = requests.request("POST", url, headers=headers, data=payload)
    except Exception as e:
        print(e)
        return 'Fail'

# CONSTANTS
OnPremuser = "xxxx"
OnPrempass = "xxx!"
OnPremtenant = "Default"
OnPremUrlAuth = "xxxx"
OnPremUrlAddQueue = "https://xxxx"
OnPremQueue = "JSON"
OnPremPJSON = strQueue
OnPremtoken = httpAuthenticate(OnPremuser, OnPrempass, OnPremtenant, OnPremUrlAuth)
httpAddQueueItems(OnPremUrlAddQueue, OnPremtoken, OnPremQueue, OnPremPJSON)

What you are trying to achieve is not possible this way, since you are trying to access a shared variable (a race condition). When script 1 does from script2 import strQueue, all of script 2's module-level code runs immediately, while strQueue is still None; and assigning strQueue inside on_response only creates a local variable, so script 2 never sees the value.
Moreover, only one bytecode instruction can be executed at a time; that is to say, only one CPU-bound task can run at a time.
P.S.: It can be achieved by running a consumer for the RabbitMQ producer and then assigning the received JSON to a variable. A sketch of that approach follows.
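As a minimal sketch of that approach (assuming the same connection settings as above; process_message is a hypothetical function added to script 2, since importing a bare variable only copies its value at import time), script 2 gains a function that runs once per message:

# script2.py -- move the HTTP calls into a function so they run per message,
# not at import time; the helpers and constants above stay as they are.
def process_message(str_queue):
    # str_queue is the decoded RabbitMQ body, never None here.
    token = httpAuthenticate(OnPremuser, OnPrempass, OnPremtenant, OnPremUrlAuth)
    httpAddQueueItems(OnPremUrlAddQueue, token, OnPremQueue, str_queue)

and script 1 calls it from inside the callback instead of importing strQueue:

# script1.py -- sketch of the consumer side
import pika
from script2 import process_message

def on_response(ch, method, props, body):
    # The body exists by the time this callback fires, so script 2 gets real data.
    process_message(body.decode())

credentials = pika.PlainCredentials('xxxx', 'xxxx')
parameters = pika.ConnectionParameters('xxxx', 5672, 'xxx', credentials)
connection = pika.BlockingConnection(parameters)
channel = connection.channel()
channel.basic_consume(queue='queue', on_message_callback=on_response, auto_ack=True)
channel.start_consuming()

This inverts the dependency: script 1 pushes each message into script 2 only after RabbitMQ delivers it, instead of script 2 reading a value that does not exist yet.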

Related

Facebook Graph API | Request [400] Error

I created a bot to monitor a post for new comments; if there is a new comment, it should automatically send a private reply. But instead I got a Request [400] Error.
def monitor_comment():
    print("Bot is monitoring comments")
    time.sleep(5)
    comment_data = graph.get_connections(COMBINED_POST_ID_TO_MONITOR, "comments", order='reverse_chronological')
    commends = []
    for comment in comment_data['data'][:10]:
        commends.append(comment)
    data = commends[0]['id']
    data_converted = str(data)
    # time.sleep(5)
    print(data)
    return data_converted

def private_reply(comment_ids):
    url = "https://graph.facebook.com/v12.0/me/messages?"
    access = {"access_token": Page_Token}
    params = {
        "recipient": {
            "comment_id": comment_ids
        },
        "message": {
            "text": "Testing Private_Replies"
        }
    }
    request = requests.post(url=url, files=access, json=params)
    print(request)
These are the logs:
{"error":{"message":"An active access token must be used to query information about the current user.","type":"OAuthException","code":2500,"fbtrace_id":"AMCiqy1Aw8CyODPlUBE1b98"}}

Why does Cloud Tasks gives "Deadline Exceeded" error after 60s?

I'm using google-cloud-tasks==2.2.0 with Flask and Gunicorn. This is how I send a task to a queue:
from datetime import datetime, timedelta
import json

from google.cloud import tasks_v2beta3
from google.protobuf import duration_pb2, timestamp_pb2

# client, project and location are defined elsewhere

def send_task(payload, queue, uri, *args):
    url = f'https://www.mywebsite.com/{uri}'
    payload = json.dumps(payload)
    payload = payload.encode()
    parent = client.queue_path(project=project, location=location, queue=queue)
    service_account_email = 'myaccount.com'

    # Construct the request body.
    td = '1800s'
    duration = duration_pb2.Duration()
    time = duration.FromJsonString(td)

    now = datetime.utcnow() + timedelta(seconds=10)
    ts = timestamp_pb2.Timestamp()
    now = ts.FromDatetime(now)

    task = {
        'http_request': {  # Specify the type of request.
            'http_method': tasks_v2beta3.HttpMethod.POST,
            'url': url,
            'body': payload,  # Already JSON-encoded bytes
            'headers': {  # Add custom header
                'Content-Type': 'application/json'
            },
            'oidc_token': {'service_account_email': service_account_email}
        }
    }
    task['schedule_time'] = now
    task['dispatch_deadline'] = time

    response = client.create_task(request={"parent": parent, "task": task}, timeout=30.0)
I use dispatch_deadline, which is supposed to support a 30-minute timeout, per the API reference.
But no matter what I try, the task fails after 60 seconds with a 504 DEADLINE_EXCEEDED error.
Is there something I'm missing here, or is it a bug?
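Two things worth checking (assumptions on my part, since no answer is recorded here). First, Duration.FromJsonString() and Timestamp.FromDatetime() populate the message in place and return None, so the code above may actually be setting dispatch_deadline and schedule_time to None. Second, reading the task back shows what the service really stored. A sketch:

# Sketch: attach the protobuf messages themselves, since
# FromJsonString/FromDatetime mutate in place and return None.
deadline = duration_pb2.Duration()
deadline.FromJsonString('1800s')
schedule = timestamp_pb2.Timestamp()
schedule.FromDatetime(datetime.utcnow() + timedelta(seconds=10))

task['dispatch_deadline'] = deadline
task['schedule_time'] = schedule

response = client.create_task(request={"parent": parent, "task": task})
# Read the task back to confirm the deadline the service actually stored.
stored = client.get_task(request={"name": response.name})
print(stored.dispatch_deadline)  # expect 1800s if the deadline was applied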

New Twitch API getting json data Python 3

I am trying to get a Python script to say whether a Twitch channel is live, but haven't been able to do it; any and all help would be appreciated.
Here are the docs I've been able to find:
https://dev.twitch.tv/docs/api/guide
This is what I have at the moment, but I keep getting "'set' object has no attribute 'items'". This is modified code from "Is There Any Way To Check if a Twitch Stream Is Live Using Python?"; however, it is now outdated because of the new API.
import requests

def checkUser():
    API_HEADERS = {
        'Client-ID : [client id here from dev portal]',
        'Accept : application/vnd.twitchtv.v5+json',
    }
    url = "https://api.twitch.tv/helix/streams/[streamer here]"
    req = requests.Session().get(url, headers=API_HEADERS)
    jsondata = req.json()
    print(jsondata)

checkUser()
The answer to your problem of "'set' object has no attribute 'items'" is a simple typo. It should be

API_HEADERS = {
    'Client-ID': '[client id here from dev portal]',
    'Accept': 'application/vnd.twitchtv.v5+json'
}

Notice how the colons are no longer part of the strings: with proper key-value pairs this is a dict of headers rather than a set of two strings.
And to answer your overarching question of how to tell if a channel is online, you can look at this sample code I made.
import requests

URL = 'https://api.twitch.tv/helix/streams?user_login=[Channel_Name_Here]'
authURL = 'https://id.twitch.tv/oauth2/token'
Client_ID = [Your_client_ID]
Secret = [Your Client_Secret]

AutParams = {'client_id': Client_ID,
             'client_secret': Secret,
             'grant_type': 'client_credentials'
             }

def Check():
    AutCall = requests.post(url=authURL, params=AutParams)
    access_token = AutCall.json()['access_token']
    head = {
        'Client-ID': Client_ID,
        'Authorization': "Bearer " + access_token
    }
    r = requests.get(URL, headers=head).json()['data']
    if r:
        r = r[0]
        if r['type'] == 'live':
            return True
        else:
            return False
    else:
        return False

print(Check())
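One design note on this sample (my observation, not part of the original answer): the client-credentials token returned by id.twitch.tv expires, so a long-running checker should cache access_token and only repeat the POST to authURL when the Helix call starts returning 401, rather than authenticating on every poll.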

400 Bad Request: Failed to decode JSON object; PUT request (Flask)

Upon trying to test the JSON payload in a PUT request, I'm getting the following error. I'm not sure if it's my test that's causing the error or if something else is causing the issue. If I comment out that line, the HTTP verb responds with no error.
werkzeug.exceptions.BadRequest:
400 Bad Request: Failed to decode JSON object: Expecting value: line 1 column 1 (char 0)
-> args = self.put_request_parser.parse_args()
Upon debugging, here is what is actually being sent when parse_args() is invoked. I'm not sure why unparsed_arguments is an empty dictionary in this case.
EnvironHeaders([('User-Agent', 'werkzeug/0.14.1'), ('Host', 'localhost'),
('Content-Type', 'application/json'), ('Content-Length', '0'),
('Authorization', 'Bearer <token>')]),
'url': 'http://localhost/api/v1/todos/3', 'unparsed_arguments': {}}
tests.py
class TestUpdateTodoResource(ApiTestCase):
    '''Verify that a client successfully updates an existing todo.'''
    def test_put_update_user_todo(self):
        with app.test_client() as client:
            http_response = client.put(
                "api/v1/todos/3",
                headers={
                    'content-type': 'application/json',
                    'authorization': f"Bearer {token}"
                },
                data={
                    "name": "Never do this todo!"
                }
            )
            self.assertEqual(http_response.status_code, 204)
todos.py
class ApiTodo(Resource):
    put_request_parser = reqparse.RequestParser()
    put_request_parser.add_argument(
        'name',
        required=True,
        location=['form', 'json'],
        help="Cannot accept a blank description"
    )

    @auth.login_required
    def put(self, id):
        try:
            user_todo = Todo.select().join(User).where(
                (Todo.id == id) & (User.id == g.user.id)
            ).get()
        except Todo.DoesNotExist:
            abort(404, description="That todo no longer exists")
        args = self.put_request_parser.parse_args()
        if not args['name']:
            abort(400, description="Must provide a todo description")
        updated_todo = user_todo.update(**args)
        updated_todo.execute()
        return marshal(set_todo_creator(updated_todo), todo_fields, 'todo'), 204
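A likely culprit (an assumption based on the Content-Length: 0 header in the debug output, since no answer is recorded above): passing a plain dict via data= does not serialize it as JSON, so the body arrives empty or form-encoded and parse_args() finds nothing to decode. A minimal sketch of the test serializing the payload itself:

import json

with app.test_client() as client:
    http_response = client.put(
        "api/v1/todos/3",
        headers={
            'content-type': 'application/json',
            'authorization': f"Bearer {token}"
        },
        # Serialize explicitly: a bare dict passed via data= is
        # form-encoded, leaving the JSON body empty.
        data=json.dumps({"name": "Never do this todo!"})
    )

On newer Flask versions the same thing can be written as json={"name": ...}, which serializes the dict and sets the content type automatically.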

Get data by pages and merge it into one using Python (pagination)

I'm connecting to an API which has a 500-row limit per call.
This is my code for a single API call (works great):
def getdata(data):
    auth_token = access_token
    hed = {'Authorization': 'Bearer ' + auth_token, 'Accept': 'application/json'}
    urlApi = 'https://..../orders?Offset=0&Limit=499'
    datar = requests.get(urlApi, data=data, headers=hed, verify=True)
    return datar
Now I want to scale it up so it will get me all the records.
This is what I tried to do:
In order to make sure that I have all the rows, I must iterate until there is no more data:
- get 1st page
- get 2nd page
- merge
- get 3rd page
- merge
- etc.
Each page is an API call.
This is what I'm trying to do:
def getData(data):
    auth_token = access_token
    value_offset = 0
    hed = {'Authorization': 'Bearer ' + auth_token, 'Accept': 'application/json'}
    datarALL = None
    while True:
        urlApi = 'https://..../orders?Offset=' + value_offset + '&Limit=499'
        responsedata = requests.get(urlApi, data=data, headers=hed, verify=True)
        if responsedata.ok:
            value_offset = value_offset + 499
            # to do: merge the result of the get request
            datarALL = datarALL + responsedata (?)
            # to do: check if response is empty then break out.
    return datarALL
I couldn't find information about how to merge the results of the API calls, nor how to check whether I can break out of the loop.
Edit:
To clarify what I'm after:
I can see the results of a single API call using:
logger.debug('response is : {0}'.format(datar.json()))
What I want to be able to do:
logger.debug('response is : {0}'.format(datarALL.json()))
and have it show all results from all calls. This requires generating API calls until there is no more data to get.
This is a sample return of an API call:
{
    "offset": 0,
    "limit": 0,
    "total": 0,
    "results": [
        {
            "field1": 0,
            "field2": "string",
            "field3": "string",
            "field4": "string"
        }
    ]
}
In this case, you are almost correct with the idea.
is_valid = True
while is_valid:
    is_valid = False
    ...
    ...
    responsedata = requests.get(urlApi, data=data, headers=hed, verify=True)
    if responsedata.status_code == 200:  # use the status code to check the request; 200 means a successful call
        responsedata = responsedata.text
        value_offset = value_offset + 499
        # merge the result of the get request
        jsondata = json.loads(responsedata)
        if "results" in jsondata:
            if jsondata["results"]:
                is_valid = True
        if is_valid:
            # concatenate the lists with the + operator
            datarALL = datarALL + jsondata["results"]

Since I don't know whether "results" still exists when the data runs out, I checked both levels.
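Putting the question's function and this loop together, a minimal runnable sketch (assuming access_token and the real API URL are defined elsewhere; note that datarALL must start as an empty list, not None, so the + concatenation works on the first page):

import json
import requests

def getData(data):
    hed = {'Authorization': 'Bearer ' + access_token, 'Accept': 'application/json'}
    value_offset = 0
    datarALL = []  # start with an empty list so + works on the first page
    is_valid = True
    while is_valid:
        is_valid = False
        urlApi = 'https://..../orders?Offset=' + str(value_offset) + '&Limit=499'
        responsedata = requests.get(urlApi, data=data, headers=hed, verify=True)
        if responsedata.status_code == 200:
            jsondata = json.loads(responsedata.text)
            if jsondata.get("results"):  # an empty or missing "results" ends the loop
                is_valid = True
                datarALL = datarALL + jsondata["results"]
                value_offset = value_offset + 499
    return datarALL  # one flat list of rows from every page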
