Python Request Send Massage again in same channel - python

Is there any way if the status response code of CHANNEL_ID_1comes 200, then it should not send the message again in the same channel.
USERS_TOKEN = "TOKEN"
CHANNEL_ID_1 = "CHANNELID HERE"
CHANNEL_ID_2 = "CHANNELID HERE"
CHANNEL_ID_3 = "CHANNELID HERE"
CHANNEL_ID_4 = "CHANNELID HERE"
MESSAGE_1 = "msg 1 channel 1"
MESSAGE_2 = "msg 2 channel 2"
MESSAGE_3 = "msg 3 channel 3"
MESSAGE_4 = "msg 4 channel 4"
msgcount = "10"
def sendMessage(token, channel_id, message):
url = 'https://discord.com/api/v9/channels/{}/messages'.format(channel_id)
data = {"content": message}
header = {"authorization": token}
r = requests.post(url, data=data, headers=header)
print(r.status_code)
for i in range(int(msgcount)):
time.sleep(0.3)
sendMessage(USERS_TOKEN, CHANNEL_ID_1, MESSAGE_1)
sendMessage(USERS_TOKEN, CHANNEL_ID_2, MESSAGE_2)
sendMessage(USERS_TOKEN, CHANNEL_ID_3, MESSAGE_3)
sendMessage(USERS_TOKEN, CHANNEL_ID_4, MESSAGE_4)
plz modify that code its really helpful for me

If I understand the question correctly, I would suggest this code structure.
[ Note: This is untested ]
import requests
import time
USERS_TOKEN = "TOKEN"
MDICT = {"CHANNEL_1": "message_1",
"CHANNEL_2": "message_2",
"CHANNEL_3": "message_3",
"CHANNEL_4": "message_4"
}
msgcount = 10
def sendMessage(token, channel_id, message):
url = f'https://discord.com/api/v9/channels/{channel_id}/messages'
data = {"content": message}
header = {"authorization": token}
with requests.Session() as session:
return session.post(url, data=data, headers=header).status_code
while msgcount > 0 and MDICT:
time.sleep(0.3)
key = next(iter(MDICT.keys()))
if sendMessage(USERS_TOKEN, key, MDICT[key]) == 200:
del MDICT[key]
msgcount -= 1

Related

How to pass variable from script 1 to script 2 in python

I need help please.
I have 2 scripts. The first script consumes from RabbitMQ and I need to send the body received to a variable in script 2.
However, the variable remains empty. I think that script 1 maybe is calling script 2 before the value is received from RabbitMQ?
How can I achieve this? Thanks
script 1
import pika
import time
from script2 import strQueue
class ReceiveFromMQ(object):
def __init__(self):
credentials = pika.PlainCredentials('xxxx', 'xxxx')
parameters = pika.ConnectionParameters('xxxx', xxx, 'xxx',
credentials)
self.connection = pika.BlockingConnection(parameters)
self.channel = self.connection.channel()
self.channel.basic_qos(prefetch_count=1)
self.channel.basic_consume(
queue='queue',
on_message_callback=self.on_response,
auto_ack=True)
self.response = None
self.channel.start_consuming()
def on_response(self, ch, method, props, body):
self.response = body.decode()
strQueue = body.decode()
print(" [x] Received %r" % body.decode())
# getMsg(body.decode())
time.sleep(body.count(b'.'))
print(" [x] Done")
print(' [*] Waiting for messages. To exit press CTRL+C')
return self.response
def call(self):
self.response = None
self.connection.process_data_events(time_limit=None)
print(str(self.response))
return str(self.response)
receive_mq = ReceiveFromMQ()
response = receive_mq.call()
print(response)
script 2
import requests
import json
strQueue = None
# Function Authenticate
def httpAuthenticate (in_apiusers, in_apipass, in_Tenant, in_URL):
try:
print('retrieve token...')
url = in_URL
payload = json.dumps({
"password": str(in_apipass),
"usernameOrEmailAddress": str(in_apiusers),
"tenancyName": str(in_Tenant)
})
headers = {
'Content-Type': 'application/json'
}
response = requests.request("POST", url, headers=headers, data=payload)
json_object = json.loads(response.text)
print('token code: ' + str(response.status_code))
return str(json_object["result"])
except Exception as e:
return 'Fail:'
# Function:Add Queue Item on Uipath Orchestrator
def httpAddQueueItems(in_URL, in_Token, in_QueueName, in_strjson):
try:
print('add queue item...')
url = in_URL
payload = json.dumps({
"itemData": {
"Priority": "Normal",
"Name": str(in_QueueName),
"SpecificContent": {
"in_pjsorequest": in_strpjson
},
"Reference": "ggg"
}
})
headers = {
'X-UIPATH-OrganizationUnitId': '',
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + in_Token
}
response = requests.request("POST", url, headers=headers, data=payload)
except Exception as e:
print(e)
return 'Fail'
# CONSTANTS
OnPremuser = "xxxx"
OnPrempass = "xxx!"
OnPremtenant = "Default"
OnPremUrlAuth = "xxxx"
OnPremUrlAddQueue = "https://xxxx"
OnPremQueue = "JSON"
OnPremPJSON = strQueue
OnPremtoken = httpAuthenticate(OnPremuser, OnPrempass, OnPremtenant, OnPremUrlAuth)
httpAddQueueItems(OnPremUrlAddQueue, OnPremtoken, OnPremQueue, OnPremJSON)
What you are trying to achieve is not possible in this way since you are
trying to access a shared variable (Race Condition).
Moreover, only one bytecode instruction can be executed at a time, mean to
say, only one CPU bound task can be run at a time.
P.S:- It can be achieved by running a consumer for the RabbitMQ producer and then assign the json received to a variable.

Google drive chunked uploads in python using rest api

I'm trying to upload files > 5mb to google drive with python over the rest api, so i'm using the chunked upload method.
When the file is small enough to fit in one request everything is fine, but when i have a larger file it fails at the last request with
header i sended: "{'Content-Length': '416768', 'Content-Type': 'application/tar', 'Content-Range': 'bytes 262144-416768/416768'}"
content:"Failed to parse Content-Range header."
And yes this file is only 407kb big to have faster test execution (20mb file also fails).
This is my code made from this SO Question for the sessionUri, file_name=backup.tar.
The imported google_api is my script witch returns folder_id and access_token.
It finishes successfully and return's the sessioUri.
file_size = str(os.path.getsize(file_name))
import google_api, base64
from datetime import datetime
access_token = google_api.access_token()
folder_id = google_api.folder_id()
headers = {
'Authorization': 'Bearer ' + access_token,
'Content-Type': 'application/json; charset=UTF-8',
'X-Upload-Content-Type': 'application/tar',
'X-Upload-Content-Length': file_size,
'X-Upload-Content-Range': 'bytes 0-{}/{}'.format(file_size, file_size)
}
querystring = {"uploadType": "resumable"}
THUMBNAIL_IMAGE = str(open("drive_image.base64","r").read())
THUMBNAIL_MIME_TYPE = "image/png"
f = open("drive_logo.png","rb")
urlsafe_thumbnail = base64.urlsafe_b64encode(f.read()).decode('utf8')
drive_content_hints = '"contentHints": {"thumbnail": { "image": "'+urlsafe_thumbnail+'", "mimeType": "image/png" } } '
drive_parents_folder = '"parents": ["'+ folder_id +'"]'
drive_description = '"description": "This is a backup file"'
drive_file_name = '"name": "'+file_name+'"'
currentdate = datetime.strftime(datetime.today(),"%Y-%m-%dT%H:%M:%SZ")
drive_creation_time = '"createdTime": "'+currentdate+'"'
drive_modificition_time = '"createdTime": "'+currentdate+'"'
payload = '{'+drive_file_name+','+drive_description+','+drive_content_hints+','+drive_creation_time+','+drive_modificition_time+','+drive_parents_folder+'}'
response = requests.post(
'https://www.googleapis.com/upload/drive/v3/files',
headers=headers,
data=payload,
params=querystring
)
if response.status_code == 200:
sessionUri = response.headers['Location']
else:
print("error")
print(response.content)
return
The problem is with the rest of the code.
file_size = os.path.getsize(file_name)
in_file = open(file_name, "rb") # opening for [r]eading as [b]inary
chunk = in_file.read()
print(file_size)
BASE_CHUNK_SIZE = 256 * 1024 # 262144
CHUNK_SIZE = 1 * BASE_CHUNK_SIZE
TOTAL_BYTES = file_size
first_byte = 0
last_byte = CHUNK_SIZE - 1
import math
times = int(math.ceil(file_size/CHUNK_SIZE))
print(times)
for _ in range(times):
if last_byte > TOTAL_BYTES:
last_byte = TOTAL_BYTES -1
print("hi")
data2 = chunk[first_byte:last_byte+1]
bytes = "bytes 0-" + str(CHUNK_SIZE-1) + "/" + str(file_size)
headers = {
'Content-Length': str(TOTAL_BYTES),
'Content-Type': "application/tar",
'Content-Range': "bytes " + str(first_byte) +"-"+str(last_byte)+"/"+str(TOTAL_BYTES)
}
print(headers)
response = requests.request(
"PUT", sessionUri, data=data2, headers=headers)
print(response.content)
byte_range = response.headers["Range"]
first_byte = byte_range.split("=",1)[1].split("-",1)[0]
last_byte = byte_range.split("=",1)[1].split("-",1)[1]
print(last_byte)
first_byte = int(last_byte)+1
last_byte = int(first_byte)+CHUNK_SIZE
print(str(first_byte)+"-"+str(last_byte))
I appreciate your help.
SOLVED IT! The if for the last request must contain -1 because it gets added later. Corrected my question! Need to add END for RANGE header after last request.

New Twitch API getting json data Python 3

I am trying to get a python script to say whether a twitch channel is live but haven't been able to do it, any and all help would be appreciated.
here are the docs I've been able to find
https://dev.twitch.tv/docs/api/guide
This is what I have atm but I keep on getting "'set' object has no attribute 'items'". This is modified code from "Is There Any Way To Check if a Twitch Stream Is Live Using Python?" however it is now outdated because of the new API.
import requests
def checkUser():
API_HEADERS = {
'Client-ID : [client id here from dev portal]',
'Accept : application/vnd.twitchtv.v5+json',
}
url = "https://api.twitch.tv/helix/streams/[streamer here]"
req = requests.Session().get(url, headers=API_HEADERS)
jsondata = req.json()
print(jsondata)
checkUser()
The answer to your problem of "'set' object has no attribute 'items'" is just a simple typo. It should be
API_HEADERS = {
'Client-ID' : '[client id here from dev portal]',
'Accept' : 'application/vnd.twitchtv.v5+json'
}
Notice how the Colon's aren't part of the text now
And to answer your overarching question of how to tell if a channel is online you can look at this sample code I made.
import requests
URL = 'https://api.twitch.tv/helix/streams?user_login=[Channel_Name_Here]'
authURL = 'https://id.twitch.tv/oauth2/token'
Client_ID = [Your_client_ID]
Secret = [Your Client_Secret]
AutParams = {'client_id': Client_ID,
'client_secret': Secret,
'grant_type': 'client_credentials'
}
def Check():
AutCall = requests.post(url=authURL, params=AutParams)
access_token = AutCall.json()['access_token']
head = {
'Client-ID' : Client_ID,
'Authorization' : "Bearer " + access_token
}
r = requests.get(URL, headers = head).json()['data']
if r:
r = r[0]
if r['type'] == 'live':
return True
else:
return False
else:
return False
print(Check())

Get data by pages and merge it into one using Python (pagination)

I'm connecting to API which has 500 rows limit per call.
This is my code for a single API call (Works great):
def getdata(data):
auth_token = access_token
hed = {'Authorization': 'Bearer ' + auth_token, 'Accept': 'application/json'}
urlApi = 'https://..../orders?Offset=0&Limit=499'
datar = requests.get(urlApi, data=data, headers=hed, verify=True)
return datar
Now I want to scale it up so it will get me all the records.
This is what I tried to do:
In order to make sure that I have all the rows, I must iterate until there is no more data:
get 1st page
get 2nd page
merge
get 3rd page
merge
etc...
each page is an API call.
This is what I'm trying to do:
def getData(data):
auth_token = access_token
value_offset = 0
hed = {'Authorization': 'Bearer ' + auth_token, 'Accept': 'application/json'}
datarALL = None
while True:
urlApi = 'https://..../orders?Offset=' + value_offset + '&Limit=499'
responsedata = requests.get(urlApi, data=data, headers=hed, verify=True)
if responsedata.ok:
value_offset = value_offset + 499
#to do: merge the result of the get request
datarALL= datarALL+ responsedata (?)
# to do: check if response is empty then break out.
return datarALL
I couldn't find information about how I merge the results of the API calls nor how do I check if I can break the loop.
Edit:
To clear what I'm after.
I can see the results of the API call using:
logger.debug('response is : {0}'.format(datar.json()))
What I want to be able to do:
logger.debug('response is : {0}'.format(datarALL.json()))
and it will show all results from all calls. This requires generate API calls until there is no more data to get.
This is the return sample of API call:
"offset": 0,
"limit": 0,
"total": 0,
"results": [
{
"field1": 0,
"field2": "string",
"field3": "string",
"field4": "string"
}
]
}
In this case, you are almost correct with the idea.
is_valid = True
while is_valid:
is_valid = False
...
...
responsedata = requests.get(urlApi, data=data, headers=hed, verify=True)
if responsedata.status_code == 200: #Use status code to check request status, 200 for successful call
responsedata = responsedata.text
value_offset = value_offset + 499
#to do: merge the result of the get request
jsondata = json.loads(responsedata)
if "results" in jsondata:
if jsondata["results"]:
is_valid = True
if is_valid:
#concat array by + operand
datarALL = datarALL + jsondata["results"]
As I don't know if "results" still exists when the data ran out, so I checked both level.

Conditional Statement to re-start Python script based on response from POST request

I have a python script where I am sending a POST request for data to a server. I am expecting a particular response which indicates there is data in the response. If I do not receive this response, how can I restart my script/go to the beginning of it. The script is wrapped in a function which allows it to run every minute.
I would like to return to the beginning of my function if my response isn't as expected.
Script:
import sched, time, requests, jsonpickle, arcpy, requests, json, datetime
s = sched.scheduler(time.time, time.sleep)
def do_something(sc):
data2 = jsonpickle.decode((f2.read()))
Start = datetime.datetime.now()
# Start = datetime.datetime.strftime(data2['QueryRequest']['LastUpdatedDate'])
DD = datetime.timedelta(minutes=5)
earlier = Start - DD
earlier_str = earlier.strftime('X%m/%d/%Y %H:%M:%S').replace('X0','X').replace('X','')
data2["QueryRequest"]['LastUpdatedDate'] = str(earlier_str)
data2 = jsonpickle.encode(data2)
BulkyItemInfo = " "
spatial_ref = arcpy.SpatialReference(4326)
lastpage = 'false'
startrow = 0
newquery = 'new'
pagesize = 100
url2 = "URL"
headers2 = {'Content-type': 'text/plain', 'Accept': '/'}
while lastpage == 'false':
r2 = requests.post(url2, data=data2, headers=headers2)
print r2.text
decoded2 = json.loads(r2.text)
f2 =open('C:\Users\GeoffreyWest\Desktop\Request.json')
data2 = jsonpickle.decode((f2.read()))
if decoded2['Response']['LastPage'] == 'false':
data2['QueryRequest']['PageSize'] = pagesize
startrow = startrow + data2['QueryRequest']['PageSize']
data2['QueryRequest']['StartRowNum'] = startrow
data2['QueryRequest']['NewQuery'] = 'false'
data2 = jsonpickle.encode(data2)
print startrow
else:
lastpage = 'true'
print json.dumps(decoded2, sort_keys=True, indent=4)
items = []
for sr in decoded2['Response']['ListOfServiceRequest']['ServiceRequest']:#Where response is successful or fails
Output for successful response:
{
"status": {
"code": 311,
"message": "Service Request Successfully Queried.",
"cause": ""
},
"Response": {
"LastPage": "false",
"NumOutputObjects": "100",
"ListOfServiceRequest": {
"ServiceRequest": [
{
Output for unsuccessful response:
{"status":{"code":311,"message":"Service Request Successfully Queried.","cause":""},"Response":{"LastPage":"true","NumOutputObjects":"0","ListOfServiceRequest":{}}}

Categories

Resources