So, I'm trying Python and I've come across a strange problem. I'm trying to see if a YouTube video ID is valid,
This part; https://www.youtube.com/watch?v=**QB7ACr7pUuE** and it always prints 404 even with a valid url.
Code:
import requests
def check_video_url(video_id):
    """Return True when YouTube's oEmbed endpoint recognises `video_id`.

    The oEmbed endpoint answers 200 for valid public videos and a
    4xx status for unknown IDs.
    """
    oembed_base = "https://www.youtube.com/oembed?url=http://www.youtube.com/watch?v="
    response = requests.get(oembed_base + video_id)
    return response.status_code == 200
and
testId = "QB7ACr7pUuE"
# BUG FIX: the original discarded the return value and then printed an
# undefined name `status`, raising NameError. Bind the result first.
status = check_video_url(video_id=testId)
print(str(status))
I'm not getting any errors. any ideas?
Looking at your code, you don't check the returned value from check_video_url:
import requests
def check_video_url(video_id):
    """Check a YouTube video ID against the oEmbed endpoint.

    Returns True when the endpoint replies with HTTP 200 (valid video),
    False otherwise.
    """
    base = "https://www.youtube.com/oembed?url=http://www.youtube.com/watch?v="
    reply = requests.get(base + video_id)
    return reply.status_code == 200
testId = "QB7ACr7pUuE"
print(check_video_url(video_id=testId)) # <-- Here I print the returned value
Prints:
True
testId = "QB7ACr7pUuExxx" # <--- non-existent video ID
print(check_video_url(video_id=testId))
Prints:
False
EDIT: To put the returned value to variable:
testId = "QB7ACr7pUuExxx"
status = check_video_url(video_id=testId)
# Report validity based on the boolean result.
print("Valid ID" if status else "Invalid ID")
Building on @Andrej's answer, I would suggest that you use
request.ok and return your message
ok - Returns True if status_code is less than 400, otherwise False
import requests
def check_video_url(video_id):
    """Validate a YouTube video ID via the oEmbed endpoint.

    Uses `Response.ok`, which is True for any status code below 400.
    """
    endpoint = "https://www.youtube.com/oembed?url=http://www.youtube.com/watch?v="
    resp = requests.get(endpoint + video_id)
    return resp.ok
testId = "QB7ACr7pUuE"
status = check_video_url(video_id=testId)
# Single-expression report of the validity check.
print("Valid" if status else "Not Valid")
Related
So I have this endpoint below (Block 2), called from the page directly below (Block 1). The headers I see in the browser appear to be sending the access_token_cookie in the cookie header. I also have an after_request block (Block 3).
THE AXIOS CALL FROM THE PAGE
// Post the clicked element's text to the audit endpoint and render the
// returned HTML into #resultcol.
var myFunction = function(element){
    console.log(element.innerText);
    // Send cookies (the JWT access token) with the request.
    axios.defaults.withCredentials = true;
    axios.post('/api/cmsaudit/' + element.innerText).then(function(response){
        //console.log(response.data);
        document.getElementById('resultcol').innerHTML = response.data;
    }).catch(function(error){
        // BUG FIX: the empty catch silently swallowed failures; surface them.
        console.error(error);
    });
}; // BUG FIX: the original snippet never closed the function body.
THE AJAX ENDPOINT CALL
# BUG FIX: the decorators were written with '#' (commenting them out,
# likely markdown garbling) — they must start with '@'.
@cms_blueprint.route('/api/cmsaudit/<cause>', methods=['POST', 'GET'])
@jwt_required()  # <---- if I remove this it works fine.
def applist(cause):
    """AJAX endpoint for the CMS audit page.

    Renders the application list for cause 'List Applications';
    otherwise returns a JSON message with the connection status.
    """
    # Context manager so the debug file is closed even on error
    # (the original leaked the handle).
    with open('cause.txt', 'w') as f:
        f.write('inhere')
        f.write(str(datetime.now()))
        getapps = OracleConnect()
        opencon = getapps.openconnection()
        if opencon == 0:
            if cause == 'List Applications':
                getapps.mycursor.execute("QUERY")
                result = getapps.mycursor.fetchall()
                # BUG FIX: `getapps.closeconnection` referenced the method
                # without calling it — the connection was never closed.
                getapps.closeconnection()
                print(result)
                results = {"value": [x[0] for x in result]}
                sys.stdout.flush()
                return render_template('cmsaudit/applist.html',
                                       applist=results["value"])
            # NOTE(review): the original `return jsonify(results), 200` here
            # referenced `results` before assignment for any other cause;
            # fall through to the status message instead.
        else:
            f.write('else is running')
        return jsonify({'msg': str(opencon)})
AFTER REQUEST BLOCK TO CHECK/REFRESH TOKEN
# BUG FIX: the decorator was written as '#app.after_request', which is a
# comment — it must be '@app.after_request' or the hook never runs.
@app.after_request
def refresh_expiring_jwts(response):
    """Sliding-session refresh: re-issue the access-token cookie when the
    current JWT expires within the next 28 minutes.

    Always returns the (possibly cookie-updated) response unchanged
    otherwise.
    """
    try:
        now = datetime.now()
        # Skip the explicit token-check endpoint so polling it never
        # extends the session.
        if request.path == '/api/jwt/check':
            return response
        exp_timestamp = get_jwt()["exp"]
        target_timestamp = datetime.timestamp(now + timedelta(minutes=28))
        if target_timestamp > exp_timestamp:
            access_token = create_access_token(identity=get_jwt_identity())
            set_access_cookies(response, access_token)
        return response
    except (RuntimeError, KeyError):
        # Case where there is not a valid JWT. Just return the original response.
        return response
I am working on a tool that posts in-game news updates from games to your Twitter account. For the last few days I have been searching for a way to check whether an image is already cached, so that when a game's news feed updates, only the new items are posted and the old ones are skipped (currently it re-posts every active news item). I have tested it about 100 times, but it won't work. I hope one of you can help me with this issue. Thanks in advance for any help.
Here is my code:
import tweepy
import time
import os        # BUG FIX: used below (os.remove) but never imported
import requests  # BUG FIX: used throughout but never imported
from colorama import *
init()

auth = tweepy.OAuthHandler('API', 'APISECRET')
auth.set_access_token('ACESS', 'ACESSSECRET')

# Snapshot of the news feed at startup, used to detect changes.
response = requests.get('https://fortnite-api.com/v2/news/br')
newsData = response.json()["data"]
#-----
footer = '#Fortnite'
delay = 5          # seconds between polls
saveImages = True  # keep downloaded images on disk
#-----
while 1:
    response = requests.get('https://fortnite-api.com/v2/news/br')
    if response:
        newsDataLoop = response.json()["data"]
        print("2 - Checking for change in news feed...")
        if newsData != newsDataLoop:
            #if loop == True:
            print("News Feed has changed...")
            for i in newsDataLoop["motds"]:
                try:
                    print("Saving: "+i["id"])
                    url = i["image"]
                    r = requests.get(url, allow_redirects=True)
                    # Use a context manager so the file handle is closed
                    # (the original leaked it).
                    with open("NewsImages/"+i["id"]+'.png', 'wb') as img:
                        img.write(r.content)
                    print("Saved: "+i["id"])
                    try:
                        api = tweepy.API(auth)
                        api.update_with_media("NewsImages/"+i["id"]+'.png',"Fortnite News Update:\n\n"+i["title"]+":\n"+i["body"]+"\n\n"+footer)
                        print("Tweeted: "+i["id"])
                    except Exception:
                        print("Failed to tweet: "+i["id"])
                    # BUG FIX: saveImages is a bool; comparing it with the
                    # string 'False' could never be true, so images were
                    # never deleted even when saveImages was False.
                    if not saveImages:
                        os.remove("NewsImages/"+i["id"]+'.png')
                    response = requests.get('https://fortnite-api.com/v2/news/br')
                    newsData = response.json()["data"]
                except Exception:
                    print("Error in tweeting news feed: skipping")
            print("Finished news feed publishing")
    else:
        print("FAILED TO GRAB NEWS DATA: URL DOWN")
    time.sleep(delay)
You need to check each motd to see if it existed in the old dataset.
import tweepy
import time
import os        # BUG FIX: used below (os.remove) but never imported
import requests  # BUG FIX: used throughout but never imported
from colorama import *
init()

auth = tweepy.OAuthHandler('API', 'APISECRET')
auth.set_access_token('ACESS', 'ACESSSECRET')

# Snapshot of the news feed at startup, used to detect changes.
response = requests.get('https://fortnite-api.com/v2/news/br')
newsData = response.json()["data"]
#-----
footer = '#Fortnite'
delay = 5          # seconds between polls
saveImages = True  # keep downloaded images on disk
#-----
while 1:
    response = requests.get('https://fortnite-api.com/v2/news/br')
    if response:
        newsDataLoop = response.json()["data"]
        print("2 - Checking for change in news feed...")
        if newsData != newsDataLoop:
            #if loop == True:
            print("News Feed has changed...")
            for i in newsDataLoop["motds"]:
                # Skip any motd that was present in the previous snapshot,
                # so only genuinely new items are posted.
                if i in newsData["motds"]:
                    # has already been posted
                    print("Already posted")
                    continue
                try:
                    print("Saving: "+i["id"])
                    url = i["image"]
                    r = requests.get(url, allow_redirects=True)
                    # Use a context manager so the file handle is closed
                    # (the original leaked it).
                    with open("NewsImages/"+i["id"]+'.png', 'wb') as img:
                        img.write(r.content)
                    print("Saved: "+i["id"])
                    try:
                        api = tweepy.API(auth)
                        api.update_with_media("NewsImages/"+i["id"]+'.png',"Fortnite News Update:\n\n"+i["title"]+":\n"+i["body"]+"\n\n"+footer)
                        print("Tweeted: "+i["id"])
                    except Exception:
                        print("Failed to tweet: "+i["id"])
                    # BUG FIX: saveImages is a bool; comparing it with the
                    # string 'False' could never be true.
                    if not saveImages:
                        os.remove("NewsImages/"+i["id"]+'.png')
                    response = requests.get('https://fortnite-api.com/v2/news/br')
                    newsData = response.json()["data"]
                except Exception:
                    print("Error in tweeting news feed: skipping")
            print("Finished news feed publishing")
    else:
        print("FAILED TO GRAB NEWS DATA: URL DOWN")
    time.sleep(delay)
I was trying to use code written for Python 2.7 with Python 3.5, but I couldn't solve this error:
TypeError: the JSON object must be str, not 'bytes'
# Page through the Facebook feed until there is no 'after' cursor.
while has_next_page:
    # BUG FIX: `after is ''` tested object identity, not equality —
    # use == for string comparison.
    after = '' if after == '' else "&after={}".format(after)
    base_url = base + node + parameters + after + since + until
    url = getFacebookPageFeedUrl(base_url)
    # BUG FIX (Python 3): request_until_succeed returns bytes and on
    # Python 3.5 json.loads requires str, so decode first.
    statuses = json.loads(request_until_succeed(url).decode('utf-8'))
    # NOTE(review): the original called getReactionsForStatuses twice in a
    # row; the duplicate call is removed.
    reactions = getReactionsForStatuses(base_url)

    for status in statuses['data']:
        # Ensure it is a status with the expected metadata
        if 'reactions' in status:
            status_data = processFacebookPageFeedStatus(status)
            reactions_data = reactions[status_data[0]]
            # calculate thankful/pride through algebra
            num_special = status_data[6] - sum(reactions_data)
            w.writerow(status_data + reactions_data + (num_special,))
        num_processed += 1
        if num_processed % 100 == 0:
            print(("{} Statuses Processed: {}".format
                   (num_processed, datetime.datetime.now())))

    # if there is no next page, we're done.
    if 'paging' in statuses:
        after = statuses['paging']['cursors']['after']
    else:
        has_next_page = False
The problem is the 6th line, with json.loads. Does anyone have an idea how to solve it?
thank you
here is the request_until_succeed function:
def request_until_succeed(url):
    """GET `url`, retrying every 5 seconds on errors until HTTP 200,
    then return the response body decoded as text.

    Decoding here is the fix for the Python 3 error
    "the JSON object must be str, not 'bytes'": urlopen().read()
    returns bytes on Python 3.
    """
    req = Request(url)
    success = False
    while success is False:
        try:
            response = urlopen(req)
            if response.getcode() == 200:
                success = True
        except Exception as e:
            print(e)
            time.sleep(5)
            print("Error for URL {}: {}".format(url, datetime.datetime.now()))
            print("Retrying.")
    # BUG FIX: decode the raw bytes so callers can pass the result
    # straight to json.loads on Python 3.5.
    return response.read().decode('utf-8')
I have a function that gets the profile data of an user:
API.py
def getProfileData(self):
    """Fetch the authenticated user's profile from the current_user endpoint."""
    payload = {
        '_uuid': self.uuid,
        '_uid': self.username_id,
        '_csrftoken': self.token,
    }
    signed = self.generateSignature(json.dumps(payload))
    return self.SendRequest('accounts/current_user/?edit=true', signed)
I want to print the returned request in the terminal, so I did this:
test.py
from API import API
API = API("username", "password")
API.login() # login
print(API.getProfileData())
But nothing is logged in the console.
Maybe I'm doing it the JavaScript way, since that's my background.
What's the correct way to do it?
EDIT:
This is what's inside SendRequest:
def SendRequest(self, endpoint, post=None, login=False):
    """Issue a POST (when `post` is given) or GET request to the API.

    On HTTP 200, stores the response in self.LastResponse/self.LastJson
    and returns True; otherwise prints the status and returns False.
    Raises Exception when not logged in (unless `login=True`).
    """
    if not self.isLoggedIn and not login:
        # BUG FIX: the original had an unreachable `return;` after this raise.
        raise Exception("Not logged in!\n")
    self.s.headers.update({'Connection': 'close',
                           'Accept': '*/*',
                           'Content-type': 'application/x-www-form-urlencoded; charset=UTF-8',
                           'Cookie2': '$Version=1',
                           'Accept-Language': 'en-US',
                           'User-Agent': self.USER_AGENT})
    if post is not None:  # POST (idiom: `is not None` instead of `!= None`)
        response = self.s.post(self.API_URL + endpoint, data=post)  # , verify=False
    else:  # GET
        response = self.s.get(self.API_URL + endpoint)  # , verify=False
    if response.status_code == 200:
        self.LastResponse = response
        self.LastJson = json.loads(response.text)
        return True
    print("Request return " + str(response.status_code) + " error!")
    # for debugging: error bodies may not be valid JSON
    self.LastResponse = response
    try:
        self.LastJson = json.loads(response.text)
    except ValueError:  # narrowed from a bare except
        pass
    return False
def getTotalFollowers(self, usernameId):
    """Page through getUserFollowers until big_list is False; return all users."""
    collected = []
    cursor = ''
    while 1:
        self.getUserFollowers(usernameId, cursor)
        page = self.LastJson
        collected.extend(page["users"])
        if page["big_list"] == False:
            return collected
        cursor = page["next_max_id"]
def getTotalFollowings(self, usernameId):
    """Page through getUserFollowings until big_list is False; return all users."""
    collected = []
    cursor = ''
    while 1:
        self.getUserFollowings(usernameId, cursor)
        page = self.LastJson
        collected.extend(page["users"])
        if page["big_list"] == False:
            return collected
        cursor = page["next_max_id"]
def getTotalUserFeed(self, usernameId, minTimestamp=None):
    """Page through getUserFeed until more_available is False; return all items."""
    feed_items = []
    cursor = ''
    while 1:
        self.getUserFeed(usernameId, cursor, minTimestamp)
        page = self.LastJson
        feed_items.extend(page["items"])
        if page["more_available"] == False:
            return feed_items
        cursor = page["next_max_id"]
If all you want to do is print the response that you get back, you can do that in SendRequest, but I suspect tha tyour real problem is that you are self-serializing your post data when requests does that for you. In any case, since your question is about printing:
if response.status_code == 200:
print('Yay, my response was: %s' % response.content)
self.LastResponse = response
self.LastJson = json.loads(response.text)
return True
else:
print ("Request return " + str(response.status_code) + " error!")
# for debugging
try:
self.LastResponse = response
self.LastJson = json.loads(response.text)
except:
pass
return False
I'm playing around the Twitter API and am in the process of developing a script to pull all Tweets with a certain hashtag down to a local mongoDB. I have it working fine when I'm downloading tweets from users, but when downloading tweets from a hashtag I get:
return loads(fp.read(),
AttributeError: 'int' object has no attribute 'read'
Can anyone offer their infinite wisdom into how I could get this script to work?
To run, save it as a .py file, cd to the folder and run:
python twitter.py
Code:
__author__ = 'Tom Cusack'
import pymongo
import oauth2 as oauth
import urllib2, json
import sys, argparse, time
def oauth_header(url, consumer, token):
    """Build the OAuth 1.0 Authorization header value for a GET of `url`."""
    params = {
        'oauth_version': '1.0',
        'oauth_nonce': oauth.generate_nonce(),
        'oauth_timestamp': int(time.time()),
    }
    signed = oauth.Request(method='GET', url=url, parameters=params)
    signed.sign_request(oauth.SignatureMethod_HMAC_SHA1(), consumer, token)
    return signed.to_header()['Authorization'].encode('utf-8')
def main():
### Twitter Settings
numtweets = '32000'
verbose = 'store_true'
retweet = 'store_false'
CONSUMER_KEY = 'M7Xu9Wte0eIZvqhb4G9HnIn3G'
CONSUMER_SECRET = 'c8hB4Qwps2aODQUx7UsyzQuCRifEp3PKu6hPQll8wnJGIhbKgZ'
ACCESS_TOKEN = '3213221313-APuXuNjVMbRbZpu6sVbETbgqkponGsZJVT53QmG'
ACCESS_SECRET = 'BJHrqWC9ed3pA5oDstSMCYcUcz2pYF3DmJ7jcuDe7yxvi'
base_url = url = 'https://api.twitter.com/1.1/search/tweets.json?include_entities=true&count=200&q=#mongodb&include_rts=%s' % (retweet)
oauth_consumer = oauth.Consumer(key = CONSUMER_KEY, secret = CONSUMER_SECRET)
oauth_token = oauth.Token(key = ACCESS_TOKEN, secret = ACCESS_SECRET)
### Mongodb Settings
uri = 'mongodb://127.0.0.1:27017/SARKY'
if uri != None:
try:
conn = pymongo.MongoClient(uri)
print 'Pulling Tweets..'
except:
print 'Error: Unable to connect to DB. Check uri variable.'
return
uri_parts = pymongo.uri_parser.parse_uri(uri)
db = conn[uri_parts['database']]
db['twitter-harvest'].ensure_index('id_str')
### Helper Variables for Harvest
max_id = -1
tweet_count = 0
stream = 0
### Begin Harvesting
while True:
auth = oauth_header(url, oauth_consumer, oauth_token)
headers = {"Authorization": auth}
request = urllib2.Request(url, headers = headers)
try:
stream = urllib2.urlopen(request)
except urllib2.HTTPError, err:
if err.code == 404:
print 'Error: Unknown user. Check --user arg'
return
if err.code == 401:
print 'Error: Unauthorized. Check Twitter credentials'
return
tweet_list = json.load(stream)
if len(tweet_list) == 0:
print 'No tweets to harvest!'
return
if 'errors' in tweet_list:
print 'Hit rate limit, code: %s, message: %s' % (tweets['errors']['code'], tweets['errors']['message'])
return
if max_id == -1:
tweets = tweet_list
else:
tweets = tweet_list[1:]
if len(tweets) == 0:
print 'Finished Harvest!'
return
for tweet in tweets:
max_id = id_str = tweet['id_str']
try:
if tweet_count == numtweets:
print 'Finished Harvest- hit numtweets!'
return
if uri != None:
db[user].update({'id_str':id_str},tweet,upsert = True)
else:
print tweet['text']
tweet_count+=1
if verbose == True and uri != None:
print tweet['text']
except Exception, err:
print 'Unexpected error encountered: %s' %(err)
return
url = base_url + '&max_id=' + max_id
if __name__ == '__main__':
    try:
        main()
    except SystemExit as e:
        # BUG FIX: the original's `if e.code == 0: pass` swallowed EVERY
        # SystemExit regardless of code; re-raise non-zero exits so the
        # process status reflects failure.
        if e.code != 0:
            raise
You initially set stream = 0. When your try...except block catches a HTTP response with a code that isn't 404 or 401, stream is still equal to 0, but your except block doesn't break out of the function.
I'd look more closely at what this response says.