I am trying to follow tutorials on YouTube and a lot of tutorials from the official Google developer docs, but I cannot find out where my error is.
my code:
import os

import google.auth
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError

path_to_key = "client_secret.json"
# set the environment variable
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = path_to_key

def upload_video_to_youtube(path_to_video, video_title, video_description, video_tags):
    try:
        # authenticate and build the YouTube API client
        client = build("youtube", "v3", credentials=google.auth.default())
        # define the video metadata
        video_metadata = {
            "snippet": {
                "title": video_title,
                "description": video_description,
                "tags": video_tags,
                "categoryId": 22
            },
            "status": {
                "privacyStatus": "public"
            }
        }
        # create the request to upload the video
        request = client.videos().insert(
            part=",".join(video_metadata.keys()),
            body=video_metadata,
            media_body=path_to_video
        )
        # execute the request and upload the video
        response = None
        while response is None:
            status, response = request.next_chunk()
            if status:
                print("Uploaded %d%%." % int(status.progress() * 100))
        print("Video ID: ", response["id"])
    except HttpError as error:
        print("An error occurred while uploading the video: ", error)
my error is:
google.auth.exceptions.DefaultCredentialsError: The file client_secret.json does not have a valid type. Type is None, expected one of ('authorized_user', 'service_account', 'external_account', 'impersonated_service_account', 'gdch_service_account')
I do not know where the problem is, because I checked my JSON and it looks correct. I created a new Google Cloud console project.
My JSON:
{
  "installed": {
    "client_id": "280376606200-a1luulq********j3fmo0qege7.apps.googleusercontent.com",
    "project_id": "bilionare-livestyle-shorts",
    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
    "token_uri": "https://oauth2.googleapis.com/token",
    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
    "client_secret": "GOCSPX-2cxBjDE*********xmHHVKtJV4aS",
    "redirect_uris": [
      "http://localhost"
    ]
  }
}
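The error itself points at the mismatch: a JSON file whose top-level key is "installed" is an OAuth client ID file, while GOOGLE_APPLICATION_CREDENTIALS must point at application default credentials of one of the listed types (authorized_user, service_account, and so on). With an "installed" client file you have to run the OAuth consent flow yourself. A minimal sketch, assuming google-auth-oauthlib is installed and client_secret.json sits next to the script:

from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload

SCOPES = ["https://www.googleapis.com/auth/youtube.upload"]

# Run the installed-app consent flow instead of google.auth.default();
# this opens a browser window and returns user credentials.
flow = InstalledAppFlow.from_client_secrets_file("client_secret.json", SCOPES)
credentials = flow.run_local_server(port=0)

client = build("youtube", "v3", credentials=credentials)

# next_chunk() also expects a resumable MediaFileUpload, not a bare path string.
media = MediaFileUpload("video.mp4", chunksize=-1, resumable=True)  # placeholder path

Note also that google.auth.default() returns a (credentials, project) tuple, so even with valid ADC the credentials= argument in the original code would receive the wrong type.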
I am new to this, but the code does not seem to be working.
The intention is to read JSON messages from a Service Bus endpoint and then copy and store them in a blob container, keeping the blob name as-is so integrity stays constant throughout.
I do not have much knowledge of this; I collected this code from a blog.
Also, if I can listen without an Azure Function, that would help too.
Here is the code piece:
with receiver:
    for msg in receiver:
        print(str(msg))
        logging.info('Python ServiceBus trigger processed a Topic: %s', msg.get_body().decode('utf-8'))
        # receiver.complete_message(msg)
        temp_path = tempfile.gettempdir()
        # Create a file in the local data directory to upload and download
        local_file_name = str(uuid.uuid4()) + ".txt"
        upload_file_path = os.path.join(temp_path, local_file_name)
        # Write text to the file
        with open(upload_file_path, 'w') as f:
            f.write(msg.get_body().decode('utf-8'))
        # Create a blob client using the local file name as the name for the blob
        blob_client = blob_service_client.get_blob_client(container=container_name, blob=local_file_name)
        print("\nUploading to Azure Storage as blob:\n\t" + local_file_name)
        # Upload the created file
        with open(upload_file_path, "rb") as data:
            blob_client.upload_blob(data)
Here we need to create an Azure Function that is configured to read the messages received from Service Bus queues.
For this we need to declare the bindings in the function.json file as below (a sketch of the matching function body follows the two binding examples):
serviceBusTrigger:
{
  "bindings": [
    {
      "type": "serviceBusTrigger",
      "name": "inputMessage",
      "connection": "AzureServiceBusConnectionString",
      "queueName": "inputqueue",
      "accessRights": "listen",
      "direction": "in"
    },
    {
      "type": "blob",
      "name": "inputBlob",
      "path": "container/{inputMessage}",
      "connection": "EnterConnection",
      "direction": "in"
    }
  ],
  "disabled": false
}
queueTrigger:
{
  "bindings": [
    {
      "type": "blob",
      "name": "inputBlob",
      "path": "incontainer/{queueTrigger}",
      "connection": "testweblog_STORAGE",
      "direction": "in"
    },
    {
      "type": "queueTrigger",
      "name": "myQueue",
      "queueName": "myqueue",
      "connection": "EnterConnection_STORAGE",
      "direction": "in"
    }
  ],
  "disabled": false
}
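As a minimal sketch (an assumption, not from the original blog: the parameter names must match the "name" fields in the serviceBusTrigger binding above), the matching Python function body would look like this:

import logging
import azure.functions as func

def main(inputMessage: str, inputBlob: func.InputStream):
    # inputMessage is bound to the Service Bus message,
    # inputBlob to the blob at container/{inputMessage}.
    logging.info('Service Bus message: %s', inputMessage)
    if inputBlob is not None:
        logging.info('Matching blob is %d bytes', inputBlob.length)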
For more information about the triggers, please refer to the input and output trigger documentation.
A queue is basically for first-in-first-out messages. When a message arrives from the Service Bus, the Service Bus queue trigger fires and the Azure Function is called. In the Azure Function we can process the message and then deliver it to its destination.
Below is sample code to receive messages from a Service Bus queue.
import os
from azure.servicebus import ServiceBusClient

CONNECTION_STR = os.environ['SERVICE_BUS_CONNECTION_STR']
QUEUE_NAME = os.environ["SERVICE_BUS_QUEUE_NAME"]

servicebus_client = ServiceBusClient.from_connection_string(conn_str=CONNECTION_STR)
with servicebus_client:
    receiver = servicebus_client.get_queue_receiver(queue_name=QUEUE_NAME)
    with receiver:
        received_msgs = receiver.receive_messages(max_message_count=10, max_wait_time=5)
        for msg in received_msgs:
            print(str(msg))
            receiver.complete_message(msg)
print("Receive is done.")
For more information, refer to the Azure Service Bus client library for Python.
servicebus_client = ServiceBusClient.from_connection_string(conn_str=CONNECTION_STR, logging_enable=True)
msg_topic = "XYZ"
with servicebus_client:
    receiver = servicebus_client.get_subscription_receiver(topic_name=TOPIC_NAME, subscription_name=SUBSCRIPTION_NAME, max_wait_time=5)
    with receiver:
        for msg in receiver:
            print(str(msg))
            msg_topic = msg
            print(str(msg_topic))
            receiver.complete_message(msg)

block_blob_service = BlockBlobService(account_name='stgidpdev', account_key='ZZZZ')
block_blob_service.create_container('servicebuscontainer', public_access=PublicAccess.Container)
print('Container Created')
# from azure.storage.blob import ContentSettings
block_blob_service.create_blob_from_text('servicebuscontainer', 'myblockblob', str(msg_topic), content_settings=None)
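Note that BlockBlobService is from the older azure-storage SDK. As a hedged sketch against the current azure-storage-blob v12 client (STORAGE_CONNECTION_STR, the container, and the blob name are placeholders), the same upload would look like:

from azure.storage.blob import BlobServiceClient

blob_service_client = BlobServiceClient.from_connection_string(STORAGE_CONNECTION_STR)
container_client = blob_service_client.get_container_client('servicebuscontainer')
container_client.upload_blob(name='myblockblob', data=str(msg_topic), overwrite=True)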
I am trying to automate my blog writing a bit by using the Blogger API v3.0.
I have my API_KEY, and by using it I managed to access my blog like this:
import requests
APIKEY = 'XXX-YYY-ZZZ'
BLOGID = '12345678'
get_blog = 'https://www.googleapis.com/blogger/v3/blogs/{BLOGID}?key={APIKEY}'.format(BLOGID=BLOGID, APIKEY=APIKEY)
response = requests.get(get_blog)
Next I tried to create a new post:
params = {
    "kind": "blogger#post",
    "blog": {
        "id": BLOGID
    },
    "title": "A new post",
    "content": "With <b>exciting</b> content..."
}
new_post = 'https://www.googleapis.com/blogger/v3/blogs/{blogID}/posts/?key={APIKEY}'.format(blogID=BLOGID, APIKEY=APIKEY)
response = requests.post(new_post, json=params)
But I got an error:
{u'error': {u'status': u'PERMISSION_DENIED', u'message': u'The caller does not have permission', u'code': 403, u'errors': [{u'reason': u'forbidden', u'message': u'The caller does not have permission', u'domain': u'global'}]}}
So I figured I needed OAuth 2.0 credentials. I created them, and now I have a client_id and client_secret, which I tried to add to the params:
CLIENT_SECRET = 'ABCD-EFGH'
CLIENT_ID = '1111'

params = {
    "client_secret": CLIENT_SECRET,
    "client_id": CLIENT_ID,
    "kind": "blogger#post",
    "blog": {
        "id": BLOGID
    },
    "title": "A new post",
    "content": "With <b>exciting</b> content..."
}
new_post = 'https://www.googleapis.com/blogger/v3/blogs/{blogID}/posts/?key={APIKEY}'.format(blogID=BLOGID, APIKEY=APIKEY)
response = requests.post(new_post, json=params)
However, I am still getting the same error as before.
Clearly I am missing something here, but I couldn't find a solution... So how should I use the OAuth 2.0 credentials correctly?
I found this guide by Rajashekar Jangam (ImRaj90) very informative.
I followed it and managed to work with my blog using the API.
Thank you Rajashekar.
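For anyone hitting the same 403: the client_id and client_secret are not sent as request parameters; they are exchanged for an access token, which then goes in the Authorization header. A minimal sketch with google-auth-oauthlib (the scope and file name are assumptions, not taken from the guide):

import requests
from google_auth_oauthlib.flow import InstalledAppFlow

SCOPES = ['https://www.googleapis.com/auth/blogger']

# client_secret.json is the OAuth client file downloaded from the Google Cloud console.
flow = InstalledAppFlow.from_client_secrets_file('client_secret.json', SCOPES)
credentials = flow.run_local_server(port=0)

new_post = 'https://www.googleapis.com/blogger/v3/blogs/{BLOGID}/posts/'.format(BLOGID=BLOGID)
body = {"kind": "blogger#post", "title": "A new post", "content": "With <b>exciting</b> content..."}
response = requests.post(new_post, json=body,
                         headers={'Authorization': 'Bearer ' + credentials.token})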
I'm trying to add a videoId to a YouTube playlist using the YouTube API in Python. Below is my code:
import json
from apiclient.discovery import build
import requests

post_url = "https://www.googleapis.com/youtube/v3/playlistItems"
payload = {
    'key': "*******",
    'part': 'snippet',
    'playlistId': '****',
    'videoId': '****'
}
r = requests.post(post_url, data=json.dumps(payload))
print(r.text)
But I keep getting the error below:
{
  "error": {
    "errors": [
      {
        "domain": "global",
        "reason": "required",
        "message": "Login Required",
        "locationType": "header",
        "location": "Authorization"
      }
    ],
    "code": 401,
    "message": "Login Required"
  }
}
I've tried getting data with a GET request in a similar way and was successful. What is wrong with this, then?
401 means Unauthorized, so you need to pass authentication details: write operations such as playlistItems.insert require an OAuth 2.0 token, not just an API key.
Examples:
https://developers.google.com/youtube/v3/docs/playlists/list#examples
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
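As a hedged sketch of an authorized insert with the Google API Python client (the scope, client secrets file, and IDs are placeholders; note that playlistId and videoId belong in the request body's snippet rather than the query string):

from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build

SCOPES = ['https://www.googleapis.com/auth/youtube']

flow = InstalledAppFlow.from_client_secrets_file('client_secret.json', SCOPES)
youtube = build('youtube', 'v3', credentials=flow.run_local_server(port=0))

response = youtube.playlistItems().insert(
    part='snippet',
    body={
        'snippet': {
            'playlistId': 'YOUR_PLAYLIST_ID',
            'resourceId': {'kind': 'youtube#video', 'videoId': 'YOUR_VIDEO_ID'}
        }
    }
).execute()
print(response)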
Using the Python Quickstart example for the v4 Google Sheets API as a starting point, I've tried to make a library with read and write functions that higher-level classes can use to interact with my sheet. This only works if I run the library itself as a script to call the read/write functions. Both read and write throw the following error if I use them after importing the library into an external script located in a different directory:
HttpError 404 when requesting https://sheets.googleapis.com/$discovery/v4/spreadsheets/
That URL looks malformed with "$discovery" in it.
Here's my library with a main section which works well if this library is run as a script:
# sheetlib.py
""" Google Docs Spreadsheets wrapper
"""
import httplib2
import os
import json
json.JSONEncoder.default = str

from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage

SCOPES = 'https://www.googleapis.com/auth/spreadsheets'
CLIENT_SECRET_FILE = 'client_secret.json'
CREDENTIAL_PATH = 'sheets.googleapis.test.json'
APPLICATION_NAME = 'Test Sheet'
SPREADSHEET_ID = 'abcdefg'

def get_credentials():
    """Gets valid user credentials from storage.

    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.

    Returns:
        Credentials, the obtained credential.
    """
    store = Storage(CREDENTIAL_PATH)
    print 'Environment: {}'.format(json.dumps(os.environ.__dict__['data']))
    print 'Loaded store from {}: {}'.format(CREDENTIAL_PATH, json.dumps(store.__dict__))
    credentials = store.get()
    if not credentials or credentials.invalid:
        flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
        flow.user_agent = APPLICATION_NAME
        credentials = tools.run_flow(flow, store)
        print 'Storing credentials to ' + CREDENTIAL_PATH
    return credentials

def build_service():
    """ Returns service object for reading/writing to spreadsheet """
    credentials = get_credentials()
    print "credentials: {}".format(json.dumps(credentials.__dict__))
    http = credentials.authorize(httplib2.Http())
    discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?version=v4')
    service = discovery.build('sheets', 'v4', http=http, discoveryServiceUrl=discoveryUrl)
    print 'service: {}'.format(json.dumps(service._http.__dict__))
    return service

def write(range, values):
    service = build_service()
    body = {
        'values': values
    }
    service.spreadsheets().values().append(
        spreadsheetId=SPREADSHEET_ID, range=range,
        valueInputOption='RAW', body=body, insertDataOption='INSERT_ROWS').execute()

def read(range):
    """ Pass a range to read, like 'RawData!A:E' """
    service = build_service()
    resp = service.spreadsheets().values().get(spreadsheetId=SPREADSHEET_ID, range=range).execute()
    return resp

class Magic():
    """Reads and writes to the Magic tab of sheet"""
    def spell_list(self):
        return [r for r in read('Magic!A1:G100')['values'][1:]]

if __name__ == '__main__':
    m = Magic()
    print m.spell_list()
If I move the Magic class to another file located in a different directory and try to use the imported read, it throws the 404 error:
# magic_test.py
from sheetlib import read

class BadMagic():
    """Reads and writes to the Magic tab of sheet"""
    def spell_list(self):
        return [r for r in read('Magic!A1:G100')['values'][1:]]

m = BadMagic()
m.spell_list()
Traceback (most recent call last):
  File "magic_test.py", line 0, in main
    return [r[0] for r in read('Magic!A2:A100')['values']]
  File "sheetlib.py", line 0, in read
    resp = service.spreadsheets().values().get(spreadsheetId=SPREADSHEET_ID, range=range).execute()
  File "/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine/lib/google-api-python-client/apiclient/http.py", line 292, in execute
    raise HttpError(resp, content, self.uri)
apiclient.errors.HttpError: <HttpError 404 when requesting https://sheets.googleapis.com/$discovery/v4/spreadsheets/abcdefg/values/Magic%21A2%3AA100?alt=json returned "Not Found">
Exploring further, I see that the credentials: and service: output from the build_service() function differs depending on which script is using it:
Calling from sheetlib.py (working)
credentials:
{
  "scopes": "set([u'https://www.googleapis.com/auth/spreadsheets'])",
  "revoke_uri": "https://accounts.google.com/o/oauth2/revoke",
  "access_token": "asdf",
  "token_uri": "https://accounts.google.com/o/oauth2/token",
  "token_info_uri": "https://www.googleapis.com/oauth2/v3/tokeninfo",
  "token_response": {
    "access_token": "asdf",
    "token_type": "Bearer",
    "expires_in": 3600
  },
  "invalid": false,
  "refresh_token": "qwer",
  "client_id": "1234.apps.googleusercontent.com",
  "id_token": null,
  "client_secret": "zxcv",
  "token_expiry": "2017-03-08 17:01:42",
  "store": "<oauth2client.file.Storage object at 0x10bbd6690>",
  "user_agent": "Magic Sheet"
}
service:
{
  "force_exception_to_status_code": false,
  "forward_authorization_headers": false,
  "authorizations": [],
  "proxy_info": "<function proxy_info_from_environment at 0x10af9aed8>",
  "follow_redirects": true,
  "cache": null,
  "request": "<function new_request at 0x10b3dba28>",
  "connections": {},
  "certificates": "<httplib2.KeyCerts object at 0x10b3df3d0>",
  "optimistic_concurrency_methods": [
    "PUT",
    "PATCH"
  ],
  "follow_all_redirects": false,
  "timeout": null,
  "ignore_etag": false,
  "ca_certs": null,
  "credentials": "<httplib2.Credentials object at 0x10b3df410>",
  "disable_ssl_certificate_validation": false
}
Calling from magic_test.py (broken)
credentials:
{
  "access_token": "asdf",
  "token_uri": "https://accounts.google.com/o/oauth2/token",
  "invalid": false,
  "refresh_token": "qwer",
  "client_id": "1234.apps.googleusercontent.com",
  "id_token": null,
  "client_secret": "zxcv",
  "token_expiry": "2017-03-08 17:01:42",
  "store": "<oauth2client.file.Storage object at 0x1101a2e50>",
  "user_agent": "Accounting Sheet"
}
service:
{
  "force_exception_to_status_code": false,
  "forward_authorization_headers": false,
  "authorizations": [],
  "proxy_info": "<bound method type.from_environment of <class 'httplib2.ProxyInfo'>>",
  "follow_redirects": true,
  "cache": null,
  "request": "<function new_request at 0x1101bae60>",
  "connections": {
    "https:sheets.googleapis.com": "<httplib2.HTTPSConnectionWithTimeout instance at 0x1101b1ea8>"
  },
  "certificates": "<httplib2.KeyCerts object at 0x1101c2890>",
  "optimistic_concurrency_methods": [
    "PUT",
    "PATCH"
  ],
  "follow_all_redirects": false,
  "timeout": null,
  "ignore_etag": false,
  "ca_certs": null,
  "credentials": "<httplib2.Credentials object at 0x1101c28d0>",
  "disable_ssl_certificate_validation": false
}
Any clue why different parts of httplib2 would be used depending on which script called it?
You may refer to this thread. Note that the body arg included in update(), although shown in the documentation as JSON, actually needs to be a regular Python dictionary. Also, your 404 error means that you requested something that doesn't exist (or that the server doesn't want you to know exists). Here's an article which might help you in fixing the 404 error.
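One more thing worth checking, as an assumption based on the two dumps above: the stored user_agent differs ("Magic Sheet" vs "Accounting Sheet"), which suggests each script loads a different token file, since Storage(CREDENTIAL_PATH) resolves relative paths against the caller's current working directory. A sketch that pins both files to the library's own location:

import os

# Hypothetical fix: resolve credential files relative to sheetlib.py itself
# rather than the importing script's working directory.
_BASE_DIR = os.path.dirname(os.path.abspath(__file__))
CLIENT_SECRET_FILE = os.path.join(_BASE_DIR, 'client_secret.json')
CREDENTIAL_PATH = os.path.join(_BASE_DIR, 'sheets.googleapis.test.json')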
I am trying to load a local file to BigQuery by setting up server-to-server auth.
I've done the following steps:
Created a service account
Created a JSON key file for this account
Activated the service account with
gcloud auth activate-service-account command
Logged in with
gcloud auth login
Tried to execute a Python script to upload a file to BigQuery:
scopes = ['https://www.googleapis.com/auth/bigquery',
          'https://www.googleapis.com/auth/bigquery.insertdata']

credentials = ServiceAccountCredentials.from_json_keyfile_name(
    '/path/privatekey.json', scopes)

# Construct the service object for interacting with the BigQuery API.
service = build('bigquery', 'v2', credentials=credentials)

# Load configuration with the destination specified.
load_config = {
    'destinationTable': {
        'projectId': "project id",
        'datasetId': "data set id",
        'tableId': "table name"
    }
}

# Setup the job here.
# load[property] = value
load_config['schema'] = {
    'fields': [
        <several field>
    ]
}

upload = MediaFileUpload('/path/to/csv/file',
                         mimetype='application/octet-stream',
                         # This enables resumable uploads.
                         resumable=True)

# End of job configuration.
run_load.start_and_wait(service.jobs(),
                        "my project id",
                        load_config,
                        media_body=upload)
The result is:
{
  "error": {
    "errors": [
      {
        "domain": "global",
        "reason": "required",
        "message": "Login Required",
        "locationType": "header",
        "location": "Authorization"
      }
    ],
    "code": 401,
    "message": "Login Required"
  }
}
But I have enough rights to create query jobs:
query_request = service.jobs()
query_data = {
    'query': (
        'SELECT COUNT(*) FROM [dmrebg.testDay];')
}
query_response = query_request.query(
    projectId=project_id,
    body=query_data).execute()

print('Query Results:')
for row in query_response['rows']:
    print('\t'.join(field['v'] for field in row['f']))
What did I miss? I thought that I had already logged in.
The problem is that any call to https://www.googleapis.com/bigquery/v2/projects/project_id/jobs/* causes the same "Login Required" error shown above.
So it is a problem with my browser auth; the Python auth is good.
And the root cause is that my CSV schema and data do not match.
Errors:
Too many errors encountered. (error code: invalid)
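For reference, a minimal sketch of the same load with the current google-cloud-bigquery client, where a schema/data mismatch surfaces as an exception from job.result() (the key path, CSV path, and table name are placeholders):

from google.cloud import bigquery
from google.oauth2 import service_account

credentials = service_account.Credentials.from_service_account_file('/path/privatekey.json')
client = bigquery.Client(credentials=credentials, project=credentials.project_id)

job_config = bigquery.LoadJobConfig(
    source_format=bigquery.SourceFormat.CSV,
    skip_leading_rows=1,
    autodetect=True,  # or pass schema=[bigquery.SchemaField(...), ...] explicitly
)

with open('/path/to/csv/file', 'rb') as f:
    job = client.load_table_from_file(f, 'dataset.table', job_config=job_config)

job.result()  # raises if the load fails, e.g. when schema and data do not match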