Feed string using slack webhook - python

So basically, I have a set of data (domain name, severity score) that is in string format as post, and I am trying to get it to post in Slack, but it keeps throwing errors and I don't know why. I appreciate the help.
I have tried changing the JSON portion a bit to see if it was that, as well as changing what is being sent to the function in general, and nothing helps.
def slackHook(post):
    webhook_url = #Ommited
    slack_content = {"channel": "#brian", "user": "Awesom-O", "attachment": [{
        "type": "section",
        "text": {
            "text": "Random message before domains",
            "type": "mrkdwn",
        },
        "fields": [
            {
                "type": "mrkdwn",
                "text": "Domain Severity Score"
            },
            {
                "type": "plain_text",
                "text": post
            }
        ]
    }]}
    string_payload = json.dumps(slack_content)
    r = requests.post(webhook_url, data=string_payload)
    if r.status_code != 200:
        raise ValueError('Request to slack.com returned an error %s, the response is:\n%s' % (r.status_code, r.text))

domains = db_query()
domains = str(domains)
slackHook(domains)
Happy path: I would just like to take my string and post it to my Slack channel, using the fields that I've given for context.
The current error:
raise ValueError('Request to slack.com returned an error %s, the response is:\n%s' % (r.status_code, r.text))
ValueError: Request to slack.com returned an error 400, the response is:
no_text

Your main issue is that you were mixing the syntax for attachments and blocks, which are different concepts. Attachments are outdated and should no longer be used.
Just replace "attachment" with "blocks" like so:
slack_content = {"channel": "#brian", "user": "Awesom-O", "blocks": [{


Python - AWS Lambda extract a key from JSON input

I'm trying to implement a function that will get the event from CloudWatch and print the results. I am able to get the event, but I want to extract one particular key from that JSON.
Here is my function:
import json

def lambda_handler(event, context):
    print("Received event: " + json.dumps(event, indent=2))
    message = event['Records'][0]['Sns']['Message']
    print(message)
The event received from CloudWatch:
"Records": [
{
"EventVersion": "1.0",
"EventSubscriptionArn": "arn:aws:sns:us-east-1:xxxxxxxxxxxxx:bhuvi:XXXXXXXXXXXXXXXXXXXXXXXXXX",
"EventSource": "aws:sns",
"Sns": {
"SignatureVersion": "1",
"Timestamp": "2018-01-13T19:18:44.369Z",
"Signature": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
"SigningCertUrl": "https://sns.us-east-1.amazonaws.com/SimpleNotificationService-XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX.pem",
"MessageId": "4b76b0ea-5e0f-502f-81ec-e23e03dbaf01",
"Message": "{\"AlarmName\":\"test\",\"AlarmDescription\":\"test\",\"AWSAccountId\":\"xxxxxxxxxxxxx\",\"NewStateValue\":\"ALARM\",\"NewStateReason\":\"Threshold Crossed: 1 out of the last 1 datapoints [2.6260535333900545 (13/01/18 19:13:00)] was greater than or equal to the threshold (1.0) (minimum 1 datapoint for OK -> ALARM transition).\",\"StateChangeTime\":\"2018-01-13T19:18:44.312+0000\",\"Region\":\"US East (N. Virginia)\",\"OldStateValue\":\"OK\",\"Trigger\":{\"MetricName\":\"CPUUtilization\",\"Namespace\":\"AWS/RDS\",\"StatisticType\":\"Statistic\",\"Statistic\":\"AVERAGE\",\"Unit\":null,\"Dimensions\":[{\"name\":\"DBInstanceIdentifier\",\"value\":\"myrds\"}],\"Period\":300,\"EvaluationPeriods\":1,\"ComparisonOperator\":\"GreaterThanOrEqualToThreshold\",\"Threshold\":1.0,\"TreatMissingData\":\"\",\"EvaluateLowSampleCountPercentile\":\"\"}}",
"MessageAttributes":
{}
,
"Type": "Notification",
"UnsubscribeUrl": "https://sns.us-east-1.amazonaws.com/?xcsgagrgrwgwrg",
"TopicArn": "arn:aws:sns:us-east-1:xxxxxxxxxxxxx:bhuvi",
"Subject": "ALARM: \"test\" in US East (N. Virginia)"
}
}
]
}
My extraction command (up to Message) and its result:
message = event['Records'][0]['Sns']['Message']
print(message)
Result
{
  "AlarmName": "test",
  "AlarmDescription": "test",
  "AWSAccountId": "xxxxxxxxxxxxx",
  "NewStateValue": "ALARM",
  "NewStateReason": "Threshold Crossed: 1 out of the last 1 datapoints [2.6260535333900545 (13/01/18 19:13:00)] was greater than or equal to the threshold (1.0) (minimum 1 datapoint for OK -> ALARM transition).",
  "StateChangeTime": "2018-01-13T19:18:44.312+0000",
  "Region": "US East (N. Virginia)",
  "OldStateValue": "OK",
  "Trigger": {
    "MetricName": "CPUUtilization",
    "Namespace": "AWS/RDS",
    "StatisticType": "Statistic",
    "Statistic": "AVERAGE",
    "Unit": null,
    "Dimensions": [
      {
        "name": "DBInstanceIdentifier",
        "value": "myrds"
      }
    ],
    "Period": 300,
    "EvaluationPeriods": 1,
    "ComparisonOperator": "GreaterThanOrEqualToThreshold",
    "Threshold": 1,
    "TreatMissingData": "",
    "EvaluateLowSampleCountPercentile": ""
  }
}
I want to extract some values from this message.
For example, I want to extract the name. So I tried the command below, but unfortunately it's not working. Can anyone help me with this?
My code for this:
message = event['Records'][0]['Sns']['Message']['Trigger']['Dimensions']['name']
print(message)
ERROR:
{
  "stackTrace": [
    [
      "/var/task/lambda_function.py",
      14,
      "lambda_handler",
      "message = event['Records'][0]['Sns']['Message']['Trigger']['Dimensions']['name']"
    ]
  ],
  "errorType": "TypeError",
  "errorMessage": "string indices must be integers"
}
So there are 3 problems:
Problem 1: In your example event, ['Records'][0]['Sns']['Message'] is a str in JSON format. That means you need to parse it into a dict, like this:
message = event['Records'][0]['Sns']['Message']
message = json.loads(message)
Problem 2: message['Trigger']['Dimensions'] is a list, but you are trying to access it as if it were a dict. So you only need to change your code to:
message = message['Trigger']['Dimensions'][0]['name']
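Putting problems 1 and 2 together, a minimal sketch of the handler (assuming the SNS-wrapped CloudWatch alarm event shown above):
import json

def lambda_handler(event, context):
    # The SNS message body arrives as a JSON string, so parse it first (problem 1)
    message = json.loads(event['Records'][0]['Sns']['Message'])
    # Dimensions is a list, so index into it before using the key (problem 2)
    name = message['Trigger']['Dimensions'][0]['name']
    print(name)  # prints: DBInstanceIdentifier
    return name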
Problem 3: Message is a str, which means you need to verify whether it is a plain str or a JSON str (otherwise you are going to have problems with multiple structures and types). For that, your code could look like this:
message = event['Records'][0]['Sns']['Message']
if isinstance(message, str):
    try:
        message = json.loads(message)
    except Exception as e:
        print(e)  # Or do nothing, this is just to log the error
elif isinstance(message, list):
    message = message[0]
# Maybe evaluate bool, tuple, etc other types
print('RESPONSE', message['Trigger']['Dimensions'][0]['name'] if isinstance(message, dict) else message)
However, I would also recommend making it more extensible by iterating over the elements that you know are lists. And for safety (to avoid KeyError-style failures), use the get() function with a default value (http://www.tutorialspoint.com/python/dictionary_get.htm), as sketched below. You could also create a reusable function to parse these structures.
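For example, a small defensive-access sketch using get() with defaults (the 'unknown' fallback values are just placeholders):
trigger = message.get('Trigger', {})        # {} if 'Trigger' is missing
dimensions = trigger.get('Dimensions', [])  # [] if 'Dimensions' is missing
name = dimensions[0].get('name', 'unknown') if dimensions else 'unknown'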
Good luck!
Just as Records is a list (which is why you write ['Records'][0]['Sns']...), so is Dimensions, so again you need to access the first element.

Getting KeyError when parsing JSON in Python for following response

TL;DR:
Confused about how to parse the following JSON response and get the value of [status of 12345 of dynamicValue_GGG of payload] in this case.
Full question:
I get the following (sanitized) response upon hitting a REST API via the Python code below:
response = requests.request("POST", url, data=payload,
                            headers=headers).json()
{
  "payload": {
    "name": "asdasdasdasd",
    "dynamicValue_GGG": {
      "12345": {
        "model": "asad",
        "status": "active",
        "subModel1": {
          "dynamicValue_67890": {
            "model": "qwerty",
            "status": "active"
          },
          "subModel2": {
            "dynamicValue_33445": {
              "model": "gghjjj",
              "status": "active"
            },
            "subModel3": {
              "dynamicValue_66778": {
                "model": "tyutyu",
                "status": "active"
              }
            }
          }
        },
        "date": "2016-02-04"
      },
      "design": "asdasdWWWsaasdasQ"
    }
  }
}
If I do a type(response['payload']), it gives me 'dict'.
Now, I'm trying to parse the response above and fetch certain keys and values out of it. The problem is that I'm not able to iterate using an "index" and instead have to specify the "key", but the response has certain "keys" that are dynamically generated and sent over. For instance, keys such as "dynamicValue_GGG" and "dynamicValue_66778" are not static, unlike the "status" key.
I can successfully parse it by specifying the keys explicitly, like:
print response['payload']['dynamicValue_GGG']['12345']['status']
in which case I get the expected output 'active'.
However, since I have no control over 'dynamicValue_GGG', it would work only if I could specify something like this instead:
print response['payload'][0][0]['status']
But the above line gives me the error "KeyError: 0" when the Python code is executed.
Is there some way I can use both keys and indexes together in this case?
A dictionary in Python cannot be accessed by integer position, so you cannot use indexing. You'll have to iterate over all elements, potentially recursively, and test whether each one is the thing you're looking for. For example:
def find_submodels(your_dict):
    for item_key, item_values in your_dict.items():
        if 'status' in item_values:
            print item_key, item_values['status']
        if type(item_values) == dict:
            find_submodels(item_values)

find_submodels(your_dict)
Which would output:
12345 active
dynamicValue_67890 active
dynamicValue_66778 active
dynamicValue_33445 active
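If the nesting is fixed and only the key names are dynamic, another option is to locate the dynamic keys by prefix or position. A sketch, assuming the structure shown in the question and that each dynamic level holds a single entry:
payload = response['payload']
# find the 'dynamicValue_*' key without knowing its full name
dynamic_key = next(k for k in payload if k.startswith('dynamicValue_'))
inner = payload[dynamic_key]      # the object that holds '12345'
first_id = next(iter(inner))      # first (here: only) key at that level
print(inner[first_id]['status'])  # 'active'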

In python, communicating with stackdriver always returns success, doesn't send anything

For some inexplicable reason, Google provides no Stackdriver API client for App Engine, so I'm stuck implementing one. No worries, I thought: I have already worked with the API builder to talk to BigQuery, so I built a client and started trying to send events:
credentials = SignedJwtAssertionCredentials(STACKDRIVER_AUTH_GOOGLE_CLIENT_EMAIL,
                                            STACKDRIVER_AUTH_GOOGLE_PRIVATE_KEY,
                                            scope='https://www.googleapis.com/auth/trace.append')
http = httplib2.Http()
credentials.refresh(http)  # Working around an oauth2client bug
credentials = credentials.authorize(http)

service = build('cloudtrace', 'v1', http=http)
batch = service.new_batch_http_request()
batch.add(service.projects().patchTraces(
    body=traces_json,
    projectId=STACKDRIVER_AUTH_GOOGLE_PROJECT_ID))
print batch.execute()
I left out the definition of traces_json because no matter what I send, the service always responds with an error. If traces_json = '{}':
{u'error': {u'code': 400,
            u'errors': [{u'domain': u'global',
                         u'message': u'Invalid value at \'traces\' (type.googleapis.com/google.devtools.cloudtrace.v1.Traces), "{}"',
                         u'reason': u'badRequest'}],
            u'message': u'Invalid value at \'traces\' (type.googleapis.com/google.devtools.cloudtrace.v1.Traces), "{}"',
            u'status': u'INVALID_ARGUMENT'}}
But even if I use a body, crafted from the google documentation, I still get the same error.
I'm running a packet sniffer on the machine where I'm attempting this, and only very rarely see it actually communicating with googleapis.com.
So the question is, really, what am I missing that will get me sending events to stackdriver?
UPDATE
Here's the most recent iteration of what I'd been working with, though using the google doc example verbatim (with the exception of changing the project id) produces the same result.
{
  "traces": [
    {
      "projectId": "projectname",
      "traceId": "1234123412341234aaaabb3412347890",
      "spans": [
        {
          "kind": "RPC_SERVER",
          "name": "trace_name",
          "labels": {"label1": "value1", "label2": "value2"},
          "spanId": "spanId1",
          "startTime": "2016-06-01T05:01:23.045123456Z",
          "endTime": "2016-06-01T05:01:23.945123456Z",
        },
      ],
    },
  ],
}
And the error message that comes with it:
{u'error': {u'code': 400,
            u'errors': [{u'domain': u'global',
                         u'message': u'Invalid value at \'traces\' (type.googleapis.com/google.devtools.cloudtrace.v1.Traces), "MY ENTIRE JSON IS REPEATED HERE"',
                         u'reason': u'badRequest'}],
            u'message': u'Invalid value at \'traces\' (type.googleapis.com/google.devtools.cloudtrace.v1.Traces), "MY ENTIRE JSON IS REPEATED HERE"',
            u'status': u'INVALID_ARGUMENT'}}
SECOND UPDATE
Doing this in the explorer produces approximately the same result. I had to switch to a numeric span_id because, despite the docs' statement that it only has to be a unique string, I get errors about requiring what appears to be a 64-bit integer, any time I provide anything else.
PATCH https://cloudtrace.googleapis.com/v1/projects/[number or name]/traces?key={YOUR_API_KEY}
{
  "traces": [
    {
      "projectId": "[number or name]",
      "traceId": "1234123412341234aaaabb3412347891",
      "spans": [
        {
          "kind": "RPC_SERVER",
          "name": "trace_name",
          "labels": {
            "label1": "value1"
          },
          "startTime": "2016-06-01T05:01:23.045123456Z",
          "endTime": "2016-06-01T05:01:25.045123456Z"
        },
        {
          "spanId": "0"
        }
      ]
    }
  ]
}
Response:
{
  "error": {
    "code": 400,
    "message": "Request contains an invalid argument.",
    "status": "INVALID_ARGUMENT"
  }
}
The issue is in the format of your data. You cannot send empty messages either. The best way to explore how to use the API is the Stackdriver Trace API explorer, where you will find the exact data structure to send:
https://cloud.google.com/trace/api/reference/rest/v1/projects/patchTraces#traces
Pay special attention to the format of traceId. It needs to be a 32-character hex string, like this: 7d9d1a6e2d1f3f27484992f33d97e5cb
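For example, a valid trace ID can be generated with the standard uuid module (a small sketch):
import uuid

trace_id = uuid.uuid4().hex  # 32 lowercase hex characters, e.g. '7d9d1a6e2d1f3f27484992f33d97e5cb'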
Here is a working Python example on GitHub that shows how to use the three Stackdriver Trace methods: https://github.com/qike/cloud-trace-samples-python
Copy-pasted code below:
def list_traces(stub, project_id):
    """Lists traces in the given project."""
    trace_id = None
    req = trace_pb2.ListTracesRequest(project_id=project_id)
    try:
        resp = stub.ListTraces(req, TIMEOUT)
        for t in resp.traces:
            trace_id = t.trace_id
            print("Trace is: {}".format(t.trace_id))
    except NetworkError, e:
        logging.warning('Failed to list traces: {}'.format(e))
        sys.exit(1)
    return trace_id


def patch_traces(stub, project_id):
    req = trace_pb2.PatchTracesRequest(project_id=project_id)
    trace_id = str(uuid.uuid1()).replace('-', '')
    now = time.time()

    trace = req.traces.traces.add()
    trace.project_id = project_id
    trace.trace_id = trace_id

    span1 = trace.spans.add()
    span1.span_id = 1
    span1.name = "/span1.{}".format(trace_id)
    span1.start_time.seconds = int(now) - 10
    span1.end_time.seconds = int(now)

    span2 = trace.spans.add()
    span2.span_id = 2
    span2.name = "/span2"
    span2.start_time.seconds = int(now) - 8
    span2.end_time.seconds = int(now) - 5

    try:
        resp = stub.PatchTraces(req, TIMEOUT)
        print("Trace added successfully.\n"
              "To view list of traces, go to: "
              "http://console.cloud.google.com/traces/traces?project={}&tr=2\n"
              "To view this trace added, go to: "
              "http://console.cloud.google.com/traces/details/{}?project={}"
              .format(project_id, trace_id, project_id))
    except NetworkError, e:
        logging.warning('Failed to patch traces: {}'.format(e))
        sys.exit(1)


def get_trace(stub, project_id, trace_id):
    req = trace_pb2.GetTraceRequest(project_id=project_id,
                                    trace_id=trace_id)
    try:
        resp = stub.GetTrace(req, TIMEOUT)
        print("Trace retrieved: {}".format(resp))
    except NetworkError, e:
        logging.warning('Failed to get trace: {}'.format(e))
        sys.exit(1)
UPDATED to answer error received from API explorer
Regarding the errors you got from the API explorer: they were due to using 0 as the span_id. It should be a 64-bit int other than 0.
I also observed that the span_id you set is in a different span object than the one you intended. Make sure you don't click a "+" sign by mistake and add a new span object.
Below is a successful patch request I sent to my project through API explorer:
{
  "traces": [
    {
      "projectId": "<project ID>",  // I used string ID, not numeric number
      "traceId": "1234123412341234aaaabb3412347891",
      "spans": [
        {
          "spanId": "1",
          "name": "foo",
          "startTime": "2016-06-01T05:01:23.045123456Z",
          "endTime": "2016-06-01T05:01:25.045123456Z"
        }
      ]
    }
  ]
}
Response
200

Equivalent of Python "json.dumps()" in R?

I'm a beginner student of R (still taking the "R Programming" course on Coursera), and I'm trying to practice R by porting some easy code from Python to R.
Currently I'm trying to make API calls to a KairosDB database. In order to make the query, I need to encode the Python object with json.dumps() (from the native json library), but I've searched a lot and I don't understand how I can do that with R and its jsonlite library. I don't even know if I'm creating the JSON object correctly, but this is what I've found in my searches.
My code written in Python 3 (from this repo):
import requests
import json

kairosdb_server = "http://localhost:8080"

# Simple test
query = {
    "start_relative": {
        "value": "4",
        "unit": "years"
    },
    "metrics": [
        {
            "name": "test",
            "limit": 10000
        }
    ]
}

response = requests.post(kairosdb_server + "/api/v1/datapoints/query", data=json.dumps(query))

print("Status code: %d" % response.status_code)
print("JSON response:")
print(response.json())
My current code written in R 3.2.3:
library(httr)
library(jsonlite)
kairosdb_server <- 'http://localhost:8080'
query <- serializeJSON(toJSON('
"start_relative": {
"value": "4",
"unit": "years"
},
"metrics": [
{
"name": "test",
"limit": 1000
}
]
'))
url <- paste(kairosdb_server, '/api/v1/datapoints/query')
response <- POST(url, body = query, encode = 'json')
print(paste("Query status code: ", response$status_code))
print(paste("JSON response: \n", content(response, type = 'application/json')))
If I run that, I get the following error:
print(paste("Query status code: ", response$status_code))
# [1] "Query status code: 400"
print(paste("JSON response: \n", content(response, type = 'application/json')))
# [1] "JSON response: \n list(\"query.metric[] must have a size of at least 1\")"
What am I doing wrong?
Normally one would pass a named list into body, but trying to get R to preserve the array in "metrics" is tricky. Since you kind of already have JSON with the original Python structure, why not just add the surrounding braces and pass it in as a character vector? i.e.:
query <- '{"start_relative": {
"value": "4",
"unit": "years"
},
"metrics": [
{
"name": "test",
"limit": 10000
}
]}'
(then just use that query in the POST). It's equivalent JSON to what json.dumps() spits out:
# get rid of newlines and spaces just to show they are the same,
# the server won't (shouldn't) care if there are newlines/spaces
cat(gsub(" \\]", "]", gsub("\\[ ", "[", gsub(" \\}", "}", gsub("\\{ ", "{", gsub("\ +", " ", gsub("\\n", "", query)))))))
{"start_relative": {"value": "4", "unit": "years"}, "metrics": [{"name": "test", "limit": 10000}]}
# python
json.dumps(query)
'{"metrics": [{"limit": 10000, "name": "test"}], "start_relative": {"unit": "years", "value": "4"}}'
If you do need an R data structure to work with, you're going to end up manipulating the output of toJSON.

How to send data and custom headers using urllib2

Hi, I am using urllib2 to send some data and custom headers to a link. I am getting a 500 internal server error. I have contacted the service, and they are saying the data (JSON data) is correct but there is some error in my Python code. What am I doing wrong?
Following is the code.
import urllib2, urllib
import json

PREPAYMENT_URL = "https://services.gharpay.in/rest/GharpayService/"
PREPAYMENT_USERNAME = "somename"
PREPAYMENT_PASSWORD = "somepass"

data = {
    "customerDetails": {
        "address": "ads",
        "contactNo": "9663924147",
        "email": "a#c.com",
        "firstName": "akash",
        "lastName": "deshpande",
        "prefix": "Mr."
    },
    "orderDetails": {
        "pincode": "411036",
        "clientOrderID": "21234",
        "deliveryDate": "13-10-2013",
        "orderAmount": "123",
        "clientComments": "please be carefull",
        "paymentMode": "Cash",
        "productDetails": {
            "productID": "21334",
            "productQuantity": "1",
            "unitCost": "123",
            "productDescription": "tshirt"
        },
        "templateID": ""
    },
}

def create(request):
    function = 'createOrder'
    url = PREPAYMENT_URL
    url = url + function
    headers = {'username': PREPAYMENT_USERNAME, 'password': PREPAYMENT_PASSWORD, 'Content-type': 'application/json'}
    data1 = urllib.urlencode(data)
    req = urllib2.Request(url, data1, headers)
    try:
        contents = urllib2.urlopen(req).read()
    except urllib2.HTTPError as e:
        error_message = e.read()
        print error_message  # this error message is being printed. It is showing 500 error.
Your code is perfect except for one teensy-weensy detail:
The header should be Content-Type, not Content-type.
Maybe try changing this header and let me know if it works!
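For reference, a minimal sketch of the request with only that header change applied, everything else kept exactly as in the question:
headers = {'username': PREPAYMENT_USERNAME,
           'password': PREPAYMENT_PASSWORD,
           'Content-Type': 'application/json'}  # capital "T" in "Type"
data1 = urllib.urlencode(data)
req = urllib2.Request(url, data1, headers)
contents = urllib2.urlopen(req).read()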
