I have been using tap-mysql from singer.io
Below is my config file:
{
"host": "localhost",
"port": "3306",
"user": "root",
"password": "password"}
This successfully returns schema on --discover
and my properties file is:
{
"streams": [
{
"key_properties": [
"id"
],
"tap_stream_id": "example_db-animals",
"schema": {
"selected": "true",
"properties": {
"likes_getting_petted": {
"selected": "true",
"inclusion": "available",
"type": [
"null",
"boolean"
]
},
"name": {
"selected": "true",
"maxLength": 255,
"inclusion": "available",
"type": [
"null",
"string"
]
},
"id": {
"selected": "true",
"minimum": -2147483648,
"inclusion": "automatic",
"maximum": 2147483647,
"type": [
"null",
"integer"
]
}
},
"type": "object"
},
"table_name": "animals",
"metadata": [
{
"metadata": {
"selected-by-default": true,
"sql-datatype": "int(11)"
},
"breadcrumb": [
"properties",
"id"
]
},
{
"metadata": {
"database-name": "example_db",
"selected-by-default": false,
"is-view": false,
"row-count": 3
},
"breadcrumb": []
},
{
"metadata": {
"selected-by-default": true,
"sql-datatype": "varchar(255)"
},
"breadcrumb": [
"properties",
"name"
]
},
{
"metadata": {
"selected-by-default": true,
"sql-datatype": "tinyint(1)"
},
"breadcrumb": [
"properties",
"likes_getting_petted"
]
}
],
"stream": "animals"
}
]
}
I have added selected flags.
On following command
$ tap-mysql -c config.json --properties properties.json
I have received the following response
{"type": "STATE", "value": {"currently_syncing": null}}
Though my table has rows
You need to make sure that you are marking that table as 'selected' in your properties.json file. Also make sure you specify the replication-method type.
this section below needs to be changed from
"metadata": {
"database-name": "example_db",
"selected-by-default": false,
"is-view": false,
"row-count": 3
},
"breadcrumb": []
to
"metadata": {
"database-name": "example_db",
"selected-by-default": false,
"is-view": false,
"row-count": 3,
"selected": true,
"replication-method": "FULL_TABLE"
},
"breadcrumb": []
The two pieces I believe you are missing are the two lines below:
"selected": true,
"replication-method": "FULL_TABLE"
See the GitHub documentation example for further clarification: https://github.com/singer-io/tap-mysql#replication-methods-and-state-file
Related
I would like to modify the value of a field on a specific index of a nested type depending on another value of the same nested object or a field outside of the nested object.
As example, I have the current mapping of my index feed:
{
"feed": {
"mappings": {
"properties": {
"attacks_ids": {
"type": "keyword"
},
"created_by": {
"type": "keyword"
},
"date": {
"type": "date"
},
"groups_related": {
"type": "keyword"
},
"indicators": {
"type": "nested",
"properties": {
"date": {
"type": "date"
},
"description": {
"type": "text"
},
"role": {
"type": "keyword"
},
"type": {
"type": "keyword"
},
"value": {
"type": "keyword"
}
}
},
"malware_families": {
"type": "keyword"
},
"published": {
"type": "boolean"
},
"references": {
"type": "keyword"
},
"tags": {
"type": "keyword"
},
"targeted_countries": {
"type": "keyword"
},
"title": {
"type": "text"
},
"tlp": {
"type": "keyword"
}
}
}
}
}
Take the following document as example:
{
"took": 194,
"timed_out": false,
"_shards": {
"total": 1,
"successful": 1,
"skipped": 0,
"failed": 0
},
"hits": {
"total": {
"value": 1,
"relation": "eq"
},
"max_score": 1,
"hits": [
{
"_index": "feed",
"_type": "_doc",
"_id": "W3CS7IABovFpcGfZjfyu",
"_score": 1,
"_source": {
"title": "Test",
"date": "2022-05-22T16:21:09.159711",
"created_by": "finch",
"tlp": "white",
"published": true,
"references": [
"test",
"test"
],
"tags": [
"tag1",
"tag2"
],
"targeted_countries": [
"Italy",
"Germany"
],
"malware_families": [
"family1",
"family2"
],
"groups_related": [
"group1",
"griup2"
],
"attacks_ids": [
""
],
"indicators": [
{
"value": "testest",
"description": "This is a test",
"type": "sha256",
"role": "file",
"date": "2022-05-22T16:21:09.159560"
},
{
"value": "testest2",
"description": "This is a test 2",
"type": "ipv4",
"role": "c2",
"date": "2022-05-22T16:21:09.159699"
}
]
}
}
]
}
}
I would like to make this update: indicators[0].value = 'changed'
if _id == 'W3CS7IABovFpcGfZjfyu'
or if title == 'some_title'
or if indicators[0].role == 'c2'
I already tried with a script, but it seems I can't manage to get it work, I hope the explanation is clear, ask any question if not, thank you.
Edit 1:
I managed to make it work, however it needs the _id, still looking for a way to do that without it.
My partial solution:
update = Pulse.get(id="XHCz7IABovFpcGfZWfz9") #Pulse is my document
update.update(script="for (indicator in ctx._source.indicators) {if (indicator.value=='changed2') {indicator.value='changed3'}}")
# Modify depending on the value of a field inside the same nested object
I did everything according to this manual.
https://developers.onelogin.com/saml/python
But I didn't succeed.
I get an error.
Errors: invalid_response
There is no AttributeStatement on the Response
my settings:
https://i.stack.imgur.com/phH5s.png
{
"strict": true,
"debug": true,
"sp": {
"entityId": "http://siteproxy.ru/metadata/",
"assertionConsumerService": {
"url": "http://siteproxy.ru/?acs",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
},
"singleLogoutService": {
"url": "http://siteproxy.ru/?sls",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
},
"NameIDFormat": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
"x509cert": "",
"privateKey": ""
},
"idp": {
"entityId": "https://app.onelogin.com/saml/metadata/918d180a-a86a-406f-88db-a4dc44d9c150",
"singleSignOnService": {
"url": "https://supertestapp-dev.onelogin.com/trust/saml2/http-post/sso/918d180a-a86a-406f-88db-a4dc44d9c150",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
},
"singleLogoutService": {
"url": "https://supertestapp-dev.onelogin.com/trust/saml2/http-redirect/slo/1668425",
"binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
},
"x509cert": "MIID7jCCAtagAwIBAgIUA0HawEb/ruz25jOJEj9cBcKdcKswDQYJKoZIhvcNAQEFBQAwSzEWMBQGA1UECgwNTW9ub3NuYXAgSW5jLjEVMBMGA1UECwwMT25lTG9naW4gSWRQMRowGAYDVQQDDBFPbmVMb2dpbiBBY2NvdW50IDAeFw0yMDEwMTIxMjI0MzVaFw0yNTEwMTIxMjI0MzVaMEsxFjAUBgNVBAoMDU1vbm9zbmFwIEluYy4xFTATBgNVBAsMDE9uZUxvZ2luIElkUDEaMBgGA1UEAwwRT25lTG9naW4gQWNjb3VudCAwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC+kpEaaUr3zbftURxz1c05hVHMdCn4IlZ+fV5TmX0y3JgTCN0H5Y0cXrgCVmY3LvdcqjL8LEXlbsZyvMnzXLIChcXQoAp5JSMtKdl+KG4j6aPi37MWlxADV7bJoAtclxJayhO0AldIz3wdAhzfYkbQctYuamnm7Y6Qpyd3elctYNajVYVIxrYwWzMQjwdapDGfRsjK509u1fyYxkxwEnvfVDG2e77TJLD4SY+4bkHiMXVQf87E19xZtmT0tW7ANCCrOpn4D2Uf7OFUJRYHLPRJA+BD9AVjRbWnobcRd1wM69c+24z5G4S6ly4T0PqwM/Spms33lRqOE6uYacmW6i07AgMBAAGjgckwgcYwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUVAaut68tuEQGq9fYYzN4NpNqYS8wgYYGA1UdIwR/MH2AFFQGrrevLbhEBqvX2GMzeDaTamEvoU+kTTBLMRYwFAYDVQQKDA1Yw25vc25hcCBJbmMuMRUwEwYDVQQLDAxPbmVMb2dpbiBJZFAxGjAYBgNVBAMMEU9uZUxvZ2luIEFjY291bnQgghQDQdrARv+u7PbmM4kSP1wFwp1wqzAOBgNVHQ8BAf8EBAMCB4AwDQYJKoZIhvcNAQEFBQADggEBAEghILiZNTJJ8T+8iyOQ6JWtO5LYgpr/rjDJxxyL8hBCtpLHO4ruM5ZBLTytZBTbLJVc0fBUtivRXlCypJuEtCueH7mHf9YwEFsTHZvmY9Ywy9cDa3GydygugLPpABYzgDXRxcps7N4xcs83/4m5uZBFcQCu5YwPsUwZHwOX+CjIyPPs5lZd4ybBEviykDDXkE1LPaQtFhXHJ1X6OChG6QTxtVZWfVyDr8Js1fSxBU2mnEEhCBMQZoFe8aViahUqrCjKm429oHk1ibgTcZ23rIxY9ZaB/88PV+vfrbj1BJGs0MfStuX7YwgAiUSynbg3cw/tnj/9e1Sk9wDEhYEf+k8="
}
}
{
"security": {
"nameIdEncrypted": false,
"authnRequestsSigned": false,
"logoutRequestSigned": false,
"logoutResponseSigned": false,
"signMetadata": false,
"wantMessagesSigned": false,
"wantAssertionsSigned": false,
"wantNameId" : true,
"wantNameIdEncrypted": false,
"wantAssertionsEncrypted": false,
"allowSingleLabelDomains": false,
"signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256",
"digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256",
"rejectDeprecatedAlgorithm": true
},
"contactPerson": {
"technical": {
"givenName": "technical_name",
"emailAddress": "technical@example.com"
},
"support": {
"givenName": "support_name",
"emailAddress": "support@example.com"
}
},
"organization": {
"en-US": {
"name": "siteproxy",
"displayname": "siteproxy",
"url": "http://siteproxy.ru"
}
}
}
I realized that if I pass
"wantAttributeStatement": false,
It looks like the registration will take place. but I won't be able to get email and other attributes.
I'm using python3-saml https://github.com/onelogin/python3-saml/issues/301
I am a Java developer trying to write a script in python. I have the following JSON: (json1)
{
"name": "Data",
"id": "9c27ea56-6f9e-4694-a088-c1423c114e88",
"nodes": [
{
"id": "6eb8cb19-53e5-4f1d-88f3-119a3ee09b5d",
"properties": [
{
"visible": true,
"uniqueness": true,
"id": "ab6d8974-59a2-4f5d-b026-d9ed54886fa8",
"dataType": "String",
"shortName": "id",
"longName": "id"
},
{
"visible": true,
"uniqueness": false,
"id": "24f5547e-c360-4293-9e67-10b19d38c774",
"dataType": "String",
"shortName": "event_id",
"longName": "event_id"
},
{
"visible": true,
"uniqueness": false,
"id": "2929d02b-950f-4495-8b36-04ea4c142f8d",
"dataType": "String",
"shortName": "event_name",
"longName": "event_name"
},
{
"visible": true,
"uniqueness": false,
"id": "3456d02b-950f-4495-8b36-04ea4c142f8d",
"dataType": "String",
"shortName": "event_date",
"longName": "Event Date"
}
]
}
]
}
And I've another same json but some modifications. Something like this:(json2)
{
"name": "Data",
"id": "9c27ea56-6f9e-4694-a088-c1423c114e88",
"nodes": [
{
"id": "6eb8cb19-53e5-4f1d-88f3-119a3ee09b5d",
"properties": [
{
"visible": true,
"uniqueness": true,
"id": "ab6d8974-59a2-4f5d-b026-d9ed54886fa8",
"dataType": "String",
"shortName": "id",
"longName": "id"
},
{
"visible": true,
"uniqueness": false,
"id": "24f5547e-c360-4293-9e67-10b19d38c774",
"dataType": "String",
"shortName": "event_id",
"longName": "event_id"
},
{
"visible": true,
"uniqueness": false,
"id": "2929d02b-950f-4495-8b36-04ea4c142f8d",
"dataType": "String",
"shortName": "event_name",
"longName": "Event Name"
},
{
"visible": true,
"uniqueness": false,
"id": "123245g-950f-4495-8b36-04ea4c142f8d",
"dataType": "String",
"shortName": "event_status",
"longName": "Event Status"
}
]
}
]
}
Now here in json2, I've added some more objects in the properties array other than json1.
When I iterate over json1's "properties" array, I want to check whether each item is present in json2's array of "properties". And to check this I want to verify whether shortName is equal (in the json1 properties array and the json2 properties array).
Here json1 is considered to be as base object.
Then, how to filter the "properties" array based on shortName (from the "properties" array)? As properties is an array of objects.
The most convenient way is to use json.loads; thus you can then get the key, value pairs.
# load the json object
jsonlist = json.loads(your_json)
#Show the list from properties
print(jsonlist['properties'])
#get the keys from the json list
for field in jsonlist['properties']:
for key, value in field.items():
print(key, value)
I just downloaded some json from spotify and took a look into pd.json_normalize().
But if I normalise the data I still have dictionaries within my dataframe. Also setting the level doesn't help.
DATA I want to have in my dataframe:
{
"collaborative": false,
"description": "",
"external_urls": {
"spotify": "https://open.spotify.com/playlist/5"
},
"followers": {
"href": null,
"total": 0
},
"href": "https://api.spotify.com/v1/playlists/5?additional_types=track",
"id": "5",
"images": [
{
"height": 640,
"url": "https://i.scdn.co/image/a",
"width": 640
}
],
"name": "Another",
"owner": {
"display_name": "user",
"external_urls": {
"spotify": "https://open.spotify.com/user/user"
},
"href": "https://api.spotify.com/v1/users/user",
"id": "user",
"type": "user",
"uri": "spotify:user:user"
},
"primary_color": null,
"public": true,
"snapshot_id": "M2QxNTcyYTkMDc2",
"tracks": {
"href": "https://api.spotify.com/v1/playlists/100&additional_types=track",
"items": [
{
"added_at": "2020-12-13T18:34:09Z",
"added_by": {
"external_urls": {
"spotify": "https://open.spotify.com/user/user"
},
"href": "https://api.spotify.com/v1/users/user",
"id": "user",
"type": "user",
"uri": "spotify:user:user"
},
"is_local": false,
"primary_color": null,
"track": {
"album": {
"album_type": "album",
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/1dfeR4Had"
},
"href": "https://api.spotify.com/v1/artists/1dfDbWqFHLkxsg1d",
"id": "1dfeR4HaWDbWqFHLkxsg1d",
"name": "Q",
"type": "artist",
"uri": "spotify:artist:1dfeRqFHLkxsg1d"
}
],
"available_markets": [
"CA",
"US"
],
"external_urls": {
"spotify": "https://open.spotify.com/album/6wPXmlLzZ5cCa"
},
"href": "https://api.spotify.com/v1/albums/6wPXUJ9LzZ5cCa",
"id": "6wPXUmYJ9zZ5cCa",
"images": [
{
"height": 640,
"url": "https://i.scdn.co/image/ab676620a47",
"width": 640
},
{
"height": 300,
"url": "https://i.scdn.co/image/ab67616d0620a47",
"width": 300
},
{
"height": 64,
"url": "https://i.scdn.co/image/ab603e6620a47",
"width": 64
}
],
"name": "The (Deluxe ",
"release_date": "1920-07-17",
"release_date_precision": "day",
"total_tracks": 15,
"type": "album",
"uri": "spotify:album:6m5cCa"
},
"artists": [
{
"external_urls": {
"spotify": "https://open.spotify.com/artist/1dg1d"
},
"href": "https://api.spotify.com/v1/artists/1dsg1d",
"id": "1dfeR4HaWDbWqFHLkxsg1d",
"name": "Q",
"type": "artist",
"uri": "spotify:artist:1dxsg1d"
}
],
"available_markets": [
"CA",
"US"
],
"disc_number": 1,
"duration_ms": 21453,
"episode": false,
"explicit": false,
"external_ids": {
"isrc": "GBU6015"
},
"external_urls": {
"spotify": "https://open.spotify.com/track/5716J"
},
"href": "https://api.spotify.com/v1/tracks/5716J",
"id": "5716J",
"is_local": false,
"name": "Another",
"popularity": 73,
"preview_url": null,
"track": true,
"track_number": 3,
"type": "track",
"uri": "spotify:track:516J"
},
"video_thumbnail": {
"url": null
}
}
],
"limit": 100,
"next": null,
"offset": 0,
"previous": null,
"total": 1
},
"type": "playlist",
"uri": "spotify:playlist:fek"
}
So what are best practices to read nested data like this into one dataframe in pandas?
I'm glad for any advice.
EDIT:
so basically I want all keys as columns in my dataframe. But with normalise it stops at "tracks.items" and if I normalise this again I have the recursive problem again.
It depends on the information you are looking for. Take a look at pandas.read_json() to see if that can work. Also you can select data as such
json_output = {"collaborative": 'false',"description": "", "external_urls": {"spotify": "https://open.spotify.com/playlist/5"}}
df['collaborative'] = json_output['collaborative'] #set value of your df to value of returned json values
I am trying to import the Grafana dashboard using HTTP API by following Grafana
Grafana Version: 5.1.3
OS -Windows 10
This is what i tried
curl --user admin:admin "http://localhost:3000/api/dashboards/db" -X POST -H "Content-Type:application/json;charset=UTF-8" --data-binary @c:/Users/Mahadev/Desktop/Dashboard.json
and
Here is my python code
import requests
headers = {
'Content-Type': 'application/json;charset=UTF-8',
}
data = open('C:/Users/Mahadev/Desktop/Dashboard.json', 'rb').read()
response = requests.post('http://admin:admin@localhost:3000/api/dashboards/db', headers=headers, data=data)
print (response.text)
And output of both is:
[{"fieldNames":["Dashboard"],"classification":"RequiredError","message":"Required"}]
It is asking for a root property called dashboard in my json payload. Can anybody suggest how to use that property and what data I should provide?
If any one want to dig more here are some links.
https://github.com/grafana/grafana/issues/8193
https://github.com/grafana/grafana/issues/2816
https://github.com/grafana/grafana/issues/8193
https://community.grafana.com/t/how-can-i-import-a-dashboard-from-a-json-file/669
https://github.com/grafana/grafana/issues/273
https://github.com/grafana/grafana/issues/5811
https://stackoverflow.com/questions/39968111/unable-to-post-to-grafana-using-python3-module-requests
https://stackoverflow.com/questions/39954475/post-request-works-in-postman-but-not-in-python/39954514#39954514
https://www.bountysource.com/issues/44431991-use-api-to-import-json-file-error
https://github.com/grafana/grafana/issues/7029
Maybe you should try to download your dashboard from the API so you will have a "proper" json model to push afterwards?
You can download it with the following command :
curl -H "Authorization: Bearer $TOKEN" https://grafana.domain.tld/api/dashboards/uid/$DASHBOARD_UID
Another way to do it: you can download a dashboard JSON from the grafana website => grafana.com/dashboards and try to upload it with your current code? ;)
The dashboard field contain everything that will be display, alerts, graph etc....
Here is an example of dashboard.json :
{
"meta": {
"type": "db",
"canSave": true,
"canEdit": true,
"canAdmin": false,
"canStar": true,
"slug": "status-app",
"url": "/d/lOy3lIImz/status-app",
"expires": "0001-01-01T00:00:00Z",
"created": "2018-06-04T11:40:20+02:00",
"updated": "2018-06-14T17:51:23+02:00",
"updatedBy": "jean",
"createdBy": "jean",
"version": 89,
"hasAcl": false,
"isFolder": false,
"folderId": 0,
"folderTitle": "General",
"folderUrl": "",
"provisioned": false
},
"dashboard": {
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"id": 182,
"links": [],
"panels": [
{
"alert": {
"conditions": [
{
"evaluator": {
"params": [
1
],
"type": "lt"
},
"operator": {
"type": "and"
},
"query": {
"params": [
"A",
"5m",
"now"
]
},
"reducer": {
"params": [],
"type": "avg"
},
"type": "query"
}
],
"executionErrorState": "alerting",
"frequency": "60s",
"handler": 1,
"name": "Status of alert",
"noDataState": "alerting",
"notifications": [
{
"id": 7
}
]
},
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "Collectd",
"fill": 1,
"gridPos": {
"h": 7,
"w": 8,
"x": 0,
"y": 0
},
"id": 4,
"legend": {
"alignAsTable": true,
"avg": true,
"current": true,
"max": false,
"min": false,
"rightSide": false,
"show": true,
"total": false,
"values": true
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "Status",
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "processes_processes",
"orderByTime": "ASC",
"policy": "default",
"query": "SELECT mean(value) FROM \"processes_processes\" WHERE (\"instance\" = '' AND \"host\" = 'Webp01') AND $timeFilter GROUP BY time($interval) fill(null)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "instance",
"operator": "=",
"value": ""
},
{
"condition": "AND",
"key": "host",
"operator": "=",
"value": "Webp01"
}
]
}
],
"thresholds": [
{
"colorMode": "critical",
"fill": true,
"line": true,
"op": "lt",
"value": 1
}
],
"timeFrom": null,
"timeShift": null,
"title": "Status of ",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
}
],
"refresh": "5m",
"schemaVersion": 16,
"style": "dark",
"tags": [
"web",
"nodejs"
],
"templating": {
"list": []
},
"time": {
"from": "now/d",
"to": "now"
},
"timepicker": {
"hidden": false,
"refresh_intervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"timezone": "",
"title": "Status APP",
"uid": "lOy3lIImz",
"version": 89
}
}
Edit:
Here is a JSON snipper for templating your dashboard :
"templating": {
"list": [
{
"allValue": null,
"current": {
"text": "PRD_Web01",
"value": "PRD_Web01"
},
"datasource": "Collectd",
"hide": 0,
"includeAll": false,
"label": null,
"multi": false,
"name": "host",
"options": [],
"query": "SHOW TAG VALUES WITH KEY=host",
"refresh": 1,
"regex": "",
"sort": 0,
"tagValuesQuery": "",
"tags": [],
"tagsQuery": "",
"type": "query",
"useTags": false
},
{
"allValue": null,
"current": {
"text": "sda",
"value": "sda"
},
"datasource": "Collectd",
"hide": 0,
"includeAll": false,
"label": null,
"multi": false,
"name": "device",
"options": [],
"query": "SHOW TAG VALUES FROM \"disk_read\" WITH KEY = \"instance\"",
"refresh": 1,
"regex": "",
"sort": 0,
"tagValuesQuery": "",
"tags": [],
"tagsQuery": "",
"type": "query",
"useTags": false
}
]
},
As I read your answer, I guess you will be OK with this ;). I will try to keep a better eye on this thread
Can you show what your dashboard json looks like? The json MUST contain a key dashboard in it with all the details inside its value, like the following:
{
"dashboard": {
"id": null,
"uid": null,
"title": "Production Overview",
"tags": [ "templated" ],
"timezone": "browser",
"schemaVersion": 16,
"version": 0
},
"folderId": 0,
"overwrite": false
}