How to get an EC2 instance cost in Python?

I launch an instance, stress its CPU, and delete the instance.
Using the AWS CDK this takes a couple of minutes, and I'm looping over 100
instance types (for benchmarking purposes).
How can I get each instance's cost for that run programmatically (AWS CLI or boto3)?
I have the instance ID.

import boto3
import pprint

client = boto3.client('ce')
response = client.get_cost_and_usage_with_resources(
    Granularity='DAILY',
    Metrics=["BlendedCost", "UnblendedCost", "UsageQuantity"],
    TimePeriod={
        'Start': '2021-12-20',
        'End': '2021-12-28'
    },
    Filter={
        "Dimensions": {
            "Key": "SERVICE",
            "Values": ["Amazon Elastic Compute Cloud - Compute"]
        }
    },
    GroupBy=[{
        "Type": "DIMENSION",
        "Key": "RESOURCE_ID"
    }])
pprint.pprint(response)
Returns (shortened excerpt):
{'DimensionValueAttributes': [],
'GroupDefinitions': [{'Key': 'RESOURCE_ID', 'Type': 'DIMENSION'}],
'ResponseMetadata': {'HTTPHeaders': {'cache-control': 'no-cache',
'connection': 'keep-alive',
'content-length': '8461',
'content-type': 'application/x-amz-json-1.1',
'date': 'Wed, 29 Dec 2021 09:08:16 GMT',
'x-amzn-requestid': '2de9c92e-6d1c-4b1c-9087-bee17a41cb4f'},
'HTTPStatusCode': 200,
'RequestId': '2de9c92e-6d1c-4b1c-9087-bee17a41cb4f',
'RetryAttempts': 0},
'ResultsByTime': [{'Estimated': True,
'Groups': [],
'TimePeriod': {'End': '2021-12-21T00:00:00Z',
'Start': '2021-12-20T00:00:00Z'},
'Total': {'BlendedCost': {'Amount': '0', 'Unit': 'USD'},
'UnblendedCost': {'Amount': '0', 'Unit': 'USD'},
'UsageQuantity': {'Amount': '0', 'Unit': 'N/A'}}},
{'Estimated': True,
'Groups': [],
'TimePeriod': {'End': '2021-12-22T00:00:00Z',
'Start': '2021-12-21T00:00:00Z'},
'Total': {'BlendedCost': {'Amount': '0', 'Unit': 'USD'},
'UnblendedCost': {'Amount': '0', 'Unit': 'USD'},
'UsageQuantity': {'Amount': '0', 'Unit': 'N/A'}}},
{'Estimated': True,
'Groups': [],
'TimePeriod': {'End': '2021-12-23T00:00:00Z',
'Start': '2021-12-22T00:00:00Z'},
'Total': {'BlendedCost': {'Amount': '0', 'Unit': 'USD'},
'UnblendedCost': {'Amount': '0', 'Unit': 'USD'},
'UsageQuantity': {'Amount': '0', 'Unit': 'N/A'}}},
{'Estimated': True,
'Groups': [{'Keys': ['i-03ffa7c932a515d76'],
'Metrics': {'BlendedCost': {'Amount': '0.0027617772',
'Unit': 'USD'},
... (shortened here) ...
'TimePeriod': {'End': '2021-12-27T00:00:00Z',
'Start': '2021-12-26T00:00:00Z'},
'Total': {}},
{'Estimated': True,
'Groups': [{'Keys': ['i-0665a330b242714f2'],
'Metrics': {'BlendedCost': {'Amount': '0.216643501',
'Unit': 'USD'},
'UnblendedCost': {'Amount': '0.216643501',
'Unit': 'USD'},
'UsageQuantity': {'Amount': '0.554054168',
'Unit': 'N/A'}}},
{'Keys': ['i-080780d0d7e3394dd'],
'Metrics': {'BlendedCost': {'Amount': '2.7341269802',
'Unit': 'USD'},
'UnblendedCost': {'Amount': '2.7341269802',
'Unit': 'USD'},
'UsageQuantity': {'Amount': '1.0241218603',
'Unit': 'N/A'}}},
{'Keys': ['i-0b95613810475903b'],
'Metrics': {'BlendedCost': {'Amount': '0.432736006',
'Unit': 'USD'},
'UnblendedCost': {'Amount': '0.432736006',
'Unit': 'USD'},
'UsageQuantity': {'Amount': '0.5530218935',
'Unit': 'N/A'}}},
{'Keys': ['i-0eab899e392cf4f35'],
'Metrics': {'BlendedCost': {'Amount': '0.5645311508',
'Unit': 'USD'},
'UnblendedCost': {'Amount': '0.5645311508',
'Unit': 'USD'},
'UsageQuantity': {'Amount': '1.1896368629',
'Unit': 'N/A'}}}],
'TimePeriod': {'End': '2021-12-28T00:00:00Z',
'Start': '2021-12-27T00:00:00Z'},
'Total': {}}]}
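
For reference, a minimal sketch of pulling one instance's total out of a response shaped like the one above; the instance ID is taken from the excerpt, and UnblendedCost is just one of the requested metrics. Keep in mind that Cost Explorer only retains resource-level (RESOURCE_ID) data for a short trailing window (roughly the last 14 days) and the hourly/resource-level feature has to be enabled in the billing preferences.

# Sum one instance's UnblendedCost across all returned days.
# `response` is the get_cost_and_usage_with_resources() result shown above;
# the instance ID below is one of the IDs from that excerpt.
instance_id = 'i-080780d0d7e3394dd'

total = 0.0
for day in response['ResultsByTime']:
    for group in day.get('Groups', []):
        if instance_id in group['Keys']:
            total += float(group['Metrics']['UnblendedCost']['Amount'])

print(f"{instance_id}: {total:.6f} USD")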

Related

having trouble passing multiple dictionaries for the argument record_path in json_normalize

I'm having trouble completely unnesting this JSON from an API.
[{'id': 1,
'name': 'Buzz',
'tagline': 'A Real Bitter Experience.',
'first_brewed': '09/2007',
'description': 'A light, crisp and bitter IPA brewed with English and American hops. A small batch brewed only once.',
'image_url': 'https://images.punkapi.com/v2/keg.png',
'abv': 4.5,
'ibu': 60,
'target_fg': 1010,
'target_og': 1044,
'ebc': 20,
'srm': 10,
'ph': 4.4,
'attenuation_level': 75,
'volume': {'value': 20, 'unit': 'litres'},
'boil_volume': {'value': 25, 'unit': 'litres'},
'method': {'mash_temp': [{'temp': {'value': 64, 'unit': 'celsius'},
'duration': 75}],
'fermentation': {'temp': {'value': 19, 'unit': 'celsius'}},
'twist': None},
'ingredients': {'malt': [{'name': 'Maris Otter Extra Pale',
'amount': {'value': 3.3, 'unit': 'kilograms'}},
{'name': 'Caramalt', 'amount': {'value': 0.2, 'unit': 'kilograms'}},
{'name': 'Munich', 'amount': {'value': 0.4, 'unit': 'kilograms'}}],
'hops': [{'name': 'Fuggles',
'amount': {'value': 25, 'unit': 'grams'},
'add': 'start',
'attribute': 'bitter'},
{'name': 'First Gold',
'amount': {'value': 25, 'unit': 'grams'},
'add': 'start',
'attribute': 'bitter'},
{'name': 'Fuggles',
'amount': {'value': 37.5, 'unit': 'grams'},
'add': 'middle',
'attribute': 'flavour'},
{'name': 'First Gold',
'amount': {'value': 37.5, 'unit': 'grams'},
'add': 'middle',
'attribute': 'flavour'},
{'name': 'Cascade',
'amount': {'value': 37.5, 'unit': 'grams'},
'add': 'end',
'attribute': 'flavour'}],
'yeast': 'Wyeast 1056 - American Ale™'},
'food_pairing': ['Spicy chicken tikka masala',
'Grilled chicken quesadilla',
'Caramel toffee cake'],
'brewers_tips': 'The earthy and floral aromas from the hops can be overpowering. Drop a little Cascade in at the end of the boil to lift the profile with a bit of citrus.',
'contributed_by': 'Sam Mason <samjbmason>'},
{'id': 2,
'name': 'Trashy Blonde',
'tagline': "You Know You Shouldn't",
'first_brewed': '04/2008',
'description': 'A titillating, neurotic, peroxide punk of a Pale Ale. Combining attitude, style, substance, and a little bit of low self esteem for good measure; what would your mother say? The seductive lure of the sassy passion fruit hop proves too much to resist. All that is even before we get onto the fact that there are no additives, preservatives, pasteurization or strings attached. All wrapped up with the customary BrewDog bite and imaginative twist.',
'image_url': 'https://images.punkapi.com/v2/2.png',
'abv': 4.1,
'ibu': 41.5,
'target_fg': 1010,
'target_og': 1041.7,
'ebc': 15,
'srm': 15,
'ph': 4.4,
'attenuation_level': 76,
'volume': {'value': 20, 'unit': 'litres'},
'boil_volume': {'value': 25, 'unit': 'litres'},
'method': {'mash_temp': [{'temp': {'value': 69, 'unit': 'celsius'},
'duration': None}],
'fermentation': {'temp': {'value': 18, 'unit': 'celsius'}},
'twist': None},
'ingredients': {'malt': [{'name': 'Maris Otter Extra Pale',
'amount': {'value': 3.25, 'unit': 'kilograms'}},
{'name': 'Caramalt', 'amount': {'value': 0.2, 'unit': 'kilograms'}},
{'name': 'Munich', 'amount': {'value': 0.4, 'unit': 'kilograms'}}],
'hops': [{'name': 'Amarillo',
'amount': {'value': 13.8, 'unit': 'grams'},
'add': 'start',
'attribute': 'bitter'},
{'name': 'Simcoe',
'amount': {'value': 13.8, 'unit': 'grams'},
'add': 'start',
'attribute': 'bitter'},
{'name': 'Amarillo',
'amount': {'value': 26.3, 'unit': 'grams'},
'add': 'end',
'attribute': 'flavour'},
{'name': 'Motueka',
'amount': {'value': 18.8, 'unit': 'grams'},
'add': 'end',
'attribute': 'flavour'}],
'yeast': 'Wyeast 1056 - American Ale™'},
'food_pairing': ['Fresh crab with lemon',
'Garlic butter dipping sauce',
'Goats cheese salad',
'Creamy lemon bar doused in powdered sugar'],
'brewers_tips': 'Be careful not to collect too much wort from the mash. Once the sugars are all washed out there are some very unpleasant grainy tasting compounds that can be extracted into the wort.',
'contributed_by': 'Sam Mason <samjbmason>'}]
I was able to unnest it one level using json_normalize:
import requests
import pandas as pd

url = "https://api.punkapi.com/v2/beers"
data = requests.get(url).json()
pd.json_normalize(data)
This is an image of the output after using json_normalize.
Now, to unnest the column 'method.mash_temp', I included record_path:
pd.json_normalize(
    data,
    record_path=['method', 'mash_temp'],
    meta=['id', 'name']
)
But I am having trouble adding the other columns ('ingredients.malt', 'ingredients.hops'), which are lists of dictionaries, to the record_path argument.
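
pd.json_normalize accepts only one record_path per call, so one workaround is to normalize each list-of-dicts path separately and merge the results back on the meta key(s). A minimal sketch of that idea, reusing the data above (the record_prefix values are arbitrary labels, and the merge yields one row per mash/malt/hop combination per beer):

import requests
import pandas as pd

data = requests.get("https://api.punkapi.com/v2/beers").json()

# Normalize each nested list separately, keeping 'id' as the join key.
mash = pd.json_normalize(data, record_path=['method', 'mash_temp'],
                         meta=['id', 'name'], record_prefix='mash_temp.')
malt = pd.json_normalize(data, record_path=['ingredients', 'malt'],
                         meta=['id'], record_prefix='malt.')
hops = pd.json_normalize(data, record_path=['ingredients', 'hops'],
                         meta=['id'], record_prefix='hops.')

# Join the pieces back together on the beer id.
combined = mash.merge(malt, on='id').merge(hops, on='id')
print(combined.head())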

Iterating over JSON data and printing. (or creating Pandas DataFrame from JSON file)

I'm trying to use Python to print specific values from a JSON file that I pulled from an API. From what I understand, I am pulling it as a JSON file that has a list of dictionaries of players, with a nested dictionary for each player containing their data (i.e. name, team, etc.).
I'm running into issues printing the values within the JSON file, as each character is printing on a separate line.
The end result I am trying to get to is a Pandas DataFrame containing all the values from the JSON file, but I can't even seem to iterate through the JSON file correctly.
Here is my code:
import json
import requests

url = "https://api-football-v1.p.rapidapi.com/v3/players"
querystring = {"league": "39", "season": "2020", "page": "2"}
headers = {
    "X-RapidAPI-Host": "api-football-v1.p.rapidapi.com",
    "X-RapidAPI-Key": "xxxxxkeyxxxxx"
}

response = requests.request("GET", url, headers=headers, params=querystring).json()
response_dump = json.dumps(response)

for item in response_dump:
    for player_item in item:
        print(player_item)
This is the output when I print the JSON response (first two items):
{'get': 'players', 'parameters': {'league': '39', 'page': '2', 'season': '2020'}, 'errors': [], 'results': 20, 'paging': {'current': 2, 'total': 37}, 'response': [{'player': {'id': 301, 'name': 'Benjamin Luke Woodburn', 'firstname': 'Benjamin Luke', 'lastname': 'Woodburn', 'age': 23, 'birth': {'date': '1999-10-15', 'place': 'Nottingham', 'country': 'England'}, 'nationality': 'Wales', 'height': '174 cm', 'weight': '72 kg', 'injured': False, 'photo': 'https://media.api-sports.io/football/players/301.png'}, 'statistics': [{'team': {'id': 40, 'name': 'Liverpool', 'logo': 'https://media.api-sports.io/football/teams/40.png'}, 'league': {'id': 39, 'name': 'Premier League', 'country': 'England', 'logo': 'https://media.api-sports.io/football/leagues/39.png', 'flag': 'https://media.api-sports.io/flags/gb.svg', 'season': 2020}, 'games': {'appearences': 0, 'lineups': 0, 'minutes': 0, 'number': None, 'position': 'Attacker', 'rating': None, 'captain': False}, 'substitutes': {'in': 0, 'out': 0, 'bench': 3}, 'shots': {'total': None, 'on': None}, 'goals': {'total': 0, 'conceded': 0, 'assists': None, 'saves': None}, 'passes': {'total': None, 'key': None, 'accuracy': None}, 'tackles': {'total': None, 'blocks': None, 'interceptions': None}, 'duels': {'total': None, 'won': None}, 'dribbles': {'attempts': None, 'success': None, 'past': None}, 'fouls': {'drawn': None, 'committed': None}, 'cards': {'yellow': 0, 'yellowred': 0, 'red': 0}, 'penalty': {'won': None, 'commited': None, 'scored': 0, 'missed': 0, 'saved': None}}]}, {'player': {'id': 518, 'name': 'Meritan Shabani', 'firstname': 'Meritan', 'lastname': 'Shabani', 'age': 23, 'birth': {'date': '1999-03-15', 'place': 'München', 'country': 'Germany'}, 'nationality': 'Germany', 'height': '185 cm', 'weight': '78 kg', 'injured': False, 'photo': 'https://media.api-sports.io/football/players/518.png'}, 'statistics': [{'team': {'id': 39, 'name': 'Wolves', 'logo': 'https://media.api-sports.io/football/teams/39.png'}, 'league': {'id': 39, 'name': 'Premier League', 'country': 'England', 'logo': 'https://media.api-sports.io/football/leagues/39.png', 'flag': 'https://media.api-sports.io/flags/gb.svg', 'season': 2020}, 'games': {'appearences': 0, 'lineups': 0, 'minutes': 0, 'number': None, 'position': 'Midfielder', 'rating': None, 'captain': False}, 'substitutes': {'in': 0, 'out': 0, 'bench': 3}, 'shots': {'total': None, 'on': None}, 'goals': {'total': 0, 'conceded': 0, 'assists': None, 'saves': None}, 'passes': {'total': None, 'key': None, 'accuracy': None}, 'tackles': {'total': None, 'blocks': None, 'interceptions': None}, 'duels': {'total': None, 'won': None}, 'dribbles': {'attempts': None, 'success': None, 'past': None}, 'fouls': {'drawn': None, 'committed': None}, 'cards': {'yellow': 0, 'yellowred': 0, 'red': 0}, 'penalty': {'won': None, 'commited': None, 'scored': 0, 'missed': 0, 'saved': None}}]},
This is the data type of each layer of the JSON file, from when I iterated through it with a For loop:
print(type(response))       # <class 'dict'>
print(type(response_dump))  # <class 'str'>
print(type(item))           # <class 'str'>
print(type(player_item))    # <class 'str'>
In my opinion, you do not need json.dumps(); just iterate over the parsed JSON from the response:
for player in response['response']:
    print(player)
{'player': {'id': 301, 'name': 'Benjamin Luke Woodburn', 'firstname': 'Benjamin Luke', 'lastname': 'Woodburn', 'age': 23, 'birth': {'date': '1999-10-15', 'place': 'Nottingham', 'country': 'England'}, 'nationality': 'Wales', 'height': '174 cm', 'weight': '72 kg', 'injured': False, 'photo': 'https://media.api-sports.io/football/players/301.png'}, 'statistics': [{'team': {'id': 40, 'name': 'Liverpool', 'logo': 'https://media.api-sports.io/football/teams/40.png'}, 'league': {'id': 39, 'name': 'Premier League', 'country': 'England', 'logo': 'https://media.api-sports.io/football/leagues/39.png', 'flag': 'https://media.api-sports.io/flags/gb.svg', 'season': 2020}, 'games': {'appearences': 0, 'lineups': 0, 'minutes': 0, 'number': None, 'position': 'Attacker', 'rating': None, 'captain': False}, 'substitutes': {'in': 0, 'out': 0, 'bench': 3}, 'shots': {'total': None, 'on': None}, 'goals': {'total': 0, 'conceded': 0, 'assists': None, 'saves': None}, 'passes': {'total': None, 'key': None, 'accuracy': None}, 'tackles': {'total': None, 'blocks': None, 'interceptions': None}, 'duels': {'total': None, 'won': None}, 'dribbles': {'attempts': None, 'success': None, 'past': None}, 'fouls': {'drawn': None, 'committed': None}, 'cards': {'yellow': 0, 'yellowred': 0, 'red': 0}, 'penalty': {'won': None, 'commited': None, 'scored': 0, 'missed': 0, 'saved': None}}]}
{'player': {'id': 518, 'name': 'Meritan Shabani', 'firstname': 'Meritan', 'lastname': 'Shabani', 'age': 23, 'birth': {'date': '1999-03-15', 'place': 'München', 'country': 'Germany'}, 'nationality': 'Germany', 'height': '185 cm', 'weight': '78 kg', 'injured': False, 'photo': 'https://media.api-sports.io/football/players/518.png'}, 'statistics': [{'team': {'id': 39, 'name': 'Wolves', 'logo': 'https://media.api-sports.io/football/teams/39.png'}, 'league': {'id': 39, 'name': 'Premier League', 'country': 'England', 'logo': 'https://media.api-sports.io/football/leagues/39.png', 'flag': 'https://media.api-sports.io/flags/gb.svg', 'season': 2020}, 'games': {'appearences': 0, 'lineups': 0, 'minutes': 0, 'number': None, 'position': 'Midfielder', 'rating': None, 'captain': False}, 'substitutes': {'in': 0, 'out': 0, 'bench': 3}, 'shots': {'total': None, 'on': None}, 'goals': {'total': 0, 'conceded': 0, 'assists': None, 'saves': None}, 'passes': {'total': None, 'key': None, 'accuracy': None}, 'tackles': {'total': None, 'blocks': None, 'interceptions': None}, 'duels': {'total': None, 'won': None}, 'dribbles': {'attempts': None, 'success': None, 'past': None}, 'fouls': {'drawn': None, 'committed': None}, 'cards': {'yellow': 0, 'yellowred': 0, 'red': 0}, 'penalty': {'won': None, 'commited': None, 'scored': 0, 'missed': 0, 'saved': None}}]}
or
for player in response['response']:
    print(player['player'])
{'id': 301, 'name': 'Benjamin Luke Woodburn', 'firstname': 'Benjamin Luke', 'lastname': 'Woodburn', 'age': 23, 'birth': {'date': '1999-10-15', 'place': 'Nottingham', 'country': 'England'}, 'nationality': 'Wales', 'height': '174 cm', 'weight': '72 kg', 'injured': False, 'photo': 'https://media.api-sports.io/football/players/301.png'}
{'id': 518, 'name': 'Meritan Shabani', 'firstname': 'Meritan', 'lastname': 'Shabani', 'age': 23, 'birth': {'date': '1999-03-15', 'place': 'München', 'country': 'Germany'}, 'nationality': 'Germany', 'height': '185 cm', 'weight': '78 kg', 'injured': False, 'photo': 'https://media.api-sports.io/football/players/518.png'}
To get a DataFrame, simply call pd.json_normalize(). Since your question is not entirely clear, I am not sure which information is needed or how it should be displayed; that is best asked as a new question with exactly that focus:
pd.json_normalize(response['response'])
EDIT
Based on your comment and improvement:
pd.concat([pd.json_normalize(response, ['response']),
           pd.json_normalize(response, ['response', 'statistics'])], axis=1) \
  .drop(['statistics'], axis=1)
The result is a DataFrame with one row per player (row 0: Benjamin Luke Woodburn, Liverpool; row 1: Meritan Shabani, Wolves) and 59 flattened columns:
player.id, player.name, player.firstname, player.lastname, player.age, player.birth.date, player.birth.place, player.birth.country, player.nationality, player.height, player.weight, player.injured, player.photo, team.id, team.name, team.logo, league.id, league.name, league.country, league.logo, league.flag, league.season, games.appearences, games.lineups, games.minutes, games.number, games.position, games.rating, games.captain, substitutes.in, substitutes.out, substitutes.bench, shots.total, shots.on, goals.total, goals.conceded, goals.assists, goals.saves, passes.total, passes.key, passes.accuracy, tackles.total, tackles.blocks, tackles.interceptions, duels.total, duels.won, dribbles.attempts, dribbles.success, dribbles.past, fouls.drawn, fouls.committed, cards.yellow, cards.yellowred, cards.red, penalty.won, penalty.commited, penalty.scored, penalty.missed, penalty.saved

ccxt OKEx placing orders

I placed a DEMO order on OKEx with amount 246 and price 0.46. When I looked on the site, the order amount was more than 11k.
I fetched info about the order:
{'info': {'accFillSz': '0', 'avgPx': '', 'cTime': '1652262833825', 'category': 'normal', 'ccy': '', 'clOrdId': 'e847386590ce4dBCc812b22b16d7807c', 'fee': '0', 'feeCcy': 'USDT', 'fillPx': '', 'fillSz': '0', 'fillTime': '', 'instId': 'XRP-USDT-SWAP', 'instType': 'SWAP', 'lever': '1', 'ordId': '444557778278035458', 'ordType': 'limit', 'pnl': '0', 'posSide': 'long', 'px': '0.45693', 'rebate': '0', 'rebateCcy': 'USDT', 'side': 'buy', 'slOrdPx': '-1', 'slTriggerPx': '0.44779', 'slTriggerPxType': 'mark', 'source': '', 'state': 'live', 'sz': '246', 'tag': '', 'tdMode': 'isolated', 'tgtCcy': '', 'tpOrdPx': '-1', 'tpTriggerPx': '0.46606', 'tpTriggerPxType': 'mark', 'tradeId': '', 'uTime': '1652262833825'}, 'id': '444557778278035458', 'clientOrderId': 'e847386590ce4dBCc812b22b16d7807c', 'timestamp': 1652262833825, 'datetime': '2022-05-11T09:53:53.825Z', 'lastTradeTimestamp': None, 'symbol': 'XRP/USDT:USDT', 'type': 'limit', 'timeInForce': None, 'postOnly': None, 'side': 'buy', 'price': 0.45693, 'stopPrice': 0.44779, 'average': None, 'cost': 0.0, 'amount': 246.0, 'filled': 0.0, 'remaining': 246.0, 'status': 'open', 'fee': {'cost': 0.0, 'currency': 'USDT'}, 'trades': [], 'fees': [{'cost': 0.0, 'currency': 'USDT'}]}
and amount is 246.
Here is my code:
import ccxt

# API_KEY, API_SECRET, API_PASSPHRASE, PAIR, ORDER_TYPE,
# take_profit, stop_loss, summa and price are defined elsewhere.
exchange = ccxt.okx(
    {
        'apiKey': API_KEY,
        'secret': API_SECRET,
        'password': API_PASSPHRASE,
        'options': {
            'defaultType': 'swap'
        },
        'headers': {
            'x-simulated-trading': '1'
        }
    }
)
exchange.load_markets()
market = exchange.market(PAIR)
params = {
    'tdMode': 'isolated',
    'posSide': 'long',
    'instId': market['id'],
    'side': 'buy',
    'sz': 246,
    'tpOrdPx': '-1',
    'slOrdPx': '-1',
    'tpTriggerPx': str(take_profit),
    'slTriggerPx': str(stop_loss),
    'tpTriggerPxType': 'mark',
    'slTriggerPxType': 'mark',
}
order = exchange.create_order(
    f"{PAIR}", ORDER_TYPE, 'buy', summa, price, params=params)
info = exchange.fetch_order(order['id'], PAIR)
print(info)
What am I doing wrong?
For starters, you can only buy multiples of 100 XRP for this contract, so you can only buy 200 or 300, not 246.
Secondly, it looks like there's a multiplier of 100 being applied in the API, where 1 contract = 100 XRP. I was able to deduce this by entering 24,600 XRP, which gives roughly the $11k you mentioned.
In your case, if you were to buy 200 or 300 XRP, you would need to enter 2 or 3 as the amount in the API request.
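
As a rough sketch of how to account for that multiplier with ccxt itself: the unified market structure exposes a contractSize field for swap markets, so a desired XRP quantity can be converted to contracts before ordering (the symbol and desired_xrp below are placeholder values, and the 100-XRP contract size comes from the deduction above):

import ccxt

exchange = ccxt.okx()
exchange.load_markets()
market = exchange.market('XRP/USDT:USDT')

desired_xrp = 300                                # should be a multiple of the contract size
contract_size = market.get('contractSize') or 1  # 100 XRP per contract for this swap
amount_contracts = desired_xrp / contract_size   # e.g. 300 / 100 = 3
print(amount_contracts)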

how do I check the complete array?

I am trying to check whether a given order id matches the 'id' field in the list of dicts given below.
How do I check the complete array to find out whether the id is present or not?
order_id = 121553197332
inf = [...]  # the array given below

if inf[n]["id"] == order_id:
    info = inf[n]["info"]
else:
    # do something here
    ...
return info
The array I need to check:
[{'amount': 0.3,
'id': '121553197332',
'info': {'avgFillPrice': None,
'id': '121553197332',
'ioc': False,
'liquidation': False,
'market': 'FTT/USD',
'postOnly': True,
'price': '40.0',
'reduceOnly': False,
'remainingSize': '0.3',
'side': 'buy',
'size': '0.3',
'status': 'open',
'type': 'limit'},
'side': 'buy',
'status': 'open',
'stopPrice': None,
'symbol': 'FTT/USD',
'trades': [],
'type': 'limit'},
{'amount': 0.3,
'id': '121553197726',
'info': {'avgFillPrice': None,
'future': None,
'id': '121553197726',
'ioc': False,
'liquidation': False,
'market': 'FTT/USD',
'postOnly': True,
'price': '40.062',
'side': 'buy',
'size': '0.3',
'status': 'open',
'type': 'limit'},
'postOnly': True,
'price': 40.062,
'remaining': 0.3,
'side': 'buy',
'status': 'open',
'stopPrice': None,
'symbol': 'FTT/USD',
'trades': [],
'type': 'limit'}]
I need to return the matching element's info at the end.
You can try using a for loop, like this:
inf = [{ 'amount': 0.3,
'id': '121553197332',
'info': { 'avgFillPrice': None,
'id': '121553197332',
'ioc': False,
'liquidation': False,
'market': 'FTT/USD',
'postOnly': True,
'price': '40.0',
'reduceOnly': False,
'remainingSize': '0.3',
'side': 'buy',
'size': '0.3',
'status': 'open',
'type': 'limit' },
'side': 'buy',
'status': 'open',
'stopPrice': None,
'symbol': 'FTT/USD',
'trades': [],
'type': 'limit' },
{ 'amount': 0.3,
'id': '121553197726',
'info': { 'avgFillPrice': None,
'future': None,
'id': '121553197726',
'ioc': False,
'liquidation': False,
'market': 'FTT/USD',
'postOnly': True,
'price': '40.062',
'side': 'buy',
'size': '0.3',
'status': 'open',
'type': 'limit' },
'postOnly': True,
'price': 40.062,
'remaining': 0.3,
'side': 'buy',
'status': 'open',
'stopPrice': None,
'symbol': 'FTT/USD',
'trades': [],
'type': 'limit' }]
order_id = 121553197332

for inner_data in inf:
    # the ids in the data are strings, so compare against str(order_id)
    if inner_data['id'] == str(order_id):
        print(inner_data)
        info = inner_data["info"]
Use next:
# Sample data (reduced)
orders = [{
'id': '121553197332',
'info': { 'id': '121553197332' },
}, {
'id': '121553197726',
'info': { 'id': '121553197726' },
}]
# The order to find
order_id = 121553197332
# Find it
found = next((d for d in orders if int(d["id"]) == order_id), None)
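next() returns the default (None here) when nothing matches, so the info can then be pulled out safely:
info = found["info"] if found else None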
Here is a snippet that is ready to run. The target variable is the one you are searching for in the records.
data = [{'amount': 0.3,
'id': '121553197332',
'info': {'avgFillPrice': None,
'id': '121553197332',
'ioc': False,
'liquidation': False,
'market': 'FTT/USD',
'postOnly': True,
'price': '40.0',
'reduceOnly': False,
'remainingSize': '0.3',
'side': 'buy',
'size': '0.3',
'status': 'open',
'type': 'limit'},
'side': 'buy',
'status': 'open',
'stopPrice': None,
'symbol': 'FTT/USD',
'trades': [],
'type': 'limit'},
{'amount': 0.3,
'id': '121553197726',
'info': {'avgFillPrice': None,
'future': None,
'id': '121553197726',
'ioc': False,
'liquidation': False,
'market': 'FTT/USD',
'postOnly': True,
'price': '40.062',
'side': 'buy',
'size': '0.3',
'status': 'open',
'type': 'limit'},
'postOnly': True,
'price': 40.062,
'remaining': 0.3,
'side': 'buy',
'status': 'open',
'stopPrice': None,
'symbol': 'FTT/USD',
'trades': [],
'type': 'limit'}]
target = '121553197726'

for d in data:
    if d['id'] == target:
        info = d["info"]
        print(info)

doing a 'group by', 'sum' and 'count' at one time using pandas

I have a dataframe that I created from a list of dictionaries (info_closed) in the following way:
df = pd.DataFrame(info_closed, columns=['type', 'origQty', 'executedQty'])
The result is as follows:
type origQty executedQty
0 LIMIT 0.00362000 0.00362000
1 MARKET 0.00200000 0.00200000
2 MARKET 0.00150000 0.00150000
3 MARKET 0.00150000 0.00150000
4 LIMIT 0.00150000 0.00150000
5 LIMIT 0.00150000 0.00150000
6 MARKET 0.00199500 0.00199500
7 LIMIT 0.00150000 0.00150000
8 MARKET 0.00149800 0.00149800
9 LIMIT 0.00150000 0.00150000
10 LIMIT 0.00149900 0.00149900
11 LIMIT 0.00150000 0.00150000
12 MARKET 0.00149800 0.00149800
[... snip ...]
I am trying to create a result in the following manner:
type origQty executedQty Count
0 LIMIT 13.03 15.01 23
1 MARKET 122.01 40.00 54
[.. snip ...]
Basically, this would be a groupby('type') with a sum(origQty) and a sum(executedQty) within each 'type', plus a count of the records used to calculate those sums.
I tried:
g = df.groupby(['type'])['origQty', 'executedQty'].sum().reset_index()
but the results come out as follows:
type origQty executedQty
0 LIMIT 0.003620000.001500000.001500000.001500000.0015... 0.003620000.001500000.001500000.001500000.0015...
1 LIMIT_MAKER 0.001499000.001500000.001500000.001500000.0014... 0.001499000.001500000.001500000.001500000.0014...
2 MARKET 0.002000000.001500000.001500000.001995000.0014... 0.002000000.001500000.001500000.001995000.0014...
3 STOP_LOSS_LIMIT 0.00150000 0.00150000
Question: what am I doing wrong?
TIA
ETA:
Thanks all for the provided solutions!
I ran some but I was still getting this type of output:
origQty
executedQty
type
LIMIT_MAKER 0.001499000.001500000.001500000.001500000.0014... 0.001499000.001500000.001500000.001500000.0014...
The original data was like this (it is a combination of data from the Binance exchange and the ccxt wrapper code; I was attempting to isolate the Binance data only, which is associated with ['info']):
[{'info': {'symbol': 'BTCUSDT', 'orderId': 2538903025, 'orderListId':
-1, 'clientOrderId': 'ENDsgXoqtv2ct5jizrfeQe', 'price': '9638.00000000', 'origQty': '0.00150000', 'executedQty': '0.00150000',
'cummulativeQuoteQty': '14.45700000', 'status': 'FILLED',
'timeInForce': 'GTC', 'type': 'LIMIT_MAKER', 'side': 'BUY',
'stopPrice': '0.00000000', 'icebergQty': '0.00000000', 'time':
1592879158045, 'updateTime': 1592879162299, 'isWorking': True,
'origQuoteOrderQty': '0.00000000'}, 'id': '2538903025',
'clientOrderId': 'ENDsgXoqtv2ct5jizrfeQe', 'timestamp': 1592879158045,
'datetime': '2020-06-23T02:25:58.045Z', 'lastTradeTimestamp': None,
'symbol': 'BTC/USDT', 'type': 'limit', 'side': 'buy', 'price': 9638.0,
'amount': 0.0015, 'cost': 14.457, 'average': 9638.0, 'filled': 0.0015,
'remaining': 0.0, 'status': 'closed', 'fee': None, 'trades': None},
{'info': {'symbol': 'BTCUSDT', 'orderId': 2539250884, 'orderListId':
-1, 'clientOrderId': '5UFBYwDF6b9qJ1UWNsvOYU', 'price': '9653.00000000', 'origQty': '0.00299700', 'executedQty': '0.00299700',
'cummulativeQuoteQty': '28.93004100', 'status': 'FILLED',
'timeInForce': 'GTC', 'type': 'LIMIT_MAKER', 'side': 'SELL',
'stopPrice': '0.00000000', 'icebergQty': '0.00000000', 'time':
1592883883927, 'updateTime': 1592884056113, 'isWorking': True,
'origQuoteOrderQty': '0.00000000'}, 'id': '2539250884',
'clientOrderId': '5UFBYwDF6b9qJ1UWNsvOYU', 'timestamp': 1592883883927,
'datetime': '2020-06-23T03:44:43.927Z', 'lastTradeTimestamp': None,
'symbol': 'BTC/USDT', 'type': 'limit', 'side': 'sell', 'price':
9653.0, 'amount': 0.002997, 'cost': 28.930041, 'average': 9653.0, 'filled': 0.002997, 'remaining': 0.0, 'status': 'closed', 'fee': None,
'trades': None}, {'info': {'symbol': 'BTCUSDT', 'orderId': 2539601261,
'orderListId': -1, 'clientOrderId': 'testme-15928890617592764',
'price': '9633.00000000', 'origQty': '0.00150000', 'executedQty':
'0.00150000', 'cummulativeQuoteQty': '14.44950000', 'status':
'FILLED', 'timeInForce': 'GTC', 'type': 'LIMIT_MAKER', 'side': 'BUY',
'stopPrice': '0.00000000', 'icebergQty': '0.00000000', 'time':
1592889061852, 'updateTime': 1592889136305, 'isWorking': True,
'origQuoteOrderQty': '0.00000000'}, 'id': '2539601261',
'clientOrderId': 'testme-15928890617592764', 'timestamp':
1592889061852, 'datetime': '2020-06-23T05:11:01.852Z',
'lastTradeTimestamp': None, 'symbol': 'BTC/USDT', 'type': 'limit',
'side': 'buy', 'price': 9633.0, 'amount': 0.0015, 'cost': 14.4495,
'average': 9633.0, 'filled': 0.0015, 'remaining': 0.0, 'status':
'closed', 'fee': None, 'trades': None}]
I pared it back by executing the following:
info_closed = []
for index, item in enumerate(orders_closed):
    info_closed.append(item['info'])
The results of what I had are listed above in the first post.
I then ran:
df = pd.DataFrame( final_output, columns = [ 'type', 'origQty', 'executedQty' ] )
I am starting to wonder if there is something amiss with the dataframe ... will start looking at this area ...
Try this: before the groupby, cast the values to float.
df[['origQty', 'executedQty']] = df[['origQty', 'executedQty']].astype(float)
(
    df.groupby(['type'])
      .agg({"origQty": sum, "executedQty": sum, "type": len})
      .rename(columns={'type': 'count'})
      .reset_index()
)
I am 99% sure you get the result you want by just doing this:
df.groupby(['type'])[['origQty', 'executedQty']].sum()
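Note that this still needs the float cast from the previous answer, otherwise sum() concatenates the strings, and it does not produce the Count column. A minimal sketch using named aggregation (available since pandas 0.25), assuming df is the frame from the question:

import pandas as pd

# Cast the string quantities to numbers first, then aggregate.
df[['origQty', 'executedQty']] = df[['origQty', 'executedQty']].astype(float)

result = (
    df.groupby('type')
      .agg(origQty=('origQty', 'sum'),
           executedQty=('executedQty', 'sum'),
           Count=('origQty', 'size'))
      .reset_index()
)
print(result)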
