Python Requests AJAX Response Different from Browser Due to Cookie Handling - python

My request:
# python 3.7.3
import requests
from requests import Session
session = Session()
session.head('https://www.basspro.com/shop/en/blazer-brass-handgun-ammo')
cookies = requests.utils.cookiejar_from_dict(requests.utils.dict_from_cookiejar(session.cookies))
response = session.post(
    url='https://www.basspro.com/shop/BPSGetInventoryStatusByIDView',
    data={
        'productId': '3074457345616736172',
        'itemId': '3074457345616736949',
        'isGunFlag': 'false',
    },
    cookies=cookies,
    headers={
        'accept': '*/*',
        'accept-encoding': 'gzip, deflate, br',
        'accept-language': 'en-US,en;q=0.9',
        'content-length': '72',
        'content-type': 'application/x-www-form-urlencoded',
        'origin': 'https://www.basspro.com',
        'referer': 'https://www.basspro.com/shop/en/blazer-brass-handgun-ammo',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.92 Safari/537.36 Vivaldi/2.9.1705.38',
        'x-requested-with': 'XMLHttpRequest',
    },
)
print(response.text)
Output:
<input type="hidden" class="relativeToAbsolute" value="true" />
/*
{
"onlineInventory": {
"status": "Status Not Available",
"image": "widget_product_info/outofstock_icon.svg",
"altText": "Status Not Available",
"isDropShip": false,
"availableDate":""
},
"inStoreInventory": {
"stores": [],
"checkStoreText": "Check Store Availability",
"isInStoreInventory": true,
"isPickupInventory": false
}
}
*/
My output when inspecting and running the same AJAX request in the browser:
/*
{
"onlineInventory": {
"status": "Backordered",
"image": "widget_product_info/backordered_icon.svg",
"altText": "Backordered",
"isDropShip": false,
"quantity": 0,
"availableDate":"May 1-8"
},
"inStoreInventory": {
"stores": [{
id: '715839555',
name: '83',
gunRestricted: 'false',
dsName: 'TX - Round Rock',
status: 'Unavailable',
statusText: 'Out of Stock',
image: 'widget_product_info/outofstock_icon.svg',
altText: 'Out of Stock',
availableDate: '',
availableQuantity: '',
availableQuantityDisplay: 'false',
cityState: 'Round Rock, TX',
ISPavailableDate: '',
ISPavailableQuantity: '',
pickupTime: 'by 2:00pm',
offerISPOnBPS: 'Yes',
offerISPOnCAB: 'No'}],
"checkStoreText": "Change Store",
"isInStoreInventory": true,
"isPickupInventory": true
}
}
*/
I tried assigning cookies this way as well:
url = "https://www.basspro.com/shop/en/blazer-brass-handgun-ammo"
r = requests.get(url)
cookies = r.cookies
# fails to pass the right cookie
If I instead copy the cookie verbatim from an inspected GET request at https://www.basspro.com/shop/en/blazer-brass-handgun-ammo and put that into the POST headers, it works. How do I get cookies to work properly programmatically?
EDIT:
Here's my attempt at just using Session() for cookies:
# python 3.7.3
import requests
from requests import Session
session = Session()
session.get("https://www.basspro.com/shop/en/blazer-brass-handgun-ammo")
# session.head('https://www.basspro.com/shop/en/blazer-brass-handgun-ammo')
response = session.post(
    url='https://www.basspro.com/shop/BPSGetInventoryStatusByIDView',
    data={
        'productId': '3074457345616736172',
        'itemId': '3074457345616736949',
        'isGunFlag': 'false',
    },
    headers={
        'accept': '*/*',
        'accept-encoding': 'gzip, deflate, br',
        'accept-language': 'en-US,en;q=0.9',
        'content-length': '72',
        'content-type': 'application/x-www-form-urlencoded',
        'origin': 'https://www.basspro.com',
        'referer': 'https://www.basspro.com/shop/en/blazer-brass-handgun-ammo',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.92 Safari/537.36 Vivaldi/2.9.1705.38',
        'x-requested-with': 'XMLHttpRequest',
    },
)
print(response.text)
I get the same result as before ("status": "Status Not Available", etc.)
Here's my attempt at the second solution:
# python 3.7.3
import requests
from requests import Session
url = "https://www.basspro.com/shop/en/blazer-brass-handgun-ammo"
r = requests.get(url)
cookies = r.cookies # the type is RequestsCookieJar
response = requests.post(
    url='https://www.basspro.com/shop/BPSGetInventoryStatusByIDView',
    data={
        'productId': '3074457345616736172',
        'itemId': '3074457345616736949',
        'isGunFlag': 'false',
    },
    cookies=cookies,
    headers={
        'accept': '*/*',
        'accept-encoding': 'gzip, deflate, br',
        'accept-language': 'en-US,en;q=0.9',
        'content-length': '72',
        'content-type': 'application/x-www-form-urlencoded',
        'origin': 'https://www.basspro.com',
        'referer': 'https://www.basspro.com/shop/en/blazer-brass-handgun-ammo',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.92 Safari/537.36 Vivaldi/2.9.1705.38',
        'x-requested-with': 'XMLHttpRequest',
    },
)
print(response.text)
Again, I get the same result as before. What am I doing wrong?

Can you try it like this:
session = Session()
session.get("https://www.basspro.com/shop/en/blazer-brass-handgun-ammo")
Then make all the following calls through session.xxx and do not pass a cookies parameter at all; the session keeps track of cookies for you.
Another way I have tested:
cookies = r.cookies  # the type is RequestsCookieJar
requests.post(.... cookies=cookies...)
Finally, I tested that the following works. Please compare it carefully with your code:
from requests import Session
session = Session()
agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.163 Safari/537.36'
r1 = session.get("https://www.basspro.com/shop/en/blazer-brass-handgun-ammo",headers={'user-agent': agent})
response = session.post(
    url='https://www.basspro.com/shop/BPSGetOnlineInventoryStatusByIDView',
    data={
        'productId': '3074457345616736172',
        'catalogId': '3074457345616676768',
        'storeId': '715838534',
        'langId': -1
    },
    headers={
        'user-agent': agent,
        'x-requested-with': 'XMLHttpRequest',
    },
    cookies=r1.cookies
)
print(response.text)
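As a side note, the body that comes back is not bare JSON: the captures above show the payload wrapped in a /* ... */ comment, and the first response even starts with a stray hidden-input tag. A minimal sketch for pulling the JSON out, assuming the wrapper looks like the captures in the question:
import json
import re

def extract_inventory_json(body):
    """Pull the JSON payload out of the /* ... */ wrapper shown in the question."""
    match = re.search(r'/\*(.*)\*/', body, re.DOTALL)
    if not match:
        return None
    try:
        return json.loads(match.group(1))
    except json.JSONDecodeError:
        # The in-store block captured from the browser uses unquoted keys,
        # which json.loads cannot parse; treat that case as "no data" here.
        return None

inventory = extract_inventory_json(response.text)  # `response` from the snippet above
if inventory:
    print(inventory.get('onlineInventory', {}).get('status'))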

Related

I have given all the payload data but it is not working

Can someone please help me with creating an account at proxy.webshare.io/register?
I have tried:
task_id = captcha.create_task(website_url="https://proxy.webshare.io/register/", website_key="6LeHZ6UUAAAAAKat_YS--O2tj_by3gv3r_l03j9d")
print("Wait for respone")
print(task_id)
respone = captcha.join_task_result(task_id).get("gRecaptchaResponse")
print("Recieved key: " + respone)
source = client.get('https://proxy.webshare.io/register').content
soup = BeautifulSoup(source, 'html.parser')
header = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
    'Content-Type': 'application/x-www-form-urlencoded',
    'sec-ch-ua': '"Chromium";v="104", " Not A;Brand";v="99", "Google Chrome";v="104"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"Windows"',
    'Sec-Fetch-Dest': 'document',
    'Sec-Fetch-Mode': 'navigate',
    'Sec-Fetch-Site': 'same-origin',
    'Sec-Fetch-User': '?1',
    'Upgrade-Insecure-Requests': '1',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36'
}
val = soup.find("input", {'id': 'a'}).get("value")
print(val)
if 'csrftoken' in client.cookies:
    csrftoken = client.cookies['csrftoken']
else:
    csrftoken = client.cookies['csrf']
print(csrftoken)
mail = em_f_n+ "#gmail.com"
passz = pw_f_n
print(mail)
print(passz)
data = {
    "csrfmiddlewaretoken": csrftoken,
    "next": "",
    "a": val,
    "email": mail,
    "password1": passz,
    "g-recaptcha-response": respone
}
r = client.post("https://proxy.webshare.io/register", json=data, headers=header)
print(r.context)
print(r.status_code)
but it just returns a 200 status code (although it printed all of the data above) and it didn't actually work. Please help.
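One thing that stands out (just an observation, not a verified fix): the header declares Content-Type application/x-www-form-urlencoded, but the payload is passed with json=, so requests serializes it as a JSON body. A sketch of sending it as form data instead, reusing the client, header, and data objects built above:
r = client.post(
    "https://proxy.webshare.io/register",
    data=data,        # data= form-encodes the payload; json= would send a JSON body
    headers=header,
)
print(r.status_code)
print(r.text[:500])   # r.text (not r.context) shows the start of the response body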

Scrape multiple pages with json

I am trying to scrape multiple pages that return JSON, but I get the error shown below.
import requests
import json
import pandas as pd
headers = {
    'Accept-Language': 'en-GB,en-US;q=0.9,en;q=0.8,pt;q=0.7',
    'Connection': 'keep-alive',
    'Origin': 'https://www.nationalhardwareshow.com',
    'Referer': 'https://www.nationalhardwareshow.com/',
    'Sec-Fetch-Dest': 'empty',
    'Sec-Fetch-Mode': 'cors',
    'Sec-Fetch-Site': 'cross-site',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36',
    'accept': 'application/json',
    'content-type': 'application/x-www-form-urlencoded',
    'sec-ch-ua': '".Not/A)Brand";v="99", "Google Chrome";v="103", "Chromium";v="103"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"Windows"',
}
params = {
    'x-algolia-agent': 'Algolia for vanilla JavaScript 3.27.1',
    'x-algolia-application-id': 'XD0U5M6Y4R',
    'x-algolia-api-key': 'd5cd7d4ec26134ff4a34d736a7f9ad47',
}
for i in range(0,4):
    data = '{"params":"query=&page={i}&facetFilters=&optionalFilters=%5B%5D"}'
    resp = requests.post('https://xd0u5m6y4r-dsn.algolia.net/1/indexes/event-edition-eve-e6b1ae25-5b9f-457b-83b3-335667332366_en-us/query', params=params, headers=headers, data=data).json()
    req_json = resp
    df = pd.DataFrame(req_json['hits'])
    f = pd.DataFrame(df[['name','representedBrands','description']])
    print(f)
The error:
Traceback (most recent call last):
  File "e:\ScriptScraping\Extract data from json\uk.py", line 31, in <module>
    df = pd.DataFrame(req_json['hits'])
KeyError: 'hits'
Try concatenating the variable i into the data parameter:
import requests
import json
import pandas as pd
headers = {
    'Accept-Language': 'en-GB,en-US;q=0.9,en;q=0.8,pt;q=0.7',
    'Connection': 'keep-alive',
    'Origin': 'https://www.nationalhardwareshow.com',
    'Referer': 'https://www.nationalhardwareshow.com/',
    'Sec-Fetch-Dest': 'empty',
    'Sec-Fetch-Mode': 'cors',
    'Sec-Fetch-Site': 'cross-site',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36',
    'accept': 'application/json',
    'content-type': 'application/x-www-form-urlencoded',
    'sec-ch-ua': '".Not/A)Brand";v="99", "Google Chrome";v="103", "Chromium";v="103"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"Windows"'
}
params = {
    'x-algolia-agent': 'Algolia for vanilla JavaScript 3.27.1',
    'x-algolia-application-id': 'XD0U5M6Y4R',
    'x-algolia-api-key': 'd5cd7d4ec26134ff4a34d736a7f9ad47'
}
lst = []
for i in range(0,4):
    data = '{"params":"query=&page='+str(i)+'&facetFilters=&optionalFilters=%5B%5D"}'
    resp = requests.post('https://xd0u5m6y4r-dsn.algolia.net/1/indexes/event-edition-eve-e6b1ae25-5b9f-457b-83b3-335667332366_en-us/query', params=params, headers=headers, data=data).json()
    req_json = resp
    df = pd.DataFrame(req_json['hits'])
    f = pd.DataFrame(df[['name','representedBrands','description']])
    lst.append(f)
    #print(f)
d = pd.concat(lst)
print(d)
It is returning status code 400 because the request is bad: you are sending wrongly formatted data. Change
data = '{"params":"query=&page={i}&facetFilters=&optionalFilters=%5B%5D"}'
to
data = '{"params":"query=&page='+str(i)+'&facetFilters=&optionalFilters=%5B%5D"}'
for it to work. Hope I could help.
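As an aside, the same body can be built without manual string concatenation. This is only a sketch that reuses the params and headers dicts from the answer above and assumes the endpoint behaves the same:
import json
import requests

url = ('https://xd0u5m6y4r-dsn.algolia.net/1/indexes/'
       'event-edition-eve-e6b1ae25-5b9f-457b-83b3-335667332366_en-us/query')
for i in range(0, 4):
    # f-string interpolation plus json.dumps gives the same body as the concatenated string
    body = json.dumps({'params': f'query=&page={i}&facetFilters=&optionalFilters=%5B%5D'})
    resp = requests.post(url, params=params, headers=headers, data=body).json()
    print(len(resp.get('hits', [])))  # quick sanity check per page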

python request trying to login session barchart

I am trying to create a login session on barchart using requests, with no luck, and I am not sure what I'm missing.
I always get a 500 error.
code:
import requests
def main():
    site_url = "https://www.barchart.com/login"
    payload = {
        'email': 'user',
        'password': 'pass',
    }
    headers = {
        'authority': 'www.barchart.com',
        'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
        'accept-language': 'es-ES,es;q=0.9,en;q=0.8,gl;q=0.7',
        'cache-control': 'no-cache',
'cookie': '_gcl_au=1.1.1986418753.1652987508; _fbp=fb.1.1652987508275.1121978397; _ga=GA1.2.581266708.1652987508; __gads=ID=4850c10bd629ae1e-227a8d8bb6d30042:T=1652987552:RT=1652987552:S=ALNI_MYiYqDwr6eWdC-6Q67HlsmfGR9TFQ; _admrla=2.2-8105254c8da36a72-ab5313fc-d7a7-11ec-8803-06c18b5dfeba; pubcv=%7B%7D; tcf2cookie=CPZN1UAPZN1UAAJAEBENCQCsAP_AAEPAACiQIxNd_X__bX9j-_5_bft0eY1P9_r3_-QzjhfNs-8F3L_W_L0Xw2E7NF36pq4KuR4Eu3LBIQNlHMHUTUmwaokVrzHsak2cpyNKJ7LEknMZO2dYGH9Pn9lDuYKY7_5___bx3D-v_t_-39T378Xf3_d5_2_--vCfV599jbn9fV_7_9nP___9v-_8_________wRgAJMNS8gC7MscGTSMIoUQIwrCQqgUAFFAMLRFYAODgp2VgEuoIWACAVIRgRAgxBRgwCAAQSAJCIgJACwQCIAiAQAAgARAIQAETAILACwMAgAFANCxACgAECQgyICI5TAgKkSiglsrEEoK9jTCAOs8AKBRGRUACJJIQSAgJCwcxwBICXiyQNMUL5ACMEKAAAAA.f_gACHgAAAAA; webinar107WebinarClosed=true; _gid=GA1.2.1453697314.1653243141; _hjid=e2d759d4-08f2-4e97-b4bf-15667795cb0e; _hjSessionUser_2563157=eyJpZCI6ImMyZWQyMTQ2LTZmZGItNTViNi1hMzljLTlkODIyOTAyYTlkYSIsImNyZWF0ZWQiOjE2NTMyNDMxNTkwMTAsImV4aXN0aW5nIjp0cnVlfQ==; __aaxsc=2; _hjSession_2563157=eyJpZCI6Ijc2MDE2ZDE3LTRlN2MtNGFiNS05OTgzLTRjNjY5YTg3ODM0YyIsImNyZWF0ZWQiOjE2NTMyNTA4MDE3MzQsImluU2FtcGxlIjpmYWxzZX0=; _hjAbsoluteSessionInProgress=0; _hjIncludedInPageviewSample=1; _hjIncludedInSessionSample=0; market=eyJpdiI6ImdJTllrNEpHMnF6U3B3THRoQ0dZTkE9PSIsInZhbHVlIjoibjkwM3lrYkNORXU0cDNhV25VUHNYUTZ6eFlCOHVQRC9GOEJhM2VJK0RtN21IYjFWQVZMVlRTYXZpZk5idWNLSCIsIm1hYyI6IjE5NmY0MGI3OGNjNjVhZjY5ZWU5N2FkZjY1NWVlYzVjZThmMGM3Mjk0YjljNWEwZjI0YzBjMjQwOThmYTAyN2EifQ%3D%3D; bcFreeUserPageView=0; cto_bundle=JsBghF84Rm9rTThnUWNFdEM0blV1Q1lFUUVha3huMEY1NkpnZFVjblpsNyUyRk8zUFBZNUM2dGp1Q054bkElMkYyR09aaUtRckpUMHViJTJCQjJ2cEg0OGt2c3B6QllxWUczeWRmZEJVUnUlMkZ6MnRrT0xvakxnWXIxeGJtRUdYMXJVVFglMkZ3RWJDSUFEeFFqZDZIN3pSemtZRjZrdndmazNnJTNEJTNE; laravel_token=eyJpdiI6IkNjWW9EUkI1OGdkT1duRVNEMlU1U2c9PSIsInZhbHVlIjoiRXMyRXlsRnpzbFlvdkpRL0RSU0lPeC85Zkx0MGJkdi9mczQ1Nk9WUFlNbGorTlVEUDBGd0VhTysrTHhUWGxRNTVaa3lzMVFOZ0pMRjFIYklFQW9TUlBFT0pZN1BjOUU0TldYVEZjbmZBcFBBWWViRVFHTzFVazFHMHZ0bUlSbEhndzdRNEs0L0xMUjc3cHlKL3FEdGJuTDN5VktaRlVhTTdtYlpLVWM3TDlpWGlBWEtKa3p4Rjh4Ty9zOXVtSGF5djRTVHpPQWZZRFNQQTlpNGNnQURNclpucjlVMG8xaUc0U2NRejdjU25Td0hIb3pLNkxwS1IzcG9KU3p2TUYybmMyajM5cmFsWlhOM0xhQS9tR0xDNktPdHcxK0lKR0JHNE5qUjZPQnlTZUNndkFvQ0l6QjhaVWxlbEJoVWVOKzAiLCJtYWMiOiJhMDc0ZmVmN2I4NGMxNjE2ZWRiM2IwMjY3YmNhNzY3MjZjNjA4ODU5MTQ0MmY2YjMwNWVjZDA2NDIwM2E1MTljIn0%3D; XSRF-TOKEN=eyJpdiI6IlUzSzRkTExjZElxY2FGNGlCVWlNQ3c9PSIsInZhbHVlIjoiM2Y3QllmVGViMEJEOEdjOHNXR1lBdHd0enQxRnp4YlJmVXBiTDhSNjZNYTRYK3lLajVESWg0QTlxcHpLS2pHdDBKYjkrWEV0eHcvQzE0U2J2QnpwR2dQMWVVN1RlNTlhVlJ1M2FlSjhBd2hRd09zVC9YbG8rN1ZVcGQ1OWkwNXMiLCJtYWMiOiI2ZTkzOWMwY2E4OGQyYmU0ZTI0NDc3NjM0NDhmZjAwOGFjYzhiMWQxMGU0ZTdmMDM5YmJmMmUwZmViNDFiODRlIn0%3D; laravel_session=eyJpdiI6Ijk4R1Z1c0U4L0R5cTU0TjBFTWdqY3c9PSIsInZhbHVlIjoiR1lHTVI0c1JPblc2VENtY0thMy9ROXhMYXE2VDZwU3BKNGhpZUUxc2QzOFJySlNhTWVwWnh1RzdTaitDYUdpRXlIckszV0NEL1JCbkpXN3YzamtOWEIvUFJYZGhDMzFVaysrSUJvL3NTQ2NaYndiQjIwbWIxcVZmTGR4Uk5lZVoiLCJtYWMiOiI1YjczYzczZmIyMTQ4NzMxODIzMGIwZjk2MTdkNzU2YjU0N2NjZjkxMDViOGI1YTIzMzA5ZGY1OGY1OWQyYjM0In0%3D; _gat_UA-2009749-51=1; IC_ViewCounter_www.barchart.com=10; aasd=15%7C1653252462025; _awl=2.1653252699.0.5-06f6ddfaf139e746127acfcfca00de3c-6763652d6575726f70652d7765737431-0',
        'origin': 'https://www.barchart.com',
        'pragma': 'no-cache',
        'referer': 'https://www.barchart.com/',
        'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="101", "Google Chrome";v="101"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"Windows"',
        'sec-fetch-dest': 'document',
        'sec-fetch-mode': 'navigate',
        'sec-fetch-site': 'same-origin',
        'sec-fetch-user': '?1',
        'upgrade-insecure-requests': '1',
        'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.67 Safari/537.36',
    }
    with requests.Session() as req:
        req.headers.update(headers)
        res = req.post(site_url, data=payload)
        print(res)

if __name__ == '__main__':
    main()
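The cookie names in the request (XSRF-TOKEN, laravel_session, laravel_token) suggest a Laravel backend, and Laravel logins normally reject posts that lack a fresh session cookie and CSRF token, which a hard-coded cookie header cannot provide. Below is only a sketch of that usual pattern, not a verified fix; the hidden field name '_token' and its presence in the login page are assumptions.
import requests
from bs4 import BeautifulSoup

LOGIN_URL = 'https://www.barchart.com/login'
UA = ('Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 '
      '(KHTML, like Gecko) Chrome/101.0.4951.67 Safari/537.36')

with requests.Session() as s:
    s.headers.update({'user-agent': UA, 'referer': 'https://www.barchart.com/'})
    # Let the server set its own session cookies instead of hard-coding them.
    page = s.get(LOGIN_URL)
    soup = BeautifulSoup(page.text, 'html.parser')
    # Assumption: the login form carries a hidden CSRF field named '_token'.
    token_input = soup.find('input', {'name': '_token'})
    token = token_input['value'] if token_input else ''
    res = s.post(LOGIN_URL, data={
        'email': 'user',
        'password': 'pass',
        '_token': token,  # assumed field name; inspect the real form to confirm
    })
    print(res.status_code)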

UnicodeEncodeError: 'latin-1' codec can't encode character '\u201d' in position 0: ordinal not in range(256)

I have a problem with a request to Instagram; if anyone knows what is wrong, please help me.
This is the code:
import requests
username = input('user: ')
headers = {
    'accept': '*/*',
    'accept-encoding': 'gzip, deflate, br',
    'accept-language': 'en-ZW,en-US;q=0.9,en;q=0.8',
    'content-length': '84',
    'content-type': 'application/x-www-form-urlencoded',
    'cookie': 'ig_cb=2; ig_did=9A4D7DF8-A4A7-4061-A279-9B69C4683ED2; mid=YED50AAEAAGr9ZEXMr25akKRsufT; shbid=14062; rur=ATN; shbts=1619229397.0332613; csrftoken=Pa0VXyUGq674IOlySdbKFFLSGmhHWXQu',
    'origin': 'https://www.instagram.com',
    'referer': 'https://www.instagram.com/accounts/password/reset/',
    'sec-ch-ua': '”Google Chrome";v="89", "Chromium";v="89", ";Not\"A\\Brand";v="99”',
    'sec-ch-ua-mobile': '?1',
    'sec-fetch-dest': 'empty',
    'sec-fetch-mode': 'cors',
    'sec-fetch-site': 'same-origin',
    'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.128 Mobile Safari/537.36',
    'x-csrftoken': 'Pa0VXyUGq674IOlySdbKFFLSGmhHWXQu',
    'x-ig-app-id': '1217981644879628',
    'x-ig-www-claim': 'hmac.AR1kp8P0iLWIGnK2piGMehJw2lZQaN68UqFoWWr4QtEB0gPY',
    'x-instagram-ajax': '822bad258fea',
    'x-requested-with': 'XMLHttpRequest'
}
data = {
    'email_or_username': username
}
req = requests.post('https://www.instagram.com/accounts/account_recovery_send_ajax/', headers=headers , data=data).text
print(req)
The value of your sec-ch-ua header starts (and ends) with a curly quote character (”), which is the '\u201d' that the latin-1 codec in the error message cannot encode. Replace it with a plain double quote (").
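Concretely, only the two curly quotes at the start and end of the value need to change; something like this, where headers is the dict from the question:
# Replace the curly quotes (\u201d) with plain ASCII double quotes so the
# header can be encoded as latin-1 when the request is sent.
headers['sec-ch-ua'] = '"Google Chrome";v="89", "Chromium";v="89", ";Not\"A\\Brand";v="99"'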

How to write table format to an Excel sheet?

Here is my Python code:
import requests
url = 'https://www1.nseindia.com/live_market/dynaContent/live_watch/stock_watch/juniorNiftyStockWatch.json'
headers = {
    'Connection': 'keep-alive',
    'Cache-Control': 'max-age=0, no-cache, no-store',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.0.2990 Safari/537.36',
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'Accept-Encoding': 'gzip, deflate, br',
    'Accept-Language': 'en-US,en;q=0.9,hi;q=0.8'
}
cookie_dict = {
    'bm_sv': 'FE5D5202FA442377EAE6BE578E5AE53D~TmgiDfrRzUs3GLDNWdi+VYOGUgD/J5uggMUSrRYzOob13kW5hAIyKjXHPWK42UDCM69u2LxxjUmIZ3uo/6orCmkSTim/QNEqHDMaqyU+lnjwtT3j4bI7rpt2OfhdZ/wPYtAQPaG7IJQj8fFUKxNm0EtMj/ZHW8jDRzOvy0JfZhk='
}
session = requests.session()
for cookie in cookie_dict:
    session.cookies.set(cookie, cookie_dict[cookie])
json = session.get(url, headers = headers).json()
print(json)
Here is the sample output for 2 stocks:
{
"declines":18,
"data":[
{
"symbol":"IDEA",
"open":"4.25",
"high":"4.80",
"low":"4.20",
"ltP":"4.55",
"ptsC":"0.35",
"per":"8.33",
"trdVol":"9,245.45",
"trdVolM":"924.54",
"ntP":"420.67",
"mVal":"4.21",
"wkhi":"15.00",
"wklo":"2.40",
"wkhicm_adj":"183.70",
"wklocm_adj":"125.10",
"xDt":"31-DEC-2999",
"cAct":"-",
"previousClose":"4.20",
"dayEndClose":"4.6",
"iislPtsChange":"0.40",
"iislPercChange":"9.52",
"yPC":"-68.18",
"mPC":"42.19"
},
{
"symbol":"MOTHERSUMI",
"open":"78.50",
"high":"85.00",
"low":"78.10",
"ltP":"80.60",
"ptsC":"3.60",
"per":"4.68",
"trdVol":"547.02",
"trdVolM":"54.70",
"ntP":"450.85",
"mVal":"4.51",
"wkhi":"151.00",
"wklo":"48.65",
"wkhicm_adj":"450.70",
"wklocm_adj":"178.25",
"xDt":"12-MAR-2020",
"cAct":"INTERIM DIVIDEND - RS 1.50 PER SHARE",
"previousClose":"77.00",
"dayEndClose":"80.45",
"iislPtsChange":"3.45",
"iislPercChange":"4.48",
"yPC":"-37.54",
"mPC":"43.42"
}
]
}
How can I write the required table format below to an Excel sheet, while continuously fetching the data at a time interval and setting a total running time for the program? The columns I need are:
No, symbol, open, high, low, ltp, ptsc, per, trdvol, trdvolm, ntp, mval, wkhi, wklo, wkhicm_adj
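A minimal sketch of one way to do this: build a DataFrame from the 'data' list in the JSON above and write it with pandas.DataFrame.to_excel on a timed loop. The column names come from the sample output, while the 60-second interval, the one-hour runtime, and the file naming are assumptions.
import time
import pandas as pd

COLUMNS = ['symbol', 'open', 'high', 'low', 'ltP', 'ptsC', 'per', 'trdVol',
           'trdVolM', 'ntP', 'mVal', 'wkhi', 'wklo', 'wkhicm_adj']

def fetch_frame():
    # `session`, `url` and `headers` are the objects defined in the code above.
    payload = session.get(url, headers=headers).json()
    return pd.DataFrame(payload['data'])[COLUMNS]

end_time = time.time() + 60 * 60            # assumed total runtime: one hour
while time.time() < end_time:
    frame = fetch_frame()
    frame.index = range(1, len(frame) + 1)  # serial numbers for the "No" column
    frame.to_excel(f'nifty_{int(time.time())}.xlsx', index_label='No')  # needs openpyxl
    time.sleep(60)                          # assumed fetch interval: 60 seconds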
