How do I get data from a function/variable inside a &lt;script&gt; tag using Python? - python

<script defer="">
window.__CURRENT_SITE__ = window.__CURRENT_SITE__ || "videoblocks";
window.__CURRENT_PATH__ = window.__CURRENT_PATH__ || "\/video\/stock\/overheated-young-african-american-lady-suffering-from-high-temperature-indoors-belnttknlk6yv56x6";
window.__CURRENT_SEARCH_PARAMS__ = window.__CURRENT_SEARCH_PARAMS__ || "?";
(function() {
var initialState8277 = {"auth":{"isLoggedIn":false,"isMobile":false,"user":null,"subscription":null,"primarySubscription":null,"videoSubscription":null,"audioSubscription":null,"imageSubscription":null,"permissions":{"makerHooksVisible":false,"hasFolderAccess":false},"featureFlags":{"licenseRevampTest":false,"shouldShowMakerActionsHook":false}},"details":{"stockItem":{"id":10852901,"contentClass":"video","contentId":338169628,"assetId":"SBV-338169628","title":"Overheated young african american lady suffering from high temperature indoors.","description":null,"detailsUrl":"\/video\/stock\/overheated-young-african-american-lady-suffering-from-high-temperature-indoors-belnttknlk6yv56x6","previewUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/watermarks\/video\/Sz9LmzxmBjyscquj0\/videoblocks-overheated-young-african-american-lady-suffering-from-high-temperature-indoors_bxo5ta1nu__b14441d66874772d2a7b34dde20873a9__P360.mp4","smallPreviewUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/watermarks\/video\/Sz9LmzxmBjyscquj0\/videoblocks-overheated-young-african-american-lady-suffering-from-high-temperature-indoors_bxo5ta1nu__b14441d66874772d2a7b34dde20873a9__P180.mp4","thumbnailUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/thumbnails\/video\/Sz9LmzxmBjyscquj0\/videoblocks-overheated-young-african-american-lady-suffering-from-high-temperature-indoors_bxo5ta1nu_thumbnail-180_01.jpg","isMarketPlaceItem":false,"contributorPortalId":"BelnTTkNLk6yv56x6","distributionType":"RS","expirationDate":null,"shouldStockItemHaveNewFlag":false,"shouldStockItemHaveStaffPickFlag":false,"dateAdded":"2020-02-23 05:02:16","contentType":"footage","dateDistributionTypeUpdated":"2020-02-23 
15:02:16","isActive":true,"mediumImageUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/thumbnails\/video\/Sz9LmzxmBjyscquj0\/videoblocks-overheated-young-african-american-lady-suffering-from-high-temperature-indoors_bxo5ta1nu_thumbnail-360_01.jpg","duration":14,"is360":false,"isTemplate":false,"collapsedSetId":"","numCollapses":null,"rating":0,"downloads":1247,"hasTalentReleased":true,"hasPropertyReleased":false,"keywords":["african","african american","alone","biracial","discomfort","emotion","expression","female","from","girl","heat","high temperature","home","hot","indoors","lady","millennial","mixed","multiracial","overheated","people","person","portrait","problem","room","sit","sofa","stressed","suffer","suffering","uncomfortable","unhappy","unpleasant","unwell","upset","woman","young"],"hasAlphaChannel":false,"maxResolution":null,"isEditorial":false,"categories":[{"id":10,"groupId":189304,"urlId":"people","name":"People","description":"Download royalty free stock footage video clips of people.","type":"category","source":"auto","searchIndexable":true,"searchType":"footage","active":true,"dateAdded":"2014-10-19T23:35:05.000Z","dateUpdated":"2021-01-21T16:11:04.441Z","contributorId":null,"stockContributorId":null,"position":null,"isHomepageCollection":null,"showInApi":false}],"contributor":{"username":"fizkes","portfolioLink":"https:\/\/www.storyblocks.com\/video\/portfolio\/fizkes"}},"stockItemFormats":[{"id":null,"label":"4KMOV","prettyLabel":"4K MOV","filesize":"1501 MB","filename":"videoblocks-overheated-young-african-american-lady-suffering-from-high-temperature-indoors_Bxo5Ta1NU.mov","resolution":"4096 x 2160","fileExtension":"mov","quality":"4K","formatName":"4KMOV","downloadAjaxUrl":"\/video\/download-ajax\/346730940\/4KMP4","downloadUrl":"\/video\/download\/346730940\/4KMP4","exportUrl":"\/video\/cloud-export\/346730940\/4KMP4","frameRate":"25 fps","codec":"h264"},{"id":null,"label":"HDMOV","prettyLabel":"HD MOV","filesize":"159.4 
MB","filename":"videoblocks-61a620ba428c7c51d487da72_stqmh0u9tk_1080__D.mov","resolution":"1920 x 1080","fileExtension":"mov","quality":"HD","formatName":"HDMOV","downloadAjaxUrl":"\/video\/download-ajax\/346730940\/HDMOV","downloadUrl":"\/video\/download\/346730940\/HDMOV","exportUrl":"\/video\/cloud-export\/346730940\/HDMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"HDMP4","prettyLabel":"HD MP4","filesize":"3.9 MB","filename":"videoblocks-61a620ba428c7c51d487da72_stqmh0u9tk_1080__D.mp4","resolution":"1920 x 1080","fileExtension":"mp4","quality":"HD","formatName":"HDMP4","downloadAjaxUrl":"\/video\/download-ajax\/346730940\/HDMP4","downloadUrl":"\/video\/download\/346730940\/HDMP4","exportUrl":"\/video\/cloud-export\/346730940\/HDMP4","frameRate":"25 fps","codec":"h264"}],"selectedAcquisitionOption":[{"id":null,"label":"4KMOV","prettyLabel":"4K MOV","filesize":"1342.4 MB","filename":"videoblocks-61a620ba428c7c51d487da72_STQMh0U9tK.mov","resolution":"3840 x 2160","fileExtension":"mov","quality":"4K","formatName":"4KMOV","downloadAjaxUrl":"\/video\/download-ajax\/346730940\/4KMOV","downloadUrl":"\/video\/download\/346730940\/4KMOV","exportUrl":"\/video\/cloud-export\/346730940\/4KMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"4KMP4","prettyLabel":"4K MP4","filesize":"17.7 MB","filename":"videoblocks-61a620ba428c7c51d487da72_stqmh0u9tk_2160__D.mp4","resolution":"3840 x 2160","fileExtension":"mp4","quality":"4K","formatName":"4KMP4","downloadAjaxUrl":"\/video\/download-ajax\/346730940\/4KMP4","downloadUrl":"\/video\/download\/346730940\/4KMP4","exportUrl":"\/video\/cloud-export\/346730940\/4KMP4","frameRate":"25 fps","codec":"h264"},{"id":null,"label":"HDMOV","prettyLabel":"HD MOV","filesize":"159.4 MB","filename":"videoblocks-61a620ba428c7c51d487da72_stqmh0u9tk_1080__D.mov","resolution":"1920 x 
1080","fileExtension":"mov","quality":"HD","formatName":"HDMOV","downloadAjaxUrl":"\/video\/download-ajax\/346730940\/HDMOV","downloadUrl":"\/video\/download\/346730940\/HDMOV","exportUrl":"\/video\/cloud-export\/346730940\/HDMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"HDMP4","prettyLabel":"HD MP4","filesize":"3.9 MB","filename":"videoblocks-61a620ba428c7c51d487da72_stqmh0u9tk_1080__D.mp4","resolution":"1920 x 1080","fileExtension":"mp4","quality":"HD","formatName":"HDMP4","downloadAjaxUrl":"\/video\/download-ajax\/346730940\/HDMP4","downloadUrl":"\/video\/download\/346730940\/HDMP4","exportUrl":"\/video\/cloud-export\/346730940\/HDMP4","frameRate":"25 fps","codec":"h264"}],"isFavorite":false,"topTags":[],"stockItemArtists":[],"moods":[],"genres":[]},{"stockItem":{"id":346653750,"contentClass":"video","contentId":346653750,"assetId":"SBV-346653750","title":"High blood pressure. Close up portrait of sad unhappy african american lady suffering from acute headache","description":null,"detailsUrl":"\/video\/stock\/high-blood-pressure-close-up-portrait-of-sad-unhappy-african-american-lady-suffering-from-acute-headache-h3wb0l6zkktivcy0i","previewUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/watermarks\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-613f65163396b06b98621838_hh3xcgtft__1911c24512c9600378e44fd3fc60226a__P360.mp4","smallPreviewUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/watermarks\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-613f65163396b06b98621838_hh3xcgtft__1911c24512c9600378e44fd3fc60226a__P180.mp4","thumbnailUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/thumbnails\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-613f65163396b06b98621838_hh3xcgtft_thumbnail-180_01.jpg","isMarketPlaceItem":false,"contributorPortalId":"H3WB0l6zKktivcy0i","distributionType":"RS","expirationDate":null,"shouldStockItemHaveNewFlag":false,"shouldStockItemHaveStaffPickFlag":false,"dateAdded":"2021-09-13 12:37:20","contentType":"footage","dateDistributionTypeUpdated":"2021-09-13 
16:37:21","isActive":true,"mediumImageUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/thumbnails\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-613f65163396b06b98621838_hh3xcgtft_thumbnail-360_01.jpg","duration":16,"is360":false,"isTemplate":false,"collapsedSetId":"","numCollapses":null},"stockItemFormats":[{"id":null,"label":"4KMOV","prettyLabel":"4K MOV","filesize":"1338.2 MB","filename":"videoblocks-613f65163396b06b98621838_Hh3XCgTft.mov","resolution":"3840 x 2160","fileExtension":"mov","quality":"4K","formatName":"4KMOV","downloadAjaxUrl":"\/video\/download-ajax\/346653750\/4KMOV","downloadUrl":"\/video\/download\/346653750\/4KMOV","exportUrl":"\/video\/cloud-export\/346653750\/4KMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"4KMP4","prettyLabel":"4K MP4","filesize":"20.3 MB","filename":"videoblocks-613f65163396b06b98621838_hh3xcgtft_2160__D.mp4","resolution":"3840 x 2160","fileExtension":"mp4","quality":"4K","formatName":"4KMP4","downloadAjaxUrl":"\/video\/download-ajax\/346653750\/4KMP4","downloadUrl":"\/video\/download\/346653750\/4KMP4","exportUrl":"\/video\/cloud-export\/346653750\/4KMP4","frameRate":"25 fps","codec":"h264"},{"id":null,"label":"HDMOV","prettyLabel":"HD MOV","filesize":"328.2 MB","filename":"videoblocks-613f65163396b06b98621838_hh3xcgtft_1080__D.mov","resolution":"1920 x 1080","fileExtension":"mov","quality":"HD","formatName":"HDMOV","downloadAjaxUrl":"\/video\/download-ajax\/346653750\/HDMOV","downloadUrl":"\/video\/download\/346653750\/HDMOV","exportUrl":"\/video\/cloud-export\/346653750\/HDMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"HDMP4","prettyLabel":"HD MP4","filesize":"3.6 MB","filename":"videoblocks-613f65163396b06b98621838_hh3xcgtft_1080__D.mp4","resolution":"1920 x 
1080","fileExtension":"mp4","quality":"HD","formatName":"HDMP4","downloadAjaxUrl":"\/video\/download-ajax\/346653750\/HDMP4","downloadUrl":"\/video\/download\/346653750\/HDMP4","exportUrl":"\/video\/cloud-export\/346653750\/HDMP4","frameRate":"25 fps","codec":"h264"}],"selectedAcquisitionOption":[{"id":null,"label":"4KMOV","prettyLabel":"4K MOV","filesize":"1338.2 MB","filename":"videoblocks-613f65163396b06b98621838_Hh3XCgTft.mov","resolution":"3840 x 2160","fileExtension":"mov","quality":"4K","formatName":"4KMOV","downloadAjaxUrl":"\/video\/download-ajax\/346653750\/4KMOV","downloadUrl":"\/video\/download\/346653750\/4KMOV","exportUrl":"\/video\/cloud-export\/346653750\/4KMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"4KMP4","prettyLabel":"4K MP4","filesize":"20.3 MB","filename":"videoblocks-613f65163396b06b98621838_hh3xcgtft_2160__D.mp4","resolution":"3840 x 2160","fileExtension":"mp4","quality":"4K","formatName":"4KMP4","downloadAjaxUrl":"\/video\/download-ajax\/346653750\/4KMP4","downloadUrl":"\/video\/download\/346653750\/4KMP4","exportUrl":"\/video\/cloud-export\/346653750\/4KMP4","frameRate":"25 fps","codec":"h264"},{"id":null,"label":"HDMOV","prettyLabel":"HD MOV","filesize":"328.2 MB","filename":"videoblocks-613f65163396b06b98621838_hh3xcgtft_1080__D.mov","resolution":"1920 x 1080","fileExtension":"mov","quality":"HD","formatName":"HDMOV","downloadAjaxUrl":"\/video\/download-ajax\/346653750\/HDMOV","downloadUrl":"\/video\/download\/346653750\/HDMOV","exportUrl":"\/video\/cloud-export\/346653750\/HDMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"HDMP4","prettyLabel":"HD MP4","filesize":"3.6 MB","filename":"videoblocks-613f65163396b06b98621838_hh3xcgtft_1080__D.mp4","resolution":"1920 x 
1080","fileExtension":"mp4","quality":"HD","formatName":"HDMP4","downloadAjaxUrl":"\/video\/download-ajax\/346653750\/HDMP4","downloadUrl":"\/video\/download\/346653750\/HDMP4","exportUrl":"\/video\/cloud-export\/346653750\/HDMP4","frameRate":"25 fps","codec":"h264"}],"isFavorite":false,"topTags":[],"stockItemArtists":[],"moods":[],"genres":[]},{"stockItem":{"id":10898291,"contentClass":"video","contentId":338435819,"assetId":"SBV-338435819","title":"Unhealthy young african american woman suffering from flu.","description":null,"detailsUrl":"\/video\/stock\/unhealthy-young-african-american-woman-suffering-from-flu-soi03trjikaihfomm","previewUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/watermarks\/video\/Sz9LmzxmBjyscquj0\/videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_bdba3yhsi__56c4021384112fbb32db144fbc3fdac6__P360.mp4","smallPreviewUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/watermarks\/video\/Sz9LmzxmBjyscquj0\/videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_bdba3yhsi__56c4021384112fbb32db144fbc3fdac6__P180.mp4","thumbnailUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/thumbnails\/video\/Sz9LmzxmBjyscquj0\/videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_bdba3yhsi_thumbnail-180_01.jpg","isMarketPlaceItem":false,"contributorPortalId":"SOI03trjIkaihfomm","distributionType":"RS","expirationDate":null,"shouldStockItemHaveNewFlag":false,"shouldStockItemHaveStaffPickFlag":false,"dateAdded":"2020-05-22 13:33:01","contentType":"footage","dateDistributionTypeUpdated":"2020-05-22 21:33:01","isActive":true,"mediumImageUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/thumbnails\/video\/Sz9LmzxmBjyscquj0\/videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_bdba3yhsi_thumbnail-360_01.jpg","duration":12,"is360":false,"isTemplate":false,"collapsedSetId":"","numCollapses":null},"stockItemFormats":[{"id":null,"label":"4KMOV","prettyLabel":"4K MOV","filesize":"1193 
MB","filename":"videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_BdBa3YHsI.mov","resolution":"4096 x 2160","fileExtension":"mov","quality":"4K","formatName":"4KMOV","downloadAjaxUrl":"\/video\/download-ajax\/10898291\/4KMOV","downloadUrl":"\/video\/download\/10898291\/4KMOV","exportUrl":"\/video\/cloud-export\/10898291\/4KMOV","frameRate":"29.97 fps","codec":"prores"},{"id":null,"label":"4KMP4","prettyLabel":"4K MP4","filesize":"12.8 MB","filename":"videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_bdba3yhsi_2160__D.mp4","resolution":"3840 x 2160","fileExtension":"mp4","quality":"4K","formatName":"4KMP4","downloadAjaxUrl":"\/video\/download-ajax\/10898291\/4KMP4","downloadUrl":"\/video\/download\/10898291\/4KMP4","exportUrl":"\/video\/cloud-export\/10898291\/4KMP4","frameRate":"29.97 fps","codec":"h264"},{"id":null,"label":"HDMOV","prettyLabel":"HD MOV","filesize":"189.6 MB","filename":"videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_bdba3yhsi_1080__D.mov","resolution":"1920 x 1080","fileExtension":"mov","quality":"HD","formatName":"HDMOV","downloadAjaxUrl":"\/video\/download-ajax\/10898291\/HDMOV","downloadUrl":"\/video\/download\/10898291\/HDMOV","exportUrl":"\/video\/cloud-export\/10898291\/HDMOV","frameRate":"29.97 fps","codec":"prores"},{"id":null,"label":"HDMP4","prettyLabel":"HD MP4","filesize":"3.4 MB","filename":"videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_bdba3yhsi_1080__D.mp4","resolution":"1920 x 1080","fileExtension":"mp4","quality":"HD","formatName":"HDMP4","downloadAjaxUrl":"\/video\/download-ajax\/10898291\/HDMP4","downloadUrl":"\/video\/download\/10898291\/HDMP4","exportUrl":"\/video\/cloud-export\/10898291\/HDMP4","frameRate":"29.97 fps","codec":"h264"}],"selectedAcquisitionOption":[{"id":null,"label":"4KMOV","prettyLabel":"4K MOV","filesize":"1193 
MB","filename":"videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_BdBa3YHsI.mov","resolution":"4096 x 2160","fileExtension":"mov","quality":"4K","formatName":"4KMOV","downloadAjaxUrl":"\/video\/download-ajax\/10898291\/4KMOV","downloadUrl":"\/video\/download\/10898291\/4KMOV","exportUrl":"\/video\/cloud-export\/10898291\/4KMOV","frameRate":"29.97 fps","codec":"prores"},{"id":null,"label":"4KMP4","prettyLabel":"4K MP4","filesize":"12.8 MB","filename":"videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_bdba3yhsi_2160__D.mp4","resolution":"3840 x 2160","fileExtension":"mp4","quality":"4K","formatName":"4KMP4","downloadAjaxUrl":"\/video\/download-ajax\/10898291\/4KMP4","downloadUrl":"\/video\/download\/10898291\/4KMP4","exportUrl":"\/video\/cloud-export\/10898291\/4KMP4","frameRate":"29.97 fps","codec":"h264"},{"id":null,"label":"HDMOV","prettyLabel":"HD MOV","filesize":"189.6 MB","filename":"videoblocks-unhealthy-young-african-american-woman-suffering-from-flu_bdba3yhsi_1080__D.mov","resolution":"1920 x 1080","fileExtension":"mov","quality":"HD","formatName":"HDMOV","downloadAjaxUrl":"\/video\/download-ajax\/346673065\/HDMOV","downloadUrl":"\/video\/download\/346673065\/HDMOV","exportUrl":"\/video\/cloud-export\/346673065\/HDMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"HDMP4","prettyLabel":"HD MP4","filesize":"2.5 MB","filename":"videoblocks-615ade16105bea1663ce70d6_h6a8bj_vk_1080__D.mp4","resolution":"1920 x 1080","fileExtension":"mp4","quality":"HD","formatName":"HDMP4","downloadAjaxUrl":"\/video\/download-ajax\/346673065\/HDMP4","downloadUrl":"\/video\/download\/346673065\/HDMP4","exportUrl":"\/video\/cloud-export\/346673065\/HDMP4","frameRate":"25 fps","codec":"h264"}],"selectedAcquisitionOption":[{"id":null,"label":"4KMOV","prettyLabel":"4K MOV","filesize":"860.1 MB","filename":"videoblocks-615ade16105bea1663ce70d6_H6a8bj_VK.mov","resolution":"3840 x 
2160","fileExtension":"mov","quality":"4K","formatName":"4KMOV","downloadAjaxUrl":"\/video\/download-ajax\/346673065\/4KMOV","downloadUrl":"\/video\/download\/346673065\/4KMOV","exportUrl":"\/video\/cloud-export\/346673065\/4KMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"4KMP4","prettyLabel":"4K MP4","filesize":"9.4 MB","filename":"videoblocks-615ade16105bea1663ce70d6_h6a8bj_vk_2160__D.mp4","resolution":"3840 x 2160","fileExtension":"mp4","quality":"4K","formatName":"4KMP4","downloadAjaxUrl":"\/video\/download-ajax\/346673065\/4KMP4","downloadUrl":"\/video\/download\/346673065\/4KMP4","exportUrl":"\/video\/cloud-export\/346673065\/4KMP4","frameRate":"25 fps","codec":"h264"},{"id":null,"label":"HDMOV","prettyLabel":"HD MOV","filesize":"133.6 MB","filename":"videoblocks-615ade16105bea1663ce70d6_h6a8bj_vk_1080__D.mov","resolution":"1920 x 1080","fileExtension":"mov","quality":"HD","formatName":"HDMOV","downloadAjaxUrl":"\/video\/download-ajax\/346673065\/HDMOV","downloadUrl":"\/video\/download\/346673065\/HDMOV","exportUrl":"\/video\/cloud-export\/346673065\/HDMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"HDMP4","prettyLabel":"HD MP4","filesize":"2.5 MB","filename":"videoblocks-615ade16105bea1663ce70d6_h6a8bj_vk_1080__D.mp4","resolution":"1920 x 1080","fileExtension":"mp4","quality":"HD","formatName":"HDMP4","downloadAjaxUrl":"\/video\/download-ajax\/346673065\/HDMP4","downloadUrl":"\/video\/download\/346673065\/HDMP4","exportUrl":"\/video\/cloud-export\/346673065\/HDMP4","frameRate":"25 fps","codec":"h264"}],"isFavorite":false,"topTags":[],"stockItemArtists":[],"moods":[],"genres":[]},{"stockItem":{"id":11122603,"contentClass":"video","contentId":346560856,"assetId":"SBV-346560856","title":"Young african american lady suffering from strong abdominal ache, lying on white bed wearing pajamas and 
frowning","description":null,"detailsUrl":"\/video\/stock\/young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning-rb-eiawtokqs0wd2g","previewUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/watermarks\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_ssjqitw6d__b6866d051bcc3a7bbf67d50c5f8d939b__P360.mp4","smallPreviewUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/watermarks\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_ssjqitw6d__b6866d051bcc3a7bbf67d50c5f8d939b__P180.mp4","thumbnailUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/thumbnails\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_ssjqitw6d_thumbnail-180_01.jpg","isMarketPlaceItem":false,"contributorPortalId":"rB-EiaWTOkqs0wd2g","distributionType":"RS","expirationDate":null,"shouldStockItemHaveNewFlag":false,"shouldStockItemHaveStaffPickFlag":false,"dateAdded":"2021-07-06 08:23:05","contentType":"footage","dateDistributionTypeUpdated":"2021-07-06 12:23:05","isActive":true,"mediumImageUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/thumbnails\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_ssjqitw6d_thumbnail-360_01.jpg","duration":19,"is360":false,"isTemplate":false,"collapsedSetId":"","numCollapses":null},"stockItemFormats":[{"id":null,"label":"HDMOV","prettyLabel":"HD MOV","filesize":"396.9 MB","filename":"videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_SSjQiTW6d.mov","resolution":"1920 x 
1080","fileExtension":"mov","quality":"HD","formatName":"HDMOV","downloadAjaxUrl":"\/video\/download-ajax\/11122603\/HDMOV","downloadUrl":"\/video\/download\/11122603\/HDMOV","exportUrl":"\/video\/cloud-export\/11122603\/HDMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"HDMP4","prettyLabel":"HD MP4","filesize":"3.2 MB","filename":"videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_ssjqitw6d_1080__D.mp4","resolution":"1920 x 1080","fileExtension":"mp4","quality":"HD","formatName":"HDMP4","downloadAjaxUrl":"\/video\/download-ajax\/11122603\/HDMP4","downloadUrl":"\/video\/download\/11122603\/HDMP4","exportUrl":"\/video\/cloud-export\/11122603\/HDMP4","frameRate":"25 fps","codec":"h264"}],"selectedAcquisitionOption":[{"id":null,"label":"HDMOV","prettyLabel":"HD MOV","filesize":"396.9 MB","filename":"videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_SSjQiTW6d.mov","resolution":"1920 x 1080","fileExtension":"mov","quality":"HD","formatName":"HDMOV","downloadAjaxUrl":"\/video\/download-ajax\/11122603\/HDMOV","downloadUrl":"\/video\/download\/11122603\/HDMOV","exportUrl":"\/video\/cloud-export\/11122603\/HDMOV","frameRate":"25 fps","codec":"prores"},{"id":null,"label":"HDMP4","prettyLabel":"HD MP4","filesize":"3.2 MB","filename":"videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_ssjqitw6d_1080__D.mp4","resolution":"1920 x 1080","fileExtension":"mp4","quality":"HD","formatName":"HDMP4","downloadAjaxUrl":"\/video\/download-ajax\/11122603\/HDMP4","downloadUrl":"\/video\/download\/11122603\/HDMP4","exportUrl":"\/video\/cloud-export\/11122603\/HDMP4","frameRate":"25 
fps","codec":"h264"}],"isFavorite":false,"topTags":[],"stockItemArtists":[],"moods":[],"genres":[]},{"stockItem":{"id":11126650,"contentClass":"video","contentId":346565766,"assetId":"SBV-346565766","title":"Young african american lady suffering from strong abdominal ache, lying on white bed wearing pajamas and frowning","description":null,"detailsUrl":"\/video\/stock\/young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning-sbr7rwkpukr09hxdi","previewUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/watermarks\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_rbumswkpu__cb16a49f7af5326267be1b675680fc9f__P360.mp4","smallPreviewUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/watermarks\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_rbumswkpu__cb16a49f7af5326267be1b675680fc9f__P180.mp4","thumbnailUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/thumbnails\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_rbumswkpu_thumbnail-180_01.jpg","isMarketPlaceItem":false,"contributorPortalId":"SBR7rwKpukr09hxdi","distributionType":"RS","expirationDate":null,"shouldStockItemHaveNewFlag":false,"shouldStockItemHaveStaffPickFlag":false,"dateAdded":"2021-07-12 02:45:58","contentType":"footage","dateDistributionTypeUpdated":"2021-07-12 
06:45:58","isActive":true,"mediumImageUrl":"https:\/\/dm0qx8t0i9gc9.cloudfront.net\/thumbnails\/video\/SuFLR1_Nwkex4zo1y\/videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_rbumswkpu_thumbnail-360_01.jpg","duration":19,"is360":false,"isTemplate":false,"collapsedSetId":"","numCollapses":null},"stockItemFormats":[{"id":null,"label":"HDMOV","prettyLabel":"HD MOV","filesize":"396.9 MB","filename":"videoblocks-young-african-american-lady-suffering-from-strong-abdominal-ache-lying-on-white-bed-wearing-pajamas-and-frowning_rBumSwKpu.mov","resolution":"1920 x 1080","fileExtension":"mov","quality":"HD","formatName":"HDMOV","downloadAjaxUrl":"\/video\/download-ajax\/4267085\/HDMOV","downloadUrl":"\/video\/download\/4267085\/HDMOV","exportUrl":"\/video\/cloud-export\/4267085\/HDMOV","frameRate":"25 fps","codec":"mjpeg"},{"id":null,"label":"HDMP4","prettyLabel":"HD MP4","filesize":"6.6 MB","filename":"videoblocks-a-young-sick-man-suffering-from-influenza-measuring-temperature-and-drinking-medicine_rvme1wxmeb_1080__D.mp4","resolution":"1920 x 1080","fileExtension":"mp4","quality":"HD","formatName":"HDMP4","downloadAjaxUrl":"\/video\/download-ajax\/4267085\/HDMP4","downloadUrl":"\/video\/download\/4267085\/HDMP4","exportUrl":"\/video\/cloud-export\/4267085\/HDMP4","frameRate":"25 
fps","codec":"h264"}],"isFavorite":false,"topTags":[],"stockItemArtists":[],"moods":[],"genres":[]}],"memberDownloadDate":"","makerHooks":{"makerPreCreateProjectRoute":"https:\/\/maker.storyblocks.com\/edit?videoIds=10852901","makerSignupRoute":"https:\/\/www.storyblocks.com\/join\/become-user\/maker\/edit?videoIds=10852901"},"canDownload":false,"mvtPages":{"premiere-pro-templates":"\/video\/premiere-pro-templates","apple-motion-templates":"\/video\/apple-motion-templates","intro":"\/video\/after-effects-templates\/intros","green-screen":"\/video\/footage\/green-screen","lower-thirds":"\/video\/after-effects-templates\/lower-thirds"},"canExportToCloud":false,"cloudExportSubfolders":[]}};
</script>
I am new to Python and bs4.
Here is the JSON data I want to scrape. From that `script defer` > `var initialState` > `{"stockItem":{"id":217104`, I only want `217104`, or `"downloadAjaxUrl":"/video/download-ajax/10852901/4KMP4"`, or a few more elements. What should I do?
I have tried these attempts:
import requests
from bs4 import BeautifulSoup
# Target page whose inline <script> holds the JSON state we want.
URL = 'https://www.storyblocks.com/video/stock/overheated-young-african-american-lady-suffering-from-high-temperature-indoors-belnttknlk6yv56x6'
HEADERS = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36', 'origin': URL}
page = requests.get(URL, headers=HEADERS)
soup = BeautifulSoup(page.content, features='lxml')
# select() returns a LIST of Tag objects matching the CSS selector.
script = soup.select('body > div.wrapper > script:nth-child(6)')
# or
# find_all()[24] is a single Tag picked by a hard-coded position — fragile
# if the page ever adds or removes a <script>.
function = soup.find_all('script')[24]
# NOTE: a Tag is not the script's text. Tag.__getitem__ looks up HTML
# *attributes* (self.attrs[key]), which is why the attempts below fail:
# function[30:] -> TypeError: unhashable type: 'slice'
# function["stockItem"] -> KeyError: 'stockItem'
# The JSON must be pulled out of the tag's text and parsed with json.loads.
print(script)
print(function)
#attempt 1
f = function[30:] # 30 for check
print(f)
#output
return self.attrs[key]
TypeError: unhashable type: 'slice'
#attempt 2
f = function["stockItem"]
print(f)
#output
return self.attrs[key]
KeyError: 'stockItem'
Can anyone please help me?

You'd use a regular expression to pull out that JSON pattern.
import requests
from bs4 import BeautifulSoup
import re
import json

# Fetch the detail page; the 'origin' header mirrors the original request.
URL = 'https://www.storyblocks.com/video/stock/overheated-young-african-american-lady-suffering-from-high-temperature-indoors-belnttknlk6yv56x6'
HEADERS = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36', 'origin': URL}
page = requests.get(URL, headers=HEADERS)
soup = BeautifulSoup(page.content, features='lxml')

# Locate the <script> whose text contains the initialState assignment instead
# of relying on a brittle positional index like soup.find_all('script')[24],
# which breaks as soon as the page adds or removes a script tag.
script = next(s for s in soup.find_all('script') if 'initialState' in s.get_text())

# The variable name carries a random numeric suffix (e.g. initialState8277),
# so match "initialState" followed by optional digits, then capture the whole
# object literal greedily up to the last closing brace on the statement.
jsonStr = re.search(r'var initialState\d*\s*=\s*({.*})', script.get_text()).group(1)
jsonData = json.loads(jsonStr)
Output:
# Drill into the parsed state and print the stock item's numeric id.
details = jsonData['details']
stockItem = details['stockItem']
print(stockItem['id'])
10852901

Related

how to iterate through a list of URL and save it to CSV?

import requests
from bs4 import BeautifulSoup
import string

headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36',
           'Accept-Language': 'en-US, en;q=0.5'}
URL = "https://www.amazon.com/TRESemm%C3%A9-Botanique-Shampoo-Nourish-Replenish/dp/B0199WNJE8/ref=sxin_14_pa_sp_search_thematic_sspa?content-id=amzn1.sym.a15c61b7-4b93-404d-bb70-88600dfb718d%3Aamzn1.sym.a15c61b7-4b93-404d-bb70-88600dfb718d&crid=2HG5WSUDCJBMZ&cv_ct_cx=hair%2Btresemme&keywords=hair%2Btresemme&pd_rd_i=B0199WNJE8&pd_rd_r=28d72361-7f35-4b1a-be43-98e7103da70c&pd_rd_w=6UL4P&pd_rd_wg=JtUqB&pf_rd_p=a15c61b7-4b93-404d-bb70-88600dfb718d&pf_rd_r=DFPZNAG391M5JS55R6HP&qid=1660432925&sprefix=hair%2Btresemme%2Caps%2C116&sr=1-3-a73d1c8c-2fd2-4f19-aa41-2df022bcb241-spons&smid=A3DEFW12560V8M&spLa=ZW5jcnlwdGVkUXVhbGlmaWVyPUExQlM3VFpGRVM5Tk8wJmVuY3J5cHRlZElkPUEwNjE5MjQwM01JV0FNN1pOMlRHSSZlbmNyeXB0ZWRBZElkPUEwNTA1MDQyMlQ5RjhRQUxIWEdaUiZ3aWRnZXROYW1lPXNwX3NlYXJjaF90aGVtYXRpYyZhY3Rpb249Y2xpY2tSZWRpcmVjdCZkb05vdExvZ0NsaWNrPXRydWU&th=1"
webpage = requests.get(URL, headers=headers)
# NOTE(review): no parser argument — bs4 will pick one and warn; left as-is
# to avoid changing the parse tree the selectors below were written against.
soup = BeautifulSoup(webpage.content)

# Run the expensive CSS lookup once and reuse the tokenised text for both
# the rank (first token) and the category (tokens 2..5), instead of
# repeating the identical select_one() chain twice.
best_seller_parts = soup.select_one('#detailBulletsWrapper_feature_div span:-soup-contains("Best Seller")').contents[2].get_text().split()
rank = best_seller_parts[0]
Category = ' '.join(best_seller_parts[2:6])

# Strip punctuation such as '#' and ',' from the rank string.
for char in string.punctuation:
    rank = rank.replace(char, '')
print(rank)
print(Category)
I have other URLs similar to this and I want to loop through them. Here are the links. How can I loop through them and save the results to a CSV file? Thank you very much in advance!
URL = ['https://www.amazon.com/Dove-Intensive-Concentrate-Technology-Protects/dp/B0B1VVXTKL',
'https://www.amazon.com/Dove-Intensive-Concentrate-Conditioner-Technology/dp/B0B1VXFLQ2']
You could use a for-loop to iterate the list:
# Visit each product URL in turn, fetching and parsing the page.
# (Indentation restored — the loop body was flattened in the paste.)
for url in URL:
    webpage = requests.get(url, headers=headers)
    soup = BeautifulSoup(webpage.content)
Note: Amazon does not want to be scraped, so it is only a question of time before they block you. You may want to use some delay, a rotating proxy, ...
Example
import requests
import pandas as pd
from bs4 import BeautifulSoup

headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36',
           'Accept-Language': 'en-US, en;q=0.5'}
URL = ['https://www.amazon.com/Dove-Intensive-Concentrate-Technology-Protects/dp/B0B1VVXTKL',
       'https://www.amazon.com/Dove-Intensive-Concentrate-Conditioner-Technology/dp/B0B1VXFLQ2']

# Collect one record per product page, then dump everything to CSV at once.
data = []
for url in URL:
    webpage = requests.get(url, headers=headers)
    soup = BeautifulSoup(webpage.content)
    data.append({
        'url': url,
        # contents[2] is the text node after the "Best Sellers Rank" label;
        # [1:] strips the leading '#' from the rank number.
        'rank': soup.select_one('#detailBulletsWrapper_feature_div span:-soup-contains("Best Seller")').contents[2].split()[0][1:],
        'category': soup.select_one('#detailBulletsWrapper_feature_div span:-soup-contains("Best Seller") a').text.split('Top 100 in ')[-1]
    })

pd.DataFrame(data).to_csv('myfile.csv', index=False)

Create Rows and Columns in BeautifulSoup

Below is my Python code and its output. I want the output as rows and columns in a DataFrame:
# Fetch and parse the vaccination page, then print each region's name and
# vaccinated percentage. (Indentation restored — the loop body was flattened
# in the paste.)
response = requests.get(source_data)
soup = BeautifulSoup(response.text, "html.parser")
States = soup.find_all('div', class_='card bg-darker p-3 mb-3')
for item in States:
    state_name = item.find(class_='fw-bold fs-5 mb-2').text
    vaccinated_per = item.find(class_='col-3 text-end fs-5 ff-s text-success').text
    print(state_name, vaccinated_per)
Output:
Flanders 80.24%
Wallonia 70.00%
Brussels 56.73%
Ostbelgien 65.11%
Collect your information in a list of dicts and then simply create a data frame from it:
# Build one dict per region card, then construct the DataFrame in a single
# call. (Indentation restored — the loop body was flattened in the paste.)
data = []
for item in States:
    data.append({
        'state_name': item.find(class_='fw-bold fs-5 mb-2').text,
        'vaccinated_per': item.find(class_='col-3 text-end fs-5 ff-s text-success').text
    })
pd.DataFrame(data)
Example
from bs4 import BeautifulSoup
import requests
import pandas as pd

# Complete example: scrape the per-state vaccination cards and build a
# DataFrame with one row per state. Indentation restored from the paste.
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36'}

response = requests.get('https://covid-vaccinatie.be/en', headers=headers)
soup = BeautifulSoup(response.text, "html.parser")

States = soup.find_all('div', class_='card bg-darker p-3 mb-3')
data = []
for item in States:
    data.append({
        'state_name': item.find(class_='fw-bold fs-5 mb-2').text,
        'vaccinated_per': item.find(class_='col-3 text-end fs-5 ff-s text-success').text
    })
pd.DataFrame(data)
Output
state_name vaccinated_per
0 Flanders 80.24%
1 Wallonia 70.00%
2 Brussels 56.73%
3 Ostbelgien 65.11%

How to extract data using beautiful soup

import requests
from bs4 import BeautifulSoup
import pandas as pd

# Scrape every clinic page linked from the Alabama index and print the <p>
# contents of each listing card. Indentation restored from the flattened
# paste (nesting inferred from the loop structure).
baseurl = 'https://locations.atipt.com/'
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36'
}

r = requests.get('https://locations.atipt.com/al')
soup = BeautifulSoup(r.content, 'html.parser')
tra = soup.find_all('ul', class_='list-unstyled')

# Collect the per-city detail-page URLs from the index lists.
productlinks = []
for links in tra:
    for link in links.find_all('a', href=True):
        comp = baseurl + link['href']
        productlinks.append(comp)

for link in productlinks:
    r = requests.get(link, headers=headers)
    soup = BeautifulSoup(r.content, 'html.parser')
    tag = soup.find_all('div', class_='listing content-card')
    for pro in tag:
        # NOTE(review): pro.find('a', class_='name') can return None when a
        # card has no such anchor, which makes the chained .find_all('p')
        # raise AttributeError — likely why this printed nothing; verify
        # against the live page markup.
        tup = pro.find('a', class_='name').find_all('p')
        for i in tup:
            print(i.get_text())
I am trying to extract data, but nothing is returned. I am trying to extract the data from the p tags. This is the page I am working with: https://locations.atipt.com/al/alabaster
The working solution so far using css selectors to get data from p tags as follows:
import requests
from bs4 import BeautifulSoup
import pandas as pd

# Working solution: collect the city links, then pull the address text from
# each listing card with a CSS selector. Indentation restored.
baseurl = 'https://locations.atipt.com/'
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36'
}

r = requests.get('https://locations.atipt.com/al')
soup = BeautifulSoup(r.content, 'html.parser')
tra = soup.find_all('ul', class_='list-unstyled')

productlinks = []
for links in tra:
    for link in links.find_all('a', href=True):
        comp = baseurl + link['href']
        productlinks.append(comp)

for link in productlinks:
    r = requests.get(link, headers=headers)
    soup = BeautifulSoup(r.content, 'html.parser')
    # The address lines live in the second child <div> of each listing card;
    # join their <p> texts and drop non-breaking spaces.
    tag = ''.join([x.get_text(strip=True).replace('\xa0', '') for x in soup.select('div.listing.content-card div:nth-child(2)>p')])
    print(tag)
Output:
634 1st Street NSte 100Alabaster, AL35007
9256 Parkway ESte ABirmingham, AL352061940 28th Ave SBirmingham, AL352095431 Patrick WaySte 101Birmingham, AL35235833 St. Vincent's DrSte 100Birmingham, AL352051401 Doug Baker BlvdSte 104Birmingham, AL35242
1877 Cherokee Ave SWCullman, AL350551301-A Bridge Creek Dr NECullman, AL35055
1821 Beltline Rd SWSte BDecatur, AL35601
4825 Montgomery HwySte 103Dothan, AL36303
550 Fieldstown RdGardendale, AL35071323 Fieldstown Rd, Ste 105Gardendale, AL35071
2804 John Hawkins PkwySte 104Hoover, AL35244
700 Pelham Rd NorthJacksonville, AL36265
1811 Hwy 78 ESte 108 & 109Jasper, AL35501-4081
76359 AL-77Ste CLincoln, AL35096
1 College DriveStation #14Livingston, AL35470
106 6th Street SouthSte AOneonta, AL35121-1823
50 Commons WaySte DOxford, AL36203
301 Huntley PkwyPelham, AL35124
41 Eminence WaySte BPell City, AL35128
124 W Grand AveSte A-4Rainbow City, AL35906
1147 US-231Ste 9 & 10Troy, AL36081
7201 Happy Hollow RdTrussville, AL35173
100 Rice Mine Road LoopSte 102Tuscaloosa, AL354061451 Dr. Edward Hillard DrSte 130Tuscaloosa, AL35401
3735 Corporate Woods DrSte 109Vestavia, AL35242-2296
636 Montgomery HwyVestavia Hills, AL352161539 Montgomery HwySte 111Vestavia Hills, AL35216

How to grab specific items from entire json response api calls

I want to grab only the Symbol and Company Name items from the JSON response, but I am getting all of the data. How can I get just those fields and store them in a pandas DataFrame?
Base_url
My code:
import requests
import pandas as pd

params = {
    'sectorID': 'All',
    '_': '1630217365368'}
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36'}

def main(url):
    # Fetch the issuers directory and dump each record; the DataFrame
    # construction the asker attempted is left commented out, as posted.
    with requests.Session() as req:
        req.headers.update(headers)
        sym = []
        name = []
        r = req.get(url, params=params, headers=headers)
        for item in r.json()['data']:
            print(item)
            # sym.append(item['symbol']),
            # name.append(item['lonaName'])
            # df = pd.DataFrame(sym, name, columns=[["Symble","Company name"]])
            # print(df)

main('https://www.saudiexchange.sa/wps/portal/tadawul/market-participants/issuers/issuers-directory/!ut/p/z1/04_Sj9CPykssy0xPLMnMz0vMAfIjo8zi_Tx8nD0MLIy8DTyMXAwczVy9vV2cTY0MnEz1w8EKjIycLQwtTQx8DHzMDYEK3A08A31NjA0CjfWjSNLv7ulnbuAY6OgR5hYWYgzUQpl-AxPi9BvgAI4GhPVHgZXgCwFUBVi8iFcByA9gBXgcWZAbGhoaYZDpma6oCABqndOv/p0/IZ7_NHLCH082KOAG20A6BDUU6K3082=CZ6_NHLCH082K0H2D0A6EKKDC520B5=N/')
You need to fix the way you are creating the DataFrame:
import requests
import pandas as pd

params = {
    'sectorID': 'All',
    '_': '1630217365368'}
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36'}

def main(url):
    # Collect symbol and long name for every issuer, then build the frame
    # from a dict so each list becomes a properly named column.
    with requests.Session() as req:
        req.headers.update(headers)
        sym = []
        name = []
        r = req.get(url, params=params, headers=headers)
        for item in r.json()['data']:
            sym.append(item['symbol'])
            name.append(item['lonaName'])
        df = pd.DataFrame({'symbol': sym, 'longName': name})
        print(df)

main('https://www.saudiexchange.sa/wps/portal/tadawul/market-participants/issuers/issuers-directory/!ut/p/z1/04_Sj9CPykssy0xPLMnMz0vMAfIjo8zi_Tx8nD0MLIy8DTyMXAwczVy9vV2cTY0MnEz1w8EKjIycLQwtTQx8DHzMDYEK3A08A31NjA0CjfWjSNLv7ulnbuAY6OgR5hYWYgzUQpl-AxPi9BvgAI4GhPVHgZXgCwFUBVi8iFcByA9gBXgcWZAbGhoaYZDpma6oCABqndOv/p0/IZ7_NHLCH082KOAG20A6BDUU6K3082=CZ6_NHLCH082K0H2D0A6EKKDC520B5=N/')
symbol longName
0 1330 Abdullah A. M. Al-Khodari Sons Co.
1 4001 Abdullah Al Othaim Markets Co.
2 4191 Abdullah Saad Mohammed Abo Moati for Bookstore...
3 1820 Abdulmohsen Alhokair Group for Tourism and Dev...
4 2330 Advanced Petrochemical Co.
.. ... ...
199 3020 Yamama Cement Co.
200 3060 Yanbu Cement Co.
201 2290 Yanbu National Petrochemical Co.
202 3007 Zahrat Al Waha for Trading Co.
203 2240 Zamil Industrial Investment Co.
To get all data from the site, you can use their API:
import requests
import pandas as pd

# Pull the full ticker list straight from the exchange's JSON endpoint and
# flatten the nested records into a table.
url = "https://www.saudiexchange.sa/tadawul.eportal.theme.helper/TickerServlet"
data = requests.get(url).json()
# print(json.dumps(data, indent=4))
df = pd.json_normalize(data["stockData"])
print(df)
Prints:
pk_rf_company companyShortNameEn companyShortNameAr companyLongNameEn companyLongNameAr highPrice lowPrice noOfTrades previousClosePrice todaysOpen transactionDate turnOver volumeTraded aveTradeSize change changePercent lastTradePrice transactionDateStr
0 4700 Alkhabeer Income الخبير للدخل Al Khabeer Diversified Income Traded Fund صندوق الخبير للدخل المتنوع المتداول None None 308 None None None 1.293560e+06 142791 463.61 0.01 0.11 9.07 None
1 2030 SARCO المصافي Saudi Arabia Refineries Co. شركة المصافي العربية السعودية None None 877 None None None 1.352797e+07 83391 95.09 -0.40 -0.25 162.20 None
2 2222 SAUDI ARAMCO أرامكو السعودية Saudi Arabian Oil Co. شركة الزيت العربية السعودية None None 4054 None None None 6.034732e+07 1731463 427.10 0.05 0.14 34.90 None
...and so on.
To get only symbol/company name:
print(df[["pk_rf_company", "companyLongNameEn"]])
pk_rf_company companyLongNameEn
0 4700 Al Khabeer Diversified Income Traded Fund
1 2030 Saudi Arabia Refineries Co.
2 2222 Saudi Arabian Oil Co.
...and so on.
It will be way faster if you store data in pandas DataFrame and later process it.
Example Code:
import requests
import pandas as pd

params = {
    'sectorID': 'All',
    '_': '1630217365368'}
headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36'}

def main(url):
    # Load the whole payload into a DataFrame first, then keep only the
    # first two columns (symbol and company name) and rename them.
    with requests.Session() as req:
        req.headers.update(headers)
        r = req.get(url, params=params, headers=headers)
        data = r.json()['data']
        df_main = pd.DataFrame(data)
        df_min = df_main.iloc[:, 0:2]
        df_min.columns = ['Symbol', 'Company name']
        print(df_min)

main('https://www.saudiexchange.sa/wps/portal/tadawul/market-participants/issuers/issuers-directory/!ut/p/z1/04_Sj9CPykssy0xPLMnMz0vMAfIjo8zi_Tx8nD0MLIy8DTyMXAwczVy9vV2cTY0MnEz1w8EKjIycLQwtTQx8DHzMDYEK3A08A31NjA0CjfWjSNLv7ulnbuAY6OgR5hYWYgzUQpl-AxPi9BvgAI4GhPVHgZXgCwFUBVi8iFcByA9gBXgcWZAbGhoaYZDpma6oCABqndOv/p0/IZ7_NHLCH082KOAG20A6BDUU6K3082=CZ6_NHLCH082K0H2D0A6EKKDC520B5=N/')
Output:

Pick only one number from an html page with beatifulsoup

I have this url from coronavirus worldwide and I would like to pick only one number, the newcases in Arizona which is +2383 right now.
import requests
from bs4 import BeautifulSoup
import lxml

# Fetch the US coronavirus table and pull Arizona's new-cases figure.
url = "https://www.worldmeter.com/coronavirus/us/"
page = requests.get("https://www.worldmeter.com/coronavirus/us/")
soup = BeautifulSoup(page.content, "lxml")
page.close()
# Bug fix: the keyword must be `href=`, not `href_=`. BeautifulSoup only
# strips the trailing underscore for `class_`; `href_` filters on a literal
# attribute named "href_", so find() returned None and .get_text() raised
# AttributeError. NOTE(review): the row may also be rendered by JavaScript,
# in which case it is absent from the static HTML — verify.
newcases = soup.find('a', href="https://worldmeter.com/coronavirus/arizona", class_="tableRowLinkYellow newCasesStates").get_text(strip=True)
print(newcases)
I get this error:
AttributeError: 'NoneType' object has no attribute 'get_text'
How do I pick only that number from the whole table? Thank you for your time.
Just like Linh said, the table is generated by JavaScript. Using Selenium is an easy way but not efficient enough (it is too slow).
You could scrape the API directly:
import requests

# The worldmeter table is populated by JavaScript, so query the backing
# ninja-tables AJAX endpoint directly instead of parsing the HTML.
url = "https://worldmeter.com/coronavirus/wp-admin/admin-ajax.php?action=wp_ajax_ninja_tables_public_action&table_id=2582&target_action=get-all-data&default_sorting=old_first"
headers = {
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36",
}

results = requests.get(url, headers=headers).json()
for result in results:
    if result["state_name"] == "Arizona":
        print(result)
        print("The newcases is", result["new_cases"])
And this gave me:
{'state_name': 'Arizona', 'positive': '275,436', 'new_cases': '2,383', 'death_in_states': '6,302', 'new_deaths': '2', 'recovered_states': '45,400', 'new_recovered': '364', 'totaltestresults': 'Arizona', 'postname': 'arizona', 'cases_100_k_population': '3,866.37', 'state_population': '7278717', 'death_100_k_population': '88.46'}
The newcases is 2,383

Categories

Resources