ElementNotInteractableException in Python Selenium - python

So I am trying to scrape this page:
https://www.tripadvisor.com/CheapFlightsHome
but whenever I try to click on the element for choosing the flight class, it just gives this error:
File "e:\code\Python\non machine learning projects\web scrabbing\Projects\flight-anlaysis\flight-anlaysis.py", line 128, in <module>
extra_info("Economy" , 2 , 0 , 3)
File "e:\code\Python\non machine learning projects\web scrabbing\Projects\flight-anlaysis\flight-anlaysis.py", line 79, in extra_info
drop_down_btn = WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH , '//span[#class = "ui_icon caret-down open-close"]')))
File "C:\Users\user\anaconda3\envs\mix\lib\site-packages\selenium\webdriver\support\wait.py", line 95, in until
raise TimeoutException(message, screen, stacktrace)
selenium.common.exceptions.TimeoutException: Message:
The Python code:
def extra_info(wclass, noa, nos, noc):  # noa: number of adults, nos: number of seniors, noc: number of children
    # Clicking the main button
    mbtn = driver.find_element(By.XPATH, '//span[@class = "summaryContainer target"]')
    mbtn.click()
    time.sleep(2)
    mdiv = driver.find_element(By.XPATH, '//div[@class = "prw_rup prw_flights_cos_passenger_picker cosPassengerPopover"]')
    time.sleep(2)
    mmdiv = mdiv.find_element(By.XPATH, '//div[@class = "popoverContents"]')
    wclassbtn = mmdiv.find_element(By.XPATH, '//div[@class = "picker-inner localizationCosStrings"]')
    drop_down_btn = WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH, '//span[@class = "ui_icon caret-down open-close"]')))
    time.sleep(5)
    drop_down_btn.click()
    plane_grades = []
    eco = driver.find_element(By.XPATH, '//li[text() = "Economy"]')
    peco = driver.find_element(By.XPATH, '//li[text() = "Premium Economy"]')
    bus = driver.find_element(By.XPATH, '//li[text() = "Business Class"]')
    fc = driver.find_element(By.XPATH, '//li[text() = "First Class"]')
    plane_grades.extend([eco, peco, bus, fc])
    for i, plane in enumerate(plane_grades):  # this for loop chooses the class grade for the plane
        if plane.text == wclass:
            choosen_btn = plane_grades[i]
            choosen_btn.click()
    # checking now for the noa, nos and noc
    adult_counter_div = driver.find_element(By.XPATH, '//div[@class = "adultCounter counter"]')
    senior_counter_div = driver.find_element(By.XPATH, '//div[@class = "seniorCounter counter"]')
    child_counter_div = driver.find_element(By.XPATH, '//div[@class = "childrenCounter counter"]')
    if noa <= 6 and nos <= 6 and noc <= 5:  # checking the count of the number of tickets
        add_adult_btn = adult_counter_div.find_element(By.XPATH, '//span[@class = "ui_icon plus-circle enabled"]')
        add_senior_btn = senior_counter_div.find_element(By.XPATH, '//span[@class = "ui_icon plus-circle enabled"]')
        add_child_btn = child_counter_div.find_element(By.XPATH, '//span[@class = "ui_icon plus-circle enabled"]')
        # Clicking each button the required number of times
        for i in range(noa):
            add_adult_btn.click()
        for i in range(nos):
            add_senior_btn.click()
        for i in range(noc):
            add_child_btn.click()
    else:
        print('MORE THAN THE LIMIT')
Thanks.

Please try to use WebDriverWait with expected conditions:
from selenium.webdriver.support import expected_conditions as EC
...
...
...
WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH , '//span[@class = "ui_icon caret-down open-close"]'))).click()
Find more details here - https://selenium-python.readthedocs.io/waits.html
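As a rough sketch of how the start of extra_info() could look with explicit waits instead of time.sleep() (the XPath values are the ones from the question and may need adjusting if TripAdvisor changes its markup):

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

driver = webdriver.Chrome()
driver.get('https://www.tripadvisor.com/CheapFlightsHome')
wait = WebDriverWait(driver, 20)

# open the passenger/class picker, then the class drop-down, waiting for each to be clickable
wait.until(EC.element_to_be_clickable((By.XPATH, '//span[@class = "summaryContainer target"]'))).click()
wait.until(EC.element_to_be_clickable((By.XPATH, '//span[@class = "ui_icon caret-down open-close"]'))).click()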

To send the character sequence Bangalore within the From field on TripAdvisor you need to induce WebDriverWait for element_to_be_clickable(), and you can use the following locator strategy:
driver.get('https://www.tripadvisor.com/CheapFlightsHome')
from_where = WebDriverWait(driver, 5).until(EC.element_to_be_clickable((By.XPATH, "//input[@placeholder='From where?']")))
from_where.click()
from_where.clear()
from_where.send_keys("Bangalore")
WebDriverWait(driver, 5).until(EC.element_to_be_clickable((By.XPATH, "//div[@class='ui_typeahead_results']/ul/li"))).click()
Note: You have to add the following imports :
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC

Related

Selenium ElementClickInterceptedException when headless = True

I am running Selenium code on the DNCA website to scrape some of the document links. I am trying to get the links of each value in the drop-down for each section shown on this page. My code works fine, but when I run the same code with the option headless = True, I get the following error:
ElementClickInterceptedException: element click intercepted: Element <li data-original-index="0">...</li> is not clickable at point (226, 250). Other element would receive the click: <div class="col-md-12">...</div>
(Session info: headless chrome=104.0.5112.81)
Code:
def get_active_row(active_tab, fund_id):
    active_row = active_tab.find_elements(By.XPATH, ".//tr[@style='' or @style='display: table-row;'][@fund-id = '{}']".format(fund_id))
    try:
        assert len(active_row) == 1
        active_row = active_row[0]
        return active_row
    except AssertionError as asserr:
        print(asserr, ' -- More than one active row for the fund id: ', fund_id)
        sys.exit(1)
    except Exception as err:
        print(err, ' -- fund id:', fund_id)
        sys.exit(1)

def scrap(driver):
    tab_list = driver.find_element(By.XPATH, "//ul[contains(@role, 'tablist')]")
    tab_list_names = tab_list.find_elements(By.XPATH, './/li')
    data_list = []
    for loc, tab_name in enumerate(tab_list_names):
        if loc < 20:
            tab_name.click()
            html = driver.page_source
            soup = BeautifulSoup(html)
            bs_active_tab = soup.find('div', {'class': 'tab-pane table-datas active'})
            bs_headers = bs_active_tab.find('thead')
            headers = [i.text for i in bs_headers.find_all('td')]
            active_tab = driver.find_element(By.XPATH, "//div[contains(@class, 'tab-pane table-datas active')]")
            unique_fund_ids = [i_fund.get_attribute('fund-id') for i_fund in active_tab.find_elements(By.XPATH, ".//tr[@style]") if i_fund.get_attribute('fund-id') != '-']
            lookup = set()
            unique_fund_ids = [x for x in unique_fund_ids if x not in lookup and lookup.add(x) is None]
            for fund_id in unique_fund_ids:  # Iterate over each fund
                active_row = get_active_row(active_tab, fund_id)
                active_row.find_element(By.XPATH, './/button').click()
                isin_list = [i.text for i in active_row.find_elements(By.XPATH, './/li')]
                for pos, isin_val in enumerate(isin_list):
                    isin_selected = active_row.find_elements(By.XPATH, './/li')[pos]
                    isin_selected.click()
                    active_row = get_active_row(active_tab, fund_id)
                    fund_name = ''
                    for pos_inner, td in enumerate(active_row.find_elements(By.XPATH, ".//td")):
                        a_tag = td.find_elements(By.XPATH, ".//a")
                        if len(a_tag) == 1:
                            a_tag = a_tag[0]
                            if pos_inner == 0:
                                fund_name = a_tag.text
                            link = a_tag.get_attribute('href')
                            data_list.append([tab_name.text, fund_name, isin_val, headers[pos_inner], link])
                        else:
                            data_list.append([tab_name.text, fund_name, isin_val, headers[pos_inner], ''])
                active_row = get_active_row(active_tab, fund_id)
                active_row.find_element(By.XPATH, './/button').click()
                isin_selected_to_close = active_row.find_elements(By.XPATH, './/li')[0]
                isin_selected_to_close.click()
            tlg_tr_tab = active_tab.find_element(By.XPATH, ".//tr[@fund-id='-']")
            for tlg_pos_inner, tlg_td in enumerate(tlg_tr_tab.find_elements(By.XPATH, ".//td")):
                tlg_a_tag = tlg_td.find_elements(By.XPATH, ".//a")
                if len(tlg_a_tag) == 1:
                    tlg_a_tag = tlg_a_tag[0]
                    tlg_link = tlg_a_tag.get_attribute('href')  # Get document link
                    data_list.append([tab_name.text, 'Toute la gamme', '', headers[tlg_pos_inner], tlg_link])
                else:
                    data_list.append([tab_name.text, 'Toute la gamme', '', headers[tlg_pos_inner], ''])
    dataset_links = pd.DataFrame(data_list, columns=['Tab', 'Fund Name', 'ISIN', 'Type', 'Link'])
    driver.quit()
Can someone please explain why it works fine with headless = False but not with headless = True?
In headless mode the default screen size is very small, significantly smaller than the screen size in regular mode.
So, to overcome this problem you need to set the screen size.
It can be done in the following ways:
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service

options = Options()
options.add_argument("--headless")
options.add_argument("--window-size=1920,1080")
webdriver_service = Service(r'C:\webdrivers\chromedriver.exe')
driver = webdriver.Chrome(service=webdriver_service, options=options)
Or just
driver.set_window_size(1920, 1080)
Both approaches should work.
I prefer the first way :)
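If it helps to see the difference, here is a minimal sketch for checking the viewport size before and after resizing (assuming chromedriver is on PATH; the exact default size may vary by Chrome version):

from selenium import webdriver
from selenium.webdriver.chrome.options import Options

options = Options()
options.add_argument("--headless")
driver = webdriver.Chrome(options=options)

# Headless Chrome typically starts with a small default viewport (around 800x600),
# which is why elements end up covered or outside the visible area.
print(driver.get_window_size())

driver.set_window_size(1920, 1080)
print(driver.get_window_size())  # now reports the larger viewport
driver.quit()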

selenium multithreading python

I wrote a Selenium script to check the case statuses of USCIS cases, and I want to speed it up as I am trying to check more than 500 cases every time.
How can I use it with multithreading via the concurrent.futures library in Python?
import re
import json
from datetime import date
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.common.exceptions import NoSuchElementException
from multiprocessing import Pool
import concurrent.futures

options = Options()
options.add_argument('--headless')
options.add_argument('--disable-gpu')
# browser = webdriver.Chrome('PATH', options=options)
today = str(date.today())  # date stamp stored with every scraped case record
cases = []

def getStatus(CN):
    browser = webdriver.Chrome('PATH', options=options)
    browser.get("https://egov.uscis.gov/casestatus/landing.do")
    serachField = browser.find_element_by_xpath('/html/body/div[2]/form/div/div[1]/div/div[1]/fieldset/div[1]/div[4]/input')
    serachField.click()
    serachField.send_keys(CN)
    searchButton = browser.find_element_by_xpath('/html/body/div[2]/form/div/div[1]/div/div[1]/fieldset/div[2]/div[2]/input')
    searchButton.click()
    try:
        outputFieldHeading = browser.find_element_by_xpath('/html/body/div[2]/form/div/div[1]/div/div/div[2]/div[3]/h1')
        outputField = browser.find_element_by_xpath('/html/body/div[2]/form/div/div[1]/div/div/div[2]/div[3]/p')
        dateMatch = re.search(r'\w+\s\d+,\s\d+', outputField.text)
        try:
            formMatch = re.search(r'([I][-]\d+,^)|([I][-]\d+\w)', outputField.text)
            formNumber = formMatch.group()
        except:
            formNumber = "Form Unknown"
        cases.append({'caseNumber': CN, 'currentDate': today, 'Date': dateMatch.group(), 'FormNumber': formNumber, 'Status': outputFieldHeading.text, 'Description': outputField.text})
        print(f"{CN} : {outputFieldHeading.text} : {dateMatch.group()} : {formNumber}")
        return f"{CN} : {outputFieldHeading.text} : {dateMatch.group()} : {formNumber}"
    except NoSuchElementException:
        cases.append({'caseNumber': CN, 'currentDate': today, 'Date': "Unknown", 'FormNumber': "Unknown Form", 'Status': "Not Found", 'Description': ""})
        print(f"{CN} : Not Found")
        return f"{CN} : Not Found"
casenumbers = ["EAC2134250100", "EAC2134250101", "EAC2134250102", "EAC2134250103", "EAC2134250104", "EAC2134250105", "EAC2134250106", "EAC2134250107", "EAC2134250108", "EAC2134250109", "EAC2134250110", "EAC2134250111", "EAC2134250112", "EAC2134250113", "EAC2134250114", "EAC2134250115", "EAC2134250116", "EAC2134250117", "EAC2134250118", "EAC2134250119", "EAC2134250120", "EAC2134250121", "EAC2134250122", "EAC2134250123", "EAC2134250124", "EAC2134250125", "EAC2134250126", "EAC2134250127", "EAC2134250128", "EAC2134250129", "EAC2134250130", "EAC2134250131", "EAC2134250132", "EAC2134250133", "EAC2134250134", "EAC2134250135", "EAC2134250136", "EAC2134250137", "EAC2134250138", "EAC2134250139", "EAC2134250140", "EAC2134250141", "EAC2134250142", "EAC2134250143", "EAC2134250144", "EAC2134250145", "EAC2134250146", "EAC2134250147", "EAC2134250148", "EAC2134250149", "EAC2134250150", "EAC2134250151", "EAC2134250152", "EAC2134250153", "EAC2134250154", "EAC2134250155", "EAC2134250156", "EAC2134250157", "EAC2134250158", "EAC2134250159", "EAC2134250160", "EAC2134250161", "EAC2134250162", "EAC2134250163", "EAC2134250164", "EAC2134250165", "EAC2134250166", "EAC2134250167", "EAC2134250168", "EAC2134250169", "EAC2134250170", "EAC2134250171", "EAC2134250172", "EAC2134250173", "EAC2134250174", "EAC2134250175", "EAC2134250176", "EAC2134250177", "EAC2134250178", "EAC2134250179", "EAC2134250180", "EAC2134250181", "EAC2134250182", "EAC2134250183", "EAC2134250184", "EAC2134250185", "EAC2134250186", "EAC2134250187", "EAC2134250188", "EAC2134250189", "EAC2134250190", "EAC2134250191", "EAC2134250192", "EAC2134250193", "EAC2134250194", "EAC2134250195", "EAC2134250196", "EAC2134250197", "EAC2134250198", "EAC2134250199", "EAC2134250200", "EAC2134250201", "EAC2134250202", "EAC2134250203", "EAC2134250204", "EAC2134250205", "EAC2134250206", "EAC2134250207", "EAC2134250208", "EAC2134250209", "EAC2134250210", "EAC2134250211", "EAC2134250212", "EAC2134250213", "EAC2134250214", "EAC2134250215", "EAC2134250216", "EAC2134250217", "EAC2134250218", "EAC2134250219", "EAC2134250220", "EAC2134250221", "EAC2134250222", "EAC2134250223", "EAC2134250224", "EAC2134250225", "EAC2134250226", "EAC2134250227", "EAC2134250228", "EAC2134250229", "EAC2134250230", "EAC2134250231", "EAC2134250232", "EAC2134250233", "EAC2134250234", "EAC2134250235", "EAC2134250236", "EAC2134250237", "EAC2134250238", "EAC2134250239", "EAC2134250240", "EAC2134250241", "EAC2134250242", "EAC2134250243", "EAC2134250244", "EAC2134250245", "EAC2134250246", "EAC2134250247", "EAC2134250248", "EAC2134250249", "EAC2134250250", "EAC2134250251", "EAC2134250252", "EAC2134250253", "EAC2134250254", "EAC2134250255", "EAC2134250256", "EAC2134250257", "EAC2134250258", "EAC2134250259", "EAC2134250260", "EAC2134250261", "EAC2134250262", "EAC2134250263", "EAC2134250264", "EAC2134250265", "EAC2134250266", "EAC2134250267", "EAC2134250268", "EAC2134250269", "EAC2134250270", "EAC2134250271", "EAC2134250272", "EAC2134250273", "EAC2134250274", "EAC2134250275", "EAC2134250276", "EAC2134250277", "EAC2134250278", "EAC2134250279", "EAC2134250280", "EAC2134250281", "EAC2134250282", "EAC2134250283", "EAC2134250284", "EAC2134250285", "EAC2134250286", "EAC2134250287", "EAC2134250288", "EAC2134250289", "EAC2134250290", "EAC2134250291", "EAC2134250292", "EAC2134250293", "EAC2134250294", "EAC2134250295", "EAC2134250296", "EAC2134250297", "EAC2134250298", "EAC2134250299", "EAC2134250300", "EAC2134250301", "EAC2134250302", "EAC2134250303", "EAC2134250304", "EAC2134250305", "EAC2134250306", "EAC2134250307", 
"EAC2134250308", "EAC2134250309", "EAC2134250310", "EAC2134250311", "EAC2134250312", "EAC2134250313", "EAC2134250314", "EAC2134250315", "EAC2134250316", "EAC2134250317", "EAC2134250318", "EAC2134250319", "EAC2134250320", "EAC2134250321", "EAC2134250322", "EAC2134250323", "EAC2134250324", "EAC2134250325", "EAC2134250326", "EAC2134250327", "EAC2134250328", "EAC2134250329", "EAC2134250330", "EAC2134250331", "EAC2134250332", "EAC2134250333", "EAC2134250334", "EAC2134250335", "EAC2134250336", "EAC2134250337", "EAC2134250338", "EAC2134250339", "EAC2134250340", "EAC2134250341", "EAC2134250342", "EAC2134250343", "EAC2134250344", "EAC2134250345", "EAC2134250346", "EAC2134250347", "EAC2134250348", "EAC2134250349", "EAC2134250350", "EAC2134250351", "EAC2134250352", "EAC2134250353", "EAC2134250354", "EAC2134250355", "EAC2134250356", "EAC2134250357", "EAC2134250358", "EAC2134250359", "EAC2134250360", "EAC2134250361", "EAC2134250362", "EAC2134250363", "EAC2134250364", "EAC2134250365", "EAC2134250366", "EAC2134250367", "EAC2134250368", "EAC2134250369", "EAC2134250370", "EAC2134250371", "EAC2134250372", "EAC2134250373", "EAC2134250374", "EAC2134250375", "EAC2134250376", "EAC2134250377", "EAC2134250378", "EAC2134250379", "EAC2134250380", "EAC2134250381", "EAC2134250382", "EAC2134250383", "EAC2134250384", "EAC2134250385", "EAC2134250386", "EAC2134250387", "EAC2134250388", "EAC2134250389", "EAC2134250390", "EAC2134250391", "EAC2134250392", "EAC2134250393", "EAC2134250394", "EAC2134250395", "EAC2134250396", "EAC2134250397", "EAC2134250398", "EAC2134250399", "EAC2134250400", "EAC2134250401", "EAC2134250402", "EAC2134250403", "EAC2134250404", "EAC2134250405", "EAC2134250406", "EAC2134250407", "EAC2134250408", "EAC2134250409", "EAC2134250410", "EAC2134250411", "EAC2134250412", "EAC2134250413", "EAC2134250414", "EAC2134250415", "EAC2134250416", "EAC2134250417", "EAC2134250418", "EAC2134250419"]
if __name__ == '__main__':
    with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
        results = [executor.submit(getStatus, x) for x in casenumbers]
    print(results)
This is not working and I get nothing printed in the terminal. How can I improve this code? Thanks.
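One likely reason nothing useful is printed is that executor.submit() returns Future objects, so print(results) only shows their reprs rather than the strings returned by getStatus. A minimal sketch (assuming getStatus and casenumbers as defined above) that collects each worker's return value as it completes:

import concurrent.futures

if __name__ == '__main__':
    with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
        # submit one task per case number and keep a mapping back to the case number
        futures = {executor.submit(getStatus, cn): cn for cn in casenumbers}
        for future in concurrent.futures.as_completed(futures):
            try:
                print(future.result())  # the string returned by getStatus
            except Exception as exc:
                print(f"{futures[future]} raised {exc!r}")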

selenium.common.exceptions.NoSuchElementException error extracting element text from #shadow-root (open) using Selenium Python

I am currently using Selenium to take the product information from Schneider Electric and this is the error I am receiving:
selenium.common.exceptions.NoSuchElementException: Message:
no such element: Unable to locate element:
{"method":"xpath","selector":"/html/body/div[2]/main/div[5]/ul/li/div/div/div/div/div/ul/li[1]/div/div/div[2]/div[2]/section/div/product-cards-wrapper//div/ul/li[1]/product-card/article/div/div[1]/product-card-main-info//div/pes-router-link[2]/a/h3"}
Currently, the website I am trying to pull this information from is this URL:
https://www.se.com/us/en/product-range/63426-powerlogic-accusine-pcs%2B/?N=4176697776&No=0&Nrpp=12
The XPath is for the description of their products, which according to my inspection and findings is this:
/html/body/div[2]/main/div[5]/ul/li/div/div/div/div/div/ul/li[1]/div/div/div[2]/div[2]/section/div/product-cards-wrapper//div/ul/li[1]/product-card//article/div/div[1]/product-card-main-info//div/pes-router-link[2]/a/h3
Any ideas??
Current Code:
def page_function():
    driver.get('https://www.se.com/us/en/product-range/63426-powerlogic-accusine-pcs%2B/?N=4176697776&No=12&Nrpp=12')
    driver.maximize_window()
    # gets the amount of items in the search bar
    print("Number of products:", 69)
    # for loop to read the product name and descriptions
    # product = driver.find_element(By.CSS_SELECTOR, ".search-item")
    # product = product.text
    # print(product)
    pr = "]/product-card//article/div/div[2]/div[1]/pes-product-price/p/span[1]"
    nam = "]/product-card//article/div/div[1]/product-card-main-info//div/pes-router-link[1]/a"
    des = "]/product-card//article/div/div[1]/product-card-main-info//div/pes-router-link[2]/a/h3"
    # des_path = "#search-items > .search-item .details > a > .row.pt-5.pb-sm-5 > .multilines-3.text-truncate-multilines.xs-single-col-8.col-12 > .font-weight-bold.text-dark"
    follow_loop = range(1, 70)
    for x in follow_loop:
        y = x
        if (x > 61):
            y = x - 60
        elif (x > 49):
            y = x - 48
        elif (x > 37):
            y = x - 36
        elif (x > 25):
            y = x - 24
        elif (x > 13):
            y = x - 12
        else:
            print("")
        if ((x % 13) == 0):
            driver.delete_all_cookies()
            next_arrow = driver.find_element(By.CLASS_NAME, "page-links__arrow page-links__arrow--next js-page-link js-page-link-next")
            driver.execute_script("arguments[0].click();", next_arrow)
        xpath = "/html/body/div[2]/main/div[5]/ul/li/div/div/div/div/div/ul/li[1]/div/div/div[2]/div[2]/section/div/product-cards-wrapper//div/ul/li["
        xpath += str(y)
        xpath += des
        driver.implicitly_wait(5)
        description.append(driver.find_element(By.XPATH, xpath))
        xpath2 = xpath.replace(des, '')
        xpath2 += pr
        unit_price.append(driver.find_element(By.XPATH, xpath2).text)
        xpath3 = xpath2.replace(pr, '')
        xpath3 += nam
        name.append(driver.find_element(By.XPATH, xpath3).text)
The product description is within a #shadow-root (open)
Solution
To extract the desired text you need to use shadowRoot.querySelector() and you can use the following locator strategy:
driver.get("https://www.se.com/us/en/product-range/63426-powerlogic-accusine-pcs%2B/?N=4176697776&No=0&Nrpp=12")
time.sleep(5)
description = driver.execute_script('''return document.querySelector("product-cards-wrapper.hydrated").shadowRoot.querySelector("product-card.hydrated").shadowRoot.querySelector("product-card-main-info.hydrated").shadowRoot.querySelector("pes-router-link.description.hydrated a > h3")''')
print(description.text)
Console Output:
Active harmonic filter - 60 A 380..480 V AC - IP00 enclosure
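If you need the description of every product card on the page rather than only the first one, a minimal sketch (reusing the same selectors as above and assuming every card exposes the same nested shadow roots) can collect them all in a single execute_script() call:

import time
from selenium import webdriver

driver = webdriver.Chrome()
driver.get("https://www.se.com/us/en/product-range/63426-powerlogic-accusine-pcs%2B/?N=4176697776&No=0&Nrpp=12")
time.sleep(5)

# Walk each nested shadow root in JavaScript and return the description text of every card
descriptions = driver.execute_script('''
    const cards = document
        .querySelector("product-cards-wrapper.hydrated").shadowRoot
        .querySelectorAll("product-card.hydrated");
    return Array.from(cards).map(card => card.shadowRoot
        .querySelector("product-card-main-info.hydrated").shadowRoot
        .querySelector("pes-router-link.description.hydrated a > h3")
        .textContent.trim());
''')
for d in descriptions:
    print(d)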
References
You can find a couple of relevant detailed discussions in:
How to locate the First name field within shadow-root (open) within the website https://www.virustotal.com using Selenium and Python
How to get past a cookie agreement page using Python and Selenium?
Unable to locate the Sign In element within #shadow-root (open) using Selenium and Python

How can I click on an SVG element?

I'm a newbie at web scraping, and using Selenium I would like to click on an SVG element to get access to the information shown in a modal window.
Clicking on a point or cross on the basketball court of this webpage: https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart will bring up a modal window with information about the shot.
I have made this development, which works fine because it finds the "svg" elements and prints the values of their "x" and "y" attributes:
b = self.driver
b.set_window_size(300, 300)
b.get("https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart")
periods = b.find_element_by_css_selector('g.team-A').find_element_by_css_selector('g.period-list').find_elements_by_css_selector("g.period-item")
for quarter in periods:
    shots = quarter.find_element_by_css_selector("g.shot-list").find_elements_by_css_selector("g.shot-item")
    for item in shots:
        print(f"x: {item.find_element_by_tag_name('svg').get_attribute('x')} - y: {item.find_element_by_tag_name('svg').get_attribute('y')}")
    print("\n")
You can see the output of the code here:
x: 40.5 - y: 99.5
x: 151.5 - y: 211.5
x: 34.5 - y: 125.5
x: 35.5 - y: 121.5
x: 157.5 - y: 204.5
x: 59.5 - y: 122.5
x: 32 - y: 142
x: 40 - y: 121
x: 27.5 - y: 117.5
x: 164 - y: 124
x: 80.5 - y: 7.5
x: 49.5 - y: 111.5
x: 135.5 - y: 42.5
x: 34.5 - y: 67.5
x: 27.5 - y: 117.5
x: 138 - y: 54
x: 22 - y: 140
x: 119.5 - y: 32.5
x: 135.5 - y: 42.5
x: 154.5 - y: 186.5
x: 37.5 - y: 106.5
x: 39 - y: 117
x: 31 - y: 114
x: 40.5 - y: 117.5
x: 22 - y: 5
x: 46.5 - y: 4.5
x: 20 - y: 125
x: 148.5 - y: 197.5
x: 71.5 - y: 169.5
x: 118 - y: 230
x: 30.5 - y: 263.5
x: 25 - y: 124
x: 135.5 - y: 213.5
x: 82.5 - y: 128.5
x: 40 - y: 119
x: 158.5 - y: 131.5
x: 50.5 - y: 174.5
x: 166.5 - y: 82.5
x: 26 - y: 149
x: 36 - y: 133
x: 114.5 - y: 239.5
x: 48 - y: 222
x: 127.5 - y: 226.5
x: 23 - y: 132
x: 110.5 - y: 107.5
x: 114 - y: 138
x: 15 - y: 260
x: 137.5 - y: 131.5
x: 34 - y: 118
x: 75 - y: 65
x: 54.5 - y: 167.5
x: 30.5 - y: 127.5
But if I try to click on the "svg" component by adding this code:
point = item.find_element_by_tag_name('svg')
point.click()
Finally, my code will be:
b = self.driver
b.set_window_size(300, 300)
b.get("https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart")
periods = b.find_element_by_css_selector('g.team-A').find_element_by_css_selector('g.period-list').find_elements_by_css_selector("g.period-item")
for quarter in periods:
    shots = quarter.find_element_by_css_selector("g.shot-list").find_elements_by_css_selector("g.shot-item")
    for item in shots:
        print(f"x: {item.find_element_by_tag_name('svg').get_attribute('x')} - y: {item.find_element_by_tag_name('svg').get_attribute('y')}")
        point = item.find_element_by_tag_name('svg')
        point.click()
    print("\n")
I've got this error:
selenium.common.exceptions.WebDriverException: Message: unknown error: Element <svg class="shot-miss icon icon-miss clickable" x="40.5" y="99.5" width="16" height="16" viewBox="0 0 30 30" title="">...</svg> is not clickable at point (44, 165). Other element would receive the click: <p class="cc_message">...</p>
(Session info: chrome=72.0.3626.121)
(Driver info: chromedriver=2.44.609551 (5d576e9a44fe4c5b6a07e568f1ebc753f1214634),platform=Linux 5.4.0-88-generic x86_64)
How is that possible? What am I doing wrong? How can I get the content of the modal window?
Edit I (solution provided by @Prophet):
Now, my code is:
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
b = self.driver
wait = WebDriverWait(b, 20)
b.set_window_size(1920, 1080)
b.get("https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart")
wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "a.cc_btn_accept_all"))).click()
b.execute_script("window.scrollTo(0, document.body.scrollHeight);")
periods = b.find_element_by_css_selector('g.team-A').find_element_by_css_selector('g.period-list').find_elements_by_css_selector("g.period-item")
for quarter in periods:
    shots = quarter.find_element_by_css_selector("g.shot-list").find_elements_by_css_selector("g.shot-item")
    for item in shots:
        print(f"x: {item.find_element_by_tag_name('svg').get_attribute('x')} - y: {item.find_element_by_tag_name('svg').get_attribute('y')}")
        point = item.find_element_by_tag_name('svg')
        point.click()
    print("\n")
And I've got this error:
selenium.common.exceptions.WebDriverException: Message: unknown error: Element <svg class="shot-miss icon icon-miss clickable" x="151.5" y="211.5" width="16" height="16" viewBox="0 0 30 30" title="">...</svg> is not clickable at point (805, 312). Other element would receive the click: <th class="player"></th>
(Session info: chrome=72.0.3626.121)
(Driver info: chromedriver=2.44.609551 (5d576e9a44fe4c5b6a07e568f1ebc753f1214634),platform=Linux 5.4.0-88-generic x86_64)
Edit II (solution provided by @cruisepandey):
I have edited my code and now I've got this code:
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
b = self.driver
b.set_window_size(1920, 1080)
b.get("https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart")
b.execute_script("window.scrollTo(0, document.body.scrollHeight);")
periods = b.find_element_by_css_selector('g.team-A').find_element_by_css_selector('g.period-list').find_elements_by_css_selector("g.period-item")
WebDriverWait(b, 20).until(EC.element_to_be_clickable((By.XPATH, "//*[name()='svg']"))).click()
for quarter in periods:
    shots = quarter.find_element_by_css_selector("g.shot-list").find_elements_by_css_selector("g.shot-item")
    for item in shots:
        print(f"x: {item.find_element_by_tag_name('svg').get_attribute('x')} - y: {item.find_element_by_tag_name('svg').get_attribute('y')}")
        point = item.find_element_by_xpath("//*[name()='svg']")
        point.click()
    print("\n")
But it doesn't work :( I've got this error:
Traceback (most recent call last):
File "/home/josecarlos/Workspace/python/basketmetrics/test/test_shot_chart_get_data.py", line 27, in test_something
WebDriverWait(b, 20).until(EC.element_to_be_clickable((By.XPATH, "//*[name()='svg']"))).click()
File "/home/josecarlos/Workspace/python/basketmetrics/venv/python/lib/python3.8/site-packages/selenium/webdriver/support/wait.py", line 80, in until
raise TimeoutException(message, screen, stacktrace)
selenium.common.exceptions.TimeoutException: Message:
Edit III:
I have tried to access the "svg" tag through its absolute XPath. Using the developer tools of Firefox or Chrome, we can get this absolute XPath. In my case, I have got the XPath of the selected element.
This is the route:
/html/body/div[3]/div[3]/div/section/div[2]/div/div/ul[2]/li[6]/div/div[1]/div/div[2]/div[2]/div[2]/svg/g[1]/g/g[1]/g/g[1]/svg
If I try to access this route in my code I get an error.
b = self.driver
wait = WebDriverWait(b, 20)
b.set_window_size(300, 300)
b.get("https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart")
periods = b.find_element_by_css_selector('g.team-A').find_element_by_css_selector('g.period-list').find_elements_by_css_selector("g.period-item")
#WebDriverWait(b, 20).until(EC.element_to_be_clickable((By.XPATH, "//*[name()='svg']"))).click()
for quarter in periods:
    shots = quarter.find_element_by_css_selector("g.shot-list").find_elements_by_css_selector("g.shot-item")
    for item in shots:
        print(f"x: {item.find_element_by_tag_name('svg').get_attribute('x')} - y: {item.find_element_by_tag_name('svg').get_attribute('y')}")
        point = b.find_element_by_xpath("/html/body/div[3]/div[3]/div/section/div[2]/div/div/ul[2]/li[6]/div/div[1]/div/div[2]/div[2]/div[2]/svg/g[1]/g/g[1]/g/g[1]/svg")
        point.click()
    print("\n")
The error that I've got is this:
selenium.common.exceptions.NoSuchElementException: Message: no such element: Unable to locate element: {"method":"xpath","selector":"/html/body/div[3]/div[3]/div/section/div[2]/div/div/ul[2]/li[6]/div/div[1]/div/div[2]/div[2]/div[2]/svg/g[1]/g/g[1]/g/g[1]/svg"}
(Session info: chrome=72.0.3626.121)
(Driver info: chromedriver=2.44.609551 (5d576e9a44fe4c5b6a07e568f1ebc753f1214634),platform=Linux 5.4.0-88-generic x86_64)
What am I doing wrong? What happened?
Edit IV:
I have modified the string in my code that gives me access to the svg, but it doesn't work.
b = self.driver
wait = WebDriverWait(b, 20)
b.set_window_size(1920, 1080)
b.get("https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart")
periods = b.find_element_by_css_selector('g.team-A').find_element_by_css_selector('g.period-list').find_elements_by_css_selector("g.period-item")
#WebDriverWait(b, 20).until(EC.element_to_be_clickable((By.XPATH, "//*[name()='svg']"))).click()
for quarter in periods:
    shots = quarter.find_element_by_css_selector("g.shot-list").find_elements_by_css_selector("g.shot-item")
    for item in shots:
        print(f"x: {item.find_element_by_tag_name('svg').get_attribute('x')} - y: {item.find_element_by_tag_name('svg').get_attribute('y')}")
        point = b.find_element_by_xpath("//div[@class='shot-chart_canvas']/*[name()='svg']/*[name()='g'][1]/*[name()='g']/*[name()='g'][1]/*[name()='g']/*[name()='g'][1]/*[name()='svg']")
        point.click()
    print("\n")
self.assertEqual(True, True, "Error!!!")
And the error that I get is:
selenium.common.exceptions.WebDriverException: Message: unknown error: Element <svg class="shot-miss icon icon-miss clickable" x="40.5" y="99.5" width="16" height="16" viewBox="0 0 30 30" title="">...</svg> is not clickable at point (687, 917). Other element would receive the click: <div class="cc_banner cc_container cc_container--open">...</div>
(Session info: chrome=72.0.3626.121)
(Driver info: chromedriver=2.44.609551 (5d576e9a44fe4c5b6a07e568f1ebc753f1214634),platform=Linux 5.4.0-88-generic x86_64)
Sorry, but I don't understand what happened :(((
Edit V:
I have to retrieve the information of all the crosses and points. I've got two teams, and inside this tag we've got an array of periods; inside each period we've got all the shots, and I need to retrieve the information of each shot-item. So, how can I make an XPath loop to retrieve the information of each shot?
The g tag is under the svg tag, so for locating g.team-A please use the below XPath:
//*[name()='g' and @class='team-A']
This is an XPath expression.
So the possible fix is to change this:
point = item.find_element_by_tag_name('svg')
point.click()
to this:
point = item.find_element_by_xpath("//*[name()='svg']")
point.click()
What I would suggest here is to have an explicit wait defined and then you can try to click on it.
Code trial:
WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.XPATH, "//*[name()='svg']"))).click()
For this explicit wait, you'd have to add these imports as well:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
Update:
driver = webdriver.Chrome(driver_path)
driver.maximize_window()
wait = WebDriverWait(driver, 30)
driver.get("https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart")
ele = wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, "div.shot-chart_canvas")))
driver.execute_script("arguments[0].scrollIntoView(true);", ele)
wait.until(EC.element_to_be_clickable((By.XPATH, "//*[name()='svg' and @class='chart']//*[name()='g']//descendant::*[name()='g' and @class='shot-item']"))).click()
a = wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "div.player-profile"))).get_attribute('innerText')
print(a)
Imports:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
Output:
Q1 09:520 0
Alexandra Stolyar
2pt jump shot missed
View Player Profile
FG 2Pts 3Pts FT Pts
In this game
2/6
33.33%
1/4
25%
1/2
50%
3/3
100%
8
Update 2:
driver.get("https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart")
ele = wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, "div.shot-chart_canvas")))
driver.execute_script("arguments[0].scrollIntoView(true);", ele)
all_points = wait.until(EC.presence_of_all_elements_located((By.XPATH, "//*[name()='svg' and @class='chart']//*[name()='g']//descendant::*[name()='g' and @class='shot-item']")))
print(len(all_points))
for point in all_points:
    point.click()
    a = wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "div.player-profile"))).get_attribute('innerText')
    print(a)
    wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "button.icon--close"))).click()
    time.sleep(1)
Output:
119
Q1 09:520 0
Alexandra Stolyar
2pt jump shot missed
View Player Profile
FG 2Pts 3Pts FT Pts
In this game
2/6
33.33%
1/4
25%
1/2
50%
3/3
100%
8
Q1 09:350 0
Karina Nizamova
3pt jump shot missed
View Player Profile
FG 2Pts 3Pts FT Pts
In this game
1/7
14.29%
1/3
33.33%
0/4
0%
2/3
66.67%
4
The elements you are trying to click on are initially outside the visible screen. Also there is an "accept cookies" banner at the bottom.
You need to close the cookies banner and scroll the page in order to make your code work.
Please try this:
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
b = self.driver
wait = WebDriverWait(b, 20)
b.set_window_size(300, 300)
b.get("https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart")
wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "a.cc_btn_accept_all"))).click()
b.execute_script("window.scrollTo(0, document.body.scrollHeight);")
periods = b.find_element_by_css_selector('g.team-A').find_element_by_css_selector('g.period-list').find_elements_by_css_selector("g.period-item")
for quarter in periods:
    shots = quarter.find_element_by_css_selector("g.shot-list").find_elements_by_css_selector("g.shot-item")
    for item in shots:
        print(f"x: {item.find_element_by_tag_name('svg').get_attribute('x')} - y: {item.find_element_by_tag_name('svg').get_attribute('y')}")
        point = item.find_element_by_tag_name('svg')
        point.click()
    print("\n")
I would also recommend using a normal window size, as follows:
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
b = self.driver
wait = WebDriverWait(b, 20)
b.set_window_size(1920, 1080)
b.get("https://www.fiba.basketball/euroleaguewomen/21-22/game/1310/MBA-Moscow-ZVVZ-USK-Praha#tab=shot_chart")
wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "a.cc_btn_accept_all"))).click()
b.execute_script("window.scrollTo(0, document.body.scrollHeight);")
periods = b.find_element_by_css_selector('g.team-A').find_element_by_css_selector('g.period-list').find_elements_by_css_selector("g.period-item")
for quarter in periods:
    shots = quarter.find_element_by_css_selector("g.shot-list").find_elements_by_css_selector("g.shot-item")
    for item in shots:
        print(f"x: {item.find_element_by_tag_name('svg').get_attribute('x')} - y: {item.find_element_by_tag_name('svg').get_attribute('y')}")
        point = item.find_element_by_tag_name('svg')
        point.click()
    print("\n")
UPD
svg, rect, g, etc. are special tag names.
An XPath like /svg or /g will not work; you will need to use /*[name()='svg'] or /*[name()='g'] respectively.
Also, absolute paths are strongly NOT recommended. You need to use better locators.
For example instead of
/html/body/div[3]/div[3]/div/section/div[2]/div/div/ul[2]/li[6]/div/div[1]/div/div[2]/div[2]/div[2]/svg/g[1]/g/g[1]/g/g[1]/svg
This will work better:
//div[@class='shot-chart_canvas']/*[name()='svg']/*[name()='g'][1]/*[name()='g']/*[name()='g'][1]/*[name()='g']/*[name()='g'][1]/*[name()='svg']
So instead of
point = b.find_element_by_xpath("/html/body/div[3]/div[3]/div/section/div[2]/div/div/ul[2]/li[6]/div/div[1]/div/div[2]/div[2]/div[2]/svg/g[1]/g/g[1]/g/g[1]/svg")
point.click()
Try this:
point = b.find_element_by_xpath("//div[#class='shot-chart_canvas']/*[name()='svg']/*[name()='g'][1]/*[name()='g']/*[name()='g'][1]/*[name()='g']/*[name()='g'][1]/*[name()='svg']")
point.click()

Web Scraping ESPN Data With Selenium

I'm trying to scrape some data off ESPN and run some calculations off the scraped data. Ideally, I would like to iterate through a dataframe, grab the player's name with Selenium, send the player's name into the search box and tell Selenium to click the player's name. I was able to do this successfully with one player. I'm not quite sure how to iterate through all the players in my data frame.
The second part of the code is where I'm struggling. For some reason I am not able to get the data; Selenium isn't able to find any of the elements. I don't think I'm doing it properly. If I am able to scrape the required data, I would like to plug it into a calculation and append the calculated projected points to my dataframe, dfNBA.
Can someone please help me with my code and point me in the right direction? I'm trying to be more efficient at writing Python code, but right now I'm stuck.
Thanks
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
import pandas as pd
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
#sample data
pp = {'Player Name':['Donovan Mitchell', 'Kawhi Leonard', 'Rudy Gobert', 'Paul George','Reggie Jackson', 'Jordan Clarkson'],
'Fantasy Score': [46.0, 50.0, 40.0, 44.0, 25.0, 26.5]}
#Creating a dataframe from dictionary
dfNBA = pd.DataFrame(pp)
#Scraping ESPN
PATH = "C:\Program Files (x86)\chromedriver.exe"
driver = webdriver.Chrome(PATH)
driver.get("https://www.espn.com/")
#Clicking the search button
driver.find_element_by_xpath("//a[#id='global-search-trigger']").click()
#sending data to the search button
driver.find_element_by_xpath("//input[#placeholder='Search Sports, Teams or Players...']").send_keys(dfNBA.iloc[0,:].values[0])
WebDriverWait(driver, 20).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".search_results__details")))
playerPage = driver.find_element_by_css_selector(".search_results__details").click()
#Scraping data from last 10 games
points = driver.find_element_by_xpath(".//div[#class='Table__TD']")[13]
#rebs = driver.find_element_by_xpath("//*[#id='fittPageContainer'']/div[2]/div[5]/div/div[1]/div[1]/section/div/div[3]/div/div/div[2]/table/tbody/tr[1]/td[7]")
#asts = driver.find_element_by_xpath("//*[#id='fittPageContainer']/div[2]/div[5]/div/div[1]/div[1]/section/div/div[3]/div/div/div[2]/table/tbody/tr[1]/td[8]")
#blks = driver.find_element_by_xpath("//*[#id='fittPageContainer']/div[2]/div[5]/div/div[1]/div[1]/section/div/div[3]/div/div/div[2]/table/tbody/tr[1]/td[9]")
#stls = driver.find_element_by_xpath("//*[#id='fittPageContainer']/div[2]/div[5]/div/div[1]/div[1]/section/div/div[3]/div/div/div[2]/table/tbody/tr[1]/td[10]")
#tnvrs = driver.find_element_by_xpath("//*[#id='fittPageContainer']/div[2]/div[5]/div/div[1]/div[1]/section/div/div[3]/div/div/div[2]/table/tbody/tr[1]/td[12]")
#projectedPoints = points+(rebs*1.2)+(asts*1.5)+(blks*3)+(stls*3)-(tnvrs*1)
print(points)
I think Selenium is a bit overkill when there's a viable api option.
Give this a try. Note that in the overview, the L10 games refers to the last 10 regular-season games. My code here does the last 10 games, which include playoffs. If you only want regular season, let me know and I can adjust it. I also added a variable here so if you wanted, for example, just the last 5 games, or the last 15 games, etc., you could do that too.
import requests
import pandas as pd

previous_games = 10

pp = {'Player Name': ['Donovan Mitchell', 'Kawhi Leonard', 'Rudy Gobert', 'Paul George', 'Reggie Jackson', 'Jordan Clarkson'],
      'Fantasy Score': [46.0, 50.0, 40.0, 44.0, 25.0, 26.5]}

#Creating a dataframe from dictionary
dfNBA = pd.DataFrame(pp)

search_api = 'https://site.api.espn.com/apis/search/v2'

for idx, row in dfNBA.iterrows():
    playerName = row['Player Name']
    payload = {'query': '%s' % playerName}
    results = requests.get(search_api, params=payload).json()['results']
    for each in results:
        if each['type'] == 'player':
            playerID = each['contents'][0]['uid'].split('a:')[-1]
            break

    player_api = 'https://site.web.api.espn.com/apis/common/v3/sports/basketball/nba/athletes/%s/gamelog' % playerID
    payload = {'season': '2021'}
    jsonData_player = requests.get(player_api, params=payload).json()

    #Scraping data from last x games
    last_x_gameIDs = list(jsonData_player['events'].keys())
    last_x_gameIDs.sort()
    last_x_gameIDs = last_x_gameIDs[-1*previous_games:]

    gamelog_dict = {}
    seasonTypes = jsonData_player['seasonTypes']
    for gameID in last_x_gameIDs:
        for each in seasonTypes:
            categories = each['categories']
            for category in categories:
                if category['type'] == 'total':
                    continue
                events = category['events']
                for event in events:
                    if gameID == event['eventId']:
                        gamelog_dict[gameID] = event['stats']

    labels = jsonData_player['labels']

    # Aggregate totals
    for k, v in gamelog_dict.items():
        v = dict(zip(labels, v))
        gamelog_dict[k] = v

    stats = pd.DataFrame(gamelog_dict.values())

    points = stats['PTS'].astype(float).sum() / previous_games
    rebs = stats['REB'].astype(float).sum() / previous_games
    asts = stats['AST'].astype(float).sum() / previous_games
    blks = stats['BLK'].astype(float).sum() / previous_games
    stls = stats['STL'].astype(float).sum() / previous_games
    tnvrs = stats['TO'].astype(float).sum() / previous_games

    projectedPoints = float(points)+(float(rebs)*1.2)+(float(asts)*1.5)+(float(blks)*3)+(float(stls)*3)-(float(tnvrs)*1)
    print('%s: %.02f' % (playerName, projectedPoints))
Output:
Donovan Mitchell: 42.72
Kawhi Leonard: 52.25
Rudy Gobert: 38.47
Paul George: 44.18
Reggie Jackson: 24.21
Jordan Clarkson: 25.88
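The question also asks about writing the calculated projection back into dfNBA; one possible approach (the 'Projected Points' column name is just an illustrative choice) is to assign the value inside the same loop. A minimal, self-contained sketch:

import pandas as pd

pp = {'Player Name': ['Donovan Mitchell', 'Kawhi Leonard'],
      'Fantasy Score': [46.0, 50.0]}
dfNBA = pd.DataFrame(pp)

# In the real loop this value would be the projectedPoints computed from the game log;
# the numbers here are just the two projections shown in the output above.
projections = {'Donovan Mitchell': 42.72, 'Kawhi Leonard': 52.25}
for idx, row in dfNBA.iterrows():
    dfNBA.loc[idx, 'Projected Points'] = projections[row['Player Name']]

print(dfNBA)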
Here's some code to accomplish (I think) what you want. You need to wait for the table elements to appear, fix your XPath, and choose the right elements from the table array.
pp = {'Player Name':['Donovan Mitchell', 'Kawhi Leonard', 'Rudy Gobert', 'Paul George','Reggie Jackson', 'Jordan Clarkson'],
'Fantasy Score': [46.0, 50.0, 40.0, 44.0, 25.0, 26.5]}
#Creating a dataframe from dictionary
dfNBA = pd.DataFrame(pp)
#Scraping ESPN
PATH = "C:\Program Files (x86)\chromedriver.exe"
driver = webdriver.Chrome(PATH)
driver.get("https://www.espn.com/")
#Clicking the search button
driver.find_element_by_xpath("//a[#id='global-search-trigger']").click()
#sending data to the search button
driver.find_element_by_xpath("//input[#placeholder='Search Sports, Teams or Players...']").send_keys(dfNBA.iloc[0,:].values[0])
WebDriverWait(driver, 20).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".search_results__details")))
playerPage = driver.find_element_by_css_selector(".search_results__details").click()
#Scraping data from last 10 games
WebDriverWait(driver, 20).until(EC.visibility_of_all_elements_located((By.XPATH, "//td[@class='Table__TD']")))
points = driver.find_elements_by_xpath("//td[@class='Table__TD']")[12].text
rebs = driver.find_elements_by_xpath("//td[@class='Table__TD']")[6].text
asts = driver.find_elements_by_xpath("//td[@class='Table__TD']")[7].text
blks = driver.find_elements_by_xpath("//td[@class='Table__TD']")[8].text
stls = driver.find_elements_by_xpath("//td[@class='Table__TD']")[9].text
tnvrs = driver.find_elements_by_xpath("//td[@class='Table__TD']")[11].text
projectedPoints = float(points)+(float(rebs)*1.2)+(float(asts)*1.5)+(float(blks)*3)+(float(stls)*3)-(float(tnvrs)*1)
print(projectedPoints)
