Stale Element - Selenium - Python

I'll start by saying this has become a bit of a mess while I've been trying to solve this issue; other times I have been able to resolve stale element problems, but not here.
The problem starts right after the first player's stats are stored (everything works as it should up to that point): once the code loops back to find the next player, the stale element issue appears.
I'm not sure whether it's caused by the nested loops or something else.
I've tried re-fetching the variable that I assume is causing the issue, player_stats, throughout the code.
The thing is, it previously got through 5 players, and I am not sure what happened or when the bug first crept in, as I was working on getting the rounds won and rounds played sorted out.
(We aren't even able to print("Found playerCol element") on the second pass.)
All print statements work until it hangs in the while loop after the first iteration.
Here is the full code (with comments):
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import StaleElementReferenceException
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import pandas as pd
import re
import time
# Initialize the webdriver
driver = webdriver.Firefox()
# Navigate to the website
url = "https://www.hltv.org/stats/players"
driver.get(url)
WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID, "CybotCookiebotDialogBodyLevelButtonLevelOptinAllowAll"))).click()
# Find the elements containing the player statistics
player_stats = WebDriverWait(driver, 10).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".playerCol, .statsDetail"))
)
# Extract the relevant data from the elements
players = []
for i, player_stat in enumerate(player_stats):
try:
WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CSS_SELECTOR, ".playerCol, .statsDetail")))
while True:
player_stats = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".playerCol, .statsDetail")))
try:
if "playerCol" in player_stat.get_attribute("class"):
print("Found playerCol element")
name = player_stat.find_element(By.CSS_SELECTOR, "a").text if player_stat.find_elements(By.CSS_SELECTOR, "a") else player_stat.text
print(f"Name: {name}")
elif "statsDetail" in player_stat.get_attribute("class"):
stats = player_stat.text.split()
if len(stats) >= 1 and re.search(r"\d+\.\d+", stats[0]):
kd_ratio = stats[0]
break
except StaleElementReferenceException as e:
player_stats = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".playerCol, .statsDetail")))
player_stats = driver.find_elements(By.CSS_SELECTOR, ".playerCol, .statsDetail")
print(f"An error occurred while processing match stats: {e}")
break
# Extract the player stats
if "statsDetail" in player_stat.get_attribute("class"):
stats = player_stat.text.split()
if len(stats) >= 1 and re.search(r"\d+\.\d+", stats[0]):
kd_ratio = stats[0]
# Process match stats for the player
try:
time.sleep(1)
WebDriverWait(driver, 15).until(EC.presence_of_element_located((By.CSS_SELECTOR, ".playerCol, .statsDetail")))
player_link = driver.find_element(By.XPATH, f"//a[contains(text(), '{name}')]")
print(player_link.get_attribute('outerHTML'))
driver.execute_script("arguments[0].click();", player_link)
time.sleep(1)
player_stats = driver.find_elements(By.CSS_SELECTOR, ".playerCol, .statsDetail")
player = [name, kd_ratio]
# Extract additional player stats
headshot_percentage = WebDriverWait(driver, 5).until(EC.presence_of_element_located((By.XPATH, "//span[contains(text(), 'Headshot %')]/following-sibling::span"))).text
player.append(headshot_percentage)
kpr = WebDriverWait(driver, 5).until(EC.presence_of_element_located((By.XPATH, "//span[contains(text(), 'Kills / round')]/following-sibling::span"))).text
player.append(kpr)
dpr = WebDriverWait(driver, 5).until(EC.presence_of_element_located((By.XPATH, "//span[contains(text(), 'Deaths / round')]/following-sibling::span"))).text
player.append(dpr)
# Extract match stats for the player
matches_link = WebDriverWait(driver, 5).until(EC.presence_of_element_located((By.CSS_SELECTOR, "a[href*='/stats/players/matches/'][data-link-tracking-destination='Click on Matches -> Individual -> Overview [subnavigation]']")))
driver.execute_script("arguments[0].click();", matches_link)
match_stats = WebDriverWait(driver, 5).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, "tr.group-2, tr.group-1")))
match_scores = []
num_of_matches = 0
rounds_won = 0
rounds_played = 0
# Process match stats for the player
for i, match_stat in enumerate(match_stats):
player_name = player[0]
player_team = driver.find_element(By.CSS_SELECTOR, ".gtSmartphone-only span:last-of-type").text
try:
team_name = ""
score = ""
while team_name == "" or score == "":
try:
team = match_stat.find_element(By.CSS_SELECTOR, ".gtSmartphone-only span:last-of-type").text
team_name = team.strip()
score_span = match_stat.find_element(By.XPATH, ".//div[contains(@class, 'gtSmartphone-only')]//*[contains(text(), '(')]")
score_text = score_span.text.strip()
score = re.search(r'\((\d+)\)', score_text).group(1)
except:
time.sleep(1)
match_stats = WebDriverWait(driver, 5).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, "tr.group-2, tr.group-1")))
match_stat = match_stats[i]
team_data = match_stat.find_elements(By.CSS_SELECTOR, ".gtSmartphone-only span")
print("Team data:", team_data[3].text)
if team_name.lower() == player_team.lower():
player_score = score
opposing_team_name = team_data[2].text.strip()
print(opposing_team_name)
opposing_team_score = team_data[3].text.strip('()')
print("Score strip: ", opposing_team_score)
rounds_won += int(player_score)
rounds_played += int(player_score) + int(opposing_team_score)
else:
player_score = team_data[1].text.strip('()')
print(player_score)
opposing_team_score = score
print(opposing_team_score)
opposing_team_name = team_data[0].text.strip()
print(opposing_team_name)
rounds_won += int(opposing_team_score)
rounds_played += int(player_score) + int(opposing_team_score)
match_scores.append((team_name, opposing_team_name, player_score, opposing_team_score))
num_of_matches += 1
if num_of_matches == 5: # exit loop after 5 iterations
break
except:
# Refresh the page if the element can't be found
driver.back()
player_stats = driver.find_elements(By.CSS_SELECTOR, ".playerCol, .statsDetail")
time.sleep(1)
match_stats = WebDriverWait(driver, 5).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, "tr.group-2, tr.group-1")))
except Exception as e:
print(f"An error occurred while processing data for player {name}: {e}")
continue
players.append([name, kd_ratio, headshot_percentage, kpr, dpr, rounds_won, rounds_played])
print(players)
print(f"{player_name}: {rounds_won} rounds won out of {rounds_played} rounds played in {num_of_matches} matches")
driver.get(url)
time.sleep(1)
except StaleElementReferenceException as e:
# handle the exception here
print(f"An error occurred while processing match stats: {e}")
break
# Close the webdriver
driver.quit()
# Store the data in a Pandas dataframe
df = pd.DataFrame(players, columns=["Name", "K/D", "HS %", "KPR", "DPR", "RW", "RP"])
# Clean the data
df["K/D"] = df["K/D"].str.extract(r"(\d+\.\d+)").astype(float)
df["HS %"] = df["HS %"].str.extract(r"(\d+\.\d+)").astype(float)
df["KPR"] = df["KPR"].str.extract(r"(\d+\.\d+)").astype(float)
df["DPR"] = df["DPR"].str.extract(r"(\d+\.\d+)").astype(float)
# Drop any rows that have missing or invalid data
df.dropna(subset=["Name", "K/D", "HS %", "KPR", "DPR"], inplace=True)
# Save the data to a CSV file
df.to_csv("player_stats.csv", index=False, sep='\t')
# Close the webdriver
driver.quit()
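For reference, a common way to avoid the stale references described above (this is only a sketch, not taken from the post) is to stop reusing the player_stats elements after navigating away: collect the profile URLs up front and then visit them one by one, so nothing from the listing page needs to survive a driver.get() or driver.back(). A rough outline, reusing the selectors from the question:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

driver = webdriver.Firefox()
driver.get("https://www.hltv.org/stats/players")
# Grab the hrefs once; plain strings cannot go stale, unlike WebElements.
rows = WebDriverWait(driver, 10).until(
    EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".playerCol a"))
)
player_urls = [row.get_attribute("href") for row in rows]
for url in player_urls[:5]:
    driver.get(url)
    # Scrape the per-player stats here, then move on; with no driver.back()
    # there are no stale references to the original table rows.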

Related

How to create a for-loop in relation to value obtained via Selenium

Variables
chrome_path = 'chromedriver'
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument("--disable-popup-blocking")
# Pass the options when creating the driver, otherwise they are never applied
driver = webdriver.Chrome(chrome_path, options=chrome_options)
driver.get("https://gibiru.com/")
driver.find_element(By.CSS_SELECTOR, '.form-control.has-feedback.has-clear').click()
driver.find_element(By.CSS_SELECTOR, '.form-control.has-feedback.has-clear').send_keys("lfc")
driver.find_element(By.CSS_SELECTOR, '.form-control.has-feedback.has-clear').send_keys(Keys.RETURN)
driver.find_element(By.XPATH, "/html/body/div[1]/main/div[1]/div/div/div/div[2]").click()
time.sleep(2)
I have this try-statement, which works perfectly, but it needs to be looped according to the value of page_length, which is equal to 10 in this situation.
try:
#1st page
page_length = len(driver.find_elements(By.CSS_SELECTOR, "div.gsc-resultsRoot.gsc-tabData.gsc-tabdActive div.gsc-cursor-box.gs-bidi-start-align div.gsc-cursor div.gsc-cursor-page"))
index_count = 0
current_page = int(page_length) - int(index_count)
print("Number of availables pages : "+str(current_page)) #Print = 10
find_href = driver.find_elements(By.CSS_SELECTOR, 'img.gs-image.gs-image-scalable')
with open("txt.txt", "a") as textFile:
for my_href in find_href:
textFile.write(str(my_href.get_attribute("src")) + "\n")
print(my_href.get_attribute("src"))
index_count += 1
driver.execute_script("window.scrollTo(100,document.body.scrollHeight);")
driver.find_element(By.XPATH, '/html/body/div[1]/main/div[2]/div[2]/div/div[1]/div/div/div/div/div[5]/div[2]/div[2]/div/div[2]/div/div['+str(index_count)+']').click()
time.sleep(2)
#2nd page
current_page = int(page_length) - int(index_count)
print("Number of availables pages : "+str(current_page)) #Print = 10
find_href = driver.find_elements(By.CSS_SELECTOR, 'img.gs-image.gs-image-scalable')
with open("txt.txt", "a") as textFile:
for my_href in find_href:
textFile.write(str(my_href.get_attribute("src")) + "\n")
print(my_href.get_attribute("src"))
index_count += 1
driver.execute_script("window.scrollTo(100,document.body.scrollHeight);")
driver.find_element(By.XPATH, '/html/body/div[1]/main/div[2]/div[2]/div/div[1]/div/div/div/div/div[5]/div[2]/div[2]/div/div[2]/div/div['+str(index_count)+']').click()
time.sleep(2)
except Exception as e:
print(e)
driver.quit()
But I'd like help creating a for-loop that can do what the try-statement does in fewer lines of code. This is what I'm thinking of:
for x in page_array_number:
index_count = 0
current_page = int(page_length) - int(index_count)
print("Number of availables pages : "+str(current_page))
find_href = driver.find_elements(By.CSS_SELECTOR, 'img.gs-image.gs-image-scalable')
with open("txt.txt", "a") as textFile:
for my_href in find_href:
textFile.write(str(my_href.get_attribute("src")) + "\n")
print(my_href.get_attribute("src"))
print("Counter is before : "+str(index_count))
index_count += 1
print("Counter is after : "+str(index_count))
driver.execute_script("window.scrollTo(100,document.body.scrollHeight);")
time.sleep(2)
driver.find_element(By.XPATH, '/html/body/div[1]/main/div[2]/div[2]/div/div[1]/div/div/div/div/div[5]/div[2]/div[2]/div/div[2]/div/div['+str(index_count)+']').click()
time.sleep(2)
if index_count == page_length:
print("Done scraping urls from "+str(page_length)+" pages")
break
The output I am getting suggests that the counter is the problem: it doesn't add 1 on every loop (index_count is reset to 0 at the top of each iteration).
len() returns an integer, which is not an iterable object. I would use the built-in enumerate() function, which returns the index and value of each item in the iterable; enumerate() is also faster in many cases.
pages = driver.find_elements()
page_length = len(pages)
for index, value in enumerate(pages):
current_page = page_length - index
...
Also, the last two lines of code are redundant: if index_count == page_length, then that is the last iteration of the loop and it will exit anyway.
Some other notes: if you are looping and don't need the loop variable, replace it with an underscore. In the above code, since we don't need the variable value:
for index, _ in enumerate(pages):
# This is clear that we don't use the values contained in pages
current_page = page_length - index
...
Lastly, you can often get errors like NoSuchAttributeException and ElementNotInteractableException due to variations in page load and JS execution times. I would suggest encapsulating selenium code that interacts with the web page in try except statements.
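For example, a minimal sketch of that idea (the helper name, locator and retry count here are illustrative, not from the question):
import time
from selenium.common.exceptions import ElementNotInteractableException, StaleElementReferenceException

def click_with_retry(driver, by, locator, attempts=3):
    # Re-locate the element and retry the click, since it may not be
    # interactable (or may go stale) while the page is still loading.
    for _ in range(attempts):
        try:
            driver.find_element(by, locator).click()
            return True
        except (ElementNotInteractableException, StaleElementReferenceException):
            time.sleep(1)  # simple back-off; an explicit wait would also work
    return False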
I got it working with this for loop :
for index, item in enumerate(page_array_number):
print(index)
current_page = int(page_length) - int(index)
print("Number of availables pages : "+str(current_page))
index = index+1
print("Counter is after : "+str(index))
find_href = driver.find_elements(By.CSS_SELECTOR, 'img.gs-image.gs-image-scalable')
with open("txt.txt", "a") as textFile:
for my_href in find_href:
textFile.write(str(my_href.get_attribute("src")) + "\n")
print(my_href.get_attribute("src"))
driver.execute_script("window.scrollTo(100,document.body.scrollHeight);")
time.sleep(2)
if index == 1:
driver.find_element(By.XPATH, '/html/body/div[1]/main/div[2]/div[2]/div/div[1]/div/div/div/div/div[5]/div[2]/div[2]/div/div[2]/div/div['+str(index+1)+']').click()
time.sleep(2)
elif index > 1:
driver.find_element(By.XPATH, '/html/body/div[1]/main/div[2]/div[2]/div/div[1]/div/div/div/div/div[5]/div[2]/div[2]/div/div[2]/div/div['+str(index)+']').click()
time.sleep(2)
elif index == page_length:
print("Done scraping urls from "+str(page_length)+" pages")
break

I am very new to scraping, so please bear with me; this is my first project. I am trying to scrape a site using Selenium.

"problem lines"
for_tariff_loop = driver.find_elements_by_xpath("//span[@class='phx-radio__element']")
radio_label_list = for_tariff_loop[i].find_element_by_css_selector('span[class="phx-radio__label"]')
print(radio_label_list)
time.sleep(1)
The website I'm scraping: https://www.telekom.de/unterwegs/apple/apple-iphone-13-pro/graphit-512gb
[label image]
I was not able to print the radio button's label according to the checked button. I don't know what the mistake is or where I made it; could anyone help with this? It would be helpful for me to learn. The change-tariff links are given below,
import xlwt
from selenium import webdriver
import re
import time
from datetime import date
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
class telekommobiles:
def __init__(self):
self.url="https://www.telekom.de/mobilfunk/geraete/smartphone?page=1&pageFilter=promotion"
self.country='DE'
self.currency='GBP'
self.VAT='Included'
self.shipping = 'free shipping within 3-4 weeks'
self.Pre_PromotionPrice ='N/A'
self.color ='N/A'
def telekom(self):
#try:
driver=webdriver.Chrome()
driver.maximize_window()
driver.get(self.url)
today = date.today()
time.sleep(5)
cookies = driver.find_element_by_css_selector('button.cl-btn.cl-btn--accept-all').click()
print("cookies accepted")
links_prod_check = []
prod_models = []
prod_manufacturer =[]
prod_memorys = []
product_colors =[]
product_price_monthly_payments = []
product_price_one_time_payments =[]
product_links = []
containers = driver.find_elements_by_css_selector('div[class="styles_item__12Aw4"]')
i = 1
for container in containers:
p_links =container.find_element_by_tag_name('a').get_attribute('href')
i = i + 1
product_links.append(p_links)
#print(p_links)
for links in product_links:
driver.get(links)
#time.sleep(5)
#print(driver.current_url)
#links_prod_check.append(driver.current_url)
coloroptions = WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located((By.XPATH,"//li[@data-qa='list_ColorVariant']")))
#print(coloroptions)
for i in range(len(coloroptions)):
coloroption = driver.find_elements_by_xpath("//li[@data-qa='list_ColorVariant']")
coloroption[i].click()
#print(coloroption[i])
time.sleep(3)
memoryoptions = WebDriverWait(driver, 30).until(EC.presence_of_all_elements_located((By.XPATH,"//span[@class='phx-radio__element']")))
for i in range(len(memoryoptions)):
memoryoption = driver.find_elements_by_xpath("//span[@class='phx-radio__element']")
try:
memoryoption[i].click()
except:
pass
time.sleep(5)
change_traiff = driver.find_element_by_css_selector('button[class="phx-link phx-list-of-links__link js-mod tracking-added"]').click()
time.sleep(3)
#looping for each section
section_loops = driver.find_elements_by_css_selector('section[class="tariff-catalog--layer"]')
#print(len(section_loops))
for section_loop in section_loops:
#print(section_loop)
time.sleep(5)
#Headings
heading_1 = section_loop.find_element_by_css_selector('h2[class="page-title page-title--lowercase"]').text
print(heading_1)
# looping for each separate boxes
each_box_subcontainers = section_loop.find_elements_by_css_selector('.phx-tariff-box__section')
#print(len(each_box_subcontainers))
for subcontainer in each_box_subcontainers:
#print(subcontainer)
looping_for_tariff = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.XPATH,"//span[@class='phx-radio__element']")))
#print(looping_for_tariff)
for i in range(len(looping_for_tariff)):
#print(i)
try:
for_tariff_loop = driver.find_elements_by_xpath("//span[@class='phx-radio__element']")
for_tariff_loop[i].click()
time.sleep(3)
except:
pass
for_tariff_loop = driver.find_elements_by_xpath("//span[@class='phx-radio__element']")
radio_label_list = for_tariff_loop[i].find_element_by_css_selector('span[class="phx-radio__label"]')
print(radio_label_list)
time.sleep(1)
change_traiff_close_button = driver.find_element_by_css_selector('span[class="icon-after-yellow-close right close popup-close-tr js-popup-close"]').click()
telekom_de=telekommobiles()
telekom_de.telekom()
You are trying to find an element within an element. When you locate radio_label_list relative to for_tariff_loop[i], the effective XPath for radio_label_list becomes:
//span[@class='phx-radio__element']//span[@class='phx-radio__label']
which does not exist in the DOM.
I tried the last part of the code and was able to print the memory size as shown below; do try and confirm.
I replaced the CSS selector for radio_label_list with the XPath ./following-sibling::span:
looping_for_tariff = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.XPATH, "//span[@class='phx-radio__element']")))
# print(looping_for_tariff)
for i in range(len(looping_for_tariff)):
# print(i)
try:
for_tariff_loop = driver.find_elements_by_xpath("//span[@class='phx-radio__element']")
for_tariff_loop[i].click()
time.sleep(3)
except:
pass
for_tariff_loop = driver.find_elements_by_xpath("//span[@class='phx-radio__element']")
radio_label_list = for_tariff_loop[i].find_element_by_xpath("./following-sibling::span").text
print(radio_label_list)
time.sleep(1)
As per the comments, check this code:
driver.get("https://www.telekom.de/unterwegs/apple/apple-iphone-13-pro/graphit-512gb")
wait = WebDriverWait(driver,30)
wait.until(EC.element_to_be_clickable((By.XPATH,"//button[text()='Accept All']"))).click()
wait.until(EC.element_to_be_clickable((By.XPATH,"//ul[contains(#class,'phx-tariff-notification-box-new__element--desktop-tablet')]/li[2]/button"))).click()
length = len(driver.find_elements_by_class_name("phx-tariff-box__section"))
for i in range(length):
print("----------------------------------------------------------------------------------------------------------")
options = driver.find_elements_by_class_name("phx-tariff-box__section")
datas = options[i].find_element_by_xpath(".//div[contains(@class,'phx-tariff-box__volume')]").get_attribute("innerText")
print("data: {}".format(datas))
len_types = len(options[i].find_elements_by_xpath(".//div[@class='phx-tariff-box__radios-inner']//label"))
types = options[i].find_elements_by_xpath(".//div[@class='phx-tariff-box__radios-inner']//label")
if len(types) == 0:
price = options[i].find_element_by_xpath(".//p[@data-qa='block_TariffPrice']").get_attribute("innerText")
print(price)
else:
for j in range(len_types):
types[j].click()
time.sleep(2)
options = driver.find_elements_by_class_name("phx-tariff-box__section")
types = options[i].find_elements_by_xpath(".//div[@class='phx-tariff-box__radios-inner']//label")
try:
types[j].find_element_by_xpath("./input[@checked]")
type = types[j].find_element_by_xpath("./span[2]").get_attribute("innerText")
price = options[i].find_element_by_xpath(".//p[@data-qa='block_TariffPrice']").get_attribute("innerText")
print(f"{type}: {price}")
except:
pass

Why is my Python code extracting the same data for all the elements in my list?

My project consists of building a competitive-watch table of hotel rates for an agency. It is a painful task that I wanted to automate. The code correctly extracts the hotel names and the prices I want, but it only works correctly for the first hotel and I don't know where the problem is. I am providing the code and the output; if any of you can help me, thank you in advance.
NB: code 2 works correctly, but when I added more operations the problem appeared.
code 1
#!/usr/bin/env python
# coding: utf-8
import time
from time import sleep
import ast
import pandas as pd
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait, Select
from selenium.common.exceptions import StaleElementReferenceException, NoSuchElementException
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
driver = webdriver.Chrome("C:\\Users\\marketing2\\Documents\\chromedriver.exe")
driver.get('https://tn.tunisiebooking.com/')
# params to select
params = {
'destination': 'Tozeur',
'date_from': '11/09/2021',
'date_to': '12/09/2021',
'bedroom': '1'
}
# select destination
destination_select = Select(WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.ID, 'ville_des'))))
destination_select.select_by_value(params['destination'])
# select bedroom
bedroom_select = Select(WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.ID, 'select_ch'))))
bedroom_select.select_by_value(params['bedroom'])
# select dates
script = f"document.getElementById('checkin').value ='{params['date_from']}';"
script += f"document.getElementById('checkout').value ='{params['date_to']}';"
script += f"document.getElementById('depart').value ='{params['date_from']}';"
script += f"document.getElementById('arrivee').value ='{params['date_to']}';"
driver.execute_script(script)
# submit form
btn_rechercher = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="boutonr"]')))
btn_rechercher.click()
urls = []
hotels = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.XPATH, "//div[starts-with(@id,'produit_affair')]")))
for hotel in hotels:
link = hotel.find_element_by_xpath(".//span[@class='tittre_hotel']/a").get_attribute("href")
urls.append(link)
for url in urls:
driver.get(url)
def existsElement(xpath):
try:
driver.find_element_by_id(xpath);
except NoSuchElementException:
return "false"
else:
return "true"
if (existsElement('result_par_arrangement')=="false"):
btn_t = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="moteur_rech"]/form/div/div[3]/div')))
btn_t.click()
sleep(10)
else :
pass
try:
name = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@class='bloc_titre_hotels']/h2"))).text
arropt = driver.find_element_by_xpath("//div[contains(@class,'line_result')][1]")
opt = arropt.find_element_by_tag_name("b").text
num = len(arropt.find_elements_by_tag_name("option"))
optiondata = {}
achats = {}
marges= {}
selection = Select(driver.find_element_by_id("arrangement"))
for i in range(num):
try:
selection = Select(driver.find_element_by_id("arrangement"))
selection.select_by_index(i)
time.sleep(2)
arr = driver.find_element_by_xpath("//select[@id='arrangement']/option[@selected='selected']").text
prize = driver.find_element_by_id("prix_total").text
optiondata[arr] = (int(prize))
btn_passe = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="resultat"]/div/form/div/div[2]/div[1]/div[2]/div[2]/div')))
btn_passe.click()
# params to select
params = {
'civilite_acheteur': 'Mlle',
'prenom_acheteur': 'test',
'nom_acheteur': 'test',
'e_mail_acheteur': 'test@gmail.com',
'portable_acheteur': '22222222',
'ville_acheteur': 'Test',
}
# select civilite
civilite_acheteur = Select(WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.NAME, 'civilite_acheteur'))))
civilite_acheteur.select_by_value(params['civilite_acheteur'])
# saisir prenom
script = f"document.getElementsByName('prenom_acheteur')[0].value ='{params['prenom_acheteur']}';"
script += f"document.getElementsByName('nom_acheteur')[0].value ='{params['nom_acheteur']}';"
script += f"document.getElementsByName('e_mail_acheteur')[0].value ='{params['e_mail_acheteur']}';"
script += f"document.getElementsByName('portable_acheteur')[0].value ='{params['portable_acheteur']}';"
script += f"document.getElementsByName('ville_acheteur')[0].value ='{params['ville_acheteur']}';"
driver.execute_script(script)
# submit form
btn_agence = driver.find_element_by_id('titre_Nabeul')
btn_agence.click()
btn_continuez = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.ID, 'boutonr')))
btn_continuez.click()
achat = int(driver.find_element_by_xpath('/html/body/header/div[2]/div[1]/div[1]/div[4]/div[2]/div[2]').text.replace(' TND', ''))
achats[arr]=achat
marge =int(((float(prize) - float(achat)) / float(achat)) * 100);
marges[arr]=marge
optiondata[arr]=prize,achat,marge
driver.get(url)
btn_display = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="moteur_rech"]/form/div/div[3]/div')))
btn_display.click()
sleep(10)
except StaleElementReferenceException:
pass
except NoSuchElementException:
pass
s="- {} | {} : {}".format(name, opt, optiondata)
print(s)
ds = []
for l in s.splitlines():
d = l.split('-')
if len(d) > 1:
df = pd.DataFrame(ast.literal_eval(d[1].strip()))
ds.append(df)
for df in ds:
df.reset_index(drop=True, inplace=True)
df = pd.concat(ds, axis= 1)
cols = df.columns
cols = [((col.split('.')[0], col)) for col in df.columns]
df.columns=pd.MultiIndex.from_tuples(cols)
print(df.T)
#print("{} : {} - {}".format(name, opt, optiondata))
code 2
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import StaleElementReferenceException,NoSuchElementException
urls = []
hotels = driver.find_elements_by_xpath("//div[starts-with(@id,'produit_affair')]")
for hotel in hotels:
link = hotel.find_element_by_xpath(".//span[@class='tittre_hotel']/a").get_attribute("href")
urls.append(link)
for url in urls:
driver.get(url)
try:
name = driver.find_element_by_xpath("//div[@class='bloc_titre_hotels']/h2").text
arropt = driver.find_element_by_xpath("//div[contains(@class,'line_result')][1]")
opt = arropt.find_element_by_tag_name("b").text
num = len(arropt.find_elements_by_tag_name("option"))
optiondata = {}
selection = Select(driver.find_element_by_id("arrangement"))
for i in range(num):
try:
selection = Select(driver.find_element_by_id("arrangement"))
selection.select_by_index(i)
time.sleep(2)
arr = driver.find_element_by_xpath("//select[@id='arrangement']/option[@selected='selected']").text
prize = driver.find_element_by_id("prix_total").text
optiondata[arr]=prize
except StaleElementReferenceException:
pass
except NoSuchElementException:
pass
print("{} : {} - {} - {}".format(name,opt,num,optiondata))
Your code is outdated. The HTML has been changed/updated, and elements such as the one with id boutonr no longer exist on the page.
Your loop and order of execution are wrong, so the code keeps evaluating the same fields.
You should avoid, or at least minimise, the use of time.sleep(), as it wastes execution time. Use WebDriverWait(...) instead.
I don't speak French, so I could not understand exactly what you are after in your code, but the minimised example below should help you understand the principle.
#!/usr/bin/env python
# coding: utf-8
import time
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait, Select
from selenium.common.exceptions import StaleElementReferenceException, NoSuchElementException
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
driver = webdriver.Chrome("C:\chromedriver.exe")
driver.get('https://tn.tunisiebooking.com/')
# params to select
params = { 'destination': 'Nabeul',
'date_from': '25/08/2021',
'date_to': '26/08/2021',
'bedroom': '1' }
# select destination
destination_select = Select(WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.ID, 'ville_des'))))
destination_select.select_by_value(params['destination'])
# select bedroom
bedroom_select = Select(WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.ID, 'select_ch'))))
bedroom_select.select_by_value(params['bedroom'])
# select dates
script = f"document.getElementById('checkin').value ='{params['date_from']}';"
script += f"document.getElementById('checkout').value ='{params['date_to']}';"
script += f"document.getElementById('depart').value ='{params['date_from']}';"
script += f"document.getElementById('arrivee').value ='{params['date_to']}';"
driver.execute_script(script)
# submit form
btn_rechercher = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//div[@onclick="return submit_hotel_recherche()"]')))
btn_rechercher.click()
urls = []
hotels = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.XPATH, "//div[starts-with(@id,'produit_affair')]")))
for hotel in hotels:
link = hotel.find_element_by_xpath(".//span[@class='tittre_hotel']/a").get_attribute("href")
urls.append(link)
for url in urls:
driver.get(url)
try:
name = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@class='bloc_titre_hotels']/h2"))).text
arropt = driver.find_element_by_xpath("//div[contains(@class,'line_result')][1]")
opt = arropt.find_element_by_tag_name("b").text
num = len(arropt.find_elements_by_tag_name("option"))
optiondata = {}
achats = {}
marges= {}
for i in range(num):
try:
selection = Select(WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.ID, 'arrangement')))).select_by_index(i)
time.sleep(0.5)
arr = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, "//select[@id='arrangement']/option[@selected='selected']"))).text
prize = driver.find_element_by_id("prix_total").text
optiondata[arr] = int(prize)
except StaleElementReferenceException:
pass
print("{} : {} - {}".format(name, opt, optiondata))
except NoSuchElementException:
pass
driver.quit()
Result:
Byzance Nabeul : Chambre Double - {'All Inclusive soft': 93, 'Demi Pension': 38, 'Petit Dejeuner': 28, 'Pension Complete': 78}
Palmyra Club Nabeul Nabeul : Double Standard - {'All Inclusive soft': 92}
The following code goes to the payment page and extracts all the info there:
#!/usr/bin/env python
# coding: utf-8
import time
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait, Select
from selenium.common.exceptions import StaleElementReferenceException, NoSuchElementException
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
driver = webdriver.Chrome("/usr/local/bin/chromedriver")
driver.get('https://tn.tunisiebooking.com/')
# params to select
params = {
'destination': 'Nabeul',
'date_from': '29/08/2021',
'date_to': '30/08/2021',
'bedroom': '1'
}
# select destination
destination_select = Select(WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.ID, 'ville_des'))))
destination_select.select_by_value(params['destination'])
# select bedroom
bedroom_select = Select(WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.ID, 'select_ch'))))
bedroom_select.select_by_value(params['bedroom'])
# select dates
script = f"document.getElementById('checkin').value ='{params['date_from']}';"
script += f"document.getElementById('checkout').value ='{params['date_to']}';"
script += f"document.getElementById('depart').value ='{params['date_from']}';"
script += f"document.getElementById('arrivee').value ='{params['date_to']}';"
driver.execute_script(script)
# submit form
btn_rechercher = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//div[@onclick="return submit_hotel_recherche()"]')))
btn_rechercher.click()
urls = []
hotels = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.XPATH, "//div[starts-with(@id,'produit_affair')]")))
for hotel in hotels:
link = hotel.find_element_by_xpath(".//span[@class='tittre_hotel']/a").get_attribute("href")
urls.append(link)
for url in urls:
driver.get(url)
try:
name = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@class='bloc_titre_hotels']/h2"))).text
arropt = driver.find_element_by_xpath("//div[contains(@class,'line_result')][1]")
opt = arropt.find_element_by_tag_name("b").text
num = len(arropt.find_elements_by_tag_name("option"))
optiondata = {}
achats = {}
marges= {}
try:
selection = Select(WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.ID, 'arrangement'))))
time.sleep(0.5)
arr = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, "//select[@id='arrangement']/option[@selected='selected']"))).text
prize = driver.find_element_by_id("prix_total").text
optiondata[arr] = (int(prize))
btn_passe = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.ID, 'resa')))
btn_passe.click()
tot = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.ID, 'montant_total_apres_code')))
total = int(tot.text.replace(' €', ''))
# params to select
params = {
'civilite_acheteur': 'Mlle',
'prenom_acheteur': 'test',
'nom_acheteur': 'test',
'e_mail_acheteur': 'test@gmail.com',
'portable_acheteur': '22222222',
'ville_acheteur': 'Test',
}
# select civilite
civilite_acheteur = Select(WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.NAME, 'civilite_acheteur'))))
civilite_acheteur.select_by_value(params['civilite_acheteur'])
# saisir prenom
script = f"document.getElementsByName('prenom_acheteur')[0].value ='{params['prenom_acheteur']}';"
script += f"document.getElementsByName('nom_acheteur')[0].value ='{params['nom_acheteur']}';"
script += f"document.getElementsByName('e_mail_acheteur')[0].value ='{params['e_mail_acheteur']}';"
script += f"document.getElementsByName('portable_acheteur')[0].value ='{params['portable_acheteur']}';"
script += f"document.getElementsByName('ville_acheteur')[0].value ='{params['ville_acheteur']}';"
driver.execute_script(script)
# submit form
btn_agence = driver.find_element_by_class_name('continuez_resa')
btn_agence.click()
achat1 = int(driver.find_element_by_id('montant_a_payer').text.replace(' €', ''))
achat = int(driver.find_element_by_id('montant_restant').text.replace(' €', ''))
achat3 = float(driver.find_element_by_xpath('//div[@class="ligne_interne_total"]/div[3]/div[@class="prix_total1 text_shadow"]').text.replace(' TND', ''))
achats[arr]=achat
marge =int(((float(prize) - float(achat)) / float(achat)) * 100);
marges[arr]=marge
optiondata[arr]=prize,total,achat1,achat,achat3,marge
except StaleElementReferenceException:
pass
print("{} : {} - {}".format(name, opt, optiondata))
except NoSuchElementException:
pass
driver.quit()
Output:
Byzance Nabeul : Chambre Double - {'Petit Dejeuner': (36, 41, 12, 29, 4.0, 24)}
Where:
36 = Prix Total
41 = Montant Total
12 = Montant de l'acompte
29 = Vous payerez le reste à votre arrivée à l'hôtel
4.0 = Total taxe de séjour à payer sur place à l'hôtel est
24 = Marges
Hotel page: (screenshot omitted)
You are using sleeps to load the pages in your first example but not in your second one (the one you state works just fine).
This is typically not the way you want to use Selenium, and it leads me to believe that your timing is off.
This SO answer shows you how to use "Explicit Waits" on expected_conditions so that you don't rely on specific timings, which can and will fail.
You even create a wait object but never use it.
Use it in conjunction with expected_conditions, remove the specific timed sleeps, and things will get better.
The expected_conditions docs are here.
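For illustration, a minimal sketch of swapping a fixed sleep for an explicit wait (the arrangement id comes from the code above; the 10-second timeout is just an example):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

wait = WebDriverWait(driver, 10)
# Instead of sleep(10) after loading the hotel page, wait only as long as
# the element the next step actually depends on needs to become clickable.
arrangement_select = wait.until(EC.element_to_be_clickable((By.ID, "arrangement")))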
The problem was that it couldn't access the element listing the arrangements for the rest of the hotels in the list. I added a function that tests for the presence of that data, and it worked:
for url in urls:
driver.get(url)
def existsElement(xpath):
try:
driver.find_element_by_id(xpath);
except NoSuchElementException:
return "false"
else:
return "true"
if (existsElement('result_par_arrangement')=="false"):
btn_t = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.XPATH, '//*[@id="moteur_rech"]/form/div/div[3]/div')))
btn_t.click()
else :
pass
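As a side note (not part of the original answer), the same presence check can be written with an explicit wait instead of a helper that returns "true"/"false" strings; a sketch using the same ids and XPath as above:
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

try:
    # Wait briefly for the arrangements block; if it never shows up,
    # click the button that reveals it.
    WebDriverWait(driver, 5).until(
        EC.presence_of_element_located((By.ID, "result_par_arrangement"))
    )
except TimeoutException:
    btn_t = WebDriverWait(driver, 10).until(
        EC.element_to_be_clickable((By.XPATH, '//*[@id="moteur_rech"]/form/div/div[3]/div'))
    )
    btn_t.click()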

Trying to close popover - Python - Selenium - Glassdoor

Trying to close a popover while scraping Glassdoor for jobs (it keeps popping up from time to time, and I need to close it every time). I've tried quite a few things.
I tried closing it by looking for the close button. Please help!
driver.find_element_by_class_name("SVG_Inline modal_closeIcon").click()
Tried looking for a ElementClickInterceptedException when the bot couldn't click on the next company, and everywhere else there was a click
element = WebDriverWait(driver, 3).until(EC.presence_of_element_located((By.CLASS_NAME, "SVG_Inline-svg modal_closeIcon-svg")))
element.click()
This is the website:
https://www.glassdoor.co.uk/Job/web-developer-jobs-SRCH_KO0,13.htm
This is the complete code:
from selenium.common.exceptions import NoSuchElementException, ElementClickInterceptedException, StaleElementReferenceException
from selenium import webdriver
import time
import pandas as pd
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def get_jobs(keyword, num_jobs, verbose, place):
'''Gathers jobs as a dataframe, scraped from Glassdoor'''
#Initializing the webdriver
options = webdriver.ChromeOptions()
#Uncomment the line below if you'd like to scrape without a new Chrome window every time.
#options.add_argument('headless')
#Change the path to where chromedriver is in your home folder.
#driver = webdriver.Chrome(executable_path="/Users/omersakarya/Documents/GitHub/scraping-glassdoor-selenium/chromedriver", options=options)
driver = webdriver.Chrome()
driver.set_window_size(1120, 1000)
url = "https://www.glassdoor.co.uk/Job/web-developer-jobs-SRCH_KO0,13.htm"
driver.get(url)
jobs = []
time.sleep(3)
driver.find_element_by_id("onetrust-accept-btn-handler").click()
time.sleep(3)
while len(jobs) < num_jobs: #If true, should be still looking for new jobs.
job_buttons = driver.find_elements_by_class_name("jl") #jl for Job Listing. These are the buttons we're going to click.
try:
for job_button in job_buttons:
if len(jobs) >= num_jobs:
break
print("Progress: {}".format("" + str(len(jobs)) + "/" + str(num_jobs)))
job_button.click()
collected_successfully = False
while not collected_successfully:
try:
company_name = driver.find_element_by_xpath('.//div[@class="employerName"]').text
location = driver.find_element_by_xpath('.//div[@class="location"]').text
job_title = driver.find_element_by_xpath('.//div[contains(@class, "title")]').text
job_description = driver.find_element_by_xpath('.//div[@class="jobDescriptionContent desc"]').text
collected_successfully = True
except:
time.sleep(5)
try:
#salary_estimate = driver.find_element_by_xpath('.//span[#class="gray salary"]').text
salary_estimate = driver.find_element_by_xpath('//*[@id="HeroHeaderModule"]/div[3]/div[1]/div[4]/span').text
except NoSuchElementException:
salary_estimate = -1 #You need to set a "not found value. It's important."
try:
rating = driver.find_element_by_xpath('.//span[@class="rating"]').text
except NoSuchElementException:
rating = -1 #You need to set a "not found value. It's important."
#Printing for debugging
if verbose:
print("Job Title: {}".format(job_title))
print("Salary Estimate: {}".format(salary_estimate))
print("Job Description: {}".format(job_description[:500]))
print("Rating: {}".format(rating))
print("Company Name: {}".format(company_name))
print("Location: {}".format(location))
#Going to the Company tab...
#clicking on this:
#<div class="tab" data-tab-type="overview"><span>Company</span></div>
try:
driver.find_element_by_xpath('.//div[@class="tab" and @data-tab-type="overview"]').click()
try:
#<div class="infoEntity">
# <label>Headquarters</label>
# <span class="value">San Francisco, CA</span>
#</div>
headquarters = driver.find_element_by_xpath('.//div[@class="infoEntity"]//label[text()="Headquarters"]//following-sibling::*').text
except NoSuchElementException:
headquarters = -1
try:
size = driver.find_element_by_xpath('.//div[@class="infoEntity"]//label[text()="Size"]//following-sibling::*').text
except NoSuchElementException:
size = -1
try:
founded = driver.find_element_by_xpath('.//div[@class="infoEntity"]//label[text()="Founded"]//following-sibling::*').text
except (NoSuchElementException, StaleElementReferenceException):
founded = -1
try:
type_of_ownership = driver.find_element_by_xpath('.//div[@class="infoEntity"]//label[text()="Type"]//following-sibling::*').text
except NoSuchElementException:
type_of_ownership = -1
try:
industry = driver.find_element_by_xpath('.//div[@class="infoEntity"]//label[text()="Industry"]//following-sibling::*').text
except NoSuchElementException:
industry = -1
try:
sector = driver.find_element_by_xpath('.//div[@class="infoEntity"]//label[text()="Sector"]//following-sibling::*').text
except NoSuchElementException:
sector = -1
try:
revenue = driver.find_element_by_xpath('.//div[@class="infoEntity"]//label[text()="Revenue"]//following-sibling::*').text
except NoSuchElementException:
revenue = -1
try:
competitors = driver.find_element_by_xpath('.//div[@class="infoEntity"]//label[text()="Competitors"]//following-sibling::*').text
except NoSuchElementException:
competitors = -1
except (NoSuchElementException,ElementClickInterceptedException,StaleElementReferenceException): #Rarely, some job postings do not have the "Company" tab.
if NoSuchElementException:
time.sleep(1)
headquarters = -1
size = -1
founded = -1
type_of_ownership = -1
industry = -1
sector = -1
revenue = -1
competitors = -1
else:
driver.find_element_by_class_name("selected").click()
driver.find_element_by_class_name("SVG_Inline modal_closeIcon").click()
element = WebDriverWait(driver, 3).until(EC.presence_of_element_located((By.CLASS_NAME, "SVG_Inline-svg modal_closeIcon-svg")))
element.click()
pass
if verbose:
print("Headquarters: {}".format(headquarters))
print("Size: {}".format(size))
print("Founded: {}".format(founded))
print("Type of Ownership: {}".format(type_of_ownership))
print("Industry: {}".format(industry))
print("Sector: {}".format(sector))
print("Revenue: {}".format(revenue))
print("Competitors: {}".format(competitors))
print("####################################################")
jobs.append({"Job Title" : job_title,
"Salary Estimate" : salary_estimate,
"Job Description" : job_description,
"Rating" : rating,
"Company Name" : company_name,
"Location" : location,
"Headquarters" : headquarters,
"Size" : size,
"Founded" : founded,
"Type of ownership" : type_of_ownership,
"Industry" : industry,
"Sector" : sector,
"Revenue" : revenue,
"Competitors" : competitors})
#You might
#time.sleep(0.5)
except (ElementClickInterceptedException, StaleElementReferenceException):
alertObj = driver.switch_to.alert
alertObj.accept()
alertObj.dismiss()
driver.find_element_by_class_name("selected").click()
driver.find_element_by_class_name("SVG_Inline modal_closeIcon").click()
element = WebDriverWait(driver, 3).until(EC.presence_of_element_located((By.CLASS_NAME, "SVG_Inline-svg modal_closeIcon-svg")))
element.click()
pass
#add job to jobs
#Clicking on the "next page" button
# try:
# driver.find_element_by_xpath('.//li[@class="page"]//a').click()
# except NoSuchElementException:
# print("Scraping terminated before reaching target number of jobs. Needed {}, got {}.".format(num_jobs, len(jobs)))
# break
# time.sleep(5)
try:
driver.find_element_by_xpath('.//li[@class="next"]//a').click()
except (ElementClickInterceptedException):
#print("Scraping terminated before reaching target number of jobs. Needed {}, got {}.".format(num_jobs, len(jobs)))
driver.find_element_by_class_name("selected").click()
driver.find_element_by_class_name("SVG_Inline modal_closeIcon").click()
element = WebDriverWait(driver, 3).until(EC.presence_of_element_located((By.CLASS_NAME, "SVG_Inline-svg modal_closeIcon-svg")))
element.click()
element.text
pass
#break
return pd.DataFrame(jobs) #This line converts the dictionary object into a pandas DataFrame.
df = gs.get_jobs(keyword, num_jobs, False, place)
Trying to get rid of this popover: [screenshot of the element I need to close so the loop can continue]
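One thing worth noting here: find_element_by_class_name (and By.CLASS_NAME) only accepts a single class name, so "SVG_Inline modal_closeIcon" will never match and typically raises an invalid-selector error. A minimal sketch of closing the popover through a CSS selector instead, assuming the class names shown in the attempts above are correct:
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By

def close_popover_if_present(driver):
    # Chain both classes in one CSS selector rather than passing a
    # compound name to By.CLASS_NAME, which is not supported.
    try:
        driver.find_element(By.CSS_SELECTOR, ".SVG_Inline.modal_closeIcon").click()
        return True
    except NoSuchElementException:
        return False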

StaleElementException while writing a text file in python using selenium webdriver

I am trying to scrape the reviews of a hotel from TripAdvisor and write them to a text file. So far the code is doing well, except that every now and then it throws a StaleElementReferenceException on the line where I write to the text file. Here is my code:
for num in range(page_count):
try:
if num != 0:
try:
nxt = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CSS_SELECTOR, "a.nav.next.rndBtn.ui_button.primary.taLnk")))
#nxt = driver.find_element_by_css_selector("a.nav.next.rndBtn.ui_button.primary.taLnk")
nxt.click()
driver.implicitly_wait(5)
except NoSuchElementException:
driver.refresh()
#driver.implicitly_wait(5)
nxt = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CSS_SELECTOR, "a.nav.next.rndBtn.ui_button.primary.taLnk")))
nxt.click()
driver.implicitly_wait(5)
try:
more = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.CSS_SELECTOR, "span.taLnk.ulBlueLinks")))
#more = driver.find_element_by_css_selector("span.taLnk.ulBlueLinks")
more.click()
time.sleep(1)
except TimeoutException:
print("There is no 'more' button on page %d" % (num+1))
except WebDriverException:
nxt = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CSS_SELECTOR, "span.taLnk.ulBlueLinks")))
nxt.click()
driver.implicitly_wait(5)
review_result = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.CLASS_NAME, 'entry')))
with open('New_Review.txt', 'a') as fid:
for review in review_result:
fid.write(unidecode(review.text))
fid.write(sep)
fid.write(line_break)
print ("processing done till page number %d" % (num+1))
except StaleElementReferenceException:
driver.refresh()
driver.implicitly_wait(5)
try:
more = WebDriverWait(driver, 10).until(EC.element_to_be_clickable((By.CSS_SELECTOR, "span.taLnk.ulBlueLinks")))
#more = driver.find_element_by_css_selector("span.taLnk.ulBlueLinks")
more.click()
except TimeoutException:
print("There is no 'more' button on page %d" % (num+1))
except WebDriverException:
nxt = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CSS_SELECTOR, "span.taLnk.ulBlueLinks")))
nxt.click()
driver.implicitly_wait(5)
review_result = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.CLASS_NAME, 'entry')))
#print (review_result[1].text)
with open('New_Review.csv', 'a') as fid:
writer = csv.writer(fid, delimiter = ',', line_break = '\n')
for review in review_result:
fid.write(unidecode(review.text))
fid.write(sep)
fid.write(line_break)
print ("processing done till page number %d" % (num+1))
Here is the error:
StaleElementReferenceException: stale element reference: element is
not attached to the page document
The traceback gives this line:
fid.write(unidecode(review.text))
I have already tried to handle the exception, but it's not working for me, and I am having a hard time figuring out where exactly I am wrong. Any help is appreciated!
Try creating a helper method such as:
def get_text(locator):
    staled = True
    while staled:
        try:
            return WebDriverWait(driver, 10).until(EC.presence_of_element_located(locator)).text
        except StaleElementReferenceException:
            pass  # log something here, or limit the retries to a certain number
Then change how you get the text:
review_result = WebDriverWait(driver, 10).until(EC.presence_of_all_elements_located((By.CLASS_NAME, 'entry')))
num_of_review = len(review_result)
with open('New_Review.txt', 'a') as fid:
    for index in range(1, num_of_review + 1):
        # Re-locate each review by its (1-based) XPath index so a stale
        # reference can simply be retried inside get_text().
        review_text = get_text((By.XPATH, f"(//*[@class='entry'])[{index}]"))
        fid.write(unidecode(review_text))
        fid.write(sep)
        fid.write(line_break)
