Can't find element by name using selenium - python

I'm using Selenium 4.7.2 and can't find the element by its name. The following code raises a NoSuchElementException:
from selenium import webdriver
from selenium.webdriver.common.by import By
import time
# Open the website using the Chrome webdriver
browser = webdriver.Chrome()
browser.get('https://www.woofshack.com/en/cloud-chaser-waterproof-softshell-dog-jacket-ruffwear-rw-5102.html')
# Print out the result
price = browser.find_element(By.NAME, 'data-price-665')
print("Price: " + price.text)
# Close the browser
time.sleep(3)
browser.close()
What's wrong with my use of the find_element method?

Looks like you are using the wrong locator here. I see no element with a name attribute value of 'data-price-665' on that page.
The following code is working:
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
options = Options()
options.add_argument("start-maximized")
webdriver_service = Service(r'C:\webdrivers\chromedriver.exe')  # raw string so the backslashes in the path are kept literal
driver = webdriver.Chrome(service=webdriver_service, options=options)
wait = WebDriverWait(driver, 20)
actions = ActionChains(driver)
url = "https://www.woofshack.com/en/cloud-chaser-waterproof-softshell-dog-jacket-ruffwear-rw-5102.html"
driver.get(url)
price = wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#product-price-665 .price")))
print("Price: " + price.text)
The output is:
Price: €112.95
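As a side note, By.NAME matches the HTML name attribute, and 'data-price-665' is not a name value on that page. Since the CSS selector above shows the price wrapper has the id product-price-665, a By.ID lookup is a possible alternative; this is only a sketch that assumes that id is still present:
# Hedged alternative: locate the price wrapper by its id (taken from the CSS selector above).
# Note: the wrapper's text may contain more than just the inner .price span.
price = wait.until(EC.visibility_of_element_located((By.ID, "product-price-665")))
print("Price: " + price.text)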

Related

Python Selenium element error: wait function is not helping

This is my first attempt to log in to a website using Selenium. I have written the piece of code below, but I'm not able to log in to the website and I'm getting the following error message.
Message: no such element: Unable to locate element: {"method":"css selector","selector":"[name="txtUsername"]"}
(Session info: chrome=108.0.5359.95)
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
s=Service("C:\Program Files\Google\Chrome\Application\chromedriver.exe")
driver = webdriver.Chrome(service=s)
driver.maximize_window()
driver.get("https://opensource-demo.orangehrmlive.com")
wait = WebDriverWait(driver, 10)
driver.find_element(By.NAME,"txtUsername").send_keys("Admin")
driver.find_element(By.ID, "txtPassword").send_keys("admin123")
driver.quit()
I'm looking for some suggestions.
There are two problems with your code:
You created the wait object but never used it.
Your locators are wrong.
The following code works:
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
options = Options()
options.add_argument("start-maximized")
webdriver_service = Service(r'C:\webdrivers\chromedriver.exe')
driver = webdriver.Chrome(service=webdriver_service, options=options)
wait = WebDriverWait(driver, 20)
url = "https://opensource-demo.orangehrmlive.com"
driver.get(url)
wait.until(EC.element_to_be_clickable((By.NAME, "username"))).send_keys("Admin")
wait.until(EC.element_to_be_clickable((By.NAME, "password"))).send_keys("admin123")
wait.until(EC.element_to_be_clickable((By.TAG_NAME, "button"))).click()
The wait needs to be called:
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC  # needed for the EC conditions below
s = Service(r"C:\Program Files\Google\Chrome\Application\chromedriver.exe")
driver = webdriver.Chrome(service=s)
driver.maximize_window()
driver.get("https://opensource-demo.orangehrmlive.com")
wait = WebDriverWait(driver, 10)
# Wait until the username field is clickable before typing into it
user = wait.until(EC.element_to_be_clickable((By.NAME, "username")))
user.send_keys("Admin")
driver.find_element(By.NAME, "password").send_keys("admin123")
driver.quit()

Web scraping with Selenium and BeautifulSoup can't find anything

I am trying to extract all the descriptions from the links in the class="publication u-padding-xs-ver js-publication" elements of this website: https://www.sciencedirect.com/browse/journals-and-books?accessType=openAccess&accessType=containsOpenAccess
I tried both BeautifulSoup and Selenium, but I can't extract anything.
Here is the code I am using
options = Options()
options.add_argument("headless")
driver = webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=options)
ul = driver.find_element(By.ID, "publication-list")
print("Links")
allLi = ul.find_elements(By.TAG_NAME, "li")
for li in allLi:
    print("Links " + str(count) + " " + li.text)
You are missing waits.
You have to wait for elements to become visible before accessing them.
The best approach is to use WebDriverWait with expected_conditions explicit waits.
The following code works:
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
options = Options()
options.add_argument("start-maximized")
webdriver_service = Service(r'C:\webdrivers\chromedriver.exe')
driver = webdriver.Chrome(options=options, service=webdriver_service)
wait = WebDriverWait(driver, 20)
url = "https://www.sciencedirect.com/browse/journals-and-books?accessType=openAccess&accessType=containsOpenAccess"
driver.get(url)
ul = wait.until(EC.visibility_of_element_located((By.ID, "publication-list")))
allLi = wait.until(EC.presence_of_all_elements_located((By.TAG_NAME, "li")))
print(len(allLi))
The output is:
167
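Since the question was about extracting the text of each publication entry rather than counting them, here is a minimal follow-up sketch that scopes the li lookup to the publication list and prints every entry; it reuses the locators and the wait object from the code above:
# Scope the search to the publication list and print each entry's text.
ul = wait.until(EC.visibility_of_element_located((By.ID, "publication-list")))
all_li = ul.find_elements(By.TAG_NAME, "li")
for count, li in enumerate(all_li, start=1):
    print("Link " + str(count) + " " + li.text)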

XPath gives empty output using Selenium

I am not getting the price; it gives me empty output. This is the page link: https://www.amazon.com/dp/B00M0DWQYI?th=1
from selenium import webdriver
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support.select import Select
from selenium.webdriver.support import expected_conditions as EC
import pandas as pd
url='https://www.amazon.com/dp/B00M0DWQYI?th=1'
PATH="C:\Program Files (x86)\chromedriver.exe"
driver =webdriver.Chrome(PATH)
driver.get(url)
item=dict()
try:
item['price'] = driver.find_element(By.XPATH, "//div[#id='corePrice_feature_div'] //span[#class='a-offscreen']").text
except:
item['price']=''
print(item)
You may want to wait for that element to properly load, prior to locating it:
[...]
wait = WebDriverWait(driver, 10)
item['price'] = wait.until(EC.element_to_be_clickable((By.XPATH, "//div[@id='corePrice_feature_div']//span[@class='a-offscreen']"))).text
Selenium documentation can be found at https://www.selenium.dev/documentation/
EDIT: Here is a complete example of how you can get that information:
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
import time as t
chrome_options = Options()
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument('disable-notifications')
chrome_options.add_argument("window-size=1920,1080")
webdriver_service = Service("chromedriver/chromedriver") ## path to where you saved chromedriver binary
driver = webdriver.Chrome(service=webdriver_service, options=chrome_options)
wait = WebDriverWait(driver, 5)
items = dict()
driver.get('https://www.amazon.com/dp/B00M0DWQYI?th=1')
t.sleep(1)
driver.refresh()
items['price'] = wait.until(EC.element_to_be_clickable((By.XPATH, '//div[@id="corePrice_feature_div"]//span[@class="a-price aok-align-center"]'))).text.replace('\n', '.')
print(items)
Result in terminal:
{'price': '$32.98'}
You need to wait for the element's visibility and then extract its text.
The following Selenium code works:
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
options = Options()
options.add_argument("start-maximized")
webdriver_service = Service(r'C:\webdrivers\chromedriver.exe')
driver = webdriver.Chrome(service=webdriver_service, options=options)
url = 'https://www.amazon.com/dp/B00M0DWQYI'
driver.get(url)
wait = WebDriverWait(driver, 10)
print(wait.until(EC.visibility_of_element_located((By.XPATH, "//div[@id='corePrice_feature_div']"))).text)
The output is:
$32
98
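The value is split over two lines because Amazon renders the cents in a separate element, so .text contains a newline. If a single string is preferred, the newline can be replaced, just like in the earlier answer:
# Join the dollar and cent parts that are rendered on separate lines.
price_text = wait.until(EC.visibility_of_element_located((By.XPATH, "//div[@id='corePrice_feature_div']"))).text
print(price_text.replace('\n', '.'))  # prints something like $32.98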
You can use bs4 and it will work fine
from bs4 import BeautifulSoup
soup = BeautifulSoup(driver.page_source, 'lxml')
try:
    item['price'] = soup.find('input', id="attach-base-product-price").get('value')
except:
    item['price'] = ''
finally:
    driver.close()
    driver.quit()
print(item)
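Keep in mind that this bs4 approach only works because Selenium has already loaded and rendered the page before driver.page_source is parsed; BeautifulSoup by itself cannot run the JavaScript that fills in the price. A condensed sketch of the whole flow, assuming the hidden input with id attach-base-product-price is still present on the page, might look like this:
from bs4 import BeautifulSoup
from selenium import webdriver

# Load the rendered page with Selenium, then parse the HTML with BeautifulSoup.
driver = webdriver.Chrome()
driver.get('https://www.amazon.com/dp/B00M0DWQYI?th=1')
soup = BeautifulSoup(driver.page_source, 'lxml')

item = dict()
price_input = soup.find('input', id="attach-base-product-price")  # hidden input used in the answer above
item['price'] = price_input.get('value') if price_input else ''
driver.quit()
print(item)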

Selenium WebDriver driver.find_element(By.XPATH, '') is not working - Python

https://www.espncricinfo.com/player/aamer-jamal-793441
This is the URL, and I am trying to access the full name "Aamer Jamal" with the help of Selenium WebDriver, but I don't know why it gives a NoSuchElementException.
The code is written below:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium_firefox import Firefox
import time
import pandas as pd
driver = webdriver.Firefox()
#Reach to the Landing page
driver.get('https://www.espncricinfo.com/player/aamer-jamal-793441')
driver.maximize_window()
time.sleep(25)
not_now = driver.find_element(By.ID, 'wzrk-cancel')
not_now.click()
fullname = driver.find_element(By.XPATH, '/html/body/div[1]/section/section/div[4]/div[2]/div/div[1]/div/div/div[1]/div[1]/span/h5')
print(fullname.text)
Error :
NoSuchElementException: Message: Unable to locate element: /html/body/div[1]/section/section/div[4]/div[2]/div/div[1]/div/div/div[1]/div[1]/span
You have to use WebDriverWait expected_conditions explicit waits, not long hardcoded pauses. You also have to learn how to create correct locators: long absolute XPaths and CSS selectors are extremely brittle. The following code works:
from selenium import webdriver
from selenium.webdriver import ActionChains
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
options = Options()
options.add_argument("start-maximized")
webdriver_service = Service(r'C:\webdrivers\chromedriver.exe')
driver = webdriver.Chrome(options=options, service=webdriver_service)
wait = WebDriverWait(driver, 60)
actions = ActionChains(driver)
url = "https://www.espncricinfo.com/player/aamer-jamal-793441"
driver.get(url)
wait.until(EC.element_to_be_clickable((By.ID, 'wzrk-cancel')))
fullname = wait.until(EC.visibility_of_element_located((By.CLASS_NAME, 'ds-text-title-l'))).text
print(fullname)
The output is:
Aamer Jamal
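One detail worth noting: the wait on the wzrk-cancel button above only waits for the notification popup, it never dismisses it. If the popup overlays the page and blocks later interactions, the same wait can be extended with a click, mirroring what the question's code did:
# Optionally dismiss the notification popup instead of just waiting for it.
wait.until(EC.element_to_be_clickable((By.ID, 'wzrk-cancel'))).click()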

Selenium .send_keys() only sending first character of my string - Python

I'm trying to write a script that fills out the destination box on the Google Flights page. My code is behaving very inconsistently. Sometimes it works perfectly; other times it only types the letter 'B' rather than the full string 'Barcelona'. Sometimes I get this error message:
"element click intercepted: Element (redacted html code) is not clickable at point (547, 472). Other element would receive the click"
Any idea why it's freaking out like this? Here's my code:
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.options import Options
import os
#Give path to chrome driver using service argument so it doesn't throw the path deprecation warning
script_dir = os.path.dirname(__file__) #<-- absolute dir the script is in
chromedriver_path = '/path/to/chromedriver'
abs_chromedriver_path = os.path.join(script_dir, chromedriver_path)
driver_service = Service(executable_path = abs_chromedriver_path)
browser = webdriver.Chrome(service = driver_service)
url = 'https://www.google.com/travel/flights'
selector = "div[aria-placeholder='Where from?'] input"
phrase = "Barcelona"
browser.get(url)
WebDriverWait(browser, 20).until(EC.element_to_be_clickable((By.CSS_SELECTOR, selector))).click()
WebDriverWait(browser, 20).until(EC.element_to_be_clickable((By.CSS_SELECTOR, selector))).send_keys(phrase)
WebDriverWait(browser, 20).until(EC.element_to_be_clickable((By.CSS_SELECTOR, selector))).send_keys(Keys.ENTER)
This is one way to look for a flight from Barcelona to Vancouver, select the first suggestion from the suggestion lists for both the starting point and the destination, and click 'Search':
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time as t
chrome_options = Options()
chrome_options.add_argument("--no-sandbox")
chrome_options.add_argument('disable-notifications')
chrome_options.add_argument("window-size=1280,720")
webdriver_service = Service("chromedriver/chromedriver") ## path to where you saved chromedriver binary
browser = webdriver.Chrome(service=webdriver_service, options=chrome_options)
url = 'https://www.google.com/travel/flights'
browser.get(url)
try:
    cookie_button = WebDriverWait(browser, 3).until(EC.element_to_be_clickable((By.CSS_SELECTOR, 'button[aria-label="Reject all"]')))
    print(cookie_button.location_once_scrolled_into_view)
    t.sleep(1)
    cookie_button.click()
    print('rejected cookies')
except Exception as e:
    print('no cookie button')
t.sleep(1)
comboboxes = WebDriverWait(browser, 3).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR,'input[role="combobox"]')))
comboboxes[0].click()
comboboxes[0].clear()
comboboxes[1].send_keys('Barcelona')
t.sleep(1)
suggestion_listbox = WebDriverWait(browser, 10).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, "ul[role='listbox']")))
print(suggestion_listbox[-1].get_attribute('outerHTML'))
suggestion_listbox[-1].find_elements(By.TAG_NAME, 'li')[0].click()
comboboxes[2].click()
comboboxes[3].send_keys('Vancouver')
t.sleep(1)
suggestion_listbox = WebDriverWait(browser, 10).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, "ul[role='listbox']")))
print(suggestion_listbox[-1].get_attribute('outerHTML'))
suggestion_listbox[-1].find_elements(By.TAG_NAME, 'li')[0].click()
t.sleep(1)
WebDriverWait(browser, 3).until(EC.element_to_be_clickable((By.XPATH,'//span[text() = "Search"]'))).click()
You just have to adapt it to your own Selenium setup. Selenium docs: https://www.selenium.dev/documentation/
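If the same flow needs to run for different routes, the origin and destination steps above can be wrapped in a small helper. The sketch below is only a starting point: it reuses the combobox and listbox selectors from the answer, and the combobox indices are an assumption based on the current page layout:
# Hypothetical helper that reuses the selectors from the answer above.
def pick_city(browser, combobox, city):
    # Type a city into a Google Flights combobox and click the first suggestion.
    combobox.send_keys(city)
    t.sleep(1)
    listboxes = WebDriverWait(browser, 10).until(
        EC.presence_of_all_elements_located((By.CSS_SELECTOR, "ul[role='listbox']")))
    listboxes[-1].find_elements(By.TAG_NAME, 'li')[0].click()

# Usage with the comboboxes located in the answer above:
comboboxes[0].click()
pick_city(browser, comboboxes[1], 'Barcelona')
comboboxes[2].click()
pick_city(browser, comboboxes[3], 'Vancouver')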
