Nested divs inside iframe selenium python - python

I'm trying to use Selenium with Python to execute JavaScript on the element with the id called g-recaptcha-response.
Picture of HTML with div im targeting
But I'm getting this error saying Message: no such element: Unable to locate element. Here is the script that I have so far:
# Click the form button, hide the overlay, switch into the reCAPTCHA iframe,
# and un-hide the g-recaptcha-response element so JavaScript can target it.
import time

from selenium import webdriver  # was missing: `webdriver` is used below
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait

driver = webdriver.Chrome(executable_path="C:\\chromedriver.exe")
driver.get("https://testform2020.bss.design")

# Open up where the id is located.
driver.find_element_by_class_name('btn-block').click()

# Remove the overlay that would otherwise intercept interaction.
driver.execute_script("document.querySelector('body > div:nth-child(6)').style.display = 'none'")

# Switch into the first iframe and STAY there. The original script called
# driver.switch_to.default_content() right after switching, which undid the
# frame switch and caused the "Unable to locate element" error.
iframes = driver.find_elements_by_tag_name("iframe")
driver.switch_to.frame(iframes[0])

time.sleep(3)
container = driver.find_element_by_name('g-recaptcha-response')
driver.execute_script("arguments[0].style.display = 'block';", container)

The problem was that I had driver.switch_to.default_content(), which was switching back to the main content, but I needed to go further down into the page.

Related

How to Fix: Unable to locate element: method- XPath

This is the website I am trying to automate some clicks:
I have tried clicking the button using Xpath and FullXpath, but still got no luck.
This is the simple code:
# Question's original attempt. XPath attribute tests use '@', not '#' — the
# '#class' in the post was a rendering artifact and raises an invalid-selector
# error. `options` is assumed to be defined earlier in the asker's script.
w = webdriver.Chrome(executable_path='chromedriver.exe',
                     chrome_options=options)
w.get("https://quillbot.com/")
time.sleep(5)
pasteXpath = "//button[contains(@class,'outlinedPrimary') and .//span[contains(text(),'Paste Text')]]"
element = w.find_element_by_xpath(pasteXpath).click()
But it fails with this message in the console:
selenium.common.exceptions.NoSuchElementException: Message: no such element: Unable to locate element: {"method":"xpath","selector":"//*[@id="inOutContainer"]/div[2]/div[2]/div/div[1]/div/div/div[1]/div/div/div[2]/div/div/button/span[1]/div"}
Please show me how to automate this click using selenium.
I recommend using By, WebDriverWait, and expected_conditions in the place of .find_element_by_xpath.
After you click the paste button you will receive a permissions prompt. See below to get past it.
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
import pyautogui

# NOTE: the original answer also imported
# selenium.webdriver.firefox.service.Service, which was immediately shadowed
# by the Chrome Service import above; only the Chrome Service is used.

service = Service('C:\\Path_To_Your\\chromedriver.exe')
driver = webdriver.Chrome(service=service)
driver.get('https://quillbot.com/')

# Wait (up to 3 s) until the "Paste Text" button is visible, then click it.
paste_button = WebDriverWait(driver, 3).until(EC.visibility_of_element_located(
    (By.XPATH, "//span[text()='Paste Text']")))
paste_button.click()

# Clicking "Paste Text" opens a browser clipboard-permission prompt that
# Selenium cannot reach; drive it with OS-level key presses instead.
time.sleep(2)
pyautogui.press('tab')
pyautogui.press('tab')
pyautogui.press('enter')
This will work:
# XPath attribute tests use '@class'; the '#class' shown in the original post
# was a copy/rendering artifact and would raise an invalid-selector error.
pasteXpath = "//button[contains(@class,'outlinedPrimary') and .//span[contains(text(),'Paste Text')]]"
element = w.find_element_by_xpath(pasteXpath).click()
Don't forget to add some wait / delay before it to make sure the page is fully loaded.
Try to use CSS selector instead:
element = w.find_element_by_css_selector('div[class*="MuiGrid-root"] > div[class="jss473"]').click()
You can find all the doc about css selector here

Python, Selenium. Google Chrome. Web Scraping. How to navigate between 'tabs' in website

I'm quite a noob in Python and right now I'm building a web scraper in Selenium that should take all URLs for the products in the clicked 'tab' on the web page. But my code takes the URLs from the first 'tab'. Code below. Thank you, guys. I'm starting to get kind of frustrated, lol.
Screenshot
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
from lxml import html

# Raw string: a plain 'C:\Program Files (x86)\...' literal is fragile because
# a backslash followed by certain letters forms an escape sequence.
PATH = r'C:\Program Files (x86)\chromedriver.exe'
driver = webdriver.Chrome(PATH)

url = 'https://www.alza.sk/vypredaj-akcia-zlava/e0.htm'
driver.get(url)

# Click the second tab (XPath attributes use '@', not '#').
driver.find_element_by_xpath('//*[@id="tabs"]/ul/li[2]').click()

links = []
try:
    # Wait for the product listing of the newly selected tab to load.
    WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CLASS_NAME, 'blockFilter')))
    link = driver.find_elements_by_xpath("//a[@class='name browsinglink impression-binded']")
    for i in link:
        links.append(i.get_attribute('href'))
finally:
    driver.quit()

print(links)
To select current tab:
current_tab = driver.current_window_handle
To switch between tabs:
# switch_to_window() was deprecated and removed in Selenium 4;
# switch_to.window() is the supported form.
driver.switch_to.window(driver.window_handles[1])
driver.switch_to.window(driver.window_handles[-1])
Assuming you have the new tab url as TAB_URL, you should try:
from selenium.webdriver.common.action_chains import ActionChains
action = ActionChains(driver)
# Ctrl+click opens the target in a new browser tab.
# NOTE(review): ActionChains.click() expects a WebElement, not a URL string;
# TAB_URL should presumably be the link element itself — confirm intent.
action.key_down(Keys.CONTROL).click(TAB_URL).key_up(Keys.CONTROL).perform()
Also, apparently the li doesn't have a click event. Are you sure the element you are getting with '//*[@id="tabs"]/ul/li[2]' has the aria-selected property set to true, or any of these classes: ui-tabs-active ui-state-active?
If not, you should call click on the a tag inside this li.
Then you should increase the timeout parameter of your WebDriverWait to guarantee that the div is loaded.

Python, Selenium - can't locate a webpage element (button) with ID 'next'

I started learning python with selenium to automate some of my tasks. I have a problem which I'm unable to solve.
I'm trying to create a script which would automatically click the 'next' button.
I can't locate a webpage element (button) with ID 'next'. I tried every possible solution - by ID, by xpath etc.
It's possible that part of the webpage in which I'm trying to find the element is dynamically generated by jquery.
This is the error I get:
Unable to locate element: [id="next"]
Here is what I've done until this point (fragments):
# Log in to it-szkola.edu.pl, navigate to a test, and click the "next" button.
from selenium import webdriver
from selenium.common.exceptions import WebDriverException, TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time

# The path literal was broken across two lines in the post; the Windows user
# name appears to contain a space ("technik informatyk") — TODO confirm.
browser = webdriver.Firefox(
    executable_path=r'C:\Users\technik informatyk\Desktop\python\geckodriver.exe')
browser.get('https://it-szkola.edu.pl/usr')
browser.maximize_window()
browser.implicitly_wait(3)

# Dismiss the cookie banner (XPath attributes use '@', not '#').
zgoda = browser.find_element_by_xpath('//*[@id="cookieLayerBoxButton"]')
zgoda.click()
browser.implicitly_wait(10)

# Log in with the account credentials.
emailElem = browser.find_element_by_id('login-box-field-input')
emailElem.send_keys('mylogin')
passwordElem = browser.find_element_by_id('password')
passwordElem.send_keys('mypassword')
passwordElem.submit()
time.sleep(2)

# Navigate: news section -> grand test link -> test list -> the test itself.
aktualnosci = browser.find_element_by_id('hBLAkt').click()
grandtest = browser.find_element_by_xpath('//*[@id="New178"]/div[2]/div[1]/p[2]/a').click()
lista = browser.find_element_by_link_text('Lista testów').click()
test = WebDriverWait(browser, 10).until(EC.presence_of_element_located(
    (By.XPATH, "/html/body/div[2]/div[1]/div[2]/table/tbody/tr[5]/td[5]/a"))).click()

# The "next" button lives in a region the page builds dynamically (jQuery),
# so wait until it is actually clickable instead of looking it up at once —
# the immediate find_element_by_id("next") was what raised NoSuchElement.
nastepne = WebDriverWait(browser, 10).until(
    EC.element_to_be_clickable((By.ID, "next")))
nastepne.click()
Screenshots
,
,
,
,
,
Your help will be much appreciated.
Try xpath:
browser.find_element_by_xpath('//button[#id="next"]')
Try getting all buttons:
# Fallback: scan every button on the page and click the one labelled
# "Następne". (Indentation was lost in the post; restored here.)
btns = browser.find_elements_by_xpath('//button')
for btn in btns:
    txt = btn.text
    if txt == 'Następne':
        btn.click()

Script fails to keep clicking on load more button

I've written a script in Python in association with Selenium to keep clicking on the MORE button to load more items until there are no new items left to load from a webpage. However, my script below can only click once on that MORE button available at the bottom of that page.
Link to that site
This is my try so far:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

link = "https://angel.co/companies?company_types[]=Startup&company_types[]=Private+Company&company_types[]=Mobile+App&locations[]=1688-United+States"
driver = webdriver.Chrome()
wait = WebDriverWait(driver, 10)
driver.get(link)

# (Indentation was lost in the post; restored here.)
while True:
    # Print the links collected so far, then try to load more.
    for elems in wait.until(EC.presence_of_all_elements_located(
            (By.CSS_SELECTOR, ".results .name a.startup-link"))):
        print(elems.get_attribute("href"))
    try:
        loadmore = wait.until(EC.visibility_of_element_located(
            (By.CSS_SELECTOR, "[class='more']")))
        driver.execute_script("arguments[0].scrollIntoView();", loadmore)
        loadmore.click()
    except Exception:
        break

driver.quit()
How can I keep clicking on that MORE button until there are no such button left to click and parse the links as I've already tried using for loop.
I've managed to solve the problem by pursuing sir Andersson's logic within my existing script. This is what the modified script looks like.
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

link = "https://angel.co/companies?company_types[]=Startup&company_types[]=Private+Company&company_types[]=Mobile+App&locations[]=1688-United+States"
driver = webdriver.Chrome()
wait = WebDriverWait(driver, 10)
driver.get(link)

# Keep clicking MORE until it is gone; staleness_of() confirms each click
# actually replaced the button before trying again.
# (Indentation was lost in the post; restored here.)
while True:
    try:
        loadmore = wait.until(EC.visibility_of_element_located(
            (By.CSS_SELECTOR, "[class='more']")))
        driver.execute_script("arguments[0].click();", loadmore)
        wait.until(EC.staleness_of(loadmore))
    except Exception:
        break

# Only after everything is loaded, collect every product link.
for elems in wait.until(EC.presence_of_all_elements_located(
        (By.CSS_SELECTOR, ".results .name a.startup-link"))):
    print(elems.get_attribute("href"))

driver.quit()
why not just?
// C# equivalent: loop while a "more" button still exists, clicking it each
// pass; FindElements returns an empty list (no throw) when none remain.
while (driver.FindElements(By.ClassName("more")).Count > 0)
{
driver.FindElement(By.ClassName("more")).Click();
//Some delay to wait lazyload to complete
}
c# example. pretty sure that it can be done with python as well

How to reach inside <main> tags using selenium Python?

I am using Python 2.7.12 and Selenium 3.0.2.
I wanted to find a &lt;main&gt; tag inside a &lt;section&gt; tag; here is the code:
# Search semanticscholar.org and try to locate the results <main> element.
from selenium import webdriver  # was missing in the snippet
from selenium.webdriver.common.keys import Keys  # was missing in the snippet

driver = webdriver.Chrome()
driver.get("https://www.semanticscholar.org/")

# `keyword` is assumed to be defined earlier in the asker's script.
# (XPath attributes use '@', not '#'.)
input_t = driver.find_element_by_xpath('//input[@type="search"]')
input_t.send_keys(keyword)
input_t.send_keys(Keys.ENTER)

# This raises NoSuchElementException when run immediately: the results page
# (and its <main>) is rendered asynchronously after the search.
target = driver.find_element_by_xpath('//main')
Running this, I got an exception:
selenium.common.exceptions.NoSuchElementException: Message: no such element: Unable to locate element: {"method":"xpath","selector":"//main"}
But actually, there is indeed a &lt;main&gt; tag in the page:
<main class="main-column results" data-reactid=".dyth4mk2kg.0.1.0.1"><div class="controls" data-reactid=".dyth4mk2kg.0.1.0.1.1">
...
</main>
It's just timing issue. You should try using Explicit Waits to wait until main tag loaded and visible after clicking on search button as below :-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
driver = webdriver.Chrome()
driver.get("https://www.semanticscholar.org/")
#Now enter text to search
# NOTE(review): `keyword` must be defined before this point (it comes from
# the question's surrounding context).
driver.find_element_by_name("q").send_keys(keyword)
#Now click on search button
driver.find_element_by_css_selector(".search-bar .button").click()
#Now wait until main tag visible -- the explicit wait is what fixes the
#timing issue the question describes
target = WebDriverWait(driver, 30).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "main.main-column.results")))

Categories

Resources