Script fails to keep clicking on load more button - python

I've written a script in Python, in association with Selenium, to keep clicking a MORE button to load more items until there are no new items left to load on a webpage. However, my script below only manages to click that MORE button, which sits at the bottom of the page, once.
Link to that site
This is my attempt so far:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
link = "https://angel.co/companies?company_types[]=Startup&company_types[]=Private+Company&company_types[]=Mobile+App&locations[]=1688-United+States"
driver = webdriver.Chrome()
wait = WebDriverWait(driver, 10)
driver.get(link)
while True:
    for elems in wait.until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".results .name a.startup-link"))):
        print(elems.get_attribute("href"))
    try:
        loadmore = wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "[class='more']")))
        driver.execute_script("arguments[0].scrollIntoView();", loadmore)
        loadmore.click()
    except Exception:
        break
driver.quit()
How can I keep clicking that MORE button until there is no such button left to click, and then parse the links the way I've already tried with the for loop?

I've managed to solve the problem by following Andersson's logic within my existing script: click the MORE button via execute_script() and then wait for the old button reference to go stale before looping again. This is what the modified script looks like.
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
link = "https://angel.co/companies?company_types[]=Startup&company_types[]=Private+Company&company_types[]=Mobile+App&locations[]=1688-United+States"
driver = webdriver.Chrome()
wait = WebDriverWait(driver, 10)
driver.get(link)
while True:
    try:
        loadmore = wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, "[class='more']")))
        driver.execute_script("arguments[0].click();", loadmore)
        wait.until(EC.staleness_of(loadmore))
    except Exception:
        break
for elems in wait.until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".results .name a.startup-link"))):
    print(elems.get_attribute("href"))
driver.quit()

Why not just do this?
while (driver.FindElements(By.ClassName("more")).Count > 0)
{
    driver.FindElement(By.ClassName("more")).Click();
    // some delay here to wait for the lazy load to complete
}
This is a C# example, but I'm pretty sure it can be done with Python as well.
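A rough Python equivalent of that loop would be a sketch like the one below; it assumes a driver has already been created as in the scripts above, and the fixed delay is exactly the crude part that the staleness_of() wait above avoids:
import time
from selenium.webdriver.common.by import By
# keep clicking as long as a MORE button is still present in the DOM
while len(driver.find_elements(By.CLASS_NAME, "more")) > 0:
    driver.find_element(By.CLASS_NAME, "more").click()
    time.sleep(2)  # crude delay to let the lazy load complete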

Related

Getting Trouble in Dismissing Ads from Website using Selenium

I am trying to do web automation using the Selenium library: I move to a page to find its title, but when I try to click on the find button an ad suddenly pops up, disturbs the flow, and does not allow the find button to be clicked. Let me know how I can close that ad so that I can move to the next page and get the title of that page.
Here is my code:
#Using Selenium to move towards the next pages by clicking on button
#Libs Included
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import time
#Path to Chrome Driver
path='chromedriver.exe'
driver=webdriver.Chrome(path)
#Main_Url Page
main_url='https://www.zameen.com/'
#Getting the MainPage
driver.get(main_url)
print(driver.title)
#Selecting the Drop Down Menu First
search=driver.find_element_by_class_name('eedc221b').click()
#How to move to a specific area using the finder box and get the full list of cities
list_of_cities=[]
Cities=driver.find_elements_by_class_name("d92d11c7")
#print(Cities)
for i in Cities:
    city = i.text
    list_of_cities.append(city)
#print("List of Cities are: \n",list_of_cities)
#Reach towards the first location by sending the city name to the combobox and then hit enter
driver.find_element_by_css_selector("button[aria-label='"+Cities[0].text+"']").click()
time.sleep(3)
driver.find_element_by_css_selector("a[aria-label='Find button'][class='c3901770 _22dc5e0a']").click()
try:
    WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.TAG_NAME, "html")))
    print("Title of next page is: {0}".format(driver.title))
    time.sleep(5)
finally:
    driver.quit()
That ad close button can be identified with the help of the below CSS selector:
# Path to Chrome Driver
path = 'chromedriver.exe'
driver = webdriver.Chrome(path)
wait = WebDriverWait(driver, 10)
# Main_Url Page
main_url = 'https://www.zameen.com/'
driver.maximize_window()
# Getting the MainPage
driver.get(main_url)
try:
    wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "img.close_cross_big"))).click()
except:
    print("could not click")
Imports:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
and then you can continue with the rest of your code.
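For the rest of the flow you can also swap the hard-coded time.sleep() calls from the question for explicit waits. This is only a sketch reusing the (obfuscated, and therefore brittle) class names and selectors from the question:
# open the city drop-down, pick the first city, then press the find button
wait.until(EC.element_to_be_clickable((By.CLASS_NAME, "eedc221b"))).click()
cities = wait.until(EC.presence_of_all_elements_located((By.CLASS_NAME, "d92d11c7")))
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "button[aria-label='" + cities[0].text + "']"))).click()
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, "a[aria-label='Find button'][class='c3901770 _22dc5e0a']"))).click()
After that click you can wait for the new page and read driver.title as in the original script.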

selenium wait expected conditions not working

I am trying to click the Google Store link once the Google webpage loads. I do not want to use time.sleep() to wait a few seconds for the page to load; I want the browser to click "Store" as soon as the page loads. Below is my code. What am I doing wrong?
from selenium import webdriver
import requests
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
import pause
driver = webdriver.Chrome('/applications/chromedriver')
driver.set_window_size(1024, 600)
driver.maximize_window()
driver.get("https://www.google.com")
wait = WebDriverWait(driver, 10)
element = wait.until(EC.element_to_be_clickable((By.xpath, "/html/body/div[2]/div[2]/div[1]/a[2]")))
element.click()
The xpath itself is correct too, since driver.find_element_by_xpath("/html/body/div[2]/div[2]/div[1]/a[2]").click() works.
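The likely problem is the locator constant rather than the xpath: the By class exposes By.XPATH in upper case, so By.xpath raises an AttributeError before the wait ever runs. A minimal correction of that wait line (same xpath, only the constant changed):
element = wait.until(EC.element_to_be_clickable((By.XPATH, "/html/body/div[2]/div[2]/div[1]/a[2]")))
element.click()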

Python, selenium find_element_by_link_text not working

I am trying to scrape a website where I have to press a link. For this purpose, I am using the Selenium library with the Chrome driver.
import time
from selenium import webdriver
url = 'https://sjobs.brassring.com/TGnewUI/Search/Home/Home?partnerid=25222&siteid=5011&noback=1&fromSM=true#Applications'
browser = webdriver.Chrome()
browser.get(url)
time.sleep(3)
link = browser.find_element_by_link_text("Don't have an account yet?")
link.click()
But it is not working. Any ideas why it is not working? Is there a workaround?
You can get it done in several ways; here is one of them. I've used the driver.execute_script() command to force the click. You should not go for hardcoded delays, as they are very inconsistent.
Modified script:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait as wait
from selenium.webdriver.support import expected_conditions as EC
url = 'https://sjobs.brassring.com/TGnewUI/Search/Home/Home?partnerid=25222&siteid=5011&noback=1&fromSM=true#Applications'
driver = webdriver.Chrome()
driver.get(url)
item = wait(driver, 10).until(EC.presence_of_element_located((By.CSS_SELECTOR, "a[ng-click='newAccntScreen()']")))
driver.execute_script("arguments[0].click();",item)

Facing issues while clicking on some links in a webpage

I've written a script in Python to click on some categories in a webpage. I managed to click on the first two categories but got stuck when it came to initiating the final click. I've given links leading to two images in which I have marked where to click.
This is the first link, where there is a sign (marked with a pencil) to click on to enter the second portion.
This is the second link, where I get stuck when I try to click on the names (I've marked those names with a pencil).
This is the site link.
The script I've tried so far:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
driver = webdriver.Chrome()
wait = WebDriverWait(driver, 10)
driver.get("replace_with_above_link")
wait.until(EC.element_to_be_clickable((By.CLASS_NAME, "i4ewOd-pzNkMb-ornU0b-b0t70b-Bz112c"))).click()
post = wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, "div[role='checkbox']")))[1]
post.click()
for item in wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".HzV7m-pbTTYe-JNdkSc .suEOdc"))):
    item.click()
driver.quit()
My intention is to click on the names one after another. Thanks in advance.
Try the code below to click each item in the list. After clicking a name it closes the detail pane that opens and waits for it to disappear before moving on to the next one:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
driver = webdriver.Chrome()
wait = WebDriverWait(driver, 10)
driver.get(URL)
wait.until(EC.element_to_be_clickable((By.CLASS_NAME, "i4ewOd-pzNkMb-ornU0b-b0t70b-Bz112c"))).click()
post = wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, "div[role='checkbox']")))[1]
post.click()
for item in wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".HzV7m-pbTTYe-JNdkSc .suEOdc")))[1:]:
    item.click()
    wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR, ".HzV7m-tJHJj-LgbsSe-Bz112c.qqvbed-a4fUwd-LgbsSe-Bz112c"))).click()
    wait.until(EC.invisibility_of_element_located((By.CSS_SELECTOR, ".qqvbed-p83tee")))
driver.quit()

Python Selenium - dynamically generated data not added to DOM

I am using Python Selenium and I am running this simple code:
driver = webdriver.PhantomJS()
#Also use driver = webdriver.Chrome()
driver.get("my_url")
driver.find_element_by_xpath("//*[#id='lb_LoadMore_button_text_2']").click()
print [x.text for x in driver.find_elements_by_xpath("//font[#class='ProductTitle']")]
The button I am clicking is a 'load more' button. In the Chrome webdriver I can see that the items are being loaded, but I don't know how to access them after the click has been committed.
I have also tried driver.refresh() before trying to print the elements.
I am admittedly fresh to Selenium and have not been able to find a solution.
You might just need a delay after the click, but instead of adding a time.sleep() call, let's explicitly wait for the product titles to be present via presence_of_all_elements_located():
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
driver.find_element_by_xpath("//*[#id='lb_LoadMore_button_text_2']").click()
# waiting
wait = WebDriverWait(driver, 10)
product_titles = wait.until(EC.presence_of_all_elements_located((By.XPATH, "//font[@class='ProductTitle']")))
print([x.text for x in product_titles])
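If the page hides several batches behind that button, the same idea extends to a loop. Below is a sketch that assumes the button id and the ProductTitle class from the question stay the same between loads, and keeps clicking until no new titles appear:
from selenium.common.exceptions import TimeoutException
last_count = len(driver.find_elements_by_xpath("//font[@class='ProductTitle']"))
while True:
    try:
        driver.find_element_by_xpath("//*[@id='lb_LoadMore_button_text_2']").click()
    except Exception:
        break  # the button is gone, nothing more to load
    try:
        # wait until more product titles are present than before the click
        wait.until(lambda d: len(d.find_elements_by_xpath("//font[@class='ProductTitle']")) > last_count)
    except TimeoutException:
        break  # no new items showed up within the timeout
    last_count = len(driver.find_elements_by_xpath("//font[@class='ProductTitle']"))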
