How to click multiple buttons with the same XPath and class - Python

I have a problem clicking every button on a LinkedIn profile page. On profiles that contain a lot of information about job experience, schools, and licenses, we have to expand this information by clicking the "Show more" button.
Sample profile 1
Sample profile 2
I tried many things, like searching for elements by XPath and then looping over them to click every button on the page, but it didn't work, because every button's class is the same as that of other elements we can find with Selenium. I figured out that the first "Show more" button is always for the experience section, and this code does the job of clicking it:
self.driver.execute_script("arguments[0].click();", WebDriverWait(self.driver, 3).until(
    EC.element_to_be_clickable((By.XPATH, "//li-icon[@class='pv-profile-section__toggle-detail-icon']"))))
Then we have the education section and the licenses and certifications section, and these give me trouble. My temporary solution is to click the element that contains a given string:
self.driver.find_element_by_xpath("//*[contains(text(), 'Pokaż więcej')]").click()
OR
self.driver.find_element_by_xpath("//*[contains(text(), 'Pokaż 1 uczelnię więcej')]").click()
Sooner or later this code will run into its limitations. Does anyone have a better idea of how to solve this problem?
Solution
containers = self.driver.find_elements_by_xpath("//li-icon[@class='pv-profile-section__toggle-detail-icon']")
for button in containers:
    self.driver.execute_script('arguments[0].click()', button)
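If the icons render a little later, one variation on the same idea (not from the original answer, reusing the WebDriverWait, EC and By imports the question already relies on) is to wait until the toggle icons are present before collecting and clicking them:
# wait up to 10 seconds for the toggle icons to show up, then click each one via JavaScript
buttons = WebDriverWait(self.driver, 10).until(
    EC.presence_of_all_elements_located(
        (By.XPATH, "//li-icon[@class='pv-profile-section__toggle-detail-icon']")))
for button in buttons:
    self.driver.execute_script("arguments[0].click();", button)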

I tested the page with my own code, and it seems you can get all the buttons with:
items = driver.find_elements(By.XPATH, '//div[@class="profile-detail"]//button')
for item in items:
    driver.execute_script("arguments[0].click();", item)
But there can be another problem: the page uses lazy loading, so you may need JavaScript code that scrolls down to load all the components.
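The usual workaround for lazy loading is to keep scrolling until the page height stops changing and only then collect the buttons. A minimal sketch of that idea (the full script below uses a window.scrollY variant of the same loop):
import time

# scroll to the bottom repeatedly until the document height stops growing
last_height = driver.execute_script("return document.body.scrollHeight")
while True:
    driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
    time.sleep(2)  # give the lazy-loaded sections time to render
    new_height = driver.execute_script("return document.body.scrollHeight")
    if new_height == last_height:
        break
    last_height = new_height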
Here is my full code, with some ideas in comments. I also tried selecting buttons per section, but not all methods work. Maybe it will still be useful as a starting point for other ideas.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import NoSuchElementException, TimeoutException
import time

USERNAME = 'XXXXX'
PASSWORD = 'YYYYY'

url = 'https://www.linkedin.com/in/jakub-bialoskorski/?miniProfileUrn=urn%3Ali%3Afs_miniProfile%3AACoAABp5UJ8BDpi5ZwNGebljqDlYx7OXIKgxH80'

driver = webdriver.Firefox()

# -------------------------------------

driver.get(url)
time.sleep(5)

#wait = WebDriverWait(driver, 10)

cookies = driver.find_element(By.XPATH, '//button[@action-type="ACCEPT"]')
cookies.click()
time.sleep(1)

link = driver.find_element(By.XPATH, '//p[@class="authwall-join-form__subtitle"]/button')
link.click()
time.sleep(1)

login_form = driver.find_element(By.XPATH, '//div[@class="authwall-sign-in-form"]')
time.sleep(1)

username = login_form.find_element(By.XPATH, '//input[@id="session_key"]')
username.send_keys(USERNAME)
password = login_form.find_element(By.XPATH, '//input[@id="session_password"]')
password.send_keys(PASSWORD)
time.sleep(1)

#button = login_form.find_element(By.XPATH, '//button[@type="submit"]')
button = login_form.find_element(By.XPATH, '//button[contains(text(), "Zaloguj się")]')
button.click()
time.sleep(5)

# -------------------------------------

url = 'https://www.linkedin.com/in/jakub-bialoskorski/?miniProfileUrn=urn%3Ali%3Afs_miniProfile%3AACoAABp5UJ8BDpi5ZwNGebljqDlYx7OXIKgxH80'

#from selenium.webdriver.common.action_chains import ActionChains

driver.get(url)
time.sleep(5)

# -----------

print('... scrolling for lazy loading ...')

last_height = 0
while True:
    driver.execute_script("window.scrollTo(0, window.scrollY + window.innerHeight);")
    time.sleep(2)
    new_height = driver.execute_script("return window.scrollY")
    if new_height == last_height:
        break
    last_height = new_height

# -----------

def click_items(items):
    for item in items:
        print('text:', item.text)
        #print(item.get_attribute('innerHTML'))
        #print('... scrolling ...')
        #ActionChains(driver).move_to_element(item).perform()
        print('... scrolling ...')
        driver.execute_script("arguments[0].scrollIntoView(true);", item)
        #print('... clicking ...')
        #item.click()
        #time.sleep(1)
        print('... clicking ...')
        driver.execute_script("arguments[0].click();", item)
        time.sleep(1)
        print('----')

print('\n>>> Pokaż <<<\n')
#items = driver.find_elements(By.XPATH, '//button[contains(text(), "Pokaż")]')
#click_items(items)

print('\n>>> Doświadczenie - Pokaż więcej <<<\n')
#section = driver.find_elements(By.XPATH, '//section[@id="experience-section"]')
#items = driver.find_elements(By.XPATH, '//button[contains(text(), "zobacz wi")]')
items = driver.find_elements(By.XPATH, '//button[contains(@class, "inline-show-more-text__button")]')
click_items(items)

print('\n>>> Umiejętności i potwierdzenia - Pokaż więcej <<<\n')
#section = driver.find_elements(By.XPATH, '//section[@id="experience-section"]')
items = driver.find_elements(By.XPATH, '//button[@data-control-name="skill_details"]')
click_items(items)

print('\n>>> Wyświetl <<<\n')
items = driver.find_elements(By.XPATH, '//button[contains(text(), "Wyświetl")]')
click_items(items)

print('\n>>> Rekomendacje <<<\n')
items = driver.find_elements(By.XPATH, '//button[@aria-controls="recommendation-list"]')
click_items(items)

print('\n>>> Osiągnięcia <<<\n')
print('--- projects ---')
items = driver.find_elements(By.XPATH, '//button[@aria-controls="projects-expandable-content"]')
click_items(items)
print('--- languages ---')
items = driver.find_elements(By.XPATH, '//button[@aria-controls="languages-expandable-content"]')
click_items(items)

# --- all buttons ---
#items = driver.find_elements(By.XPATH, '//div[@class="profile-detail"]//button')
#click_items(items)

Related

"element is not attached to the page document" Expection thrown after adding the implicit wait for selenium automation

I have added an implicit wait to my code and it results in the error "Message: stale element reference: element is not attached to the page document".
Below is the code:
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
import time

service_obj = Service("C:/Users/divya/Downloads/chromedriver_win32/chromedriver.exe")
chrome_options = Options()
chrome_options.add_experimental_option("detach", True)
driver = webdriver.Chrome(service=service_obj)
driver.implicitly_wait(5)

driver.get("https://rahulshettyacademy.com/seleniumPractise/#/")
driver.find_element(By.CSS_SELECTOR, ".search-keyword").send_keys("ber")
# time.sleep(2)
products = driver.find_elements(By.XPATH, "//div[@class='products']/div")
count = len(products)
assert count > 0

for product in products:
    product.find_element(By.XPATH, "div/button").click()  # =======ERROR HERE=============

driver.find_element(By.CSS_SELECTOR, ".cart-icon").click()
driver.find_element(By.XPATH, "//button[text()='PROCEED TO CHECKOUT']").click()
# time.sleep(3)
driver.find_element(By.XPATH, "//input[@type='text']").send_keys("rahulshettyacademy")
driver.find_element(By.CSS_SELECTOR, ".promoBtn").click()
time.sleep() calls work fine instead of implicit waits.
Does anyone have a suggestion or an explanation for this? Please guide me.
You need to wait for a short time after searching, because the product list is re-rendered once the filter is applied; elements located before that re-render become stale:
driver.get("https://rahulshettyacademy.com/seleniumPractise/#/")
driver.find_element(By.CSS_SELECTOR, ".search-keyword").send_keys("ber")
time.sleep(1)
products = driver.find_elements(By.XPATH, "//div[#class='products']/div")
count = len(products)
assert count > 0
i = 0
for i in range(len(products)):
driver.find_element(By.XPATH, "(//div[#class='products']//button)[" + str(i + 1) + "]").click()
driver.find_element(By.CSS_SELECTOR, ".cart-icon").click()
driver.find_element(By.XPATH, "//button[text()='PROCEED TO CHECKOUT']").click()
time.sleep(1)
driver.find_element(By.XPATH, "//input[#type='text']").send_keys("rahulshettyacademy")
driver.find_element(By.CSS_SELECTOR, ".promoBtn").click()

Having trouble with a disabled button in Selenium

I'm trying to log in to a site with Selenium. On this site, the "enter" button is disabled and only becomes enabled after you start typing. The problem is that when I enter the username with send_keys, the button won't change its enabled status to true. What can I do?
t1 = time.perf_counter()
url = 'https://auth.iupp.com.br/login?client_id=2raqh4sjj73q10efeguphmk4gn&nonce=5149eba248ed491cbde001d686e688dd&redirect_uri=https%3A%2F%2Fwww.iupp.com.br%2Fauth%2Fcallback&response_type=token&scope=profile%20email%20openid%20aws.cognito.signin.user.admin%20webpremios.campaigns%2F40455&state=dc6544eec0244aa0be61d3b8aeded338'
op = webdriver.ChromeOptions()
op.add_argument('headless')
op.add_argument("user-agent=" + ua.random)
op.add_argument("incognito")
driver = webdriver.Chrome(options=op)
driver.get(url)
driver.implicitly_wait(5)
username = driver.find_element_by_xpath(".//*[@id='username']")
username.send_keys("45143080581")
print('0')
element = WebDriverWait(driver, 20).until(EC.element_to_be_clickable((By.ID, "btnContinue")))
print('1')
driver.find_element_by_id("btnContinue").click()
bs = BeautifulSoup(driver.page_source, 'html.parser')
driver.quit()
t2 = time.perf_counter()
print(f'tempo:{t2-t1}')
PS: the output shows both 0 and 1 printed.
wait = WebDriverWait(driver, 30)
t1 = time.perf_counter()
driver.get('https://auth.iupp.com.br/login?client_id=2raqh4sjj73q10efeguphmk4gn&nonce=5149eba248ed491cbde001d686e688dd&redirect_uri=https%3A%2F%2Fwww.iupp.com.br%2Fauth%2Fcallback&response_type=token&scope=profile%20email%20openid%20aws.cognito.signin.user.admin%20webpremios.campaigns%2F40455&state=dc6544eec0244aa0be61d3b8aeded338')
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR,'#root > div > div.alertLDGPBackground > div > div > div > div.col-12.col-md-auto.txt-center > a'))).click()
print('0')
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR,'#username'))).send_keys('45143080581')
print('1')
wait.until(EC.element_to_be_clickable((By.CSS_SELECTOR,'#btnContinue'))).click()
bs = BeautifulSoup(driver.page_source, 'html.parser')
driver.quit()
t2 = time.perf_counter()
print(f'tempo:{t2-t1}')
You should click the accept button when the page loads to make sure the other element is clickable. Then just wait for the element to be clickable using explicit waits.
Imports:
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
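Putting the advice together, the minimal pattern looks roughly like this (reusing the btnContinue ID from the question):
wait = WebDriverWait(driver, 30)
# blocks until the button is visible and enabled, then clicks it
wait.until(EC.element_to_be_clickable((By.ID, "btnContinue"))).click()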

How to get the DM text in Instagram with Selenium?

I want to get the text of the sender in my console. I tried Beautiful Soup for scraping but it didn't work. I have also used several other approaches, like XPath and different class names in Selenium, but I am not able to resolve this issue.
Here is my code:
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
import time

myemail = "<username>"
mypassword = "<password>"
friendusernames = ["<sender username>"]

PATH = "C:/Chromedriver.exe"
driver = webdriver.Chrome(PATH)
url = "https://www.instagram.com/"
driver.get(url)

usernamebox = WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.NAME, 'username')))
usernamebox.send_keys(myemail)
passwordbox = driver.find_element_by_name('password')
passwordbox.send_keys(mypassword)
loginbutton = driver.find_element_by_css_selector('.Igw0E')
loginbutton.click()
print("Logging in")

dmbtn = WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.CSS_SELECTOR, '.xWeGp')))
dmbtn.click()
notificationsnotnow = WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.CSS_SELECTOR, '.HoLwm')))
notificationsnotnow.click()

for friendusername in friendusernames:
    searchuser = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, '.EQ1Mr')))
    searchuser.click()
    searchuserbox = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, '.j_2Hd')))
    searchuserbox.send_keys(friendusername)
    time.sleep(3)
    firstuser = driver.find_element_by_xpath(
        '/html/body/div[5]/div/div/div[2]/div[2]/div[1]/div')
    firstuser.click()
    pressingnext = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CSS_SELECTOR, '.rIacr')))
    pressingnext.click()
    names = driver.find_element_by_class_name(
        '_7UhW9 > span').text
    # names = driver.find_element_by_class_name(
    #     '.xLCgt').text
    # names = driver.find_element_by_class_name(
    #     '.MMzanKV-D4').text
    # names = driver.find_element_by_class_name(
    #     '.p1tLr').text
    # names = driver.find_element_by_class_name(
    #     '.hjZTB').text
    print(names)
    time.sleep(1)
I want this text in my console. How can I do so?
Since every page on the internet is made of HTML, I would inspect the page with right click, find out which tag contains the message in the chat, then get that tag's XPath or class and finally read its innerText to retrieve the string.
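As a rough sketch of that approach: the class name below is a placeholder you would have to replace with whatever the inspector shows for a chat bubble on your account, since Instagram's generated class names change frequently.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

# 'MESSAGE_CLASS' is a placeholder taken from inspecting the chat, not a real Instagram class
messages = WebDriverWait(driver, 10).until(
    EC.presence_of_all_elements_located(
        (By.XPATH, '//div[contains(@class, "MESSAGE_CLASS")]')))
for message in messages:
    # .text returns the element's rendered innerText
    print(message.text)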

Waiting for invisible elements not on the page

I am trying to scrape this webpage with the following script, but I cannot wait for this element and it does not scrape correctly.
clickMe = wait(driver, 3).until(EC.element_to_be_clickable((By.CSS_SELECTOR, ("//a[@class='style-scope match-pop-market']"))))
The element is correct in Chrome's inspector.
//a[@class='style-scope match-pop-market']
How can I get elem_href for the current page only, and not the invisible elements it seems to be picking up from other pages?
//div[@class='mpm_match_title' and .//div[@class='mpm_match_title style-scope match-pop-market']]//a[@class='style-scope match-pop-market']
This does not work, though in theory it should fix the issue. Any ideas? Current output:
None
None
None
None
None
None
None
None
None
None
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6381070
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386987
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386988
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386989
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386990
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386991
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386992
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6387025
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6387026
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6387027
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6387028
I am unable to wait for the element, because the wait picks up invisible elements that are not on the current page.
So:
//div[contains(@class, 'mpm_match_title')] #TEXT
//div[contains(@class, 'mpm_match_title style-scope match-pop-market')] #BAR
//a[contains(@class, 'style-scope match-pop-market')] #HREF
style-scope match-pop-market
Combined:
//div[contains(@class, 'mpm_match_title') and .//div[contains(@class, 'mpm_match_title style-scope match-pop-market')]//a[@class='style-scope match-pop-market']
It is unable to find anything.
Desired output:
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6381070
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386987
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386988
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386989
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386990
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386991
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6386992
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6387025
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6387026
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6387027
https://www.palmerbet.com/sports/soccer/italy-serie-b/match/6387028
Using the code from the pastebin link in the comments, I basically just modified the XPath to search for specific elements that identify the links on the current page.
from random import shuffle
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait as wait

driver = webdriver.Chrome()
driver.set_window_size(1024, 600)
driver.maximize_window()
driver.get('https://www.palmerbet.com/sports/soccer')

clickMe = wait(driver, 3).until(EC.element_to_be_clickable((By.XPATH,
    ('//*[contains(@class,"filter_labe")]'))))
options = driver.find_elements_by_xpath('//*[contains(@class,"filter_labe")]')
indexes = [index for index in range(len(options))]
shuffle(indexes)

xp = '//sport-match-grp[not(contains(@style, "display: none;"))]' \
     '//match-pop-market[@class="sport-match-grp" and ' \
     'not(contains(@style, "display: none;")) and ' \
     './/a[@id="match_link" and boolean(@href)]]'

for index in indexes:
    print(f'Loading index {index}')
    driver.get('https://www.palmerbet.com/sports/soccer')
    clickMe1 = wait(driver, 10).until(EC.element_to_be_clickable((By.XPATH,
        '(//ul[@id="tournaments"]//li//input)[%s]' % str(index + 1))))
    driver.execute_script("arguments[0].scrollIntoView();", clickMe1)
    clickMe1.click()
    try:
        # this attempts to find any links on the page
        clickMe = wait(driver, 3).until(EC.element_to_be_clickable((
            By.XPATH, xp)))
        elems = driver.find_elements_by_xpath(xp)
        elem_href = []
        for elem in elems:
            print(elem.find_element_by_xpath('.//a[@id="match_link"]')
                  .get_attribute('href'))
            elem_href.append(elem.get_attribute("href"))
    except:
        print(f'There are no matches in index {index}.')

Scroll in Selenium Webdriver (Python)

Prerequisites:
You need an Instagram account to use this script.
Set up a test environment.
Log in and open the needed list (this part works correctly):
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
driver = webdriver.Chrome(
    # driver = webdriver.Firefox(
    # driver = webdriver.PhantomJS(
    service_args=['--ignore-ssl-errors=true', '--ssl-protocol=any'])
driver.get("https://instagram.com/accounts/login")
username = driver.find_element_by_name("username")
password = driver.find_element_by_name("password")
username1 = 'instagram' # change it!
password1 = 'instagrampassword1' # change it!
username.send_keys(username1)
password.send_keys(password1)
submit_button = driver.find_element_by_css_selector(
'#react-root > div > article > div > div:nth-child(1) > div > form > span > button')
submit_button.click()
sleep(2)
link = 'https://www.instagram.com/youtube/'
driver.get(link)
driver.implicitly_wait(2)
driver.find_elements_by_class_name("_218yx")[2].click()
The scrolling is wrong.
How do I fix this block?
How do I focus and scroll correctly on this page?
My attempts:
driver.find_element_by_class_name("_cx1ua").send_keys(Keys.NULL) # focus
#The element has been deleted entirely or
#The element is no longer attached to the DOM.
driver.find_element_by_class_name("_q44m8").send_keys(Keys.NULL)
# cannot focus element
driver.find_element_by_class_name("_qjr85").send_keys(Keys.NULL)
# cannot focus element
for i in range(5):
    driver.find_element_by_class_name("_cx1ua").send_keys(Keys.END)
=============================================================
To @Moshisho:
We need to focus on some element to activate it.
The question is which element we need to choose to focus, and how.
It is not the "body"; I need something like the code below, but not with this element:
background = driver.find_element_by_css_selector("body")
# background = driver.find_element_by_css_selector("div._2uju6")
for i in range(5):
    background.send_keys(Keys.SPACE)
    time.sleep(1)
Without that focus, this command does not work.
To @Naveen:
print(driver.find_element_by_css_selector("div._a1rcs").location_once_scrolled_into_view) # {'x': 0, 'y': 0}
print(driver.find_element_by_class_name("_cx1ua").location_once_scrolled_into_view) # {'x': 376, 'y': 229}
print(driver.find_element_by_class_name("_q44m8").location_once_scrolled_into_view) # {'x': 376, 'y': 180}
print(driver.find_element_by_class_name("_qjr85").location_once_scrolled_into_view) # {'x': 376, 'y': 180}
And what's next?
driver.execute_script("window.scrollTo(0, 3000);") # do not working
Try the following code:
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from selenium.webdriver.support.ui import Select

driver = webdriver.Chrome(
    # driver = webdriver.Firefox(
    # driver = webdriver.PhantomJS(
    service_args=['--ignore-ssl-errors=true', '--ssl-protocol=any'])
driver.maximize_window()
driver.get("https://instagram.com/accounts/login")

username = driver.find_element_by_name("username")
password = driver.find_element_by_name("password")
username1 = 'instagramlogin1'  # change it!
password1 = 'instagrampassword1'  # change it!
username.send_keys(username1)
password.send_keys(password1)
submit_button = driver.find_element_by_css_selector(
    '#react-root > div > article > div > div:nth-child(1) > div > form > span > button')
submit_button.click()
sleep(2)

link = 'https://www.instagram.com/youtube/'
driver.get(link)
driver.implicitly_wait(2)

following = driver.find_element_by_xpath("//a[@href='/youtube/following/']/span")
total_following = int(following.text)
print("total no. of users following:", total_following)

# click on 239 following, displays 10 users
following.click()

loaded_following = driver.find_elements_by_xpath("//ul[@class='_539vh _4j13h']/li")
loaded_till_now = len(loaded_following)

while loaded_till_now < total_following:
    print("following users loaded till now:", loaded_till_now)
    print(loaded_following[loaded_till_now - 1])
    loaded_following[loaded_till_now - 1].location_once_scrolled_into_view
    # driver.execute_script("arguments[0].focus();", loaded_following[loaded_till_now-1])
    driver.find_element_by_tag_name('body').send_keys(Keys.END)  # triggers an AJAX request to load more users; it loads 10 users at a time
    sleep(1)  # tried without sleep, but it throws StaleElementReferenceException, as it takes time to get the response and update the DOM
    loaded_following = driver.find_elements_by_xpath("//ul[@class='_539vh _4j13h']/li")
    loaded_till_now = len(loaded_following)

# All 239 users are loaded.
driver.quit()
I observed that the browser sends an AJAX request to load more users. This action is triggered when you scroll with the mouse or press the Space or End keys.
In order to scroll the window, you need to execute JavaScript. Try this:
driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
EDIT: in order to focus an element (it must be able to receive focus, e.g. an anchor, input, button, etc.), you also need to use the JavaScript executor:
elementToFocus = driver.find_element_by_id("yourID")
driver.execute_script("arguments[0].focus();", elementToFocus)
I'm working with a dynamic React app, and I need to scroll to the bottom of the page to make React render all the data.
For unknown reasons, solutions based on JS execute_script didn't work. However, I got a send_keys solution working:
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import StaleElementReferenceException

# scroll to bottom to load all
WebDriverWait(driver, 5).until(
    EC.presence_of_element_located((By.XPATH, "//body"))
)
attempt_num = 2
while attempt_num > 0:
    try:
        elem = driver.find_element_by_xpath("//body")
        elem.click()
        elem.send_keys(Keys.END)
    except StaleElementReferenceException as e:
        print(e)
    attempt_num = attempt_num - 1
The click() on body and the retry for StaleElementReferenceException are crucial. I haven't found a more elegant way than to retry.
See the top answer of "How to avoid StaleElementReferenceException in Selenium?"
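If you want to keep the retry in one place, one option (my own sketch, not from the linked answer) is to wrap the re-locate / click / send_keys sequence in a small helper that swallows the stale reference a fixed number of times:
from selenium.common.exceptions import StaleElementReferenceException
from selenium.webdriver.common.keys import Keys

def press_end_on_body(driver, attempts=3):
    """Re-locate <body> and press END, retrying if the element goes stale mid-call."""
    for _ in range(attempts):
        try:
            body = driver.find_element_by_xpath("//body")
            body.click()
            body.send_keys(Keys.END)
            return True
        except StaleElementReferenceException:
            # the page re-rendered under us; locate <body> again and retry
            continue
    return False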
