I am writing a program to scrape the mobile phones section of the Amazon website, but it raises a TimeoutException even though the page loads in time.
Here is my code:
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import NoSuchElementException
from bs4 import BeautifulSoup
import urllib.request
class Amazon_all_mobile_scraper:
    def __init__(self):
        self.driver = webdriver.Firefox()
        self.delay = 60
        self.url = "https://www.amazon.in/mobile-phones/b/ref=sd_allcat_sbc_mobcomp_all_mobiles?ie=UTF8&node=1389401031"

    def load_amazon(self):
        self.driver.get(self.url)
        try:
            # Wait until at least one element with class "acs-ln-link" is present
            wait = WebDriverWait(self.driver, self.delay)
            wait.until(EC.presence_of_element_located((By.CLASS_NAME, "acs-ln-link")))
            print("Page is ready.")
        except TimeoutException:
            print("Took too much time to load!")
        except:
            print("Something went wrong in loading part!!")

    def extract_list_of_mobiles(self):
        try:
            # This is the XPath that matches nothing
            mobile_list = self.driver.find_element_by_xpath('//div[@class = "acs-ln-link"]')
            print(mobile_list)
        except NoSuchElementException:
            print("Sorry, Unable to get the requested element")


scraper = Amazon_all_mobile_scraper()
scraper.load_amazon()
scraper.extract_list_of_mobiles()
Please help me figure out what is wrong with this code. The XPath matches nothing.
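In case it is useful, this is the kind of quick check I could add after load_amazon() to see whether any element with that class is actually present in the page source that Selenium sees (it reuses BeautifulSoup, which I already import; the helper name dump_matching_classes is just for illustration and is not part of my scraper):

from bs4 import BeautifulSoup

def dump_matching_classes(driver, class_name):
    # Parse the page source exactly as Selenium currently sees it
    soup = BeautifulSoup(driver.page_source, "html.parser")
    # Collect every element (any tag) that carries the given class
    matches = soup.find_all(class_=class_name)
    print("Found", len(matches), "elements with class", class_name)
    for element in matches[:5]:
        # Print the tag name so I can see whether the class sits on a <div>, <a>, etc.
        print(element.name, element.get("class"))

For example, calling dump_matching_classes(scraper.driver, "acs-ln-link") would tell me whether that class exists at all and on which tag it appears.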