使用 Python 和 Selenium 循环遍历动态下拉菜单并提交

时间:2017-05-06 00:13:28

标签: python selenium web-scraping

我在这个问题上已经折腾了大约一天半,但似乎毫无进展。我是 Selenium 的新手,目前正在尝试从一个动态 Web 应用中抓取数据;由于没有静态网址,BeautifulSoup(我通常的选择)不可用。我要抓取的应用是一个汽车零件目录:需要选择第一个可用年份,等品牌列表加载后选择第一个品牌,再等型号列表加载后选择第一个型号,然后点击提交并抓取结果数据,接着返回处理下一个型号。当前品牌的所有型号完成后,继续下一个品牌并处理其所有型号;所有品牌完成后,再进入下一个年份,重复上述过程。我的嵌套 for 循环如下:

import time

from selenium import webdriver
from selenium.common.exceptions import StaleElementReferenceException
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support.ui import WebDriverWait

def make_waitfor_elem_updated_predicate(driver,
                                        waitfor_elem_xpath_select):
    """Build a ``WebDriverWait`` predicate that fires once the element at
    *waitfor_elem_xpath_select* has been replaced in the DOM.

    The element is located eagerly, before the page changes.  The returned
    predicate then polls the stale reference: touching ``.text`` on a node
    that has been swapped out raises ``StaleElementReferenceException``,
    which is exactly the signal that the dependent dropdown was reloaded.

    :param driver: the active WebDriver instance.
    :param waitfor_elem_xpath_select: XPath of the element whose
        replacement we want to detect.
    :return: a one-argument callable suitable for ``WebDriverWait.until``.
    """
    # Grab a handle on the CURRENT node; it will go stale when refreshed.
    elem = driver.find_element_by_xpath(waitfor_elem_xpath_select)

    def elem_updated(_driver):
        try:
            elem.text
        except StaleElementReferenceException:
            # Old node is gone -> the dependent <select> was reloaded.
            return True
        except Exception:
            # Any other transient error: not updated yet, keep polling.
            pass
        return False

    # No lambda wrapper needed; the inner function already has the
    # (driver) -> bool signature WebDriverWait expects.
    return elem_updated

class Partbot:
    """Scrape the Monroe e-catalog by walking the cascading
    year -> make -> model dropdowns and submitting each combination."""

    def __init__(self):
        # Catalog landing page plus the XPaths of the three dependent
        # <select> widgets (selecting a year reloads makes, etc.).
        self.home_page = 'http://monroe-px.rtrk.com/en-US/e-catalog'
        self.driver = webdriver.Chrome()
        self.year_xpath = '//select[@id="widget-ymm-year-desktop"]'
        self.make_xpath = '//select[@id="widget-ymm-make-desktop"]'
        self.model_xpath = '//select[@id="widget-ymm-model-desktop"]'

    def get_select(self, xpath_select):
        """Return a fresh ``Select`` wrapper for the dropdown at
        *xpath_select* (re-locating it avoids stale references)."""
        select_elem = self.driver.find_element_by_xpath(xpath_select)
        return Select(select_elem)

    def select_option(self, xpath_select, value,
                      waitfor_elem_xpath_select=None):
        """Select *value* in the dropdown at *xpath_select*.

        If *waitfor_elem_xpath_select* is given, block until the dependent
        dropdown at that XPath has been replaced in the DOM, so callers can
        safely read its refreshed options.

        :return: a fresh ``Select`` for *xpath_select* (the original node
            may have been replaced by the page's JavaScript).
        """
        func = None
        if waitfor_elem_xpath_select:
            # Must capture the dependent element BEFORE triggering the
            # change, so staleness can be detected afterwards.
            func = make_waitfor_elem_updated_predicate(
                self.driver,
                waitfor_elem_xpath_select
            )

        select = self.get_select(xpath_select)
        select.select_by_value(value)

        if func is not None:
            # BUG FIX: the predicate was previously created but never
            # used, so the code raced ahead of the page's AJAX reload.
            WebDriverWait(self.driver, 10).until(func)

        return self.get_select(xpath_select)

    def make_select_option_iterator(self, xpath_select,
                                    waitfor_elem_xpath_select):
        """Return a zero-argument callable that yields the visible text of
        every non-placeholder option in the dropdown at *xpath_select*,
        selecting each one in turn (and waiting on the dependent dropdown
        when *waitfor_elem_xpath_select* is given)."""

        def next_option():
            select = self.get_select(xpath_select)
            # Skip options[0]: it is the "choose..." placeholder entry.
            select_option_values = [
                '{}'.format(op.get_attribute('value'))
                for op in select.options[1:]
            ]
            for v in select_option_values:
                refreshed = self.select_option(xpath_select, v,
                                               waitfor_elem_xpath_select)
                yield refreshed.first_selected_option.text

        return next_option

    def load_page(self):
        """Open the catalog home page and wait (<=10 s) until the year
        dropdown is present in the DOM."""
        self.driver.get(self.home_page)

        def page_loaded(driver):
            path = '//select[@id="widget-ymm-year-desktop"]'
            return driver.find_element_by_xpath(path)

        WebDriverWait(self.driver, 10).until(page_loaded)

    def are_extras_present(self):
        """Return True if any 'more info' widgets are on the page."""
        # BUG FIX: the XPath literal was broken across two source lines
        # (a SyntaxError); it is now a single string.
        extras = self.driver.find_elements_by_xpath(
            "//*[contains(@id, 'widget-ymm-moreinfo')]")
        return len(extras) >= 1

    def scrape_items(self):
        """Iterate every year/make/model combination, submitting the
        lookup form for each model.  The browser is always closed, even
        on failure."""
        years = self.make_select_option_iterator(
            self.year_xpath,
            self.make_xpath
        )

        makes = self.make_select_option_iterator(
            self.make_xpath,
            self.model_xpath
        )

        models = self.make_select_option_iterator(
            self.model_xpath,
            None
        )

        self.load_page()

        # BUG FIX: the original had a mid-assignment line break, mixed
        # indentation, and an `except` aligned with no `try` (all
        # SyntaxErrors), and the bare except silently swallowed every
        # failure.  try/finally guarantees cleanup without hiding errors.
        try:
            for year in years():
                print(year)
                for make in makes():
                    print(2 * ' ', make)
                    for model in models():
                        print(4 * ' ', model)
                        subm = self.driver.find_element_by_id(
                            'lookup-form-desktop')
                        subm.find_element_by_tag_name("button").click()
                        # Crude settle time for the results pane;
                        # TODO(review): replace with an explicit wait.
                        time.sleep(2)
        finally:
            self.driver.quit()


# BUG FIX: the entry-point guard was indented inside the class body,
# where it would execute during class creation and fail (Partbot is not
# yet bound).  It belongs at module level.
if __name__ == '__main__':
    pb = Partbot()
    pb.scrape_items()

0 个答案:

没有答案