element click intercepted error
Source: 4-5 Paginated order data scraping and database insertion
 
曲别针010
2023-12-21 10:24:10
Problem description:
    Running the code raises the error below, but if I set options to an empty dict, the error goes away and it runs normally. Could the teacher please help me find what is going wrong?
Error message:
D:\python3\python.exe D:/2.Backup/python/PythonLearn/Session4/W16/4-1.py
Setting browser argument: --headless
Setting browser argument: --no-sandbox
Setting browser argument: --disable-gpu
Logging in to: http://sleeve.talelin.com/#/login
Login successful
Starting data crawl
Traceback (most recent call last):
  File "D:/2.Backup/python/PythonLearn/Session4/W16/4-1.py", line 112, in <module>
    main()
  File "D:/2.Backup/python/PythonLearn/Session4/W16/4-1.py", line 108, in main
    crawl.crawl_order_info(url='http://sleeve.talelin.com/#/statics/order/list')
  File "D:/2.Backup/python/PythonLearn/Session4/W16/4-1.py", line 66, in crawl_order_info
    self.driver.find_element(By.XPATH, "//button[@class='btn-next']").click()
  File "D:\python3\lib\site-packages\selenium\webdriver\remote\webelement.py", line 94, in click
    self._execute(Command.CLICK_ELEMENT)
  File "D:\python3\lib\site-packages\selenium\webdriver\remote\webelement.py", line 395, in _execute
    return self._parent.execute(command, params)
  File "D:\python3\lib\site-packages\selenium\webdriver\remote\webdriver.py", line 348, in execute
    self.error_handler.check_response(response)
  File "D:\python3\lib\site-packages\selenium\webdriver\remote\errorhandler.py", line 229, in check_response
    raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.ElementClickInterceptedException: Message: element click intercepted: Element <button type="button" class="btn-next">...</button> is not clickable at point (582, 586). Other element would receive the click: <div class="el-notification__group">...</div>
  (Session info: headless chrome=119.0.6045.106)
Stacktrace:
	GetHandleVerifier [0x00007FF7D70886D2+54882]
	(No symbol) [0x00007FF7D6FF6822]
	(No symbol) [0x00007FF7D6EB05CB]
	(No symbol) [0x00007FF7D6EF7A6E]
	(No symbol) [0x00007FF7D6EF5E39]
	(No symbol) [0x00007FF7D6EF3C08]
	(No symbol) [0x00007FF7D6EF2C8A]
	(No symbol) [0x00007FF7D6EE87BF]
	(No symbol) [0x00007FF7D6F120AA]
	(No symbol) [0x00007FF7D6EE80CF]
	(No symbol) [0x00007FF7D6F122C0]
	(No symbol) [0x00007FF7D6F2A7E4]
	(No symbol) [0x00007FF7D6F11E83]
	(No symbol) [0x00007FF7D6EE671A]
	(No symbol) [0x00007FF7D6EE7964]
	GetHandleVerifier [0x00007FF7D7401AAB+3697211]
	GetHandleVerifier [0x00007FF7D7457F4E+4050654]
	GetHandleVerifier [0x00007FF7D744FDE3+4017523]
	GetHandleVerifier [0x00007FF7D7125EE6+700022]
	(No symbol) [0x00007FF7D70019C8]
	(No symbol) [0x00007FF7D6FFDB74]
	(No symbol) [0x00007FF7D6FFDCA2]
	(No symbol) [0x00007FF7D6FEDDA3]
	BaseThreadInitThunk [0x00007FFACA38257D+29]
	RtlUserThreadStart [0x00007FFACAA6AA58+40]
Process finished with exit code 1
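The error text itself points at the likely cause: the click on the .btn-next button is being intercepted by an Element UI notification popup (the <div class="el-notification__group"> named in the message), which still covers the button when the page runs headless. A minimal workaround sketch, not the course's official fix: wait for the notification to fade before clicking. The 'el-notification' class name is inferred from the error message and the timeout value is an assumption:

from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def click_next_when_clear(driver, timeout=10):
    # Hypothetical helper: block until any Element UI notification overlay is gone
    wait = WebDriverWait(driver, timeout, 0.5)
    wait.until(EC.invisibility_of_element_located((By.CLASS_NAME, 'el-notification')))
    # Wait for the button to be clickable, not merely present, then click it
    wait.until(EC.element_to_be_clickable((By.XPATH, "//button[@class='btn-next']"))).click()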
Relevant code:
# coding: utf-8
'''
1. Requirement: scrape the order info from the order list
    Each order contains: id, order number, item count, total price, status
    There are multiple orders per page and 279 pages in total
2. Approach:
    1) Log in at http://sleeve.talelin.com/#/login (the URL already carries the account credentials, so no input is needed)
    2) Open the order list at http://sleeve.talelin.com/#/statics/order/list
    3) Scrape the current page, store it into MongoDB, and click "next page"
    4) Repeat step 3) until the last page
'''
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from pymongo import MongoClient
from lxml import etree
class CrawlProject(object):
    def __init__(self, chrome_options):
        self.driver = webdriver.Chrome(options=chrome_options)
        self.driver.maximize_window()
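        # Connect to MongoDB and select the target database/collection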
        myclient = MongoClient(host='192.168.0.188', port=27017)
        myclient.admin.authenticate('root', 'root')
        mydb = myclient['db_order']
        self.mycollection = mydb['order_info']
    def login(self, url):
        '''
        Log in at http://sleeve.talelin.com/#/login
        :return:
        '''
        print('Logging in to: {}'.format(url))
        # Open the login page
        self.driver.get(url)
        # Wait until the login form has loaded, then perform the login
        if WebDriverWait(self.driver, 5, 0.5).until(EC.presence_of_element_located((By.CLASS_NAME, 'login-form'))):
            self.driver.find_element(By.XPATH, "//button[@class='submit-btn']").click()
            # Check whether the login succeeded and print the result
            if WebDriverWait(self.driver, 5, 0.5).until(EC.presence_of_element_located((By.CLASS_NAME, 'welcome'))):
                print('Login successful')
                return True
            else:
                print('Login failed')
                return False
    def crawl_order_info(self, url):
        '''
        Scrape the order data
        :param url: order page URL
        :return: no return value; data is written straight into MongoDB
        '''
        print('Starting data crawl')
        self.driver.get(url)
        # Keep paging and scraping until the last page
        while True:
            # Wait until the order list page has loaded, then scrape it
            if WebDriverWait(self.driver, 5, 0.5).until(EC.presence_of_element_located((By.XPATH,"//div[@class='title']"))):
                # Scrape the current page
                self.parse_html(self.driver.page_source)
                # Go to the next page
                self.driver.find_element(By.XPATH, "//button[@class='btn-next']").click()
                # If the "next" button is now disabled, this was the last page; exit the loop
                if self.driver.find_element(By.XPATH,"//button[@class='btn-next']").get_attribute('disabled'):
                    break
        self.driver.quit()
    def parse_html(self, content):
        '''
        Parse the page data
        :param content: page source
        :return:
        '''
        html = etree.HTML(content)
        orders_info = html.xpath("//tr[@class='el-table__row']")
        for item in orders_info:
            data={
                    'id':''.join(item.xpath('./td[1]/div/text()')),
                    'order_num':''.join(item.xpath('./td[2]/div/text()')),
                    'number':''.join(item.xpath('./td[3]/div/text()')),
                    'total_price':''.join(item.xpath('./td[4]/div/text()')),
                    'status':''.join(item.xpath('./td[5]//span/text()'))
                }
            # Insert the record into MongoDB
            self.mycollection.insert_one(data)
def main():
    # Build the options dict with browser arguments; with these set, no browser window pops up and scraping runs headless
    options = {
        'headless': '--headless',
        'no_sandbox': '--no-sandbox',
        'gpu': '--disable-gpu'
    }
    # options = {}
    chrome_options = Options()
    for k,v in options.items():
        print('Setting browser argument: {}'.format(v))
        # Add the argument to the Chrome options
        chrome_options.add_argument(v)
    crawl = CrawlProject(chrome_options)
    url = 'http://sleeve.talelin.com/#/login'
    login_status = crawl.login(url)
    if login_status:
        crawl.crawl_order_info(url='http://sleeve.talelin.com/#/statics/order/list')
if __name__ == '__main__':
    main()
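One more thing worth noting about the code above, separate from the exception: the loop clicks the next-page button first and only checks its disabled attribute afterwards, so on the last page it clicks a dead button before breaking. A reordered sketch of the while loop inside crawl_order_info, meant as a drop-in replacement using the same imports and locators as the file above (the notification wait is the same assumption discussed under the error message):

        while True:
            # Wait until the order list page has loaded, then scrape it
            WebDriverWait(self.driver, 5, 0.5).until(
                EC.presence_of_element_located((By.XPATH, "//div[@class='title']")))
            self.parse_html(self.driver.page_source)
            next_btn = self.driver.find_element(By.XPATH, "//button[@class='btn-next']")
            # Check for the last page *before* clicking
            if next_btn.get_attribute('disabled'):
                break
            # Let any notification popup clear so the click is not intercepted
            WebDriverWait(self.driver, 5, 0.5).until(
                EC.invisibility_of_element_located((By.CLASS_NAME, 'el-notification')))
            next_btn.click()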
 
好帮手慕小猿
2023-12-21
Hi! I tested this 7 times on my end and could not reproduce your problem. From the information I could find, it may be a chromedriver issue; I suggest switching to a different version of Chrome and chromedriver and running the program again.
Happy learning~
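If switching chromedriver does not help, the overlay named in the error message is the more likely culprit, and a JavaScript click bypasses Selenium's clickability check entirely. A fallback sketch (safe_click is a hypothetical helper; execute_script is standard Selenium):

from selenium.common.exceptions import ElementClickInterceptedException

def safe_click(driver, element):
    # Try a normal click first; fall back to a JS click if another element overlaps it
    try:
        element.click()
    except ElementClickInterceptedException:
        driver.execute_script("arguments[0].click();", element)

A JS click fires the handler even when the element is visually covered, so it is only appropriate when the overlap is cosmetic (such as a notification toast) rather than a real modal.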