简体   繁体   中英

Update PyQt5 Gui inside a main thread based on signal from scrapy

I have a very basic spider that looks like the followall spider from scrapy testspiders.

import re

import scrapy.signals
from scrapy.http import Request, HtmlResponse
from scrapy.linkextractors import LinkExtractor
from six.moves.urllib.parse import urlparse

from page import Page


class ZenSpider( scrapy.Spider ) :
    """Follow-all spider.

    Starts at a single URL (``url`` or ``domain`` keyword) and follows
    every extracted link, yielding one Page item per response.

    Note: the original defined ``__init__`` twice; the first, empty
    definition was dead code (silently replaced by the second) and has
    been removed.
    """

    name = 'followall'
    custom_settings = {
        # Stop after two pages so interactive runs stay short.
        'CLOSESPIDER_PAGECOUNT' : 2,
        "FEEDS" : {
            "items.csv" : {"format" : "csv"},
        },
    }

    def __init__(self, **kw) :
        """Accept ``url`` or ``domain``; default to scrapinghub.com."""
        super( ZenSpider, self ).__init__( **kw )
        url = kw.get( 'url' ) or kw.get( 'domain' ) or 'http://scrapinghub.com/'
        # Prepend a scheme when the caller passed a bare domain.
        if not url.startswith( ('http://', 'https://') ) :
            url = 'http://%s/' % url
        self.url = url
        # Restrict the crawl to the start host, ignoring a leading "www.".
        self.allowed_domains = [re.sub(r'^www\.', '', urlparse(url).hostname)]
        self.link_extractor = LinkExtractor()

    def start_requests(self):
        """Seed the crawl with the configured start URL."""
        return [Request(self.url, callback=self.parse, dont_filter=True)]

    def parse(self, response):
        """Parse a PageItem and all requests to follow

        @url http://www.scrapinghub.com/
        @returns items 1 1
        @returns requests 1
        @scrapes url title foo
        """
        page = self._get_item(response)
        r = [page]
        r.extend(self._extract_requests(response))
        return r

    def _get_item(self, response):
        """Build a Page item for *response* (title/description for HTML only)."""
        # Removed an unused local ``items = []`` that was never referenced.
        item = Page(
            url=response.url,
            size=str( len( response.body ) ),
            status=response.status,
            # content_type=response.request.headers.get('Content-Type'),
            # encoding=response.request.headers.get('encoding'),
            # referer=response.request.headers.get('Referer'),
        )
        self._set_title( item, response )
        self._set_description( item, response )
        return item

    def _extract_requests(self, response):
        """Return follow-up Requests for every link found in an HTML response."""
        r = []
        if isinstance(response, HtmlResponse):
            links = self.link_extractor.extract_links( response )
            r.extend( Request( x.url, callback=self.parse ) for x in links )
        return r

    def _set_title(self, page, response) :
        """Copy the first <title> text onto the item, when present."""
        if isinstance( response, HtmlResponse ) :
            title = response.xpath( "//title/text()" ).extract()
            if title :
                page['title'] = title[0]

    def _set_description(self, page, response) :
        """Copy the meta-description content onto the item, when present."""
        if isinstance( response, HtmlResponse ) :
            description = response.xpath( "//meta[@name='description']/@content" ).extract()
            if description :
                page['description'] = description[0]

I am calling this spider from a script, shown below. The spider is run using the CrawlerRunner class; when it scrapes an item, it emits the item_scraped signal, which (via p.signals.connect) calls the crawler_results method and prints that an item was scraped.

As far as I understand, I cannot move the crawling into its own class, because then the signal won't work with PyQt5.

import scrapy
from PyQt5 import QtWidgets, QtCore, QtGui
from PyQt5.QtCore import QRunnable, pyqtSlot, QThread, pyqtSignal, QTimer
from PyQt5.QtWidgets import QTableWidgetItem, QLabel
from scrapy import signals
from scrapy.crawler import CrawlerProcess, CrawlerRunner
from twisted.internet import reactor
from scrapy.utils.log import configure_logging

from Layout import Ui_MainWindow
from ZenSpider import ZenSpider


class MainWindow( QtWidgets.QMainWindow, Ui_MainWindow ) :
    """Main application window: wires the generated UI and starts the crawl."""

    def __init__(self, parent=None) :
        super(MainWindow, self).__init__()

        self.setupUi( self )
        # Start the crawl when the push button is pressed.
        self.pushButton.pressed.connect( self.on_url_entered )

    def crawler_results(self, item) :
        # Invoked once per scraped item via scrapy's item_scraped signal.
        print( "SCRAPED AN ITEM" )
        ##Do Something here ##

    def on_url_entered(self) :
        # global userInput
        # userInput = self.urlbar.text()
        configure_logging()
        runner = CrawlerRunner()
        runner.crawl(ZenSpider, domain="google.com.au")
        for p in runner.crawlers :
            # Subscribe this window to each crawler's item_scraped signal.
            p.signals.connect(self.crawler_results, signal=signals.item_scraped)
        # NOTE(review): reactor.run() never returns while the reactor is live,
        # so this Qt slot blocks and the GUI freezes. A Qt-compatible reactor
        # (e.g. qt5reactor) installed before starting twisted is the usual fix
        # — confirm against the accepted-answer version below.
        reactor.run()

if __name__ == "__main__" :
    # Build the Qt application, show the main window, and enter the event loop.
    application = QtWidgets.QApplication( [] )
    window = MainWindow()
    window.show()
    application.exec_()

I have a layout with a simple QTableWidget and a pushbutton

# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'basic.ui'
#
# Created by: PyQt5 UI code generator 5.14.2
#
# WARNING! All changes made in this file will be lost!


from PyQt5 import QtCore, QtGui, QtWidgets


class Ui_MainWindow(object):
    """pyuic5-generated layout: one single-column table plus a Start button."""

    def setupUi(self, MainWindow):
        # Build the widget tree; geometry values come from the .ui file.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1034, 803)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.tableWidget = QtWidgets.QTableWidget(self.centralwidget)
        self.tableWidget.setGeometry(QtCore.QRect(140, 200, 831, 401))
        self.tableWidget.setObjectName("tableWidget")
        self.tableWidget.setColumnCount(1)
        self.tableWidget.setRowCount(0)
        # Header item for the single "URL" column (text set in retranslateUi).
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(0, item)
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setGeometry(QtCore.QRect(880, 610, 89, 25))
        self.pushButton.setObjectName("pushButton")
        MainWindow.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        # Apply translatable display strings.
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        item = self.tableWidget.horizontalHeaderItem(0)
        item.setText(_translate("MainWindow", "URL"))
        self.pushButton.setText(_translate("MainWindow", "Start"))


if __name__ == "__main__":
    import sys

    # Stand-alone preview of the generated layout.
    qt_app = QtWidgets.QApplication(sys.argv)
    preview_window = QtWidgets.QMainWindow()
    form = Ui_MainWindow()
    form.setupUi(preview_window)
    preview_window.show()
    sys.exit(qt_app.exec_())

When I hit the pushbutton I can see the crawler running and entering the crawler_results method as it prints the item scraped. The spider returns each item as the following value

{'size': '164125',
 'status': 200,
 'title': 'Google Advanced Search',
 'url': 'https://www.google.com.au/advanced_search?hl=en-AU&authuser=0'}

Page is simply my scrapy items

import scrapy

class Page(scrapy.Item):
    """Item carrying the scraped facts for one crawled page."""

    url = scrapy.Field()      # response URL
    size = scrapy.Field()     # body size in bytes (stored as a string)
    status = scrapy.Field()   # HTTP status code
    title = scrapy.Field()    # <title> text (HTML responses only)
    # Added: ZenSpider._set_description assigns page['description'], and
    # scrapy.Item raises KeyError for fields that were not declared, so the
    # field must exist here for that assignment to succeed.
    description = scrapy.Field()

My question is how do I translate this data into the GUI and have it auto refresh as long as the spider runs. This means that every time an item is scraped the GUI updates and then the spider continues.

I have so far explored

  1. Using scrapy deferred without much luck
  2. Slots/Signals but am unable to get the GUI to update.
  3. A QTimer callback to refresh the GUI every second, but that again yields no result.

Any help is much appreciated

You have to install a reactor compatible with the Qt event loop, for example using:

import sys

from PyQt5 import QtWidgets, QtCore, QtGui

import qt5reactor
# import qreactor

from scrapy import signals
from scrapy.crawler import CrawlerRunner
from scrapy.utils.log import configure_logging

import twisted

from Layout import Ui_MainWindow
from ZenSpider import ZenSpider


class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
    """Main window: appends each scraped URL to the table as the crawl runs."""

    def __init__(self, parent=None):
        super(MainWindow, self).__init__()

        self.setupUi(self)
        self.pushButton.pressed.connect(self.on_url_entered)
        # Size the URL column to its contents.
        header = self.tableWidget.horizontalHeader()
        header.setSectionResizeMode(QtWidgets.QHeaderView.ResizeToContents)

    def crawler_results(self, item):
        # One new row per scraped item, showing its URL in column 0.
        next_row = self.tableWidget.rowCount()
        cell = QtWidgets.QTableWidgetItem(item["url"])
        self.tableWidget.insertRow(next_row)
        self.tableWidget.setItem(next_row, 0, cell)

    def on_url_entered(self):
        # Start the crawl; the Qt-compatible reactor keeps the GUI responsive.
        configure_logging()
        crawl_runner = CrawlerRunner()
        crawl_runner.crawl(ZenSpider, domain="google.com.au")
        for crawler in crawl_runner.crawlers:
            crawler.signals.connect(self.crawler_results, signal=signals.item_scraped)

    def closeEvent(self, event):
        # Shut the twisted reactor down when the window closes.
        super(MainWindow, self).closeEvent(event)
        twisted.internet.reactor.stop()


if __name__ == "__main__":
    application = QtWidgets.QApplication([])

    # Install the Qt-compatible reactor after the QApplication exists and
    # before twisted.internet.reactor is first used.
    qt5reactor.install()
    # qreactor.install()

    window = MainWindow()
    window.show()
    twisted.internet.reactor.run()

The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM