[英]How do I parallelize a Python loop with webdriver?
在下面的代碼中,我嘗試從不同的 url 頁面(總共 25 個)導入所有賠率。 我目前使用一個簡單的循環,但它需要太多時間。 如何並行化此代碼以減少執行時間。
這是代碼:
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from selenium import webdriver
import statistics as stat
import numpy as np

# Open the index page and collect the link of every individual match page.
driver = webdriver.Firefox()
url = 'https://www.coteur.com/cotes-foot.php'
driver.get(url)

# Store url associated with the soccer games
url_links = [a.get_attribute('href')
             for a in driver.find_elements_by_xpath('//a[contains(@href, "match/cotes-")]')]
# quit() (unlike close()) also terminates the geckodriver process.
driver.quit()
print(len(url_links), '\n')

# Visit each match page in turn and print its odds table.
for link in url_links:
    driver = webdriver.Firefox()
    try:
        driver.get(link)
        # Store odds into table
        odds = [btn.text for btn in driver.find_elements_by_xpath(
            '//button[contains(@class, "btn btn-default btn-xs btncote")]')]
        header = [th.text for th in
                  driver.find_elements_by_xpath('//th[contains(@width, "20%")]')]
        # Odds come 3 per row (presumably 1/N/2 per bookmaker — verify on the site),
        # so reshape the flat list into an (n_rows, 3) array.
        odds = np.array([float(o) for o in odds]).reshape(len(odds) // 3, 3)
        header = np.array(header)
        print(odds, '\n')
    finally:
        # BUG FIX: original read `driver;close()` — a NameError at runtime —
        # and would have leaked one Firefox/geckodriver per URL on any error.
        driver.quit()
如果您使用 python>3.5,ThreadPoolExecutor 會做得很完美。
from selenium import webdriver
import statistics as stat
import numpy as np
from concurrent.futures import ThreadPoolExecutor

# First gather the per-match URLs from the index page (this part stays sequential).
driver = webdriver.Firefox()
url = 'https://www.coteur.com/cotes-foot.php'
driver.get(url)

# Store url associated with the soccer games
url_links = [a.get_attribute('href')
             for a in driver.find_elements_by_xpath('//a[contains(@href, "match/cotes-")]')]
# quit() (unlike close()) also terminates the geckodriver process.
driver.quit()
print(len(url_links), '\n')


def scraper(url_link):
    """Open one match page in its own Firefox instance and print its odds
    as an (n_rows, 3) NumPy array.

    Runs inside a worker thread; each worker owns a private WebDriver so no
    session state is shared between threads.
    """
    driver = webdriver.Firefox()
    try:
        driver.get(url_link)
        # Store odds into table
        odds = [btn.text for btn in driver.find_elements_by_xpath(
            '//button[contains(@class, "btn btn-default btn-xs btncote")]')]
        header = np.array([th.text for th in
                           driver.find_elements_by_xpath('//th[contains(@width, "20%")]')])
        # 3 odds per bookmaker row — reshape the flat list accordingly.
        odds = np.array([float(o) for o in odds]).reshape(len(odds) // 3, 3)
        print(odds, '\n')
    finally:
        # BUG FIX: original read `driver;close()` (NameError) and leaked one
        # Firefox/geckodriver per worker; quit() tears the session down fully.
        driver.quit()


with ThreadPoolExecutor(max_workers=8) as executor:
    # Consume the lazy map iterator so exceptions raised inside workers
    # propagate here instead of being silently discarded.
    for _ in executor.map(scraper, url_links):
        pass
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.