[英]Removing elements from lists
我無法從lists
中刪除元素。 發送e-mail
時,我想從urls[]
和prices[]
中刪除相關元素。
例如:if email has: like a url
an Iphone X
and like a price 500€
and it had been sent, i want to "rewrite"
the elements in urls[]
and prices[]
lists removing iphone's url and 500€
import requests
from bs4 import BeautifulSoup
import smtplib
import time
# https://www.amazon.it/Corsair-Vengeance-Memorie-Desktop-Prestazioni/dp/B0143UM4TC
# https://www.amazon.it/AMD-Ryzen-5-3600-Processori/dp/B07STGGQ18
# https://www.amazon.it/Apple-iPhone-Grigio-Siderale-Ricondizionato/dp/B07985C44N
urls = []
prices = []
all_product = []

# Ask how many products to track, then read their URLs and target prices.
n = int(input("Inserisci il numero di prodotti: "))

# Collect the links to check (one Amazon product URL per line).
print("\nInserisci i link:")
for _ in range(n):
    # input() already returns a str; the original str(...) wrapper was redundant.
    urls.append(input())

# Collect the target price for each link (same order as the URLs).
print("\nInserisci i prezzi:")
for _ in range(n):
    prices.append(int(input()))

# Browser-like User-Agent so Amazon serves the normal product page.
headers = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0 Chrome/83.0.4103.97 Safari/537.36'}
def check_price():
    """Scrape the current price of every tracked product.

    Appends every scraped price to the global ``all_product`` list.  When a
    product's current price has dropped to (or below) its target price, the
    e-mail alert is (to be) sent and that product is removed from the global
    ``urls``/``prices`` lists so it is not reported again.
    """
    # BUG FIX: the original popped elements from urls/prices while zip() was
    # iterating them, which skips the element that slides into the removed
    # slot, and gated the removal on `urls.index(...) & prices.index(...)` —
    # a bitwise AND of list indices that is falsy whenever either index is 0.
    # Instead, build "keep" lists and swap them in after the loop.
    keep_urls = []
    keep_prices = []
    for url, price in zip(urls, prices):
        soup = BeautifulSoup(requests.get(url, headers=headers).content, 'lxml')
        try:
            title = soup.find(id='productTitle').get_text(strip=True)
            products = soup.find(id='priceblock_ourprice').get_text()
            # European price format uses a comma as the decimal separator.
            fix_string = products.replace(",", ".")
            converted_price = float(fix_string[0:5])
            all_product.append(converted_price)
            if converted_price <= price:
                # Target hit: send email here, and do NOT keep this product.
                pass
            else:
                keep_urls.append(url)
                keep_prices.append(price)
        except AttributeError:
            print("Prezzo non trovato, controlla se il prodotto ha un prezzo esposto")
            # Keep the product so it is retried on the next run.
            keep_urls.append(url)
            keep_prices.append(price)
    # Slice-assign so the *same* global list objects are updated in place.
    urls[:] = keep_urls
    prices[:] = keep_prices
# Debug output: every price scraped during the run.
print(all_product)
在 Python 中,從用於for... in....
的列表中刪除不是一個好主意,因為當您刪除元素時,其他元素會被移動(因此下一個元素代替已刪除的元素)但for
不會知道它,它會跳轉到列表中的下一個元素,並跳過已移動到已刪除元素位置的元素。
最好在循環之前創建空列表( keep_urls = []
),在循環 append 到要保留的列表元素( keep_urls.append(url)
),然后在循環之后將此列表分配給舊變量( urls = keep_urls
)。 之后,您可以再次運行它,它將使用沒有刪除元素的列表。
這段代碼顯示了我的看法。
順便說一句:因為使用input()
添加數據很長而且很無聊,所以我添加了從文件中讀取數據的代碼。
import requests
from bs4 import BeautifulSoup
import smtplib
import time
# --- functions ---
def ask_for_data():
    """Interactively read the product links and their target prices.

    Returns:
        tuple[list[str], list[str]]: the URLs and the prices, both as
        strings, in the order they were entered.
    """
    count = int(input("Inserisci il numero di prodotti: "))

    # Read the links to check, one per line.
    print("\nInserisci i link:")
    urls = [input() for _ in range(count)]

    # Read the matching target prices (kept as strings on purpose:
    # write_data() joins them with newlines).
    print("\nInserisci i prezzi:")
    prices = [input() for _ in range(count)]

    return urls, prices
def read_data():
    """Load the tracked URLs and target prices from ``urls.txt``/``prices.txt``.

    Returns:
        tuple[list[str], list[str]]: one URL / one price per non-blank line.
    """
    # BUG FIX: the original used text.split('\n'), which yields a trailing
    # empty string when the file ends with a newline (an empty URL then
    # breaks requests.get).  splitlines() avoids that; blank lines are
    # dropped for extra safety.
    with open('urls.txt') as fh:
        urls = [line.strip() for line in fh.read().splitlines() if line.strip()]
    with open('prices.txt') as fh:
        prices = [line.strip() for line in fh.read().splitlines() if line.strip()]
    return urls, prices
def write_data(urls, prices):
    """Persist the still-tracked URLs and prices, one per line.

    Items are coerced to ``str`` before joining, so the function also
    accepts numeric prices instead of crashing in ``"\\n".join`` (a
    backward-compatible robustness fix; string input is unchanged).
    """
    with open('urls.txt', 'w') as fh:
        fh.write("\n".join(str(u) for u in urls))
    with open('prices.txt', 'w') as fh:
        fh.write("\n".join(str(p) for p in prices))
def send_email(url, price, converted_price):
    """Notification stub: just logs the deal until real e-mail is wired up."""
    # saving = converted_price - price  (sketch for the future e-mail body)
    print('TODO: send mail with', url, price, converted_price)
# --- main ---

# - start -

#urls, prices = ask_for_data()
urls, prices = read_data()

# Browser-like User-Agent so Amazon serves the normal product page.
headers = {
    'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0 Chrome/83.0.4103.97 Safari/537.36'
}

try:
    while True:
        # - before loop -
        keep_urls = []
        keep_prices = []
        all_products = []

        # - loop -
        for url, price in zip(urls, prices):
            r = requests.get(url, headers=headers)
            #print(r.status_code)
            soup = BeautifulSoup(r.content, 'lxml')
            try:
                # Inside the try: soup.find() returns None on a captcha/missing
                # page, and the original .get_text() call would then crash.
                title = soup.find(id='productTitle').get_text(strip=True)
                products = soup.find(id='priceblock_ourprice').get_text()
                fix_string = products.replace(",", ".")
                converted_price = float(fix_string[0:5])
                all_products.append(converted_price)
                # BUG FIX: prices come from read_data()/ask_for_data() as
                # strings — convert before comparing with the scraped float.
                if converted_price <= float(price):
                    send_email(url, price, converted_price)
                else:
                    keep_urls.append(url)
                    keep_prices.append(price)
            except AttributeError as ex:
                print('Ex:', ex)
                print("Prezzo non trovato, controlla se il prodotto ha un prezzo esposto")
                # BUG FIX: keep the product on a scrape error instead of
                # silently dropping it from the tracked lists.
                keep_urls.append(url)
                keep_prices.append(price)
        # - loop -

        urls = keep_urls
        prices = keep_prices

        print(all_products)
        # BUG FIX: the original called the undefined name `time_sleep(60)`.
        time.sleep(60)
except KeyboardInterrupt:
    # `while True` never ends on its own, so the original write_data() call
    # was unreachable; Ctrl-C now falls through to save the remaining items.
    pass

# - end -
write_data(urls, prices)
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.