# Removing elements that currently appear unused may be a viable solution.
import csv
import bs4
import requests
from selenium import webdriver
from time import sleep
def retrieve_asin_from(base_url, idx):
    """Scrape the ASIN codes listed on one Amazon search-results page.

    Parameters
    ----------
    base_url : str
        URL template containing one ``{}`` placeholder for the page number.
    idx : int
        Page number substituted into *base_url*.

    Returns
    -------
    list[str]
        The ``data-asin`` attribute of every ``<div>`` on the rendered page
        that carries one.
    """
    url = base_url.format(idx)
    # Render the page in a real browser session; the listing divs are
    # populated by JavaScript, so only the Selenium-rendered source is
    # parsed. (A plain requests.get() fetch here would be discarded work.)
    with webdriver.Chrome() as driver:
        driver.get(url)
        page_html = driver.page_source
    soup = bs4.BeautifulSoup(page_html, "html.parser")
    asins = [div.attrs["data-asin"]
             for div in soup.find_all("div") if div.has_attr("data-asin")]
    sleep(1)  # polite throttle between successive page fetches
    return asins
url = "https://www.amazon.es/s?k=doll&i=toys&rh=n%3A599385031&dc&page={}"


def combine_records(records):
    """Flatten per-page ASIN lists into one-column CSV rows.

    Parameters
    ----------
    records : list[list[str]]
        One list of ASIN strings per scraped page.

    Returns
    -------
    list[list[str]]
        Each ASIN wrapped in its own single-element row, page order preserved.
    """
    return [[asin] for page in records for asin in page]


# Scrape result pages 1-3 and persist every ASIN as one row of the CSV.
data_record = [retrieve_asin_from(url, i) for i in range(1, 4)]
combined_data_record = combine_records(data_record)
with open('asin_data.csv', 'w', newline='') as fd:
    csvfile = csv.writer(fd)
    csvfile.writerows(combined_data_record)