Stepping through the page=xxx parameter (xxx = 1 to 102) while scraping Lazada items returns many duplicate items, and because of this many items on the web page are never scraped at all.
https://redmart.lazada.sg/shop-party-supplies/?acm=201903252.1003.1.3731409&m=redmart&page=102&pos=17&scm=1003.1.201903252.null_16_3731409&spm=a2o42.redmart_channel.nav_category_tree.213.6fea48a6uGZyQM
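My guess is that Scrapy's duplicate request filter may be silently dropping some of the page requests. One way to verify this, which I have not run yet, is to turn on duplicate-filter debug logging (a sketch; DUPEFILTER_DEBUG and LOG_LEVEL are standard Scrapy settings):

    custom_settings = {
        'DUPEFILTER_DEBUG': True,  # log every request dropped as a duplicate, not just the first
        'LOG_LEVEL': 'DEBUG',
    }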
I tried the following, but to no avail:
1.) Set the following:
'CONCURRENT_REQUESTS': 1,
'CONCURRENT_REQUESTS_PER_DOMAIN': 1,
'RANDOMIZE_DOWNLOAD_DELAY': True,
'CONCURRENT_REQUESTS_PER_IP': 1,
2.) Include headers in the request, e.g.: yield scrapy.Request(url=next_page, headers={'JSESSIONID': '2DE61BF1E734471FBB8C768B21D47D85'}) (see the cookie sketch after this list)
3.) Walk the pages from 102 down to 1 instead of from 1 up to 102.
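A note on attempt 2: JSESSIONID is normally a cookie, not a plain request header, so passing it via headers= probably has no effect. A sketch of sending it as a cookie instead (untested, same session id as above, using Scrapy's standard cookies parameter):

    yield scrapy.Request(
        url=next_page,
        cookies={'JSESSIONID': '2DE61BF1E734471FBB8C768B21D47D85'},  # sent in the Cookie header
        callback=self.parse_category,
    )

The full spider follows: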
import scrapy
from scrapy.loader import ItemLoader
from scrapy.loader.processors import TakeFirst, Compose, Join
import re
from diffmarts.items import DiffmartsItem
from diffmarts.util import ProductLoader, to_price, to_link, to_name
import json
import time

class RedmartSpider(scrapy.Spider):
    name = 'redmart'
    start_urls = ['https://redmart.lazada.sg/shop-party-supplies/?acm=201903252.1003.1.3731409&m=redmart&page=102&pos=17&scm=1003.1.201903252.null_16_3731409&spm=a2o42.redmart_channel.nav_category_tree.213.6fea48a6uGZyQM']
    """
    start_urls = ['https://redmart.lazada.sg/shop-groceries-fresh-produce/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.18.e7ea48a6I6BibL',
                  'https://redmart.lazada.sg/meat-and-seafood/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.30.2e5d48a6aXcB49',
                  'https://redmart.lazada.sg/shop-Groceries-DairyChilled/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.37.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/shop-Groceries-FoodStaplesCookingEssentials/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.52.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/beverages/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.67.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/laundry-and-home-care/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.81.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/mother-baby/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.92.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/shop-groceries-frozen/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.108.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/shop-Groceries-ChocolateSnacksSweets/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.121.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/breakfast/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.131.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/wines-beers-spirits/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.143.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/shop-health-beauty/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.156.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/shop-kitchen-dining/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.173.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/shop-furniture-decor-2/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.189.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/shop-pet-supplies/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.203.2e5d48a6YIJ1n7',
                  'https://redmart.lazada.sg/shop-party-supplies/?m=redmart&page=102&spm=a2o42.redmart_channel.nav_category_tree.211.2e5d48a6YIJ1n7']
    """
    custom_settings = {
        'CONCURRENT_REQUESTS': 1,
        'CONCURRENT_REQUESTS_PER_DOMAIN': 1,
        'RANDOMIZE_DOWNLOAD_DELAY': True,
        # 'CONCURRENT_REQUESTS_PER_IP': 1,
        # The delay seems to affect the number of items crawled: 0.5 = 663 items, 0.01 = 691 items
        # 'DOWNLOAD_DELAY': 0.01,
    }
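    # Untested idea: instead of hand-tuning DOWNLOAD_DELAY, Scrapy's AutoThrottle
    # extension can adapt the delay to the server's response times. These standard
    # settings would go inside custom_settings above (the values are guesses):
    #   'AUTOTHROTTLE_ENABLED': True,
    #   'AUTOTHROTTLE_START_DELAY': 0.5,
    #   'AUTOTHROTTLE_TARGET_CONCURRENCY': 1.0,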
    def parse(self, response):
        # Build a category name from the URL by stripping the domain and hyphens
        cat_name = response.url
        cat_name = re.sub('https://redmart.lazada.sg/', '', cat_name)
        cat_name = re.sub('-', '', cat_name)
        cat_link = response.url
        yield scrapy.Request(url=response.url,
                             headers={'JSESSIONID': '2DE61BF1E734471FBB8C768B21D47D85'},
                             callback=self.parse_category,
                             meta={'cat_link': cat_link, 'cat_name': cat_name, 'page': 102})
    def parse_category(self, response):
        print("@@@ Parsing: %s" % (response.url))
        if len(response.body) == 0:
            print("@@@ Response empty, retry parsing: %s" % (response.url))
            yield scrapy.Request(url=response.url, callback=self.parse_category,
                                 meta={'cat_link': response.meta['cat_link'],
                                       'cat_name': response.meta['cat_name'],
                                       'page': response.meta['page']},
                                 dont_filter=True)
        else:
            # The product data is embedded in the page as a window.pageData JSON blob
            data = response.xpath("//script[contains(.,'mod')]/text()").extract_first()
            sdata = re.sub('window.pageData=', '', data)
            json_response = json.loads(sdata)
            # json_response is a dict with 2 keys; the products live under 'mods'
            if 'mods' in json_response.keys():
                print("@@@ %s: It's got %d items" % (response.url, len(json_response['mods']['listItems'])))
                for product in range(len(json_response['mods']['listItems'])):
                    yield self.parse_item(response, json_response['mods']['listItems'][product],
                                          json_response['mainInfo']['title'])
            # Walk backwards: rewrite page=N in the URL to page=N-1 while a 'prev' link exists
            next_page = response.xpath("//link[@rel='prev']//@href").extract_first()
            page = int(response.meta['page'])
            if next_page is not None:
                pre_page = 'page=' + str(page)
                next_page = 'page=' + str(page - 1)
                next_page = re.sub(pre_page, next_page, response.url)
                # yield scrapy.Request(url=next_page, callback=self.parse_category, meta={'cat_link': response.meta['cat_link'], 'cat_name': response.meta['cat_name'], 'page': next_page}, dont_filter=True)
                # cat_link/cat_name must be carried along, or the empty-response retry above raises KeyError
                yield scrapy.Request(url=next_page,
                                     headers={'JSESSIONID': '2DE61BF1E734471FBB8C768B21D47D85'},
                                     callback=self.parse_category,
                                     meta={'cat_link': response.meta['cat_link'],
                                           'cat_name': response.meta['cat_name'],
                                           'page': response.meta['page'] - 1})
    def parse_item(self, response, json_product, cat_name):
        item_loader = ProductLoader(DiffmartsItem(), None)
        item_loader.add_value('id', str(json_product['itemId']))
        item_loader.add_value('cat_name', cat_name)
        item_loader.add_value('name', json_product['name'], Compose(to_name))
        if 'originalPrice' in json_product.keys():
            # Discounted product: strip the literal '$' from the displayed price
            # (the dollar sign must be escaped, otherwise re.sub matches end-of-string)
            item_loader.add_value('price', re.sub(r'\$', '', json_product['priceShow']), Compose(to_price))
            item_loader.add_value('prev_price', json_product['originalPrice'], Compose(to_price))
            item_loader.add_value('promotion', json_product['discount'])
        else:
            item_loader.add_value('price', json_product['price'], Compose(to_price))
            item_loader.add_value('prev_price', '0')
        item_loader.add_value('link', json_product['productUrl'], Compose(lambda v: to_link(v, response)))
        item_loader.add_value('image_link', json_product['image'], Compose(lambda v: to_link(v, response)))
        checksoldout = json_product['inStock']
        if checksoldout == 'Yes':
            item_loader.add_value('sold_out', 1)
        else:
            item_loader.add_value('sold_out', 0)
        return item_loader.load_item()
I expect to get 102 (pages) × 24 = 2448 items, but instead I only get between 1200 and 1300 items.
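To quantify how much of the shortfall is duplication, I am considering counting repeated itemIds inside the spider before anything reaches the pipelines. A sketch (seen_ids is a hypothetical attribute I would add; it is not part of the code above):

    # At the top of parse_item, track how often an itemId repeats across pages:
    item_id = str(json_product['itemId'])
    if not hasattr(self, 'seen_ids'):
        self.seen_ids = set()  # lazily created on first use (hypothetical helper)
    if item_id in self.seen_ids:
        self.logger.debug('Duplicate item %s on %s', item_id, response.url)
    self.seen_ids.add(item_id)

If the duplicate count roughly matches the ~1150 missing items, the site is serving repeated content across pages, rather than Scrapy dropping requests.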