The JSON response contains the HTML. Something appears to block requests at seemingly random points in the all-results loop version at the end.

Single-page version, where you change the value of current_page to the appropriate page number:
import requests
import pandas as pd
from bs4 import BeautifulSoup as bs

url = 'https://www.seethroughny.net/tools/required/reports/payroll?action=get'

headers = {
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'User-Agent': 'Mozilla/5.0',
    'Referer': 'https://www.seethroughny.net/payrolls/110681'
}

data = {
    'PayYear[]': '2018',
    'BranchName[]': 'Villages',
    'SortBy': 'YTDPay DESC',
    'current_page': '0',  # change this to the page number you want
    'result_id': '110687408',
    'url': '/tools/required/reports/payroll?action=get',
    'nav_request': '0'
}

r = requests.post(url, headers=headers, data=data).json()
soup = bs(r['html'], 'lxml')  # the table markup lives in the 'html' key of the JSON response

results = []
for item in soup.select('tr:nth-child(odd)'):  # the odd table rows hold the data records
    row = [subItem.text for subItem in item.select('td')][1:]
    results.append(row)

df = pd.DataFrame(results)
df.to_csv(r'C:\Users\User\Desktop\Data.csv', sep=',', encoding='utf-8-sig', index=False)
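Since only current_page changes between pages, the same request can be wrapped in a small helper; a minimal sketch (the fetch_page name is my own, not part of the site's API):

def fetch_page(page):
    """POST the same form data with only current_page swapped out."""
    payload = dict(data, current_page=str(page))  # copy so the original dict stays untouched
    return requests.post(url, headers=headers, data=payload).json()

# e.g. grab page 3 of the same report
rows = bs(fetch_page(3)['html'], 'lxml').select('tr:nth-child(odd)')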
All-pages version (a work in progress, since the current request can fail to return JSON at various points in the loop despite the delay). It seems to have improved thanks to @sim's suggestion of rotating user agents.
import requests
import pandas as pd
from bs4 import BeautifulSoup as bs
import time
from requests.packages.urllib3.util.retry import Retry
from requests.adapters import HTTPAdapter
import random

# pool of user agents to rotate through when a request fails
ua = [
    'Mozilla/5.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
    'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36',
    'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.71 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; rv:11.0) like Gecko',  # comma required: without it Python silently concatenates adjacent string literals
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'
]
url = 'https://www.seethroughny.net/tools/required/reports/payroll?action=get'

headers = {
    'Accept': 'application/json, text/javascript, */*; q=0.01',
    'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
    'User-Agent': 'Mozilla/5.0',
    'Referer': 'https://www.seethroughny.net/payrolls/110681'
}

data = {
    'PayYear[]': '2018',
    'BranchName[]': 'Villages',
    'SortBy': 'YTDPay DESC',
    'current_page': '0',
    'result_id': '110687408',
    'url': '/tools/required/reports/payroll?action=get',
    'nav_request': '0'
}
results = []
i = 0
result_id_base = int(data['result_id'])  # keep the base so result_id doesn't compound across iterations

with requests.Session() as s:
    retries = Retry(total=5,
                    backoff_factor=0.1,
                    status_forcelist=[500, 502, 503, 504])
    # the target URL is https, so mount the retrying adapter for that scheme too
    s.mount('https://', HTTPAdapter(max_retries=retries))
    s.mount('http://', HTTPAdapter(max_retries=retries))
    while len(results) < 1000:  # placeholder bound; swap in the real total
        data['current_page'] = i
        data['result_id'] = str(result_id_base + i)
        try:
            r = s.post(url, headers=headers, data=data).json()
        except Exception as e:
            print(e)
            time.sleep(2)
            headers['User-Agent'] = random.choice(ua)  # rotate the user agent before the next attempt
            continue  # loop back and retry the same page
        soup = bs(r['html'], 'lxml')
        for item in soup.select('tr:nth-child(odd)'):
            row = [subItem.text for subItem in item.select('td')][1:]
            results.append(row)
        i += 1
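The loop above only accumulates rows; to persist them once it finishes, the same pandas step as in the single-page version applies (the path below is a placeholder):

df = pd.DataFrame(results)
df.to_csv(r'C:\Users\User\Desktop\Data.csv', sep=',', encoding='utf-8-sig', index=False)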
@sim's version:
import requests
import pandas as pd
import time
from bs4 import BeautifulSoup
from fake_useragent import UserAgent

url = 'https://www.seethroughny.net/tools/required/reports/payroll?action=get'

headers = {
    'User-Agent': 'Mozilla/5.0',
    'Referer': 'https://www.seethroughny.net/payrolls/110681'
}

data = {
    'PayYear[]': '2018',
    'BranchName[]': 'Villages',
    'SortBy': 'YTDPay DESC',
    'current_page': '0',
    'result_id': '110687408',
    'url': '/tools/required/reports/payroll?action=get',
    'nav_request': '0'
}
results = []
i = 0
def get_content(i):
    while len(results) < 15908:  # 15908 = total number of rows expected for this query
        print(len(results))
        data['current_page'] = i
        headers['User-Agent'] = ua.random  # fresh random user agent on every request
        try:
            r = requests.post(url, headers=headers, data=data).json()
        except Exception:
            time.sleep(1)
            continue  # retry the same page instead of recursing and falling through to a stale r
        soup = BeautifulSoup(r['html'], 'lxml')
        for item in soup.select('tr:nth-child(odd)'):
            row = [subItem.text for subItem in item.select('td')][1:]
            results.append(row)
        i += 1

if __name__ == '__main__':
    ua = UserAgent()
    get_content(i)
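If the exact total (15908 here) isn't known up front, an alternative is to stop once a page comes back empty. A sketch under that assumption (i.e., that the endpoint returns a row-less table past the last page, which I haven't verified):

def scrape_all(start_page=0):
    page = start_page
    while True:
        data['current_page'] = page
        headers['User-Agent'] = ua.random
        try:
            r = requests.post(url, headers=headers, data=data).json()
        except Exception:
            time.sleep(1)  # back off, then retry the same page
            continue
        rows = BeautifulSoup(r['html'], 'lxml').select('tr:nth-child(odd)')
        if not rows:  # assumption: an empty page means we are past the last result
            break
        for item in rows:
            results.append([td.text for td in item.select('td')][1:])
        page += 1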