Я пытаюсь извлечь (scrape) данные со страницы Википедии (таблица 100 лучших синглов за определённые годы) и сохранить вывод в csv. Данные за 1951–1959 гг. получены успешно, затем возникает ошибка:
  File "script.py", line 43, in <module>
    writer.writerow(songs)
  File "C:\Python36_64\lib\encodings\cp1252.py", line 19, in encode
    return codecs.charmap_encode(input, self.errors, encoding_table)[0]
UnicodeEncodeError: 'charmap' codec can't encode character '\u0107' in position 29: character maps to <undefined>
код:
from bs4 import BeautifulSoup
import requests
import csv
data = []
def scrape_data(search_year):
year_data = []
url = f'https://en.wikipedia.org/wiki/Billboard_Year-End_Hot_100_singles_of_{str(search_year)}'
# Get a source code from url
r = requests.get(url).text
soup = BeautifulSoup(r, 'html.parser')
# Isolate the table part from the source code
table = soup.find('table', attrs={'class': 'wikitable'})
# Extract every row of the table
rows = table.find_all('tr')
# Iterate through every row
for row in rows[1:]:
# Extract cols (with tags td and th)
cols = row.find_all(['td', 'th'])
# List comprehension (create a list of lists, list of rows, in which every row is a list of table text)
year_data.append([col.text.replace('\n', '') for col in cols])
# Add the year, this data is from to the beginning of the list
for n in year_data:
n.insert(0, search_year)
return year_data
for year in range(1951, 2019):
try:
data.append(scrape_data(year))
print(f'Year {str(year)} Scrapped')
except AttributeError as e:
print(f'Year {str(year)} is not aviable')
writer = csv.writer(open('songs.csv', 'w'), delimiter=',', lineterminator='\n', quotechar='"')
for year_data in data:
for songs in year_data:
writer.writerow(songs)
print(songs)