You can use find_all with a list comprehension:
import requests
from bs4 import BeautifulSoup as soup

# Parse the FDIC failed-bank list page
d = soup(requests.get('https://www.fdic.gov/bank/individual/failed/banklist.html').text, 'html.parser')
# Headers come from the <th> cells; each data row is the <td> texts of a <tr>, skipping the header row
h = [i.text for i in d.find_all('th')]
data = [[i.text for i in b.find_all('td')] for b in d.find_all('tr')[1:]]
Output (truncated due to SO's character limit):
['Bank Name', 'City', 'ST', 'CERT', 'Acquiring Institution', 'Closing Date', 'Updated Date']
[['The Enloe State Bank', 'Cooper', 'TX', '10716', 'Legend Bank, N. A.', 'May 31, 2019', 'June 5, 2019'], ['Washington Federal Bank for Savings', 'Chicago', 'IL', '30570', 'Royal Savings Bank', 'December 15, 2017', 'February 1, 2019'], ['The Farmers and Merchants State Bank of Argonia', 'Argonia', 'KS', '17719', 'Conway Bank', 'October 13, 2017', 'February 21, 2018'], ['Fayette County Bank', 'Saint Elmo', 'IL', '1802', 'United Fidelity Bank, fsb', 'May 26, 2017', 'January 29, 2019'], ['Guaranty Bank, (d/b/a BestBank in Georgia & Michigan) ', 'Milwaukee', 'WI', '30003', 'First-Citizens Bank & Trust Company', 'May 5, 2017', 'March 22, 2018'], ['First NBC Bank', 'New Orleans', 'LA', '58302', 'Whitney Bank', 'April 28, 2017', 'January 29, 2019'], ['Proficio Bank', 'Cottonwood Heights', 'UT', '35495', 'Cache Valley Bank', 'March 3, 2017', 'January 29, 2019'], ]
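If you want each row keyed by its column name, you can zip the headers with each row. A minimal sketch, assuming every data row has the same number of cells as there are headers:

# Pair each row's cells with the header names (assumes equal row/header lengths)
records = [dict(zip(h, row)) for row in data]
print(records[0]['Bank Name'])  # 'The Enloe State Bank'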