import requests
from bs4 import BeautifulSoup
# Download the national directory page and collect absolute URLs for every
# US state's Christmas-tree-farm listing (pages named like "ALxmastrees.php").
source = requests.get('http://www.pickyourownchristmastree.org/').text
soup = BeautifulSoup(source, 'html.parser')
concatenate = 'http://www.pickyourownchristmastree.org/'  # base URL prefix for relative links
state_links = []
# Two-letter postal abbreviations for all 50 US states.
ab = ['AL', 'AK', 'AZ', 'AR', 'CA', 'CO', 'CT', 'DE', 'FL', 'GA', 'HI', 'ID', 'IL', 'IN', 'IA', 'KS', 'KY', 'LA',
      'ME', 'MD', 'MA', 'MI', 'MN', 'MS', 'MO', 'MT', 'NE', 'NV', 'NH', 'NJ', 'NM', 'NY', 'NC', 'ND', 'OH', 'OK',
      'OR', 'PA', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VT', 'VA', 'WA', 'WV', 'WI', 'WY']
# Each state's page follows the pattern "<ABBR>xmastrees.php".  Use a set so the
# membership test inside the loop below is O(1) instead of scanning a 50-item list.
ab = {x + "xmastrees.php" for x in ab}
find_state_group = soup.find('div', class_='alert')  # container holding the state links
links = find_state_group.find_all('a', href=True)
for link in links:
    if link['href'] in ab:
        states = concatenate + link['href']  # absolute URL for this state's listing
        state_links.append(states)
print(state_links)
# Visit each state's page and print the absolute URL of every county-level
# listing linked from its image map (<area> tags whose href is a .php page).
for source_state in state_links:
    # BUG FIX: the original re-assigned source_state from a slice of the
    # leftover `states` variable (`states[-1 + i:]`), so it fetched mangled
    # URLs instead of each state page.  The loop variable already holds the
    # correct URL; the `i` counter existed only to feed that broken slice.
    x = requests.get(source_state).text
    # Use 'html.parser' (stdlib) to match the parser used for the index page
    # above, instead of the third-party 'lxml' backend.
    soup_state = BeautifulSoup(x, 'html.parser')
    state_county = soup_state.find('div', class_='alert')
    for county_link in state_county.find_all('area', href=True):
        if '.php' in county_link['href']:
            county_link_update = concatenate + county_link['href']
            print(county_link_update)