"""
Cannabis Licenses | Get Washington Licenses
Copyright (c) 2022 Cannlytics
Authors:
Keegan Skeate <https://github.com/keeganskeate>
Candace O'Sullivan-Sutherland <https://github.com/candy-o>
Created: 9/29/2022
Updated: 9/29/2022
License: <https://github.com/cannlytics/cannlytics/blob/main/LICENSE>
Description:
Collect Washington cannabis license data.
Data Source:
- Washington State Liquor and Cannabis Board | Frequently Requested Lists
URL: <https://lcb.wa.gov/records/frequently-requested-lists>
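
Command-line usage:

    python get_licenses_wa.py --data_dir <data-dir> --env <env-file>

Note: Geocoding the license addresses requires a Google Maps API key,
read from the `GOOGLE_MAPS_API_KEY` variable in the given `.env` file.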
"""
# Standard imports.
from datetime import datetime
import os
from typing import Optional

# External imports.
from bs4 import BeautifulSoup
from cannlytics.data.gis import geocode_addresses
from dotenv import dotenv_values
import pandas as pd
import requests

# Specify where your data lives.
DATA_DIR = '../data/wa'
ENV_FILE = '../.env'

# Specify state-specific constants.
STATE = 'WA'
WASHINGTON = {
    'licensing_authority_id': 'WSLCB',
    'licensing_authority': 'Washington State Liquor and Cannabis Board',
    'retailers': {
        'key': 'CannabisApplicants',
        'columns': {
            'Tradename': 'business_dba_name',
            'License ': 'license_number',
            'UBI': 'id',
            'Street Address': 'premise_street_address',
            'Suite Rm': 'premise_street_address_2',
            'City': 'premise_city',
            'State': 'premise_state',
            'county': 'premise_county',
            'Zip Code': 'premise_zip_code',
            'Priv Desc': 'license_type',
            'Privilege Status': 'license_status',
            'Day Phone': 'business_phone',
        },
    },
}


def get_licenses_wa(
    data_dir: Optional[str] = None,
    env_file: Optional[str] = '.env',
):
    """Get Washington cannabis license data."""

    # Create the necessary directories, falling back to the
    # module-level data directory if none is given.
    if data_dir is None:
        data_dir = DATA_DIR
    file_dir = f'{data_dir}/.datasets'
    if not os.path.exists(data_dir): os.makedirs(data_dir)
    if not os.path.exists(file_dir): os.makedirs(file_dir)

    # Get the URLs for the license workbooks.
    labs_url, medical_url, retailers_url = None, None, None
    url = 'https://lcb.wa.gov/records/frequently-requested-lists'
    response = requests.get(url)
    soup = BeautifulSoup(response.content, 'html.parser')
    links = soup.find_all('a')
    for link in links:
        # Use `.get` so that anchors without an `href` are skipped.
        href = link.get('href', '')
        if 'Lab-List' in href:
            labs_url = href
        elif 'CannabisApplicants' in href:
            retailers_url = href
        elif 'MedicalCannabisEndorsements' in href:
            medical_url = href
            break

    # TODO: Also download and collect lab + medical dispensary data.

    # Download the licenses workbook.
    filename = retailers_url.split('/')[-1]
    retailers_source_file = os.path.join(file_dir, filename)
    response = requests.get(retailers_url)
    with open(retailers_source_file, 'wb') as doc:
        doc.write(response.content)

    # Extract and standardize the data from the workbook.
    retailers = pd.read_excel(retailers_source_file)
    retailers.rename(columns=WASHINGTON['retailers']['columns'], inplace=True)
    retailers['licensing_authority_id'] = WASHINGTON['licensing_authority_id']
    retailers['licensing_authority'] = WASHINGTON['licensing_authority']
    retailers['license_designation'] = 'Adult-Use'
    retailers['premise_state'] = STATE
    retailers['license_status_date'] = None
    retailers['license_term'] = None
    retailers['issue_date'] = None
    retailers['expiration_date'] = None
    retailers['business_legal_name'] = retailers['business_dba_name']
    retailers['business_owner_name'] = None
    retailers['business_structure'] = None
    retailers['business_email'] = None
    retailers['activity'] = None
    retailers['parcel_number'] = None

    # Keep only active licenses.
    retailers = retailers.loc[
        (retailers['license_status'] == 'ACTIVE (ISSUED)') |
        (retailers['license_status'] == 'ACTIVE TITLE CERTIFICATE')
    ]

    # Convert certain columns from upper case to title case,
    # leaving any non-string (missing) values untouched.
    cols = ['business_dba_name', 'premise_city', 'premise_county',
            'premise_street_address', 'license_type', 'license_status']
    for col in cols:
        retailers[col] = retailers[col].apply(
            lambda x: x.title().strip() if isinstance(x, str) else x
        )

    # Get the refreshed date from the workbook's filename,
    # which encodes the date after the `CannabisApplicants` prefix.
    date = filename.split('.')[0]
    date = date.replace('CannabisApplicants', '')
    date = date[:2] + '-' + date[2:4] + '-' + date[4:]
    retailers['data_refreshed_date'] = pd.to_datetime(date).isoformat()

    # FIXME: Append `premise_street_address_2` to `premise_street_address`.

    # Geocode licenses to get `premise_latitude` and `premise_longitude`.
    config = dotenv_values(env_file)
    google_maps_api_key = config['GOOGLE_MAPS_API_KEY']
    cols = ['premise_street_address', 'premise_city', 'premise_state',
            'premise_zip_code']
    retailers['address'] = retailers[cols].apply(
        lambda row: ', '.join(row.values.astype(str)),
        axis=1,
    )
    retailers = geocode_addresses(
        retailers,
        api_key=google_maps_api_key,
        address_field='address',
    )
    drop_cols = ['state', 'state_name', 'county', 'address', 'formatted_address']
    retailers.drop(columns=drop_cols, inplace=True)
    gis_cols = {
        'latitude': 'premise_latitude',
        'longitude': 'premise_longitude',
    }
    retailers.rename(columns=gis_cols, inplace=True)

    # Save and return the data.
    if data_dir is not None:
        timestamp = datetime.now().isoformat()[:19].replace(':', '-')
        retailers.to_excel(f'{data_dir}/licenses-{STATE.lower()}-{timestamp}.xlsx')
    return retailers
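

# A minimal usage sketch for library-style use. The paths below are the
# module defaults, not requirements, and the `.env` file is assumed to
# define a `GOOGLE_MAPS_API_KEY` for geocoding:
#
#     licenses = get_licenses_wa(data_dir='../data/wa', env_file='../.env')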

if __name__ == '__main__':

    # Support command line usage.
    import argparse
    try:
        arg_parser = argparse.ArgumentParser()
        arg_parser.add_argument('--d', dest='data_dir', type=str)
        arg_parser.add_argument('--data_dir', dest='data_dir', type=str)
        arg_parser.add_argument('--env', dest='env_file', type=str)
        # Parse the arguments into a dictionary so `.get()` works below.
        args = vars(arg_parser.parse_args())
    except SystemExit:
        args = {'data_dir': DATA_DIR, 'env_file': ENV_FILE}

    # Get licenses, saving them to the specified directory.
    data_dir = args.get('data_dir') or DATA_DIR
    env_file = args.get('env_file') or ENV_FILE
    data = get_licenses_wa(data_dir, env_file=env_file)