Skip to content

Instantly share code, notes, and snippets.

@apsun
Created February 22, 2025 19:31
Show Gist options
  • Save apsun/2d7294a6556890213ffd899471dfcf9e to your computer and use it in GitHub Desktop.
#!/bin/bash
#
# generates a database containing all amateur radio entities from the FCC
# documentation: https://www.fcc.gov/wireless/data/public-access-files-database-downloads
#
# example usage:
# find all 1x3 callsigns with suffix ABC
# select call_sign, license_status from HD where call_sign like '__ABC';
#
# find all callsigns registered in zip code 98005
# select call_sign from EN where zip_code like '98005%';
set -euo pipefail

# output database path and FCC ULS download base URL
readonly DB_FILE='amateur.db'
readonly ULS_URL='https://data.fcc.gov/download/pub/uls'

# start from a clean slate; -f makes this a no-op if the db doesn't exist,
# so no existence check is needed
rm -f "${DB_FILE}"
# initialize table schema
# https://www.fcc.gov/sites/default/files/public_access_database_definitions_sql_20240215.txt
sqlite3 "${DB_FILE}" <<EOF
create table HD
(
record_type char(2) not null,
unique_system_identifier numeric(9,0) not null,
uls_file_number char(14) null,
ebf_number varchar(30) null,
call_sign char(10) null,
license_status char(1) null,
radio_service_code char(2) null,
grant_date char(10) null,
expired_date char(10) null,
cancellation_date char(10) null,
eligibility_rule_num char(10) null,
applicant_type_code_reserved char(1) null,
alien char(1) null,
alien_government char(1) null,
alien_corporation char(1) null,
alien_officer char(1) null,
alien_control char(1) null,
revoked char(1) null,
convicted char(1) null,
adjudged char(1) null,
involved_reserved char(1) null,
common_carrier char(1) null,
non_common_carrier char(1) null,
private_comm char(1) null,
fixed char(1) null,
mobile char(1) null,
radiolocation char(1) null,
satellite char(1) null,
developmental_or_sta char(1) null,
interconnected_service char(1) null,
certifier_first_name varchar(20) null,
certifier_mi char(1) null,
certifier_last_name varchar(20) null,
certifier_suffix char(3) null,
certifier_title char(40) null,
gender char(1) null,
african_american char(1) null,
native_american char(1) null,
hawaiian char(1) null,
asian char(1) null,
white char(1) null,
ethnicity char(1) null,
effective_date char(10) null,
last_action_date char(10) null,
auction_id int null,
reg_stat_broad_serv char(1) null,
band_manager char(1) null,
type_serv_broad_serv char(1) null,
alien_ruling char(1) null,
licensee_name_change char(1) null,
whitespace_ind char(1) null,
additional_cert_choice char(1) null,
additional_cert_answer char(1) null,
discontinuation_ind char(1) null,
regulatory_compliance_ind char(1) null,
eligibility_cert_900 char(1) null,
transition_plan_cert_900 char(1) null,
return_spectrum_cert_900 char(1) null,
payment_cert_900 char(1) null
);
create table EN
(
record_type char(2) not null,
unique_system_identifier numeric(9,0) not null,
uls_file_number char(14) null,
ebf_number varchar(30) null,
call_sign char(10) null,
entity_type char(2) null,
licensee_id char(9) null,
entity_name varchar(200) null,
first_name varchar(20) null,
mi char(1) null,
last_name varchar(20) null,
suffix char(3) null,
phone char(10) null,
fax char(10) null,
email varchar(50) null,
street_address varchar(60) null,
city varchar(20) null,
state char(2) null,
zip_code char(9) null,
po_box varchar(20) null,
attention_line varchar(35) null,
sgin char(3) null,
frn char(10) null,
applicant_type_code char(1) null,
applicant_type_other char(40) null,
status_code char(1) null,
status_date datetime null,
lic_category_code char(1) null,
linked_license_id numeric(9,0) null,
linked_callsign char(10) null
);
EOF
for table in 'EN' 'HD'; do
  # ingest weekly (aggregated) db; -f makes curl fail on HTTP errors instead
  # of caching the server's error page as the .zip, which would poison every
  # subsequent run
  [ -f 'l_amat.zip' ] || curl -f -o 'l_amat.zip' "${ULS_URL}/complete/l_amat.zip"
  unzip -q -o 'l_amat.zip' "${table}.dat"
  sqlite3 -ascii -separator '|' -newline $'\n' "${DB_FILE}" ".import ${table}.dat ${table}"
  # NOTE(review): 'date -r FILE +%s' (file mtime as epoch seconds) is GNU
  # coreutils behavior; on BSD/macOS use 'stat -f %m' instead
  weekly_time="$(date -r "${table}.dat" '+%s')"
  rm "${table}.dat"

  # download and extract daily (incremental) files; a given day's archive may
  # not contain this table, in which case it is skipped
  files=()
  for day in 'sun' 'mon' 'tue' 'wed' 'thu' 'fri' 'sat'; do
    [ -f "l_am_${day}.zip" ] || curl -f -o "l_am_${day}.zip" "${ULS_URL}/daily/l_am_${day}.zip"
    unzip -q -o "l_am_${day}.zip" "${table}.dat" || continue
    mv "${table}.dat" "${table}.dat.${day}"
    files+=("${table}.dat.${day}")
  done

  # nothing extracted for this table; also avoids expanding an empty array,
  # which errors under 'set -u' on bash < 4.4
  if [ "${#files[@]}" -eq 0 ]; then
    continue
  fi

  # ingest daily files oldest-first, skipping any not newer than the weekly
  # snapshot; sort explicitly on mtime rather than parsing 'ls -tr' output
  while IFS=$'\t' read -r daily_time file; do
    if [ "${daily_time}" -gt "${weekly_time}" ]; then
      sqlite3 -ascii -separator '|' -newline $'\n' "${DB_FILE}" ".import ${file} ${table}"
    fi
    rm "${file}"
  done < <(
    for f in "${files[@]}"; do
      printf '%s\t%s\n' "$(date -r "${f}" '+%s')" "${f}"
    done | sort -n
  )
done
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment