Add UPS service area layers
Requested-By: Eric Villnow
This commit is contained in:
parent
2faf722f03
commit
7e5cb5ff1d
7 changed files with 180 additions and 0 deletions
|
|
@ -22,6 +22,7 @@ import tjx from './tjx/index.js';
|
|||
import minnesotaAdventureTrails from './minnesota-adventure-trails/index.js';
|
||||
import cropHistory from './crop-history/index.js';
|
||||
import mnAmbulanceServiceAreas from './mn-ambulance-service-areas/layer.js';
|
||||
import upsServiceAreas from './ups/index.js';
|
||||
|
||||
const layerCategories = [
|
||||
{ // Base maps
|
||||
|
|
@ -94,6 +95,7 @@ const layerCategories = [
|
|||
mnAmbulanceServiceAreas,
|
||||
]
|
||||
},
|
||||
upsServiceAreas,
|
||||
minnesotaAdventureTrails,
|
||||
survey_markers,
|
||||
dot_cams,
|
||||
|
|
|
|||
2
layers/ups/.gitignore
vendored
Normal file
2
layers/ups/.gitignore
vendored
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
*.csv
|
||||
*.zip
|
||||
6
layers/ups/README.md
Normal file
6
layers/ups/README.md
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
# UPS
|
||||
|
||||
## obtaining data
|
||||
Go to https://www.ups.com/hcic/hcic and download the latest CSV; edit the script
|
||||
to reference the new file name. (Note that HCIC has been loginwalled (2023),
|
||||
404ing (2024), and 500ing (2025-11-29).)
|
||||
77
layers/ups/get_data.py
Executable file
77
layers/ups/get_data.py
Executable file
|
|
@ -0,0 +1,77 @@
|
|||
#!/usr/bin/env nix-shell
#!nix-shell -i "python3 -i" -p python3Packages.shapely python3Packages.fiona python3Packages.requests

# Build per-state GeoJSON files of UPS service-center areas by dissolving the
# ZCTA (zip code) polygons that UPS's URC25.5V table assigns to each center.
# Run under `python3 -i` on purpose: the parsed globals (centers_by_state,
# zips_by_center, zips) stay available for interactive inspection afterwards.

print("Starting…")

from collections import defaultdict
import csv
import json
import os
import io
import sys

import requests
import fiona
from shapely.geometry import shape, mapping
from shapely.ops import unary_union

print("Completed imports")

# Create lists of zip codes per center and centers per state
print("parsing URC255V.csv…", end="", flush=True)
if not os.path.exists("URC255V.csv"):
    print("Warning: URC255V.csv not found. Please download it from UPS and place it in this directory.")
    sys.exit(1)  # sys.exit, not the site builtin exit(), which is not guaranteed to exist
centers_by_state = defaultdict(set)  # state abbrev -> {center name, ...}
zips_by_center = defaultdict(list)   # center name -> ["55336", ...] (5-digit strings)
# Sample of the CSV (header row, then data):
# "CountryCode","PostalLow","PostalHigh","URC25.5V","06/2023"
# "US","55336","55336"," MN 553 0-01"
# "US","55337","55337"," MN 551 9-02"
# Read URC255V.csv into the dicts above, closing the file when done.
with open("URC255V.csv", encoding="utf-8") as csv_file:
    for row in csv.DictReader(csv_file):
        if row["CountryCode"] == "US":
            # " MN 553 0-01" -> center "MN 553 0"; its state is the leading token.
            center = row["URC25.5V"].strip().split('-')[0]
            state = center.split(" ")[0]
            centers_by_state[state].add(center)
            # Each row covers an inclusive range of zip codes.
            for zip_code in range(int(row["PostalLow"]), int(row["PostalHigh"]) + 1):
                zips_by_center[center].append(str(zip_code).zfill(5))
print("complete.")

# Fetch and parse zip code geometries
# TODO: could also get as geopackage or kml file I think? Not sure if either of those is easier to open.
print("fetching zip code data…", end="", flush=True)
res = requests.get("https://www2.census.gov/geo/tiger/GENZ2020/shp/cb_2020_us_zcta520_500k.zip")
res.raise_for_status()
print("complete.")
print("parsing zip code data…", end="", flush=True)
# Yeah, this loads into memory, but the file is only 60ish MiB.
zip_data = list(fiona.io.ZipMemoryFile(io.BytesIO(res.content)).open())
zips = {}  # 5-digit zip string -> fiona geometry
for zcta in zip_data:  # PIVOT! PIVOT!  (zcta, not `zip` — don't shadow the builtin)
    zips[zcta.properties["NAME20"]] = zcta.geometry
print("complete.")

# Save output
print("writing output files…")
os.makedirs("states", exist_ok=True)
for state, centers in centers_by_state.items():
    print(" "+ state)
    features = []
    for center in centers:
        center_zips = zips_by_center[center]
        features.append({
            "type": "Feature",
            "properties": {
                "center": center,
            },
            # Dissolve all of the center's zip polygons into one geometry;
            # zips absent from the Census file are silently skipped.
            "geometry": mapping(unary_union([shape(zips[zc]) for zc in center_zips if zc in zips])),
        })
    with open(f"states/{state}.geojson", "w", encoding="utf-8") as f:
        json.dump(
            {
                "type": "FeatureCollection",
                "features": features,
            },
            f,
        )
print("complete.")
|
||||
38
layers/ups/index.js
Normal file
38
layers/ups/index.js
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
import VectorLayer from 'ol/layer/Vector';
import {Vector as VectorSource} from 'ol/source.js';
import GeoJSON from 'ol/format/GeoJSON.js';
import states from './states.js';

import {Style, Fill, Text, Stroke} from 'ol/style.js';

// One vector layer per state; each feature is a UPS service-center area,
// labeled with the `center` property written by get_data.py.

// Fill and stroke never vary, so share single instances across all features.
const sharedFill = new Fill({
  color: 'rgba(255,255,255,0.4)',
});
const sharedStroke = new Stroke({
  color: '#3399CC',
  width: 1.25,
});
// Style functions run on every render frame; cache one Style per label
// instead of allocating a fresh Style/Text pair on each call.
const styleCache = new Map();

const layers = [];
for (const [name, url] of Object.entries(states)) {
  const vectorLayer = new VectorLayer({
    source: new VectorSource({
      url,
      format: new GeoJSON(),
    }),
    style: (feature) => {
      const label = feature.get('center');
      let style = styleCache.get(label);
      if (style === undefined) {
        style = new Style({
          text: new Text({
            text: label,
          }),
          fill: sharedFill,
          stroke: sharedStroke,
        });
        styleCache.set(label, style);
      }
      return style;
    },
  });
  layers.push({
    name: name + ' UPS',
    layer: vectorLayer,
  });
}

layers.sort((a, b) => a.name > b.name ? 1 : -1); // Names are always unique

export default {name: "UPS Service Areas (stale 2023-06 data)", layers};
|
||||
52
layers/ups/states.js
Normal file
52
layers/ups/states.js
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
// Maps each two-letter state code to the bundled asset URL of its GeoJSON
// file (generated by get_data.py). The `?url` suffix tells the bundler to
// emit a URL for the file rather than inlining its contents.
import AK from './states/AK.geojson?url';
import AL from './states/AL.geojson?url';
import AR from './states/AR.geojson?url';
import AZ from './states/AZ.geojson?url';
import CA from './states/CA.geojson?url';
import CO from './states/CO.geojson?url';
import CT from './states/CT.geojson?url';
import DE from './states/DE.geojson?url';
import FL from './states/FL.geojson?url';
import GA from './states/GA.geojson?url';
import HI from './states/HI.geojson?url';
import IA from './states/IA.geojson?url';
import ID from './states/ID.geojson?url';
import IL from './states/IL.geojson?url';
import IN from './states/IN.geojson?url';
import KS from './states/KS.geojson?url';
import KY from './states/KY.geojson?url';
import LA from './states/LA.geojson?url';
import MA from './states/MA.geojson?url';
import MD from './states/MD.geojson?url';
import ME from './states/ME.geojson?url';
import MI from './states/MI.geojson?url';
import MN from './states/MN.geojson?url';
import MO from './states/MO.geojson?url';
import MS from './states/MS.geojson?url';
import MT from './states/MT.geojson?url';
import NC from './states/NC.geojson?url';
import ND from './states/ND.geojson?url';
import NE from './states/NE.geojson?url';
import NH from './states/NH.geojson?url';
import NJ from './states/NJ.geojson?url';
import NM from './states/NM.geojson?url';
import NV from './states/NV.geojson?url';
import NY from './states/NY.geojson?url';
import OH from './states/OH.geojson?url';
import OK from './states/OK.geojson?url';
import OR from './states/OR.geojson?url';
import PA from './states/PA.geojson?url';
import RI from './states/RI.geojson?url';
import SC from './states/SC.geojson?url';
import SD from './states/SD.geojson?url';
import TN from './states/TN.geojson?url';
import TX from './states/TX.geojson?url';
import UT from './states/UT.geojson?url';
import VA from './states/VA.geojson?url';
import VT from './states/VT.geojson?url';
import WA from './states/WA.geojson?url';
import WI from './states/WI.geojson?url';
import WV from './states/WV.geojson?url';
import WY from './states/WY.geojson?url';

// All 50 states (no DC or territories — the generator only saw these).
export default { AK, AL, AR, AZ, CA, CO, CT, DE, FL, GA, HI, IA, ID, IL, IN, KS, KY, LA, MA, MD, ME, MI, MN, MO, MS, MT, NC, ND, NE, NH, NJ, NM, NV, NY, OH, OK, OR, PA, RI, SC, SD, TN, TX, UT, VA, VT, WA, WI, WV, WY };
|
||||
3
util/util.js
Normal file
3
util/util.js
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
// The two-letter USPS abbreviations of the 50 U.S. states
// (no DC or territories).
const states = [
  'AL', 'AK', 'AR', 'AZ', 'CA', 'CO', 'CT', 'DE', 'FL', 'GA',
  'HI', 'IA', 'ID', 'IL', 'IN', 'KS', 'KY', 'LA', 'MA', 'MD',
  'ME', 'MI', 'MN', 'MO', 'MS', 'MT', 'NE', 'NV', 'NH', 'NJ',
  'NM', 'NY', 'NC', 'ND', 'OH', 'OK', 'OR', 'PA', 'RI', 'SC',
  'SD', 'TN', 'TX', 'UT', 'VA', 'VT', 'WA', 'WI', 'WV', 'WY',
];

export {states};
|
||||
Loading…
Add table
Add a link
Reference in a new issue