#!/usr/bin/env python3
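
# Fetch traffic-camera locations from several states' 511 traveler-information
# sites (which appear to share the same GraphQL map backend), write one GeoJSON
# file per state under data/, and generate data/states.js so they can all be
# imported by the frontend.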

import requests
import json
import re

states = {
    "Minnesota": "https://511mn.org/",
    "Colorado": "https://maps.cotrip.org/",
    "Iowa": "https://511ia.org/",
    "Indiana": "https://511in.org/",
    "Kansas": "https://www.kandrive.gov/",
    "Massachusetts": "https://mass511.com/",
    "Nebraska": "https://new.511.nebraska.gov/"
}

with open("query.graphql") as f:
    QUERY = f.read()

for state, baseURL in states.items():
    print(f"{state}: ", end="", flush=True)
    PAYLOAD = [
        {
            "query": QUERY,
            "variables": {
                "input": {
                    # Cover the whole state (this is pretty overkill, admittedly)
                    "north": 90,
                    "south": 0,
                    "east": 0,
                    "west": -179,
                    "zoom": 15,
                    "layerSlugs": ["normalCameras"],
                    "nonClusterableUris": ["dashboard"],
                },
                "plowType": "plowCameras",
            },
        },
    ]
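
    # The query is sent as a single-element list (batched GraphQL style), which
    # is presumably why the response below is indexed with [0].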
    res = requests.post(f'{baseURL}api/graphql', json=PAYLOAD)
    res.raise_for_status()
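
    # Assumed response shape, inferred from the fields accessed below:
    #   [{"data": {"mapFeaturesQuery": {"mapFeatures": [
    #       {"tooltip": ..., "features": [{"geometry": ...}],
    #        "views": [{"category": "PHOTO" | "VIDEO", "url": ..., "sources": [...]}]},
    #       ...
    #   ]}}}]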
    camera_views = res.json()[0]['data']['mapFeaturesQuery']['mapFeatures']

    cameras = []

    viewCount = 0
    photoCount = 0
    videoCount = 0
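
    # Each returned map feature should be exactly one camera location. A tooltip
    # like "Show N cameras" means the API returned a clustered/aggregate marker,
    # i.e. the zoom level in the payload wasn't high enough.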
    for c in camera_views:
        if len(c['features']) != 1:
            print(c)
            raise Exception(f"Unexpected number of features: {len(c['features'])}")

        if re.match(r"Show .* cameras", c['tooltip']):
            raise Exception(f"Not zoomed in enough! Finding aggregate cameras: {c}")

        if len(c['views']) == 0:
            raise Exception("Camera has no views")

        for view in c['views']:
            if view['category'] != c['views'][0]['category']:
                print(f"warn: Differing types detected: {c['views']}")
            if view['category'] == 'VIDEO':
                if state == "Nebraska":
                    # Nebraska has mislabeled a small amount of their data;
                    # there is one location with 4 views labeled as "VIDEO"
                    # which are not, in fact, video-containing views
                    view['category'] = "PHOTO"
                    continue
                videoCount += 1
                if len(view['sources']) != 1:
                    raise Exception(f"Unexpected number of sources ({len(view['sources'])})")
            else:
                photoCount += 1
            for source in view['sources'] or []:
                if source['type'] != 'application/x-mpegURL':
                    raise Exception(f"Unexpected type {source['type']}")
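
        # Emit one GeoJSON Feature per camera location: the tooltip becomes the
        # name, and each view is reduced to an HLS stream URL (video) or a plain
        # URL (presumably a still image) plus a hasVideo flag.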
        cameras.append({
            "type": "Feature",
            "geometry": c['features'][0]['geometry'],
            "properties": {
                'name': c['tooltip'],
                'views': [
                    {
                        'hasVideo': v['category'] == 'VIDEO',
                        'src': v['sources'][0]['src'] if v['category'] == 'VIDEO' else v['url'],
                    } for v in c['views']
                ],
                # 'originalData': c,
            },
        })

    geojson = {
        "type": "FeatureCollection",
        "features": cameras,
    }
with open(f"data/{state}.geojson", "w") as f:
|
|
f.write(json.dumps(geojson))
|
|
|
|
print(f"{len(cameras)} locations found")
|
|
print(f"{state}: {photoCount} photo + {videoCount} video cameras")
|
|
|
|

# hack hack hack
#
# If I write this to one big file, I can't take advantage of any lazy loading
# for performance reasons, so I'm constrained to having a bunch of files. I
# can't programmatically import those, since es6 imports don't allow for that.
# So, codegen it is (and fairly gross codegen at that!). A sketch of the
# generated file is in the comment after this block.
with open('data/states.js', 'w') as f:
    for state in states:
        f.write(f"import {state} from './{state}.geojson?url';\n")
    f.write('\nexport default {\n')
    for state in states:
        f.write(f"  {state}: {state},\n")
    f.write("};\n")
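
# For illustration only: given the writes above, the generated data/states.js
# should come out roughly like this (one import and one entry per key in states):
#
#   import Minnesota from './Minnesota.geojson?url';
#   import Colorado from './Colorado.geojson?url';
#   ...
#
#   export default {
#     Minnesota: Minnesota,
#     Colorado: Colorado,
#     ...
#   };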