2024-01-30 00:14:55 -06:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
|
|
|
import requests
|
|
|
|
import json
|
|
|
|
import re
|
|
|
|
|
2024-01-31 01:17:04 -06:00
|
|
|
# The 511 traveler-information sites to scrape, keyed by state name.
# Each value is the base URL hosting that state's GraphQL endpoint.
states = dict(
    Minnesota="https://511mn.org/",
    Colorado="https://maps.cotrip.org/",
    Iowa="https://511ia.org/",
    Indiana="https://511in.org/",
    Kansas="https://www.kandrive.gov/",
    Massachusetts="https://mass511.com/",
    Nebraska="https://new.511.nebraska.gov/",
)
|
2024-01-30 00:14:55 -06:00
|
|
|
|
|
|
|
# Load the GraphQL query text shared by every state's request.
# Explicit encoding avoids depending on the locale's default codec,
# which can differ across machines (e.g. cp1252 on Windows).
with open("query.graphql", encoding="utf-8") as f:
    QUERY = f.read()
|
|
|
|
|
|
|
|
# For each state: query its 511 map API for every camera, validate the
# response shape, and write the result as GeoJSON to data/<state>.geojson.
for state, baseURL in states.items():
    print(f"{state}: ", end="", flush=True)

    # Per-request GraphQL body (rebuilt each iteration, so lower-case name,
    # not a module constant).
    payload = [
        {
            "query": QUERY,
            "variables": {
                "input": {
                    # Cover the whole state (this is pretty overkill, admittedly)
                    "north": 90,
                    "south": 0,
                    "east": 0,
                    "west": -179,
                    # High zoom discourages the server from clustering
                    # cameras into aggregate markers (checked again below).
                    "zoom": 15,
                    "layerSlugs": ["normalCameras"],
                    "nonClusterableUris": ["dashboard"],
                },
                "plowType": "plowCameras",
            },
        },
    ]

    res = requests.post(f'{baseURL}api/graphql', json=payload)
    res.raise_for_status()

    camera_views = res.json()[0]['data']['mapFeaturesQuery']['mapFeatures']

    cameras = []
    photoCount = 0
    videoCount = 0

    for c in camera_views:
        # Every map feature is expected to be a single point geometry.
        if len(c['features']) != 1:
            print(c)
            raise Exception(f"Unexpected number of features: {len(c['features'])}")

        # An aggregate marker means the server clustered several cameras;
        # the zoom level in the payload is supposed to prevent this.
        if re.match(r"Show .* cameras", c['tooltip']):
            raise Exception(f"Not zoomed in enough! Finding aggregate cameras: {c}")

        if len(c['views']) == 0:
            raise Exception("Camera has no views")

        for view in c['views']:
            # Mixed photo/video views at one location are unusual; warn only.
            if view['category'] != c['views'][0]['category']:
                print(f"warn: Differing types detected: {c['views']}")
            if view['category'] == 'VIDEO':
                if state == "Nebraska":
                    # Nebraska has mislabeled a small amount of their data;
                    # there is one location with 4 views labeled as "VIDEO"
                    # which are not, in fact, video-containing views.
                    # NOTE(review): relabeled views are counted in neither
                    # photoCount nor videoCount (the continue skips both).
                    view['category'] = "PHOTO"
                    continue
                videoCount += 1
                if len(view['sources']) != 1:
                    raise Exception(f"Unexpected number of sources ({len(view['sources'])})")
            else:
                photoCount += 1

            # Photo views carry no usable 'sources' list — hence the `or []`.
            # Video sources must be HLS playlists.
            for source in view['sources'] or []:
                if source['type'] != 'application/x-mpegURL':
                    raise Exception(f"Unexpected type {source['type']}")

        cameras.append({
            "type": "Feature",
            "geometry": c['features'][0]['geometry'],
            "properties": {
                'name': c['tooltip'],
                'views': [
                    {
                        'hasVideo': v['category'] == 'VIDEO',
                        'src': v['sources'][0]['src'] if v['category'] == 'VIDEO' else v['url'],
                    } for v in c['views']
                ],
                # 'originalData': c,
            },
        })

    geojson = {
        "type": "FeatureCollection",
        "features": cameras,
    }

    with open(f"data/{state}.geojson", "w") as f:
        f.write(json.dumps(geojson))

    print(f"{len(cameras)} locations found")
    print(f"{state}: {photoCount} photo + {videoCount} video cameras")
|
2024-01-30 00:14:55 -06:00
|
|
|
|
|
|
|
# hack hack hack
#
# If I write this to one big file, I can't take advantage of any lazy loading
# for performance reasons, so I'm constrained to having a bunch of files. I
# can't programmatically import those, since es6 imports don't allow for that.
# So, codegen it is (and fairly gross codegen at that!).
with open('data/states.js', 'w') as out:
    # Build the whole module in memory, then write it in one shot.
    module_lines = [
        f"import {state} from './{state}.geojson?url';\n" for state in states
    ]
    module_lines.append('\nexport default {\n')
    module_lines.extend(f"    {state}: {state},\n" for state in states)
    module_lines.append("};\n")
    out.writelines(module_lines)
|