#!/usr/bin/python3

import requests
import json

states = {
    "Alaska": "https://511.alaska.gov/",
    "Arizona": "https://az511.com/",
    "Connecticut": "https://ctroads.org/",
    "Georgia": "https://511ga.org/",
    "Idaho": "https://511.idaho.gov/",
    "Louisiana": "https://www.511la.org/",
    "Nevada": "https://www.nvroads.com/",
    "NewEngland": "https://newengland511.org/",
    "NewYork": "https://www.511ny.org/",
    "Utah": "https://www.udottraffic.utah.gov/",
    "Wisconsin": "https://511wi.gov/",
}
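
# NB: the keys above double as JavaScript identifiers in the generated
# data/states.js (see the codegen at the bottom), which is why they can't
# contain spaces -- hence "NewEngland" and "NewYork"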

for state, baseURL in states.items():
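    # a fresh query template for each state; the paging loop below mutates "start"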
    query = {
        "columns": [  # no clue what any of this is, so here it stays
            {"data": None, "name": ""},
            {"name": "sortId", "s": True},
            {"name": "region", "s": True},
            {"name": "county", "s": True},
            {"name": "roadway", "s": True},
            {"name": "description1"},
            {"data": 6, "name": ""},
        ],
        "start": 0,
        "length": 100,
    }
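
    # (guess) this payload -- bare "columns" plus "start"/"length" here, and the
    # fuller draw/order/search variant Connecticut wants below -- looks like the
    # jQuery DataTables server-side processing protocol, with "s" presumably
    # short for "searchable"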

    video_camera_count = 0
    still_camera_count = 0

    cameras = []
    available_cameras = 999_999  # lots

    while len(cameras) < available_cameras:
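        # one page per request; the first response's recordsTotal replaces the
        # sentinel above, so the loop stops once every camera is collected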
        if state == "Connecticut":
            # gotta be a special snowflake I guess?
            # (NB: baseURL already ends with '/', so no extra slash before List)
            res = requests.post(f"{baseURL}List/GetData/Cameras", json={
                "draw": 1,
                "columns": [
                    {"data": "sortId", "name": "sortId", "searchable": True, "orderable": True, "search": {"value": "", "regex": False}},
                    {"data": "cityName", "name": "cityName", "searchable": True, "orderable": True, "search": {"value": "", "regex": False}},
                    {"data": "roadway", "name": "roadway", "searchable": True, "orderable": True, "search": {"value": "", "regex": False}},
                    {"data": "sortIdDisplay", "name": "sortIdDisplay", "searchable": True, "orderable": True, "search": {"value": "", "regex": False}},
                    {"data": "description1", "name": "description1", "searchable": False, "orderable": True, "search": {"value": "", "regex": False}},
                    {"data": "direction", "name": "direction", "searchable": False, "orderable": False, "search": {"value": "", "regex": False}},
                    {"data": 6, "name": "", "searchable": False, "orderable": False, "search": {"value": "", "regex": False}},
                ],
                "order": [{"column": 0, "dir": "asc"}, {"column": 1, "dir": "asc"}],
                # this was hardcoded to "start": 0, "length": 10, which re-fetched
                # the same first page on every iteration; share the paging state
                # instead (assumes the CT endpoint honors length=100 like the rest)
                "start": query["start"],
                "length": query["length"],
                "search": {"value": "", "regex": False},
            })
        else:
            res = requests.get(f"{baseURL}List/GetData/Cameras", params={
                "query": json.dumps(query),
                "lang": "en",
            })
        res.raise_for_status()
        res = res.json()
        available_cameras = res['recordsTotal']
        if not res['data']:
            # defensive: bail if the server ever returns an empty page, since
            # the while condition alone would spin forever on one
            break
        for c in res['data']:
            # track has_video alongside src so the hasVideo property below
            # always agrees with the branch taken (it used to report True for
            # Idaho's videoUrl == '0' camera while linking a still image)
            if 'videoUrl' in c and isinstance(c['videoUrl'], list):  # LA returns multiple (identical?) streams
                src = c['videoUrl'][0]
                has_video = True
                video_camera_count += 1
            elif c.get('videoUrl') and c['videoUrl'] != '0':
                # Yeah, Idaho has a single camera where videoUrl = '0'. Nice.
                src = c['videoUrl']
                has_video = True
                video_camera_count += 1
            else:
                # no stream at all: link the site's still-image page instead
                src = f"{baseURL}map/Cctv/{c['id']}"
                has_video = False
                still_camera_count += 1

            cameras.append({
                "type": "Feature",
                "geometry": {
                    "type": "Point",
                    "coordinates": [c['longitude'], c['latitude']],  # yes, [lon, lat] since it's [x, y]
                },
                "properties": {
                    "name": c['displayName'],
                    "views": [{
                        "hasVideo": has_video,
                        "src": src,
                    }],
                },
            })

        query['start'] += len(res['data'])  # advance by what the server actually returned
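
    # GeoJSON (RFC 7946) wants a FeatureCollection wrapping the features, with
    # positions ordered [longitude, latitude]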
    geojson = {
        "type": "FeatureCollection",
        "features": cameras,
    }

    with open(f"data/{state}.geojson", "w") as f:
        json.dump(geojson, f)

    print(f"{len(cameras)} locations found for {state}")
    print(f"{state}: {still_camera_count} photo + {video_camera_count} video cameras")

# hack hack hack
#
# If I write this to one big file, I can't take advantage of any lazy loading
# for performance reasons, so I'm constrained to having a bunch of files. I
# can't programmatically import those, since es6 imports don't allow for that.
# So, codegen it is (and fairly gross codegen at that!).
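#
# For reference, the generated data/states.js ends up shaped like this (one
# import line and one object entry per state):
#
#   import Alaska from './Alaska.geojson?url';
#   import Arizona from './Arizona.geojson?url';
#   ...
#
#   export default {
#     Alaska: Alaska,
#     Arizona: Arizona,
#     ...
#   };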
with open('data/states.js', 'w') as f:
    for state in states:
        f.write(f"import {state} from './{state}.geojson?url';\n")
    f.write('\nexport default {\n')
    for state in states:
        f.write(f"  {state}: {state},\n")
    f.write("};\n")