maps.chandlerswift.com/layers/dot-cams/travel-iq/get_data.py

93 lines
2.5 KiB
Python
Raw Normal View History

2024-01-30 00:37:34 -06:00
#!/usr/bin/python3
import requests
import json
# Map of state name -> TravelIQ base URL, maintained by hand in states.json.
with open('states.json') as f:
    states = json.load(f)
# Fetch every camera for every state and write one GeoJSON FeatureCollection
# per state into data/<state>.geojson.
for state, baseURL in states.items():
    # DataTables-style request payload expected by the TravelIQ "GetData"
    # endpoint. Built fresh per state so the pagination offset resets.
    query = {
        "columns": [  # no clue what any of this is, so here it stays
            {
                "data": None,
                "name": "",
            },
            {
                "name": "sortId",
                "s": True,
            },
            {
                "name": "region",
                "s": True,
            },
            {
                "name": "county",
                "s": True,
            },
            {
                "name": "roadway",
                "s": True,
            },
            {
                "name": "description1",
            },
            {
                "data": 6,
                "name": "",
            },
        ],
        "start": 0,     # pagination offset, advanced by `length` each page
        "length": 100,  # page size
    }
    cameras = []
    available_cameras = 999_999  # lots; replaced by the server's real total below
    while len(cameras) < available_cameras:
        res = requests.get(f"{baseURL}/List/GetData/Cameras", {
            "query": json.dumps(query),
            "lang": "en",
        })
        res.raise_for_status()
        res = res.json()
        available_cameras = res['recordsTotal']
        if not res['data']:
            # Defensive: if the server claims more records than it actually
            # serves, an empty page would otherwise loop here forever.
            break
        for c in res['data']:
            cameras.append({
                "type": "Feature",
                "geometry": {
                    "type": "Point",
                    "coordinates": [c['longitude'], c['latitude']],  # yes, [lon, lat] since it's [x, y]
                },
                "properties": {
                    'address': c['displayName'],
                    'website': c['videoUrl'],
                    'originalData': c,
                },
            })
        query['start'] += 100
    geojson = {
        "type": "FeatureCollection",
        "features": cameras,
    }
    with open(f"data/{state}.geojson", "w") as f:
        f.write(json.dumps(geojson))
    print(f"{len(cameras)} locations found for {state}")
# hack hack hack
#
# If I write this to one big file, I can't take advantage of any lazy loading
# for performance reasons, so I'm constrained to having a bunch of files. I
# can't programmatically import those, since es6 imports don't allow for that.
# So, codegen it is (and fairly gross codegen at that!).
# Generate data/states.js: one ES6 import per state (GeoJSON as a URL asset),
# then a default export mapping each state name to its imported URL.
with open('data/states.js', 'w') as f:
    f.writelines(f"import {state} from './{state}.geojson?url';\n" for state in states)
    f.write('\nexport default {\n')
    f.writelines(f" {state}: {state},\n" for state in states)
    f.write("};\n")