#!/usr/bin/python3
"""Scrape traffic-camera listings from state 511 sites into GeoJSON.

For each state, pages through the site's DataTables-style ``Cameras``
endpoint, classifies each camera as video or still, and writes a
``data/<State>.geojson`` FeatureCollection.  Finally codegens
``data/states.js`` so the frontend can lazily import each state file.
"""
import requests
import json

# State name -> base URL (note: every URL ends with a trailing slash,
# which the request paths below rely on).
states = {
    "Alaska": "https://511.alaska.gov/",
    "Arizona": "https://az511.com/",
    "Connecticut": "https://ctroads.org/",
    "Georgia": "https://511ga.org/",
    "Idaho": "https://511.idaho.gov/",
    "Louisiana": "https://www.511la.org/",
    "Nevada": "https://www.nvroads.com/",
    "NewEngland": "https://newengland511.org/",
    "NewYork": "https://www.511ny.org/",
    "Utah": "https://www.udottraffic.utah.gov/",
    "Wisconsin": "https://511wi.gov/",
}


def _ct_column(data, name, searchable, orderable):
    """Build one DataTables column descriptor for the Connecticut payload."""
    return {
        "data": data,
        "name": name,
        "searchable": searchable,
        "orderable": orderable,
        "search": {"value": "", "regex": False},
    }


for state, baseURL in states.items():
    # DataTables-ish server query used by most of the 511 sites.
    query = {
        "columns": [  # no clue what any of this is, so here it stays
            {
                "data": None,
                "name": "",
            },
            {
                "name": "sortId",
                "s": True,
            },
            {
                "name": "region",
                "s": True,
            },
            {
                "name": "county",
                "s": True,
            },
            {
                "name": "roadway",
                "s": True,
            },
            {
                "name": "description1",
            },
            {
                "data": 6,
                "name": "",
            },
        ],
        "start": 0,
        "length": 100,
    }
    video_camera_count = 0
    still_camera_count = 0
    cameras = []
    available_cameras = 999_999  # lots

    while len(cameras) < available_cameras:
        if state == "Connecticut":  # gotta be a special snowflake I guess?
            # BUG FIX: this POST previously hard-coded start=0/length=10, so
            # every loop iteration re-fetched the same first ten cameras and
            # the output was padded with duplicates.  Page through by using
            # the number of cameras collected so far as the offset.
            # NOTE(review): assumes the CT endpoint honors length=100 like
            # the others — confirm against a live response.
            ct_query = {
                "draw": 1,
                "columns": [
                    _ct_column("sortId", "sortId", True, True),
                    _ct_column("cityName", "cityName", True, True),
                    _ct_column("roadway", "roadway", True, True),
                    _ct_column("sortIdDisplay", "sortIdDisplay", True, True),
                    _ct_column("description1", "description1", False, True),
                    _ct_column("direction", "direction", False, False),
                    _ct_column(6, "", False, False),
                ],
                "order": [
                    {"column": 0, "dir": "asc"},
                    {"column": 1, "dir": "asc"},
                ],
                "start": len(cameras),
                "length": 100,
                "search": {"value": "", "regex": False},
            }
            res = requests.post(f"{baseURL}/List/GetData/Cameras", json=ct_query)
        else:
            res = requests.get(f"{baseURL}/List/GetData/Cameras", {
                "query": json.dumps(query),
                "lang": "en",
            })
        res.raise_for_status()
        res = res.json()
        available_cameras = res['recordsTotal']

        # Defensive: if the server over-reports recordsTotal and hands back
        # an empty page, bail out instead of looping forever.
        if not res['data']:
            break

        for c in res['data']:
            if 'videoUrl' in c and isinstance(c['videoUrl'], list):
                # LA returns multiple (identical?) streams
                src = c['videoUrl'][0]
                video_camera_count += 1
            elif 'videoUrl' in c and c['videoUrl'] and c['videoUrl'] != '0':
                # Yeah, Idaho has a single camera where videoURL = '0'. Nice.
                src = c['videoUrl']
                video_camera_count += 1
            else:
                # No usable stream URL: fall back to the still-image endpoint.
                src = baseURL + "map/Cctv/" + c['id']
                still_camera_count += 1
            cameras.append({
                "type": "Feature",
                "geometry": {
                    "type": "Point",
                    "coordinates": [c['longitude'], c['latitude']],  # yes, [lon, lat] since it's [x, y]
                },
                "properties": {
                    'name': c['displayName'],
                    'views': [{
                        'hasVideo': 'videoUrl' in c and bool(c['videoUrl']),
                        'src': src,
                    }],
                },
            })
        query['start'] += 100

    geojson = {
        "type": "FeatureCollection",
        "features": cameras,
    }
    with open(f"data/{state}.geojson", "w") as f:
        f.write(json.dumps(geojson))

    print(f"{len(cameras)} locations found for {state}")
    print(f"{state}: {still_camera_count} photo + {video_camera_count} video cameras")

# hack hack hack
#
# If I write this to one big file, I can't take advantage of any lazy loading
# for performance reasons, so I'm constrained to having a bunch of files. I
# can't programmatically import those, since es6 imports don't allow for that.
# So, codegen it is (and fairly gross codegen at that!).
with open('data/states.js', 'w') as f:
    for state in states:
        f.write(f"import {state} from './{state}.geojson?url';\n")
    f.write('\nexport default {\n')
    for state in states:
        f.write(f"    {state}: {state},\n")
    f.write("};\n")