Alter 5 files

Add `formattedSchedules.json`
Add `requirements.txt`
Add `scheduleExport.py`
Add `stations.json`
Add `__main__.py`
akp 2023-02-12 12:11:24 +00:00
parent 0a66e36c85
commit d20ccc03f4
5 changed files with 3027864 additions and 0 deletions

3014814
pivot/formattedSchedules.json Normal file

File diff suppressed because it is too large.

1
pivot/requirements.txt Normal file

@@ -0,0 +1 @@
flask

97
pivot/scheduleExport.py Normal file

@@ -0,0 +1,97 @@
#!/usr/bin/env python3
import json
import sys
import datetime

import requests


def process_file(fname: str):
    """Read a JSON-lines schedule dump and write formattedSchedules.json and stations.json."""
    tiploc_to_crs = {}
    res = []
    with open(fname) as f:
        while True:
            line = f.readline()
            if line == "":
                # EOF just spits out empty lines
                break
            line = json.loads(line)
            if "TiplocV1" in line:
                # Build a mapping from TIPLOC codes to CRS station codes.
                line = line["TiplocV1"]
                if line["crs_code"] is not None:
                    tiploc_to_crs[line["tiploc_code"]] = line["crs_code"]
                continue
            if "JsonScheduleV1" not in line:
                continue
            line = line["JsonScheduleV1"]
            if line.get("train_status", "") != "P":
                continue
            # Skip schedules that have not started yet or have already expired.
            start_date = datetime.datetime.strptime(line["schedule_start_date"], "%Y-%m-%d")
            end_date = datetime.datetime.strptime(line["schedule_end_date"], "%Y-%m-%d")
            now = datetime.datetime.now()
            if start_date - now > datetime.timedelta(0) or end_date - now < datetime.timedelta(0):
                continue
            output = {}
            output["days"] = line["schedule_days_runs"]
            output["toc"] = line["atoc_code"]
            output["signallingID"] = line["schedule_segment"]["signalling_id"]
            stops = []
            for schloc in line["schedule_segment"]["schedule_location"]:
                tiploc = schloc["tiploc_code"]
                if tiploc not in tiploc_to_crs:
                    print(f"WARNING: no CRS for TIPLOC {tiploc}", file=sys.stderr)
                    continue
                pubdep = schloc.get("public_departure")
                pubarr = schloc.get("public_arrival")
                if pubdep is None and pubarr is None:
                    # Locations with no public arrival or departure time are not stops.
                    continue
                stops.append({
                    "time": pubdep if pubdep is not None else pubarr,
                    "crs": tiploc_to_crs[tiploc],
                })
            output["stops"] = stops
            if len(stops) != 0:
                res.append(output)
    with open("formattedSchedules.json", "w") as f:
        json.dump(res, f, indent=1)
    with open("stations.json", "w") as f:
        json.dump(fetch_crs_info(tiploc_to_crs.values()), f, indent=1)


def fetch_crs_info(all_crs):
    """Fetch coordinates and names for each CRS code from the Overpass API."""
    res = {}
    req_body = "[out:json];" + ("".join(map(lambda x: f'node["ref:crs"="{x}"]; out;', all_crs)))
    r = requests.post("http://overpass-api.de/api/interpreter", data={"data": req_body})
    r.raise_for_status()
    j = r.json()
    for x in j["elements"]:
        res[x["tags"]["ref:crs"]] = {"lat": x["lat"], "lon": x["lon"], "name": x["tags"]["name"]}
    return res


if __name__ == "__main__":
    FNAME = sys.argv[1]
    process_file(FNAME)
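
For context, a minimal sketch of how the two files written by scheduleExport.py can be consumed. The key names ("days", "toc", "signallingID", "stops", "time", "crs", "lat", "lon", "name") come from the script above; the helper itself is hypothetical and not part of this commit.

#!/usr/bin/env python3
# readSchedules.py - hypothetical helper, not part of this commit.
import json

with open("formattedSchedules.json") as f:
    schedules = json.load(f)  # list of {"days", "toc", "signallingID", "stops"}

with open("stations.json") as f:
    stations = json.load(f)   # CRS code -> {"lat", "lon", "name"}

# Print the calling pattern of the first exported schedule.
first = schedules[0]
print(f'{first["toc"]} {first["signallingID"]} (days: {first["days"]})')
for stop in first["stops"]:
    station = stations.get(stop["crs"], {})
    print(f'  {stop["time"]}  {station.get("name", stop["crs"])}')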

12937
pivot/stations.json Normal file

File diff suppressed because it is too large.

15
pivot/web/__main__.py Normal file

@@ -0,0 +1,15 @@
import flask


class Endpoints:
    @staticmethod
    def search():
        day = flask.request.args.get("day", 0)
        # The diff is truncated from here on; the parameter name and the JSON
        # response below are assumptions added so the sketch runs end to end.
        from_station = flask.request.args.get("from")
        return flask.jsonify({"day": day, "from": from_station})


if __name__ == "__main__":
    app = flask.Flask(__name__)
    # Route /search to the handler above (the original call was left incomplete).
    app.add_url_rule("/search", view_func=Endpoints.search)
    app.run(port=8080, debug=True, host="127.0.0.1")
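
Assuming the handler above returns JSON as in the completed sketch, the development server can be exercised with a client call like the following; the "from" parameter and the "KGX" value are purely illustrative, since the handler is truncated in this diff.

# querySearch.py - hypothetical client, not part of this commit.
import requests

r = requests.get(
    "http://127.0.0.1:8080/search",
    params={"day": 0, "from": "KGX"},  # illustrative query parameters
)
r.raise_for_status()
print(r.json())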