commit e815bbfd2ef4f49c4e3a757739a60d802bcc53f5
Author: Leah (ctucx) <leah@ctu.cx>
Date: Thu, 18 Feb 2021 22:49:23 +0100
init (move hafas-library from oeffisearch, oeffi-web)
22 files changed, 984 insertions(+), 0 deletions(-)
diff --git a/hafas.nim b/hafas.nim
@@ -0,0 +1,7 @@
+import hafas/api/journeys
+import hafas/api/suggestions
+import hafas/api/refresh_journey
+
+export journeys
+export suggestions
+export refresh_journey
diff --git a/hafas.nimble b/hafas.nimble
@@ -0,0 +1,12 @@
+# Package
+
+version = "0.1.0"
+author = "Milan Pässler (petabyteboy)"
+description = "a hafas-client library written in nim"
+license = "GPL-3.0"
+srcDir = "./"
+
+
+# Dependencies
+
+requires "nim >= 1.4.2"
diff --git a/hafas/api/journeys.nim b/hafas/api/journeys.nim
@@ -0,0 +1,80 @@
+import ../parse/products
+import ../parse/point
+import ../parse/accessibility
+import ../parse/journeys_response
+import ../types
+import ../util
+import json
+import times
+import asyncdispatch
+import options
+
+proc journeys*(params: JourneysParams): Future[JourneysResponse] {.async.} =
+  var `when` = now().toTime()
+  var isDeparture = true
+  if params.departure.isSome:
+    `when` = params.departure.get.fromUnix
+  elif params.arrival.isSome:
+    `when` = params.arrival.get.fromUnix
+    isDeparture = false
+
+  let req = %* {
+    "cfg": {
+      "polyEnc": "GPA"
+    },
+    "meth": "TripSearch",
+    "req": {
+      "ctxScr": nil,
+      "getPasslist": params.stopovers.get(false),
+      "maxChg": params.transfers.get(-1),
+      "minChgTime": params.transferTime.get(0),
+      "numF": params.results.get(5),
+      "depLocL": [ params.fromPoint.formatPoint() ],
+      "viaLocL": [],
+      "arrLocL": [ params.toPoint.formatPoint() ],
+      "jnyFltrL": [
+        {
+          "type": "PROD",
+          "mode": "INC",
+          "value": $formatProducts(params.products.get(parseProducts(1023))),
+        },
+        {
+          "type": "META",
+          "mode": "INC",
+          "meta": formatAccessibility(params.accessibility.get(Accessibility.none)),
+        }
+      ],
+      "gisFltrL": [],
+      "getTariff": params.tickets.get(true),
+      "ushrp": params.startWithWalking.get(true),
+      "getPT": true,
+      "getIV": false,
+      "getPolyline": params.polylines.get(false),
+      "outFrwd": isDeparture,
+      "outDate": `when`.format("yyyyMMdd"),
+      "outTime": `when`.format("HHmmss"),
+      "trfReq": {
+        "jnyCl": 2,
+        "tvlrProf": [
+          {
+            "type": "E",
+            "redtnCard": nil
+          }
+        ],
+        "cType": "PK"
+      }
+    }
+  }
+
+  if params.laterRef.isSome:
+    req["req"]["ctxScr"] = %* params.laterRef.get
+  elif params.earlierRef.isSome:
+    req["req"]["ctxScr"] = %* params.earlierRef.get
+
+  if params.viaPoint.isSome:
+    let viaPoint = params.viaPoint.get
+    req["req"]["viaLocL"] = %* [{ "loc": viaPoint.formatPoint() }]
+
+  let data = await request(req)
+
+  return parseJourneysResponse(data)
diff --git a/hafas/api/refresh_journey.nim b/hafas/api/refresh_journey.nim
@@ -0,0 +1,23 @@
+import ../types
+import ../parse/journeys_response
+import ../util
+import json
+import asyncdispatch
+import options
+
+proc refreshJourney*(params: RefreshJourneyParams): Future[Journey] {.async.} =
+  let req = %* {
+    "cfg": {
+    },
+    "meth": "Reconstruction",
+    "req": {
+      "ctxRecon": params.refreshToken,
+      "getIST": true,
+      "getPasslist": params.stopovers.get(false),
+      "getPolyline": params.polylines.get(false),
+      "getTariff": params.tickets.get(false),
+    }
+  }
+
+  let data = await request(req)
+  return parseJourneysResponse(data, true).journeys[0]
diff --git a/hafas/api/suggestions.nim b/hafas/api/suggestions.nim
@@ -0,0 +1,29 @@
+import ../../../types
+import ../parse/point
+import ../util
+import json
+import asyncdispatch
+import sequtils
+import options
+
+proc suggestions*(params: SuggestionsParams): Future[seq[Point]] {.async.} =
+  let req = %* {
+    "cfg": {
+      "polyEnc": "GPA"
+    },
+    "meth": "LocMatch",
+    "req": {
+      "input": {
+        "loc": {
+          "type": "ALL",
+          "name": params.query & "?"
+        },
+        "maxLoc": params.results.get(10),
+        "field": "S"
+      }
+    }
+  }
+  let data = await request(req)
+  let locs = data["res"]["match"]["locL"].getElems()
+
+  return locs.map(parsePoint)
diff --git a/hafas/errors.nim b/hafas/errors.nim
@@ -0,0 +1,199 @@
+import asynchttpserver
+import ../../types
+
+proc parseError*(errstr: string): hafasException =
+  case errstr:
+  of "H_UNKNOWN":
+    return hafasException(
+      code: SERVER_ERROR,
+      message: "unknown internal error",
+      statusCode: Http500,
+    )
+  of "AUTH":
+    return hafasException(
+      code: ACCESS_DENIED,
+      message: "invalid or missing authentication data",
+      statusCode: Http401,
+    )
+  of "R0001":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "unknown method",
+      statusCode: Http400,
+    )
+  of "R0002":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "invalid or missing request parameters",
+      statusCode: Http400,
+    )
+  of "R0007":
+    return hafasException(
+      code: SERVER_ERROR,
+      message: "internal communication error",
+      statusCode: Http500,
+    )
+  of "R5000":
+    return hafasException(
+      code: ACCESS_DENIED,
+      message: "access denied",
+      statusCode: Http401,
+    )
+  of "S1":
+    return hafasException(
+      code: SERVER_ERROR,
+      message: "journeys search: a connection to the backend server couldn't be established",
+      statusCode: Http503,
+    )
+  of "LOCATION":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "location/stop not found",
+      statusCode: Http400,
+    )
+  of "H390":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: departure/arrival station replaced",
+      statusCode: Http400,
+    )
+  of "H410":
+    return hafasException(
+      code: SERVER_ERROR,
+      message: "journeys search: incomplete response due to timetable change"
+    )
+  of "H455":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: prolonged stop",
+      statusCode: Http400,
+    )
+  of "H460":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: stop(s) passed multiple times",
+      statusCode: Http400,
+    )
+  of "H500":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: too many trains, connection is not complete",
+      statusCode: Http400,
+    )
+  of "H890":
+    return hafasException(
+      code: NOT_FOUND,
+      message: "journeys search unsuccessful",
+      statusCode: Http404,
+    )
+  of "H891":
+    return hafasException(
+      code: NOT_FOUND,
+      message: "journeys search: no route found, try via an intermediate station",
+      statusCode: Http404,
+    )
+  of "H892":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: query too complex, try fewer intermediate stations",
+      statusCode: Http400,
+    )
+  of "H895":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: departure & arrival are too near",
+      statusCode: Http400,
+    )
+  of "H899":
+    return hafasException(
+      code: SERVER_ERROR,
+      message: "journeys search unsuccessful or incomplete due to timetable change"
+    )
+  of "H900":
+    return hafasException(
+      code: SERVER_ERROR,
+      message: "journeys search unsuccessful or incomplete due to timetable change"
+    )
+  of "H9220":
+    return hafasException(
+      code: NOT_FOUND,
+      message: "journeys search: no stations found close to the address",
+      statusCode: Http400,
+    )
+  of "H9230":
+    return hafasException(
+      code: SERVER_ERROR,
+      message: "journeys search: an internal error occurred",
+      statusCode: Http500,
+    )
+  of "H9240":
+    return hafasException(
+      code: NOT_FOUND,
+      message: "journeys search unsuccessful",
+      statusCode: Http404,
+    )
+  of "H9250":
+    return hafasException(
+      code: SERVER_ERROR,
+      message: "journeys search: leg query interrupted",
+      statusCode: Http500,
+    )
+  of "H9260":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: unknown departure station",
+      statusCode: Http400,
+    )
+  of "H9280":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: unknown intermediate station",
+      statusCode: Http400,
+    )
+  of "H9300":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: unknown arrival station",
+      statusCode: Http400,
+    )
+  of "H9320":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: the input is incorrect or incomplete",
+      statusCode: Http400,
+    )
+  of "H9360":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: error in a data field",
+      statusCode: Http400,
+    )
+  of "H9380":
+    return hafasException(
+      code: INVALID_REQUEST,
+      message: "journeys search: departure/arrival/intermediate station defined more than once",
+      statusCode: Http400,
+    )
+  of "SQ001":
+    return hafasException(
+      code: SERVER_ERROR,
+      message: "no departures/arrivals data available",
+      statusCode: Http503,
+    )
+  of "SQ005":
+    return hafasException(
+      code: NOT_FOUND,
+      message: "no trips found",
+      statusCode: Http404,
+    )
+  of "TI001":
+    return hafasException(
+      code: SERVER_ERROR,
+      message: "no trip info available",
+      statusCode: Http503,
+    )
+  else: return hafasException(
+    code: SERVER_ERROR,
+    message: "unknown HAFAS exception " & errstr,
+    statusCode: Http500,
+  )
diff --git a/hafas/parse/accessibility.nim b/hafas/parse/accessibility.nim
@@ -0,0 +1,6 @@
+import ../types
+
+proc formatAccessibility*(a: Accessibility): string =
+  if a == none: result = "notBarrierfree"
+  elif a == partial: result = "limitedBarrierfree"
+  elif a == complete: result = "completeBarrierfree"
diff --git a/hafas/parse/date.nim b/hafas/parse/date.nim
@@ -0,0 +1,31 @@
+import ../types
+import options
+import strutils
+import times
+
+proc parseDate*(common: CommonData, time: Option[string], tzoffset: Option[int]): Option[int64] =
+  if time.isNone:
+    return none(int64)
+
+  let tzoffset = tzoffset.get(60) # FIXME: sometimes no time zone is given. how to deal with that?
+  let date = common.dateStr
+  var time = time.get
+  var dayoffset = 0
+
+  if time.len == 8:
+    dayoffset = parseInt(time[0..1])
+    time = time[2..7]
+
+  var tzoffhours = align($(int(tzoffset / 60)), 2, '0')
+  var tzoffmins = align($(tzoffset mod 60), 2, '0')
+  var tzoff = tzoffhours & ":" & tzoffmins
+  if tzoffset >= 0:
+    tzoff = "+" & tzoff
+
+  let datestr = date & time & tzoff
+  let dateformat = "yyyyMMddHHmmsszzz"
+  var dt = datestr.parse(dateformat)
+
+  dt = dt + initTimeInterval(days = dayoffset)
+  return some(dt.toTime().toUnix())
+
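HAFAS times are plain "HHmmss" strings relative to the journey's date; an eight-character time carries a day offset in its first two digits. A minimal sketch of what parseDate returns, assuming this repository's module layout (CommonData only needs its dateStr field here):

import hafas/parse/date
import hafas/types
import options

let common = CommonData(dateStr: "20210218")        # service day 2021-02-18
echo parseDate(common, some("224900"), some(60))    # 22:49:00 at UTC+01:00 on that day
echo parseDate(common, some("01003000"), some(60))  # leading "01" = next day: 00:30:00 on 2021-02-19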
diff --git a/hafas/parse/journey.nim b/hafas/parse/journey.nim
@@ -0,0 +1,38 @@
+import ../types
+import json
+import leg
+import options
+
+proc mkParseJourney*(common: CommonData): proc =
+  proc parseJourney(j: JsonNode): Journey =
+    var common = common
+    common.dateStr = j{"date"}.getStr()
+
+    if j{"trfRes"}{"statusCode"}.getStr == "OK":
+      result.price = some(Price(
+        amount: j["trfRes"]["fareSetL"][0]["fareL"][0]["prc"].getInt / 100,
+        currency: some("Euro"),
+      ))
+
+    result.refreshToken = j{"ctxRecon"}.getStr()
+    result.legs = j{"secL"}.getElems().map(mkParseLeg(common))
+    result.lastUpdated = common.timeStamp
+
+    # combine walking legs
+    var i = -1
+    var firstWalking = -1
+    while true:
+      inc(i)
+      if i >= len(result.legs): break
+      if result.legs[i].isWalking:
+        if firstWalking == -1:
+          firstWalking = i
+        else:
+          result.legs[firstWalking].arrival = result.legs[i].arrival
+          result.legs[firstWalking].distance.get += result.legs[i].distance.get
+          result.legs.delete(i)
+          dec(i)
+      else:
+        firstWalking = -1
+
+  return parseJourney
diff --git a/hafas/parse/journeys_response.nim b/hafas/parse/journeys_response.nim
@@ -0,0 +1,27 @@
+import ../types
+import ./remark
+import ./point
+import ./operator
+import ./journey
+import ./line
+import ./polyline
+import json
+import sequtils
+import strutils
+
+proc parseJourneysResponse*(data: JsonNode, isRefresh: bool = false): JourneysResponse =
+  let points = map(data["res"]["common"]["locL"].getElems(), parsePoint)
+  let operators = map(data["res"]["common"]["opL"].getElems(), parseOperator)
+  let remarks = map(data["res"]["common"]["remL"].getElems(), parseRemark)
+  let lines = data["res"]["common"]["prodL"]
+  let polylines = map(data["res"]["common"]["polyL"].getElems(), mkParsePolyline(points))
+  let timestamp = parseInt(data["res"]["planrtTS"].getStr())
+  let common = CommonData(points: points, operators: operators, remarks: remarks, lines: lines, polylines: polylines, timestamp: timestamp)
+
+  result.journeys = data["res"]["outConL"].getElems().map(mkParseJourney(common))
+  if not isRefresh:
+    if data["res"].hasKey("outCtxScrB"):
+      result.earlierRef = data["res"]["outCtxScrB"].getStr()
+
+    if data["res"].hasKey("outCtxScrF"):
+      result.laterRef = data["res"]["outCtxScrF"].getStr()
diff --git a/hafas/parse/leg.nim b/hafas/parse/leg.nim
@@ -0,0 +1,67 @@
+import ../types
+import ./stopover
+import ./msg
+import ./date
+import ./line
+import json
+import options
+
+proc parseLegPart(common: CommonData, lp: JsonNode): LegPart =
+  let h = lp.to(HafasStopParams)
+  let plannedDepartureTime = parseDate(common, h.dTimeS, h.dTZOffset)
+  let plannedArrivalTime = parseDate(common, h.aTimeS, h.aTZOffset)
+
+  if h.dPlatfS.isSome: result.plannedPlatform = h.dPlatfS
+  elif h.aPlatfS.isSome: result.plannedPlatform = h.aPlatfS
+  if h.dPlatfR.isSome: result.prognosedPlatform = h.dPlatfR
+  elif h.aPlatfR.isSome: result.prognosedPlatform = h.aPlatfR
+  if h.dTimeR.isSome: result.prognosedTime = parseDate(common, h.dTimeR, h.dTZOffset)
+  elif h.aTimeR.isSome: result.prognosedTime = parseDate(common, h.aTimeR, h.aTZOffset)
+
+  if plannedDepartureTime.isSome: result.plannedTime = plannedDepartureTime.get
+  elif plannedArrivalTime.isSome: result.plannedTime = plannedArrivalTime.get
+  else: raise newException(CatchableError, "missing departure and arrival time for leg")
+
+  result.point = common.points[h.locX.get]
+
+proc mkParseLeg*(common: CommonData): proc =
+  proc parseLeg(l: JsonNode): Leg =
+
+    if l{"jny"}{"polyG"}{"polyXL"}.getElems().len() > 0:
+      result.polyline = some(Polyline(
+        type: "FeatureCollection",
+      ))
+      for n in l{"jny"}{"polyG"}{"polyXL"}.getElems():
+        result.polyline.get.features &= common.polylines[n.getInt()].features
+
+    let typeStr = l{"type"}.getStr()
+    if typeStr == "JNY":
+      result.direction = some(l{"jny"}{"dirTxt"}.getStr())
+      result.tripId = some(l{"jny"}{"jid"}.getStr())
+      result.line = common.parseLine(l["jny"]["prodX"].getInt())
+
+      let stopovers = l{"jny"}{"stopL"}.getElems()
+      if stopovers.len > 0:
+        result.stopovers = some(stopovers.map(mkParseStopover(common)))
+
+      let remarks = l{"jny"}{"msgL"}.getElems()
+      if remarks.len > 0:
+        result.remarks = some(remarks.map(mkParseMsg(common)))
+
+    elif typeStr == "WALK":
+      result.isWalking = true
+      result.distance = some(l{"gis"}{"dist"}.getInt())
+
+    elif typeStr == "TRSF" or typeStr == "DEVI":
+      result.isTransfer = true
+
+    else:
+      raise newException(CatchableError, "Unimplemented hafas leg type: " & typeStr)
+
+    result.departure = common.parseLegPart(l{"dep"})
+    result.arrival = common.parseLegPart(l{"arr"})
+
+    result.cancelled = l{"dep"}{"dCncl"}.getBool(false) or l{"arr"}{"aCncl"}.getBool(false)
+
+  return parseLeg
+
diff --git a/hafas/parse/line.nim b/hafas/parse/line.nim
@@ -0,0 +1,71 @@
+import ../types
+import ../util
+import ./products
+import json
+import options
+import tables
+import httpClient
+import asyncdispatch
+
+
+var trainTypes = initTable[string, string]()
+var trainTypesShort = initTable[string, string]()
+
+proc fetchTrainTypes() {.async.} =
+  var client = newAsyncHttpClient()
+  let resp = await client.getContent("https://lib.finalrewind.org/dbdb/ice_type.json")
+  let data = parseJson(resp)
+  for key, info in pairs(data):
+    if info{"type"}.getStr != "" and info{"type"}.getStr != "EC" and info{"type"}.getStr != "IC":
+      trainTypes[key] = info{"type"}.getStr
+    if info{"short"}.getStr != "":
+      trainTypesShort[key] = info{"short"}.getStr
+
+asyncCheck fetchTrainTypes()
+
+
+proc parseLine*(common: CommonData, i: int): Option[Line] =
+  let l = common.lines[i]
+
+  # unparsable
+  if l{"cls"}.getInt == 0:
+    return options.none(Line)
+
+  let line = l.to(HafasProd)
+  var res = Line()
+
+  res.name = line.name
+  res.product = parseProduct(line.cls)
+  res.tripNum = line.prodCtx.num
+
+  if not isNone(line.prodCtx.catOut):
+    res.productName = get(line.prodCtx.catOut)
+  else:
+    res.productName = "?"
+
+  res.fullProductName = line.prodCtx.catOutL
+  res.id = slug(line.prodCtx.lineId.get(line.name))
+
+  if line.opX.isSome:
+    res.operator = some(common.operators[line.opX.get])
+
+  # DB
+
+  if res.productName == "IC" or res.productName == "ICE" or res.productName == "EC" or res.productName == "ECE":
+    if trainTypes.contains(res.tripNum) and trainTypes[res.tripNum] != res.productName:
+      res.trainType = some(trainTypes[res.tripNum])
+    if trainTypesShort.contains(res.tripNum):
+      res.trainTypeShort = some(trainTypesShort[res.tripNum])
+
+  if line.nameS.isSome and (res.product == bus or res.product == tram or res.product == ferry):
+    res.name = line.nameS.get
+
+  if line.addName.isSome:
+    # swap name and addName
+    res.additionalName = some(res.name)
+    res.name = line.addName.get
+
+  # End DB
+
+  res.mode = MODES[int(res.product)]
+  return some(res)
diff --git a/hafas/parse/msg.nim b/hafas/parse/msg.nim
@@ -0,0 +1,12 @@
+import ../types
+import json
+
+proc mkParseMsg*(common: CommonData): proc =
+  proc parseMsg(m: JsonNode): Remark =
+    let typeStr = m{"type"}.getStr()
+    if typeStr != "REM":
+      raise newException(CatchableError, "Unimplemented hafas msg type: " & typeStr)
+    return common.remarks[m{"remX"}.getInt()]
+
+  return parseMsg
+
diff --git a/hafas/parse/operator.nim b/hafas/parse/operator.nim
@@ -0,0 +1,9 @@
+import ../../../types
+import ../util
+import json
+
+proc parseOperator*(op: JsonNode): Operator =
+  return (%* {
+    "name": op{"name"},
+    "id": %* slug(op{"name"}.getStr()),
+  }).to(Operator)
diff --git a/hafas/parse/point.nim b/hafas/parse/point.nim
@@ -0,0 +1,78 @@
+import ../types
+import ./products
+import json
+import options
+import tables
+
+proc parsePoint*(loc: JsonNode): Point =
+  let typeStr = loc{"type"}.getStr()
+  if typeStr == "S":
+    result.stop = some(Stop(
+      id: loc{"extId"}.getStr(),
+      name: loc{"name"}.getStr(),
+      location: Location(
+        latitude: loc{"crd"}{"y"}.getInt() / 1000000,
+        longitude: loc{"crd"}{"x"}.getInt() / 1000000,
+      ),
+      products: parseProducts(loc{"pCls"}.getInt()),
+    ))
+  elif typeStr == "P":
+    result.location = some(Location(
+      id: some(loc{"extId"}.getStr()),
+      name: some(loc{"name"}.getStr()),
+      latitude: loc{"crd"}{"y"}.getInt() / 1000000,
+      longitude: loc{"crd"}{"x"}.getInt() / 1000000,
+    ))
+  elif typeStr == "A":
+    result.location = some(Location(
+      address: some(loc{"name"}.getStr()),
+      latitude: loc{"crd"}{"y"}.getInt() / 1000000,
+      longitude: loc{"crd"}{"x"}.getInt() / 1000000,
+    ))
+  else:
+    raise newException(CatchableError, "Unimplemented hafas loc type: " & typeStr)
+
+proc formatLocationIdentifier(d: Table[string, string]): string =
+  for key, val in d:
+    result &= key
+    result &= "="
+    result &= val
+    result &= "@"
+
+proc formatCoord(c: float): string =
+  return $int(c * 1000000)
+
+proc formatPoint*(point: Point): JsonNode =
+  if point.stop.isSome:
+    let stop = point.stop.get
+    return %* {
+      "type": "S",
+      "lid": formatLocationIdentifier({
+        "A": "1",
+        "L": $stop.id,
+      }.toTable),
+    }
+  elif point.location.isSome:
+    let loc = point.location.get
+    if loc.address.isSome:
+      return %* {
+        "type": "A",
+        "lid": formatLocationIdentifier({
+          "A": "2",
+          "O": loc.address.get,
+          "X": formatCoord(loc.longitude),
+          "Y": formatCoord(loc.latitude),
+        }.toTable),
+      }
+    elif loc.name.isSome and loc.id.isSome:
+      return %* {
+        "type": "P",
+        "lid": formatLocationIdentifier({
+          "A": "4",
+          "O": loc.name.get,
+          "L": loc.id.get,
+          "X": formatCoord(loc.longitude),
+          "Y": formatCoord(loc.latitude),
+        }.toTable),
+      }
+  raise newException(CatchableError, "Cannot format HAFAS location")
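For illustration, here is roughly what formatPoint emits for a stop. This is only a sketch: Point, Stop and Location are defined in the shared types module outside this commit, so the constructors below assume the field names used by parsePoint above, and "8011160" is just an example station id (Berlin Hbf).

import hafas/parse/point
import json, options
# the Point/Stop/Location types come from the project's shared `types` module
# (outside this commit); importing it here is assumed to work

let berlin = Point(stop: some(Stop(
  id: "8011160",
  name: "Berlin Hbf",
  location: Location(latitude: 52.5251, longitude: 13.3694),
)))
echo formatPoint(berlin)
# -> {"type":"S","lid":"A=1@L=8011160@"}  (key order depends on Table iteration)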
diff --git a/hafas/parse/polyline.nim b/hafas/parse/polyline.nim
@@ -0,0 +1,73 @@
+import ../types
+import json
+import options
+import math
+
+proc gpsDistance(fromLat: float, fromLon: float, toLat: float, toLon: float): float =
+  proc toRad(x: float): float = x * PI / 180
+  let dLat = toRad(toLat - fromLat)
+  let dLon = toRad(toLon - fromLon)
+  let fromLat = toRad(fromLat)
+  let toLat = toRad(toLat)
+  let a = pow(sin(dLat / 2), 2) + (pow(sin(dLon / 2), 2) * cos(fromLat) * cos(toLat))
+  let c = 2 * arctan2(sqrt(a), sqrt(1 - a))
+  return 6371 * c
+
+proc parseIntegers(str: string): seq[int] =
+  var byte = 0
+  var current = 0
+  var bits = 0
+  for c in str:
+    byte = int(c) - 63
+    current = current or (( byte and 31 ) shl bits)
+    bits += 5
+
+    if byte < 32:
+      if (current and 1) == 1:
+        current = -current
+      current = current shr 1
+
+      result.add(current)
+
+      current = 0
+      bits = 0
+
+proc mkParsePolyline*(points: seq[Point]): proc =
+  proc parsePolyline(l: JsonNode): Polyline =
+    let line = l.to(HafasPolyLine)
+
+    result.type = "FeatureCollection"
+
+    var lat = 0
+    var lon = 0
+
+    let ints = parseIntegers(line.crdEncYX)
+    var i = 0
+    while i < len(ints):
+      lat += ints[i]
+      lon += ints[i+1]
+      result.features.add(Feature(
+        type: "Feature",
+        geometry: FeatureGeometry(
+          type: "Point",
+          coordinates: @[lon / 100000, lat / 100000],
+        ),
+      ))
+      i += 2
+
+    if line.ppLocRefL.isSome:
+      for p in line.ppLocRefL.get:
+        result.features[p.ppIdx].properties = points[p.locX].stop
+
+    # sort out coordinates closer than 5m to their neighbours
+    var j = 1
+    while true:
+      if j >= len(result.features): break
+      let last = result.features[j-1].geometry.coordinates
+      let current = result.features[j].geometry.coordinates
+      if gpsDistance(last[1], last[0], current[1], current[0]) <= 0.005:
+        result.features.delete(j)
+        continue
+      j += 1
+
+  return parsePolyline
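The crdEncYX string uses the same base-63/zig-zag delta scheme as Google's encoded-polyline format. As a sketch, a self-check that could sit at the bottom of this file (parseIntegers is not exported, so it has to live inside the module, e.g. guarded by when isMainModule); the last pair is the classic reference example 38.50000 / -120.20000 scaled by 1e5:

when isMainModule:
  # decoding examples for the zig-zag scheme implemented by parseIntegers above
  assert parseIntegers("A") == @[1]     # 'A' = 65 -> chunk 2 -> delta +1
  assert parseIntegers("@") == @[-1]    # '@' = 64 -> chunk 1 -> delta -1
  assert parseIntegers("_p~iF~ps|U") == @[3850000, -12020000]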
diff --git a/hafas/parse/products.nim b/hafas/parse/products.nim
@@ -0,0 +1,37 @@
+import ../../../types
+import bitops
+
+proc parseProduct*(cls: int): Product =
+  var tmp = cls
+  var res = 0
+  while tmp > 1:
+    tmp = tmp shr 1
+    res += 1
+
+  return Product(res)
+
+proc parseProducts*(pCls: int): Products =
+  return Products(
+    nationalExp: pCls.testBit(0),
+    national: pCls.testBit(1),
+    regionalExp: pCls.testBit(2),
+    regional: pCls.testBit(3),
+    suburban: pCls.testBit(4),
+    bus: pCls.testBit(5),
+    ferry: pCls.testBit(6),
+    subway: pCls.testBit(7),
+    tram: pCls.testBit(8),
+    taxi: pCls.testBit(9),
+  )
+
+proc formatProducts*(p: Products): int =
+  if p.nationalExp: result.setBit(0)
+  if p.national: result.setBit(1)
+  if p.regionalExp: result.setBit(2)
+  if p.regional: result.setBit(3)
+  if p.suburban: result.setBit(4)
+  if p.bus: result.setBit(5)
+  if p.ferry: result.setBit(6)
+  if p.subway: result.setBit(7)
+  if p.tram: result.setBit(8)
+  if p.taxi: result.setBit(9)
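The product filter used by journeys.nim is just this bitmask: parseProducts(1023) enables all ten product classes and formatProducts is its inverse. A small sketch (the import path assumes this repository's layout; the Product/Products types come from the shared types module outside this commit):

import hafas/parse/products

let all = parseProducts(1023)          # bits 0..9 set -> every product enabled
assert all.nationalExp and all.taxi
assert formatProducts(all) == 1023     # round-trips back to the same mask

let regionalOnly = parseProducts(8)    # bit 3 -> regional trains only
assert regionalOnly.regional and not regionalOnly.bus
assert parseProduct(8) == Product(3)   # single class code -> enum by bit index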
diff --git a/hafas/parse/remark.nim b/hafas/parse/remark.nim
@@ -0,0 +1,32 @@
+import ../../../types
+import json
+
+proc parseRemark*(rem: JsonNode): Remark =
+  let typeStr = rem{"type"}.getStr()
+  if typeStr == "M" or typeStr == "P":
+    return (%* {
+      "type": %* "status",
+      "summary": rem{"txtS"},
+      "code": rem{"code"},
+      "text": rem{"txtN"},
+    }).to(Remark)
+  elif typeStr == "L":
+    return (%* {
+      "type": %* "status",
+      "code": %* "alternative-trip",
+      "text": rem{"txtN"},
+      "tripId": rem{"jid"},
+    }).to(Remark)
+  elif typeStr == "A" or typeStr == "I" or typeStr == "H":
+    return (%* {
+      "type": %* "hint",
+      "code": rem{"code"},
+      "text": rem{"txtN"},
+    }).to(Remark)
+  else:
+    # TODO: parse more accurately
+    return (%* {
+      "type": %* "status",
+      "code": rem{"code"},
+      "text": rem{"txtN"},
+    }).to(Remark)
diff --git a/hafas/parse/stopover.nim b/hafas/parse/stopover.nim
@@ -0,0 +1,33 @@
+import ../types
+import ./date
+import options
+import json
+
+proc parseStopoverPart(common: CommonData, mode: string, h: HafasStopParams): StopoverPart =
+
+  if (mode != "arrival"):
+    result.plannedPlatform = h.dPlatfS
+    result.prognosedPlatform = h.dPlatfR
+    result.plannedTime = parseDate(common, h.dTimeS, h.dTZOffset)
+    result.prognosedTime = parseDate(common, h.dTimeR, h.dTZOffset)
+  else:
+    result.plannedPlatform = h.aPlatfS
+    result.prognosedPlatform = h.aPlatfR
+    result.plannedTime = parseDate(common, h.aTimeS, h.aTZOffset)
+    result.prognosedTime = parseDate(common, h.aTimeR, h.aTZOffset)
+
+proc mkParseStopover*(common: CommonData): proc =
+  proc parseStopover(s: JsonNode): Stopover =
+    let typeStr = s{"type"}.getStr()
+    if typeStr != "N":
+      echo pretty(s)
+      raise newException(CatchableError, "Unimplemented hafas stopover type: " & typeStr)
+
+    let h = s.to(HafasStopParams)
+    result.stop = common.points[s{"locX"}.getInt()].stop.get
+    result.cancelled = h.aCncl.get(false) or h.dCncl.get(false)
+    result.arrival = common.parseStopoverPart("arrival", h)
+    result.departure = common.parseStopoverPart("departure", h)
+
+  return parseStopover
+
diff --git a/hafas/todo b/hafas/todo
@@ -0,0 +1,10 @@
+- implement leg parsing with nim types
+- check and pass back hafas errors
+- isAdd (is additional stop)
+- hafas information manager (him) remarks
+- cycle information
+- price information
+- reachable
+- profiles
+- mark current alternatives
+- stations
diff --git a/hafas/types.nim b/hafas/types.nim
@@ -0,0 +1,53 @@
+import ../../types
+import options
+export types
+import json
+
+type
+  CommonData* = object
+    lines*: JsonNode
+    remarks*: seq[Remark]
+    operators*: seq[Operator]
+    points*: seq[Point]
+    polylines*: seq[Polyline]
+    dateStr*: string
+    timestamp*: int64
+
+  HafasStopParams* = object
+    aTimeS*: Option[string]
+    aPlatfS*: Option[string]
+    aTZOffset*: Option[int]
+    aCncl*: Option[bool]
+    aTimeR*: Option[string]
+    aPlatfR*: Option[string]
+    dTimeS*: Option[string]
+    dPlatfS*: Option[string]
+    dTZOffset*: Option[int]
+    dCncl*: Option[bool]
+    dTimeR*: Option[string]
+    dPlatfR*: Option[string]
+    locX*: Option[int]
+
+  HafasProdCtx* = object
+    name*: string
+    num*: string
+    catOut*: Option[string]
+    catOutL*: string
+    lineId*: Option[string]
+
+  HafasProd* = object
+    name*: string
+    cls*: int
+    icoX*: int
+    nameS*: Option[string]
+    addName*: Option[string]
+    opX*: Option[int]
+    prodCtx*: HafasProdCtx
+
+  HafasLocRef* = object
+    ppIdx*: int
+    locX*: int
+
+  HafasPolyline* = object
+    crdEncYX*: string
+    ppLocRefL*: Option[seq[HafasLocRef]]
diff --git a/hafas/util.nim b/hafas/util.nim
@@ -0,0 +1,57 @@
+import httpclient
+import asyncdispatch
+import md5
+import json
+import strutils
+import errors
+
+proc slug*(s: string): string =
+  for c in s:
+    if c.isAlphaNumeric():
+      result &= c.toLowerAscii()
+    else:
+      result &= '-'
+
+proc request*(req: JsonNode): Future[JsonNode] {.async.} =
+  let client = newAsyncHttpClient()
+
+  let body = %*{
+    "lang": "de",
+    "svcReqL": [req]
+  }
+
+  # TODO: move to profile
+  body["svcReqL"][0]["cfg"]["rtMode"] = %* "HYBRID"
+  body["client"] = %* {
+    "id": "DB",
+    "v": "16040000",
+    "type": "IPH",
+    "name": "DB Navigator"
+  }
+  body["ext"] = %* "DB.R19.04.a"
+  body["ver"] = %* "1.16"
+  body["auth"] = %* {
+    "type": "AID",
+    "aid": "n91dB8Z77MLdoR0K"
+  }
+  let salt = "bdI8UVj40K5fvxwf"
+  let bodytext = $body
+  let checksum = $toMD5(bodytext & salt)
+  let url = "https://reiseauskunft.bahn.de/bin/mgate.exe?checksum=" & checksum
+
+  client.headers = newHttpHeaders({
+    "Content-Type": "application/json",
+    "Accept": "application/json",
+    "user-agent": "my-awesome-e5f276d8fe6cprogram",
+  })
+
+  #echo pretty body
+  let req = await client.request(url, httpMethod = HttpPost, body = $body)
+  let resp = await req.body
+  let data = parseJson(resp)
+
+  let error = data{"svcResL"}{0}{"err"}.getStr()
+  if error != "OK":
+    raise parseError(error)
+
+  return data{"svcResL"}{0}
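Putting the pieces together, a hypothetical end-to-end sketch of how the exported API might be called. The parameter types (SuggestionsParams, JourneysParams) and the result fields live in the shared types module outside this commit, so the names below are inferred from the request builders above and an extra import of that module is assumed:

import hafas
import asyncdispatch, options
# plus the shared `types` module (outside this commit) for the *Params objects

proc demo() {.async.} =
  # resolve stop names to Points via the LocMatch endpoint
  let origins = await suggestions(SuggestionsParams(query: "Bremen Hbf"))
  let destinations = await suggestions(SuggestionsParams(query: "Hamburg Hbf"))

  # run a TripSearch between the first matches
  let res = await journeys(JourneysParams(
    fromPoint: origins[0],
    toPoint: destinations[0],
    results: some(3),
  ))
  for j in res.journeys:
    echo j.legs.len, " legs, refresh token: ", j.refreshToken

waitFor demo()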