commit 14c3aafd9fd404142fc56198d714080a4b48f49c
parent 13cdfdb7b8598df1b17eaeab89f332aa2fc51f78
Author: Yureka <yuka@yuka.dev>
Date: Sat, 11 Jun 2022 22:36:11 +0200
javascript refresh tokens
11 files changed, 575 insertions(+), 498 deletions(-)
M  Cargo.lock                            |  21 ---------
M  Cargo.toml                            |   1 -
M  src/main.js                           |  12 +++---------
D  src/refresh_token.js                  |   8 --------
A  src/refresh_token/base64.js           |  90 ++++++++++
A  src/refresh_token/index.js            | 334 ++++++++++++++++
A  src/refresh_token/smaz.js             | 148 ++++++++
D  trainsearch-refresh-token/.gitignore  |   1 -
D  trainsearch-refresh-token/Cargo.toml  |  21 ---------
D  trainsearch-refresh-token/src/lib.rs  | 433 --------------------
diff --git a/Cargo.lock b/Cargo.lock
@@ -796,15 +796,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de"
 
 [[package]]
-name = "smaz"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f9ecc6775a24d971affc5b0e8549207ff53cf80eb661592165d67e0170a1fb0"
-dependencies = [
- "lazy_static",
-]
-
-[[package]]
 name = "socket2"
 version = "0.4.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -935,18 +926,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "trainsearch-refresh-token"
-version = "0.1.0"
-dependencies = [
- "anyhow",
- "base64",
- "chrono",
- "serde",
- "smaz",
- "wasm-bindgen",
-]
-
-[[package]]
 name = "try-lock"
 version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
@@ -2,5 +2,4 @@
 members = [
 	"hafas-rs",
-	"trainsearch-refresh-token"
 ]
 
diff --git a/src/main.js b/src/main.js
@@ -5,19 +5,13 @@ import { journeyView } from './journeyView.js';
 import { initSettings, settings } from './settings.js';
 import { initDataStorage } from './dataStorage.js';
 import { initHafasClient } from './hafas_client.js';
-import { initRefreshTokenLib } from './refresh_token.js';
 import { showDiv, hideDiv, ElementById } from './helpers.js';
 
 (async () => {
 	// read settings from indexeddb
-	await Promise.all([
-		((async () => {
-			await initDataStorage();
-			await initSettings();
-			await initHafasClient(settings.profile || "db");
-		}) ()),
-		initRefreshTokenLib(),
-	]);
+	await initDataStorage();
+	await initSettings();
+	await initHafasClient(settings.profile || "db");
 
 	route(/^\/$/, searchView);
 	route(/^\/([a-zA-Z0-9]+)\/([a-z]+)$/, journeysView);
diff --git a/src/refresh_token.js b/src/refresh_token.js
@@ -1,8 +0,0 @@
-export let hafasToTrainsearch;
-export let trainsearchToHafas;
-
-export const initRefreshTokenLib = async () => {
-	const lib = await import('../trainsearch-refresh-token/pkg/index.js');
-	hafasToTrainsearch = lib.hafasToTrainsearch;
-	trainsearchToHafas = lib.trainsearchToHafas;
-};
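With the wasm bundle gone, call sites no longer need to wait for an async init step before the conversion functions exist, which is why the Promise.all in main.js above could be dropped. A minimal sketch of the new call-site pattern (assuming an ES-module consumer; the importing files themselves are not shown in this diff):

	// Before: await initRefreshTokenLib(), then use its late-bound exports.
	// After: the pure-JS module is available through a plain static import.
	import { hafasToTrainsearch, trainsearchToHafas } from './refresh_token/index.js';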
diff --git a/src/refresh_token/base64.js b/src/refresh_token/base64.js
@@ -0,0 +1,90 @@
+"use strict";
+
+/*\
+|*|
+|*|  Base64 / binary data / UTF-8 strings utilities
+|*|
+|*|  https://developer.mozilla.org/en-US/docs/Web/JavaScript/Base64_encoding_and_decoding
+|*|
+\*/
+
+
+/// Adapted for url-safe character set without padding
+
+/* Array of bytes to Base64 string decoding */
+
+function b64ToUint6 (nChr) {
+
+  return nChr > 64 && nChr < 91 ?
+      nChr - 65
+    : nChr > 96 && nChr < 123 ?
+      nChr - 71
+    : nChr > 47 && nChr < 58 ?
+      nChr + 4
+    : nChr === 45 ?
+      62
+    : nChr === 95 ?
+      63
+    :
+      0;
+
+}
+
+export function base64DecToArr (sBase64, taBytes) {
+
+  var
+    sB64Enc = sBase64.replace(/[^A-Za-z0-9-_]/g, ""),
+    nInLen = sB64Enc.length,
+    nOutLen = nInLen * 3 + 1 >> 2;
+
+  for (var nMod3, nMod4, nUint24 = 0, nOutIdx = 0, nInIdx = 0; nInIdx < nInLen; nInIdx++) {
+    nMod4 = nInIdx & 3;
+    nUint24 |= b64ToUint6(sB64Enc.charCodeAt(nInIdx)) << 6 * (3 - nMod4);
+    if (nMod4 === 3 || nInLen - nInIdx === 1) {
+      for (nMod3 = 0; nMod3 < 3 && nOutIdx < nOutLen; nMod3++, nOutIdx++) {
+        taBytes[nOutIdx] = nUint24 >>> (16 >>> nMod3 & 24) & 255;
+      }
+      nUint24 = 0;
+
+    }
+  }
+
+  return nOutLen;
+}
+
+/* Base64 string to array encoding */
+
+function uint6ToB64 (nUint6) {
+
+  return nUint6 < 26 ?
+      nUint6 + 65
+    : nUint6 < 52 ?
+      nUint6 + 71
+    : nUint6 < 62 ?
+      nUint6 - 4
+    : nUint6 === 62 ?
+      45
+    : nUint6 === 63 ?
+      95
+    :
+      65;
+
+}
+
+export function base64EncArr (aBytes) {
+
+  var nMod3 = 2, sB64Enc = "";
+
+  for (var nLen = aBytes.length, nUint24 = 0, nIdx = 0; nIdx < nLen; nIdx++) {
+    nMod3 = nIdx % 3;
+    nUint24 |= aBytes[nIdx] << (16 >>> nMod3 & 24);
+    if (nMod3 === 2 || aBytes.length - nIdx === 1) {
+      sB64Enc += String.fromCodePoint(uint6ToB64(nUint24 >>> 18 & 63), uint6ToB64(nUint24 >>> 12 & 63), uint6ToB64(nUint24 >>> 6 & 63), uint6ToB64(nUint24 & 63));
+      nUint24 = 0;
+    }
+  }
+
+  //return sB64Enc.substr(0, sB64Enc.length - 2 + nMod3) + (nMod3 === 2 ? '' : nMod3 === 1 ? '=' : '==');
+  return sB64Enc.substr(0, sB64Enc.length - 2 + nMod3);
+
+}
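To illustrate the url-safe, unpadded variant implemented above, here is a small round-trip sketch (the byte values are arbitrary example data; base64DecToArr writes into a caller-provided buffer and returns the decoded length):

	import { base64EncArr, base64DecToArr } from './src/refresh_token/base64.js';

	// 0xfb 0xef 0xff encodes to "--__": '-' and '_' replace '+' and '/', no '=' padding.
	const bytes = new Uint8Array([0xfb, 0xef, 0xff]);
	const encoded = base64EncArr(bytes);

	// Decode back into a caller-provided buffer; the return value is the byte count.
	const out = new Uint8Array(16);
	const len = base64DecToArr(encoded, out);
	console.log(encoded, out.slice(0, len)); // "--__" Uint8Array(3) [ 251, 239, 255 ]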
diff --git a/src/refresh_token/index.js b/src/refresh_token/index.js
@@ -0,0 +1,334 @@
+import { compress, decompress } from './smaz.js';
+import { base64EncArr, base64DecToArr } from './base64.js';
+
+const maxSize = 30000;
+const buffer = new Uint8Array(maxSize);
+let pos = 0;
+let decBuffer;
+const textEncoder = new TextEncoder();
+const textDecoder = new TextDecoder();
+
+const legTypes = [ "T", "G@F", "TF", "D", "W" ];
+
+const addToBuffer = content => {
+	buffer.set(content, pos);
+	pos += content.length * (content.BYTES_PER_ELEMENT || 1);
+};
+
+const encodeStringCompressed = content => {
+	const encoded = textEncoder.encode(content);
+	const compressed = compress(encoded);
+	addToBuffer([compressed.length]);
+	addToBuffer(compressed);
+};
+
+const encodeString = content => {
+	const encoded = textEncoder.encode(content);
+	addToBuffer([encoded.length]);
+	addToBuffer(encoded);
+};
+
+const encodeTrainsearchPlace = data => {
+	addToBuffer([data.placeType]);
+	if (data.placeType === 1) { // Station
+		encodeU32(data.id);
+	} else if (data.placeType === 2) { // Address
+		encodeStringCompressed(data.name);
+		encodeI32(data.x);
+		encodeI32(data.y);
+	} else if (data.placeType === 4) { // Poi
+		encodeU32(data.id);
+		encodeStringCompressed(data.name);
+		encodeI32(data.x);
+		encodeI32(data.y);
+	} else {
+		throw "unknown place type";
+	}
+};
+
+const encodeI32 = data => {
+	const buffer = new ArrayBuffer(4);
+	new DataView(buffer).setInt32(0, data, true /* littleEndian */);
+	addToBuffer(new Uint8Array(buffer));
+};
+const encodeU32 = data => {
+	const buffer = new ArrayBuffer(4);
+	new DataView(buffer).setUint32(0, data, true /* littleEndian */);
+	addToBuffer(new Uint8Array(buffer));
+};
+const encodeU16 = data => {
+	const buffer = new ArrayBuffer(2);
+	new DataView(buffer).setUint16(0, data, true /* littleEndian */);
+	addToBuffer(new Uint8Array(buffer));
+};
+
+const encodeTrainsearch = data => {
+	pos = 0;
+	addToBuffer([0]); // version
+
+	encodeTrainsearchPlace(data.legs[0].from);
+
+	for (let leg of data.legs) {
+		addToBuffer([legTypes.indexOf(leg.legType)]);
+		encodeTrainsearchPlace(leg.to);
+
+		let departureMins = Math.floor(leg.departure / 60000);
+		let arrivalMins = Math.floor(leg.arrival / 60000);
+		encodeU32(departureMins);
+		encodeU16(arrivalMins - departureMins);
+
+		encodeString(leg.line);
+		addToBuffer([leg.field7]);
+	}
+	return base64EncArr(buffer.slice(0, pos));
+};
+
+const readByte = () => {
+	if (decBuffer.length < 1) throw "short read";
+	const ret = decBuffer[0];
+	decBuffer = decBuffer.slice(1);
+	return ret;
+};
+
+const decodeI32 = () => {
+	const ret = new DataView(decBuffer.buffer).getInt32(0, true /* littleEndian */);
+	decBuffer = decBuffer.slice(4);
+	return ret;
+};
+
+const decodeU32 = () => {
+	const ret = new DataView(decBuffer.buffer).getUint32(0, true /* littleEndian */);
+	decBuffer = decBuffer.slice(4);
+	return ret;
+};
+
+const decodeU16 = () => {
+	const ret = new DataView(decBuffer.buffer).getUint16(0, true /* littleEndian */);
+	decBuffer = decBuffer.slice(2);
+	return ret;
+};
+
+const decodeString = () => {
+	const len = readByte();
+	if (decBuffer.length < len) throw "short read";
+	const bytes = decBuffer.slice(0, len);
+	decBuffer = decBuffer.slice(len);
+	return textDecoder.decode(bytes);
+};
+
+const decodeStringCompressed = () => {
+	const len = readByte();
+	if (decBuffer.length < len) throw "short read";
+	const bytes = decBuffer.slice(0, len);
+	decBuffer = decBuffer.slice(len);
+	const decompressed = decompress(bytes);
+	return textDecoder.decode(decompressed);
+};
+
+const decodeTrainsearchPlace = () => {
+	const placeType = readByte();
+
+	if (placeType === 1) { // Station
+		const id = decodeU32();
+		return {
+			placeType,
+			id
+		};
+	} else if (placeType === 2) { // Address
+		const name = decodeStringCompressed();
+		const x = decodeI32();
+		const y = decodeI32();
+		return {
+			placeType,
+			name,
+			x,
+			y
+		};
+	} else if (placeType === 4) { // Poi
+		const id = decodeU32();
+		const name = decodeStringCompressed();
+		const x = decodeI32();
+		const y = decodeI32();
+		return {
+			placeType,
+			id,
+			name,
+			x,
+			y
+		};
+	} else {
+		throw "unknown place type";
+	}
+};
+
+const decodeTrainsearch = data => {
+	const length = base64DecToArr(data, buffer);
+	decBuffer = buffer.slice(0, length);
+
+	const version = readByte();
+	if (version !== 0) throw "unknown trainsearch token version";
+	const legs = [];
+	let lastPlace = decodeTrainsearchPlace();
+
+	while (decBuffer.length > 0) {
+		const legType = legTypes[readByte()];
+		const to = decodeTrainsearchPlace();
+
+		const departureMin = decodeU32();
+		const diff = decodeU16();
+		const departure = new Date(departureMin * 60000);
+		const arrival = new Date((departureMin + diff) * 60000);
+
+		const line = decodeString();
+		const field7 = readByte();
+
+		legs.push({
+			legType,
+			from: lastPlace,
+			to,
+			departure,
+			arrival,
+			line,
+			field7
+		});
+		lastPlace = to;
+	}
+
+	return { legs };
+};
+
+const decodeHafasPlace = data => {
+	let values = {};
+	for (let pair of data.split("@")) {
+		if (pair == "") continue;
+		const [key, value] = pair.split("=");
+		values[key] = value;
+	}
+	const placeType = parseInt(values["A"]);
+
+	if (placeType === 1) { // Station
+		const id = parseInt(values["L"]);
+		return {
+			placeType,
+			id
+		};
+	} else if (placeType === 2) { // Address
+		const name = values["O"];
+		const x = parseInt(values["X"]);
+		const y = parseInt(values["Y"]);
+		return {
+			placeType,
+			name,
+			x,
+			y
+		};
+	} else if (placeType === 4) { // Poi
+		const id = parseInt(values["L"]);
+		const name = values["O"];
+		const x = parseInt(values["X"]);
+		const y = parseInt(values["Y"]);
+		return {
+			placeType,
+			id,
+			name,
+			x,
+			y
+		};
+	} else {
+		throw "unknown place type";
+	}
+};
+
+const decodeHafasDate = data => {
+	return new Date(`${data.slice(0, 4)}-${data.slice(4, 6)}-${data.slice(6, 8)}T${data.slice(8, 10)}:${data.slice(10, 12)}Z`);
+};
+
+const decodeHafas = data => {
+	if (!data.startsWith("¶HKI¶")) throw "invalid hafas refresh token";
+	data = data.slice(5);
+	data = data.split("¶GP¶")[0];
+
+	const legs = [];
+	for (let leg of data.split("§")) {
+		const parts = leg.split("$");
+		const legType = parts[0];
+		const from = decodeHafasPlace(parts[1]);
+		const to = decodeHafasPlace(parts[2]);
+		const departure = decodeHafasDate(parts[3]);
+		const arrival = decodeHafasDate(parts[4]);
+		const line = parts[5].replaceAll(" ", "");
+		if (parts[6] !== "") throw "unexpected field with content";
+		const field7 = parseInt(parts[7]);
+		for (let pos = 8; pos < parts.length; pos += 1) {
+			if (parts[pos] !== "") throw "unexpected field with content";
+		}
+
+		legs.push({
+			legType,
+			from,
+			to,
+			departure,
+			arrival,
+			line,
+			field7
+		});
+	}
+	return { legs };
+};
+
+const encodeHafasDate = data => {
+	const iso = data.toISOString();
+	let out = "";
+	out += iso.slice(0, 4);
+	out += iso.slice(5, 7);
+	out += iso.slice(8, 10);
+	out += iso.slice(11, 13);
+	out += iso.slice(14, 16);
+	return out;
+};
+const encodeHafasPlace = data => {
+	const parts = {};
+	parts["A"] = data.placeType.toString();
+	if (data.placeType === 1) { // Station
+		parts["L"] = data.id.toString();
+	} else if (data.placeType === 2) { // Address
+		parts["O"] = data.name;
+		parts["X"] = data.x.toString();
+		parts["Y"] = data.y.toString();
+	} else if (data.placeType === 4) { // Poi
+		parts["L"] = data.id.toString();
+		parts["O"] = data.name;
+		parts["X"] = data.x.toString();
+		parts["Y"] = data.y.toString();
+	} else {
+		throw "unknown place type";
+	}
+
+	return Object.entries(parts).map(([key, value]) => `${key}=${value}@`).join("");
+};
+const encodeHafas = data => {
+	const legs = [];
+	for (let leg of data.legs) {
+		const parts = [];
+		parts.push(leg.legType);
+		parts.push(encodeHafasPlace(leg.from));
+		parts.push(encodeHafasPlace(leg.to));
+		parts.push(encodeHafasDate(leg.departure));
+		parts.push(encodeHafasDate(leg.arrival));
+		parts.push(leg.line);
+		parts.push("");
+		parts.push(leg.field7.toString());
+		parts.push("");
+		parts.push("");
+		parts.push("");
+		legs.push(parts.join("$"));
+	}
+	return `¶HKI¶${legs.join("§")}`;
+};
+
+export const hafasToTrainsearch = data => {
+	return encodeTrainsearch(decodeHafas(data));
+};
+export const trainsearchToHafas = data => {
+	return encodeHafas(decodeTrainsearch(data));
+};
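For orientation, the exported pair converts a HAFAS refresh token into the compact trainsearch form and back. A sketch with a hypothetical single-leg token (the station IDs, times, and line are made-up example values, not real data):

	import { hafasToTrainsearch, trainsearchToHafas } from './src/refresh_token/index.js';

	// legType $ from $ to $ departure $ arrival $ line $ (empty) $ field7 $ trailing empties
	const hafasToken =
		'¶HKI¶T$A=1@L=8011160@$A=1@L=8000261@$202206111030$202206111430$ICE1537$$1$$$';

	// Compact, url-safe token: version byte, places, minute-resolution times, line.
	const short = hafasToTrainsearch(hafasToken);

	// Round-trips back to an equivalent HAFAS token.
	console.log(trainsearchToHafas(short));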
diff --git a/src/refresh_token/smaz.js b/src/refresh_token/smaz.js
@@ -0,0 +1,148 @@
+function newNode() {
+	return {
+		chars: new Map(),
+		code: undefined,
+	};
+}
+
+function create(strings) {
+	const node = newNode();
+	for (let i = 0; i < strings.length; i += 1) {
+		const tok = strings[i];
+		let root = node;
+		for (let j = 0; j < tok.length; j += 1) {
+			const c = tok.charCodeAt(j);
+			let next = root.chars.get(c);
+			if (next === undefined) {
+				next = newNode();
+				root.chars.set(c, next);
+			}
+			root = next;
+		}
+		root.code = i;
+	}
+	return node;
+}
+
+function lookup(trie, str) {
+	let node = trie;
+	for (let i = 0; i < str.length; i += 1) {
+		if (node === undefined) {
+			return false;
+		}
+		node = node.chars.get(str[i]);
+	}
+	return node !== undefined && node.code !== undefined;
+}
+
+const EMPTY_UINT8_ARRAY = new Uint8Array(0);
+
+class SmazCompress {
+	constructor(codebook, maxSize) {
+		this.trie = create(codebook);
+		this.buffer = new Uint8Array(maxSize);
+		this.verbatim = new Uint8Array(256);
+	}
+
+	compress(str) {
+		let bufferIndex = 0;
+		let verbatimIndex = 0;
+		let inputIndex = 0;
+
+		while (inputIndex < str.length) {
+			let indexAfterMatch = -1;
+			let code = -1;
+			let root = this.trie;
+			for (let j = inputIndex; j < str.length; j += 1) {
+				root = root.chars.get(str[j]);
+				if (root === undefined) {
+					break;
+				}
+				if (root.code !== undefined) {
+					code = root.code;
+					indexAfterMatch = j + 1;
+				}
+			}
+			if (code === -1) {
+				this.verbatim[verbatimIndex++] = str[inputIndex++];
+				if (verbatimIndex === 255) {
+					bufferIndex = this.flushVerbatim(verbatimIndex, bufferIndex);
+					verbatimIndex = 0;
+				}
+			}
+			else {
+				if (verbatimIndex !== 0) {
+					bufferIndex = this.flushVerbatim(verbatimIndex, bufferIndex);
+					verbatimIndex = 0;
+				}
+				this.buffer[bufferIndex++] = code;
+				inputIndex = indexAfterMatch;
+			}
+		}
+		if (verbatimIndex !== 0) {
+			bufferIndex = this.flushVerbatim(verbatimIndex, bufferIndex);
+		}
+		return this.buffer.slice(0, bufferIndex);
+	}
+
+	flushVerbatim(verbatimIndex, bufferIndex) {
+		if (verbatimIndex === 1) {
+			this.buffer[bufferIndex++] = 254;
+			this.buffer[bufferIndex++] = this.verbatim[0];
+		}
+		else {
+			this.buffer[bufferIndex++] = 255;
+			this.buffer[bufferIndex++] = verbatimIndex - 1;
+			for (let k = 0; k < verbatimIndex; k += 1) {
+				this.buffer[bufferIndex++] = this.verbatim[k];
+			}
+		}
+		return bufferIndex;
+	}
+}
+
+class SmazDecompress {
+	constructor(codebook, maxSize) {
+		this.codebook = codebook;
+		this.buffer = new Uint8Array(maxSize);
+	}
+
+	decompress(arr) {
+		let pos = 0;
+		let i = 0;
+		while (i < arr.byteLength) {
+			if (arr[i] === 254) {
+				this.buffer[pos] = arr[i + 1];
+				pos += 1;
+				i += 2;
+			}
+			else if (arr[i] === 255) {
+				for (let j = 0; j <= arr[i + 1]; j += 1) {
+					this.buffer[pos] = arr[i + 2 + j];
+					pos += 1;
+				}
+				i += 3 + arr[i + 1];
+			}
+			else {
+				for (let j = 0; j < this.codebook[arr[i]].length; j++) {
+					this.buffer[pos] = this.codebook[arr[i]].charCodeAt(j);
+					pos += 1;
+				}
+				i += 1;
+			}
+		}
+		return this.buffer.slice(0, pos);
+	}
+}
+
+const dictionary = ' ;the;e;t;a;of;o;and;i;n;s;e ;r; th; t;in;he;th;h;he ;to;\r\n;l;s ;d; a;an;er;c; o;d ;on; of;re;of ;t ;, ;is;u;at; ;n ;or;which;f;m;as;it;that;\n;was;en; ; w;es; an; i;\r;f ;g;p;nd; s;nd ;ed ;w;ed;http://;for;te;ing;y ;The; c;ti;r ;his;st; in;ar;nt;,; to;y;ng; h;with;le;al;to ;b;ou;be;were; b;se;o ;ent;ha;ng ;their;";hi;from; f;in ;de;ion;me;v;.;ve;all;re ;ri;ro;is ;co;f t;are;ea;. ;her; m;er ; p;es ;by;they;di;ra;ic;not;s, ;d t;at ;ce;la;h ;ne;as ;tio;on ;n t;io;we; a ;om;, a;s o;ur;li;ll;ch;had;this;e t;g ;e\r\n; wh;ere; co;e o;a ;us; d;ss;\n\r\n;\r\n\r;="; be; e;s a;ma;one;t t;or ;but;el;so;l ;e s;s,;no;ter; wa;iv;ho;e a; r;hat;s t;ns;ch ;wh;tr;ut;/;have;ly ;ta; ha; on;tha;-; l;ati;en ;pe; re;there;ass;si; fo;wa;ec;our;who;its;z;fo;rs;>;ot;un;<;im;th ;nc;ate;><;ver;ad; we;ly;ee; n;id; cl;ac;il;</;rt; wi;div;e, ; it;whi; ma;ge;x;e c;men;.com'.split(";");
+
+const compressor = new SmazCompress(dictionary, 30000);
+const decompressor = new SmazDecompress(dictionary, 30000);
+
+export function compress(str) {
+	return compressor.compress(str);
+}
+export function decompress(str) {
+	return decompressor.decompress(str);
+}
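A quick usage sketch of the smaz port (the sample string is arbitrary; compress operates on bytes, which is how index.js calls it via TextEncoder):

	import { compress, decompress } from './src/refresh_token/smaz.js';

	// The codebook favours common English fragments, so typical text shrinks.
	const input = new TextEncoder().encode('Hauptbahnhof Berlin');
	const packed = compress(input);      // Uint8Array of codebook indices + verbatim runs
	const unpacked = decompress(packed); // the original bytes

	console.log(input.length, packed.length, new TextDecoder().decode(unpacked));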
diff --git a/trainsearch-refresh-token/.gitignore b/trainsearch-refresh-token/.gitignore
@@ -1 +0,0 @@
-/target
diff --git a/trainsearch-refresh-token/Cargo.toml b/trainsearch-refresh-token/Cargo.toml
@@ -1,21 +0,0 @@
-[package]
-name = "trainsearch-refresh-token"
-version = "0.1.0"
-edition = "2021"
-repository = "https://cyberchaos.dev/yu-re-ka/trainsearch.git"
-license = "AGPL-3.0"
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-[lib]
-crate-type = ["cdylib", "rlib"]
-
-[dependencies]
-serde = { version = "1.0", features = [ "derive" ] }
-anyhow = "1.0"
-smaz = "0.1"
-base64 = "0.13"
-chrono = "0.4"
-wasm-bindgen = { version = "0.2", optional = true }
-
-[features]
-wasm-bindings = [ "wasm-bindgen" ]
diff --git a/trainsearch-refresh-token/src/lib.rs b/trainsearch-refresh-token/src/lib.rs
@@ -1,433 +0,0 @@
-use anyhow::anyhow;
-use chrono::NaiveDateTime;
-use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
-use std::convert::TryInto;
-#[cfg(feature = "wasm-bindings")]
-use wasm_bindgen::prelude::wasm_bindgen;
-
-#[derive(Debug, Deserialize, Serialize)]
-pub enum HafasLegType {
-    Journey,
-    Walk,
-    Walk2,
-    Transfer,
-    Devi,
-}
-
-fn read_byte(input: &mut &[u8]) -> anyhow::Result<u8> {
-    if input.len() < 1 {
-        return Err(anyhow!("1 bytes"));
-    }
-    let byte = input[0];
-    *input = &input[1..];
-    Ok(byte)
-}
-fn read_2_bytes(input: &mut &[u8]) -> anyhow::Result<[u8; 2]> {
-    if input.len() < 2 {
-        return Err(anyhow!("2 bytes"));
-    }
-    let res = input[..2].try_into()?;
-    *input = &input[2..];
-    Ok(res)
-}
-fn read_4_bytes(input: &mut &[u8]) -> anyhow::Result<[u8; 4]> {
-    if input.len() < 4 {
-        return Err(anyhow!("4 bytes"));
-    }
-    let res = input[..4].try_into()?;
-    *input = &input[4..];
-    Ok(res)
-}
-
-fn encode_string_compressed(text: &str) -> anyhow::Result<Vec<u8>> {
-    let mut res = vec![];
-    let mut compressed = smaz::compress(text.as_bytes());
-    let compressed_len: u8 = compressed.len().try_into()?;
-    res.push(compressed_len);
-    res.append(&mut compressed);
-    Ok(res)
-}
-fn decode_string_compressed(input: &mut &[u8]) -> anyhow::Result<String> {
-    let len = read_byte(input)? as usize;
-
-    if input.len() < len {
-        return Err(anyhow!("dec string"));
-    }
-    let compressed_string = &input[..len];
-    *input = &input[len..];
-
-    Ok(String::from_utf8(smaz::decompress(&compressed_string)?)?)
-}
-
-fn encode_string(text: &str) -> anyhow::Result<Vec<u8>> {
-    let mut res = vec![];
-    let len = text.len().try_into()?;
-    res.push(len);
-    res.append(&mut text.as_bytes().to_vec());
-    Ok(res)
-}
-fn decode_string(input: &mut &[u8]) -> anyhow::Result<String> {
-    let len = read_byte(input)? as usize;
-
-    if input.len() < len {
-        return Err(anyhow!("dec string"));
-    }
-    let bytes = &input[..len];
-    *input = &input[len..];
-
-    Ok(String::from_utf8(bytes.to_vec())?)
-}
-
-fn encode_trainsearch_date(datetime: NaiveDateTime) -> anyhow::Result<u32> {
-    Ok((datetime.timestamp() / 60).try_into()?)
-}
-fn decode_trainsearch_date(num: u32) -> NaiveDateTime {
-    chrono::NaiveDateTime::from_timestamp(num as i64 * 60, 0)
-}
-
-fn decode_hafas_date(text: &str) -> anyhow::Result<NaiveDateTime> {
-    Ok(chrono::NaiveDateTime::parse_from_str(text, "%Y%m%d%H%M")?)
-}
-fn encode_hafas_date(datetime: NaiveDateTime) -> String {
-    datetime.format("%Y%m%d%H%M").to_string()
-}
-
-fn encode_trainsearch_place(place: Place) -> anyhow::Result<Vec<u8>> {
-    let mut out = vec![];
-    match place {
-        Place::Station { id } => {
-            out.push(1);
-            out.append(&mut id.to_le_bytes().to_vec());
-        }
-        Place::Address { name, x, y } => {
-            out.push(2);
-            out.append(&mut encode_string_compressed(&name)?);
-            out.append(&mut x.to_le_bytes().to_vec());
-            out.append(&mut y.to_le_bytes().to_vec());
-        }
-        Place::Poi { id, name, x, y } => {
-            out.push(4);
-            out.append(&mut id.to_le_bytes().to_vec());
-            out.append(&mut encode_string_compressed(&name)?);
-            out.append(&mut x.to_le_bytes().to_vec());
-            out.append(&mut y.to_le_bytes().to_vec());
-        }
-    }
-    Ok(out)
-}
-
-fn decode_trainsearch_place(input: &mut &[u8]) -> anyhow::Result<Place> {
-    let t = read_byte(input)?;
-    Ok(match t {
-        1 => Place::Station {
-            id: u32::from_le_bytes(read_4_bytes(input)?),
-        },
-        2 => Place::Address {
-            name: decode_string_compressed(input)?,
-            x: i32::from_le_bytes(read_4_bytes(input)?),
-            y: i32::from_le_bytes(read_4_bytes(input)?),
-        },
-        4 => Place::Poi {
-            id: u32::from_le_bytes(read_4_bytes(input)?),
-            name: decode_string_compressed(input)?,
-            x: i32::from_le_bytes(read_4_bytes(input)?),
-            y: i32::from_le_bytes(read_4_bytes(input)?),
-        },
-        _ => return Err(anyhow!("unknown place type")),
-    })
-}
-
-fn decode_hafas_place(text: &str) -> anyhow::Result<Place> {
-    let content = text
-        .split("@")
-        .collect::<Vec<_>>()
-        .into_iter() // make iterator reversible
-        .rev()
-        .skip(1)
-        .rev() // remove last item ("")
-        .map(|x| {
-            let mut iter = x.split("=");
-            let first = iter.next().ok_or(anyhow!("enc place 1"))?;
-            let second = iter.next().ok_or(anyhow!("enc place 2"))?;
-            Ok((first, second))
-        })
-        .collect::<anyhow::Result<HashMap<&str, &str>>>()?;
-    Ok(match content.get("A") {
-        Some(&"1") => {
-            let id = content.get("L").ok_or(anyhow!("enc place 3"))?.parse()?;
-            Place::Station { id }
-        }
-        Some(&"2") => {
-            let name = content.get("O").ok_or(anyhow!("enc place 4"))?.to_string();
-            let x = content.get("X").ok_or(anyhow!("enc place 5"))?.parse()?;
-            let y = content.get("Y").ok_or(anyhow!("enc place 6"))?.parse()?;
-            Place::Address { name, x, y }
-        }
-        Some(&"4") => {
-            let id = content.get("L").ok_or(anyhow!("enc place 7"))?.parse()?;
-            let name = content.get("O").ok_or(anyhow!("enc place 8"))?.to_string();
-            let x = content.get("X").ok_or(anyhow!("enc place 9"))?.parse()?;
-            let y = content.get("Y").ok_or(anyhow!("enc place 10"))?.parse()?;
-            Place::Poi { name, x, y, id }
-        }
-        _ => return Err(anyhow!("enc place 11")),
-    })
-}
-
-fn encode_hafas_place(place: Place) -> anyhow::Result<String> {
-    let mut parts = vec![];
-    match place {
-        Place::Station { id } => {
-            parts.push(("A", format!("{}", 1)));
-            parts.push(("L", format!("{}", id)));
-        }
-        Place::Address { name, x, y } => {
-            parts.push(("A", format!("{}", 2)));
-            parts.push(("O", name));
-            parts.push(("X", format!("{}", x)));
-            parts.push(("Y", format!("{}", y)));
-        }
-        Place::Poi { id, name, x, y } => {
-            parts.push(("A", format!("{}", 4)));
-            parts.push(("L", format!("{}", id)));
-            parts.push(("O", name));
-            parts.push(("X", format!("{}", x)));
-            parts.push(("Y", format!("{}", y)));
-        }
-    }
-    Ok(parts
-        .into_iter()
-        .map(|(a, b)| format!("{}={}@", a, b))
-        .collect::<String>())
-}
-
-pub struct HafasReconstructionContext {
-    legs: Vec<HafasReconstructionContextLeg>,
-}
-
-pub struct HafasReconstructionContextLeg {
-    leg_type: HafasLegType,
-    from: Place,
-    to: Place,
-    departure: NaiveDateTime,
-    arrival: NaiveDateTime,
-    line: String,
-    field_7: u8, // possibly something about replacement trains?
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum Place {
-    Station {
-        id: u32,
-    },
-    Address {
-        name: String,
-        x: i32,
-        y: i32,
-    },
-    Poi {
-        id: u32,
-        name: String,
-        x: i32,
-        y: i32,
-    },
-}
-
-pub(crate) fn decode_hafas(token: &str) -> anyhow::Result<HafasReconstructionContext> {
-    let token = token.split("¶GP¶").next().ok_or(anyhow!("enc token 1"))?;
-    let token = token.strip_prefix("¶HKI¶").ok_or(anyhow!("enc token 2"))?;
-
-    let mut legs = vec![];
-    for leg in token.split('§') {
-        let mut parts = leg.split('$');
-
-        legs.push(HafasReconstructionContextLeg {
-            leg_type: match parts
-                .next()
-                .ok_or_else(|| anyhow!("wrong number of parts"))?
-            {
-                "T" => HafasLegType::Journey,
-                "G@F" => HafasLegType::Walk,
-                "TF" => HafasLegType::Transfer,
-                "D" => HafasLegType::Devi,
-                "W" => HafasLegType::Walk2,
-                other => return Err(anyhow!("unknown leg type: {}", other)),
-            },
-            from: match parts.next() {
-                None => return Err(anyhow!("wrong number of parts")),
-                Some(text) => decode_hafas_place(text)?,
-            },
-            to: match parts.next() {
-                None => return Err(anyhow!("wrong number of parts")),
-                Some(text) => decode_hafas_place(text)?,
-            },
-            departure: match parts.next() {
-                None => return Err(anyhow!("wrong number of parts")),
-                Some(text) => decode_hafas_date(text)?,
-            },
-            arrival: match parts.next() {
-                None => return Err(anyhow!("wrong number of parts")),
-                Some(text) => decode_hafas_date(text)?,
-            },
-            line: match parts.next() {
-                None => return Err(anyhow!("wrong number of parts")),
-                Some(text) => text.replace(" ", ""),
-            },
-            field_7: {
-                // field 6
-                match parts.next() {
-                    None => return Err(anyhow!("wrong number of parts")),
-                    Some("") => (),
-                    Some(other) => return Err(anyhow!("unexpected field with content: {}", other)),
-                }
-                // field 7
-                match parts.next() {
-                    None => return Err(anyhow!("wrong number of parts")),
-                    Some(text) => text.parse()?,
-                }
-            },
-        });
-        while let Some(part) = parts.next() {
-            if part != "" {
-                return Err(anyhow!("unexpected field with content: {}", part));
-            }
-        }
-    }
-
-    Ok(HafasReconstructionContext { legs })
-}
-pub(crate) fn encode_hafas(data: HafasReconstructionContext) -> anyhow::Result<String> {
-    let mut legs = vec![];
-
-    for leg in data.legs {
-        let mut parts = vec![];
-        parts.push(match leg.leg_type {
-            HafasLegType::Journey => "T".to_string(),
-            HafasLegType::Walk => "G@F".to_string(),
-            HafasLegType::Transfer => "TF".to_string(),
-            HafasLegType::Devi => "D".to_string(),
-            HafasLegType::Walk2 => "W".to_string(),
-        });
-        parts.push(encode_hafas_place(leg.from)?);
-        parts.push(encode_hafas_place(leg.to)?);
-        parts.push(encode_hafas_date(leg.departure));
-        parts.push(encode_hafas_date(leg.arrival));
-        parts.push(leg.line);
-        parts.push("".to_string());
-        parts.push(leg.field_7.to_string());
-        parts.push("".to_string());
-        parts.push("".to_string());
-        parts.push("".to_string());
-        legs.push(parts.join("$"));
-    }
-    Ok(format!("¶HKI¶{}", legs.join("§")))
-}
-
-pub(crate) fn encode_trainsearch(token: HafasReconstructionContext) -> anyhow::Result<String> {
-    let mut out = vec![
-        0u8, // version
-    ];
-
-    out.append(&mut encode_trainsearch_place(
-        token
-            .legs
-            .get(0)
-            .ok_or_else(|| anyhow!("token has no legs"))?
-            .from
-            .clone(),
-    )?);
-
-    for leg in token.legs {
-        out.push(match leg.leg_type {
-            HafasLegType::Journey => 0,
-            HafasLegType::Walk => 1,
-            HafasLegType::Transfer => 2,
-            HafasLegType::Devi => 3,
-            HafasLegType::Walk2 => 4,
-        });
-
-        out.append(&mut encode_trainsearch_place(leg.to)?);
-
-        {
-            let departure: u32 = encode_trainsearch_date(leg.departure)?;
-            let arrival_minus_departure: u16 =
-                (encode_trainsearch_date(leg.arrival)? - departure).try_into()?;
-            out.append(&mut departure.to_le_bytes().to_vec());
-            out.append(&mut arrival_minus_departure.to_le_bytes().to_vec());
-        }
-
-        out.append(&mut encode_string(&leg.line)?);
-        out.push(leg.field_7);
-    }
-
-    Ok(base64::encode_config(&out, base64::URL_SAFE_NO_PAD))
-}
-
-pub(crate) fn decode_trainsearch(token: &str) -> anyhow::Result<HafasReconstructionContext> {
-    let input = base64::decode_config(token, base64::URL_SAFE_NO_PAD)?;
-    let mut input = input.as_slice();
-    let version = read_byte(&mut input)?;
-
-    Ok(match version {
-        0 => {
-            let mut legs = vec![];
-            let mut last_place = decode_trainsearch_place(&mut input)?;
-            while input.len() > 0 {
-                let leg_type = match read_byte(&mut input)? {
-                    0 => HafasLegType::Journey,
-                    1 => HafasLegType::Walk,
-                    2 => HafasLegType::Transfer,
-                    3 => HafasLegType::Devi,
-                    4 => HafasLegType::Walk2,
-                    other => return Err(anyhow!("unknown leg type: {}", other)),
-                };
-                let to = decode_trainsearch_place(&mut input)?;
-
-                let (departure, arrival) = {
-                    let departure = u32::from_le_bytes(read_4_bytes(&mut input)?);
-                    let arrival_minus_departure =
-                        u16::from_le_bytes(read_2_bytes(&mut input)?) as u32;
-                    (
-                        decode_trainsearch_date(departure),
-                        decode_trainsearch_date(departure + arrival_minus_departure),
-                    )
-                };
-
-                let line = decode_string(&mut input)?;
-                let field_7 = read_byte(&mut input)?;
-
-                legs.push(HafasReconstructionContextLeg {
-                    leg_type,
-                    from: last_place,
-                    to: to.clone(),
-                    departure,
-                    arrival,
-                    line,
-                    field_7,
-                });
-                last_place = to;
-            }
-            HafasReconstructionContext { legs }
-        }
-        _ => return Err(anyhow!("unknown trainsearch token version")),
-    })
-}
-
-pub fn trainsearch_to_hafas(token: &str) -> anyhow::Result<String> {
-    Ok(encode_hafas(decode_trainsearch(token)?)?)
-}
-pub fn hafas_to_trainsearch(token: &str) -> anyhow::Result<String> {
-    Ok(encode_trainsearch(decode_hafas(token)?)?)
-}
-
-#[cfg(feature = "wasm-bindings")]
-#[wasm_bindgen(js_name = "trainsearchToHafas")]
-pub fn wasm_trainsearch_to_hafas(token: &str) -> Result<String, String> {
-    trainsearch_to_hafas(token).map_err(|e| e.to_string())
-}
-#[cfg(feature = "wasm-bindings")]
-#[wasm_bindgen(js_name = "hafasToTrainsearch")]
-pub fn wasm_hafas_to_trainsearch(token: &str) -> Result<String, String> {
-    hafas_to_trainsearch(token).map_err(|e| e.to_string())
-}
diff --git a/webpack.config.js b/webpack.config.js
@@ -25,9 +25,5 @@ module.exports = {
 			crateDirectory: __dirname + "/hafas-rs",
 			extraArgs: "--no-default-features --features all-profiles,js-fetch-requester,wasm-bindings,polylines",
 		}),
-		new WasmPackPlugin({
-			crateDirectory: __dirname + "/trainsearch-refresh-token",
-			extraArgs: "--all-features",
-		}),
 	]
 };