16 Commits

Author SHA1 Message Date
e64011ba16 feat!: add telegram auth and async refactor
- Removed "/schedule/update-download-url" endpoint, this mechanism was replaced by Yandex Cloud FaaS. Ура :)
- Improved schedule caching mechanism.
- Added Telegram WebApp authentication support.
- Reworked endpoints responses and errors mechanism.
- Refactored application state management.
- Made synchronous database operations, middlewares and extractors asynchronous.
- Made user password field optional to support multiple auth methods.
- Renamed users table column "version" to "android_version" and made it nullable.
2025-06-08 01:43:45 +04:00
6a106a366c feat(parser): add ability to parse mistyped date 2025-06-08 01:03:50 +04:00
4fca22662c feat(parser)!: rework of subgroups parsing 2025-06-08 01:03:00 +04:00
d23092a32a feat(parser): add lesson types "course project" and "course project defense" 2025-05-27 02:06:13 +04:00
01bfa38969 feat(parser): speed improvement, lesson type guessing and parsing of merged lesson cabinets 2025-05-27 02:03:54 +04:00
851ec9225f refactor(parser): improve readability 2025-05-26 21:12:23 +04:00
8de1891724 chore(release): bump version to 1.0.5 2025-05-26 05:30:44 +04:00
4cf6df379e fix(parser): fix lessons merging 2025-05-26 05:24:13 +04:00
ba8b164b6a refactor(parser): rewrite some parts of code 2025-05-26 05:24:08 +04:00
ff9d7d6c3a fix(cache): fix setting cache_update_required flag in cache status 2025-05-25 17:39:23 +04:00
9090716f87 fix(test): fix test sign_up_invalid_group 2025-05-25 15:57:18 +04:00
ee992f1b55 chore(xls): update schedule xls 2025-05-25 15:49:52 +04:00
7f71fb1616 refactor(env): remove unsafe env::set_var call 2025-05-25 15:48:43 +04:00
234055eaeb feat(test): add ability to use test env without schedule 2025-05-25 15:48:10 +04:00
fceffb900d release/v1.0.3 2025-04-18 00:29:04 +04:00
49ce0005dc Исправление работы подключения к сайтам из-за отсутствия сертификатов. 2025-04-18 00:28:55 +04:00
77 changed files with 2889 additions and 2071 deletions

View File

@@ -44,10 +44,15 @@ jobs:
cargo test --verbose cargo test --verbose
env: env:
DATABASE_URL: ${{ env.TEST_DB }} DATABASE_URL: ${{ env.TEST_DB }}
SCHEDULE_DISABLE_AUTO_UPDATE: 1
JWT_SECRET: "test-secret-at-least-256-bits-used" JWT_SECRET: "test-secret-at-least-256-bits-used"
VKID_CLIENT_ID: 0 VK_ID_CLIENT_ID: 0
VKID_REDIRECT_URI: "vk0://vk.com/blank.html" VK_ID_REDIRECT_URI: "vk0://vk.com/blank.html"
REQWEST_USER_AGENT: "Dalvik/2.1.0 (Linux; U; Android 6.0.1; OPPO R9s Build/MMB29M)" TELEGRAM_BOT_ID: 0
TELEGRAM_MINI_APP_HOST: example.com
TELEGRAM_TEST_DC: false
YANDEX_CLOUD_API_KEY: ""
YANDEX_CLOUD_FUNC_ID: ""
build: build:
name: Build name: Build
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -26,7 +26,12 @@ jobs:
run: cargo test run: cargo test
env: env:
DATABASE_URL: ${{ secrets.TEST_DATABASE_URL }} DATABASE_URL: ${{ secrets.TEST_DATABASE_URL }}
SCHEDULE_DISABLE_AUTO_UPDATE: 1
JWT_SECRET: "test-secret-at-least-256-bits-used" JWT_SECRET: "test-secret-at-least-256-bits-used"
VKID_CLIENT_ID: 0 VK_ID_CLIENT_ID: 0
VKID_REDIRECT_URI: "vk0://vk.com/blank.html" VK_ID_REDIRECT_URI: "vk0://vk.com/blank.html"
REQWEST_USER_AGENT: "Dalvik/2.1.0 (Linux; U; Android 6.0.1; OPPO R9s Build/MMB29M)" TELEGRAM_BOT_ID: 0
TELEGRAM_MINI_APP_HOST: example.com
TELEGRAM_TEST_DC: false
YANDEX_CLOUD_API_KEY: ""
YANDEX_CLOUD_FUNC_ID: ""

View File

@@ -4,9 +4,10 @@
<content url="file://$MODULE_DIR$"> <content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/lib/schedule_parser/src" isTestSource="false" /> <sourceFolder url="file://$MODULE_DIR$/lib/schedule_parser/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" /> <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/actix-macros/src" isTestSource="false" /> <sourceFolder url="file://$MODULE_DIR$/actix-macros/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/actix-test/src" isTestSource="false" /> <sourceFolder url="file://$MODULE_DIR$/actix-test/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/schedule-parser/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/schedule-parser/src" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/actix-macros/target" /> <excludeFolder url="file://$MODULE_DIR$/actix-macros/target" />
<excludeFolder url="file://$MODULE_DIR$/actix-test/target" /> <excludeFolder url="file://$MODULE_DIR$/actix-test/target" />
<excludeFolder url="file://$MODULE_DIR$/target" /> <excludeFolder url="file://$MODULE_DIR$/target" />

414
Cargo.lock generated
View File

@@ -453,6 +453,12 @@ version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
name = "base64ct"
version = "1.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89e25b6adfb930f02d1981565a6e5d9c547ac15a96606256d3b59040e5cd4ca3"
[[package]] [[package]]
name = "bcrypt" name = "bcrypt"
version = "0.17.0" version = "0.17.0"
@@ -690,12 +696,27 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
[[package]]
name = "const-oid"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]] [[package]]
name = "const-oid" name = "const-oid"
version = "0.10.0" version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cb3c4a0d3776f7535c32793be81d6d5fec0d48ac70955d9834e643aa249a52f" checksum = "1cb3c4a0d3776f7535c32793be81d6d5fec0d48ac70955d9834e643aa249a52f"
[[package]]
name = "convert_case"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7"
dependencies = [
"unicode-segmentation",
]
[[package]] [[package]]
name = "cookie" name = "cookie"
version = "0.16.2" version = "0.16.2"
@@ -753,25 +774,22 @@ dependencies = [
[[package]] [[package]]
name = "criterion" name = "criterion"
version = "0.5.1" version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679"
dependencies = [ dependencies = [
"anes", "anes",
"cast", "cast",
"ciborium", "ciborium",
"clap", "clap",
"criterion-plot", "criterion-plot",
"is-terminal", "itertools 0.13.0",
"itertools",
"num-traits", "num-traits",
"once_cell",
"oorandom", "oorandom",
"plotters", "plotters",
"rayon", "rayon",
"regex", "regex",
"serde", "serde",
"serde_derive",
"serde_json", "serde_json",
"tinytemplate", "tinytemplate",
"walkdir", "walkdir",
@@ -784,7 +802,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
dependencies = [ dependencies = [
"cast", "cast",
"itertools", "itertools 0.10.5",
] ]
[[package]] [[package]]
@@ -837,6 +855,33 @@ dependencies = [
"hybrid-array", "hybrid-array",
] ]
[[package]]
name = "curve25519-dalek"
version = "4.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be"
dependencies = [
"cfg-if",
"cpufeatures",
"curve25519-dalek-derive",
"digest 0.10.7",
"fiat-crypto",
"rustc_version",
"subtle",
"zeroize",
]
[[package]]
name = "curve25519-dalek-derive"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.100",
]
[[package]] [[package]]
name = "darling" name = "darling"
version = "0.20.10" version = "0.20.10"
@@ -882,6 +927,16 @@ dependencies = [
"uuid", "uuid",
] ]
[[package]]
name = "der"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb"
dependencies = [
"const-oid 0.9.6",
"zeroize",
]
[[package]] [[package]]
name = "deranged" name = "deranged"
version = "0.4.0" version = "0.4.0"
@@ -918,6 +973,7 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
dependencies = [ dependencies = [
"convert_case",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.100", "syn 2.0.100",
@@ -987,7 +1043,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c478574b20020306f98d61c8ca3322d762e1ff08117422ac6106438605ea516" checksum = "6c478574b20020306f98d61c8ca3322d762e1ff08117422ac6106438605ea516"
dependencies = [ dependencies = [
"block-buffer 0.11.0-rc.4", "block-buffer 0.11.0-rc.4",
"const-oid", "const-oid 0.10.0",
"crypto-common 0.2.0-rc.2", "crypto-common 0.2.0-rc.2",
] ]
@@ -1022,6 +1078,30 @@ dependencies = [
"syn 2.0.100", "syn 2.0.100",
] ]
[[package]]
name = "ed25519"
version = "2.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53"
dependencies = [
"pkcs8",
"signature",
]
[[package]]
name = "ed25519-dalek"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871"
dependencies = [
"curve25519-dalek",
"ed25519",
"serde",
"sha2",
"subtle",
"zeroize",
]
[[package]] [[package]]
name = "either" name = "either"
version = "1.15.0" version = "1.15.0"
@@ -1082,6 +1162,12 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "fiat-crypto"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
[[package]] [[package]]
name = "findshlibs" name = "findshlibs"
version = "0.10.2" version = "0.10.2"
@@ -1251,15 +1337,6 @@ dependencies = [
"slab", "slab",
] ]
[[package]]
name = "fuzzy-matcher"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54614a3312934d066701a80f20f15fa3b56d67ac7722b39eea5b4c9dd1d66c94"
dependencies = [
"thread_local",
]
[[package]] [[package]]
name = "gcc" name = "gcc"
version = "0.3.55" version = "0.3.55"
@@ -1404,18 +1481,18 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hermit-abi"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e"
[[package]] [[package]]
name = "hex" name = "hex"
version = "0.4.3" version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "hex-literal"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcaaec4551594c969335c98c903c1397853d4198408ea609190f420500f6be71"
[[package]] [[package]]
name = "hostname" name = "hostname"
version = "0.1.5" version = "0.1.5"
@@ -1848,17 +1925,6 @@ version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
[[package]]
name = "is-terminal"
version = "0.4.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
dependencies = [
"hermit-abi",
"libc",
"windows-sys 0.59.0",
]
[[package]] [[package]]
name = "is_terminal_polyfill" name = "is_terminal_polyfill"
version = "1.70.1" version = "1.70.1"
@@ -1874,6 +1940,15 @@ dependencies = [
"either", "either",
] ]
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "1.0.15" version = "1.0.15"
@@ -2296,6 +2371,16 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "pkcs8"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7"
dependencies = [
"der",
"spki",
]
[[package]] [[package]]
name = "pkg-config" name = "pkg-config"
version = "0.3.32" version = "0.3.32"
@@ -2396,7 +2481,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"itertools", "itertools 0.13.0",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.100", "syn 2.0.100",
@@ -2441,7 +2526,7 @@ dependencies = [
"rustc-hash", "rustc-hash",
"rustls", "rustls",
"socket2", "socket2",
"thiserror 2.0.12", "thiserror",
"tokio", "tokio",
"tracing", "tracing",
"web-time", "web-time",
@@ -2461,7 +2546,7 @@ dependencies = [
"rustls", "rustls",
"rustls-pki-types", "rustls-pki-types",
"slab", "slab",
"thiserror 2.0.12", "thiserror",
"tinyvec", "tinyvec",
"tracing", "tracing",
"web-time", "web-time",
@@ -2875,39 +2960,57 @@ dependencies = [
] ]
[[package]] [[package]]
name = "schedule-parser-rusted" name = "schedule-parser"
version = "1.0.2" version = "0.1.0"
dependencies = [ dependencies = [
"actix-macros 0.1.0",
"actix-test",
"actix-web",
"bcrypt",
"calamine", "calamine",
"chrono", "chrono",
"criterion", "criterion",
"derive_more", "derive_more",
"log",
"regex",
"sentry",
"serde",
"serde_repr",
"strsim",
"utoipa",
]
[[package]]
name = "schedule-parser-rusted"
version = "1.0.5"
dependencies = [
"actix-macros 0.1.0",
"actix-test",
"actix-web",
"base64",
"bcrypt",
"chrono",
"derive_more",
"diesel", "diesel",
"diesel-derive-enum", "diesel-derive-enum",
"dotenvy", "dotenvy",
"ed25519-dalek",
"env_logger", "env_logger",
"firebase-messaging-rs", "firebase-messaging-rs",
"futures-util", "futures-util",
"fuzzy-matcher",
"hex", "hex",
"hex-literal",
"jsonwebtoken", "jsonwebtoken",
"log",
"mime", "mime",
"objectid", "objectid",
"rand 0.9.0", "percent-encoding",
"regex",
"reqwest", "reqwest",
"schedule-parser",
"sentry", "sentry",
"sentry-actix", "sentry-actix",
"serde", "serde",
"serde_json", "serde_json",
"serde_repr",
"serde_with", "serde_with",
"sha1 0.11.0-pre.5", "sha1 0.11.0-pre.5",
"tokio", "tokio",
"ua_generator",
"utoipa", "utoipa",
"utoipa-actix-web", "utoipa-actix-web",
"utoipa-rapidoc", "utoipa-rapidoc",
@@ -2977,13 +3080,14 @@ checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
[[package]] [[package]]
name = "sentry" name = "sentry"
version = "0.37.0" version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "255914a8e53822abd946e2ce8baa41d4cded6b8e938913b7f7b9da5b7ab44335" checksum = "a505499b38861edd82b5a688fa06ba4ba5875bb832adeeeba22b7b23fc4bc39a"
dependencies = [ dependencies = [
"httpdate", "httpdate",
"native-tls", "native-tls",
"reqwest", "reqwest",
"sentry-actix",
"sentry-backtrace", "sentry-backtrace",
"sentry-contexts", "sentry-contexts",
"sentry-core", "sentry-core",
@@ -2996,9 +3100,9 @@ dependencies = [
[[package]] [[package]]
name = "sentry-actix" name = "sentry-actix"
version = "0.37.0" version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a927aed43cce0e9240f7477ac81cdfa2ffb048e0e2b17000eb5976e14f063993" checksum = "39ad8bfdcfbc6e0d0dacaa5728555085ef459fa9226cfc2fe64eefa4b8038b7f"
dependencies = [ dependencies = [
"actix-http", "actix-http",
"actix-web", "actix-web",
@@ -3009,21 +3113,20 @@ dependencies = [
[[package]] [[package]]
name = "sentry-backtrace" name = "sentry-backtrace"
version = "0.37.0" version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00293cd332a859961f24fd69258f7e92af736feaeb91020cff84dac4188a4302" checksum = "8dace796060e4ad10e3d1405b122ae184a8b2e71dce05ae450e4f81b7686b0d9"
dependencies = [ dependencies = [
"backtrace", "backtrace",
"once_cell",
"regex", "regex",
"sentry-core", "sentry-core",
] ]
[[package]] [[package]]
name = "sentry-contexts" name = "sentry-contexts"
version = "0.37.0" version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "961990f9caa76476c481de130ada05614cd7f5aa70fb57c2142f0e09ad3fb2aa" checksum = "87bd9e6b51ffe2bc7188ebe36cb67557cb95749c08a3f81f33e8c9b135e0d1bc"
dependencies = [ dependencies = [
"hostname 0.4.1", "hostname 0.4.1",
"libc", "libc",
@@ -3035,12 +3138,11 @@ dependencies = [
[[package]] [[package]]
name = "sentry-core" name = "sentry-core"
version = "0.37.0" version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a6409d845707d82415c800290a5d63be5e3df3c2e417b0997c60531dfbd35ef" checksum = "7426d4beec270cfdbb50f85f0bb2ce176ea57eed0b11741182a163055a558187"
dependencies = [ dependencies = [
"once_cell", "rand 0.9.0",
"rand 0.8.5",
"sentry-types", "sentry-types",
"serde", "serde",
"serde_json", "serde_json",
@@ -3048,20 +3150,19 @@ dependencies = [
[[package]] [[package]]
name = "sentry-debug-images" name = "sentry-debug-images"
version = "0.37.0" version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71ab5df4f3b64760508edfe0ba4290feab5acbbda7566a79d72673065888e5cc" checksum = "9df15c066c04f34c4dfd496a8e76590106b93283f72ef1a47d8fb24d88493424"
dependencies = [ dependencies = [
"findshlibs", "findshlibs",
"once_cell",
"sentry-core", "sentry-core",
] ]
[[package]] [[package]]
name = "sentry-panic" name = "sentry-panic"
version = "0.37.0" version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "609b1a12340495ce17baeec9e08ff8ed423c337c1a84dffae36a178c783623f3" checksum = "c92beed69b776a162b6d269bef1eaa3e614090b6df45a88d9b239c4fdbffdfba"
dependencies = [ dependencies = [
"sentry-backtrace", "sentry-backtrace",
"sentry-core", "sentry-core",
@@ -3069,9 +3170,9 @@ dependencies = [
[[package]] [[package]]
name = "sentry-tracing" name = "sentry-tracing"
version = "0.37.0" version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f4e86402d5c50239dc7d8fd3f6d5e048221d5fcb4e026d8d50ab57fe4644cb" checksum = "55c323492795de90824f3198562e33dd74ae3bc852fbb13c0cabec54a1cf73cd"
dependencies = [ dependencies = [
"sentry-backtrace", "sentry-backtrace",
"sentry-core", "sentry-core",
@@ -3081,16 +3182,16 @@ dependencies = [
[[package]] [[package]]
name = "sentry-types" name = "sentry-types"
version = "0.37.0" version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d3f117b8755dbede8260952de2aeb029e20f432e72634e8969af34324591631" checksum = "04b6c9287202294685cb1f749b944dbbce8160b81a1061ecddc073025fed129f"
dependencies = [ dependencies = [
"debugid", "debugid",
"hex", "hex",
"rand 0.8.5", "rand 0.9.0",
"serde", "serde",
"serde_json", "serde_json",
"thiserror 1.0.69", "thiserror",
"time 0.3.40", "time 0.3.40",
"url", "url",
"uuid", "uuid",
@@ -3139,6 +3240,15 @@ dependencies = [
"syn 2.0.100", "syn 2.0.100",
] ]
[[package]]
name = "serde_spanned"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "serde_urlencoded" name = "serde_urlencoded"
version = "0.7.1" version = "0.7.1"
@@ -3203,6 +3313,17 @@ dependencies = [
"digest 0.11.0-pre.10", "digest 0.11.0-pre.10",
] ]
[[package]]
name = "sha2"
version = "0.10.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
dependencies = [
"cfg-if",
"cpufeatures",
"digest 0.10.7",
]
[[package]] [[package]]
name = "shlex" name = "shlex"
version = "1.3.0" version = "1.3.0"
@@ -3218,6 +3339,15 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "signature"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de"
dependencies = [
"rand_core 0.6.4",
]
[[package]] [[package]]
name = "simd-adler32" name = "simd-adler32"
version = "0.3.7" version = "0.3.7"
@@ -3232,7 +3362,7 @@ checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb"
dependencies = [ dependencies = [
"num-bigint", "num-bigint",
"num-traits", "num-traits",
"thiserror 2.0.12", "thiserror",
"time 0.3.40", "time 0.3.40",
] ]
@@ -3261,6 +3391,16 @@ dependencies = [
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
[[package]]
name = "spki"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d"
dependencies = [
"base64ct",
"der",
]
[[package]] [[package]]
name = "stable_deref_trait" name = "stable_deref_trait"
version = "1.2.0" version = "1.2.0"
@@ -3355,33 +3495,13 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "thiserror"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
dependencies = [
"thiserror-impl 1.0.69",
]
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "2.0.12" version = "2.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
dependencies = [ dependencies = [
"thiserror-impl 2.0.12", "thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.100",
] ]
[[package]] [[package]]
@@ -3395,16 +3515,6 @@ dependencies = [
"syn 2.0.100", "syn 2.0.100",
] ]
[[package]]
name = "thread_local"
version = "1.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
dependencies = [
"cfg-if",
"once_cell",
]
[[package]] [[package]]
name = "time" name = "time"
version = "0.1.45" version = "0.1.45"
@@ -3555,6 +3665,47 @@ dependencies = [
"tokio", "tokio",
] ]
[[package]]
name = "toml"
version = "0.8.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05ae329d1f08c4d17a59bed7ff5b5a769d062e64a62d34a3261b219e62cd5aae"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit",
]
[[package]]
name = "toml_datetime"
version = "0.6.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3"
dependencies = [
"serde",
]
[[package]]
name = "toml_edit"
version = "0.22.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e"
dependencies = [
"indexmap 2.8.0",
"serde",
"serde_spanned",
"toml_datetime",
"toml_write",
"winnow",
]
[[package]]
name = "toml_write"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076"
[[package]] [[package]]
name = "tonic" name = "tonic"
version = "0.12.3" version = "0.12.3"
@@ -3701,6 +3852,20 @@ version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
[[package]]
name = "ua_generator"
version = "0.5.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fffa7e1ef86f4ed29df5ecbac2f47160cdfbc3296c25b609cd83835ec3b7151"
dependencies = [
"dotenvy",
"fastrand",
"serde",
"serde_json",
"toml",
"ureq",
]
[[package]] [[package]]
name = "uname" name = "uname"
version = "0.1.1" version = "0.1.1"
@@ -3722,6 +3887,12 @@ version = "1.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]] [[package]]
name = "unicode-xid" name = "unicode-xid"
version = "0.2.6" version = "0.2.6"
@@ -3741,10 +3912,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d" checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d"
dependencies = [ dependencies = [
"base64", "base64",
"brotli-decompressor",
"encoding_rs",
"flate2",
"log", "log",
"native-tls", "native-tls",
"once_cell", "once_cell",
"rustls",
"rustls-pki-types",
"serde",
"serde_json",
"url", "url",
"webpki-roots 0.26.11",
] ]
[[package]] [[package]]
@@ -3996,6 +4175,24 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "webpki-roots"
version = "0.26.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9"
dependencies = [
"webpki-roots 1.0.0",
]
[[package]]
name = "webpki-roots"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2853738d1cc4f2da3a225c18ec6c3721abb31961096e9dbf5ab35fa88b19cfdb"
dependencies = [
"rustls-pki-types",
]
[[package]] [[package]]
name = "winapi" name = "winapi"
version = "0.3.9" version = "0.3.9"
@@ -4217,6 +4414,15 @@ version = "0.53.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
[[package]]
name = "winnow"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06928c8748d81b05c9be96aad92e1b6ff01833332f281e8cfca3be4b35fc9ec"
dependencies = [
"memchr",
]
[[package]] [[package]]
name = "winutil" name = "winutil"
version = "0.1.1" version = "0.1.1"
@@ -4367,7 +4573,7 @@ dependencies = [
"flate2", "flate2",
"indexmap 2.8.0", "indexmap 2.8.0",
"memchr", "memchr",
"thiserror 2.0.12", "thiserror",
"zopfli", "zopfli",
] ]

View File

@@ -1,9 +1,9 @@
[workspace] [workspace]
members = ["actix-macros", "actix-test"] members = ["actix-macros", "actix-test", "schedule-parser"]
[package] [package]
name = "schedule-parser-rusted" name = "schedule-parser-rusted"
version = "1.0.2" version = "1.0.5"
edition = "2024" edition = "2024"
publish = false publish = false
@@ -13,41 +13,38 @@ debug = true
[dependencies] [dependencies]
actix-web = "4.10.2" actix-web = "4.10.2"
actix-macros = { path = "actix-macros" } actix-macros = { path = "actix-macros" }
schedule-parser = { path = "schedule-parser", features = ["test-utils"] }
bcrypt = "0.17.0" bcrypt = "0.17.0"
calamine = "0.26.1"
chrono = { version = "0.4.40", features = ["serde"] } chrono = { version = "0.4.40", features = ["serde"] }
derive_more = "2.0.1" derive_more = { version = "2", features = ["full"] }
diesel = { version = "2.2.8", features = ["postgres"] } diesel = { version = "2.2.8", features = ["postgres"] }
diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] } diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
dotenvy = "0.15.7" dotenvy = "0.15.7"
env_logger = "0.11.7" env_logger = "0.11.7"
firebase-messaging-rs = { git = "https://github.com/i10416/firebase-messaging-rs.git" } firebase-messaging-rs = { git = "https://github.com/i10416/firebase-messaging-rs.git" }
futures-util = "0.3.31" futures-util = "0.3.31"
fuzzy-matcher = "0.3.7"
jsonwebtoken = { version = "9.3.1", features = ["use_pem"] } jsonwebtoken = { version = "9.3.1", features = ["use_pem"] }
hex = "0.4.3" hex = "0.4.3"
mime = "0.3.17" mime = "0.3.17"
objectid = "0.2.0" objectid = "0.2.0"
regex = "1.11.1"
reqwest = { version = "0.12.15", features = ["json"] } reqwest = { version = "0.12.15", features = ["json"] }
sentry = "0.37.0" sentry = "0.38"
sentry-actix = "0.37.0" sentry-actix = "0.38"
serde = { version = "1.0.219", features = ["derive"] } serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140" serde_json = "1.0.140"
serde_with = "3.12.0" serde_with = "3.12.0"
serde_repr = "0.1.20"
sha1 = "0.11.0-pre.5" sha1 = "0.11.0-pre.5"
tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] } tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] }
rand = "0.9.0"
utoipa = { version = "5", features = ["actix_extras", "chrono"] } utoipa = { version = "5", features = ["actix_extras", "chrono"] }
utoipa-rapidoc = { version = "6.0.0", features = ["actix-web"] } utoipa-rapidoc = { version = "6.0.0", features = ["actix-web"] }
utoipa-actix-web = "0.1" utoipa-actix-web = "0.1"
uuid = { version = "1.16.0", features = ["v4"] } uuid = { version = "1.16.0", features = ["v4"] }
ed25519-dalek = "2.1.1"
hex-literal = "1.0.0"
log = "0.4.26"
base64 = "0.22.1"
percent-encoding = "2.3.1"
ua_generator = "0.5.16"
[dev-dependencies] [dev-dependencies]
actix-test = { path = "actix-test" } actix-test = { path = "actix-test" }
criterion = "0.5.1"
[[bench]]
name = "parse"
harness = false

View File

@@ -6,7 +6,7 @@ ARG BINARY_NAME
WORKDIR /app/ WORKDIR /app/
RUN apt update && \ RUN apt update && \
apt install -y libpq5 apt install -y libpq5 ca-certificates openssl
COPY ./${BINARY_NAME} /bin/main COPY ./${BINARY_NAME} /bin/main
RUN chmod +x /bin/main RUN chmod +x /bin/main

View File

@@ -62,7 +62,7 @@ mod shared {
} }
} }
mod response_error_message { mod middleware_error {
use proc_macro::TokenStream; use proc_macro::TokenStream;
use quote::quote; use quote::quote;
@@ -81,28 +81,7 @@ mod response_error_message {
fn error_response(&self) -> ::actix_web::HttpResponse<BoxBody> { fn error_response(&self) -> ::actix_web::HttpResponse<BoxBody> {
::actix_web::HttpResponse::build(self.status_code()) ::actix_web::HttpResponse::build(self.status_code())
.json(crate::utility::error::ResponseErrorMessage::new(self.clone())) .json(crate::utility::error::MiddlewareError::new(self.clone()))
}
}
})
}
}
mod status_code {
use proc_macro::TokenStream;
use quote::quote;
pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
let status_code_arms = super::shared::get_arms(ast);
TokenStream::from(quote! {
impl crate::routes::schema::PartialStatusCode for #name {
fn status_code(&self) -> ::actix_web::http::StatusCode {
match self {
#(#status_code_arms)*
}
} }
} }
}) })
@@ -130,7 +109,7 @@ mod responder_json {
} }
} }
mod into_response_error { mod ok_response {
use proc_macro::TokenStream; use proc_macro::TokenStream;
use quote::quote; use quote::quote;
@@ -138,46 +117,37 @@ mod into_response_error {
let name = &ast.ident; let name = &ast.ident;
TokenStream::from(quote! { TokenStream::from(quote! {
impl ::core::convert::Into<crate::routes::schema::ResponseError<#name>> for #name { impl crate::routes::schema::PartialOkResponse for #name {}
fn into(self) -> crate::routes::schema::ResponseError<#name> {
crate::routes::schema::ResponseError {
code: self,
message: ::core::option::Option::None,
}
}
}
impl<T> crate::routes::schema::IntoResponseAsError<T> for #name
where
T: ::serde::ser::Serialize + ::utoipa::PartialSchema {}
})
}
pub fn fmt_named(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
TokenStream::from(quote! {
impl ::core::convert::Into<crate::routes::schema::ResponseError<#name>> for #name {
fn into(self) -> crate::routes::schema::ResponseError<#name> {
crate::routes::schema::ResponseError {
message: ::core::option::Option::Some(format!("{}", self)),
code: self,
}
}
}
impl<T> crate::routes::schema::IntoResponseAsError<T> for #name
where
T: ::serde::ser::Serialize + ::utoipa::PartialSchema {}
}) })
} }
} }
#[proc_macro_derive(ResponseErrorMessage, attributes(status_code))] mod err_response {
pub fn rem_derive(input: TokenStream) -> TokenStream { use proc_macro::TokenStream;
use quote::quote;
pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
let status_code_arms = super::shared::get_arms(ast);
TokenStream::from(quote! {
impl crate::routes::schema::PartialErrResponse for #name {
fn status_code(&self) -> ::actix_web::http::StatusCode {
match self {
#(#status_code_arms)*
}
}
}
})
}
}
#[proc_macro_derive(MiddlewareError, attributes(status_code))]
pub fn moddleware_error_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap(); let ast = syn::parse(input).unwrap();
response_error_message::fmt(&ast) middleware_error::fmt(&ast)
} }
#[proc_macro_derive(ResponderJson)] #[proc_macro_derive(ResponderJson)]
@@ -187,23 +157,16 @@ pub fn responser_json_derive(input: TokenStream) -> TokenStream {
responder_json::fmt(&ast) responder_json::fmt(&ast)
} }
#[proc_macro_derive(IntoResponseError)] #[proc_macro_derive(OkResponse)]
pub fn into_response_error_derive(input: TokenStream) -> TokenStream { pub fn ok_response_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap(); let ast = syn::parse(input).unwrap();
into_response_error::fmt(&ast) ok_response::fmt(&ast)
} }
#[proc_macro_derive(IntoResponseErrorNamed)] #[proc_macro_derive(ErrResponse, attributes(status_code))]
pub fn into_response_error_named_derive(input: TokenStream) -> TokenStream { pub fn err_response_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap(); let ast = syn::parse(input).unwrap();
into_response_error::fmt_named(&ast) err_response::fmt(&ast)
}
#[proc_macro_derive(StatusCode, attributes(status_code))]
pub fn status_code_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap();
status_code::fmt(&ast)
} }

View File

@@ -0,0 +1,2 @@
-- Revert Telegram auth support: drop the uniqueness constraint first,
-- then the column itself.
ALTER TABLE users DROP CONSTRAINT users_telegram_id_key;
ALTER TABLE users DROP COLUMN telegram_id;

View File

@@ -0,0 +1,2 @@
-- Telegram auth support: nullable Telegram account id, unique per user.
ALTER TABLE users ADD telegram_id int8 NULL;
ALTER TABLE users ADD CONSTRAINT users_telegram_id_key UNIQUE (telegram_id);

View File

@@ -0,0 +1,2 @@
-- Revert nullable passwords: backfill NULLs with an empty string so the
-- NOT NULL constraint can be restored.
UPDATE users SET "password" = '' WHERE "password" IS NULL;
ALTER TABLE users ALTER COLUMN "password" SET NOT NULL;

View File

@@ -0,0 +1 @@
-- Allow password-less accounts (e.g. Telegram-only authentication).
ALTER TABLE users ALTER COLUMN "password" DROP NOT NULL;

View File

@@ -0,0 +1,3 @@
-- Revert: restore the old "version" column name and its NOT NULL constraint
-- (NULLs are backfilled with an empty string first).
UPDATE users SET "android_version" = '' WHERE "android_version" IS NULL;
ALTER TABLE users ALTER COLUMN "android_version" SET NOT NULL;
ALTER TABLE users RENAME COLUMN android_version TO "version";

View File

@@ -0,0 +1,2 @@
-- Rename "version" to the clearer android_version and make it nullable,
-- since non-Android clients have no app version to report.
ALTER TABLE users RENAME COLUMN "version" TO android_version;
ALTER TABLE users ALTER COLUMN android_version DROP NOT NULL;

View File

@@ -0,0 +1,2 @@
-- Revert nullable "group": backfill NULLs, then restore NOT NULL.
UPDATE users SET "group" = '' WHERE "group" IS NULL;
ALTER TABLE users ALTER COLUMN "group" SET NOT NULL;

View File

@@ -0,0 +1 @@
-- Allow users without a selected group.
ALTER TABLE users ALTER COLUMN "group" DROP NOT NULL;

View File

@@ -0,0 +1,2 @@
-- Revert nullable access_token: backfill NULLs, then restore NOT NULL.
UPDATE users SET "access_token" = '' WHERE "access_token" IS NULL;
ALTER TABLE users ALTER COLUMN "access_token" SET NOT NULL;

View File

@@ -0,0 +1 @@
-- Allow users without an issued access token.
ALTER TABLE users ALTER COLUMN "access_token" DROP NOT NULL;

View File

@@ -0,0 +1,26 @@
# Standalone crate that parses the institution's XLS schedule into typed data.
[package]
name = "schedule-parser"
version = "0.1.0"
edition = "2024"
[features]
# Exposes the `test_utils` module (fixture loader) to dependent crates.
test-utils = []
[dependencies]
# XLS workbook reading
calamine = "0.26"
chrono = { version = "0.4", features = ["serde"] }
derive_more = { version = "2", features = ["full"] }
# Error reporting for unguessable lesson types (release builds only)
sentry = "0.38"
serde = { version = "1.0.219", features = ["serde"] }
serde_repr = "0.1.20"
regex = "1.11.1"
utoipa = { version = "5", features = ["chrono"] }
# Fuzzy (Levenshtein) matching for lesson-type guessing
strsim = "0.11.1"
log = "0.4.26"
[dev-dependencies]
criterion = "0.6"
[[bench]]
name = "parse"
harness = false

View File

@@ -1,9 +1,9 @@
use criterion::{Criterion, criterion_group, criterion_main}; use criterion::{Criterion, criterion_group, criterion_main};
use schedule_parser_rusted::parser::parse_xls; use schedule_parser::parse_xls;
pub fn bench_parse_xls(c: &mut Criterion) { pub fn bench_parse_xls(c: &mut Criterion) {
let buffer: Vec<u8> = include_bytes!("../schedule.xls").to_vec(); let buffer: Vec<u8> = include_bytes!("../../schedule.xls").to_vec();
c.bench_function("parse_xls", |b| b.iter(|| parse_xls(&buffer).unwrap())); c.bench_function("parse_xls", |b| b.iter(|| parse_xls(&buffer).unwrap()));
} }

800
schedule-parser/src/lib.rs Normal file
View File

@@ -0,0 +1,800 @@
use crate::LessonParseResult::{Lessons, Street};
use crate::schema::LessonType::Break;
use crate::schema::internal::{BoundariesCellInfo, DayCellInfo, GroupCellInfo};
use crate::schema::{
Day, ErrorCell, ErrorCellPos, Lesson, LessonBoundaries, LessonSubGroup, LessonType, ParseError,
ParseResult, ScheduleEntry,
};
use crate::worksheet::WorkSheet;
use calamine::{Reader, Xls, open_workbook_from_rs};
use chrono::{DateTime, Duration, NaiveDate, NaiveTime, Utc};
use regex::Regex;
use std::collections::HashMap;
use std::io::Cursor;
use std::sync::LazyLock;
mod macros;
pub mod schema;
mod worksheet;
/// Obtaining a "skeleton" schedule from the working sheet.
///
/// Scans the first column for day headers (`"<name> <dd.mm.yyyy>"`) and, once
/// the day column is located, reads the group names from the header row right
/// above the first day cell.
///
/// # Arguments
///
/// * `worksheet`: document.
///
/// # Errors
///
/// [`ParseError::UnknownWorkSheetRange`] when the sheet reports no usable range.
///
/// # Panics
///
/// Panics if no day in the sheet has a parsable date (nothing to anchor
/// the date-recovery passes to).
fn parse_skeleton(
    worksheet: &WorkSheet,
) -> Result<(Vec<DayCellInfo>, Vec<GroupCellInfo>), ParseError> {
    let mut groups: Vec<GroupCellInfo> = Vec::new();
    let mut days: Vec<(u32, String, Option<DateTime<Utc>>)> = Vec::new();
    let worksheet_start = worksheet.start().ok_or(ParseError::UnknownWorkSheetRange)?;
    let worksheet_end = worksheet.end().ok_or(ParseError::UnknownWorkSheetRange)?;
    let mut row = worksheet_start.0;
    while row < worksheet_end.0 {
        row += 1;
        let day_full_name = or_continue!(worksheet.get_string_from_cell(row, 0));
        // The group header row sits directly above the first day cell;
        // parse it once, as soon as the day column is found.
        if groups.is_empty() {
            // step back to the header row
            row -= 1;
            for column in (worksheet_start.1 + 2)..=worksheet_end.1 {
                groups.push(GroupCellInfo {
                    column,
                    name: or_continue!(worksheet.get_string_from_cell(row, column)),
                });
            }
            // return to the current (day) row
            row += 1;
        }
        let (day_name, day_date) = {
            // "<day name> <dd.mm.yyyy>": the date starts after the first space
            // at position >= 10; anything else ends the day list.
            let space_index = match day_full_name.find(' ') {
                Some(index) => {
                    if index < 10 {
                        break;
                    } else {
                        index
                    }
                }
                None => break,
            };
            let name = day_full_name[..space_index].to_string();
            let date_slice = &day_full_name[space_index + 1..];
            // A mistyped date yields `None` here and is recovered below
            // from a neighbouring day.
            let date = NaiveDate::parse_from_str(date_slice, "%d.%m.%Y")
                .map(|date| date.and_time(NaiveTime::default()).and_utc())
                .ok();
            (name, date)
        };
        days.push((row, day_name, day_date));
    }
    // Recover unparsable dates from neighbouring days: a missing date is one
    // day before its successor (backward pass, right-to-left so chains of
    // missing dates resolve) or one day after its predecessor (forward pass,
    // which also covers missing dates at the end of the week).
    // Bounds are derived from `days.len()` so short weeks cannot index
    // out of range.
    for i in (1..days.len()).rev() {
        if days[i - 1].2.is_none() {
            if let Some(next) = days[i].2 {
                days[i - 1].2 = Some(next - Duration::days(1));
            }
        }
    }
    for i in 1..days.len() {
        if days[i].2.is_none() {
            if let Some(prev) = days[i - 1].2 {
                days[i].2 = Some(prev + Duration::days(1));
            }
        }
    }
    let days = days
        .into_iter()
        .map(|day| DayCellInfo {
            row: day.0,
            column: 0,
            name: day.1,
            // Only `None` when *no* day had a parsable date; such a sheet
            // is considered malformed.
            date: day.2.expect("at least one day must have a parsable date"),
        })
        .collect();
    Ok((days, groups))
}
/// The result of obtaining a lesson from the cell.
enum LessonParseResult {
    /// List of lessons, one or two entries long.
    ///
    /// Contains a single lesson when it is the first one of the day;
    /// otherwise a synthesized break lesson is followed by the lesson itself.
    Lessons(Vec<Lesson>),
    /// Street on which the Polytechnic building is located
    /// (the cell announced a campus change instead of a lesson).
    Street(String),
}
// noinspection GrazieInspection
/// Obtaining a non-standard type of lesson by name.
///
/// Performs a fuzzy lookup (Levenshtein distance <= 4) of `text` against the
/// known lesson-type names, so minor typos in the schedule still match.
///
/// # Arguments
///
/// * `text`: raw lesson-type text taken from the cell.
///
/// returns: the closest matching [`LessonType`], or `None` when nothing is
/// within the allowed edit distance.
fn guess_lesson_type(text: &str) -> Option<LessonType> {
    static MAP: LazyLock<HashMap<&str, LessonType>> = LazyLock::new(|| {
        HashMap::from([
            ("консультация", LessonType::Consultation),
            ("самостоятельная работа", LessonType::IndependentWork),
            ("зачет", LessonType::Exam),
            ("зачет с оценкой", LessonType::ExamWithGrade),
            ("экзамен", LessonType::ExamDefault),
            ("курсовой проект", LessonType::CourseProject),
            ("защита курсового проекта", LessonType::CourseProjectDefense),
        ])
    });
    let name_lower = text.to_lowercase();
    MAP.iter()
        .map(|(name, lesson_type)| (lesson_type, strsim::levenshtein(name, &name_lower)))
        .filter(|(_, score)| *score <= 4)
        .min_by_key(|(_, score)| *score)
        .map(|(lesson_type, _)| lesson_type.clone())
}
/// Getting a pair or street from a cell.
///
/// Reads the cell at the slot row / group column and returns either:
/// * an empty lesson list (the cell is blank),
/// * the street of another campus (the cell matches the street pattern),
/// * one lesson (the first lesson of the day), or
/// * a synthesized break lesson followed by the lesson itself.
///
/// # Arguments
///
/// * `worksheet`: document.
/// * `day`: the day built so far (used to attach a break after the previous lesson).
/// * `day_boundaries`: time boundaries of every lesson slot of the day.
/// * `lesson_boundaries`: time boundaries of the slot being parsed.
/// * `group_column`: column of the group being parsed.
fn parse_lesson(
    worksheet: &WorkSheet,
    day: &Day,
    day_boundaries: &Vec<BoundariesCellInfo>,
    lesson_boundaries: &BoundariesCellInfo,
    group_column: u32,
) -> Result<LessonParseResult, ParseError> {
    let row = lesson_boundaries.xls_range.0.0;
    let name = {
        let cell_data = match worksheet.get_string_from_cell(row, group_column) {
            Some(x) => x,
            None => return Ok(Lessons(Vec::new())),
        };
        // "<Word>, <number>" / "<Word> <number>": the cell holds a campus
        // street instead of a lesson.
        static OTHER_STREET_RE: LazyLock<Regex> =
            LazyLock::new(|| Regex::new(r"^[А-Я][а-я]+[,\s]\d+$").unwrap());
        if OTHER_STREET_RE.is_match(&cell_data) {
            return Ok(Street(cell_data));
        }
        cell_data
    };
    // A lesson cell may be merged across several slots; take its real extent.
    let cell_range = worksheet.get_merge_from_start(row, group_column);
    let (default_range, lesson_time) = {
        // Slot whose merged range ends on the same row as this cell.
        let end_time_arr = day_boundaries
            .iter()
            .filter(|time| time.xls_range.1.0 == cell_range.1.0)
            .collect::<Vec<&BoundariesCellInfo>>();
        let end_time =
            end_time_arr
                .first()
                .ok_or(ParseError::LessonTimeNotFound(ErrorCellPos {
                    row,
                    column: group_column,
                }))?;
        // Lesson indexes exist only for "Default" slots ("N пара");
        // NOTE(review): assumes the matching end slot is also Default —
        // otherwise the inner unwrap panics. Confirm against real sheets.
        let range: Option<[u8; 2]> = if lesson_boundaries.default_index != None {
            let default = lesson_boundaries.default_index.unwrap() as u8;
            Some([default, end_time.default_index.unwrap() as u8])
        } else {
            None
        };
        let time = LessonBoundaries {
            start: lesson_boundaries.time_range.start,
            end: end_time.time_range.end,
        };
        Ok((range, time))
    }?;
    let (name, mut subgroups, lesson_type) = parse_name_and_subgroups(&name)?;
    {
        // Cabinets live in the column right of the lesson cell.
        let cabinets: Vec<String> = parse_cabinets(
            worksheet,
            (cell_range.0.0, cell_range.1.0),
            group_column + 1,
        );
        let cab_count = cabinets.len();
        if cab_count == 1 {
            // Assign this cabinet to all subgroups.
            let cab = Some(cabinets.get(0).unwrap().clone());
            for subgroup in &mut subgroups {
                if let Some(subgroup) = subgroup {
                    subgroup.cabinet = cab.clone()
                }
            }
        } else if cab_count == 2 {
            // Two cabinets: pad the subgroup list to two entries, then pair
            // cabinets with subgroups positionally, creating teacher-less
            // subgroups where a slot is empty.
            while subgroups.len() < cab_count {
                subgroups.push(subgroups.last().unwrap_or(&None).clone());
            }
            for i in 0..cab_count {
                let subgroup = subgroups.get_mut(i).unwrap();
                let cabinet = Some(cabinets.get(i).unwrap().clone());
                match subgroup {
                    None => {
                        let _ = subgroup.insert(LessonSubGroup {
                            teacher: None,
                            cabinet,
                        });
                    }
                    Some(subgroup) => {
                        subgroup.cabinet = cabinet;
                    }
                }
            }
        }
    };
    let lesson = Lesson {
        lesson_type: lesson_type.unwrap_or(lesson_boundaries.lesson_type.clone()),
        range: default_range,
        name: Some(name),
        time: lesson_time,
        // Two empty slots mean "no subgroup information at all".
        subgroups: if subgroups.len() == 2
            && subgroups.get(0).unwrap().is_none()
            && subgroups.get(1).unwrap().is_none()
        {
            None
        } else {
            Some(subgroups)
        },
        group: None,
    };
    // The first lesson of the day is returned alone; otherwise a break is
    // synthesized spanning from the previous lesson's end to this one's start.
    let prev_lesson = if day.lessons.is_empty() {
        return Ok(Lessons(Vec::from([lesson])));
    } else {
        &day.lessons[day.lessons.len() - 1]
    };
    Ok(Lessons(Vec::from([
        Lesson {
            lesson_type: Break,
            range: None,
            name: None,
            time: LessonBoundaries {
                start: prev_lesson.time.end,
                end: lesson.time.start,
            },
            subgroups: Some(Vec::new()),
            group: None,
        },
        lesson,
    ])))
}
/// Obtaining a list of cabinets to the right of the lesson cell.
///
/// Scans `row_range` top-down and extracts at most two cabinet labels from
/// the first non-empty cell found (a merged lesson cell may list two
/// cabinets separated by a line break or a space).
///
/// # Arguments
///
/// * `worksheet`: document.
/// * `row_range`: rows spanned by the lesson cell (end exclusive).
/// * `column`: column holding the cabinets (lesson column + 1).
///
/// returns: zero, one or two cabinet strings.
fn parse_cabinets(worksheet: &WorkSheet, row_range: (u32, u32), column: u32) -> Vec<String> {
    for row in row_range.0..row_range.1 {
        let raw = or_continue!(worksheet.get_string_from_cell(row, column));
        // Only the first non-empty cell is used.
        return raw
            .replace('\n', " ")
            .split(' ')
            .take(2)
            .map(|part| part.trim().to_string())
            .collect();
    }
    Vec::new()
}
//noinspection GrazieInspection
/// Getting the "pure" name of the lesson and list of teachers from the text of the lesson cell.
///
/// A lesson cell has up to three parts:
/// 1. The lesson name itself.
/// 2. The list of teachers and subgroup numbers.
/// 3. A "modifier" (most often the lesson type, e.g. "экзамен").
///
/// returns: (clean name, subgroup slots, guessed lesson type). The subgroup
/// list is empty when no teachers were recognised, has one element when a
/// single teacher covers the whole group, or two positional slots otherwise.
fn parse_name_and_subgroups(
    text: &String,
) -> Result<(String, Vec<Option<LessonSubGroup>>, Option<LessonType>), ParseError> {
    // Regular expression extracting teacher names and subgroup numbers (part 2):
    // (?:[А-Я][а-я]+\s?(?:[А-Я][\s.]*){2}(?:\(\d\s?[а-я]+\))?(?:, )?)+[\s.]*
    //
    // Breakdown:
    // (?:
    //     [А-Я][а-я]+         - Surname.
    //     \s?                 - Optional stray space.
    //     (?:[А-Я][\s.]*){2}  - Initials, tolerating stray spaces and dots.
    //     (?:
    //         \(              - Subgroup opening bracket.
    //         \s?             - Optional stray space.
    //         \d              - Subgroup number.
    //         \s?             - Optional stray space.
    //         [а-я\s]+        - The word "подгруппа", typos tolerated.
    //         \)              - Subgroup closing bracket.
    //     )?                  - An explicit subgroup marker may be absent.
    //     (?:, )?             - Separator between entries.
    // )+
    // [\s.]*                  - Swallow trailing junk so it is not passed to part 3.
    static NAMES_REGEX: LazyLock<Regex> = LazyLock::new(|| {
        Regex::new(
            r"(?:[А-Я][а-я]+\s?(?:[А-Я][\s.]*){2}(?:\(\s*\d\s*[а-я\s]+\))?(?:[\s,]+)?){1,2}+[\s.,]*",
        )
        .unwrap()
    });
    // Whitespace normalisation.
    static CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\s\n\t]+").unwrap());
    let text = CLEAN_RE
        .replace(&text.replace(&[' ', '\t', '\n'], " "), " ")
        .to_string();
    let (lesson_name, subgroups, lesson_type) = match NAMES_REGEX.captures(&text) {
        Some(captures) => {
            let capture = captures.get(0).unwrap();
            let subgroups: Vec<Option<LessonSubGroup>> = {
                // Strip spaces/dots so "Иванов И. И." becomes "ИвановИИ".
                let src = capture.as_str().replace(&[' ', '.'], "");
                let mut shared_subgroup = false;
                let mut subgroups: [Option<LessonSubGroup>; 2] = [None, None];
                for name in src.split(',') {
                    let open_bracket_index = name.find('(');
                    // Explicit "(N подгруппа)" marker; 0 means "whole group".
                    let number: u8 = open_bracket_index
                        .map_or(0, |index| name[(index + 1)..(index + 2)].parse().unwrap());
                    let teacher_name = {
                        let name_end = open_bracket_index.unwrap_or_else(|| name.len());
                        // Rebuild "Surname I.I." from the stripped form: the last
                        // four bytes before `name_end` are the two initials
                        // (two 2-byte Cyrillic capitals). NOTE(review): relies on
                        // the regex guaranteeing Cyrillic initials; `.get` + unwrap
                        // panics on a boundary mismatch.
                        format!(
                            "{} {}.{}.",
                            name.get(..name_end - 4).unwrap(),
                            name.get(name_end - 4..name_end - 2).unwrap(),
                            name.get(name_end - 2..name_end).unwrap(),
                        )
                    };
                    let lesson = Some(LessonSubGroup {
                        cabinet: None,
                        teacher: Some(teacher_name),
                    });
                    match number {
                        0 => {
                            // One teacher for the whole group: collapse to a
                            // single shared subgroup and stop.
                            subgroups[0] = lesson;
                            subgroups[1] = None;
                            shared_subgroup = true;
                            break;
                        }
                        num => {
                            // 1 - 1 = 0 | 2 - 1 = 1 | 3 - 1 = 2 (schedule index to array index)
                            // 0 % 2 = 0 | 1 % 2 = 1 | 2 % 2 = 0 (clamp)
                            let normalised = (num - 1) % 2;
                            subgroups[normalised as usize] = lesson;
                        }
                    }
                }
                if shared_subgroup {
                    Vec::from([subgroups[0].take()])
                } else {
                    Vec::from(subgroups)
                }
            };
            let name = text[..capture.start()].trim().to_string();
            let extra = text[capture.end()..].trim().to_string();
            // Anything meaningful after the teacher list is the lesson type.
            let lesson_type = if extra.len() > 4 {
                let result = guess_lesson_type(&extra);
                if result.is_none() {
                    // Unknown modifiers are reported (Sentry in release,
                    // plain log in debug) but do not fail the parse.
                    #[cfg(not(debug_assertions))]
                    sentry::capture_message(
                        &*format!("Не удалось угадать тип пары '{}'!", extra),
                        sentry::Level::Warning,
                    );
                    #[cfg(debug_assertions)]
                    log::warn!("Не удалось угадать тип пары '{}'!", extra);
                }
                result
            } else {
                None
            };
            (name, subgroups, lesson_type)
        }
        None => (text, Vec::new(), None),
    };
    Ok((lesson_name, subgroups, lesson_type))
}
/// Getting the start and end of a pair from a cell in the first column of a document.
///
/// Extracts an "H.MM-H.MM" time range and anchors it to `date`. The sheet's
/// times are local (UTC+4), hence the 4-hour shift when building UTC values.
///
/// # Arguments
///
/// * `cell_data`: text in cell.
/// * `date`: date of the current day (midnight, UTC).
///
/// returns: the lesson boundaries, or `None` when the cell has no time range.
fn parse_lesson_boundaries_cell(
    cell_data: &str,
    date: DateTime<Utc>,
) -> Option<LessonBoundaries> {
    static TIME_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"(\d+\.\d+)-(\d+\.\d+)").unwrap());
    let captures = TIME_RE.captures(cell_data)?;
    // "H.MM" + day start -> absolute UTC timestamp. The unwraps are safe:
    // the regex guarantees a dot-separated pair of digit runs.
    let to_utc = |part: &str| -> DateTime<Utc> {
        let (hours, minutes) = part.split_once('.').unwrap();
        date + Duration::hours(hours.parse::<i64>().unwrap() - 4)
            + Duration::minutes(minutes.parse::<i64>().unwrap())
    };
    Some(LessonBoundaries {
        start: to_utc(captures.get(1).unwrap().as_str()),
        end: to_utc(captures.get(2).unwrap().as_str()),
    })
}
/// Parse the column of the document to obtain a list of day's lesson boundaries.
///
/// # Arguments
///
/// * `worksheet`: document.
/// * `date`: date of the current day.
/// * `row_range`: row boundaries of the current day (end exclusive).
/// * `column`: column with the required data.
///
/// # Errors
///
/// [`ParseError::LessonBoundaries`] when a non-empty cell does not contain a
/// recognisable "H.MM-H.MM" time range.
fn parse_day_boundaries(
    worksheet: &WorkSheet,
    date: DateTime<Utc>,
    row_range: (u32, u32),
    column: u32,
) -> Result<Vec<BoundariesCellInfo>, ParseError> {
    let mut day_times: Vec<BoundariesCellInfo> = Vec::new();
    for row in row_range.0..row_range.1 {
        // Blank cells are skipped silently.
        let time_cell = if let Some(str) = worksheet.get_string_from_cell(row, column) {
            str
        } else {
            continue;
        };
        let lesson_time = parse_lesson_boundaries_cell(&time_cell, date.clone()).ok_or(
            ParseError::LessonBoundaries(ErrorCell::new(row, column, time_cell.clone())),
        )?;
        // Cells labelled "пара" are regular slots; everything else is extra.
        let lesson_type = if time_cell.contains("пара") {
            LessonType::Default
        } else {
            LessonType::Additional
        };
        // Ordinal of a regular slot, read from the leading character of the
        // cell (e.g. "1 пара ..."). NOTE(review): panics if a "пара" cell does
        // not start with a digit — confirm the sheets guarantee this.
        let default_index = if lesson_type == LessonType::Default {
            Some(
                time_cell
                    .chars()
                    .next()
                    .unwrap()
                    .to_string()
                    .parse::<u32>()
                    .unwrap(),
            )
        } else {
            None
        };
        day_times.push(BoundariesCellInfo {
            time_range: lesson_time,
            lesson_type,
            default_index,
            xls_range: worksheet.get_merge_from_start(row, column),
        });
    }
    Ok(day_times)
}
/// Parse the column of the document to obtain a list of week's lesson boundaries.
///
/// For every day in `week_markup`, reads the time column (one to the right of
/// the day column) from the day's first row down to the next day's first row
/// (or the end of the sheet for the last day).
///
/// # Arguments
///
/// * `worksheet`: document.
/// * `week_markup`: markup of the current week.
///
/// returns: per-day lists of slot boundaries, in the same order as `week_markup`.
fn parse_week_boundaries(
    worksheet: &WorkSheet,
    week_markup: &Vec<DayCellInfo>,
) -> Result<Vec<Vec<BoundariesCellInfo>>, ParseError> {
    let worksheet_end_row = worksheet.end().unwrap().0;
    let lesson_time_column = week_markup[0].column + 1;
    week_markup
        .iter()
        .enumerate()
        .map(|(day_index, day_markup)| {
            // Row where this day ends: the next day's first row, or the
            // last row of the document for the final day.
            let end_row = match week_markup.get(day_index + 1) {
                Some(next_day) => next_day.row,
                None => worksheet_end_row,
            };
            parse_day_boundaries(
                worksheet,
                day_markup.date.clone(),
                (day_markup.row, end_row),
                lesson_time_column,
            )
        })
        .collect()
}
/// Conversion of the list of lessons of groups into the list of lessons of teachers.
///
/// Builds, for every teacher mentioned in any subgroup, a schedule entry with
/// the same set of days as the source groups, where each copied lesson is
/// tagged with the group it belongs to.
///
/// # Arguments
///
/// * `groups`: schedule entries keyed by group name.
///
/// returns: schedule entries keyed by teacher name; empty when `groups` is empty.
fn convert_groups_to_teachers(
    groups: &HashMap<String, ScheduleEntry>,
) -> HashMap<String, ScheduleEntry> {
    let mut teachers: HashMap<String, ScheduleEntry> = HashMap::new();
    // No groups - no teachers (previously this panicked on `next().unwrap()`).
    let Some(first_group) = groups.values().next() else {
        return teachers;
    };
    // Template week: same day names/streets/dates, but no lessons.
    let empty_days: Vec<Day> = first_group
        .days
        .iter()
        .map(|day| Day {
            name: day.name.clone(),
            street: day.street.clone(),
            date: day.date.clone(),
            lessons: vec![],
        })
        .collect();
    for group in groups.values() {
        for (index, day) in group.days.iter().enumerate() {
            for group_lesson in &day.lessons {
                // Breaks are group-local filler, not teaching time.
                if group_lesson.lesson_type == Break {
                    continue;
                }
                let Some(subgroups) = group_lesson.subgroups.as_ref() else {
                    continue;
                };
                for subgroup in subgroups {
                    let teacher = match subgroup {
                        None => continue,
                        Some(subgroup) => match &subgroup.teacher {
                            None => continue,
                            Some(teacher) => teacher,
                        },
                    };
                    // Placeholder used when a cell could not be parsed.
                    if teacher == "Ошибка в расписании" {
                        continue;
                    }
                    // One entry lookup instead of contains_key + insert + get_mut.
                    let teacher_day = teachers
                        .entry(teacher.clone())
                        .or_insert_with(|| ScheduleEntry {
                            name: teacher.clone(),
                            days: empty_days.to_vec(),
                        })
                        .days
                        .get_mut(index)
                        .unwrap();
                    teacher_day.lessons.push({
                        let mut lesson = group_lesson.clone();
                        lesson.group = Some(group.name.clone());
                        lesson
                    });
                }
            }
        }
    }
    // Lessons were collected per group; order them by their slot index within
    // each day. Lessons without an index (e.g. `Additional` slots) sort last
    // instead of panicking on unwrap as before.
    teachers.iter_mut().for_each(|(_, teacher)| {
        teacher.days.iter_mut().for_each(|day| {
            day.lessons
                .sort_by_key(|lesson| lesson.range.map_or(u8::MAX, |range| range[1]))
        })
    });
    teachers
}
/// Reading XLS Document from the buffer and converting it into the schedule ready to use.
///
/// # Arguments
///
/// * `buffer`: XLS data containing schedule.
///
/// returns: Result<ParseResult, ParseError>
///
/// # Examples
///
/// ```
/// use schedule_parser::parse_xls;
///
/// let result = parse_xls(&include_bytes!("../../schedule.xls").to_vec());
///
/// assert!(result.is_ok(), "{}", result.err().unwrap());
///
/// assert_ne!(result.as_ref().unwrap().groups.len(), 0);
/// assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
/// ```
pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
let cursor = Cursor::new(&buffer);
let mut workbook: Xls<_> =
open_workbook_from_rs(cursor).map_err(|e| ParseError::BadXLS(std::sync::Arc::new(e)))?;
let worksheet = {
let (worksheet_name, worksheet) = workbook
.worksheets()
.first()
.ok_or(ParseError::NoWorkSheets)?
.clone();
let worksheet_merges = workbook
.worksheet_merge_cells(&*worksheet_name)
.ok_or(ParseError::NoWorkSheets)?;
WorkSheet {
data: worksheet,
merges: worksheet_merges,
}
};
let (week_markup, groups_markup) = parse_skeleton(&worksheet)?;
let week_boundaries = parse_week_boundaries(&worksheet, &week_markup)?;
let mut groups: HashMap<String, ScheduleEntry> = HashMap::new();
for group_markup in groups_markup {
let mut group = ScheduleEntry {
name: group_markup.name,
days: Vec::new(),
};
for day_index in 0..(&week_markup).len() {
let day_markup = &week_markup[day_index];
let mut day = Day {
name: day_markup.name.clone(),
street: None,
date: day_markup.date,
lessons: Vec::new(),
};
let day_boundaries = &week_boundaries[day_index];
for lesson_boundaries in day_boundaries {
match &mut parse_lesson(
&worksheet,
&day,
&day_boundaries,
&lesson_boundaries,
group_markup.column,
)? {
Lessons(lesson) => day.lessons.append(lesson),
Street(street) => day.street = Some(street.to_owned()),
}
}
group.days.push(day);
}
groups.insert(group.name.clone(), group);
}
Ok(ParseResult {
teachers: convert_groups_to_teachers(&groups),
groups,
})
}
#[cfg(any(test, feature = "test-utils"))]
pub mod test_utils {
    use super::*;
    /// Parses the bundled `schedule.xls` fixture.
    ///
    /// Shared by the unit tests, the benchmark and (via the `test-utils`
    /// feature) dependent crates' tests.
    pub fn test_result() -> Result<ParseResult, ParseError> {
        parse_xls(&include_bytes!("../../schedule.xls").to_vec())
    }
}
#[cfg(test)]
pub mod tests {
    /// Smoke test: the bundled fixture parses and yields both the group
    /// and the teacher views.
    #[test]
    fn read() {
        let result = super::test_utils::test_result();
        assert!(result.is_ok(), "{}", result.err().unwrap());
        assert_ne!(result.as_ref().unwrap().groups.len(), 0);
        assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
    }
    /// A lesson split into two subgroups must keep one cabinet per subgroup.
    /// The expected values are taken from the bundled fixture.
    #[test]
    fn test_split_lesson() {
        let result = super::test_utils::test_result();
        assert!(result.is_ok(), "{}", result.err().unwrap());
        let result = result.unwrap();
        assert!(result.groups.contains_key("ИС-214/23"));
        let group = result.groups.get("ИС-214/23").unwrap();
        // Day index 3 is Thursday of the fixture week.
        let thursday = group.days.get(3).unwrap();
        assert_eq!(thursday.lessons.len(), 1);
        let lesson = &thursday.lessons[0];
        assert_eq!(lesson.range.unwrap()[1], 3);
        assert!(lesson.subgroups.is_some());
        let subgroups = lesson.subgroups.as_ref().unwrap();
        assert_eq!(subgroups.len(), 2);
        assert_eq!(
            subgroups[0].as_ref().unwrap().cabinet,
            Some("44".to_string())
        );
        assert_eq!(
            subgroups[1].as_ref().unwrap().cabinet,
            Some("43".to_string())
        );
    }
}

View File

@@ -0,0 +1,21 @@
/// Unwraps an `Option`, skipping to the next loop iteration on `None`.
///
/// Must be used inside a loop body.
#[macro_export]
macro_rules! or_continue {
    ( $e:expr ) => {{
        match $e {
            Some(value) => value,
            None => continue,
        }
    }};
}
/// Unwraps an `Option`, exiting the enclosing loop on `None`.
///
/// Must be used inside a loop body.
#[macro_export]
macro_rules! or_break {
    ( $e:expr ) => {{
        match $e {
            Some(value) => value,
            None => break,
        }
    }};
}

View File

@@ -6,9 +6,53 @@ use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use utoipa::ToSchema; use utoipa::ToSchema;
/// Internal markup structures produced while dissecting the worksheet;
/// never exposed outside the crate.
pub(crate) mod internal {
    use crate::schema::{LessonBoundaries, LessonType};
    use chrono::{DateTime, Utc};
    /// Data cell storing the group name.
    pub struct GroupCellInfo {
        /// Column index.
        pub column: u32,
        /// Text in the cell (the group name).
        pub name: String,
    }
    /// Data cell storing the day header.
    pub struct DayCellInfo {
        /// Row index.
        pub row: u32,
        /// Column index.
        pub column: u32,
        /// Day name.
        pub name: String,
        /// Date of the day.
        pub date: DateTime<Utc>,
    }
    /// Data on the time of lessons from the second column of the schedule.
    pub struct BoundariesCellInfo {
        /// Time segment of the lesson.
        pub time_range: LessonBoundaries,
        /// Type of lesson.
        pub lesson_type: LessonType,
        /// The lesson index (present only for regular slots).
        pub default_index: Option<u32>,
        /// The merged-cell frame of the cell.
        pub xls_range: ((u32, u32), (u32, u32)),
    }
}
/// The beginning and end of the lesson. /// The beginning and end of the lesson.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)] #[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonTime { pub struct LessonBoundaries {
/// The beginning of a lesson. /// The beginning of a lesson.
pub start: DateTime<Utc>, pub start: DateTime<Utc>,
@@ -44,18 +88,21 @@ pub enum LessonType {
/// Экзамен. /// Экзамен.
ExamDefault, ExamDefault,
/// Курсовой проект.
CourseProject,
/// Защита курсового проекта.
CourseProjectDefense,
} }
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)] #[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonSubGroup { pub struct LessonSubGroup {
/// Index of subgroup.
pub number: u8,
/// Cabinet, if present. /// Cabinet, if present.
pub cabinet: Option<String>, pub cabinet: Option<String>,
/// Full name of the teacher. /// Full name of the teacher.
pub teacher: String, pub teacher: Option<String>,
} }
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)] #[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
@@ -66,17 +113,17 @@ pub struct Lesson {
pub lesson_type: LessonType, pub lesson_type: LessonType,
/// Lesson indexes, if present. /// Lesson indexes, if present.
pub default_range: Option<[u8; 2]>, pub range: Option<[u8; 2]>,
/// Name. /// Name.
pub name: Option<String>, pub name: Option<String>,
/// The beginning and end. /// The beginning and end.
pub time: LessonTime, pub time: LessonBoundaries,
/// List of subgroups. /// List of subgroups.
#[serde(rename = "subGroups")] #[serde(rename = "subgroups")]
pub subgroups: Option<Vec<LessonSubGroup>>, pub subgroups: Option<Vec<Option<LessonSubGroup>>>,
/// Group name, if this is a schedule for teachers. /// Group name, if this is a schedule for teachers.
pub group: Option<String>, pub group: Option<String>,
@@ -153,9 +200,9 @@ pub enum ParseError {
#[display("There is no data on work sheet boundaries.")] #[display("There is no data on work sheet boundaries.")]
UnknownWorkSheetRange, UnknownWorkSheetRange,
/// Failed to read the beginning and end of the lesson from the line /// Failed to read the beginning and end of the lesson from the cell
#[display("Failed to read lesson start and end times from {_0}.")] #[display("Failed to read lesson start and end from {_0}.")]
GlobalTime(ErrorCell), LessonBoundaries(ErrorCell),
/// Not found the beginning and the end corresponding to the lesson. /// Not found the beginning and the end corresponding to the lesson.
#[display("No start and end times matching the lesson (at {_0}) was found.")] #[display("No start and end times matching the lesson (at {_0}) was found.")]
@@ -173,7 +220,7 @@ impl Serialize for ParseError {
ParseError::UnknownWorkSheetRange => { ParseError::UnknownWorkSheetRange => {
serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE") serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE")
} }
ParseError::GlobalTime(_) => serializer.serialize_str("GLOBAL_TIME"), ParseError::LessonBoundaries(_) => serializer.serialize_str("GLOBAL_TIME"),
ParseError::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"), ParseError::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"),
} }
} }

View File

@@ -0,0 +1,58 @@
use regex::Regex;
use std::ops::Deref;
use std::sync::LazyLock;
/// XLS WorkSheet data.
pub struct WorkSheet {
    /// Raw cell grid of the sheet.
    pub data: calamine::Range<calamine::Data>,
    /// Merged-cell regions; needed to recover cells spanning several rows/columns.
    pub merges: Vec<calamine::Dimensions>,
}
impl Deref for WorkSheet {
    type Target = calamine::Range<calamine::Data>;
    /// Delegates cell access directly to the underlying calamine range.
    fn deref(&self) -> &Self::Target {
        &self.data
    }
}
impl WorkSheet {
    /// Getting a line from the required cell.
    ///
    /// Returns the cell text with newlines and repeated whitespace collapsed
    /// to single spaces, or `None` when the cell is absent or blank.
    pub fn get_string_from_cell(&self, row: u32, col: u32) -> Option<String> {
        // `get` comes from the derefed calamine range.
        let cell_data = self.get((row as usize, col as usize))?.to_string();
        // Cheap early exit before the regex passes below.
        if cell_data.trim().is_empty() {
            return None;
        }
        static NL_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\n\r]+").unwrap());
        static SP_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\s+").unwrap());
        let trimmed_data = SP_RE
            .replace_all(&NL_RE.replace_all(&cell_data, " "), " ")
            .trim()
            .to_string();
        if trimmed_data.is_empty() {
            None
        } else {
            Some(trimmed_data)
        }
    }
    /// Obtaining the boundaries of the cell along its upper left coordinate.
    ///
    /// For a merged cell the end coordinate is made exclusive (`+ 1`);
    /// a plain cell yields a 1x1 range.
    pub fn get_merge_from_start(&self, row: u32, column: u32) -> ((u32, u32), (u32, u32)) {
        match self
            .merges
            .iter()
            .find(|merge| merge.start.0 == row && merge.start.1 == column)
        {
            Some(merge) => (merge.start, (merge.end.0 + 1, merge.end.1 + 1)),
            None => ((row, column), (row + 1, column + 1)),
        }
    }
}

Binary file not shown.

View File

@@ -1,88 +0,0 @@
use crate::parser::schema::ParseResult;
use crate::utility::hasher::DigestHasher;
use crate::xls_downloader::basic_impl::BasicXlsDownloader;
use actix_web::web;
use chrono::{DateTime, Utc};
use diesel::{Connection, PgConnection};
use firebase_messaging_rs::FCMClient;
use sha1::{Digest, Sha1};
use std::env;
use std::hash::Hash;
use std::sync::Mutex;
#[derive(Clone)]
pub struct Schedule {
pub etag: String,
pub fetched_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
pub parsed_at: DateTime<Utc>,
pub data: ParseResult,
}
#[derive(Clone)]
pub struct VkId {
pub client_id: i32,
pub redirect_url: String,
}
impl VkId {
    /// Builds the VK ID settings from the `VKID_CLIENT_ID` and
    /// `VKID_REDIRECT_URI` environment variables.
    ///
    /// # Panics
    /// Panics when either variable is missing, or when `VKID_CLIENT_ID`
    /// is not a valid integer.
    pub fn new() -> Self {
        let client_id = env::var("VKID_CLIENT_ID")
            .expect("VKID_CLIENT_ID must be set")
            .parse()
            .expect("VKID_CLIENT_ID must be integer");
        let redirect_url = env::var("VKID_REDIRECT_URI").expect("VKID_REDIRECT_URI must be set");
        Self {
            client_id,
            redirect_url,
        }
    }
}
impl Schedule {
    /// Computes a SHA-1 digest over the schedule's identity: the source
    /// file's ETag plus every parsed teacher and group entry.
    ///
    /// Used to detect whether the cached schedule content has changed.
    pub fn hash(&self) -> String {
        let mut hasher = DigestHasher::from(Sha1::new());
        self.etag.hash(&mut hasher);
        self.data.teachers.iter().for_each(|e| e.hash(&mut hasher));
        self.data.groups.iter().for_each(|e| e.hash(&mut hasher));
        hasher.finalize()
    }
}
/// Common data provided to endpoints.
pub struct AppState {
    /// Downloader for the schedule XLS file.
    pub downloader: Mutex<BasicXlsDownloader>,
    /// Currently cached schedule, `None` until the first successful fetch.
    pub schedule: Mutex<Option<Schedule>>,
    /// PostgreSQL connection shared across handlers.
    pub database: Mutex<PgConnection>,
    /// VK ID client settings.
    pub vk_id: VkId,
    pub fcm_client: Option<Mutex<FCMClient>>, // does not change at runtime, so the Mutex is optional rather than the data inside it.
}
impl AppState {
    /// Builds the application state: connects to PostgreSQL using
    /// `DATABASE_URL`, reads the VK ID settings, and — only when
    /// `GOOGLE_APPLICATION_CREDENTIALS` is present — creates an FCM client.
    ///
    /// # Panics
    /// Panics when `DATABASE_URL` is missing, the database connection fails,
    /// the VK ID variables are invalid, or the FCM client cannot be created.
    pub async fn new() -> Self {
        let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
        let downloader = Mutex::new(BasicXlsDownloader::new());
        let database = Mutex::new(
            PgConnection::establish(&database_url)
                .unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
        );
        let vk_id = VkId::new();
        // FCM is only wired up when Google credentials are provided.
        let fcm_client = match env::var("GOOGLE_APPLICATION_CREDENTIALS") {
            Ok(_) => Some(Mutex::new(
                FCMClient::new().await.expect("FCM client must be created"),
            )),
            Err(_) => None,
        };
        Self {
            downloader,
            schedule: Mutex::new(None),
            database,
            vk_id,
            fcm_client,
        }
    }
}
/// Creates the shared [`AppState`] wrapped in actix's `web::Data` so it can
/// be cloned cheaply into every worker.
pub async fn app_state() -> web::Data<AppState> {
    web::Data::new(AppState::new().await)
}

View File

@@ -1,77 +1,82 @@
pub mod users { pub mod users {
use crate::app_state::AppState;
use crate::database::models::User; use crate::database::models::User;
use crate::database::schema::users::dsl::users; use crate::database::schema::users::dsl::users;
use crate::database::schema::users::dsl::*; use crate::database::schema::users::dsl::*;
use crate::utility::mutex::MutexScope; use crate::state::AppState;
use actix_web::web; use actix_web::web;
use diesel::{ExpressionMethods, QueryResult, insert_into}; use diesel::{ExpressionMethods, QueryResult, insert_into};
use diesel::{QueryDsl, RunQueryDsl}; use diesel::{QueryDsl, RunQueryDsl};
use diesel::{SaveChangesDsl, SelectableHelper}; use diesel::{SaveChangesDsl, SelectableHelper};
use std::ops::DerefMut;
pub fn get(state: &web::Data<AppState>, _id: &String) -> QueryResult<User> { pub async fn get(state: &web::Data<AppState>, _id: &String) -> QueryResult<User> {
state.database.scope(|conn| {
users users
.filter(id.eq(_id)) .filter(id.eq(_id))
.select(User::as_select()) .select(User::as_select())
.first(conn) .first(state.get_database().await.deref_mut())
})
} }
pub fn get_by_username(state: &web::Data<AppState>, _username: &String) -> QueryResult<User> { pub async fn get_by_username(
state.database.scope(|conn| { state: &web::Data<AppState>,
_username: &String,
) -> QueryResult<User> {
users users
.filter(username.eq(_username)) .filter(username.eq(_username))
.select(User::as_select()) .select(User::as_select())
.first(conn) .first(state.get_database().await.deref_mut())
})
} }
//noinspection RsTraitObligations //noinspection RsTraitObligations
pub fn get_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> QueryResult<User> { pub async fn get_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> QueryResult<User> {
state.database.scope(|conn| {
users users
.filter(vk_id.eq(_vk_id)) .filter(vk_id.eq(_vk_id))
.select(User::as_select()) .select(User::as_select())
.first(conn) .first(state.get_database().await.deref_mut())
}) }
//noinspection RsTraitObligations
pub async fn get_by_telegram_id(
state: &web::Data<AppState>,
_telegram_id: i64,
) -> QueryResult<User> {
users
.filter(telegram_id.eq(_telegram_id))
.select(User::as_select())
.first(state.get_database().await.deref_mut())
} }
//noinspection DuplicatedCode //noinspection DuplicatedCode
pub fn contains_by_username(state: &web::Data<AppState>, _username: &String) -> bool { pub async fn contains_by_username(state: &web::Data<AppState>, _username: &String) -> bool {
// и как это нахуй сократить блять примеров нихуя нет, нихуя не работает // и как это нахуй сократить блять примеров нихуя нет, нихуя не работает
// как меня этот раст заебал уже // как меня этот раст заебал уже
state.database.scope(|conn| {
match users match users
.filter(username.eq(_username)) .filter(username.eq(_username))
.count() .count()
.get_result::<i64>(conn) .get_result::<i64>(state.get_database().await.deref_mut())
{ {
Ok(count) => count > 0, Ok(count) => count > 0,
Err(_) => false, Err(_) => false,
} }
})
} }
//noinspection DuplicatedCode //noinspection DuplicatedCode
//noinspection RsTraitObligations //noinspection RsTraitObligations
pub fn contains_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> bool { pub async fn contains_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> bool {
state.database.scope(|conn| {
match users match users
.filter(vk_id.eq(_vk_id)) .filter(vk_id.eq(_vk_id))
.count() .count()
.get_result::<i64>(conn) .get_result::<i64>(state.get_database().await.deref_mut())
{ {
Ok(count) => count > 0, Ok(count) => count > 0,
Err(_) => false, Err(_) => false,
} }
})
} }
pub fn insert(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> { pub async fn insert(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> {
state insert_into(users)
.database .values(user)
.scope(|conn| insert_into(users).values(user).execute(conn)) .execute(state.get_database().await.deref_mut())
} }
/// Function declaration [User::save][UserSave::save]. /// Function declaration [User::save][UserSave::save].
@@ -113,51 +118,47 @@ pub mod users {
/// } /// }
/// } /// }
/// ``` /// ```
fn save(&self, state: &web::Data<AppState>) -> QueryResult<User>; async fn save(&self, state: &web::Data<AppState>) -> QueryResult<User>;
} }
/// Implementation of [UserSave][UserSave] trait. /// Implementation of [UserSave][UserSave] trait.
impl UserSave for User { impl UserSave for User {
fn save(&self, state: &web::Data<AppState>) -> QueryResult<User> { async fn save(&self, state: &web::Data<AppState>) -> QueryResult<User> {
state.database.scope(|conn| self.save_changes::<Self>(conn)) self.save_changes::<Self>(state.get_database().await.deref_mut())
} }
} }
#[cfg(test)] #[cfg(test)]
pub fn delete_by_username(state: &web::Data<AppState>, _username: &String) -> bool { pub async fn delete_by_username(state: &web::Data<AppState>, _username: &String) -> bool {
state.database.scope(|conn| { match diesel::delete(users.filter(username.eq(_username)))
match diesel::delete(users.filter(username.eq(_username))).execute(conn) { .execute(state.get_database().await.deref_mut())
{
Ok(count) => count > 0, Ok(count) => count > 0,
Err(_) => false, Err(_) => false,
} }
})
} }
#[cfg(test)] #[cfg(test)]
pub fn insert_or_ignore(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> { pub async fn insert_or_ignore(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> {
state.database.scope(|conn| {
insert_into(users) insert_into(users)
.values(user) .values(user)
.on_conflict_do_nothing() .on_conflict_do_nothing()
.execute(conn) .execute(state.get_database().await.deref_mut())
})
} }
} }
pub mod fcm { pub mod fcm {
use crate::app_state::AppState;
use crate::database::models::{FCM, User}; use crate::database::models::{FCM, User};
use crate::utility::mutex::MutexScope; use crate::state::AppState;
use actix_web::web; use actix_web::web;
use diesel::QueryDsl; use diesel::QueryDsl;
use diesel::RunQueryDsl; use diesel::RunQueryDsl;
use diesel::{BelongingToDsl, QueryResult, SelectableHelper}; use diesel::{BelongingToDsl, QueryResult, SelectableHelper};
use std::ops::DerefMut;
pub fn from_user(state: &web::Data<AppState>, user: &User) -> QueryResult<FCM> { pub async fn from_user(state: &web::Data<AppState>, user: &User) -> QueryResult<FCM> {
state.database.scope(|conn| {
FCM::belonging_to(&user) FCM::belonging_to(&user)
.select(FCM::as_select()) .select(FCM::as_select())
.get_result(conn) .get_result(state.get_database().await.deref_mut())
})
} }
} }

View File

@@ -1,3 +1,3 @@
pub mod schema;
pub mod models;
pub mod driver; pub mod driver;
pub mod models;
pub mod schema;

View File

@@ -38,22 +38,25 @@ pub struct User {
pub username: String, pub username: String,
/// BCrypt password hash. /// BCrypt password hash.
pub password: String, pub password: Option<String>,
/// ID of the linked VK account. /// ID of the linked VK account.
pub vk_id: Option<i32>, pub vk_id: Option<i32>,
/// JWT access token. /// JWT access token.
pub access_token: String, pub access_token: Option<String>,
/// Group. /// Group.
pub group: String, pub group: Option<String>,
/// Role. /// Role.
pub role: UserRole, pub role: UserRole,
/// Version of the installed Polytechnic+ application. /// Version of the installed Polytechnic+ application.
pub version: String, pub android_version: Option<String>,
/// ID of the linked Telegram account.
pub telegram_id: Option<i64>,
} }
#[derive( #[derive(

View File

@@ -21,12 +21,13 @@ diesel::table! {
users (id) { users (id) {
id -> Text, id -> Text,
username -> Text, username -> Text,
password -> Text, password -> Nullable<Text>,
vk_id -> Nullable<Int4>, vk_id -> Nullable<Int4>,
access_token -> Text, access_token -> Nullable<Text>,
group -> Text, group -> Nullable<Text>,
role -> UserRole, role -> UserRole,
version -> Text, android_version -> Nullable<Text>,
telegram_id -> Nullable<Int8>,
} }
} }

View File

@@ -1,9 +1,9 @@
use crate::app_state::AppState;
use crate::database::driver; use crate::database::driver;
use crate::database::models::{FCM, User}; use crate::database::models::{FCM, User};
use crate::extractors::base::{FromRequestSync, SyncExtractor}; use crate::extractors::base::{AsyncExtractor, FromRequestAsync};
use crate::state::AppState;
use crate::utility::jwt; use crate::utility::jwt;
use actix_macros::ResponseErrorMessage; use actix_macros::MiddlewareError;
use actix_web::body::BoxBody; use actix_web::body::BoxBody;
use actix_web::dev::Payload; use actix_web::dev::Payload;
use actix_web::http::header; use actix_web::http::header;
@@ -12,13 +12,13 @@ use derive_more::Display;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt::Debug; use std::fmt::Debug;
#[derive(Clone, Debug, Serialize, Deserialize, Display, ResponseErrorMessage)] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Display, MiddlewareError)]
#[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"] #[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum Error { pub enum Error {
/// There is no Authorization header in the request. /// There is no Authorization header or cookie in the request.
#[display("No Authorization header found")] #[display("No Authorization header or cookie found")]
NoHeader, NoHeaderOrCookieFound,
/// Unknown authorization type other than Bearer. /// Unknown authorization type other than Bearer.
#[display("Bearer token is required")] #[display("Bearer token is required")]
@@ -39,31 +39,60 @@ impl Error {
} }
} }
/// User extractor from request with Bearer access token. fn get_access_token_from_header(req: &HttpRequest) -> Result<String, Error> {
impl FromRequestSync for User { let header_value = req
type Error = actix_web::Error;
fn from_request_sync(req: &HttpRequest, _: &mut Payload) -> Result<Self, Self::Error> {
let authorization = req
.headers() .headers()
.get(header::AUTHORIZATION) .get(header::AUTHORIZATION)
.ok_or(Error::NoHeader.into_err())? .ok_or(Error::NoHeaderOrCookieFound)?
.to_str() .to_str()
.map_err(|_| Error::NoHeader.into_err())? .map_err(|_| Error::NoHeaderOrCookieFound)?
.to_string(); .to_string();
let parts: Vec<&str> = authorization.split(' ').collect(); let parts = header_value
.split_once(' ')
.ok_or(Error::UnknownAuthorizationType)?;
if parts.len() != 2 || parts[0] != "Bearer" { if parts.0 != "Bearer" {
return Err(Error::UnknownAuthorizationType.into_err()); Err(Error::UnknownAuthorizationType)
} else {
Ok(parts.1.to_string())
} }
}
let user_id = jwt::verify_and_decode(&parts[1].to_string()) fn get_access_token_from_cookies(req: &HttpRequest) -> Result<String, Error> {
let cookie = req
.cookie("access_token")
.ok_or(Error::NoHeaderOrCookieFound)?;
Ok(cookie.value().to_string())
}
/// User extractor from request with Bearer access token.
impl FromRequestAsync for User {
type Error = actix_web::Error;
async fn from_request_async(
req: &HttpRequest,
_payload: &mut Payload,
) -> Result<Self, Self::Error> {
let access_token = match get_access_token_from_header(req) {
Err(Error::NoHeaderOrCookieFound) => {
get_access_token_from_cookies(req).map_err(|error| error.into_err())?
}
Err(error) => {
return Err(error.into_err());
}
Ok(access_token) => access_token,
};
let user_id = jwt::verify_and_decode(&access_token)
.map_err(|_| Error::InvalidAccessToken.into_err())?; .map_err(|_| Error::InvalidAccessToken.into_err())?;
let app_state = req.app_data::<web::Data<AppState>>().unwrap(); let app_state = req.app_data::<web::Data<AppState>>().unwrap();
driver::users::get(&app_state, &user_id).map_err(|_| Error::NoUser.into()) driver::users::get(app_state, &user_id)
.await
.map_err(|_| Error::NoUser.into())
} }
} }
@@ -88,19 +117,22 @@ impl<const FCM: bool> UserExtractor<{ FCM }> {
} }
/// Extractor of user and additional parameters from request with Bearer token. /// Extractor of user and additional parameters from request with Bearer token.
impl<const FCM: bool> FromRequestSync for UserExtractor<{ FCM }> { impl<const FCM: bool> FromRequestAsync for UserExtractor<{ FCM }> {
type Error = actix_web::Error; type Error = actix_web::Error;
fn from_request_sync(req: &HttpRequest, payload: &mut Payload) -> Result<Self, Self::Error> { async fn from_request_async(
let user = SyncExtractor::<User>::from_request(req, payload) req: &HttpRequest,
.into_inner()? payload: &mut Payload,
) -> Result<Self, Self::Error> {
let user = AsyncExtractor::<User>::from_request(req, payload)
.await?
.into_inner(); .into_inner();
let app_state = req.app_data::<web::Data<AppState>>().unwrap(); let app_state = req.app_data::<web::Data<AppState>>().unwrap();
Ok(Self { Ok(Self {
fcm: if FCM { fcm: if FCM {
driver::fcm::from_user(&app_state, &user).ok() driver::fcm::from_user(&app_state, &user).await.ok()
} else { } else {
None None
}, },

View File

@@ -57,18 +57,22 @@ pub trait FromRequestAsync: Sized {
/// web::Json(user) /// web::Json(user)
/// } /// }
/// ``` /// ```
async fn from_request_async(req: HttpRequest, payload: Payload) -> Result<Self, Self::Error>; async fn from_request_async(
req: &HttpRequest,
payload: &mut Payload,
) -> Result<Self, Self::Error>;
} }
impl<T: FromRequestAsync> FromRequest for AsyncExtractor<T> { impl<T: FromRequestAsync> FromRequest for AsyncExtractor<T> {
type Error = T::Error; type Error = T::Error;
type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>; type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future { fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future {
let req = req.clone(); let req = req.clone();
let payload = payload.take(); let mut payload = Payload::None;
Box::pin(async move { Box::pin(async move {
T::from_request_async(req, payload) T::from_request_async(&req, &mut payload)
.await .await
.map(|res| Self(res)) .map(|res| Self(res))
}) })
@@ -82,6 +86,7 @@ pub struct SyncExtractor<T>(T);
impl<T> SyncExtractor<T> { impl<T> SyncExtractor<T> {
/// Retrieving an object extracted with the extractor. /// Retrieving an object extracted with the extractor.
#[allow(unused)]
pub fn into_inner(self) -> T { pub fn into_inner(self) -> T {
self.0 self.0
} }

View File

@@ -1 +0,0 @@
pub mod parser;

View File

@@ -1,19 +1,19 @@
use crate::app_state::{AppState, app_state};
use crate::middlewares::authorization::JWTAuthorization; use crate::middlewares::authorization::JWTAuthorization;
use crate::middlewares::content_type::ContentTypeBootstrap; use crate::middlewares::content_type::ContentTypeBootstrap;
use crate::state::{AppState, new_app_state};
use actix_web::dev::{ServiceFactory, ServiceRequest}; use actix_web::dev::{ServiceFactory, ServiceRequest};
use actix_web::{App, Error, HttpServer}; use actix_web::{App, Error, HttpServer};
use dotenvy::dotenv; use dotenvy::dotenv;
use log::info;
use std::io; use std::io;
use utoipa_actix_web::AppExt; use utoipa_actix_web::AppExt;
use utoipa_actix_web::scope::Scope; use utoipa_actix_web::scope::Scope;
use utoipa_rapidoc::RapiDoc; use utoipa_rapidoc::RapiDoc;
mod app_state; mod state;
mod database; mod database;
mod parser;
mod xls_downloader; mod xls_downloader;
mod extractors; mod extractors;
@@ -47,7 +47,6 @@ pub fn get_api_scope<
ignore: &["/group-names", "/teacher-names"], ignore: &["/group-names", "/teacher-names"],
}) })
.service(routes::schedule::schedule) .service(routes::schedule::schedule)
.service(routes::schedule::update_download_url)
.service(routes::schedule::cache_status) .service(routes::schedule::cache_status)
.service(routes::schedule::group) .service(routes::schedule::group)
.service(routes::schedule::group_names) .service(routes::schedule::group_names)
@@ -59,6 +58,13 @@ pub fn get_api_scope<
.service(routes::fcm::update_callback) .service(routes::fcm::update_callback)
.service(routes::fcm::set_token); .service(routes::fcm::set_token);
let flow_scope = utoipa_actix_web::scope("/flow")
.wrap(JWTAuthorization {
ignore: &["/telegram-auth"],
})
.service(routes::flow::telegram_auth)
.service(routes::flow::telegram_complete);
let vk_id_scope = utoipa_actix_web::scope("/vkid") // let vk_id_scope = utoipa_actix_web::scope("/vkid") //
.service(routes::vk_id::oauth); .service(routes::vk_id::oauth);
@@ -67,13 +73,14 @@ pub fn get_api_scope<
.service(users_scope) .service(users_scope)
.service(schedule_scope) .service(schedule_scope)
.service(fcm_scope) .service(fcm_scope)
.service(flow_scope)
.service(vk_id_scope) .service(vk_id_scope)
} }
async fn async_main() -> io::Result<()> { async fn async_main() -> io::Result<()> {
println!("Starting server..."); info!("Запуск сервера...");
let app_state = app_state().await; let app_state = new_app_state().await.unwrap();
HttpServer::new(move || { HttpServer::new(move || {
let (app, api) = App::new() let (app, api) = App::new()
@@ -112,8 +119,6 @@ fn main() -> io::Result<()> {
}, },
)); ));
unsafe { std::env::set_var("RUST_BACKTRACE", "1") };
dotenv().unwrap(); dotenv().unwrap();
env_logger::init(); env_logger::init();

View File

@@ -1,11 +1,12 @@
use crate::database::models::User; use crate::database::models::User;
use crate::extractors::authorized_user; use crate::extractors::authorized_user;
use crate::extractors::base::FromRequestSync; use crate::extractors::base::FromRequestAsync;
use actix_web::body::{BoxBody, EitherBody}; use actix_web::body::{BoxBody, EitherBody};
use actix_web::dev::{Payload, Service, ServiceRequest, ServiceResponse, Transform, forward_ready}; use actix_web::dev::{Payload, Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
use actix_web::{Error, HttpRequest, ResponseError}; use actix_web::{Error, HttpRequest, ResponseError};
use futures_util::future::LocalBoxFuture; use futures_util::future::LocalBoxFuture;
use std::future::{Ready, ready}; use std::future::{Ready, ready};
use std::rc::Rc;
/// Middleware guard working with JWT tokens. /// Middleware guard working with JWT tokens.
pub struct JWTAuthorization { pub struct JWTAuthorization {
@@ -21,7 +22,7 @@ impl Default for JWTAuthorization {
impl<S, B> Transform<S, ServiceRequest> for JWTAuthorization impl<S, B> Transform<S, ServiceRequest> for JWTAuthorization
where where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
S::Future: 'static, S::Future: 'static,
B: 'static, B: 'static,
{ {
@@ -33,14 +34,14 @@ where
fn new_transform(&self, service: S) -> Self::Future { fn new_transform(&self, service: S) -> Self::Future {
ready(Ok(JWTAuthorizationMiddleware { ready(Ok(JWTAuthorizationMiddleware {
service, service: Rc::new(service),
ignore: self.ignore, ignore: self.ignore,
})) }))
} }
} }
pub struct JWTAuthorizationMiddleware<S> { pub struct JWTAuthorizationMiddleware<S> {
service: S, service: Rc<S>,
/// List of ignored endpoints. /// List of ignored endpoints.
ignore: &'static [&'static str], ignore: &'static [&'static str],
} }
@@ -52,12 +53,11 @@ where
B: 'static, B: 'static,
{ {
/// Checking the validity of the token. /// Checking the validity of the token.
fn check_authorization( async fn check_authorization(req: &HttpRequest) -> Result<(), authorized_user::Error> {
&self, let mut payload = Payload::None;
req: &HttpRequest,
payload: &mut Payload, User::from_request_async(req, &mut payload)
) -> Result<(), authorized_user::Error> { .await
User::from_request_sync(req, payload)
.map(|_| ()) .map(|_| ())
.map_err(|e| e.as_error::<authorized_user::Error>().unwrap().clone()) .map_err(|e| e.as_error::<authorized_user::Error>().unwrap().clone())
} }
@@ -79,9 +79,9 @@ where
} }
} }
impl<'a, S, B> Service<ServiceRequest> for JWTAuthorizationMiddleware<S> impl<S, B> Service<ServiceRequest> for JWTAuthorizationMiddleware<S>
where where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
S::Future: 'static, S::Future: 'static,
B: 'static, B: 'static,
{ {
@@ -97,20 +97,19 @@ where
return Box::pin(async move { Ok(fut.await?.map_into_left_body()) }); return Box::pin(async move { Ok(fut.await?.map_into_left_body()) });
} }
let (http_req, mut payload) = req.into_parts(); let service = Rc::clone(&self.service);
if let Err(err) = self.check_authorization(&http_req, &mut payload) { Box::pin(async move {
return Box::pin(async move { match Self::check_authorization(req.request()).await {
Ok(ServiceResponse::new( Ok(_) => {
http_req, let fut = service.call(req).await?;
err.error_response().map_into_right_body(), Ok(fut.map_into_left_body())
))
});
} }
Err(err) => Ok(ServiceResponse::new(
let req = ServiceRequest::from_parts(http_req, payload); req.into_parts().0,
let fut = self.service.call(req); err.error_response().map_into_right_body(),
)),
Box::pin(async move { Ok(fut.await?.map_into_left_body()) }) }
})
} }
} }

View File

@@ -1,743 +0,0 @@
use crate::parser::LessonParseResult::{Lessons, Street};
use crate::parser::schema::LessonType::Break;
use crate::parser::schema::{
Day, ErrorCell, ErrorCellPos, Lesson, LessonSubGroup, LessonTime, LessonType, ParseError,
ParseResult, ScheduleEntry,
};
use calamine::{Reader, Xls, open_workbook_from_rs};
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
use fuzzy_matcher::FuzzyMatcher;
use fuzzy_matcher::skim::SkimMatcherV2;
use regex::Regex;
use std::collections::HashMap;
use std::io::Cursor;
use std::sync::LazyLock;
pub mod schema;
/// A named cell: its grid position plus the text found there.
/// Used for both group header cells and day-name cells.
struct InternalId {
    /// Row index.
    row: u32,
    /// Column index.
    column: u32,
    /// Text in the cell.
    name: String,
}
/// Data on the time of lessons from the second column of the schedule.
struct InternalTime {
    /// Start/end time span of the lesson.
    time_range: LessonTime,
    /// Type of lesson.
    lesson_type: LessonType,
    /// Ordinal index of the lesson within the day, when numbered.
    default_index: Option<u32>,
    /// Cell bounds in the sheet: ((start_row, start_col), (end_row, end_col)).
    xls_range: ((u32, u32), (u32, u32)),
}
/// Alias for a calamine cell range; this module's notion of a working sheet.
type WorkSheet = calamine::Range<calamine::Data>;
/// Reads the cell at (`row`, `col`) and returns its text normalized:
/// CR/LF runs and repeated whitespace are collapsed to single spaces and
/// the result is trimmed.
///
/// Returns `None` when the cell is missing or contains only whitespace.
fn get_string_from_cell(worksheet: &WorkSheet, row: u32, col: u32) -> Option<String> {
    // `?` replaces the manual `if let ... else { return None }` dance.
    let cell_data = worksheet.get((row as usize, col as usize))?.to_string();
    // Fast path: an all-whitespace cell would normalize to empty anyway.
    if cell_data.trim().is_empty() {
        return None;
    }
    // Compiled once per process, not per call.
    static NL_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\n\r]+").unwrap());
    static SP_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\s+").unwrap());
    let trimmed_data = SP_RE
        .replace_all(&NL_RE.replace_all(&cell_data, " "), " ")
        .trim()
        .to_string();
    if trimmed_data.is_empty() {
        None
    } else {
        Some(trimmed_data)
    }
}
/// Infers the bounds of the (possibly merged) cell whose upper-left corner
/// is at (`row`, `column`).
///
/// The raw `Range` carries no merge metadata, so the end row/column are
/// guessed by scanning down and right for the next physically present cell:
/// a merged region leaves the positions it covers empty.
///
/// NOTE(review): when no further cell exists, the scan ends at the last
/// index checked (or 0 when the scan range is empty), which can yield an
/// end coordinate before `row`/`column` — confirm callers only hit the
/// normal case.
fn get_merge_from_start(worksheet: &WorkSheet, row: u32, column: u32) -> ((u32, u32), (u32, u32)) {
    let worksheet_end = worksheet.end().unwrap();
    // Scan down this column for the next present cell.
    let row_end: u32 = {
        let mut r: u32 = 0;
        for _r in (row + 1)..worksheet_end.0 {
            r = _r;
            if let Some(_) = worksheet.get((_r as usize, column as usize)) {
                break;
            }
        }
        r
    };
    // Scan right along this row for the next present cell.
    let column_end: u32 = {
        let mut c: u32 = 0;
        for _c in (column + 1)..worksheet_end.1 {
            c = _c;
            if let Some(_) = worksheet.get((row as usize, _c as usize)) {
                break;
            }
        }
        c
    };
    ((row, column), (row_end, column_end))
}
/// Extracts the schedule "skeleton" from the working sheet: the group header
/// cells and the day-name cells of the first column.
///
/// Group names are read from the row just above the first day row, starting
/// two columns to the right. Day scanning stops once a day starting with
/// "Суббота" (Saturday) is found after at least three days were collected.
///
/// # Errors
/// Returns [`ParseError::UnknownWorkSheetRange`] when the sheet has no
/// start/end bounds.
fn parse_skeleton(worksheet: &WorkSheet) -> Result<(Vec<InternalId>, Vec<InternalId>), ParseError> {
    let mut header_parsed = false;
    let mut groups: Vec<InternalId> = Vec::new();
    let mut days: Vec<InternalId> = Vec::new();

    let start = worksheet.start().ok_or(ParseError::UnknownWorkSheetRange)?;
    let end = worksheet.end().ok_or(ParseError::UnknownWorkSheetRange)?;

    let mut row = start.0;
    while row < end.0 {
        row += 1;

        // Day names live in the very first column; skip rows without one.
        let Some(day_name) = get_string_from_cell(worksheet, row, 0) else {
            continue;
        };

        if !header_parsed {
            header_parsed = true;

            // The group header row sits directly above the first day row.
            row -= 1;
            for column in (start.1 + 2)..=end.1 {
                if let Some(name) = get_string_from_cell(worksheet, row, column) {
                    groups.push(InternalId { row, column, name });
                }
            }
            row += 1;
        }

        days.push(InternalId {
            row,
            column: 0,
            name: day_name.clone(),
        });

        if days.len() > 2 && day_name.starts_with("Суббота") {
            break;
        }
    }

    Ok((days, groups))
}
/// The result of parsing a single schedule cell.
enum LessonParseResult {
    /// List of lessons, one or two entries long.
    ///
    /// Contains a single lesson when it is the first of the day; otherwise a
    /// synthesized break lesson followed by the lesson itself.
    Lessons(Vec<Lesson>),
    /// The cell named a street instead: the street on which the Polytechnic
    /// building is located.
    Street(String),
}
trait StringInnerSlice {
    /// Returns a copy of the string with the characters in `[from, to)`
    /// removed — everything *outside* that char range is kept. Note `to`
    /// is exclusive and both indices count chars, not bytes.
    fn inner_slice(&self, from: usize, to: usize) -> Self;
}
impl StringInnerSlice for String {
    fn inner_slice(&self, from: usize, to: usize) -> Self {
        // Keep chars [0, from), then chars [to, len): the span in between
        // is dropped.
        self.chars()
            .take(from)
            .chain(self.chars().skip(to))
            .collect()
    }
}
// noinspection GrazieInspection
/// Tries to recognize a non-standard lesson type embedded in the lesson name.
///
/// Fuzzy-matches the name against a fixed set of Russian markers (e.g.
/// "зачет", "экзамен"); when the best marker scores above the threshold,
/// returns the name with the matched fragment removed together with the
/// detected [`LessonType`]. Returns `None` when nothing matches well enough.
fn guess_lesson_type(name: &String) -> Option<(String, LessonType)> {
    // A plain array: the pairs are only iterated, never looked up by key,
    // so a HashMap (rebuilt on every call) was wasted work.
    let markers: [(&str, LessonType); 5] = [
        ("(консультация)", LessonType::Consultation),
        ("самостоятельная работа", LessonType::IndependentWork),
        ("зачет", LessonType::Exam),
        ("зачет с оценкой", LessonType::ExamWithGrade),
        ("экзамен", LessonType::ExamDefault),
    ];

    let matcher = SkimMatcherV2::default();
    let name_lower = name.to_lowercase();

    // Only the best-scoring marker matters; no need to sort the whole list.
    let (lesson_type, score, indices) = markers
        .iter()
        .map(|(marker, lesson_type)| match matcher.fuzzy_indices(&name_lower, marker) {
            Some((score, indices)) => (lesson_type, score, indices),
            None => (lesson_type, 0, Vec::new()),
        })
        .max_by_key(|result| result.1)?;

    if score > 80 {
        // `indices` holds the positions of the matched characters, and the
        // end bound of `inner_slice` is exclusive — add 1 so the last
        // matched character is removed too (the previous version kept it).
        Some((
            name.inner_slice(indices[0], indices[indices.len() - 1] + 1),
            lesson_type.clone(),
        ))
    } else {
        None
    }
}
/// Parses one schedule cell into either a lesson (possibly preceded by a
/// synthesized break) or a street marker.
///
/// `time` supplies the row and timing of the cell; `day_times` is the full
/// set of time rows for the day and is used to resolve the end time of
/// merged (multi-slot) cells; `column` is the group's column.
///
/// # Errors
/// Returns [`ParseError::LessonTimeNotFound`] when a merged cell's bottom
/// row matches no known time row.
fn parse_lesson(
    worksheet: &WorkSheet,
    day: &mut Day,
    day_times: &Vec<InternalTime>,
    time: &InternalTime,
    column: u32,
) -> Result<LessonParseResult, ParseError> {
    let row = time.xls_range.0.0;
    // Resolve the raw cell text into a lesson name and type; bail out early
    // for empty cells and street markers.
    let (name, lesson_type) = {
        let raw_name_opt = get_string_from_cell(&worksheet, row, column);
        if raw_name_opt.is_none() {
            return Ok(Lessons(Vec::new()));
        }
        let raw_name = raw_name_opt.unwrap();
        // Matches a building address like "Ленина, 5" — a street marker, not a lesson.
        static OTHER_STREET_RE: LazyLock<Regex> =
            LazyLock::new(|| Regex::new(r"^[А-Я][а-я]+,?\s?[0-9]+$").unwrap());
        if OTHER_STREET_RE.is_match(&raw_name) {
            return Ok(Street(raw_name));
        }
        // Non-standard lesson types (exam, consultation, …) override the
        // default type taken from the time column.
        if let Some(guess) = guess_lesson_type(&raw_name) {
            guess
        } else {
            (raw_name, time.lesson_type.clone())
        }
    };
    // Determine the lesson's slot range and wall-clock span, accounting for
    // cells merged across several time rows.
    let (default_range, lesson_time) = || -> Result<(Option<[u8; 2]>, LessonTime), ParseError> {
        // check if multi-lesson
        let cell_range = get_merge_from_start(worksheet, row, column);
        let end_time_arr = day_times
            .iter()
            .filter(|time| time.xls_range.1.0 == cell_range.1.0)
            .collect::<Vec<&InternalTime>>();
        let end_time = end_time_arr
            .first()
            .ok_or(ParseError::LessonTimeNotFound(ErrorCellPos { row, column }))?;
        let range: Option<[u8; 2]> = if time.default_index != None {
            let default = time.default_index.unwrap() as u8;
            Some([default, end_time.default_index.unwrap() as u8])
        } else {
            None
        };
        let time = LessonTime {
            start: time.time_range.start,
            end: end_time.time_range.end,
        };
        Ok((range, time))
    }()?;
    let (name, mut subgroups) = parse_name_and_subgroups(&name)?;
    // Distribute the cabinets found right of the lesson cell over the subgroups.
    {
        let cabinets: Vec<String> = parse_cabinets(worksheet, row, column + 1);
        // If there is exactly one cabinet, assign it to every subgroup
        if cabinets.len() == 1 {
            for subgroup in &mut subgroups {
                subgroup.cabinet = Some(cabinets.get(0).or(Some(&String::new())).unwrap().clone())
            }
        }
        // If the number of cabinets matches the number of subgroups, assign them in order
        else if cabinets.len() == subgroups.len() {
            for subgroup in &mut subgroups {
                subgroup.cabinet = Some(
                    cabinets
                        .get((subgroup.number - 1) as usize)
                        .unwrap()
                        .clone(),
                );
            }
        }
        // If there are more cabinets than subgroups, add extra subgroups for the surplus.
        else if cabinets.len() > subgroups.len() {
            for index in 0..subgroups.len() {
                subgroups[index].cabinet = Some(cabinets[index].clone());
            }
            while cabinets.len() > subgroups.len() {
                subgroups.push(LessonSubGroup {
                    number: (subgroups.len() + 1) as u8,
                    cabinet: Some(cabinets[subgroups.len()].clone()),
                    teacher: "Ошибка в расписании".to_string(),
                });
            }
        }
        // If there are no cabinets but there are subgroups, use the "??" placeholder
        else {
            for subgroup in &mut subgroups {
                subgroup.cabinet = Some("??".to_string());
            }
        }
        cabinets
    };
    let lesson = Lesson {
        lesson_type,
        default_range,
        name: Some(name),
        time: lesson_time,
        subgroups: Some(subgroups),
        group: None,
    };
    // First lesson of the day is returned alone; otherwise a break is
    // synthesized spanning the gap since the previous lesson.
    let prev_lesson = if day.lessons.len() == 0 {
        return Ok(Lessons(Vec::from([lesson])));
    } else {
        &day.lessons[day.lessons.len() - 1]
    };
    Ok(Lessons(Vec::from([
        Lesson {
            lesson_type: Break,
            default_range: None,
            name: None,
            time: LessonTime {
                start: prev_lesson.time.end,
                end: lesson.time.start,
            },
            subgroups: Some(Vec::new()),
            group: None,
        },
        lesson,
    ])))
}
/// Collects the cabinet numbers listed in the cell to the right of a lesson.
///
/// The cell text is split on whitespace; each token becomes one cabinet
/// entry. An absent or empty cell yields an empty list.
fn parse_cabinets(worksheet: &WorkSheet, row: u32, column: u32) -> Vec<String> {
    match get_string_from_cell(worksheet, row, column) {
        // `split_whitespace` swallows line breaks and repeated separators,
        // so no empty cabinet strings can slip into the result (the manual
        // `split(" ")` version produced them for doubled spaces).
        Some(raw) => raw.split_whitespace().map(str::to_string).collect(),
        None => Vec::new(),
    }
}
/// Extracts the "pure" lesson name and the list of teacher subgroups from
/// the raw text of a lesson cell.
///
/// Teacher blocks like "ИвановАБ(1п)" are expected at the end of the cell
/// text; each becomes a [`LessonSubGroup`] with the teacher formatted as
/// "Иванов А.Б." and the optional digit used as the subgroup number.
/// Missing subgroup numbers are repaired heuristically afterwards.
fn parse_name_and_subgroups(name: &String) -> Result<(String, Vec<LessonSubGroup>), ParseError> {
    // One or more teacher blocks (Surname + two initials + optional "(Nп)") at end of string.
    static LESSON_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"(?:[А-Я][а-я]+[А-Я]{2}(?:\([0-9][а-я]+\))?)+$").unwrap());
    // Captures: surname, first initial, second initial, optional subgroup digit.
    static TEACHER_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"([А-Я][а-я]+)([А-Я])([А-Я])(?:\(([0-9])[а-я]+\))?").unwrap());
    static CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\s.,]+").unwrap());
    static END_CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[.\s]+$").unwrap());
    // Split the cell text into the teacher tail and the lesson name before it.
    let (teachers, lesson_name) = {
        let clean_name = CLEAN_RE.replace_all(&name, "").to_string();
        if let Some(captures) = LESSON_RE.captures(&clean_name) {
            let capture = captures.get(0).unwrap();
            let capture_str = capture.as_str().to_string();
            // First 5 chars of the teacher tail, used to locate it in the
            // original (un-cleaned) string.
            let capture_name: String = capture_str.chars().take(5).collect();
            (
                END_CLEAN_RE.replace(&capture_str, "").to_string(),
                END_CLEAN_RE
                    .replace(&name[0..name.find(&*capture_name).unwrap()], "")
                    .to_string(),
            )
        } else {
            // No teacher tail: the whole text is the lesson name.
            return Ok((END_CLEAN_RE.replace(&name, "").to_string(), Vec::new()));
        }
    };
    let mut subgroups: Vec<LessonSubGroup> = Vec::new();
    let teacher_it = TEACHER_RE.captures_iter(&teachers);
    for captures in teacher_it {
        subgroups.push(LessonSubGroup {
            // 0 is a sentinel for "no subgroup number given"; repaired below.
            number: match captures.get(4) {
                Some(capture) => capture.as_str().to_string().parse::<u8>().unwrap(),
                None => 0,
            },
            cabinet: None,
            teacher: format!(
                "{} {}.{}.",
                captures.get(1).unwrap().as_str().to_string(),
                captures.get(2).unwrap().as_str().to_string(),
                captures.get(3).unwrap().as_str().to_string()
            ),
        });
    }
    // Fix-up for teachers that are missing a subgroup index
    if subgroups.len() == 1 {
        let index = subgroups[0].number;
        if index == 0 {
            subgroups[0].number = 1u8;
        } else {
            subgroups.push(LessonSubGroup {
                number: if index == 1 { 2 } else { 1 },
                cabinet: None,
                teacher: "Только у другой".to_string(),
            });
        }
    } else if subgroups.len() == 2 {
        // if both indices are missing, number them sequentially
        if subgroups[0].number == 0 && subgroups[1].number == 0 {
            subgroups[0].number = 1;
            subgroups[1].number = 2;
        }
        // if the first index is missing, use 2 when the second one is 1, and vice versa
        else if subgroups[0].number == 0 {
            subgroups[0].number = if subgroups[1].number == 1 { 2 } else { 1 };
        }
        // if the second index is missing, use 2 when the first one is 1, and vice versa
        else if subgroups[1].number == 0 {
            subgroups[1].number = if subgroups[0].number == 1 { 2 } else { 1 };
        }
    }
    // Keep subgroups ordered by number.
    if subgroups.len() == 2 && subgroups[0].number == 2 && subgroups[1].number == 1 {
        subgroups.reverse()
    }
    Ok((lesson_name, subgroups))
}
/// Conversion of the list of couples of groups in the list of lessons of teachers.
///
/// Builds a per-teacher schedule from the per-group one: every non-break
/// lesson is copied into the schedule of each teacher mentioned in its
/// subgroups (placeholder "Ошибка в расписании" entries are skipped), and the
/// copy is tagged with the source group's name. Each teacher's day is then
/// sorted by the lesson's default range end.
///
/// Returns an empty map when `groups` is empty (the previous version panicked
/// on `values().next().unwrap()` in that case).
fn convert_groups_to_teachers(
    groups: &HashMap<String, ScheduleEntry>,
) -> HashMap<String, ScheduleEntry> {
    // Guard: nothing to convert, and no template week can be derived.
    let Some(first_group) = groups.values().next() else {
        return HashMap::new();
    };

    // Template week: same day headers as any group, but with no lessons yet.
    let empty_days: Vec<Day> = first_group
        .days
        .iter()
        .map(|day| Day {
            name: day.name.clone(),
            street: day.street.clone(),
            date: day.date.clone(),
            lessons: vec![],
        })
        .collect();

    let mut teachers: HashMap<String, ScheduleEntry> = HashMap::new();

    for group in groups.values() {
        for (index, day) in group.days.iter().enumerate() {
            for group_lesson in &day.lessons {
                if group_lesson.lesson_type == Break {
                    continue;
                }
                let Some(subgroups) = group_lesson.subgroups.as_ref() else {
                    continue;
                };
                for subgroup in subgroups {
                    // Placeholder created when the sheet listed more cabinets
                    // than teachers; not a real teacher.
                    if subgroup.teacher == "Ошибка в расписании" {
                        continue;
                    }
                    // Single lookup via the entry API (previously
                    // contains_key + insert + get_mut — three hashes).
                    let teacher_entry = teachers
                        .entry(subgroup.teacher.clone())
                        .or_insert_with(|| ScheduleEntry {
                            name: subgroup.teacher.clone(),
                            days: empty_days.clone(),
                        });
                    let teacher_day = teacher_entry.days.get_mut(index).unwrap();
                    teacher_day.lessons.push({
                        let mut lesson = group_lesson.clone();
                        lesson.group = Some(group.name.clone());
                        lesson
                    });
                }
            }
        }
    }

    // Keep every teacher's day ordered by lesson slot.
    // NOTE(review): the unwrap assumes every copied lesson carries a
    // `default_range`; a lesson without one would panic here — confirm.
    teachers.values_mut().for_each(|teacher| {
        teacher.days.iter_mut().for_each(|day| {
            day.lessons.sort_by(|a, b| {
                a.default_range.as_ref().unwrap()[1].cmp(&b.default_range.as_ref().unwrap()[1])
            })
        })
    });

    teachers
}
/// Reading XLS Document from the buffer and converting it into the schedule ready to use.
///
/// # Arguments
///
/// * `buffer`: XLS data containing schedule.
///
/// returns: Result<ParseResult, ParseError>
///
/// # Examples
///
/// ```
/// use schedule_parser_rusted::parser::parse_xls;
///
/// let result = parse_xls(&include_bytes!("../../schedule.xls").to_vec());
///
/// assert!(result.is_ok());
///
/// assert_ne!(result.as_ref().unwrap().groups.len(), 0);
/// assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
/// ```
pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
    let cursor = Cursor::new(&buffer);
    // Open the workbook in memory; any calamine failure becomes BadXLS.
    let mut workbook: Xls<_> =
        open_workbook_from_rs(cursor).map_err(|e| ParseError::BadXLS(std::sync::Arc::new(e)))?;
    // The schedule is expected on the first worksheet.
    let worksheet: WorkSheet = workbook
        .worksheets()
        .first()
        .ok_or(ParseError::NoWorkSheets)?
        .1
        .to_owned();
    // Locate the day rows and the group columns on the sheet.
    let (days_markup, groups_markup) = parse_skeleton(&worksheet)?;
    let mut groups: HashMap<String, ScheduleEntry> = HashMap::new();
    // Per-day lesson time grid; filled once while processing the first group
    // and then reused for every other group (the grid is shared).
    let mut days_times: Vec<Vec<InternalTime>> = Vec::new();
    // Last row of the sheet bounds the final (Saturday) day block.
    let saturday_end_row = worksheet.end().unwrap().0;
    for group_markup in groups_markup {
        let mut group = ScheduleEntry {
            name: group_markup.name,
            days: Vec::new(),
        };
        for day_index in 0..(&days_markup).len() {
            let day_markup = &days_markup[day_index];
            // Day header cell looks like "<name> <dd.mm.yyyy>".
            let mut day = {
                let space_index = day_markup.name.find(' ').unwrap();
                let name = day_markup.name[..space_index].to_string();
                let date_raw = day_markup.name[space_index + 1..].to_string();
                let date_add = format!("{} 00:00:00", date_raw);
                let date = NaiveDateTime::parse_from_str(&*date_add, "%d.%m.%Y %H:%M:%S");
                Day {
                    name,
                    street: None,
                    // Panics on a malformed date in the day header.
                    date: date.unwrap().and_utc(),
                    lessons: Vec::new(),
                }
            };
            // The lesson-time column sits right next to the day-marker column.
            let lesson_time_column = days_markup[0].column + 1;
            // Number of sheet rows covered by this day: up to the next day
            // marker, or to the end of the sheet for the last day.
            let row_distance = if day_index != days_markup.len() - 1 {
                days_markup[day_index + 1].row
            } else {
                saturday_end_row
            } - day_markup.row;
            // Parse the time grid only until all 6 week days are cached.
            if days_times.len() != 6 {
                let mut day_times: Vec<InternalTime> = Vec::new();
                for row in day_markup.row..(day_markup.row + row_distance) {
                    // Raw text of the time cell; empty rows are skipped.
                    let time_opt = get_string_from_cell(&worksheet, row, lesson_time_column);
                    if time_opt.is_none() {
                        continue;
                    }
                    let time = time_opt.unwrap();
                    // Rows mentioning "пара" are regular slots; anything else
                    // is an additional slot.
                    let lesson_type = if time.contains("пара") {
                        LessonType::Default
                    } else {
                        LessonType::Additional
                    };
                    // Ordinal of a regular slot = its leading digit.
                    let default_index = if lesson_type == LessonType::Default {
                        Some(
                            time.chars()
                                .next()
                                .unwrap()
                                .to_string()
                                .parse::<u32>()
                                .unwrap(),
                        )
                    } else {
                        None
                    };
                    // Start/end wall-clock times, e.g. "8.30-10.00".
                    let time_range = {
                        static TIME_RE: LazyLock<Regex> =
                            LazyLock::new(|| Regex::new(r"(\d+\.\d+)-(\d+\.\d+)").unwrap());
                        let parse_res = TIME_RE.captures(&time).ok_or(ParseError::GlobalTime(
                            ErrorCell::new(row, lesson_time_column, time.clone()),
                        ))?;
                        let start_match = parse_res.get(1).unwrap().as_str();
                        let start_parts: Vec<&str> = start_match.split(".").collect();
                        let end_match = parse_res.get(2).unwrap().as_str();
                        let end_parts: Vec<&str> = end_match.split(".").collect();
                        // NOTE(review): the "- 4" presumably converts the
                        // timetable's local UTC+4 times to UTC — confirm.
                        static GET_TIME: fn(DateTime<Utc>, &Vec<&str>) -> DateTime<Utc> =
                            |date, parts| {
                                date + Duration::hours(parts[0].parse::<i64>().unwrap() - 4)
                                    + Duration::minutes(parts[1].parse::<i64>().unwrap())
                            };
                        LessonTime {
                            start: GET_TIME(day.date.clone(), &start_parts),
                            end: GET_TIME(day.date.clone(), &end_parts),
                        }
                    };
                    day_times.push(InternalTime {
                        time_range,
                        lesson_type,
                        default_index,
                        xls_range: get_merge_from_start(&worksheet, row, lesson_time_column),
                    });
                }
                days_times.push(day_times);
            }
            let day_times = &days_times[day_index];
            // Parse every time slot of this day for the current group column.
            for time in day_times {
                match &mut parse_lesson(
                    &worksheet,
                    &mut day,
                    &day_times,
                    &time,
                    group_markup.column,
                )? {
                    // A lesson cell: append the produced lessons (and breaks).
                    Lessons(l) => day.lessons.append(l),
                    // A street/address marker applying to the whole day.
                    Street(s) => day.street = Some(s.to_owned()),
                }
            }
            group.days.push(day);
        }
        groups.insert(group.name.clone(), group);
    }
    Ok(ParseResult {
        teachers: convert_groups_to_teachers(&groups),
        groups,
    })
}
#[cfg(test)]
pub mod tests {
    use super::*;

    /// Shared helper: parse the schedule workbook bundled with the crate.
    pub fn test_result() -> Result<ParseResult, ParseError> {
        parse_xls(&include_bytes!("../../schedule.xls").to_vec())
    }

    #[test]
    fn read() {
        let outcome = test_result();
        assert!(outcome.is_ok());

        let parsed = outcome.as_ref().unwrap();
        assert_ne!(parsed.groups.len(), 0);
        assert_ne!(parsed.teachers.len(), 0);
    }
}

View File

@@ -1,6 +1,6 @@
mod shared;
mod sign_in; mod sign_in;
mod sign_up; mod sign_up;
mod shared;
pub use sign_in::*; pub use sign_in::*;
pub use sign_up::*; pub use sign_up::*;

View File

@@ -1,5 +1,5 @@
use jsonwebtoken::errors::ErrorKind; use jsonwebtoken::errors::ErrorKind;
use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation}; use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(Deserialize, Serialize)] #[derive(Deserialize, Serialize)]

View File

@@ -1,14 +1,12 @@
use self::schema::*; use self::schema::*;
use crate::database::driver; use crate::database::driver;
use crate::database::models::User; use crate::database::driver::users::UserSave;
use crate::routes::auth::shared::parse_vk_id; use crate::routes::auth::shared::parse_vk_id;
use crate::routes::auth::sign_in::schema::SignInData::{Default, Vk}; use crate::routes::auth::sign_in::schema::SignInData::{Default, VkOAuth};
use crate::routes::schema::ResponseError;
use crate::routes::schema::user::UserResponse; use crate::routes::schema::user::UserResponse;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use crate::utility::mutex::MutexScope;
use crate::{AppState, utility}; use crate::{AppState, utility};
use actix_web::{post, web}; use actix_web::{post, web};
use diesel::SaveChangesDsl;
use web::Json; use web::Json;
async fn sign_in_combined( async fn sign_in_combined(
@@ -16,14 +14,18 @@ async fn sign_in_combined(
app_state: &web::Data<AppState>, app_state: &web::Data<AppState>,
) -> Result<UserResponse, ErrorCode> { ) -> Result<UserResponse, ErrorCode> {
let user = match &data { let user = match &data {
Default(data) => driver::users::get_by_username(&app_state, &data.username), Default(data) => driver::users::get_by_username(&app_state, &data.username).await,
Vk(id) => driver::users::get_by_vk_id(&app_state, *id), VkOAuth(id) => driver::users::get_by_vk_id(&app_state, *id).await,
}; };
match user { match user {
Ok(mut user) => { Ok(mut user) => {
if let Default(data) = data { if let Default(data) = data {
match bcrypt::verify(&data.password, &user.password) { if user.password.is_none() {
return Err(ErrorCode::IncorrectCredentials);
}
match bcrypt::verify(&data.password, &user.password.as_ref().unwrap()) {
Ok(result) => { Ok(result) => {
if !result { if !result {
return Err(ErrorCode::IncorrectCredentials); return Err(ErrorCode::IncorrectCredentials);
@@ -35,12 +37,9 @@ async fn sign_in_combined(
} }
} }
user.access_token = utility::jwt::encode(&user.id); user.access_token = Some(utility::jwt::encode(&user.id));
app_state.database.scope(|conn| { user.save(&app_state).await.expect("Failed to update user");
user.save_changes::<User>(conn)
.expect("Failed to update user")
});
Ok(user.into()) Ok(user.into())
} }
@@ -71,15 +70,17 @@ pub async fn sign_in_vk(
) -> ServiceResponse { ) -> ServiceResponse {
let data = data_json.into_inner(); let data = data_json.into_inner();
match parse_vk_id(&data.access_token, app_state.vk_id.client_id) { match parse_vk_id(&data.access_token, app_state.get_env().vk_id.client_id) {
Ok(id) => sign_in_combined(Vk(id), &app_state).await.into(), Ok(id) => sign_in_combined(VkOAuth(id), &app_state).await,
Err(_) => ErrorCode::InvalidVkAccessToken.into_response(), Err(_) => Err(ErrorCode::InvalidVkAccessToken),
} }
.into()
} }
mod schema { mod schema {
use crate::routes::schema::user::UserResponse; use crate::routes::schema::user::UserResponse;
use actix_macros::{IntoResponseError, StatusCode}; use actix_macros::ErrResponse;
use derive_more::Display;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use utoipa::ToSchema; use utoipa::ToSchema;
@@ -109,15 +110,17 @@ mod schema {
pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>; pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;
#[derive(Serialize, ToSchema, Clone, IntoResponseError, StatusCode)] #[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = SignIn::ErrorCode)] #[schema(as = SignIn::ErrorCode)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"] #[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
pub enum ErrorCode { pub enum ErrorCode {
/// Incorrect username or password. /// Incorrect username or password.
#[display("Incorrect username or password.")]
IncorrectCredentials, IncorrectCredentials,
/// Invalid VK ID token. /// Invalid VK ID token.
#[display("Invalid VK ID token.")]
InvalidVkAccessToken, InvalidVkAccessToken,
} }
@@ -129,7 +132,7 @@ mod schema {
Default(Request), Default(Request),
/// Identifier of the attached account VK. /// Identifier of the attached account VK.
Vk(i32), VkOAuth(i32),
} }
} }
@@ -184,14 +187,16 @@ mod tests {
&User { &User {
id: id.clone(), id: id.clone(),
username, username,
password: bcrypt::hash("example".to_string(), bcrypt::DEFAULT_COST).unwrap(), password: Some(bcrypt::hash("example".to_string(), bcrypt::DEFAULT_COST).unwrap()),
vk_id: None, vk_id: None,
access_token: utility::jwt::encode(&id), telegram_id: None,
group: "ИС-214/23".to_string(), access_token: Some(utility::jwt::encode(&id)),
group: Some("ИС-214/23".to_string()),
role: UserRole::Student, role: UserRole::Student,
version: "1.0.0".to_string(), android_version: None,
}, },
) )
.await
.unwrap(); .unwrap();
} }

View File

@@ -2,11 +2,10 @@ use self::schema::*;
use crate::AppState; use crate::AppState;
use crate::database::driver; use crate::database::driver;
use crate::database::models::UserRole; use crate::database::models::UserRole;
use crate::routes::auth::shared::{Error, parse_vk_id}; use crate::routes::auth::shared::parse_vk_id;
use crate::routes::schema::ResponseError;
use crate::routes::schema::user::UserResponse; use crate::routes::schema::user::UserResponse;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{post, web}; use actix_web::{post, web};
use rand::{Rng, rng};
use web::Json; use web::Json;
async fn sign_up_combined( async fn sign_up_combined(
@@ -18,29 +17,30 @@ async fn sign_up_combined(
return Err(ErrorCode::DisallowedRole); return Err(ErrorCode::DisallowedRole);
} }
// If specified group doesn't exist in schedule. if !app_state
let schedule_opt = app_state.schedule.lock().unwrap(); .get_schedule_snapshot()
.await
if let Some(schedule) = &*schedule_opt { .data
if !schedule.data.groups.contains_key(&data.group) { .groups
.contains_key(&data.group)
{
return Err(ErrorCode::InvalidGroupName); return Err(ErrorCode::InvalidGroupName);
} }
}
// If user with specified username already exists. // If user with specified username already exists.
if driver::users::contains_by_username(&app_state, &data.username) { if driver::users::contains_by_username(&app_state, &data.username).await {
return Err(ErrorCode::UsernameAlreadyExists); return Err(ErrorCode::UsernameAlreadyExists);
} }
// If user with specified VKID already exists. // If user with specified VKID already exists.
if let Some(id) = data.vk_id { if let Some(id) = data.vk_id {
if driver::users::contains_by_vk_id(&app_state, id) { if driver::users::contains_by_vk_id(&app_state, id).await {
return Err(ErrorCode::VkAlreadyExists); return Err(ErrorCode::VkAlreadyExists);
} }
} }
let user = data.into(); let user = data.into();
driver::users::insert(&app_state, &user).unwrap(); driver::users::insert(&app_state, &user).await.unwrap();
Ok(UserResponse::from(&user)).into() Ok(UserResponse::from(&user)).into()
} }
@@ -56,7 +56,7 @@ pub async fn sign_up(data_json: Json<Request>, app_state: web::Data<AppState>) -
sign_up_combined( sign_up_combined(
SignUpData { SignUpData {
username: data.username, username: data.username,
password: data.password, password: Some(data.password),
vk_id: None, vk_id: None,
group: data.group, group: data.group,
role: data.role, role: data.role,
@@ -79,15 +79,12 @@ pub async fn sign_up_vk(
) -> ServiceResponse { ) -> ServiceResponse {
let data = data_json.into_inner(); let data = data_json.into_inner();
match parse_vk_id(&data.access_token, app_state.vk_id.client_id) { match parse_vk_id(&data.access_token, app_state.get_env().vk_id.client_id) {
Ok(id) => sign_up_combined( Ok(id) => {
sign_up_combined(
SignUpData { SignUpData {
username: data.username, username: data.username,
password: rng() password: None,
.sample_iter(&rand::distr::Alphanumeric)
.take(16)
.map(char::from)
.collect(),
vk_id: Some(id), vk_id: Some(id),
group: data.group, group: data.group,
role: data.role, role: data.role,
@@ -96,23 +93,18 @@ pub async fn sign_up_vk(
&app_state, &app_state,
) )
.await .await
.into(),
Err(err) => {
if err != Error::Expired {
eprintln!("Failed to parse vk id token!");
eprintln!("{:?}", err);
}
ErrorCode::InvalidVkAccessToken.into_response()
} }
Err(_) => Err(ErrorCode::InvalidVkAccessToken),
} }
.into()
} }
mod schema { mod schema {
use crate::database::models::{User, UserRole}; use crate::database::models::{User, UserRole};
use crate::routes::schema::user::UserResponse; use crate::routes::schema::user::UserResponse;
use crate::utility; use crate::utility;
use actix_macros::{IntoResponseError, StatusCode}; use actix_macros::ErrResponse;
use derive_more::Display;
use objectid::ObjectId; use objectid::ObjectId;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@@ -170,24 +162,29 @@ mod schema {
pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>; pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;
#[derive(Clone, Serialize, utoipa::ToSchema, IntoResponseError, StatusCode)] #[derive(Clone, Serialize, Display, utoipa::ToSchema, ErrResponse)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = SignUp::ErrorCode)] #[schema(as = SignUp::ErrorCode)]
#[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"] #[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
pub enum ErrorCode { pub enum ErrorCode {
/// Conveyed the role of Admin. /// Conveyed the role of Admin.
#[display("Conveyed the role of Admin.")]
DisallowedRole, DisallowedRole,
/// Unknown name of the group. /// Unknown name of the group.
#[display("Unknown name of the group.")]
InvalidGroupName, InvalidGroupName,
/// User with this name is already registered. /// User with this name is already registered.
#[display("User with this name is already registered.")]
UsernameAlreadyExists, UsernameAlreadyExists,
/// Invalid VK ID token. /// Invalid VK ID token.
#[display("Invalid VK ID token.")]
InvalidVkAccessToken, InvalidVkAccessToken,
/// User with such an account VK is already registered. /// User with such an account VK is already registered.
#[display("User with such an account VK is already registered.")]
VkAlreadyExists, VkAlreadyExists,
} }
@@ -195,13 +192,14 @@ mod schema {
/// Data for registration. /// Data for registration.
pub struct SignUpData { pub struct SignUpData {
// TODO: сделать ограничение на минимальную и максимальную длину при регистрации и смене.
/// User name. /// User name.
pub username: String, pub username: String,
/// Password. /// Password.
/// ///
/// Should be present even if registration occurs using the VK ID token. /// Should be present even if registration occurs using the VK ID token.
pub password: String, pub password: Option<String>,
/// Account identifier VK. /// Account identifier VK.
pub vk_id: Option<i32>, pub vk_id: Option<i32>,
@@ -218,18 +216,23 @@ mod schema {
impl Into<User> for SignUpData { impl Into<User> for SignUpData {
fn into(self) -> User { fn into(self) -> User {
assert_ne!(self.password.is_some(), self.vk_id.is_some());
let id = ObjectId::new().unwrap().to_string(); let id = ObjectId::new().unwrap().to_string();
let access_token = utility::jwt::encode(&id); let access_token = Some(utility::jwt::encode(&id));
User { User {
id, id,
username: self.username, username: self.username,
password: bcrypt::hash(self.password, bcrypt::DEFAULT_COST).unwrap(), password: self
.password
.map(|x| bcrypt::hash(x, bcrypt::DEFAULT_COST).unwrap()),
vk_id: self.vk_id, vk_id: self.vk_id,
telegram_id: None,
access_token, access_token,
group: self.group, group: Some(self.group),
role: self.role, role: self.role,
version: self.version, android_version: Some(self.version),
} }
} }
} }
@@ -248,21 +251,21 @@ mod tests {
use actix_web::http::StatusCode; use actix_web::http::StatusCode;
use actix_web::test; use actix_web::test;
struct SignUpPartial { struct SignUpPartial<'a> {
username: String, username: &'a str,
group: String, group: &'a str,
role: UserRole, role: UserRole,
} }
async fn sign_up_client(data: SignUpPartial) -> ServiceResponse { async fn sign_up_client(data: SignUpPartial<'_>) -> ServiceResponse {
let app = test_app(test_app_state().await, sign_up).await; let app = test_app(test_app_state().await, sign_up).await;
let req = test::TestRequest::with_uri("/sign-up") let req = test::TestRequest::with_uri("/sign-up")
.method(Method::POST) .method(Method::POST)
.set_json(Request { .set_json(Request {
username: data.username.clone(), username: data.username.to_string(),
password: "example".to_string(), password: "example".to_string(),
group: data.group.clone(), group: data.group.to_string(),
role: data.role.clone(), role: data.role.clone(),
version: "1.0.0".to_string(), version: "1.0.0".to_string(),
}) })
@@ -278,13 +281,13 @@ mod tests {
test_env(); test_env();
let app_state = static_app_state().await; let app_state = static_app_state().await;
driver::users::delete_by_username(&app_state, &"test::sign_up_valid".to_string()); driver::users::delete_by_username(&app_state, &"test::sign_up_valid".to_string()).await;
// test // test
let resp = sign_up_client(SignUpPartial { let resp = sign_up_client(SignUpPartial {
username: "test::sign_up_valid".to_string(), username: "test::sign_up_valid",
group: "ИС-214/23".to_string(), group: "ИС-214/23",
role: UserRole::Student, role: UserRole::Student,
}) })
.await; .await;
@@ -299,11 +302,11 @@ mod tests {
test_env(); test_env();
let app_state = static_app_state().await; let app_state = static_app_state().await;
driver::users::delete_by_username(&app_state, &"test::sign_up_multiple".to_string()); driver::users::delete_by_username(&app_state, &"test::sign_up_multiple".to_string()).await;
let create = sign_up_client(SignUpPartial { let create = sign_up_client(SignUpPartial {
username: "test::sign_up_multiple".to_string(), username: "test::sign_up_multiple",
group: "ИС-214/23".to_string(), group: "ИС-214/23",
role: UserRole::Student, role: UserRole::Student,
}) })
.await; .await;
@@ -311,8 +314,8 @@ mod tests {
assert_eq!(create.status(), StatusCode::OK); assert_eq!(create.status(), StatusCode::OK);
let resp = sign_up_client(SignUpPartial { let resp = sign_up_client(SignUpPartial {
username: "test::sign_up_multiple".to_string(), username: "test::sign_up_multiple",
group: "ИС-214/23".to_string(), group: "ИС-214/23",
role: UserRole::Student, role: UserRole::Student,
}) })
.await; .await;
@@ -326,8 +329,8 @@ mod tests {
// test // test
let resp = sign_up_client(SignUpPartial { let resp = sign_up_client(SignUpPartial {
username: "test::sign_up_invalid_role".to_string(), username: "test::sign_up_invalid_role",
group: "ИС-214/23".to_string(), group: "ИС-214/23",
role: UserRole::Admin, role: UserRole::Admin,
}) })
.await; .await;
@@ -341,8 +344,8 @@ mod tests {
// test // test
let resp = sign_up_client(SignUpPartial { let resp = sign_up_client(SignUpPartial {
username: "test::sign_up_invalid_group".to_string(), username: "test::sign_up_invalid_group",
group: "invalid_group".to_string(), group: "invalid_group",
role: UserRole::Student, role: UserRole::Student,
}) })
.await; .await;

View File

@@ -1,5 +1,5 @@
mod update_callback;
mod set_token; mod set_token;
mod update_callback;
pub use update_callback::*;
pub use set_token::*; pub use set_token::*;
pub use update_callback::*;

View File

@@ -1,14 +1,13 @@
use crate::app_state::AppState;
use crate::database; use crate::database;
use crate::database::models::FCM; use crate::database::models::FCM;
use crate::extractors::authorized_user::UserExtractor; use crate::extractors::authorized_user::UserExtractor;
use crate::extractors::base::SyncExtractor; use crate::extractors::base::AsyncExtractor;
use crate::utility::mutex::{MutexScope, MutexScopeAsync}; use crate::state::AppState;
use actix_web::{HttpResponse, Responder, patch, web}; use actix_web::{HttpResponse, Responder, patch, web};
use diesel::{RunQueryDsl, SaveChangesDsl}; use diesel::{RunQueryDsl, SaveChangesDsl};
use firebase_messaging_rs::FCMClient; use firebase_messaging_rs::topic::TopicManagementSupport;
use firebase_messaging_rs::topic::{TopicManagementError, TopicManagementSupport};
use serde::Deserialize; use serde::Deserialize;
use std::ops::DerefMut;
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
struct Params { struct Params {
@@ -34,11 +33,10 @@ async fn get_fcm(
topics: vec![], topics: vec![],
}; };
match app_state.database.scope(|conn| { match diesel::insert_into(database::schema::fcm::table)
diesel::insert_into(database::schema::fcm::table)
.values(&fcm) .values(&fcm)
.execute(conn) .execute(app_state.get_database().await.deref_mut())
}) { {
Ok(_) => Ok(fcm), Ok(_) => Ok(fcm),
Err(e) => Err(e), Err(e) => Err(e),
} }
@@ -51,7 +49,7 @@ async fn get_fcm(
pub async fn set_token( pub async fn set_token(
app_state: web::Data<AppState>, app_state: web::Data<AppState>,
web::Query(params): web::Query<Params>, web::Query(params): web::Query<Params>,
user_data: SyncExtractor<UserExtractor<true>>, user_data: AsyncExtractor<UserExtractor<true>>,
) -> impl Responder { ) -> impl Responder {
let user_data = user_data.into_inner(); let user_data = user_data.into_inner();
@@ -75,39 +73,21 @@ pub async fn set_token(
fcm.topics.push(Some("common".to_string())); fcm.topics.push(Some("common".to_string()));
} }
// Subscribe to default topics. fcm.save_changes::<FCM>(app_state.get_database().await.deref_mut())
if let Some(e) = app_state .unwrap();
.fcm_client
.as_ref() let fcm_client = app_state.get_fcm_client().await.unwrap();
.unwrap()
.async_scope(
async |client: &mut FCMClient| -> Result<(), TopicManagementError> {
let mut tokens: Vec<String> = Vec::new();
tokens.push(fcm.token.clone());
for topic in fcm.topics.clone() { for topic in fcm.topics.clone() {
if let Some(topic) = topic { if let Some(topic) = topic {
client.register_tokens_to_topic(topic.clone(), tokens.clone()).await?; if let Err(error) = fcm_client
} .register_token_to_topic(&*topic, &*fcm.token)
}
Ok(())
},
)
.await .await
.err()
{ {
eprintln!("Failed to subscribe token to topic: {:?}", e); eprintln!("Failed to subscribe token to topic: {:?}", error);
return HttpResponse::Ok(); return HttpResponse::Ok();
} }
}
// Write updates to db.
if let Some(e) = app_state
.database
.scope(|conn| fcm.save_changes::<FCM>(conn))
.err()
{
eprintln!("Failed to update FCM object: {e}");
} }
HttpResponse::Ok() HttpResponse::Ok()

View File

@@ -1,9 +1,8 @@
use crate::app_state::AppState; use crate::database::driver::users::UserSave;
use crate::database::models::User; use crate::database::models::User;
use crate::extractors::base::SyncExtractor; use crate::extractors::base::AsyncExtractor;
use crate::utility::mutex::MutexScope; use crate::state::AppState;
use actix_web::{HttpResponse, Responder, post, web}; use actix_web::{HttpResponse, Responder, post, web};
use diesel::SaveChangesDsl;
#[utoipa::path(responses( #[utoipa::path(responses(
(status = OK), (status = OK),
@@ -13,20 +12,13 @@ use diesel::SaveChangesDsl;
async fn update_callback( async fn update_callback(
app_state: web::Data<AppState>, app_state: web::Data<AppState>,
version: web::Path<String>, version: web::Path<String>,
user: SyncExtractor<User>, user: AsyncExtractor<User>,
) -> impl Responder { ) -> impl Responder {
let mut user = user.into_inner(); let mut user = user.into_inner();
user.version = version.into_inner(); user.android_version = Some(version.into_inner());
match app_state user.save(&app_state).await.unwrap();
.database
.scope(|conn| user.save_changes::<User>(conn)) HttpResponse::Ok()
{
Ok(_) => HttpResponse::Ok(),
Err(e) => {
eprintln!("Failed to update user: {}", e);
HttpResponse::InternalServerError()
}
}
} }

5
src/routes/flow/mod.rs Normal file
View File

@@ -0,0 +1,5 @@
mod telegram_auth;
mod telegram_complete;
pub use telegram_auth::*;
pub use telegram_complete::*;

View File

@@ -0,0 +1,183 @@
use self::schema::*;
use crate::database::driver;
use crate::database::driver::users::UserSave;
use crate::database::models::{User, UserRole};
use crate::routes::schema::ResponseError;
use crate::utility::telegram::{WebAppInitDataMap, WebAppUser};
use crate::{AppState, utility};
use actix_web::{post, web};
use chrono::{DateTime, Duration, Utc};
use objectid::ObjectId;
use std::sync::Arc;
use web::Json;
#[utoipa::path(responses(
(status = OK, body = Response),
(status = UNAUTHORIZED, body = ResponseError<ErrorCode>),
))]
#[post("/telegram-auth")]
pub async fn telegram_auth(
data_json: Json<Request>,
app_state: web::Data<AppState>,
) -> ServiceResponse {
let init_data = WebAppInitDataMap::from_str(data_json.into_inner().init_data);
// for (key, value) in &init_data.data_map {
// println!("key: {} | value: {}", key, value);
// }
{
let env = &app_state.get_env().telegram;
if let Err(error) = init_data.verify(env.bot_id, env.test_dc) {
return Err(ErrorCode::InvalidInitData(Arc::new(error))).into();
}
}
let auth_date = DateTime::<Utc>::from_timestamp(
init_data
.data_map
.get("auth_date")
.unwrap()
.parse()
.unwrap(),
0,
)
.unwrap();
if Utc::now() - auth_date > Duration::minutes(5) {
return Err(ErrorCode::ExpiredInitData).into();
}
let web_app_user =
serde_json::from_str::<WebAppUser>(init_data.data_map.get("user").unwrap()).unwrap();
let mut user = {
match driver::users::get_by_telegram_id(&app_state, web_app_user.id).await {
Ok(value) => Ok(value),
Err(_) => {
let new_user = User {
id: ObjectId::new().unwrap().to_string(),
username: format!("telegram_{}", web_app_user.id), // можно оставить, а можно поменять
password: None, // ибо нехуй
vk_id: None,
telegram_id: Some(web_app_user.id),
access_token: None, // установится ниже
group: None,
role: UserRole::Student, // TODO: при реге проверять данные
android_version: None,
};
driver::users::insert(&app_state, &new_user)
.await
.map(|_| new_user)
}
}
.expect("Failed to get or add user")
};
user.access_token = Some(utility::jwt::encode(&user.id));
user.save(&app_state).await.expect("Failed to update user");
Ok(Response::new(
&*user.access_token.unwrap(),
user.group.is_some(),
))
.into()
}
mod schema {
use crate::routes::schema::PartialOkResponse;
use crate::state::AppState;
use crate::utility::telegram::VerifyError;
use actix_macros::ErrResponse;
use actix_web::body::EitherBody;
use actix_web::cookie::CookieBuilder;
use actix_web::cookie::time::OffsetDateTime;
use actix_web::{HttpRequest, HttpResponse, web};
use derive_more::Display;
use serde::{Deserialize, Serialize, Serializer};
use std::ops::Add;
use std::sync::Arc;
use utoipa::ToSchema;
#[derive(Debug, Deserialize, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(as = Flow::TelegramAuth::Request)]
pub struct Request {
/// Telegram WebApp init data.
pub init_data: String,
}
#[derive(Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(as = Flow::TelegramAuth::Response)]
pub struct Response {
#[serde(skip)]
#[schema(ignore)]
access_token: String,
pub completed: bool,
}
impl Response {
pub fn new(access_token: &str, completed: bool) -> Self {
Self {
access_token: access_token.to_string(),
completed,
}
}
}
impl PartialOkResponse for Response {
fn post_process(
&mut self,
request: &HttpRequest,
response: &mut HttpResponse<EitherBody<String>>,
) -> () {
let access_token = &self.access_token;
let app_state = request.app_data::<web::Data<AppState>>().unwrap();
let mini_app_host = &*app_state.get_env().telegram.mini_app_host;
let cookie = CookieBuilder::new("access_token", access_token)
.domain(mini_app_host)
.path("/")
.expires(
OffsetDateTime::now_utc().add(std::time::Duration::from_secs(60 * 60 * 24 * 7)),
)
.http_only(true)
.secure(true)
.finish();
response.add_cookie(&cookie).unwrap();
}
}
pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;
#[derive(Clone, ToSchema, Display, ErrResponse)]
#[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]
#[schema(as = Flow::TelegramAuth::ErrorCode)]
pub enum ErrorCode {
#[display("Invalid init data provided: {_0}")]
#[schema(value_type = String)]
InvalidInitData(Arc<VerifyError>),
#[display("Expired init data provided.")]
ExpiredInitData,
}
impl Serialize for ErrorCode {
    /// Serializes the error as its SCREAMING_SNAKE_CASE wire code.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let code = match self {
            ErrorCode::InvalidInitData(_) => "INVALID_INIT_DATA",
            ErrorCode::ExpiredInitData => "EXPIRED_INIT_DATA",
        };

        serializer.serialize_str(code)
    }
}
}

View File

@@ -0,0 +1,94 @@
use self::schema::*;
use crate::AppState;
use crate::database::driver;
use crate::database::driver::users::UserSave;
use crate::database::models::User;
use crate::extractors::base::AsyncExtractor;
use crate::routes::schema::ResponseError;
use actix_web::{post, web};
use web::Json;
/// Completes a Telegram-initiated sign-up by filling in the user's
/// username and group.
///
/// Returns `AlreadyCompleted` (409) if the user's group is already set,
/// `UsernameAlreadyExists` (400) if the requested username is taken, and
/// `InvalidGroupName` (400) if the group is absent from the parsed schedule.
#[utoipa::path(responses(
    (status = OK),
    (status = CONFLICT, body = ResponseError<ErrorCode>),
    (status = INTERNAL_SERVER_ERROR, body = ResponseError<ErrorCode>),
    (status = BAD_REQUEST, body = ResponseError<ErrorCode>)
))]
#[post("/telegram-complete")]
pub async fn telegram_complete(
    data: Json<Request>,
    app_state: web::Data<AppState>,
    user: AsyncExtractor<User>,
) -> ServiceResponse {
    let mut user = user.into_inner();

    // Guard against overwriting already-present data: a user whose group is
    // set has already completed this flow.
    if user.group.is_some() {
        return Err(ErrorCode::AlreadyCompleted).into();
    }

    let data = data.into_inner();

    // Replace the existing username if a different one was supplied.
    // NOTE(review): the uniqueness check and the save below are not atomic,
    // so a concurrent request could still claim the same username — confirm
    // whether the database enforces a unique constraint.
    if user.username != data.username {
        if driver::users::contains_by_username(&app_state, &data.username).await {
            return Err(ErrorCode::UsernameAlreadyExists).into();
        }

        user.username = data.username;
    }

    // Reject group names that are absent from the cached schedule snapshot.
    if !app_state
        .get_schedule_snapshot()
        .await
        .data
        .groups
        .contains_key(&data.group)
    {
        return Err(ErrorCode::InvalidGroupName).into();
    }

    user.group = Some(data.group);
    // Panics (-> 500) if persisting the user fails.
    user.save(&app_state).await.expect("Failed to update user");

    Ok(()).into()
}
mod schema {
    use actix_macros::ErrResponse;
    use derive_more::Display;
    use serde::{Deserialize, Serialize};
    use utoipa::ToSchema;

    /// Request body for the `/telegram-complete` endpoint.
    #[derive(Debug, Deserialize, Serialize, ToSchema)]
    #[schema(as = Flow::TelegramFill::Request)]
    pub struct Request {
        /// Username.
        pub username: String,
        /// Group.
        pub group: String,
    }

    /// Service-level response type: empty body on success, `ErrorCode` on failure.
    pub type ServiceResponse = crate::routes::schema::Response<(), ErrorCode>;

    /// Errors returned by the `/telegram-complete` endpoint.
    ///
    /// Every variant overrides the mod-level `UNAUTHORIZED` status with its
    /// own status code.
    #[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
    #[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = Flow::TelegramFill::ErrorCode)]
    pub enum ErrorCode {
        /// The user has already filled in their profile.
        // Fixed message grammar ("This flow already completed.").
        #[display("This flow is already completed.")]
        #[status_code = "actix_web::http::StatusCode::CONFLICT"]
        AlreadyCompleted,
        /// The requested username is taken by another user.
        // Fixed message grammar ("Username is already exists.").
        #[display("Username already exists.")]
        #[status_code = "actix_web::http::StatusCode::BAD_REQUEST"]
        UsernameAlreadyExists,
        /// The requested group is absent from the parsed schedule.
        #[display("The required group does not exist.")]
        #[status_code = "actix_web::http::StatusCode::BAD_REQUEST"]
        InvalidGroupName,
    }
}

View File

@@ -1,5 +1,6 @@
pub mod auth; pub mod auth;
pub mod fcm; pub mod fcm;
pub mod flow;
pub mod schedule; pub mod schedule;
mod schema; mod schema;
pub mod users; pub mod users;

View File

@@ -7,17 +7,5 @@ use actix_web::{get, web};
))] ))]
#[get("/cache-status")] #[get("/cache-status")]
pub async fn cache_status(app_state: web::Data<AppState>) -> CacheStatus { pub async fn cache_status(app_state: web::Data<AppState>) -> CacheStatus {
// Prevent thread lock CacheStatus::from(&app_state).await.into()
let has_schedule = app_state
.schedule
.lock()
.as_ref()
.map(|res| res.is_some())
.unwrap();
match has_schedule {
true => CacheStatus::from(&app_state),
false => CacheStatus::default(),
}
.into()
} }

View File

@@ -1,12 +1,13 @@
use self::schema::*; use self::schema::*;
use crate::AppState; use crate::AppState;
use crate::database::models::User; use crate::database::models::User;
use crate::extractors::base::SyncExtractor; use crate::extractors::base::AsyncExtractor;
use crate::routes::schema::{IntoResponseAsError, ResponseError}; use crate::routes::schedule::schema::ScheduleEntryResponse;
use crate::routes::schema::ResponseError;
use actix_web::{get, web}; use actix_web::{get, web};
#[utoipa::path(responses( #[utoipa::path(responses(
(status = OK, body = Response), (status = OK, body = ScheduleEntryResponse),
( (
status = SERVICE_UNAVAILABLE, status = SERVICE_UNAVAILABLE,
body = ResponseError<ErrorCode>, body = ResponseError<ErrorCode>,
@@ -25,68 +26,42 @@ use actix_web::{get, web};
), ),
))] ))]
#[get("/group")] #[get("/group")]
pub async fn group(user: SyncExtractor<User>, app_state: web::Data<AppState>) -> ServiceResponse { pub async fn group(user: AsyncExtractor<User>, app_state: web::Data<AppState>) -> ServiceResponse {
// Prevent thread lock match &user.into_inner().group {
let schedule_lock = app_state.schedule.lock().unwrap(); None => Err(ErrorCode::SignUpNotCompleted),
match schedule_lock.as_ref() { Some(group) => match app_state
None => ErrorCode::NoSchedule.into_response(), .get_schedule_snapshot()
Some(schedule) => match schedule.data.groups.get(&user.into_inner().group) { .await
None => ErrorCode::NotFound.into_response(), .data
Some(entry) => Ok(entry.clone().into()).into(), .groups
.get(group)
{
None => Err(ErrorCode::NotFound),
Some(entry) => Ok(entry.clone().into()),
}, },
} }
.into()
} }
mod schema { mod schema {
use crate::parser::schema::ScheduleEntry; use crate::routes::schedule::schema::ScheduleEntryResponse;
use actix_macros::{IntoResponseErrorNamed, StatusCode}; use actix_macros::ErrResponse;
use chrono::{DateTime, NaiveDateTime, Utc};
use derive_more::Display; use derive_more::Display;
use serde::Serialize; use serde::Serialize;
use utoipa::ToSchema; use utoipa::ToSchema;
pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>; pub type ServiceResponse = crate::routes::schema::Response<ScheduleEntryResponse, ErrorCode>;
#[derive(Serialize, ToSchema)] #[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
#[schema(as = GetGroup::Response)]
#[serde(rename_all = "camelCase")]
pub struct Response {
/// Group schedule.
pub group: ScheduleEntry,
/// ## Outdated variable.
///
/// By default, an empty list is returned.
#[deprecated = "Will be removed in future versions"]
pub updated: Vec<i32>,
/// ## Outdated variable.
///
/// By default, the initial date for unix.
#[deprecated = "Will be removed in future versions"]
pub updated_at: DateTime<Utc>,
}
#[allow(deprecated)]
impl From<ScheduleEntry> for Response {
fn from(group: ScheduleEntry) -> Self {
Self {
group,
updated: Vec::new(),
updated_at: NaiveDateTime::default().and_utc(),
}
}
}
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = GroupSchedule::ErrorCode)] #[schema(as = GroupSchedule::ErrorCode)]
pub enum ErrorCode { pub enum ErrorCode {
/// Schedules have not yet been parsed. /// The user tried to access the API without completing singing up.
#[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"] #[status_code = "actix_web::http::StatusCode::FORBIDDEN"]
#[display("Schedule not parsed yet.")] #[display("You have not completed signing up.")]
NoSchedule, SignUpNotCompleted,
/// Group not found. /// Group not found.
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"] #[status_code = "actix_web::http::StatusCode::NOT_FOUND"]

View File

@@ -1,48 +1,34 @@
use self::schema::*; use self::schema::*;
use crate::AppState; use crate::AppState;
use crate::routes::schedule::schema::ErrorCode;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web}; use actix_web::{get, web};
#[utoipa::path(responses( #[utoipa::path(responses((status = OK, body = Response)))]
(status = OK, body = Response),
(status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>),
))]
#[get("/group-names")] #[get("/group-names")]
pub async fn group_names(app_state: web::Data<AppState>) -> ServiceResponse { pub async fn group_names(app_state: web::Data<AppState>) -> Response {
// Prevent thread lock let mut names: Vec<String> = app_state
let schedule_lock = app_state.schedule.lock().unwrap(); .get_schedule_snapshot()
.await
.data
.groups
.keys()
.cloned()
.collect();
match schedule_lock.as_ref() {
None => ErrorCode::NoSchedule.into_response(),
Some(schedule) => {
let mut names: Vec<String> = schedule.data.groups.keys().cloned().collect();
names.sort(); names.sort();
Ok(names.into()).into() Response { names }
}
}
.into()
} }
mod schema { mod schema {
use crate::routes::schedule::schema::ErrorCode; use actix_macros::ResponderJson;
use serde::Serialize; use serde::Serialize;
use utoipa::ToSchema; use utoipa::ToSchema;
pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>; #[derive(Serialize, ToSchema, ResponderJson)]
#[derive(Serialize, ToSchema)]
#[schema(as = GetGroupNames::Response)] #[schema(as = GetGroupNames::Response)]
pub struct Response { pub struct Response {
/// List of group names sorted in alphabetical order. /// List of group names sorted in alphabetical order.
#[schema(examples(json!(["ИС-214/23"])))] #[schema(examples(json!(["ИС-214/23"])))]
pub names: Vec<String>, pub names: Vec<String>,
} }
impl From<Vec<String>> for Response {
fn from(names: Vec<String>) -> Self {
Self { names }
}
}
} }

View File

@@ -2,10 +2,9 @@ mod cache_status;
mod group; mod group;
mod group_names; mod group_names;
mod schedule; mod schedule;
mod schema;
mod teacher; mod teacher;
mod teacher_names; mod teacher_names;
mod schema;
mod update_download_url;
pub use cache_status::*; pub use cache_status::*;
pub use group::*; pub use group::*;
@@ -13,4 +12,3 @@ pub use group_names::*;
pub use schedule::*; pub use schedule::*;
pub use teacher::*; pub use teacher::*;
pub use teacher_names::*; pub use teacher_names::*;
pub use update_download_url::*;

View File

@@ -1,25 +1,9 @@
use self::schema::*; use crate::routes::schedule::schema::ScheduleView;
use crate::app_state::AppState; use crate::state::AppState;
use crate::routes::schedule::schema::{ErrorCode, ScheduleView};
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web}; use actix_web::{get, web};
#[utoipa::path(responses( #[utoipa::path(responses((status = OK, body = ScheduleView)))]
(status = OK, body = ScheduleView),
(status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>)
))]
#[get("/")] #[get("/")]
pub async fn schedule(app_state: web::Data<AppState>) -> ServiceResponse { pub async fn schedule(app_state: web::Data<AppState>) -> ScheduleView {
match ScheduleView::try_from(&app_state) { ScheduleView::from(&app_state).await
Ok(res) => Ok(res).into(),
Err(e) => match e {
ErrorCode::NoSchedule => ErrorCode::NoSchedule.into_response(),
},
}
}
mod schema {
use crate::routes::schedule::schema::{ErrorCode, ScheduleView};
pub type ServiceResponse = crate::routes::schema::Response<ScheduleView, ErrorCode>;
} }

View File

@@ -1,25 +1,18 @@
use crate::app_state::{AppState, Schedule}; use crate::state::{AppState, ScheduleSnapshot};
use crate::parser::schema::ScheduleEntry; use actix_macros::{OkResponse, ResponderJson};
use actix_macros::{IntoResponseErrorNamed, ResponderJson, StatusCode};
use actix_web::web; use actix_web::web;
use chrono::{DateTime, Duration, Utc}; use schedule_parser::schema::ScheduleEntry;
use derive_more::Display;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap; use std::collections::HashMap;
use std::ops::Deref;
use utoipa::ToSchema; use utoipa::ToSchema;
/// Response from schedule server. /// Response from schedule server.
#[derive(Serialize, ToSchema)] #[derive(Serialize, ToSchema, OkResponse, ResponderJson)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ScheduleView { pub struct ScheduleView {
/// ETag schedules on polytechnic server. /// Url to xls file.
etag: String, url: String,
/// Schedule update date on polytechnic website.
uploaded_at: DateTime<Utc>,
/// Date last downloaded from the Polytechnic server.
downloaded_at: DateTime<Utc>,
/// Groups schedule. /// Groups schedule.
groups: HashMap<String, ScheduleEntry>, groups: HashMap<String, ScheduleEntry>,
@@ -28,80 +21,55 @@ pub struct ScheduleView {
teachers: HashMap<String, ScheduleEntry>, teachers: HashMap<String, ScheduleEntry>,
} }
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)] #[derive(Serialize, ToSchema, OkResponse)]
#[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"] pub struct ScheduleEntryResponse(ScheduleEntry);
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = ScheduleShared::ErrorCode)] impl From<ScheduleEntry> for ScheduleEntryResponse {
pub enum ErrorCode { fn from(value: ScheduleEntry) -> Self {
/// Schedules not yet parsed. Self(value)
#[display("Schedule not parsed yet.")] }
NoSchedule,
} }
impl TryFrom<&web::Data<AppState>> for ScheduleView { impl ScheduleView {
type Error = ErrorCode; pub async fn from(app_state: &web::Data<AppState>) -> Self {
let schedule = app_state.get_schedule_snapshot().await.clone();
fn try_from(app_state: &web::Data<AppState>) -> Result<Self, Self::Error> { Self {
if let Some(schedule) = app_state.schedule.lock().unwrap().clone() { url: schedule.url,
Ok(Self {
etag: schedule.etag,
uploaded_at: schedule.updated_at,
downloaded_at: schedule.parsed_at,
groups: schedule.data.groups, groups: schedule.data.groups,
teachers: schedule.data.teachers, teachers: schedule.data.teachers,
})
} else {
Err(ErrorCode::NoSchedule)
} }
} }
} }
/// Cached schedule status. /// Cached schedule status.
#[derive(Serialize, Deserialize, ToSchema, ResponderJson)] #[derive(Serialize, Deserialize, ToSchema, ResponderJson, OkResponse)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct CacheStatus { pub struct CacheStatus {
/// Schedule hash. /// Schedule hash.
pub cache_hash: String, pub hash: String,
/// Whether the schedule reference needs to be updated.
pub cache_update_required: bool,
/// Last cache update date. /// Last cache update date.
pub last_cache_update: i64, pub fetched_at: i64,
/// Cached schedule update date. /// Cached schedule update date.
/// ///
/// Determined by the polytechnic's server. /// Determined by the polytechnic's server.
pub last_schedule_update: i64, pub updated_at: i64,
} }
impl CacheStatus { impl CacheStatus {
pub fn default() -> Self { pub async fn from(value: &web::Data<AppState>) -> Self {
CacheStatus { From::<&ScheduleSnapshot>::from(value.get_schedule_snapshot().await.deref())
cache_hash: "0000000000000000000000000000000000000000".to_string(),
cache_update_required: true,
last_cache_update: 0,
last_schedule_update: 0,
}
} }
} }
impl From<&web::Data<AppState>> for CacheStatus { impl From<&ScheduleSnapshot> for CacheStatus {
fn from(value: &web::Data<AppState>) -> Self { fn from(value: &ScheduleSnapshot) -> Self {
let schedule_lock = value.schedule.lock().unwrap();
let schedule = schedule_lock.as_ref().unwrap();
CacheStatus::from(schedule)
}
}
impl From<&Schedule> for CacheStatus {
fn from(value: &Schedule) -> Self {
Self { Self {
cache_hash: value.hash(), hash: value.hash(),
cache_update_required: (value.fetched_at - Utc::now()) > Duration::minutes(5), fetched_at: value.fetched_at.timestamp(),
last_cache_update: value.fetched_at.timestamp(), updated_at: value.updated_at.timestamp(),
last_schedule_update: value.updated_at.timestamp(),
} }
} }
} }

View File

@@ -1,18 +1,11 @@
use self::schema::*; use self::schema::*;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use crate::AppState; use crate::AppState;
use crate::routes::schema::ResponseError;
use actix_web::{get, web}; use actix_web::{get, web};
use schedule_parser::schema::ScheduleEntry;
#[utoipa::path(responses( #[utoipa::path(responses(
(status = OK, body = Response), (status = OK, body = ScheduleEntry),
(
status = SERVICE_UNAVAILABLE,
body = ResponseError<ErrorCode>,
example = json!({
"code": "NO_SCHEDULE",
"message": "Schedule not parsed yet."
})
),
( (
status = NOT_FOUND, status = NOT_FOUND,
body = ResponseError<ErrorCode>, body = ResponseError<ErrorCode>,
@@ -23,72 +16,34 @@ use actix_web::{get, web};
), ),
))] ))]
#[get("/teacher/{name}")] #[get("/teacher/{name}")]
pub async fn teacher( pub async fn teacher(name: web::Path<String>, app_state: web::Data<AppState>) -> ServiceResponse {
name: web::Path<String>, match app_state
app_state: web::Data<AppState>, .get_schedule_snapshot()
) -> ServiceResponse { .await
// Prevent thread lock .data
let schedule_lock = app_state.schedule.lock().unwrap(); .teachers
.get(&name.into_inner())
{
None => Err(ErrorCode::NotFound),
match schedule_lock.as_ref() { Some(entry) => Ok(entry.clone().into()),
None => ErrorCode::NoSchedule.into_response(),
Some(schedule) => match schedule.data.teachers.get(&name.into_inner()) {
None => ErrorCode::NotFound.into_response(),
Some(entry) => Ok(entry.clone().into()).into(),
},
} }
.into()
} }
mod schema { mod schema {
use crate::parser::schema::ScheduleEntry; use crate::routes::schedule::schema::ScheduleEntryResponse;
use actix_macros::{IntoResponseErrorNamed, StatusCode}; use actix_macros::ErrResponse;
use chrono::{DateTime, NaiveDateTime, Utc};
use derive_more::Display; use derive_more::Display;
use serde::Serialize; use serde::Serialize;
use utoipa::ToSchema; use utoipa::ToSchema;
pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>; pub type ServiceResponse = crate::routes::schema::Response<ScheduleEntryResponse, ErrorCode>;
#[derive(Serialize, ToSchema)] #[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
#[schema(as = GetTeacher::Response)]
#[serde(rename_all = "camelCase")]
pub struct Response {
/// Teacher's schedule.
pub teacher: ScheduleEntry,
/// ## Deprecated variable.
///
/// By default, an empty list is returned.
#[deprecated = "Will be removed in future versions"]
pub updated: Vec<i32>,
/// ## Deprecated variable.
///
/// Defaults to the Unix start date.
#[deprecated = "Will be removed in future versions"]
pub updated_at: DateTime<Utc>,
}
#[allow(deprecated)]
impl From<ScheduleEntry> for Response {
fn from(teacher: ScheduleEntry) -> Self {
Self {
teacher,
updated: Vec::new(),
updated_at: NaiveDateTime::default().and_utc(),
}
}
}
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = TeacherSchedule::ErrorCode)] #[schema(as = TeacherSchedule::ErrorCode)]
pub enum ErrorCode { pub enum ErrorCode {
/// Schedules have not yet been parsed.
#[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
#[display("Schedule not parsed yet.")]
NoSchedule,
/// Teacher not found. /// Teacher not found.
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"] #[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
#[display("Required teacher not found.")] #[display("Required teacher not found.")]

View File

@@ -1,48 +1,34 @@
use self::schema::*; use self::schema::*;
use crate::AppState; use crate::AppState;
use crate::routes::schedule::schema::ErrorCode;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web}; use actix_web::{get, web};
#[utoipa::path(responses( #[utoipa::path(responses((status = OK, body = Response)))]
(status = OK, body = Response),
(status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>),
))]
#[get("/teacher-names")] #[get("/teacher-names")]
pub async fn teacher_names(app_state: web::Data<AppState>) -> ServiceResponse { pub async fn teacher_names(app_state: web::Data<AppState>) -> Response {
// Prevent thread lock let mut names: Vec<String> = app_state
let schedule_lock = app_state.schedule.lock().unwrap(); .get_schedule_snapshot()
.await
.data
.teachers
.keys()
.cloned()
.collect();
match schedule_lock.as_ref() {
None => ErrorCode::NoSchedule.into_response(),
Some(schedule) => {
let mut names: Vec<String> = schedule.data.teachers.keys().cloned().collect();
names.sort(); names.sort();
Ok(names.into()).into() Response { names }
}
}
.into()
} }
mod schema { mod schema {
use crate::routes::schedule::schema::ErrorCode; use actix_macros::ResponderJson;
use serde::Serialize; use serde::Serialize;
use utoipa::ToSchema; use utoipa::ToSchema;
pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>; #[derive(Serialize, ToSchema, ResponderJson)]
#[derive(Serialize, ToSchema)]
#[schema(as = GetTeacherNames::Response)] #[schema(as = GetTeacherNames::Response)]
pub struct Response { pub struct Response {
/// List of teacher names sorted alphabetically. /// List of teacher names sorted alphabetically.
#[schema(examples(json!(["Хомченко Н.Е."])))] #[schema(examples(json!(["Хомченко Н.Е."])))]
pub names: Vec<String>, pub names: Vec<String>,
} }
impl From<Vec<String>> for Response {
fn from(names: Vec<String>) -> Self {
Self { names }
}
}
} }

View File

@@ -1,140 +0,0 @@
use self::schema::*;
use crate::AppState;
use crate::app_state::Schedule;
use crate::parser::parse_xls;
use crate::routes::schedule::schema::CacheStatus;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use crate::xls_downloader::interface::{FetchError, XLSDownloader};
use actix_web::web::Json;
use actix_web::{patch, web};
use chrono::Utc;
#[utoipa::path(responses(
(status = OK, body = CacheStatus),
(status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>),
))]
#[patch("/update-download-url")]
pub async fn update_download_url(
data: Json<Request>,
app_state: web::Data<AppState>,
) -> ServiceResponse {
if !data.url.starts_with("https://politehnikum-eng.ru/") {
return ErrorCode::NonWhitelistedHost.into_response();
}
let mut downloader = app_state.downloader.lock().unwrap();
if let Some(url) = &downloader.url {
if url.eq(&data.url) {
return Ok(CacheStatus::from(&app_state)).into();
}
}
match downloader.set_url(data.url.clone()).await {
Ok(fetch_result) => {
let mut schedule = app_state.schedule.lock().unwrap();
if schedule.is_some()
&& fetch_result.uploaded_at < schedule.as_ref().unwrap().updated_at
{
return ErrorCode::OutdatedSchedule.into_response();
}
match downloader.fetch(false).await {
Ok(download_result) => match parse_xls(&download_result.data.unwrap()) {
Ok(data) => {
*schedule = Some(Schedule {
etag: download_result.etag,
fetched_at: download_result.requested_at,
updated_at: download_result.uploaded_at,
parsed_at: Utc::now(),
data,
});
Ok(CacheStatus::from(schedule.as_ref().unwrap())).into()
}
Err(error) => {
sentry::capture_error(&error);
ErrorCode::InvalidSchedule(error).into_response()
}
},
Err(error) => {
if let FetchError::Unknown(error) = &error {
sentry::capture_error(&error);
}
ErrorCode::DownloadFailed(error).into_response()
}
}
}
Err(error) => {
if let FetchError::Unknown(error) = &error {
sentry::capture_error(&error);
}
ErrorCode::FetchFailed(error).into_response()
}
}
}
mod schema {
use crate::parser::schema::ParseError;
use crate::routes::schedule::schema::CacheStatus;
use actix_macros::{IntoResponseErrorNamed, StatusCode};
use derive_more::Display;
use serde::{Deserialize, Serialize, Serializer};
use utoipa::ToSchema;
use crate::xls_downloader::interface::FetchError;
pub type ServiceResponse = crate::routes::schema::Response<CacheStatus, ErrorCode>;
#[derive(Serialize, Deserialize, ToSchema)]
pub struct Request {
/// Schedule link.
pub url: String,
}
#[derive(Clone, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
#[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
#[schema(as = SetDownloadUrl::ErrorCode)]
pub enum ErrorCode {
/// Transferred link with host different from politehnikum-eng.ru.
#[display("URL with unknown host provided. Provide url with 'politehnikum-eng.ru' host.")]
NonWhitelistedHost,
/// Failed to retrieve file metadata.
#[display("Unable to retrieve metadata from the specified URL: {_0}")]
FetchFailed(FetchError),
/// Failed to download the file.
#[display("Unable to retrieve data from the specified URL: {_0}")]
DownloadFailed(FetchError),
/// The link leads to an outdated schedule.
///
/// An outdated schedule refers to a schedule that was published earlier
/// than is currently available.
#[display("The schedule is older than it already is.")]
OutdatedSchedule,
/// Failed to parse the schedule.
#[display("{_0}")]
InvalidSchedule(ParseError),
}
impl Serialize for ErrorCode {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
ErrorCode::NonWhitelistedHost => serializer.serialize_str("NON_WHITELISTED_HOST"),
ErrorCode::FetchFailed(_) => serializer.serialize_str("FETCH_FAILED"),
ErrorCode::DownloadFailed(_) => serializer.serialize_str("DOWNLOAD_FAILED"),
ErrorCode::OutdatedSchedule => serializer.serialize_str("OUTDATED_SCHEDULE"),
ErrorCode::InvalidSchedule(_) => serializer.serialize_str("INVALID_SCHEDULE"),
}
}
}
}

View File

@@ -4,22 +4,19 @@ use actix_web::http::StatusCode;
use actix_web::{HttpRequest, HttpResponse, Responder}; use actix_web::{HttpRequest, HttpResponse, Responder};
use serde::{Serialize, Serializer}; use serde::{Serialize, Serializer};
use std::convert::Into; use std::convert::Into;
use std::fmt::Display;
use utoipa::PartialSchema; use utoipa::PartialSchema;
pub struct Response<T, E>(pub Result<T, E>) pub struct Response<T, E>(pub Result<T, E>)
where where
T: Serialize + PartialSchema, T: Serialize + PartialSchema + PartialOkResponse,
E: Serialize + PartialSchema + Clone + PartialStatusCode; E: Serialize + PartialSchema + Display + PartialErrResponse;
pub trait PartialStatusCode {
fn status_code(&self) -> StatusCode;
}
/// Transform Response<T, E> into Result<T, E> /// Transform Response<T, E> into Result<T, E>
impl<T, E> Into<Result<T, E>> for Response<T, E> impl<T, E> Into<Result<T, E>> for Response<T, E>
where where
T: Serialize + PartialSchema, T: Serialize + PartialSchema + PartialOkResponse,
E: Serialize + PartialSchema + Clone + PartialStatusCode, E: Serialize + PartialSchema + Display + PartialErrResponse,
{ {
fn into(self) -> Result<T, E> { fn into(self) -> Result<T, E> {
self.0 self.0
@@ -29,8 +26,8 @@ where
/// Transform T into Response<T, E> /// Transform T into Response<T, E>
impl<T, E> From<Result<T, E>> for Response<T, E> impl<T, E> From<Result<T, E>> for Response<T, E>
where where
T: Serialize + PartialSchema, T: Serialize + PartialSchema + PartialOkResponse,
E: Serialize + PartialSchema + Clone + PartialStatusCode, E: Serialize + PartialSchema + Display + PartialErrResponse,
{ {
fn from(value: Result<T, E>) -> Self { fn from(value: Result<T, E>) -> Self {
Response(value) Response(value)
@@ -40,17 +37,16 @@ where
/// Serialize Response<T, E> /// Serialize Response<T, E>
impl<T, E> Serialize for Response<T, E> impl<T, E> Serialize for Response<T, E>
where where
T: Serialize + PartialSchema, T: Serialize + PartialSchema + PartialOkResponse,
E: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<E>>, E: Serialize + PartialSchema + Display + PartialErrResponse + Clone + Into<ResponseError<E>>,
{ {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
match &self.0 { match &self.0 {
Ok(ok) => serializer.serialize_some::<T>(&ok), Ok(ok) => serializer.serialize_some(&ok),
Err(err) => serializer Err(err) => serializer.serialize_some(&ResponseError::<E>::from(err.clone().into())),
.serialize_some::<ResponseError<E>>(&ResponseError::<E>::from(err.clone().into())),
} }
} }
} }
@@ -58,12 +54,12 @@ where
/// Transform Response<T, E> to HttpResponse<String> /// Transform Response<T, E> to HttpResponse<String>
impl<T, E> Responder for Response<T, E> impl<T, E> Responder for Response<T, E>
where where
T: Serialize + PartialSchema, T: Serialize + PartialSchema + PartialOkResponse,
E: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<E>>, E: Serialize + PartialSchema + Display + PartialErrResponse + Clone + Into<ResponseError<E>>,
{ {
type Body = EitherBody<String>; type Body = EitherBody<String>;
fn respond_to(self, _: &HttpRequest) -> HttpResponse<Self::Body> { fn respond_to(mut self, request: &HttpRequest) -> HttpResponse<Self::Body> {
match serde_json::to_string(&self) { match serde_json::to_string(&self) {
Ok(body) => { Ok(body) => {
let code = match &self.0 { let code = match &self.0 {
@@ -71,13 +67,19 @@ where
Err(e) => e.status_code(), Err(e) => e.status_code(),
}; };
match HttpResponse::build(code) let mut response = match HttpResponse::build(code)
.content_type(mime::APPLICATION_JSON) .content_type(mime::APPLICATION_JSON)
.message_body(body) .message_body(body)
{ {
Ok(res) => res.map_into_left_body(), Ok(res) => res.map_into_left_body(),
Err(err) => HttpResponse::from_error(err).map_into_right_body(), Err(err) => HttpResponse::from_error(err).map_into_right_body(),
};
if let Ok(ok) = &mut self.0 {
ok.post_process(request, &mut response);
} }
response
} }
Err(err) => { Err(err) => {
@@ -87,61 +89,80 @@ where
} }
} }
/// ResponseError<T> /// Трейт для всех положительных ответов от сервера
/// pub trait PartialOkResponse {
/// Field `message` is optional for backwards compatibility with Android App, that produces error if new fields will be added to JSON response. fn post_process(
#[derive(Serialize, utoipa::ToSchema)] &mut self,
pub struct ResponseError<T: Serialize + PartialSchema> { _request: &HttpRequest,
pub code: T, _response: &mut HttpResponse<EitherBody<String>>,
) -> () {
#[serde(skip_serializing_if = "Option::is_none")] }
pub message: Option<String>,
} }
pub trait IntoResponseAsError<T> impl PartialOkResponse for () {}
/// Трейт для всех отрицательных ответов от сервера
pub trait PartialErrResponse {
fn status_code(&self) -> StatusCode;
}
/// ResponseError<T>
#[derive(Serialize, utoipa::ToSchema)]
pub struct ResponseError<T: Serialize + PartialSchema + Clone> {
pub code: T,
pub message: String,
}
impl<T> From<T> for ResponseError<T>
where where
T: Serialize + PartialSchema, T: Serialize + PartialSchema + Display + Clone,
Self: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<Self>>,
{ {
fn into_response(self) -> Response<T, Self> { fn from(code: T) -> Self {
Response(Err(self)) Self {
message: format!("{}", code),
code,
}
} }
} }
pub mod user { pub mod user {
use crate::database::models::{User, UserRole}; use crate::database::models::{User, UserRole};
use actix_macros::ResponderJson; use actix_macros::{OkResponse, ResponderJson};
use serde::Serialize; use serde::Serialize;
//noinspection SpellCheckingInspection //noinspection SpellCheckingInspection
/// Используется для скрытия чувствительных полей, таких как хеш пароля или FCM /// Используется для скрытия чувствительных полей, таких как хеш пароля или FCM
#[derive(Serialize, utoipa::ToSchema, ResponderJson)] #[derive(Serialize, utoipa::ToSchema, ResponderJson, OkResponse)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct UserResponse { pub struct UserResponse {
/// UUID /// UUID
#[schema(examples("67dcc9a9507b0000772744a2"))] #[schema(examples("67dcc9a9507b0000772744a2"))]
id: String, pub id: String,
/// Имя пользователя /// Имя пользователя
#[schema(examples("n08i40k"))] #[schema(examples("n08i40k"))]
username: String, pub username: String,
/// Группа /// Группа
#[schema(examples("ИС-214/23"))] #[schema(examples("ИС-214/23"))]
group: String, pub group: Option<String>,
/// Роль /// Роль
role: UserRole, pub role: UserRole,
/// Идентификатор привязанного аккаунта VK /// Идентификатор привязанного аккаунта VK
#[schema(examples(498094647, json!(null)))] #[schema(examples(498094647, json!(null)))]
vk_id: Option<i32>, pub vk_id: Option<i32>,
/// Идентификатор привязанного аккаунта Telegram
#[schema(examples(996004735, json!(null)))]
pub telegram_id: Option<i64>,
/// JWT токен доступа /// JWT токен доступа
#[schema(examples( #[schema(examples(
"eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpZCI6IjY3ZGNjOWE5NTA3YjAwMDA3NzI3NDRhMiIsImlhdCI6IjE3NDMxMDgwOTkiLCJleHAiOiIxODY5MjUyMDk5In0.rMgXRb3JbT9AvLK4eiY9HMB5LxgUudkpQyoWKOypZFY" "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpZCI6IjY3ZGNjOWE5NTA3YjAwMDA3NzI3NDRhMiIsImlhdCI6IjE3NDMxMDgwOTkiLCJleHAiOiIxODY5MjUyMDk5In0.rMgXRb3JbT9AvLK4eiY9HMB5LxgUudkpQyoWKOypZFY"
))] ))]
access_token: String, pub access_token: Option<String>,
} }
/// Create UserResponse from User ref. /// Create UserResponse from User ref.
@@ -153,6 +174,7 @@ pub mod user {
group: user.group.clone(), group: user.group.clone(),
role: user.role.clone(), role: user.role.clone(),
vk_id: user.vk_id.clone(), vk_id: user.vk_id.clone(),
telegram_id: user.telegram_id.clone(),
access_token: user.access_token.clone(), access_token: user.access_token.clone(),
} }
} }
@@ -167,6 +189,7 @@ pub mod user {
group: user.group, group: user.group,
role: user.role, role: user.role,
vk_id: user.vk_id, vk_id: user.vk_id,
telegram_id: user.telegram_id,
access_token: user.access_token, access_token: user.access_token,
} }
} }

View File

@@ -1,85 +1,62 @@
use self::schema::*; use self::schema::*;
use crate::app_state::AppState;
use crate::database::driver::users::UserSave; use crate::database::driver::users::UserSave;
use crate::database::models::User; use crate::database::models::User;
use crate::extractors::base::SyncExtractor; use crate::extractors::base::AsyncExtractor;
use crate::routes::schema::IntoResponseAsError; use crate::state::AppState;
use crate::utility::mutex::MutexScope;
use actix_web::{post, web}; use actix_web::{post, web};
#[utoipa::path(responses((status = OK)))] #[utoipa::path(responses((status = OK)))]
#[post("/change-group")] #[post("/change-group")]
pub async fn change_group( pub async fn change_group(
app_state: web::Data<AppState>, app_state: web::Data<AppState>,
user: SyncExtractor<User>, user: AsyncExtractor<User>,
data: web::Json<Request>, data: web::Json<Request>,
) -> ServiceResponse { ) -> ServiceResponse {
let mut user = user.into_inner(); let mut user = user.into_inner();
if user.group == data.group { if user.group.is_some_and(|group| group == data.group) {
return ErrorCode::SameGroup.into_response(); return Ok(()).into();
} }
if let Some(e) = app_state.schedule.scope(|schedule| match schedule { if !app_state
Some(schedule) => { .get_schedule_snapshot()
if schedule.data.groups.contains_key(&data.group) { .await
None .data
} else { .groups
Some(ErrorCode::NotFound) .contains_key(&data.group)
} {
} return Err(ErrorCode::NotFound).into();
None => Some(ErrorCode::NoSchedule),
}) {
return e.into_response();
} }
user.group = data.into_inner().group; user.group = Some(data.into_inner().group);
user.save(&app_state).await.unwrap();
if let Some(e) = user.save(&app_state).err() {
eprintln!("Failed to update user: {e}");
return ErrorCode::InternalServerError.into_response();
}
Ok(()).into() Ok(()).into()
} }
mod schema { mod schema {
use actix_macros::{IntoResponseErrorNamed, StatusCode}; use actix_macros::ErrResponse;
use derive_more::Display; use derive_more::Display;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use utoipa::ToSchema; use utoipa::ToSchema;
pub type ServiceResponse = crate::routes::schema::Response<(), ErrorCode>; pub type ServiceResponse = crate::routes::schema::Response<(), ErrorCode>;
#[derive(Serialize, Deserialize, ToSchema)] #[derive(Deserialize, ToSchema)]
#[schema(as = ChangeGroup::Request)] #[schema(as = ChangeGroup::Request)]
pub struct Request { pub struct Request {
/// Group name. // Group.
pub group: String, pub group: String,
} }
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)] #[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = ChangeGroup::ErrorCode)] #[schema(as = ChangeGroup::ErrorCode)]
#[status_code = "actix_web::http::StatusCode::CONFLICT"] #[status_code = "actix_web::http::StatusCode::CONFLICT"]
pub enum ErrorCode { pub enum ErrorCode {
/// Schedules have not yet been received.
#[display("Schedule not parsed yet.")]
#[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
NoSchedule,
/// Passed the same group name that is currently there.
#[display("Passed the same group name as it is at the moment.")]
SameGroup,
/// The required group does not exist. /// The required group does not exist.
#[display("The required group does not exist.")] #[display("The required group does not exist.")]
#[status_code = "actix_web::http::StatusCode::NOT_FOUND"] #[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
NotFound, NotFound,
/// Server-side error.
#[display("Internal server error.")]
#[status_code = "actix_web::http::StatusCode::INTERNAL_SERVER_ERROR"]
InternalServerError,
} }
} }

View File

@@ -1,41 +1,39 @@
use self::schema::*; use self::schema::*;
use crate::app_state::AppState;
use crate::database::driver; use crate::database::driver;
use crate::database::driver::users::UserSave; use crate::database::driver::users::UserSave;
use crate::database::models::User; use crate::database::models::User;
use crate::extractors::base::SyncExtractor; use crate::extractors::base::AsyncExtractor;
use crate::routes::schema::IntoResponseAsError; use crate::state::AppState;
use actix_web::{post, web}; use actix_web::{post, web};
#[utoipa::path(responses((status = OK)))] #[utoipa::path(responses((status = OK)))]
#[post("/change-username")] #[post("/change-username")]
pub async fn change_username( pub async fn change_username(
app_state: web::Data<AppState>, app_state: web::Data<AppState>,
user: SyncExtractor<User>, user: AsyncExtractor<User>,
data: web::Json<Request>, data: web::Json<Request>,
) -> ServiceResponse { ) -> ServiceResponse {
let mut user = user.into_inner(); let mut user = user.into_inner();
if user.username == data.username { if user.username == data.username {
return ErrorCode::SameUsername.into_response(); return Ok(()).into();
} }
if driver::users::get_by_username(&app_state, &data.username).is_ok() { if driver::users::get_by_username(&app_state, &data.username)
return ErrorCode::AlreadyExists.into_response(); .await
.is_ok()
{
return Err(ErrorCode::AlreadyExists).into();
} }
user.username = data.into_inner().username; user.username = data.into_inner().username;
user.save(&app_state).await.unwrap();
if let Some(e) = user.save(&app_state).err() {
eprintln!("Failed to update user: {e}");
return ErrorCode::InternalServerError.into_response();
}
Ok(()).into() Ok(()).into()
} }
mod schema { mod schema {
use actix_macros::{IntoResponseErrorNamed, StatusCode}; use actix_macros::ErrResponse;
use derive_more::Display; use derive_more::Display;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use utoipa::ToSchema; use utoipa::ToSchema;
@@ -49,22 +47,13 @@ mod schema {
pub username: String, pub username: String,
} }
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)] #[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = ChangeUsername::ErrorCode)] #[schema(as = ChangeUsername::ErrorCode)]
#[status_code = "actix_web::http::StatusCode::CONFLICT"] #[status_code = "actix_web::http::StatusCode::CONFLICT"]
pub enum ErrorCode { pub enum ErrorCode {
/// The same name that is currently present is passed.
#[display("Passed the same name as it is at the moment.")]
SameUsername,
/// A user with this name already exists. /// A user with this name already exists.
#[display("A user with this name already exists.")] #[display("A user with this name already exists.")]
AlreadyExists, AlreadyExists,
/// Server-side error.
#[display("Internal server error.")]
#[status_code = "actix_web::http::StatusCode::INTERNAL_SERVER_ERROR"]
InternalServerError,
} }
} }

View File

@@ -1,10 +1,10 @@
use crate::database::models::User; use crate::database::models::User;
use crate::extractors::base::SyncExtractor; use crate::extractors::base::AsyncExtractor;
use actix_web::get;
use crate::routes::schema::user::UserResponse; use crate::routes::schema::user::UserResponse;
use actix_web::get;
#[utoipa::path(responses((status = OK, body = UserResponse)))] #[utoipa::path(responses((status = OK, body = UserResponse)))]
#[get("/me")] #[get("/me")]
pub async fn me(user: SyncExtractor<User>) -> UserResponse { pub async fn me(user: AsyncExtractor<User>) -> UserResponse {
user.into_inner().into() user.into_inner().into()
} }

View File

@@ -1,6 +1,6 @@
use self::schema::*; use self::schema::*;
use crate::app_state::AppState; use crate::routes::schema::ResponseError;
use crate::routes::schema::{IntoResponseAsError, ResponseError}; use crate::state::AppState;
use actix_web::{post, web}; use actix_web::{post, web};
use serde::Deserialize; use serde::Deserialize;
use std::collections::HashMap; use std::collections::HashMap;
@@ -35,7 +35,7 @@ async fn oauth(data: web::Json<Request>, app_state: web::Data<AppState>) -> Serv
let data = data.into_inner(); let data = data.into_inner();
let state = Uuid::new_v4().simple().to_string(); let state = Uuid::new_v4().simple().to_string();
let vk_id = &app_state.vk_id; let vk_id = &app_state.get_env().vk_id;
let client_id = vk_id.client_id.clone().to_string(); let client_id = vk_id.client_id.clone().to_string();
let mut params = HashMap::new(); let mut params = HashMap::new();
@@ -56,27 +56,27 @@ async fn oauth(data: web::Json<Request>, app_state: web::Data<AppState>) -> Serv
{ {
Ok(res) => { Ok(res) => {
if !res.status().is_success() { if !res.status().is_success() {
return ErrorCode::VkIdError.into_response(); return Err(ErrorCode::VkIdError).into();
} }
match res.json::<VkIdAuthResponse>().await { match res.json::<VkIdAuthResponse>().await {
Ok(auth_data) => Ok(auth_data) => Ok(Response {
Ok(Response {
access_token: auth_data.id_token, access_token: auth_data.id_token,
}).into(), }),
Err(error) => { Err(error) => {
sentry::capture_error(&error); sentry::capture_error(&error);
ErrorCode::VkIdError.into_response() Err(ErrorCode::VkIdError)
} }
} }
} }
Err(_) => ErrorCode::VkIdError.into_response(), Err(_) => Err(ErrorCode::VkIdError),
} }
.into()
} }
mod schema { mod schema {
use actix_macros::{IntoResponseErrorNamed, StatusCode}; use actix_macros::{ErrResponse, OkResponse};
use derive_more::Display; use derive_more::Display;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use utoipa::ToSchema; use utoipa::ToSchema;
@@ -97,7 +97,7 @@ mod schema {
pub device_id: String, pub device_id: String,
} }
#[derive(Serialize, ToSchema)] #[derive(Serialize, ToSchema, OkResponse)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[schema(as = VkIdOAuth::Response)] #[schema(as = VkIdOAuth::Response)]
pub struct Response { pub struct Response {
@@ -105,7 +105,7 @@ mod schema {
pub access_token: String, pub access_token: String,
} }
#[derive(Clone, Serialize, ToSchema, IntoResponseErrorNamed, StatusCode, Display)] #[derive(Clone, Serialize, Display, ToSchema, ErrResponse)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")] #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = VkIdOAuth::ErrorCode)] #[schema(as = VkIdOAuth::ErrorCode)]
#[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"] #[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]

17
src/state/env/mod.rs vendored Normal file
View File

@@ -0,0 +1,17 @@
pub mod schedule;
pub mod telegram;
pub mod vk_id;
pub mod yandex_cloud;
pub use self::schedule::ScheduleEnvData;
pub use self::telegram::TelegramEnvData;
pub use self::vk_id::VkIdEnvData;
pub use self::yandex_cloud::YandexCloudEnvData;
/// Application configuration assembled from environment variables.
///
/// Each field's `Default` implementation reads its own set of environment
/// variables; several of them panic when a required variable is missing or
/// malformed (see the individual `*EnvData` types).
#[derive(Default)]
pub struct AppEnv {
    /// Schedule source configuration (`SCHEDULE_*` variables).
    pub schedule: ScheduleEnvData,
    /// Telegram WebApp auth configuration (`TELEGRAM_*` variables).
    pub telegram: TelegramEnvData,
    /// VK ID OAuth configuration (`VK_ID_*` variables).
    pub vk_id: VkIdEnvData,
    /// Yandex Cloud FaaS configuration (`YANDEX_CLOUD_*` variables).
    pub yandex_cloud: YandexCloudEnvData,
}

17
src/state/env/schedule.rs vendored Normal file
View File

@@ -0,0 +1,17 @@
use std::env;
#[derive(Clone)]
pub struct ScheduleEnvData {
    /// Initial schedule URL taken from `SCHEDULE_INIT_URL`, if set.
    pub url: Option<String>,
    /// Whether automatic schedule updates are enabled; disabled when
    /// `SCHEDULE_DISABLE_AUTO_UPDATE` is `1` or `true`.
    pub auto_update: bool,
}

impl Default for ScheduleEnvData {
    /// Reads the schedule configuration from the process environment.
    fn default() -> Self {
        let disabled = matches!(
            env::var("SCHEDULE_DISABLE_AUTO_UPDATE").as_deref(),
            Ok("1") | Ok("true")
        );

        Self {
            url: env::var("SCHEDULE_INIT_URL").ok(),
            auto_update: !disabled,
        }
    }
}

28
src/state/env/telegram.rs vendored Normal file
View File

@@ -0,0 +1,28 @@
use std::env;
#[derive(Clone)]
pub struct TelegramEnvData {
    /// Bot identifier from `TELEGRAM_BOT_ID` (required, must parse as `i64`).
    pub bot_id: i64,
    /// Mini-app host from `TELEGRAM_MINI_APP_HOST` (required).
    pub mini_app_host: String,
    /// True when `TELEGRAM_TEST_DC` is `1` or `true`.
    pub test_dc: bool,
}

impl Default for TelegramEnvData {
    /// Reads the Telegram configuration from the process environment.
    ///
    /// Panics when `TELEGRAM_BOT_ID` is missing or not an integer, or when
    /// `TELEGRAM_MINI_APP_HOST` is missing.
    fn default() -> Self {
        let bot_id = env::var("TELEGRAM_BOT_ID")
            .expect("TELEGRAM_BOT_ID must be set")
            .parse()
            .expect("TELEGRAM_BOT_ID must be integer");

        let mini_app_host =
            env::var("TELEGRAM_MINI_APP_HOST").expect("TELEGRAM_MINI_APP_HOST must be set");

        let test_dc = env::var("TELEGRAM_TEST_DC").is_ok_and(|v| v.eq("1") || v.eq("true"));
        if test_dc {
            log::warn!("Using test data-center of telegram!");
        }

        Self {
            bot_id,
            mini_app_host,
            test_dc,
        }
    }
}

19
src/state/env/vk_id.rs vendored Normal file
View File

@@ -0,0 +1,19 @@
use std::env;
#[derive(Clone)]
pub struct VkIdEnvData {
    /// OAuth application id from `VK_ID_CLIENT_ID` (required, must parse as `i32`).
    pub client_id: i32,
    /// OAuth redirect URL from `VK_ID_REDIRECT_URI` (required).
    pub redirect_url: String,
}

impl Default for VkIdEnvData {
    /// Reads the VK ID configuration from the process environment.
    ///
    /// Panics when `VK_ID_CLIENT_ID` is missing or not an integer, or when
    /// `VK_ID_REDIRECT_URI` is missing.
    fn default() -> Self {
        let client_id = env::var("VK_ID_CLIENT_ID")
            .expect("VK_ID_CLIENT_ID must be set")
            .parse()
            .expect("VK_ID_CLIENT_ID must be integer");

        let redirect_url =
            env::var("VK_ID_REDIRECT_URI").expect("VK_ID_REDIRECT_URI must be set");

        Self {
            client_id,
            redirect_url,
        }
    }
}

16
src/state/env/yandex_cloud.rs vendored Normal file
View File

@@ -0,0 +1,16 @@
use std::env;
#[derive(Clone)]
pub struct YandexCloudEnvData {
    /// API key from `YANDEX_CLOUD_API_KEY` (required).
    pub api_key: String,
    /// Cloud function id from `YANDEX_CLOUD_FUNC_ID` (required).
    pub func_id: String,
}

impl Default for YandexCloudEnvData {
    /// Reads the Yandex Cloud configuration from the process environment.
    ///
    /// Panics when either required variable is missing.
    fn default() -> Self {
        let api_key = env::var("YANDEX_CLOUD_API_KEY").expect("YANDEX_CLOUD_API_KEY must be set");
        let func_id = env::var("YANDEX_CLOUD_FUNC_ID").expect("YANDEX_CLOUD_FUNC_ID must be set");

        Self { api_key, func_id }
    }
}

15
src/state/fcm_client.rs Normal file
View File

@@ -0,0 +1,15 @@
use firebase_messaging_rs::FCMClient;
use std::env;
use tokio::sync::Mutex;
#[derive(Clone)]
pub struct FCMClientData;
impl FCMClientData {
    /// Builds an FCM client wrapped in a mutex when Firebase credentials are configured.
    ///
    /// Returns `Some` only when the `GOOGLE_APPLICATION_CREDENTIALS` environment
    /// variable is set; otherwise push messaging is treated as disabled and
    /// `None` is returned.
    ///
    /// NOTE(review): `FCMClient::new().await.unwrap()` panics if client
    /// construction fails even though the credentials variable is present —
    /// consider propagating the error instead.
    pub async fn new() -> Option<Mutex<FCMClient>> {
        match env::var("GOOGLE_APPLICATION_CREDENTIALS") {
            Ok(_) => Some(Mutex::new(FCMClient::new().await.unwrap())),
            Err(_) => None,
        }
    }
}

88
src/state/mod.rs Normal file
View File

@@ -0,0 +1,88 @@
mod env;
mod fcm_client;
mod schedule;
use crate::state::fcm_client::FCMClientData;
use crate::xls_downloader::basic_impl::BasicXlsDownloader;
use actix_web::web;
use diesel::{Connection, PgConnection};
use firebase_messaging_rs::FCMClient;
use std::ops::DerefMut;
use tokio::sync::{MappedMutexGuard, Mutex, MutexGuard};
pub use self::schedule::{Schedule, ScheduleSnapshot};
pub use crate::state::env::AppEnv;
/// Common data provided to endpoints.
pub struct AppState {
    /// PostgreSQL connection guarded for exclusive access.
    database: Mutex<PgConnection>,
    /// Downloader used to fetch the schedule XLS file.
    downloader: Mutex<BasicXlsDownloader>,
    /// Current schedule state; its snapshot may be uninitialized until
    /// `Schedule::init` has run (see `get_schedule_snapshot`).
    schedule: Mutex<Schedule>,
    /// Configuration read from environment variables at startup.
    env: AppEnv,
    /// Optional FCM client; `None` when Firebase credentials are not configured.
    fcm_client: Option<Mutex<FCMClient>>,
}
impl AppState {
    /// Creates the application state: connects to PostgreSQL, constructs the
    /// downloader and (optionally) the FCM client, reads [`AppEnv`] from the
    /// environment and — unless auto-update is disabled — performs the
    /// initial schedule fetch via `Schedule::init`.
    ///
    /// # Panics
    ///
    /// Panics when `DATABASE_URL` is unset or the database connection fails
    /// (the `AppEnv` field defaults panic on their own missing variables too).
    pub async fn new() -> Result<Self, self::schedule::Error> {
        let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");

        let mut _self = Self {
            downloader: Mutex::new(BasicXlsDownloader::new()),
            schedule: Mutex::new(Schedule::default()),
            database: Mutex::new(
                PgConnection::establish(&database_url)
                    .unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
            ),
            env: AppEnv::default(),
            fcm_client: FCMClientData::new().await,
        };

        // Initial schedule fetch; skipped when auto-update is disabled, in
        // which case the snapshot stays uninitialized (see note below).
        if _self.env.schedule.auto_update {
            _self
                .get_schedule()
                .await
                .init(_self.get_downloader().await.deref_mut(), &_self.env)
                .await?;
        }

        Ok(_self)
    }

    /// Locks and returns the XLS downloader.
    pub async fn get_downloader(&'_ self) -> MutexGuard<'_, BasicXlsDownloader> {
        self.downloader.lock().await
    }

    /// Locks and returns the schedule state.
    pub async fn get_schedule(&'_ self) -> MutexGuard<'_, Schedule> {
        self.schedule.lock().await
    }

    /// Locks the schedule and returns a guard mapped to its snapshot.
    ///
    /// NOTE(review): `assume_init_mut` is sound only if the snapshot was
    /// written beforehand (`Schedule::init`, normally run from `new`). When
    /// auto-update is disabled and `init` was never called, this is undefined
    /// behavior — confirm every call site guarantees initialization.
    pub async fn get_schedule_snapshot(&'_ self) -> MappedMutexGuard<'_, ScheduleSnapshot> {
        let snapshot =
            MutexGuard::<'_, Schedule>::map(self.schedule.lock().await, |schedule| unsafe {
                schedule.snapshot.assume_init_mut()
            });

        snapshot
    }

    /// Locks and returns the database connection.
    pub async fn get_database(&'_ self) -> MutexGuard<'_, PgConnection> {
        self.database.lock().await
    }

    /// Returns the environment-derived configuration.
    pub fn get_env(&self) -> &AppEnv {
        &self.env
    }

    /// Locks and returns the FCM client, or `None` when it is not configured.
    pub async fn get_fcm_client(&'_ self) -> Option<MutexGuard<'_, FCMClient>> {
        match &self.fcm_client {
            Some(client) => Some(client.lock().await),
            None => None,
        }
    }
}
/// Builds the shared application state and wraps it in `web::Data` for actix.
pub async fn new_app_state() -> Result<web::Data<AppState>, self::schedule::Error> {
    AppState::new().await.map(web::Data::new)
}

290
src/state/schedule.rs Normal file
View File

@@ -0,0 +1,290 @@
use crate::state::env::AppEnv;
use crate::utility::hasher::DigestHasher;
use chrono::{DateTime, Utc};
use derive_more::{Display, Error};
use schedule_parser::parse_xls;
use schedule_parser::schema::{ParseError, ParseResult};
use sha1::{Digest, Sha1};
use std::hash::Hash;
use std::mem::MaybeUninit;
use crate::xls_downloader::basic_impl::BasicXlsDownloader;
use crate::xls_downloader::interface::{FetchError, XLSDownloader};
/// Represents errors that can occur during schedule-related operations.
#[derive(Debug, Display, Error)]
pub enum Error {
    /// An error occurred while querying the Yandex Cloud API for a URL.
    ///
    /// This may result from network failures, invalid API credentials, or issues with the Yandex Cloud Function invocation.
    /// See [`QueryUrlError`] for more details about specific causes.
    QueryUrlFailed(QueryUrlError),

    /// The schedule snapshot creation process failed.
    ///
    /// This can happen due to URL conflicts (same URL already in use), failed network requests,
    /// download errors, or invalid XLS file content. See [`SnapshotCreationError`] for details.
    SnapshotCreationFailed(SnapshotCreationError),
}

/// Errors that may occur when querying the Yandex Cloud API to retrieve a URL.
#[derive(Debug, Display, Error)]
pub enum QueryUrlError {
    /// Occurs when the request to the Yandex Cloud API fails.
    ///
    /// This may be due to network issues, invalid API key, incorrect function ID, or other
    /// problems with the Yandex Cloud Function invocation.
    #[display("An error occurred during the request to the Yandex Cloud API: {_0}")]
    RequestFailed(reqwest::Error),
}

/// Errors that may occur during the creation of a schedule snapshot.
#[derive(Debug, Display, Error)]
pub enum SnapshotCreationError {
    /// The URL is the same as the one already being used (no update needed).
    ///
    /// Treated as a non-fatal signal by `Schedule::update`, which then only
    /// refreshes the snapshot's `fetched_at` timestamp.
    #[display("The URL is the same as the one already being used.")]
    SameUrl,

    /// The URL query for the XLS file failed to execute, either due to network issues or invalid API parameters.
    #[display("Failed to fetch URL: {_0}")]
    FetchFailed(FetchError),

    /// Downloading the XLS file content failed after successfully obtaining the URL.
    #[display("Download failed: {_0}")]
    DownloadFailed(FetchError),

    /// The XLS file could not be parsed into a valid schedule format.
    #[display("Schedule data is invalid: {_0}")]
    InvalidSchedule(ParseError),
}
/// Represents a snapshot of the schedule parsed from an XLS file.
///
/// Created by [`ScheduleSnapshot::new`] and stored inside [`Schedule`].
#[derive(Clone)]
pub struct ScheduleSnapshot {
    /// Timestamp when the Polytechnic website was queried for the schedule.
    pub fetched_at: DateTime<Utc>,

    /// Timestamp indicating when the schedule was last updated on the Polytechnic website.
    ///
    /// <note>
    /// This value is determined by the website's content and does not depend on the application.
    /// </note>
    pub updated_at: DateTime<Utc>,

    /// URL pointing to the XLS file containing the source schedule data.
    pub url: String,

    /// Parsed schedule data in the application's internal representation.
    pub data: ParseResult,
}
impl ScheduleSnapshot {
    /// Converting the schedule data into a hash.
    /// ### Important!
    /// The hash does not depend on the dates.
    /// If the application is restarted, but the file with source schedule will remain unchanged, then the hash will not change.
    pub fn hash(&self) -> String {
        let mut hasher = DigestHasher::from(Sha1::new());

        self.data.teachers.iter().for_each(|e| e.hash(&mut hasher));
        self.data.groups.iter().for_each(|e| e.hash(&mut hasher));

        hasher.finalize()
    }

    /// Simply updates the value of [`ScheduleSnapshot::fetched_at`].
    /// Used for auto-updates.
    pub fn update(&mut self) {
        self.fetched_at = Utc::now();
    }

    /// Constructs a new `ScheduleSnapshot` by downloading and parsing schedule data from the specified URL.
    ///
    /// This method first checks if the provided URL is the same as the one already configured in the downloader.
    /// If different, it updates the downloader's URL, fetches the XLS content, parses it, and creates a snapshot.
    /// Errors are returned for URL conflicts, network issues, download failures, or invalid data.
    ///
    /// # Arguments
    ///
    /// * `downloader`: A mutable reference to an `XLSDownloader` implementation used to fetch and parse the schedule data.
    /// * `url`: The source URL pointing to the XLS file containing schedule data.
    ///
    /// returns: Result<ScheduleSnapshot, SnapshotCreationError>
    pub async fn new(
        downloader: &mut BasicXlsDownloader,
        url: String,
    ) -> Result<Self, SnapshotCreationError> {
        // Re-downloading the same URL would produce an identical snapshot,
        // so signal the caller instead of doing network work.
        if downloader.url.as_deref() == Some(url.as_str()) {
            return Err(SnapshotCreationError::SameUrl);
        }

        let head_result = downloader.set_url(&url).await.map_err(|error| {
            if let FetchError::Unknown(error) = &error {
                sentry::capture_error(&error);
            }
            SnapshotCreationError::FetchFailed(error)
        })?;

        let xls_data = downloader
            .fetch(false)
            .await
            .map_err(|error| {
                if let FetchError::Unknown(error) = &error {
                    sentry::capture_error(&error);
                }
                SnapshotCreationError::DownloadFailed(error)
            })?
            .data
            // Non-HEAD fetches are expected to always carry a body —
            // TODO confirm against XLSDownloader::fetch's contract.
            .unwrap();

        let parse_result = parse_xls(&xls_data).map_err(|error| {
            sentry::capture_error(&error);
            SnapshotCreationError::InvalidSchedule(error)
        })?;

        Ok(ScheduleSnapshot {
            fetched_at: head_result.requested_at,
            updated_at: head_result.uploaded_at,
            url,
            data: parse_result,
        })
    }
}
/// Holder of the current schedule snapshot.
///
/// The snapshot starts out uninitialized; [`Schedule::init`] must succeed
/// before any code reads it (e.g. `AppState::get_schedule_snapshot`, which
/// calls `assume_init_mut`).
pub struct Schedule {
    /// Latest parsed snapshot; uninitialized until [`Schedule::init`] writes it.
    pub snapshot: MaybeUninit<ScheduleSnapshot>,
}

impl Default for Schedule {
    /// Creates a `Schedule` whose snapshot is still uninitialized.
    fn default() -> Self {
        Self {
            snapshot: MaybeUninit::uninit(),
        }
    }
}
impl Schedule {
/// Queries the Yandex Cloud Function (FaaS) to obtain a URL for the schedule file.
///
/// This sends a POST request to the specified Yandex Cloud Function endpoint,
/// using the provided API key for authentication. The returned URI is combined
/// with the "https://politehnikum-eng.ru" base domain to form the complete URL.
///
/// # Arguments
///
/// * `api_key` - Authentication token for Yandex Cloud API
/// * `func_id` - ID of the target Yandex Cloud Function to invoke
///
/// # Returns
///
/// Result containing:
/// - `Ok(String)` - Complete URL constructed from the Function's response
/// - `Err(QueryUrlError)` - If the request or response processing fails
async fn query_url(api_key: &str, func_id: &str) -> Result<String, QueryUrlError> {
let client = reqwest::Client::new();
let uri = client
.post(format!(
"https://functions.yandexcloud.net/{}?integration=raw",
func_id
))
.header("Authorization", format!("Api-Key {}", api_key))
.send()
.await
.map_err(|error| QueryUrlError::RequestFailed(error))?
.text()
.await
.map_err(|error| QueryUrlError::RequestFailed(error))?;
Ok(format!("https://politehnikum-eng.ru{}", uri.trim()))
}
/// Initializes the schedule by fetching the URL from the environment or Yandex Cloud Function (FaaS)
/// and creating a [`ScheduleSnapshot`] with the downloaded data.
///
/// # Arguments
///
/// * `downloader`: Mutable reference to an `XLSDownloader` implementation used to fetch and parse the schedule
/// * `app_env`: Reference to the application environment containing either a predefined URL or Yandex Cloud credentials
///
/// # Returns
///
/// Returns `Ok(())` if the snapshot was successfully initialized, or an `Error` if:
/// - URL query to Yandex Cloud failed ([`QueryUrlError`])
/// - Schedule snapshot creation failed ([`SnapshotCreationError`])
pub async fn init(
&mut self,
downloader: &mut BasicXlsDownloader,
app_env: &AppEnv,
) -> Result<(), Error> {
let url = if let Some(url) = &app_env.schedule.url {
log::info!("The default link {} will be used", url);
url.clone()
} else {
log::info!("Obtaining a link using FaaS...");
Self::query_url(
&*app_env.yandex_cloud.api_key,
&*app_env.yandex_cloud.func_id,
)
.await
.map_err(|error| Error::QueryUrlFailed(error))?
};
log::info!("For the initial setup, a link {} will be used", url);
let snapshot = ScheduleSnapshot::new(downloader, url)
.await
.map_err(|error| Error::SnapshotCreationFailed(error))?;
log::info!("Schedule snapshot successfully created!");
self.snapshot.write(snapshot);
Ok(())
}
/// Updates the schedule snapshot by querying the latest URL from FaaS and checking for changes.
/// If the URL hasn't changed, only updates the [`fetched_at`] timestamp. If changed, downloads
/// and parses the new schedule data.
///
/// # Arguments
///
/// * `downloader`: XLS file downloader used to fetch and parse the schedule data
/// * `app_env`: Application environment containing Yandex Cloud configuration and auto-update settings
///
/// returns: `Result<(), Error>` - Returns error if URL query fails or schedule parsing encounters issues
///
/// # Safety
///
/// Uses `unsafe` to access the initialized snapshot, guaranteed valid by prior `init()` call
#[allow(unused)] // TODO: сделать авто апдейт
pub async fn update(
&mut self,
downloader: &mut BasicXlsDownloader,
app_env: &AppEnv,
) -> Result<(), Error> {
assert!(app_env.schedule.auto_update);
let url = Self::query_url(
&*app_env.yandex_cloud.api_key,
&*app_env.yandex_cloud.func_id,
)
.await
.map_err(|error| Error::QueryUrlFailed(error))?;
let snapshot = match ScheduleSnapshot::new(downloader, url).await {
Ok(snapshot) => snapshot,
Err(SnapshotCreationError::SameUrl) => {
unsafe { self.snapshot.assume_init_mut() }.update();
return Ok(());
}
Err(error) => return Err(Error::SnapshotCreationFailed(error)),
};
self.snapshot.write(snapshot);
Ok(())
}
}

View File

@@ -1,23 +1,24 @@
#[cfg(test)] #[cfg(test)]
pub(crate) mod tests { pub(crate) mod tests {
use crate::app_state::{AppState, Schedule, app_state}; use crate::state::{AppState, ScheduleSnapshot, new_app_state};
use crate::parser::tests::test_result;
use actix_web::web; use actix_web::web;
use log::info;
use schedule_parser::test_utils::test_result;
use std::default::Default;
use tokio::sync::OnceCell; use tokio::sync::OnceCell;
pub fn test_env() { pub fn test_env() {
info!("Loading test environment file...");
dotenvy::from_path(".env.test").expect("Failed to load test environment file"); dotenvy::from_path(".env.test").expect("Failed to load test environment file");
} }
pub async fn test_app_state() -> web::Data<AppState> { pub async fn test_app_state() -> web::Data<AppState> {
let state = app_state().await; let state = new_app_state().await.unwrap();
let mut schedule_lock = state.schedule.lock().unwrap();
*schedule_lock = Some(Schedule { state.get_schedule().await.snapshot.write(ScheduleSnapshot {
etag: "".to_string(),
fetched_at: Default::default(), fetched_at: Default::default(),
updated_at: Default::default(), updated_at: Default::default(),
parsed_at: Default::default(), url: "".to_string(),
data: test_result().unwrap(), data: test_result().unwrap(),
}); });

View File

@@ -1,15 +1,15 @@
use std::fmt::{Write};
use std::fmt::Display;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt::Display;
use std::fmt::Write;
/// Server response to errors within Middleware. /// Server response to errors within Middleware.
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
pub struct ResponseErrorMessage<T: Display> { pub struct MiddlewareError<T: Display> {
code: T, code: T,
message: String, message: String,
} }
impl<T: Display + Serialize> ResponseErrorMessage<T> { impl<T: Display + Serialize> MiddlewareError<T> {
pub fn new(code: T) -> Self { pub fn new(code: T) -> Self {
let mut message = String::new(); let mut message = String::new();
write!(&mut message, "{}", code).unwrap(); write!(&mut message, "{}", code).unwrap();

View File

@@ -1,4 +1,4 @@
pub mod jwt;
pub mod error; pub mod error;
pub mod hasher; pub mod hasher;
pub mod mutex; pub mod jwt;
pub mod telegram;

View File

@@ -1,77 +0,0 @@
use std::ops::DerefMut;
use std::sync::Mutex;
/// Convenience extension that runs a closure against the value guarded by a
/// mutex, handling the lock/unlock around the call.
pub trait MutexScope<T, ScopeFn, ScopeFnOutput>
where
    ScopeFn: FnOnce(&mut T) -> ScopeFnOutput,
{
    /// Locks the mutex, hands a mutable reference to the guarded value to
    /// `f`, and returns whatever `f` produced. The lock is released as soon
    /// as `f` finishes.
    ///
    /// # Examples
    ///
    /// ```
    /// let mtx: Mutex<i32> = Mutex::new(10);
    ///
    /// let res = mtx.scope(|x| { *x = *x * 2; *x });
    /// assert_eq!(res, *mtx.lock().unwrap());
    /// ```
    fn scope(&self, f: ScopeFn) -> ScopeFnOutput;
}

impl<T, ScopeFn, ScopeFnOutput> MutexScope<T, ScopeFn, ScopeFnOutput> for Mutex<T>
where
    ScopeFn: FnOnce(&mut T) -> ScopeFnOutput,
{
    // Panics if the mutex is poisoned, same as a plain `lock().unwrap()`.
    fn scope(&self, f: ScopeFn) -> ScopeFnOutput {
        f(&mut *self.lock().unwrap())
    }
}
pub trait MutexScopeAsync<T> {
    /// ## Asynchronous variant of [MutexScope::scope][MutexScope::scope].
    ///
    /// Replaces manually creating a mutex lock to perform operations on the data it manages.
    ///
    /// # Arguments
    ///
    /// * `f`: Asynchronous function (mostly lambda) to which a reference to the mutable object stored in the mutex will be passed.
    ///
    /// returns: Return value of `f` function.
    ///
    /// # Examples
    ///
    /// ```
    /// let mtx: Mutex<i32> = Mutex::new(10);
    ///
    /// let res = mtx.async_scope(async |x| { *x = *x * 2; *x }).await;
    /// assert_eq!(res, *mtx.lock().unwrap());
    /// ```
    async fn async_scope<'a, F, FnFut, FnOut>(&'a self, f: F) -> FnOut
    where
        FnFut: Future<Output = FnOut>,
        F: FnOnce(&'a mut T) -> FnFut,
        T: 'a;
}

impl<T> MutexScopeAsync<T> for Mutex<T> {
    async fn async_scope<'a, F, FnFut, FnOut>(&'a self, f: F) -> FnOut
    where
        FnFut: Future<Output = FnOut>,
        F: FnOnce(&'a mut T) -> FnFut,
        T: 'a,
    {
        // NOTE(review): a `std::sync::Mutex` guard is held across the
        // `.await` below; if `f` suspends while another task tries to lock
        // the same mutex on the same thread pool, this can deadlock.
        let mut guard = self.lock().unwrap();
        // SAFETY(review): this raw-pointer cast stretches the borrow of the
        // guarded value to the caller-chosen lifetime `'a`, which can outlive
        // `guard` (dropped when this function's future completes). If `FnOut`
        // captures the `&'a mut T`, the caller can keep using it after the
        // lock is released — this looks unsound and should be verified or
        // removed rather than relied upon.
        let ptr: &'a mut T = unsafe { &mut *(guard.deref_mut() as *mut _) };
        f(ptr).await
    }
}

91
src/utility/telegram.rs Normal file
View File

@@ -0,0 +1,91 @@
use base64::Engine;
use derive_more::{Display, Error};
use ed25519_dalek::Verifier;
use hex_literal::hex;
use serde::Deserialize;
use std::collections::HashMap;
/// Key/value view of the Telegram WebApp `initData` query string.
pub struct WebAppInitDataMap {
    /// Raw `key -> value` pairs from init data; only the `user` value is
    /// percent-decoded (see `from_str`).
    pub data_map: HashMap<String, String>,
}

/// Subset of the Telegram WebApp `user` JSON object used by the application.
#[derive(Deserialize)]
pub struct WebAppUser {
    /// Telegram user id.
    pub id: i64,
}

/// Failures that can occur while verifying the signature of WebApp init data.
#[derive(Clone, Debug, Display, Error)]
pub enum VerifyError {
    /// The `signature` key is absent from the init data.
    #[display("No signature found.")]
    NoSignature,
    /// The signature is not valid base64url or has an invalid length.
    #[display("The provided signature was corrupted.")]
    BadSignature,
    /// The signature does not match the computed data-check string.
    #[display("The expected signature does not match the actual one.")]
    IntegrityCheckFailed,
}
impl WebAppInitDataMap {
    /// Parses a raw `initData` query string (`key=value&key=value…`) into a map.
    ///
    /// Keys without a `=` are stored with an empty value. Afterwards only the
    /// `user` value is percent-decoded.
    ///
    /// NOTE(review): values other than `user` stay percent-encoded, and
    /// `verify` hashes them as stored — confirm this matches what Telegram
    /// signs. Also, this inherent `from_str(String)` shadows the `FromStr`
    /// convention; consider renaming or taking `&str`.
    pub fn from_str(data: String) -> Self {
        let mut this = Self {
            data_map: HashMap::new(),
        };

        data.split('&')
            .map(|kv| kv.split_once('=').unwrap_or_else(|| (kv, "")))
            .for_each(|(key, value)| {
                this.data_map.insert(key.to_string(), value.to_string());
            });

        // `user` carries a percent-encoded JSON object; decode it so it can
        // be deserialized later (e.g. into `WebAppUser`).
        if let Some(user) = this.data_map.get_mut("user") {
            *user = percent_encoding::percent_decode_str(&*user)
                .decode_utf8_lossy()
                .to_string();
        }

        this
    }

    /// Verifies the Ed25519 `signature` entry of the init data.
    ///
    /// # Arguments
    ///
    /// * `bot_id`: The bot the init data must belong to (prefixes the data-check string).
    /// * `test_dc`: Selects the second hard-coded public key (index 1) instead of the first.
    ///
    /// Returns `Ok(())` when the signature matches the data-check string:
    /// `"<bot_id>:WebAppData\n"` followed by the sorted `key=value` lines,
    /// excluding the `hash` and `signature` entries themselves.
    pub fn verify(&self, bot_id: i64, test_dc: bool) -> Result<(), VerifyError> {
        //noinspection ALL
        // Hard-coded Ed25519 public keys: index 1 is used when `test_dc`,
        // index 0 otherwise.
        const TELEGRAM_PUBLIC_KEY: [[u8; 32]; 2] = [
            hex!("e7bf03a2fa4602af4580703d88dda5bb59f32ed8b02a56c187fe7d34caed242d"),
            hex!("40055058a4ee38156a06562e52eece92a771bcd8346a8c4615cb7376eddf72ec"),
        ];

        let verifying_key = ed25519_dalek::VerifyingKey::from_bytes(
            &TELEGRAM_PUBLIC_KEY[if test_dc { 1 } else { 0 }],
        )
        .unwrap();

        // The signature arrives base64url-encoded without padding.
        let signature = {
            let raw = self
                .data_map
                .get("signature")
                .ok_or(VerifyError::NoSignature)?;

            let bytes = base64::prelude::BASE64_URL_SAFE_NO_PAD
                .decode(raw)
                .map_err(|_| VerifyError::BadSignature)?;

            ed25519_dalek::Signature::from_slice(bytes.as_slice())
                .map_err(|_| VerifyError::BadSignature)?
        };

        // Build the data-check string from every entry except `hash` and
        // `signature`, sorted lexicographically and joined by newlines.
        let data_check_string = format!("{}:WebAppData\n{}", bot_id, {
            let mut vec = self
                .data_map
                .iter()
                .filter(|(key, _)| !["hash", "signature"].iter().any(|variant| variant == key))
                .map(|(key, value)| format!("{}={}", key, value))
                .collect::<Vec<String>>();

            vec.sort();
            vec.join("\n")
        });

        verifying_key
            .verify(data_check_string.as_bytes(), &signature)
            .map_err(|_| VerifyError::IntegrityCheckFailed)
    }
}

View File

@@ -1,14 +1,12 @@
use crate::xls_downloader::interface::{FetchError, FetchOk, FetchResult, XLSDownloader}; use crate::xls_downloader::interface::{FetchError, FetchOk, FetchResult, XLSDownloader};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use std::env;
use std::sync::Arc; use std::sync::Arc;
pub struct BasicXlsDownloader { pub struct BasicXlsDownloader {
pub url: Option<String>, pub url: Option<String>,
user_agent: String,
} }
async fn fetch_specified(url: &String, user_agent: &String, head: bool) -> FetchResult { async fn fetch_specified(url: &str, head: bool) -> FetchResult {
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let response = if head { let response = if head {
@@ -16,59 +14,47 @@ async fn fetch_specified(url: &String, user_agent: &String, head: bool) -> Fetch
} else { } else {
client.get(url) client.get(url)
} }
.header("User-Agent", user_agent.clone()) .header("User-Agent", ua_generator::ua::spoof_chrome_ua())
.send() .send()
.await; .await
.map_err(|e| FetchError::unknown(Arc::new(e)))?;
match response { if response.status().as_u16() != 200 {
Ok(r) => { return Err(FetchError::bad_status_code(response.status().as_u16()));
if r.status().as_u16() != 200 {
return Err(FetchError::BadStatusCode(r.status().as_u16()));
} }
let headers = r.headers(); let headers = response.headers();
let content_type = headers.get("Content-Type"); let content_type = headers
let etag = headers.get("etag"); .get("Content-Type")
let last_modified = headers.get("last-modified"); .ok_or(FetchError::bad_headers("Content-Type"))?;
let date = headers.get("date");
if content_type.is_none() { if !headers.contains_key("etag") {
Err(FetchError::BadHeaders("Content-Type".to_string())) return Err(FetchError::bad_headers("etag"));
} else if etag.is_none() { }
Err(FetchError::BadHeaders("ETag".to_string()))
} else if last_modified.is_none() { let last_modified = headers
Err(FetchError::BadHeaders("Last-Modified".to_string())) .get("last-modified")
} else if date.is_none() { .ok_or(FetchError::bad_headers("last-modified"))?;
Err(FetchError::BadHeaders("Date".to_string()))
} else if content_type.unwrap() != "application/vnd.ms-excel" { if content_type != "application/vnd.ms-excel" {
Err(FetchError::BadContentType( return Err(FetchError::bad_content_type(content_type.to_str().unwrap()));
content_type.unwrap().to_str().unwrap().to_string(), }
))
} else { let last_modified = DateTime::parse_from_rfc2822(&last_modified.to_str().unwrap())
let etag = etag.unwrap().to_str().unwrap().to_string();
let last_modified =
DateTime::parse_from_rfc2822(&last_modified.unwrap().to_str().unwrap())
.unwrap() .unwrap()
.with_timezone(&Utc); .with_timezone(&Utc);
Ok(if head { Ok(if head {
FetchOk::head(etag, last_modified) FetchOk::head(last_modified)
} else { } else {
FetchOk::get(etag, last_modified, r.bytes().await.unwrap().to_vec()) FetchOk::get(last_modified, response.bytes().await.unwrap().to_vec())
}) })
}
}
Err(error) => Err(FetchError::Unknown(Arc::new(error))),
}
} }
impl BasicXlsDownloader { impl BasicXlsDownloader {
pub fn new() -> Self { pub fn new() -> Self {
BasicXlsDownloader { BasicXlsDownloader { url: None }
url: None,
user_agent: env::var("REQWEST_USER_AGENT").expect("USER_AGENT must be set"),
}
} }
} }
@@ -77,15 +63,15 @@ impl XLSDownloader for BasicXlsDownloader {
if self.url.is_none() { if self.url.is_none() {
Err(FetchError::NoUrlProvided) Err(FetchError::NoUrlProvided)
} else { } else {
fetch_specified(self.url.as_ref().unwrap(), &self.user_agent, head).await fetch_specified(&*self.url.as_ref().unwrap(), head).await
} }
} }
async fn set_url(&mut self, url: String) -> FetchResult { async fn set_url(&mut self, url: &str) -> FetchResult {
let result = fetch_specified(&url, &self.user_agent, true).await; let result = fetch_specified(url, true).await;
if let Ok(_) = result { if let Ok(_) = result {
self.url = Some(url); self.url = Some(url.to_string());
} }
result result
@@ -94,17 +80,16 @@ impl XLSDownloader for BasicXlsDownloader {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::xls_downloader::basic_impl::{fetch_specified, BasicXlsDownloader}; use crate::xls_downloader::basic_impl::{BasicXlsDownloader, fetch_specified};
use crate::xls_downloader::interface::{FetchError, XLSDownloader}; use crate::xls_downloader::interface::{FetchError, XLSDownloader};
#[tokio::test] #[tokio::test]
async fn bad_url() { async fn bad_url() {
let url = "bad_url".to_string(); let url = "bad_url";
let user_agent = String::new();
let results = [ let results = [
fetch_specified(&url, &user_agent, true).await, fetch_specified(url, true).await,
fetch_specified(&url, &user_agent, false).await, fetch_specified(url, false).await,
]; ];
assert!(results[0].is_err()); assert!(results[0].is_err());
@@ -113,18 +98,17 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn bad_status_code() { async fn bad_status_code() {
let url = "https://www.google.com/not-found".to_string(); let url = "https://www.google.com/not-found";
let user_agent = String::new();
let results = [ let results = [
fetch_specified(&url, &user_agent, true).await, fetch_specified(url, true).await,
fetch_specified(&url, &user_agent, false).await, fetch_specified(url, false).await,
]; ];
assert!(results[0].is_err()); assert!(results[0].is_err());
assert!(results[1].is_err()); assert!(results[1].is_err());
let expected_error = FetchError::BadStatusCode(404); let expected_error = FetchError::BadStatusCode { status_code: 404 };
assert_eq!(*results[0].as_ref().err().unwrap(), expected_error); assert_eq!(*results[0].as_ref().err().unwrap(), expected_error);
assert_eq!(*results[1].as_ref().err().unwrap(), expected_error); assert_eq!(*results[1].as_ref().err().unwrap(), expected_error);
@@ -132,18 +116,19 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn bad_headers() { async fn bad_headers() {
let url = "https://www.google.com/favicon.ico".to_string(); let url = "https://www.google.com/favicon.ico";
let user_agent = String::new();
let results = [ let results = [
fetch_specified(&url, &user_agent, true).await, fetch_specified(url, true).await,
fetch_specified(&url, &user_agent, false).await, fetch_specified(url, false).await,
]; ];
assert!(results[0].is_err()); assert!(results[0].is_err());
assert!(results[1].is_err()); assert!(results[1].is_err());
let expected_error = FetchError::BadHeaders("ETag".to_string()); let expected_error = FetchError::BadHeaders {
expected_header: "ETag".to_string(),
};
assert_eq!(*results[0].as_ref().err().unwrap(), expected_error); assert_eq!(*results[0].as_ref().err().unwrap(), expected_error);
assert_eq!(*results[1].as_ref().err().unwrap(), expected_error); assert_eq!(*results[1].as_ref().err().unwrap(), expected_error);
@@ -151,12 +136,11 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn bad_content_type() { async fn bad_content_type() {
let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb59fd46303008396ac96%2Fexample.txt".to_string(); let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb59fd46303008396ac96%2Fexample.txt";
let user_agent = String::new();
let results = [ let results = [
fetch_specified(&url, &user_agent, true).await, fetch_specified(url, true).await,
fetch_specified(&url, &user_agent, false).await, fetch_specified(url, false).await,
]; ];
assert!(results[0].is_err()); assert!(results[0].is_err());
@@ -165,12 +149,11 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn ok() { async fn ok() {
let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string(); let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";
let user_agent = String::new();
let results = [ let results = [
fetch_specified(&url, &user_agent, true).await, fetch_specified(url, true).await,
fetch_specified(&url, &user_agent, false).await, fetch_specified(url, false).await,
]; ];
assert!(results[0].is_ok()); assert!(results[0].is_ok());
@@ -179,7 +162,7 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn downloader_set_ok() { async fn downloader_set_ok() {
let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string(); let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";
let mut downloader = BasicXlsDownloader::new(); let mut downloader = BasicXlsDownloader::new();
@@ -188,7 +171,7 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn downloader_set_err() { async fn downloader_set_err() {
let url = "bad_url".to_string(); let url = "bad_url";
let mut downloader = BasicXlsDownloader::new(); let mut downloader = BasicXlsDownloader::new();
@@ -197,7 +180,7 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn downloader_ok() { async fn downloader_ok() {
let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string(); let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";
let mut downloader = BasicXlsDownloader::new(); let mut downloader = BasicXlsDownloader::new();

View File

@@ -1,11 +1,11 @@
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use derive_more::Display; use derive_more::{Display, Error};
use std::mem::discriminant; use std::mem::discriminant;
use std::sync::Arc; use std::sync::Arc;
use utoipa::ToSchema; use utoipa::ToSchema;
/// XLS data retrieval errors. /// XLS data retrieval errors.
#[derive(Clone, Debug, ToSchema, Display)] #[derive(Clone, Debug, ToSchema, Display, Error)]
pub enum FetchError { pub enum FetchError {
/// File url is not set. /// File url is not set.
#[display("The link to the timetable was not provided earlier.")] #[display("The link to the timetable was not provided earlier.")]
@@ -17,16 +17,38 @@ pub enum FetchError {
Unknown(Arc<reqwest::Error>), Unknown(Arc<reqwest::Error>),
/// Server returned a status code different from 200. /// Server returned a status code different from 200.
#[display("Server returned a status code {_0}.")] #[display("Server returned a status code {status_code}.")]
BadStatusCode(u16), BadStatusCode { status_code: u16 },
/// The url leads to a file of a different type. /// The url leads to a file of a different type.
#[display("The link leads to a file of type '{_0}'.")] #[display("The link leads to a file of type '{content_type}'.")]
BadContentType(String), BadContentType { content_type: String },
/// Server doesn't return expected headers. /// Server doesn't return expected headers.
#[display("Server doesn't return expected header(s) '{_0}'.")] #[display("Server doesn't return expected header(s) '{expected_header}'.")]
BadHeaders(String), BadHeaders { expected_header: String },
}
impl FetchError {
pub fn unknown(error: Arc<reqwest::Error>) -> Self {
Self::Unknown(error)
}
pub fn bad_status_code(status_code: u16) -> Self {
Self::BadStatusCode { status_code }
}
pub fn bad_content_type(content_type: &str) -> Self {
Self::BadContentType {
content_type: content_type.to_string(),
}
}
pub fn bad_headers(expected_header: &str) -> Self {
Self::BadHeaders {
expected_header: expected_header.to_string(),
}
}
} }
impl PartialEq for FetchError { impl PartialEq for FetchError {
@@ -37,9 +59,6 @@ impl PartialEq for FetchError {
/// Result of XLS data retrieval. /// Result of XLS data retrieval.
pub struct FetchOk { pub struct FetchOk {
/// ETag object.
pub etag: String,
/// File upload date. /// File upload date.
pub uploaded_at: DateTime<Utc>, pub uploaded_at: DateTime<Utc>,
@@ -52,9 +71,8 @@ pub struct FetchOk {
impl FetchOk { impl FetchOk {
/// Result without file content. /// Result without file content.
pub fn head(etag: String, uploaded_at: DateTime<Utc>) -> Self { pub fn head(uploaded_at: DateTime<Utc>) -> Self {
FetchOk { FetchOk {
etag,
uploaded_at, uploaded_at,
requested_at: Utc::now(), requested_at: Utc::now(),
data: None, data: None,
@@ -62,9 +80,8 @@ impl FetchOk {
} }
/// Full result. /// Full result.
pub fn get(etag: String, uploaded_at: DateTime<Utc>, data: Vec<u8>) -> Self { pub fn get(uploaded_at: DateTime<Utc>, data: Vec<u8>) -> Self {
FetchOk { FetchOk {
etag,
uploaded_at, uploaded_at,
requested_at: Utc::now(), requested_at: Utc::now(),
data: Some(data), data: Some(data),
@@ -79,5 +96,5 @@ pub trait XLSDownloader {
async fn fetch(&self, head: bool) -> FetchResult; async fn fetch(&self, head: bool) -> FetchResult;
/// Setting the file link. /// Setting the file link.
async fn set_url(&mut self, url: String) -> FetchResult; async fn set_url(&mut self, url: &str) -> FetchResult;
} }