51 Commits

Author SHA1 Message Date
8de1891724 chore(release): bump version to 1.0.5 2025-05-26 05:30:44 +04:00
4cf6df379e fix(parser): fix lessons merging 2025-05-26 05:24:13 +04:00
ba8b164b6a refactor(parser): rewrite some parts of code 2025-05-26 05:24:08 +04:00
ff9d7d6c3a fix(cache): fix setting cache_update_required flag in cache status 2025-05-25 17:39:23 +04:00
9090716f87 fix(test): fix test sign_up_invalid_group 2025-05-25 15:57:18 +04:00
ee992f1b55 chore(xls): update schedule xls 2025-05-25 15:49:52 +04:00
7f71fb1616 refactor(env): remove unsave env::set_var call 2025-05-25 15:48:43 +04:00
234055eaeb feat(test): add ability to use test env without schedule 2025-05-25 15:48:10 +04:00
fceffb900d release/v1.0.3 2025-04-18 00:29:04 +04:00
49ce0005dc Исправление работы подключения к сайтам из-за отсутствия сертификатов. 2025-04-18 00:28:55 +04:00
4c738085f2 release/v1.0.2 2025-04-18 00:11:55 +04:00
20602eb863 Улучшенное отображение ошибок при обновлении ссылки расписания. 2025-04-18 00:11:05 +04:00
e04d462223 1.0.1 2025-04-17 23:08:58 +04:00
22af02464d Исправление работы авторизации с помощью VK ID. 2025-04-17 23:07:19 +04:00
9a517519db User-Agent для reqwest теперь устанавливается с помощью переменной окружения. 2025-04-17 22:41:42 +04:00
65376e75f7 Workflow для публикации релизов.
- Запускает тесты.
- Собирает приложение.
- Отправляет отладочную информацию в Sentry.
- Собирает и отправляет в реестр Docker image с приложением.
- Создаёт релиз со списком изменений и артефактами сборки.
2025-04-17 21:34:46 +04:00
bef6163c1b Отключение тестов при pull request. 2025-04-17 16:39:39 +04:00
283858fea3 Возможный фикс тестов. 2025-04-17 01:10:19 +04:00
66ad4ef938 Подключение sentry. 2025-04-17 01:07:03 +04:00
28f59389ed Исправление тестов.
FCMClient теперь не инициализируется, если отсутствует требуемая переменная окружения.
2025-04-16 16:38:37 +04:00
e71ab0526d Middleware для явного указания кодировки в заголовке Content-Type. 2025-04-16 16:21:53 +04:00
ff05614404 Исправление обработки времени у пар. 2025-04-16 16:21:18 +04:00
9cc03c4ffe Фильтр эндпоинтов для middleware. 2025-04-16 16:20:32 +04:00
5068fe3069 Обновление документации. 2025-04-15 22:09:10 +04:00
2fd6d787a0 Эндпоинт users/change-group. 2025-04-15 19:39:46 +04:00
7a1b32d843 Эндпоинт users/change-username. 2025-04-15 18:55:45 +04:00
542258df01 Эндпоинт fcm/set-token. 2025-04-15 18:44:43 +04:00
ccaabfe909 Асинхронный вариант MutexScope. 2025-04-15 18:44:03 +04:00
4c5e0761eb Подключение FCM. 2025-04-15 14:35:05 +04:00
057dac5b09 Использование функции для осуществления операций в базе данных вместо ручного блокирования мьютекса. 2025-04-15 14:33:58 +04:00
5b6f5c830f Реформат путей к эндпоинтам.
Добавлен экстрактор пользователя с дополнительными полями.

Добавлена связь таблиц User и FCM.

Завершена реализация авторизации с помощью VK ID.

Добавлен эндпоинт fcm/update-callback/{version}.
2025-04-14 22:08:28 +04:00
680419ea78 0.8.0
Реализованы все требуемые эндпоинты schedule.

Улучшена документация.
2025-03-28 23:24:37 +04:00
30c985a3d7 Добавлена возможность создания ResponseError с описанием ошибки.
Добавлен макрос для трансформации ErrorCode в Response, а также для имплементации треита PartialStatusCode.
2025-03-28 15:42:45 +04:00
70a7480ea3 0.7.0
Добавлена OpenAPI документация эндпоинтов и структур с интерфейсом RapiDoc.

Добавлены derive макросы для преобразования структуры в HttpResponse с помощью ResponderJson и IResponse<T> с помощью IntoIResponse.

Ревью кода эндпоинтов связанных с авторизацией.

Эндпоинт users/me теперь объект пользователя в требуемом виде.
2025-03-28 01:21:49 +04:00
1add903f36 0.6.0
Добавлена проверка токена пользователя для перед обработкой запроса.
2025-03-27 20:03:35 +04:00
f703cc8326 0.5.0
Возвращёна реализация сериализации в json для IResponse

Добавлены типы для экстракции данных из запросов средствами actix-web

Добавлен экстрактор для получения пользователя по токену доступа передаваемому в запросе

Добавлен макрос для автоматической реализации ResponseError для ошибок экстракторов

Добавлен эндпоинт users/me

Из главного проекта исключена зависимость actix-http посредством переноса части тестового функционала в отдельный crate
2025-03-26 08:05:22 +04:00
ab1cbd795e 0.4.0
Авторизация через токен вк

Слияние schedule_parser с проектом

Перенос схемы запросов/ответов в файлы эндпоинтов

Переход с библиотеки jwt на jsonwebtokens
2025-03-25 02:05:27 +04:00
0316f58592 Обновление workflow тестов 2025-03-23 06:19:51 +04:00
a95494d3be Регистрация и тесты эндпоинтов 2025-03-23 06:11:13 +04:00
844c89a365 Тесты JWT
Имплементация PartialEq для utils::jwt::VerifyError

Замена устаревшего changeset_options на diesel

Удалена проверка на ошибку создания токена, так как вероятность её появления близка к нулю
2025-03-22 23:14:14 +04:00
ba86dfc3fe Полностью рабочая авторизация 2025-03-22 22:44:52 +04:00
9f7460973e Подключение к Postgres и тестовый эндпоинт авторизации 2025-03-22 03:20:55 +04:00
Nikita
3cf42eea8a Create LICENSE 2025-03-22 00:31:03 +04:00
Nikita
d19b6c1069 Create CODE_OF_CONDUCT.md 2025-03-22 00:30:18 +04:00
126ba23001 Скачивание XLS документа по ссылке 2025-03-21 23:55:16 +04:00
d75d3fbc97 Установка разрешений для Workflow 2025-03-21 21:03:28 +04:00
Nikita
627cf1a74e Create dependabot.yml 2025-03-21 20:59:40 +04:00
b508db693e Добавлена конвертация расписания групп в расписание преподавателей 2025-03-21 20:54:52 +04:00
436d08a56a Добавление README 2025-03-21 07:39:56 +04:00
aa2618c5f5 Action для тестирования 2025-03-21 07:36:39 +04:00
f0a951ad38 Удаление неиспользуемых зависимостей 2025-03-21 07:28:37 +04:00
85 changed files with 9181 additions and 846 deletions

6
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "weekly"

169
.github/workflows/release.yml vendored Normal file
View File

@@ -0,0 +1,169 @@
name: release
on:
push:
tags: [ "release/v*" ]
permissions:
contents: write
env:
CARGO_TERM_COLOR: always
BINARY_NAME: schedule-parser-rusted
TEST_DB: ${{ secrets.TEST_DATABASE_URL }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
DOCKER_IMAGE_NAME: ${{ github.repository }}
DOCKER_REGISTRY_HOST: registry.n08i40k.ru
DOCKER_REGISTRY_USERNAME: ${{ github.repository_owner }}
DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
jobs:
test:
name: Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
with:
toolchain: stable
- name: Test
run: |
touch .env.test
cargo test --verbose
env:
DATABASE_URL: ${{ env.TEST_DB }}
JWT_SECRET: "test-secret-at-least-256-bits-used"
VKID_CLIENT_ID: 0
VKID_REDIRECT_URI: "vk0://vk.com/blank.html"
REQWEST_USER_AGENT: "Dalvik/2.1.0 (Linux; U; Android 6.0.1; OPPO R9s Build/MMB29M)"
build:
name: Build
runs-on: ubuntu-latest
needs: test
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
with:
toolchain: stable
- name: Build
run: cargo build --release --verbose
- name: Extract debug symbols
run: |
objcopy --only-keep-debug target/release/${{ env.BINARY_NAME }}{,.d}
objcopy --strip-debug --strip-unneeded target/release/${{ env.BINARY_NAME }}
objcopy --add-gnu-debuglink target/release/${{ env.BINARY_NAME }}{.d,}
- name: Setup sentry-cli
uses: matbour/setup-sentry-cli@v2.0.0
with:
version: latest
token: ${{ env.SENTRY_AUTH_TOKEN }}
organization: ${{ env.SENTRY_ORG }}
project: ${{ env.SENTRY_PROJECT }}
- name: Upload debug symbols to Sentry
run: |
sentry-cli debug-files upload --include-sources .
- name: Upload build binary artifact
uses: actions/upload-artifact@v4
with:
name: release-binary
path: target/release/${{ env.BINARY_NAME }}
- name: Upload build debug symbols artifact
uses: actions/upload-artifact@v4
with:
name: release-symbols
path: target/release/${{ env.BINARY_NAME }}.d
docker:
name: Build & Push Docker Image
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v4
- name: Download build artifacts
uses: actions/download-artifact@v4
with:
name: release-binary
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v3.10.0
- name: Login to Registry
uses: docker/login-action@v3.4.0
with:
registry: ${{ env.DOCKER_REGISTRY_HOST }}
username: ${{ env.DOCKER_REGISTRY_USERNAME }}
password: ${{ env.DOCKER_REGISTRY_PASSWORD }}
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@v5.7.0
with:
images: ${{ env.DOCKER_REGISTRY_HOST }}/${{ env.DOCKER_IMAGE_NAME }}
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v6.15.0
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
build-args: |
"BINARY_NAME=${{ env.BINARY_NAME }}"
release:
name: Create GitHub Release
runs-on: ubuntu-latest
needs:
- build
- docker
# noinspection GrazieInspection,SpellCheckingInspection
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Generate changelog
run: |
LAST_TAG=$(git describe --tags --abbrev=0 HEAD^)
echo "## Коммиты с прошлого релиза $LAST_TAG" > CHANGELOG.md
git log $LAST_TAG..HEAD --oneline >> CHANGELOG.md
- name: Download build artifacts
uses: actions/download-artifact@v4
with:
pattern: release-*
merge-multiple: true
- name: Create Release
id: create_release
uses: ncipollo/release-action@v1.16.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
artifacts: "${{ env.BINARY_NAME }},${{ env.BINARY_NAME }}.d"
bodyFile: CHANGELOG.md

32
.github/workflows/test.yml vendored Normal file
View File

@@ -0,0 +1,32 @@
name: cargo test
on:
push:
branches: [ "master" ]
tags-ignore: [ "release/v*" ]
permissions:
contents: read
env:
CARGO_TERM_COLOR: always
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Build
run: cargo build
- name: Create .env.test
run: touch .env.test
- name: Run tests
run: cargo test
env:
DATABASE_URL: ${{ secrets.TEST_DATABASE_URL }}
JWT_SECRET: "test-secret-at-least-256-bits-used"
VKID_CLIENT_ID: 0
VKID_REDIRECT_URI: "vk0://vk.com/blank.html"
REQWEST_USER_AGENT: "Dalvik/2.1.0 (Linux; U; Android 6.0.1; OPPO R9s Build/MMB29M)"

6
.gitignore vendored
View File

@@ -1,3 +1,7 @@
/target
.~*.xls
schedule.json
schedule.json
teachers.json
.env*
/*-firebase-adminsdk-*.json

12
.idea/dataSources.xml generated Normal file
View File

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DataSourceManagerImpl" format="xml" multifile-model="true">
<data-source source="LOCAL" name="sp@localhost" uuid="28502a90-08bf-4cc0-8494-10dc74e37189">
<driver-ref>postgresql</driver-ref>
<synchronize>true</synchronize>
<jdbc-driver>org.postgresql.Driver</jdbc-driver>
<jdbc-url>jdbc:postgresql://localhost:5432/sp</jdbc-url>
<working-dir>$ProjectFileDir$</working-dir>
</data-source>
</component>
</project>

14
.idea/discord.xml generated Normal file
View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DiscordProjectSettings">
<option name="show" value="PROJECT_FILES" />
<option name="description" value="" />
<option name="applicationTheme" value="default" />
<option name="iconsTheme" value="default" />
<option name="button1Title" value="" />
<option name="button1Url" value="" />
<option name="button2Title" value="" />
<option name="button2Url" value="" />
<option name="customApplicationId" value="" />
</component>
</project>

View File

@@ -2,10 +2,16 @@
<module type="EMPTY_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/lib/schedule_parser/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/lib/schedule_parser/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/actix-macros/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/actix-test/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/schedule-parser/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/schedule-parser/src" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/actix-macros/target" />
<excludeFolder url="file://$MODULE_DIR$/actix-test/target" />
<excludeFolder url="file://$MODULE_DIR$/target" />
<excludeFolder url="file://$MODULE_DIR$/.idea/dataSources" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />

128
CODE_OF_CONDUCT.md Normal file
View File

@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
email.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

2420
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,15 +1,45 @@
[workspace]
members = ["lib/schedule_parser"]
members = ["actix-macros", "actix-test", "schedule-parser"]
[package]
name = "schedule-parser-rusted"
version = "0.1.0"
version = "1.0.5"
edition = "2024"
publish = false
[profile.release]
debug = true
[dependencies]
actix-web = "4.10.2"
actix-macros = { path = "actix-macros" }
schedule-parser = { path = "schedule-parser", features = ["test-utils"] }
bcrypt = "0.17.0"
chrono = { version = "0.4.40", features = ["serde"] }
derive_more = { version = "2", features = ["full"] }
diesel = { version = "2.2.8", features = ["postgres"] }
diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
dotenvy = "0.15.7"
env_logger = "0.11.7"
firebase-messaging-rs = { git = "https://github.com/i10416/firebase-messaging-rs.git" }
futures-util = "0.3.31"
jsonwebtoken = { version = "9.3.1", features = ["use_pem"] }
hex = "0.4.3"
mime = "0.3.17"
objectid = "0.2.0"
reqwest = { version = "0.12.15", features = ["json"] }
sentry = "0.38"
sentry-actix = "0.38"
serde = { version = "1.0.219", features = ["derive"] }
serde_repr = "0.1.20"
serde_json = "1.0.140"
schedule_parser = { path = "./lib/schedule_parser" }
serde_with = "3.12.0"
sha1 = "0.11.0-pre.5"
tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] }
rand = "0.9.0"
utoipa = { version = "5", features = ["actix_extras", "chrono"] }
utoipa-rapidoc = { version = "6.0.0", features = ["actix-web"] }
utoipa-actix-web = "0.1"
uuid = { version = "1.16.0", features = ["v4"] }
[dev-dependencies]
actix-test = { path = "actix-test" }

14
Dockerfile Normal file
View File

@@ -0,0 +1,14 @@
FROM debian:stable-slim
LABEL authors="n08i40k"
ARG BINARY_NAME
WORKDIR /app/
RUN apt update && \
apt install -y libpq5 ca-certificates openssl
COPY ./${BINARY_NAME} /bin/main
RUN chmod +x /bin/main
ENTRYPOINT ["main"]

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2025 Nikita
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

3
README.md Normal file
View File

@@ -0,0 +1,3 @@
# API для получения расписания политехникума
[![Rust](https://github.com/n08i40k/schedule-parser-rusted/actions/workflows/test.yml/badge.svg)](https://github.com/n08i40k/schedule-parser-rusted/actions/workflows/test.yml)

1
actix-macros/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

7
actix-macros/Cargo.lock generated Normal file
View File

@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "actix-utility-macros"
version = "0.1.0"

12
actix-macros/Cargo.toml Normal file
View File

@@ -0,0 +1,12 @@
[package]
name = "actix-macros"
version = "0.1.0"
edition = "2024"
[dependencies]
syn = "2.0.100"
quote = "1.0.40"
proc-macro2 = "1.0.94"
[lib]
proc-macro = true

209
actix-macros/src/lib.rs Normal file
View File

@@ -0,0 +1,209 @@
extern crate proc_macro;
use proc_macro::TokenStream;
mod shared {
use quote::{ToTokens, quote};
use syn::{Attribute, DeriveInput};
pub fn find_status_code(attrs: &Vec<Attribute>) -> Option<proc_macro2::TokenStream> {
attrs
.iter()
.find_map(|attr| -> Option<proc_macro2::TokenStream> {
if !attr.path().is_ident("status_code") {
return None;
}
let meta = attr.meta.require_name_value().ok()?;
let code = meta.value.to_token_stream().to_string();
let trimmed_code = code.trim_matches('"');
if let Ok(numeric_code) = trimmed_code.parse::<u16>() {
Some(quote! { actix_web::http::StatusCode::from_u16(#numeric_code).unwrap() })
} else {
let string_code: proc_macro2::TokenStream =
trimmed_code.to_string().parse().unwrap();
Some(quote! { #string_code })
}
})
}
pub fn get_arms(ast: &DeriveInput) -> Vec<proc_macro2::TokenStream> {
let name = &ast.ident;
let variants = if let syn::Data::Enum(data) = &ast.data {
&data.variants
} else {
panic!("Only enums are supported");
};
let mut status_code_arms: Vec<proc_macro2::TokenStream> = variants
.iter()
.map(|v| -> Option<proc_macro2::TokenStream> {
let status_code = find_status_code(&v.attrs)?;
let variant_name = &v.ident;
Some(quote! { #name::#variant_name => #status_code, })
})
.filter(|v| v.is_some())
.map(|v| v.unwrap())
.collect();
if status_code_arms.len() < variants.len() {
let status_code = find_status_code(&ast.attrs)
.unwrap_or_else(|| quote! { ::actix_web::http::StatusCode::INTERNAL_SERVER_ERROR });
status_code_arms.push(quote! { _ => #status_code });
}
status_code_arms
}
}
mod response_error_message {
use proc_macro::TokenStream;
use quote::quote;
pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
let status_code_arms = super::shared::get_arms(ast);
TokenStream::from(quote! {
impl ::actix_web::ResponseError for #name {
fn status_code(&self) -> ::actix_web::http::StatusCode {
match self {
#(#status_code_arms)*
}
}
fn error_response(&self) -> ::actix_web::HttpResponse<BoxBody> {
::actix_web::HttpResponse::build(self.status_code())
.json(crate::utility::error::ResponseErrorMessage::new(self.clone()))
}
}
})
}
}
mod status_code {
use proc_macro::TokenStream;
use quote::quote;
pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
let status_code_arms = super::shared::get_arms(ast);
TokenStream::from(quote! {
impl crate::routes::schema::PartialStatusCode for #name {
fn status_code(&self) -> ::actix_web::http::StatusCode {
match self {
#(#status_code_arms)*
}
}
}
})
}
}
mod responder_json {
use proc_macro::TokenStream;
use quote::quote;
pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
TokenStream::from(quote! {
impl ::actix_web::Responder for #name {
type Body = ::actix_web::body::EitherBody<::actix_web::body::BoxBody>;
fn respond_to(self, _: &::actix_web::HttpRequest) -> ::actix_web::HttpResponse<Self::Body> {
::actix_web::HttpResponse::Ok()
.json(self)
.map_into_left_body()
}
}
})
}
}
mod into_response_error {
use proc_macro::TokenStream;
use quote::quote;
pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
TokenStream::from(quote! {
impl ::core::convert::Into<crate::routes::schema::ResponseError<#name>> for #name {
fn into(self) -> crate::routes::schema::ResponseError<#name> {
crate::routes::schema::ResponseError {
code: self,
message: ::core::option::Option::None,
}
}
}
impl<T> crate::routes::schema::IntoResponseAsError<T> for #name
where
T: ::serde::ser::Serialize + ::utoipa::PartialSchema {}
})
}
pub fn fmt_named(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
TokenStream::from(quote! {
impl ::core::convert::Into<crate::routes::schema::ResponseError<#name>> for #name {
fn into(self) -> crate::routes::schema::ResponseError<#name> {
crate::routes::schema::ResponseError {
message: ::core::option::Option::Some(format!("{}", self)),
code: self,
}
}
}
impl<T> crate::routes::schema::IntoResponseAsError<T> for #name
where
T: ::serde::ser::Serialize + ::utoipa::PartialSchema {}
})
}
}
#[proc_macro_derive(ResponseErrorMessage, attributes(status_code))]
pub fn rem_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap();
response_error_message::fmt(&ast)
}
#[proc_macro_derive(ResponderJson)]
pub fn responser_json_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap();
responder_json::fmt(&ast)
}
#[proc_macro_derive(IntoResponseError)]
pub fn into_response_error_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap();
into_response_error::fmt(&ast)
}
#[proc_macro_derive(IntoResponseErrorNamed)]
pub fn into_response_error_named_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap();
into_response_error::fmt_named(&ast)
}
#[proc_macro_derive(StatusCode, attributes(status_code))]
pub fn status_code_derive(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).unwrap();
status_code::fmt(&ast)
}

1
actix-test/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

1520
actix-test/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

8
actix-test/Cargo.toml Normal file
View File

@@ -0,0 +1,8 @@
[package]
name = "actix-test"
version = "0.1.0"
edition = "2024"
[dependencies]
actix-http = "3.10.0"
actix-web = "4.10.2"

12
actix-test/src/lib.rs Normal file
View File

@@ -0,0 +1,12 @@
use actix_web::dev::{HttpServiceFactory, Service, ServiceResponse};
use actix_web::{App, test, web};
pub async fn test_app<F, A: 'static>(
app_state: web::Data<A>,
factory: F,
) -> impl Service<actix_http::Request, Response = ServiceResponse, Error = actix_web::Error>
where
F: HttpServiceFactory + 'static,
{
test::init_service(App::new().app_data(app_state).service(factory)).await
}

9
diesel.toml Normal file
View File

@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/database/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "./migrations"

View File

@@ -1,23 +0,0 @@
[package]
name = "schedule_parser"
version = "0.1.0"
edition = "2024"
[lib]
name = "schedule_parser"
path = "src/lib/lib.rs"
[dependencies]
serde = { version = "1.0.219", features = ["derive"] }
serde_repr = "0.1.20"
chrono = { version = "0.4.40", features = ["serde"] }
calamine = "0.26.1"
regex = "1.11.1"
fuzzy-matcher = "0.3.7"
[dev-dependencies]
criterion = "0.5.1"
[[bench]]
name = "parse"
harness = false

View File

@@ -1,627 +0,0 @@
use crate::LessonParseResult::{Lessons, Street};
use crate::schema::LessonType::Break;
use crate::schema::{Day, Group, Lesson, LessonSubGroup, LessonTime, LessonType};
use calamine::{Reader, Xls, open_workbook};
use chrono::{Duration, NaiveDateTime};
use fuzzy_matcher::FuzzyMatcher;
use fuzzy_matcher::skim::SkimMatcherV2;
use regex::Regex;
use std::collections::HashMap;
use std::path::Path;
use std::sync::LazyLock;
mod schema;
struct InternalId {
/**
* Индекс строки
*/
row: u32,
/**
* Индекс столбца
*/
column: u32,
/**
* Текст в ячейке
*/
name: String,
}
struct InternalTime {
/**
* Временной отрезок проведения пары
*/
time_range: LessonTime,
/**
* Тип пары
*/
lesson_type: LessonType,
/**
* Индекс пары
*/
default_index: Option<u32>,
/**
* Рамка ячейки
*/
xls_range: ((u32, u32), (u32, u32)),
}
type WorkSheet = calamine::Range<calamine::Data>;
fn get_string_from_cell(worksheet: &WorkSheet, row: u32, col: u32) -> Option<String> {
let cell_data = if let Some(data) = worksheet.get((row as usize, col as usize)) {
data.to_string()
} else {
return None;
};
if cell_data.trim().is_empty() {
return None;
}
static NL_RE: LazyLock<Regex, fn() -> Regex> =
LazyLock::new(|| Regex::new(r"[\n\r]+").unwrap());
static SP_RE: LazyLock<Regex, fn() -> Regex> = LazyLock::new(|| Regex::new(r"\s+").unwrap());
let trimmed_data = SP_RE
.replace_all(&NL_RE.replace_all(&cell_data, " "), " ")
.trim()
.to_string();
if trimmed_data.is_empty() {
None
} else {
Some(trimmed_data)
}
}
fn get_merge_from_start(worksheet: &WorkSheet, row: u32, column: u32) -> ((u32, u32), (u32, u32)) {
let worksheet_end = worksheet.end().unwrap();
let row_end: u32 = {
let mut r: u32 = 0;
for _r in (row + 1)..worksheet_end.0 {
r = _r;
if let Some(_) = worksheet.get((_r as usize, column as usize)) {
break;
}
}
r
};
let column_end: u32 = {
let mut c: u32 = 0;
for _c in (column + 1)..worksheet_end.1 {
c = _c;
if let Some(_) = worksheet.get((row as usize, _c as usize)) {
break;
}
}
c
};
((row, column), (row_end, column_end))
}
fn parse_skeleton(worksheet: &WorkSheet) -> (Vec<InternalId>, Vec<InternalId>) {
let range = &worksheet;
let mut is_parsed = false;
let mut groups: Vec<InternalId> = Vec::new();
let mut days: Vec<InternalId> = Vec::new();
let start = range.start().expect("Could not find start");
let end = range.end().expect("Could not find end");
let mut row = start.0;
while row < end.0 {
row += 1;
let day_name_opt = get_string_from_cell(&worksheet, row, 0);
if day_name_opt.is_none() {
continue;
}
let day_name = day_name_opt.unwrap();
if !is_parsed {
is_parsed = true;
row -= 1;
for column in (start.1 + 2)..=end.1 {
let group_name = get_string_from_cell(&worksheet, row, column);
if group_name.is_none() {
continue;
}
groups.push(InternalId {
row,
column,
name: group_name.unwrap(),
});
}
row += 1;
}
days.push(InternalId {
row,
column: 0,
name: day_name.clone(),
});
if days.len() > 2 && day_name.starts_with("Суббота") {
break;
}
}
(days, groups)
}
enum LessonParseResult {
Lessons(Vec<Lesson>),
Street(String),
}
trait StringInnerSlice {
fn inner_slice(&self, from: usize, to: usize) -> Self;
}
impl StringInnerSlice for String {
fn inner_slice(&self, from: usize, to: usize) -> Self {
self.chars()
.take(from)
.chain(self.chars().skip(to))
.collect()
}
}
/// Detects a non-standard lesson type encoded in the lesson name.
///
/// Fuzzy-matches the lowercased name against known markers; on a strong
/// match (score > 80) returns the name with the matched fragment removed
/// together with the detected [`LessonType`], otherwise `None`.
fn guess_lesson_type(name: &String) -> Option<(String, LessonType)> {
    // Fixed candidate list: `HashMap` iteration order is randomized, so with
    // equal fuzzy scores the winning entry could differ between runs; a plain
    // array keeps the tie-break deterministic and skips per-call allocations.
    let candidates: [(&str, LessonType); 5] = [
        ("(консультация)", LessonType::Consultation),
        ("самостоятельная работа", LessonType::IndependentWork),
        ("зачет", LessonType::Exam),
        ("зачет с оценкой", LessonType::ExamWithGrade),
        ("экзамен", LessonType::ExamDefault),
    ];
    let matcher = SkimMatcherV2::default();
    let name_lower = name.to_lowercase();
    // (matched type, fuzzy score, matched char indices)
    type SearchResult<'a> = (&'a LessonType, i64, Vec<usize>);
    let mut search_results: Vec<SearchResult> = candidates
        .iter()
        .map(|entry| -> SearchResult {
            if let Some((score, indices)) = matcher.fuzzy_indices(&name_lower, entry.0) {
                return (&entry.1, score, indices);
            }
            (&entry.1, 0, Vec::new())
        })
        .collect();
    // Highest score first; the stable sort preserves candidate order on ties.
    search_results.sort_by(|a, b| b.1.cmp(&a.1));
    let guessed_type = search_results.first().unwrap();
    // Below this score the match is considered noise.
    if guessed_type.1 > 80 {
        Some((
            // Cut the matched marker out of the display name.
            name.inner_slice(guessed_type.2[0], guessed_type.2[guessed_type.2.len() - 1]),
            guessed_type.0.clone(),
        ))
    } else {
        None
    }
}
/// Parses one schedule cell into a lesson (or a street marker).
///
/// Returns an empty lesson list for an empty cell, `Street` when the cell
/// holds an address, otherwise the lesson — preceded by a generated `Break`
/// lesson when the day already contains lessons.
fn parse_lesson(
    worksheet: &WorkSheet,
    day: &mut Day,
    day_times: &Vec<InternalTime>,
    time: &InternalTime,
    column: u32,
) -> LessonParseResult {
    let row = time.xls_range.0.0;
    let (name, lesson_type) = {
        let raw_name_opt = get_string_from_cell(&worksheet, row, column);
        if raw_name_opt.is_none() {
            return Lessons(Vec::new());
        }
        let raw_name = raw_name_opt.unwrap();
        // Cells like "Пушкина, 10" mark the building address of the day.
        static OTHER_STREET_RE: LazyLock<Regex, fn() -> Regex> =
            LazyLock::new(|| Regex::new(r"^[А-Я][а-я]+,?\s?[0-9]+$").unwrap());
        if OTHER_STREET_RE.is_match(&raw_name) {
            return Street(raw_name);
        }
        // A non-standard type (exam, consultation, …) may be encoded in the
        // name itself; otherwise fall back to the time row's type.
        if let Some(guess) = guess_lesson_type(&raw_name) {
            guess
        } else {
            (raw_name, time.lesson_type.clone())
        }
    };
    let (default_range, lesson_time): (Option<[u8; 2]>, LessonTime) = {
        // check if multi-lesson
        let cell_range = get_merge_from_start(worksheet, row, column);
        // The end time comes from the time row whose merged range ends on
        // the same row as this lesson cell's merged range.
        let end_time_arr = day_times
            .iter()
            .filter(|time| time.xls_range.1.0 == cell_range.1.0)
            .collect::<Vec<&InternalTime>>();
        let end_time = end_time_arr.first().expect("Unable to find lesson time!");
        let range: Option<[u8; 2]> = if time.default_index != None {
            let default = time.default_index.unwrap() as u8;
            Some([default, end_time.default_index.unwrap() as u8])
        } else {
            None
        };
        let time = LessonTime {
            start: time.time_range.start,
            end: end_time.time_range.end,
        };
        (range, time)
    };
    let (name, mut subgroups) = parse_name_and_subgroups(&name);
    {
        let cabinets: Vec<String> = parse_cabinets(worksheet, row, column + 1);
        // Exactly one cabinet: assign it to every subgroup.
        if cabinets.len() == 1 {
            for subgroup in &mut subgroups {
                subgroup.cabinet = Some(cabinets.get(0).or(Some(&String::new())).unwrap().clone())
            }
        }
        // Counts match: assign cabinets to subgroups in order (by index).
        else if cabinets.len() == subgroups.len() {
            for subgroup in &mut subgroups {
                subgroup.cabinet = Some(
                    cabinets
                        .get((subgroup.number - 1) as usize)
                        .unwrap()
                        .clone(),
                );
            }
        }
        // More cabinets than subgroups: create placeholder subgroups.
        else if cabinets.len() > subgroups.len() {
            for index in 0..subgroups.len() {
                subgroups[index].cabinet = Some(cabinets[index].clone());
            }
            while cabinets.len() > subgroups.len() {
                subgroups.push(LessonSubGroup {
                    number: (subgroups.len() + 1) as u8,
                    cabinet: Some(cabinets[subgroups.len()].clone()),
                    teacher: "Ошибка в расписании".to_string(),
                });
            }
        }
        // No cabinets at all: mark every subgroup with "??".
        else {
            for subgroup in &mut subgroups {
                subgroup.cabinet = Some("??".to_string());
            }
        }
        cabinets
    };
    let lesson = Lesson {
        lesson_type,
        default_range,
        name: Some(name),
        time: lesson_time,
        subgroups: Some(subgroups),
        group: None,
    };
    // The first lesson of the day needs no break in front of it.
    let prev_lesson = if day.lessons.len() == 0 {
        return Lessons(Vec::from([lesson]));
    } else {
        &day.lessons[day.lessons.len() - 1]
    };
    // Emit a break covering the gap since the previous lesson, then the lesson.
    Lessons(Vec::from([
        Lesson {
            lesson_type: Break,
            default_range: None,
            name: None,
            time: LessonTime {
                start: prev_lesson.time.end,
                end: lesson.time.start,
            },
            subgroups: Some(Vec::new()),
            group: None,
        },
        lesson,
    ]))
}
/// Collects the cabinet names from the cell to the right of a lesson cell.
///
/// The text is split on single spaces; every piece is trimmed and kept
/// (including empty pieces, matching the original splitting behavior).
fn parse_cabinets(worksheet: &WorkSheet, row: u32, column: u32) -> Vec<String> {
    match get_string_from_cell(worksheet, row, column) {
        Some(raw) => raw
            .replace('\n', " ")
            .split(' ')
            .map(|part| part.trim().to_string())
            .collect(),
        None => Vec::new(),
    }
}
/// Splits the raw cell text into the "pure" lesson name and its subgroups.
///
/// Teacher entries ("ФамилияИО", optionally with "(1п)") are matched at the
/// end of the punctuation-stripped text; each becomes a [`LessonSubGroup`].
/// Missing subgroup indices are repaired heuristically afterwards.
fn parse_name_and_subgroups(name: &String) -> (String, Vec<LessonSubGroup>) {
    // Full teacher block anchored at the end of the cleaned text.
    static LESSON_RE: LazyLock<Regex, fn() -> Regex> =
        LazyLock::new(|| Regex::new(r"(?:[А-Я][а-я]+[А-Я]{2}(?:\([0-9][а-я]+\))?)+$").unwrap());
    // One teacher: surname, two initials, optional "(N…)" subgroup index.
    static TEACHER_RE: LazyLock<Regex, fn() -> Regex> =
        LazyLock::new(|| Regex::new(r"([А-Я][а-я]+)([А-Я])([А-Я])(?:\(([0-9])[а-я]+\))?").unwrap());
    // Whitespace and punctuation stripped before teacher matching.
    static CLEAN_RE: LazyLock<Regex, fn() -> Regex> =
        LazyLock::new(|| Regex::new(r"[\s.,]+").unwrap());
    // Trailing "dot + spaces" removed from final names.
    static NAME_CLEAN_RE: LazyLock<Regex, fn() -> Regex> =
        LazyLock::new(|| Regex::new(r"\.\s+$").unwrap());
    let (teachers, lesson_name) = {
        let clean_name = CLEAN_RE.replace_all(&name, "").to_string();
        if let Some(captures) = LESSON_RE.captures(&clean_name) {
            let capture = captures.get(0).unwrap();
            let capture_str = capture.as_str().to_string();
            // The first five chars of the teacher block are used to locate
            // it back inside the original (unstripped) text.
            let capture_name: String = capture_str.chars().take(5).collect();
            (
                NAME_CLEAN_RE.replace(&capture_str, "").to_string(),
                // NOTE(review): `find` unwrap panics if those five chars were
                // altered by CLEAN_RE (e.g. contained a dot) — confirm inputs.
                name[0..name.find(&*capture_name).unwrap()].to_string(),
            )
        } else {
            // No teachers found: the whole text is the lesson name.
            return (NAME_CLEAN_RE.replace(&name, "").to_string(), Vec::new());
        }
    };
    let mut subgroups: Vec<LessonSubGroup> = Vec::new();
    let teacher_it = TEACHER_RE.captures_iter(&teachers);
    for captures in teacher_it {
        subgroups.push(LessonSubGroup {
            // Optional capture 4 is the subgroup digit; 0 = "unspecified".
            number: if let Some(capture) = captures.get(4) {
                capture
                    .as_str()
                    .to_string()
                    .parse::<u8>()
                    .expect("Unable to read subgroup index!")
            } else {
                0
            },
            cabinet: None,
            teacher: format!(
                "{} {}.{}.",
                captures.get(1).unwrap().as_str().to_string(),
                captures.get(2).unwrap().as_str().to_string(),
                captures.get(3).unwrap().as_str().to_string()
            ),
        })
    }
    // Fix-ups for missing subgroup indices.
    if subgroups.len() == 1 {
        let index = subgroups[0].number;
        if index == 0 {
            // A single teacher without an index teaches subgroup 1.
            subgroups[0].number = 1u8;
        } else {
            // An explicit index implies another subgroup taught elsewhere.
            subgroups.push(LessonSubGroup {
                number: if index == 1 { 2 } else { 1 },
                cabinet: None,
                teacher: "Только у другой".to_string(),
            });
        }
    } else if subgroups.len() == 2 {
        // Both indices missing: assign 1 and 2 in order.
        if subgroups[0].number == 0 && subgroups[1].number == 0 {
            subgroups[0].number = 1;
            subgroups[1].number = 2;
        }
        // Only the first missing: take the index the second one left free.
        else if subgroups[0].number == 0 {
            subgroups[0].number = if subgroups[1].number == 1 { 2 } else { 1 };
        }
        // Only the second missing: take the index the first one left free.
        else if subgroups[1].number == 0 {
            subgroups[1].number = if subgroups[0].number == 1 { 2 } else { 1 };
        }
    }
    // Keep subgroups ordered by their index.
    if subgroups.len() == 2 && subgroups[0].number == 2 && subgroups[1].number == 1 {
        subgroups.reverse()
    }
    (lesson_name, subgroups)
}
/// Reads the XLS schedule at `path` and converts it into per-group schedules.
///
/// # Panics
/// Panics when the workbook cannot be opened, has no worksheets, or when a
/// cell deviates from the expected layout (unwraps/expects throughout).
pub fn parse_xls(path: &Path) -> HashMap<String, Group> {
    let mut workbook: Xls<_> = open_workbook(path).expect("Can't open workbook");
    let worksheet: WorkSheet = workbook
        .worksheets()
        .first()
        .expect("No worksheet found")
        .1
        .to_owned();
    let (days_markup, groups_markup) = parse_skeleton(&worksheet);
    let mut groups: HashMap<String, Group> = HashMap::new();
    // Lesson time boundaries per day; filled once while handling the first
    // group and reused for every subsequent group.
    let mut days_times: Vec<Vec<InternalTime>> = Vec::new();
    let saturday_end_row = worksheet.end().unwrap().0;
    for group_markup in groups_markup {
        let mut group = Group {
            name: group_markup.name,
            days: Vec::new(),
        };
        for day_index in 0..(&days_markup).len() {
            let day_markup = &days_markup[day_index];
            let mut day = {
                // Day cells look like "Понедельник 01.09.2025".
                let space_index = day_markup.name.find(' ').unwrap();
                let name = day_markup.name[..space_index].to_string();
                let date_raw = day_markup.name[space_index + 1..].to_string();
                let date_add = format!("{} 00:00:00", date_raw);
                let date = NaiveDateTime::parse_from_str(&*date_add, "%d.%m.%Y %H:%M:%S");
                Day {
                    name,
                    street: None,
                    date: date.unwrap().and_utc(),
                    lessons: Vec::new(),
                }
            };
            let lesson_time_column = days_markup[0].column + 1;
            // Rows covered by this day: up to the next day's row, or to the
            // last worksheet row for the final day (Saturday).
            let row_distance = if day_index != days_markup.len() - 1 {
                days_markup[day_index + 1].row
            } else {
                saturday_end_row
            } - day_markup.row;
            if days_times.len() != 6 {
                let mut day_times: Vec<InternalTime> = Vec::new();
                for row in day_markup.row..(day_markup.row + row_distance) {
                    // time
                    let time_opt = get_string_from_cell(&worksheet, row, lesson_time_column);
                    if time_opt.is_none() {
                        continue;
                    }
                    let time = time_opt.unwrap();
                    // type: "пара" cells are regular lessons, others are extras
                    let lesson_type = if time.contains("пара") {
                        LessonType::Default
                    } else {
                        LessonType::Additional
                    };
                    // lesson index: leading digit, regular lessons only
                    let default_index = if lesson_type == LessonType::Default {
                        Some(
                            time.chars()
                                .next()
                                .unwrap()
                                .to_string()
                                .parse::<u32>()
                                .unwrap(),
                        )
                    } else {
                        None
                    };
                    // time range, e.g. "8.30-10.00"
                    let time_range = {
                        static TIME_RE: LazyLock<Regex, fn() -> Regex> =
                            LazyLock::new(|| Regex::new(r"(\d+\.\d+)-(\d+\.\d+)").unwrap());
                        let parse_res = TIME_RE
                            .captures(&time)
                            .expect("Unable to obtain lesson start and end!");
                        let start_match = parse_res.get(1).unwrap().as_str();
                        let start_parts: Vec<&str> = start_match.split(".").collect();
                        let end_match = parse_res.get(2).unwrap().as_str();
                        let end_parts: Vec<&str> = end_match.split(".").collect();
                        LessonTime {
                            start: day.date.clone()
                                + Duration::hours(start_parts[0].parse().unwrap())
                                + Duration::minutes(start_parts[1].parse().unwrap()),
                            end: day.date.clone()
                                + Duration::hours(end_parts[0].parse().unwrap())
                                + Duration::minutes(end_parts[1].parse().unwrap()),
                        }
                    };
                    day_times.push(InternalTime {
                        time_range,
                        lesson_type,
                        default_index,
                        xls_range: get_merge_from_start(&worksheet, row, lesson_time_column),
                    });
                }
                days_times.push(day_times);
            }
            let day_times = &days_times[day_index];
            for time in day_times {
                match &mut parse_lesson(
                    &worksheet,
                    &mut day,
                    &day_times,
                    &time,
                    group_markup.column,
                ) {
                    Lessons(l) => day.lessons.append(l),
                    Street(s) => day.street = Some(s.to_owned()),
                }
            }
            group.days.push(day);
        }
        groups.insert(group.name.clone(), group);
    }
    groups
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke test: parsing the bundled workbook yields at least one group.
    #[test]
    fn it_works() {
        let parsed = parse_xls(Path::new("../../schedule.xls"));
        assert!(!parsed.is_empty());
    }
}

View File

@@ -1,97 +0,0 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use serde_repr::{Deserialize_repr, Serialize_repr};
/// Start and end timestamps of a lesson.
#[derive(Serialize, Deserialize, Debug)]
pub struct LessonTime {
    pub start: DateTime<Utc>,
    pub end: DateTime<Utc>,
}

/// Kind of schedule entry; serialized as its numeric discriminant.
#[derive(Serialize_repr, Deserialize_repr, Debug, PartialEq, Clone)]
#[repr(u8)]
pub enum LessonType {
    Default = 0,     // Regular lesson ("пара")
    Additional,      // Extra (non-numbered) activity
    Break,           // Gap between lessons
    Consultation,    // Consultation
    IndependentWork, // Independent work
    Exam,            // Pass/fail test ("зачёт")
    ExamWithGrade,   // Graded test ("зачёт с оценкой")
    ExamDefault,     // Exam ("экзамен")
}

/// One subgroup taking part in a lesson.
#[derive(Serialize, Deserialize, Debug)]
pub struct LessonSubGroup {
    // 1-based subgroup index.
    pub number: u8,
    // Cabinet, when known.
    pub cabinet: Option<String>,
    // Teacher as "Фамилия И.О.".
    pub teacher: String,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct Lesson {
    /**
     * Type of the lesson.
     */
    #[serde(rename = "type")]
    pub lesson_type: LessonType,
    /**
     * Indices of the periods spanned, when present.
     */
    #[serde(rename = "defaultRange")]
    pub default_range: Option<[u8; 2]>,
    /**
     * Name of the lesson.
     */
    pub name: Option<String>,
    /**
     * Start and end of the lesson.
     */
    pub time: LessonTime,
    /**
     * Subgroups.
     */
    #[serde(rename = "subGroups")]
    pub subgroups: Option<Vec<LessonSubGroup>>,
    /**
     * Group name (only for teacher schedules).
     */
    pub group: Option<String>,
}

/// One day of a schedule.
#[derive(Serialize, Deserialize, Debug)]
pub struct Day {
    pub name: String,
    // Building address, when the sheet specifies one for the day.
    pub street: Option<String>,
    pub date: DateTime<Utc>,
    pub lessons: Vec<Lesson>,
}

/// A group's full schedule.
#[derive(Serialize, Deserialize, Debug)]
pub struct Group {
    pub name: String,
    pub days: Vec<Day>,
}

/// Root schedule document.
#[derive(Serialize, Deserialize, Debug)]
pub struct Schedule {
    #[serde(rename = "updatedAt")]
    pub updated_at: DateTime<Utc>,
    pub groups: HashMap<String, Group>,
    // NOTE(review): element semantics (indices of changed groups?) are not
    // visible in this file — confirm against the producer.
    #[serde(rename = "updatedGroups")]
    pub updated_groups: Vec<Vec<usize>>,
}

0
migrations/.keep Normal file
View File

View File

@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to set up helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

-- Revert the Diesel setup migration: drop both helper functions.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

View File

@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to set up helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

-- Trigger body: stamps `updated_at` with now() on UPDATE unless the statement
-- itself changed `updated_at` (this allows manual overrides and backfills).
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
    IF (
        NEW IS DISTINCT FROM OLD AND
        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
    ) THEN
        NEW.updated_at := current_timestamp;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View File

@@ -0,0 +1 @@
-- Revert the user_role migration: remove the enum type.
DROP TYPE user_role;

View File

@@ -0,0 +1,4 @@
-- Role of an application user; referenced by `users.role`.
CREATE TYPE user_role AS ENUM (
    'STUDENT',
    'TEACHER',
    'ADMIN');

View File

@@ -0,0 +1 @@
-- Revert the users migration.
DROP TABLE users;

View File

@@ -0,0 +1,11 @@
-- Application user accounts.
CREATE TABLE users
(
    id text PRIMARY KEY NOT NULL,      -- opaque user id
    username text UNIQUE NOT NULL,
    password text NOT NULL,            -- NOTE(review): storage format (hash?) not visible here
    vk_id int4 NULL,                   -- linked VK account, when present
    access_token text UNIQUE NOT NULL, -- API token, one per user
    "group" text NOT NULL,             -- quoted: GROUP is a reserved word
    role user_role NOT NULL,
    version text NOT NULL
);

View File

@@ -0,0 +1 @@
-- Revert the fcm migration.
DROP TABLE fcm;

View File

@@ -0,0 +1,6 @@
-- Push-notification (FCM) registration, one row per user.
CREATE TABLE fcm
(
    user_id text PRIMARY KEY NOT NULL REFERENCES users (id),
    token text NOT NULL,
    -- Subscribed topic names; the CHECK forbids NULL elements in the array.
    topics text[] NOT NULL CHECK ( array_position(topics, null) is null )
);

View File

@@ -0,0 +1,24 @@
# Manifest of the standalone XLS schedule parser crate.
[package]
name = "schedule-parser"
version = "0.1.0"
edition = "2024"

[features]
# Exposes the `test_utils` module to dependent crates' tests.
test-utils = []

[dependencies]
calamine = "0.26"                                            # XLS reading
chrono = { version = "0.4", features = ["serde"] }           # dates/times
derive_more = { version = "2", features = ["full"] }
serde = { version = "1.0.219", features = ["derive"] }
serde_repr = "0.1.20"
fuzzy-matcher = "0.3.7"                                      # lesson-type guessing
regex = "1.11.1"
utoipa = { version = "5", features = ["chrono"] }            # OpenAPI schema derives

[dev-dependencies]
criterion = "0.6"

# Criterion benchmark; harness disabled so criterion provides `main`.
[[bench]]
name = "parse"
harness = false

View File

@@ -1,11 +1,11 @@
use criterion::{Criterion, criterion_group, criterion_main};
use schedule_parser::parse_xls;
use std::path::Path;
pub fn bench_parse_xls(c: &mut Criterion) {
c.bench_function("parse_xls", |b| {
b.iter(|| parse_xls(Path::new("../../schedule.xls")))
});
let buffer: Vec<u8> = include_bytes!("../../schedule.xls").to_vec();
c.bench_function("parse_xls", |b| b.iter(|| parse_xls(&buffer).unwrap()));
}
criterion_group!(benches, bench_parse_xls);

794
schedule-parser/src/lib.rs Normal file
View File

@@ -0,0 +1,794 @@
use crate::LessonParseResult::{Lessons, Street};
use crate::schema::LessonType::Break;
use crate::schema::{
Day, ErrorCell, ErrorCellPos, Lesson, LessonBoundaries, LessonSubGroup, LessonType, ParseError,
ParseResult, ScheduleEntry,
};
use calamine::{Reader, Xls, open_workbook_from_rs};
use chrono::{DateTime, Duration, NaiveDateTime, Utc};
use fuzzy_matcher::FuzzyMatcher;
use fuzzy_matcher::skim::SkimMatcherV2;
use regex::Regex;
use std::collections::HashMap;
use std::io::Cursor;
use std::ops::Deref;
use std::sync::LazyLock;
mod macros;
pub mod schema;
/// Data cell storing the group name.
struct GroupCellInfo {
    /// Column index.
    column: u32,
    /// Text in the cell.
    name: String,
}

/// Data cell storing the day row.
struct DayCellInfo {
    /// Row index.
    row: u32,
    /// Column index.
    column: u32,
    /// Day name (e.g. "Понедельник").
    name: String,
    /// Date of the day (midnight, stored as UTC).
    date: DateTime<Utc>,
}

/// Data on the time of lessons from the second column of the schedule.
struct BoundariesCellInfo {
    /// Time segment of the lesson.
    time_range: LessonBoundaries,
    /// Type of lesson.
    lesson_type: LessonType,
    /// The period index; present only for regular ("пара") rows.
    default_index: Option<u32>,
    /// The frame of the cell: (start, exclusive end) as (row, column) pairs.
    xls_range: ((u32, u32), (u32, u32)),
}

/// A worksheet's cell data together with its merged-cell regions.
struct WorkSheet {
    pub data: calamine::Range<calamine::Data>,
    pub merges: Vec<calamine::Dimensions>,
}

/// Deref to the underlying range so cell accessors can be called directly.
impl Deref for WorkSheet {
    type Target = calamine::Range<calamine::Data>;

    fn deref(&self) -> &Self::Target {
        &self.data
    }
}
/// Getting a normalized string from the required cell.
///
/// Returns `None` for a missing cell or one that is blank after whitespace
/// normalization; otherwise all whitespace runs (including newlines) are
/// collapsed into single spaces and the result is trimmed.
fn get_string_from_cell(worksheet: &WorkSheet, row: u32, col: u32) -> Option<String> {
    let cell_data = worksheet
        .get((row as usize, col as usize))
        .map(|data| data.to_string())?;
    if cell_data.trim().is_empty() {
        return None;
    }
    // `\s` already matches `\n` and `\r`, so a single pass collapses every
    // whitespace run — the previous separate newline pass was redundant.
    static SP_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\s+").unwrap());
    let trimmed_data = SP_RE.replace_all(&cell_data, " ").trim().to_string();
    if trimmed_data.is_empty() {
        None
    } else {
        Some(trimmed_data)
    }
}
/// Obtaining the boundaries of the cell along its upper left coordinate.
///
/// For a merged cell the exclusive end is one past the merge's last row and
/// column; a plain (unmerged) cell spans exactly one row and one column.
fn get_merge_from_start(worksheet: &WorkSheet, row: u32, column: u32) -> ((u32, u32), (u32, u32)) {
    worksheet
        .merges
        .iter()
        .find(|merge| merge.start.0 == row && merge.start.1 == column)
        .map(|merge| (merge.start, (merge.end.0 + 1, merge.end.1 + 1)))
        .unwrap_or(((row, column), (row + 1, column + 1)))
}
/// Obtaining a "skeleton" schedule from the working sheet.
///
/// Returns the day cells of the week and the group header cells. Scanning
/// stops at the first day cell whose date part does not parse (footer rows).
///
/// # Errors
/// [`ParseError::UnknownWorkSheetRange`] when the worksheet bounds are unknown.
fn parse_skeleton(
    worksheet: &WorkSheet,
) -> Result<(Vec<DayCellInfo>, Vec<GroupCellInfo>), ParseError> {
    let mut groups: Vec<GroupCellInfo> = Vec::new();
    let mut days: Vec<DayCellInfo> = Vec::new();
    let worksheet_start = worksheet.start().ok_or(ParseError::UnknownWorkSheetRange)?;
    let worksheet_end = worksheet.end().ok_or(ParseError::UnknownWorkSheetRange)?;
    let mut row = worksheet_start.0;
    while row < worksheet_end.0 {
        row += 1;
        let day_full_name = or_continue!(get_string_from_cell(&worksheet, row, 0));
        // Parse the group header once the first day cell has been found.
        if groups.is_empty() {
            // Step back to the header row (one above the first day cell).
            row -= 1;
            for column in (worksheet_start.1 + 2)..=worksheet_end.1 {
                groups.push(GroupCellInfo {
                    column,
                    name: or_continue!(get_string_from_cell(&worksheet, row, column)),
                });
            }
            // Return to the current day row.
            row += 1;
        }
        let (day_name, day_date) = {
            // Day cells look like "Понедельник 01.09.2025".
            // NOTE(review): `find(' ')` unwraps — a day cell without a space
            // panics here; confirm the sheet always contains one.
            let space_index = day_full_name.find(' ').unwrap();
            let name = day_full_name[..space_index].to_string();
            let date_raw = day_full_name[space_index + 1..].to_string();
            let date_add = format!("{} 00:00:00", date_raw);
            // A non-parsable date ends the week scan (footer below the grid).
            let date =
                or_break!(NaiveDateTime::parse_from_str(&*date_add, "%d.%m.%Y %H:%M:%S").ok());
            (name, date.and_utc())
        };
        days.push(DayCellInfo {
            row,
            column: 0,
            name: day_name,
            date: day_date,
        });
    }
    Ok((days, groups))
}
/// The result of obtaining a lesson from the cell.
/// The result of obtaining a lesson from the cell.
enum LessonParseResult {
    /// List of lessons, one or two entries long.
    ///
    /// Contains a single lesson when it is the first of the day; otherwise a
    /// generated break lesson followed by the lesson itself.
    Lessons(Vec<Lesson>),
    /// Street of the building where the day's classes take place.
    Street(String),
}
trait StringInnerSlice {
    /// Removes the `[from, to)` middle chunk (in char positions), keeping
    /// the characters before `from` and from `to` onwards.
    fn inner_slice(&self, from: usize, to: usize) -> Self;
}

impl StringInnerSlice for String {
    fn inner_slice(&self, from: usize, to: usize) -> Self {
        let mut kept = String::with_capacity(self.len());
        for (index, ch) in self.chars().enumerate() {
            if index < from || index >= to {
                kept.push(ch);
            }
        }
        kept
    }
}
// noinspection GrazieInspection
/// Obtaining a non-standard type of lesson by name.
///
/// Fuzzy-matches the lowercased name against known markers; on a strong
/// match (score > 80) returns the name with the matched fragment removed
/// together with the detected [`LessonType`], otherwise `None`.
fn guess_lesson_type(name: &String) -> Option<(String, LessonType)> {
    // Fixed candidate list: `HashMap` iteration order is randomized, so with
    // equal fuzzy scores the winning entry could differ between runs; a plain
    // array keeps the tie-break deterministic and skips per-call allocations.
    let candidates: [(&str, LessonType); 5] = [
        ("(консультация)", LessonType::Consultation),
        ("самостоятельная работа", LessonType::IndependentWork),
        ("зачет", LessonType::Exam),
        ("зачет с оценкой", LessonType::ExamWithGrade),
        ("экзамен", LessonType::ExamDefault),
    ];
    let matcher = SkimMatcherV2::default();
    let name_lower = name.to_lowercase();
    // (matched type, fuzzy score, matched char indices)
    type SearchResult<'a> = (&'a LessonType, i64, Vec<usize>);
    let mut search_results: Vec<SearchResult> = candidates
        .iter()
        .map(|entry| -> SearchResult {
            if let Some((score, indices)) = matcher.fuzzy_indices(&name_lower, entry.0) {
                return (&entry.1, score, indices);
            }
            (&entry.1, 0, Vec::new())
        })
        .collect();
    // Highest score first; the stable sort preserves candidate order on ties.
    search_results.sort_by(|a, b| b.1.cmp(&a.1));
    let guessed_type = search_results.first().unwrap();
    // Below this score the match is considered noise.
    if guessed_type.1 > 80 {
        Some((
            // Cut the matched marker out of the display name.
            name.inner_slice(guessed_type.2[0], guessed_type.2[guessed_type.2.len() - 1]),
            guessed_type.0.clone(),
        ))
    } else {
        None
    }
}
/// Getting a pair or street from a cell.
///
/// Returns an empty lesson list for an empty cell, `Street` for an address
/// cell, otherwise the parsed lesson — preceded by a generated `Break`
/// lesson when the day already contains lessons.
fn parse_lesson(
    worksheet: &WorkSheet,
    day: &mut Day,
    day_boundaries: &Vec<BoundariesCellInfo>,
    lesson_boundaries: &BoundariesCellInfo,
    column: u32,
) -> Result<LessonParseResult, ParseError> {
    let row = lesson_boundaries.xls_range.0.0;
    let (name, lesson_type) = {
        let full_name = match get_string_from_cell(&worksheet, row, column) {
            Some(x) => x,
            None => return Ok(Lessons(Vec::new())),
        };
        // Cells like "Пушкина, 10" carry the building address of the day.
        static OTHER_STREET_RE: LazyLock<Regex> =
            LazyLock::new(|| Regex::new(r"^[А-Я][а-я]+,?\s?[0-9]+$").unwrap());
        if OTHER_STREET_RE.is_match(&full_name) {
            return Ok(Street(full_name));
        }
        // A non-standard type (exam, consultation, …) may be encoded in the
        // name; otherwise the time row's type applies.
        match guess_lesson_type(&full_name) {
            Some(x) => x,
            None => (full_name, lesson_boundaries.lesson_type.clone()),
        }
    };
    let (default_range, lesson_time) = {
        // A cell merged over several time rows spans multiple periods; the
        // end boundary is the time row whose merge ends with the cell's.
        let cell_range = get_merge_from_start(worksheet, row, column);
        let end_time_arr = day_boundaries
            .iter()
            .filter(|time| time.xls_range.1.0 == cell_range.1.0)
            .collect::<Vec<&BoundariesCellInfo>>();
        let end_time = end_time_arr
            .first()
            .ok_or(ParseError::LessonTimeNotFound(ErrorCellPos { row, column }))?;
        let range: Option<[u8; 2]> = if lesson_boundaries.default_index != None {
            let default = lesson_boundaries.default_index.unwrap() as u8;
            Some([default, end_time.default_index.unwrap() as u8])
        } else {
            None
        };
        let time = LessonBoundaries {
            start: lesson_boundaries.time_range.start,
            end: end_time.time_range.end,
        };
        // This block yields a Result only so `?` above can short-circuit.
        Ok((range, time))
    }?;
    let (name, mut subgroups) = parse_name_and_subgroups(&name)?;
    {
        let cabinets: Vec<String> = parse_cabinets(worksheet, row, column + 1);
        match cabinets.len() {
            // No cabinets but there are subgroups: mark them with "??".
            0 => {
                for subgroup in &mut subgroups {
                    subgroup.cabinet = Some("??".to_string());
                }
            }
            // Exactly one cabinet: assign it to every subgroup.
            1 => {
                for subgroup in &mut subgroups {
                    subgroup.cabinet =
                        Some(cabinets.get(0).or(Some(&String::new())).unwrap().clone())
                }
            }
            len => {
                // Counts match: assign cabinets to subgroups by their index.
                if len == subgroups.len() {
                    for subgroup in &mut subgroups {
                        subgroup.cabinet = Some(
                            cabinets
                                .get((subgroup.number - 1) as usize)
                                .unwrap()
                                .clone(),
                        );
                    }
                // More cabinets than subgroups: add placeholder subgroups.
                } else if len > subgroups.len() {
                    for index in 0..subgroups.len() {
                        subgroups[index].cabinet = Some(cabinets[index].clone());
                    }
                    while cabinets.len() > subgroups.len() {
                        subgroups.push(LessonSubGroup {
                            number: (subgroups.len() + 1) as u8,
                            cabinet: Some(cabinets[subgroups.len()].clone()),
                            teacher: "Ошибка в расписании".to_string(),
                        });
                    }
                }
                // NOTE(review): when 2 <= len < subgroups.len() no branch
                // runs and those subgroups keep `cabinet: None` (the older
                // parser assigned "??") — confirm this is intended.
            }
        };
    };
    let lesson = Lesson {
        lesson_type,
        default_range,
        name: Some(name),
        time: lesson_time,
        subgroups: Some(subgroups),
        group: None,
    };
    // The first lesson of the day needs no break before it.
    let prev_lesson = if day.lessons.is_empty() {
        return Ok(Lessons(Vec::from([lesson])));
    } else {
        &day.lessons[day.lessons.len() - 1]
    };
    // Emit a break covering the gap after the previous lesson, then the lesson.
    Ok(Lessons(Vec::from([
        Lesson {
            lesson_type: Break,
            default_range: None,
            name: None,
            time: LessonBoundaries {
                start: prev_lesson.time.end,
                end: lesson.time.start,
            },
            subgroups: Some(Vec::new()),
            group: None,
        },
        lesson,
    ])))
}
/// Obtaining a list of cabinets to the right of the lesson cell.
///
/// The cell text is split on single spaces; every piece is trimmed and kept
/// (including empty pieces, matching the original splitting behavior).
fn parse_cabinets(worksheet: &WorkSheet, row: u32, column: u32) -> Vec<String> {
    match get_string_from_cell(worksheet, row, column) {
        Some(raw) => raw
            .replace('\n', " ")
            .split(' ')
            .map(|part| part.trim().to_string())
            .collect(),
        None => Vec::new(),
    }
}
/// Getting the "pure" name of the lesson and list of teachers from the text of the lesson cell.
///
/// Teacher entries ("ФамилияИО", optionally with "(1п)") are matched at the
/// end of the punctuation-stripped text; each becomes a [`LessonSubGroup`].
/// Missing subgroup indices are repaired heuristically afterwards.
fn parse_name_and_subgroups(name: &String) -> Result<(String, Vec<LessonSubGroup>), ParseError> {
    // Full teacher block anchored at the end of the cleaned text.
    static LESSON_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"(?:[А-Я][а-я]+[А-Я]{2}(?:\([0-9][а-я]+\))?)+$").unwrap());
    // One teacher: surname, two initials, optional "(N…)" subgroup index.
    static TEACHER_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"([А-Я][а-я]+)([А-Я])([А-Я])(?:\(([0-9])[а-я]+\))?").unwrap());
    // Whitespace and punctuation stripped before teacher matching.
    static CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\s.,]+").unwrap());
    // Trailing dots/spaces removed from final names.
    static END_CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[.\s]+$").unwrap());
    let (teachers, lesson_name) = {
        let clean_name = CLEAN_RE.replace_all(&name, "").to_string();
        if let Some(captures) = LESSON_RE.captures(&clean_name) {
            let capture = captures.get(0).unwrap();
            let capture_str = capture.as_str().to_string();
            // The first five chars of the teacher block are used to locate
            // it back inside the original (unstripped) text.
            let capture_name: String = capture_str.chars().take(5).collect();
            (
                END_CLEAN_RE.replace(&capture_str, "").to_string(),
                // NOTE(review): `find` unwrap panics if those five chars were
                // altered by CLEAN_RE (e.g. contained a dot) — confirm inputs.
                END_CLEAN_RE
                    .replace(&name[0..name.find(&*capture_name).unwrap()], "")
                    .to_string(),
            )
        } else {
            // No teachers found: the whole text is the lesson name.
            return Ok((END_CLEAN_RE.replace(&name, "").to_string(), Vec::new()));
        }
    };
    let mut subgroups: Vec<LessonSubGroup> = Vec::new();
    let teacher_it = TEACHER_RE.captures_iter(&teachers);
    for captures in teacher_it {
        subgroups.push(LessonSubGroup {
            // Optional capture 4 is the subgroup digit; 0 = "unspecified".
            // The `[0-9]` capture guarantees `parse::<u8>` succeeds.
            number: match captures.get(4) {
                Some(capture) => capture.as_str().to_string().parse::<u8>().unwrap(),
                None => 0,
            },
            cabinet: None,
            teacher: format!(
                "{} {}.{}.",
                captures.get(1).unwrap().as_str().to_string(),
                captures.get(2).unwrap().as_str().to_string(),
                captures.get(3).unwrap().as_str().to_string()
            ),
        });
    }
    // Fix-ups for missing subgroup indices.
    if subgroups.len() == 1 {
        let index = subgroups[0].number;
        if index == 0 {
            // A single teacher without an index teaches subgroup 1.
            subgroups[0].number = 1u8;
        } else {
            // An explicit index implies another subgroup taught elsewhere.
            subgroups.push(LessonSubGroup {
                number: if index == 1 { 2 } else { 1 },
                cabinet: None,
                teacher: "Только у другой".to_string(),
            });
        }
    } else if subgroups.len() == 2 {
        // Both indices missing: assign 1 and 2 in order.
        if subgroups[0].number == 0 && subgroups[1].number == 0 {
            subgroups[0].number = 1;
            subgroups[1].number = 2;
        }
        // Only the first missing: take the index the second one left free.
        else if subgroups[0].number == 0 {
            subgroups[0].number = if subgroups[1].number == 1 { 2 } else { 1 };
        }
        // Only the second missing: take the index the first one left free.
        else if subgroups[1].number == 0 {
            subgroups[1].number = if subgroups[0].number == 1 { 2 } else { 1 };
        }
    }
    // Keep subgroups ordered by their index.
    if subgroups.len() == 2 && subgroups[0].number == 2 && subgroups[1].number == 1 {
        subgroups.reverse()
    }
    Ok((lesson_name, subgroups))
}
/// Parses a time-column cell like "8.30-10.00" into absolute timestamps on
/// the given day.
///
/// Returns `None` when the text contains no `H.M-H.M` range.
fn parse_lesson_boundaries_cell(
    cell_data: &String,
    date: DateTime<Utc>,
) -> Option<LessonBoundaries> {
    static TIME_RE: LazyLock<Regex> =
        LazyLock::new(|| Regex::new(r"(\d+\.\d+)-(\d+\.\d+)").unwrap());
    let parse_res = if let Some(captures) = TIME_RE.captures(cell_data) {
        captures
    } else {
        return None;
    };
    let start_match = parse_res.get(1).unwrap().as_str();
    let start_parts: Vec<&str> = start_match.split(".").collect();
    let end_match = parse_res.get(2).unwrap().as_str();
    let end_parts: Vec<&str> = end_match.split(".").collect();
    // The `- 4` shifts the sheet's wall-clock hours to UTC — presumably the
    // schedule is in a UTC+4 timezone — TODO confirm.
    static GET_TIME: fn(DateTime<Utc>, &Vec<&str>) -> DateTime<Utc> = |date, parts| {
        date + Duration::hours(parts[0].parse::<i64>().unwrap() - 4)
            + Duration::minutes(parts[1].parse::<i64>().unwrap())
    };
    Some(LessonBoundaries {
        start: GET_TIME(date.clone(), &start_parts),
        end: GET_TIME(date, &end_parts),
    })
}
fn parse_day_boundaries_column(
worksheet: &WorkSheet,
day_markup: &DayCellInfo,
lesson_time_column: u32,
row_distance: u32,
) -> Result<Vec<BoundariesCellInfo>, ParseError> {
let mut day_times: Vec<BoundariesCellInfo> = Vec::new();
for row in day_markup.row..(day_markup.row + row_distance) {
let time_cell = if let Some(str) = get_string_from_cell(&worksheet, row, lesson_time_column)
{
str
} else {
continue;
};
let lesson_time = parse_lesson_boundaries_cell(&time_cell, day_markup.date.clone()).ok_or(
ParseError::LessonBoundaries(ErrorCell::new(
row,
lesson_time_column,
time_cell.clone(),
)),
)?;
// type
let lesson_type = if time_cell.contains("пара") {
LessonType::Default
} else {
LessonType::Additional
};
// lesson index
let default_index = if lesson_type == LessonType::Default {
Some(
time_cell
.chars()
.next()
.unwrap()
.to_string()
.parse::<u32>()
.unwrap(),
)
} else {
None
};
day_times.push(BoundariesCellInfo {
time_range: lesson_time,
lesson_type,
default_index,
xls_range: get_merge_from_start(&worksheet, row, lesson_time_column),
});
}
return Ok(day_times);
}
fn parse_week_boundaries_column(
worksheet: &WorkSheet,
week_markup: &Vec<DayCellInfo>,
) -> Result<Vec<Vec<BoundariesCellInfo>>, ParseError> {
let mut result: Vec<Vec<BoundariesCellInfo>> = Vec::new();
let worksheet_end_row = worksheet.end().unwrap().0;
let lesson_time_column = week_markup[0].column + 1;
for day_index in 0..week_markup.len() {
let day_markup = &week_markup[day_index];
// Если текущий день не последнему, то индекс строки следующего дня минус индекс строки текущего дня.
// Если текущий день - последний, то индекс последней строки документа минус индекс строки текущего дня.
let row_distance = if day_index != week_markup.len() - 1 {
week_markup[day_index + 1].row
} else {
worksheet_end_row
} - day_markup.row;
let day_boundaries =
parse_day_boundaries_column(&worksheet, day_markup, lesson_time_column, row_distance)?;
result.push(day_boundaries);
}
Ok(result)
}
/// Conversion of the group schedules into per-teacher schedules.
///
/// Walks every group lesson and, for each subgroup teacher, appends the
/// lesson (tagged with the group name) to that teacher's matching day.
/// Breaks and the "Ошибка в расписании" placeholder teacher are skipped;
/// each teacher's days are finally sorted chronologically.
fn convert_groups_to_teachers(
    groups: &HashMap<String, ScheduleEntry>,
) -> HashMap<String, ScheduleEntry> {
    let mut teachers: HashMap<String, ScheduleEntry> = HashMap::new();
    // No groups — nothing to convert (previously panicked on `unwrap`).
    let Some(first_group) = groups.values().next() else {
        return teachers;
    };
    // Template week: the same day metadata as any group, with empty lessons.
    let empty_days: Vec<Day> = first_group
        .days
        .iter()
        .map(|day| Day {
            name: day.name.clone(),
            street: day.street.clone(),
            date: day.date.clone(),
            lessons: vec![],
        })
        .collect();
    for group in groups.values() {
        for (index, day) in group.days.iter().enumerate() {
            for group_lesson in &day.lessons {
                // Breaks are group-specific filler; skip them.
                if group_lesson.lesson_type == Break {
                    continue;
                }
                let Some(subgroups) = group_lesson.subgroups.as_ref() else {
                    continue;
                };
                for subgroup in subgroups {
                    // Placeholder subgroup created when the sheet had more
                    // cabinets than teachers — not a real teacher.
                    if subgroup.teacher == "Ошибка в расписании" {
                        continue;
                    }
                    // Entry API: one lookup instead of contains_key + insert
                    // + get_mut.
                    let teacher_day = teachers
                        .entry(subgroup.teacher.clone())
                        .or_insert_with(|| ScheduleEntry {
                            name: subgroup.teacher.clone(),
                            days: empty_days.to_vec(),
                        })
                        .days
                        .get_mut(index)
                        .unwrap();
                    teacher_day.lessons.push({
                        let mut lesson = group_lesson.clone();
                        lesson.group = Some(group.name.clone());
                        lesson
                    });
                }
            }
        }
    }
    // Sort each day's lessons chronologically. Sorting by `default_range`
    // (as before) panicked for lessons without a period index (e.g. the
    // `Additional` type); the start time is always present and yields the
    // same order for regular lessons.
    teachers.values_mut().for_each(|teacher| {
        teacher.days.iter_mut().for_each(|day| {
            day.lessons
                .sort_by(|a, b| a.time.start.cmp(&b.time.start))
        })
    });
    teachers
}
/// Reading XLS Document from the buffer and converting it into the schedule ready to use.
///
/// # Arguments
///
/// * `buffer`: XLS data containing schedule.
///
/// returns: Result<ParseResult, ParseError>
///
/// # Examples
///
/// ```
/// use schedule_parser::parse_xls;
///
/// let result = parse_xls(&include_bytes!("../../schedule.xls").to_vec());
///
/// assert!(result.is_ok(), "{}", result.err().unwrap());
///
/// assert_ne!(result.as_ref().unwrap().groups.len(), 0);
/// assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
/// ```
pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
let cursor = Cursor::new(&buffer);
let mut workbook: Xls<_> =
open_workbook_from_rs(cursor).map_err(|e| ParseError::BadXLS(std::sync::Arc::new(e)))?;
let worksheet = {
let (worksheet_name, worksheet) = workbook
.worksheets()
.first()
.ok_or(ParseError::NoWorkSheets)?
.clone();
let worksheet_merges = workbook
.worksheet_merge_cells(&*worksheet_name)
.ok_or(ParseError::NoWorkSheets)?;
WorkSheet {
data: worksheet,
merges: worksheet_merges,
}
};
let (week_markup, groups_markup) = parse_skeleton(&worksheet)?;
let week_boundaries = parse_week_boundaries_column(&worksheet, &week_markup)?;
let mut groups: HashMap<String, ScheduleEntry> = HashMap::new();
for group_markup in groups_markup {
let mut group = ScheduleEntry {
name: group_markup.name,
days: Vec::new(),
};
for day_index in 0..(&week_markup).len() {
let day_markup = &week_markup[day_index];
let mut day = Day {
name: day_markup.name.clone(),
street: None,
date: day_markup.date,
lessons: Vec::new(),
};
let day_boundaries = &week_boundaries[day_index];
for lesson_boundaries in day_boundaries {
match &mut parse_lesson(
&worksheet,
&mut day,
&day_boundaries,
&lesson_boundaries,
group_markup.column,
)? {
Lessons(lesson) => day.lessons.append(lesson),
Street(street) => day.street = Some(street.to_owned()),
}
}
group.days.push(day);
}
groups.insert(group.name.clone(), group);
}
Ok(ParseResult {
teachers: convert_groups_to_teachers(&groups),
groups,
})
}
#[cfg(any(test, feature = "test-utils"))]
pub mod test_utils {
    use super::*;

    /// Parses the XLS schedule fixture bundled with the crate sources.
    ///
    /// Shared by the unit tests below and by dependents that enable the
    /// `test-utils` feature, so everyone exercises the same fixture.
    pub fn test_result() -> Result<ParseResult, ParseError> {
        parse_xls(&include_bytes!("../../schedule.xls").to_vec())
    }
}
#[cfg(test)]
pub mod tests {
    /// Smoke test: the bundled XLS fixture must parse into non-empty
    /// group and teacher schedules.
    #[test]
    fn read() {
        let result = super::test_utils::test_result();
        assert!(result.is_ok(), "{}", result.err().unwrap());

        let parsed = result.unwrap();
        assert_ne!(parsed.groups.len(), 0);
        assert_ne!(parsed.teachers.len(), 0);
    }

    /// A lesson split across subgroups must be merged back into a single
    /// entry ending at the expected slot.
    #[test]
    fn test_split_lesson() {
        let result = super::test_utils::test_result();
        assert!(result.is_ok(), "{}", result.err().unwrap());

        let parsed = result.unwrap();
        assert!(parsed.groups.contains_key("ИС-214/23"));

        let group = parsed.groups.get("ИС-214/23").unwrap();
        let thursday = &group.days[3];

        assert_eq!(thursday.lessons.len(), 1);
        assert_eq!(thursday.lessons[0].default_range.unwrap()[1], 3);
    }
}

View File

@@ -0,0 +1,25 @@
/// Unwraps an `Option`, executing `continue` on `None`.
///
/// Only meaningful inside a loop body.
#[macro_export]
macro_rules! or_continue {
    ( $e:expr ) => {
        match $e {
            Some(value) => value,
            None => continue,
        }
    };
}

/// Unwraps an `Option`, executing `break` on `None`.
///
/// Only meaningful inside a loop body.
#[macro_export]
macro_rules! or_break {
    ( $e:expr ) => {
        match $e {
            Some(value) => value,
            None => break,
        }
    };
}

View File

@@ -0,0 +1,180 @@
use chrono::{DateTime, Utc};
use derive_more::{Display, Error};
use serde::{Deserialize, Serialize, Serializer};
use serde_repr::{Deserialize_repr, Serialize_repr};
use std::collections::HashMap;
use std::sync::Arc;
use utoipa::ToSchema;
/// The beginning and end of the lesson.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonBoundaries {
    /// The beginning of a lesson.
    pub start: DateTime<Utc>,
    /// The end of the lesson.
    pub end: DateTime<Utc>,
}

/// Type of lesson.
// NOTE(review): variants serialize as their numeric discriminant via
// serde_repr, so `rename_all` presumably has no effect here — confirm
// before relying on string forms anywhere.
#[derive(Clone, Hash, PartialEq, Debug, Serialize_repr, Deserialize_repr, ToSchema)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[repr(u8)]
pub enum LessonType {
    /// Regular lesson.
    Default = 0,
    /// Additional (extra) classes.
    Additional,
    /// Break between lessons.
    Break,
    /// Consultation.
    Consultation,
    /// Independent work.
    IndependentWork,
    /// Pass/fail test.
    Exam,
    /// Graded test.
    ExamWithGrade,
    /// Final exam.
    ExamDefault,
}

/// One subgroup's slice of a lesson: which teacher and which cabinet.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonSubGroup {
    /// Index of subgroup.
    pub number: u8,
    /// Cabinet, if present.
    pub cabinet: Option<String>,
    /// Full name of the teacher.
    pub teacher: String,
}
/// A single scheduled lesson (or break) within a day.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct Lesson {
    /// Type.
    #[serde(rename = "type")]
    pub lesson_type: LessonType,
    /// Lesson indexes, if present.
    // NOTE(review): downstream code orders lessons by `default_range[1]`,
    // so this presumably holds [first, last] slot numbers — confirm.
    pub default_range: Option<[u8; 2]>,
    /// Name.
    pub name: Option<String>,
    /// The beginning and end.
    pub time: LessonBoundaries,
    /// List of subgroups.
    #[serde(rename = "subGroups")]
    pub subgroups: Option<Vec<LessonSubGroup>>,
    /// Group name, if this is a schedule for teachers.
    pub group: Option<String>,
}

/// One day of a schedule.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct Day {
    /// Day of the week.
    pub name: String,
    /// Address of another corps.
    pub street: Option<String>,
    /// Date.
    pub date: DateTime<Utc>,
    /// List of lessons on this day.
    pub lessons: Vec<Lesson>,
}

/// Weekly schedule for one group or one teacher.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct ScheduleEntry {
    /// The name of the group or name of the teacher.
    pub name: String,
    /// List of six days.
    pub days: Vec<Day>,
}

/// Complete output of the XLS parser.
#[derive(Clone)]
pub struct ParseResult {
    /// List of groups.
    pub groups: HashMap<String, ScheduleEntry>,
    /// List of teachers.
    pub teachers: HashMap<String, ScheduleEntry>,
}
/// Position of a cell within the work sheet.
#[derive(Clone, Debug, Display, Error, ToSchema)]
#[display("row {row}, column {column}")]
pub struct ErrorCellPos {
    /// Row index of the cell.
    pub row: u32,
    /// Column index of the cell.
    pub column: u32,
}

/// A cell that could not be parsed: its position plus raw contents.
#[derive(Clone, Debug, Display, Error, ToSchema)]
#[display("'{data}' at {pos}")]
pub struct ErrorCell {
    /// Where the offending cell sits.
    pub pos: ErrorCellPos,
    /// Raw text found in the cell.
    pub data: String,
}

impl ErrorCell {
    /// Convenience constructor combining a position and the cell contents.
    pub fn new(row: u32, column: u32, data: String) -> Self {
        Self {
            pos: ErrorCellPos { row, column },
            data,
        }
    }
}

/// All the ways reading and parsing the schedule XLS can fail.
#[derive(Clone, Debug, Display, Error, ToSchema)]
pub enum ParseError {
    /// Errors related to reading the XLS file.
    #[display("{_0:?}: Failed to read XLS file.")]
    #[schema(value_type = String)]
    BadXLS(Arc<calamine::XlsError>),
    /// Not a single sheet was found.
    #[display("No work sheets found.")]
    NoWorkSheets,
    /// There is no data on the boundaries of the sheet.
    #[display("There is no data on work sheet boundaries.")]
    UnknownWorkSheetRange,
    /// Failed to read the lesson's start and end from a cell.
    #[display("Failed to read lesson start and end from {_0}.")]
    LessonBoundaries(ErrorCell),
    /// No start and end times matching the lesson were found.
    #[display("No start and end times matching the lesson (at {_0}) was found.")]
    LessonTimeNotFound(ErrorCellPos),
}
impl Serialize for ParseError {
    /// Serializes the error as a stable machine-readable code string,
    /// discarding the variant payloads.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let code = match self {
            ParseError::BadXLS(_) => "BAD_XLS",
            ParseError::NoWorkSheets => "NO_WORK_SHEETS",
            ParseError::UnknownWorkSheetRange => "UNKNOWN_WORK_SHEET_RANGE",
            // Kept as "GLOBAL_TIME" to match the existing wire format.
            ParseError::LessonBoundaries(_) => "GLOBAL_TIME",
            ParseError::LessonTimeNotFound(_) => "LESSON_TIME_NOT_FOUND",
        };

        serializer.serialize_str(code)
    }
}

Binary file not shown.

88
src/app_state.rs Normal file
View File

@@ -0,0 +1,88 @@
use schedule_parser::schema::ParseResult;
use crate::utility::hasher::DigestHasher;
use crate::xls_downloader::basic_impl::BasicXlsDownloader;
use actix_web::web;
use chrono::{DateTime, Utc};
use diesel::{Connection, PgConnection};
use firebase_messaging_rs::FCMClient;
use sha1::{Digest, Sha1};
use std::env;
use std::hash::Hash;
use std::sync::Mutex;
/// A parsed schedule snapshot plus the metadata kept alongside it.
// NOTE(review): exact semantics of the three timestamps (fetched vs
// updated vs parsed) are not visible here — confirm against the cache
// update code before documenting them more precisely.
#[derive(Clone)]
pub struct Schedule {
    /// ETag associated with the downloaded schedule file.
    pub etag: String,
    pub fetched_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    pub parsed_at: DateTime<Utc>,
    /// Parsed groups and teachers.
    pub data: ParseResult,
}

/// VK ID OAuth client configuration.
#[derive(Clone)]
pub struct VkId {
    /// VK application id.
    pub client_id: i32,
    /// OAuth redirect URI registered for the application.
    pub redirect_url: String,
}

impl VkId {
    /// Reads the configuration from the `VKID_CLIENT_ID` and
    /// `VKID_REDIRECT_URI` environment variables.
    ///
    /// # Panics
    ///
    /// Panics when either variable is missing or `VKID_CLIENT_ID` is not
    /// an integer.
    pub fn new() -> Self {
        Self {
            client_id: env::var("VKID_CLIENT_ID")
                .expect("VKID_CLIENT_ID must be set")
                .parse()
                .expect("VKID_CLIENT_ID must be integer"),
            redirect_url: env::var("VKID_REDIRECT_URI").expect("VKID_REDIRECT_URI must be set"),
        }
    }
}
impl Schedule {
    /// Computes a SHA-1 fingerprint of the schedule contents.
    ///
    /// The ETag and every teacher/group entry are folded into the digest.
    /// Entries are hashed in sorted key order: `HashMap` iteration order is
    /// randomized per process, so hashing in raw iteration order would make
    /// identical schedules produce different fingerprints across restarts.
    pub fn hash(&self) -> String {
        let mut hasher = DigestHasher::from(Sha1::new());

        self.etag.hash(&mut hasher);

        // Deterministic order for both maps.
        let mut teachers: Vec<_> = self.data.teachers.iter().collect();
        teachers.sort_unstable_by(|a, b| a.0.cmp(b.0));
        teachers.iter().for_each(|e| e.hash(&mut hasher));

        let mut groups: Vec<_> = self.data.groups.iter().collect();
        groups.sort_unstable_by(|a, b| a.0.cmp(b.0));
        groups.iter().for_each(|e| e.hash(&mut hasher));

        hasher.finalize()
    }
}
/// Common data provided to endpoints.
pub struct AppState {
    /// Downloader for the schedule XLS file.
    pub downloader: Mutex<BasicXlsDownloader>,
    /// Latest parsed schedule; starts as `None`.
    pub schedule: Mutex<Option<Schedule>>,
    /// PostgreSQL connection shared by all request handlers.
    pub database: Mutex<PgConnection>,
    /// VK ID OAuth configuration.
    pub vk_id: VkId,
    pub fcm_client: Option<Mutex<FCMClient>>, // does not change at runtime, so the Mutex is what is optional, not the data inside it.
}

impl AppState {
    /// Builds the application state from environment configuration.
    ///
    /// # Panics
    ///
    /// Panics when `DATABASE_URL` is missing, the database is unreachable,
    /// the VK ID variables are absent, or FCM client creation fails.
    pub async fn new() -> Self {
        let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");

        Self {
            downloader: Mutex::new(BasicXlsDownloader::new()),
            schedule: Mutex::new(None),
            database: Mutex::new(
                PgConnection::establish(&database_url)
                    .unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
            ),
            vk_id: VkId::new(),
            // FCM is optional: enabled only when Google credentials are
            // provided via the environment.
            fcm_client: if env::var("GOOGLE_APPLICATION_CREDENTIALS").is_ok() {
                Some(Mutex::new(
                    FCMClient::new().await.expect("FCM client must be created"),
                ))
            } else {
                None
            },
        }
    }
}

/// Create a new object web::Data<AppState>.
pub async fn app_state() -> web::Data<AppState> {
    web::Data::new(AppState::new().await)
}

163
src/database/driver.rs Normal file
View File

@@ -0,0 +1,163 @@
/// Database access helpers for the `users` table.
pub mod users {
    use crate::app_state::AppState;
    use crate::database::models::User;
    use crate::database::schema::users::dsl::users;
    use crate::database::schema::users::dsl::*;
    use crate::utility::mutex::MutexScope;
    use actix_web::web;
    use diesel::{ExpressionMethods, QueryResult, insert_into};
    use diesel::{QueryDsl, RunQueryDsl};
    use diesel::{SaveChangesDsl, SelectableHelper};

    /// Fetches a user by primary key (account UUID).
    pub fn get(state: &web::Data<AppState>, _id: &String) -> QueryResult<User> {
        state.database.scope(|conn| {
            users
                .filter(id.eq(_id))
                .select(User::as_select())
                .first(conn)
        })
    }

    /// Fetches a user by unique username.
    pub fn get_by_username(state: &web::Data<AppState>, _username: &String) -> QueryResult<User> {
        state.database.scope(|conn| {
            users
                .filter(username.eq(_username))
                .select(User::as_select())
                .first(conn)
        })
    }

    //noinspection RsTraitObligations
    /// Fetches a user by linked VK account id.
    pub fn get_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> QueryResult<User> {
        state.database.scope(|conn| {
            users
                .filter(vk_id.eq(_vk_id))
                .select(User::as_select())
                .first(conn)
        })
    }

    //noinspection DuplicatedCode
    /// Returns `true` when a user with the given username exists.
    ///
    /// Any query error is treated as "not found".
    pub fn contains_by_username(state: &web::Data<AppState>, _username: &String) -> bool {
        // NOTE: diesel offers no obviously shorter existence check; the
        // same count-based pattern is repeated in `contains_by_vk_id`.
        state.database.scope(|conn| {
            match users
                .filter(username.eq(_username))
                .count()
                .get_result::<i64>(conn)
            {
                Ok(count) => count > 0,
                Err(_) => false,
            }
        })
    }

    //noinspection DuplicatedCode
    //noinspection RsTraitObligations
    /// Returns `true` when a user with the given VK id exists.
    ///
    /// Any query error is treated as "not found".
    pub fn contains_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> bool {
        state.database.scope(|conn| {
            match users
                .filter(vk_id.eq(_vk_id))
                .count()
                .get_result::<i64>(conn)
            {
                Ok(count) => count > 0,
                Err(_) => false,
            }
        })
    }

    /// Inserts a new user row, returning the number of affected rows.
    pub fn insert(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> {
        state
            .database
            .scope(|conn| insert_into(users).values(user).execute(conn))
    }

    /// Function declaration [User::save][UserSave::save].
    pub trait UserSave {
        /// Saves the user's changes to the database.
        ///
        /// # Arguments
        ///
        /// * `state`: The state of the actix-web application that stores the mutex of the [connection][diesel::PgConnection].
        ///
        /// returns: `QueryResult<User>`
        ///
        /// # Examples
        ///
        /// ```
        /// use crate::database::driver::users;
        ///
        /// #[derive(Deserialize)]
        /// struct Params {
        ///     pub username: String,
        /// }
        ///
        /// #[patch("/")]
        /// async fn patch_user(
        ///     app_state: web::Data<AppState>,
        ///     user: SyncExtractor<User>,
        ///     web::Query(params): web::Query<Params>,
        /// ) -> web::Json<User> {
        ///     let mut user = user.into_inner();
        ///
        ///     user.username = params.username;
        ///
        ///     match user.save(&app_state) {
        ///         Ok(user) => web::Json(user),
        ///         Err(e) => {
        ///             eprintln!("Failed to save user: {e}");
        ///             panic!();
        ///         }
        ///     }
        /// }
        /// ```
        fn save(&self, state: &web::Data<AppState>) -> QueryResult<User>;
    }

    /// Implementation of the [UserSave] trait.
    impl UserSave for User {
        fn save(&self, state: &web::Data<AppState>) -> QueryResult<User> {
            state.database.scope(|conn| self.save_changes::<Self>(conn))
        }
    }

    /// Test helper: deletes a user by username, returning whether any row
    /// was removed.
    #[cfg(test)]
    pub fn delete_by_username(state: &web::Data<AppState>, _username: &String) -> bool {
        state.database.scope(|conn| {
            match diesel::delete(users.filter(username.eq(_username))).execute(conn) {
                Ok(count) => count > 0,
                Err(_) => false,
            }
        })
    }

    /// Test helper: inserts a user, silently skipping conflicts.
    #[cfg(test)]
    pub fn insert_or_ignore(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> {
        state.database.scope(|conn| {
            insert_into(users)
                .values(user)
                .on_conflict_do_nothing()
                .execute(conn)
        })
    }
}
/// Database access helpers for the `fcm` table.
pub mod fcm {
    use crate::app_state::AppState;
    use crate::database::models::{FCM, User};
    use crate::utility::mutex::MutexScope;
    use actix_web::web;
    use diesel::QueryDsl;
    use diesel::RunQueryDsl;
    use diesel::{BelongingToDsl, QueryResult, SelectableHelper};

    /// Fetches the FCM registration row belonging to `user`.
    pub fn from_user(state: &web::Data<AppState>, user: &User) -> QueryResult<FCM> {
        state.database.scope(|conn| {
            FCM::belonging_to(&user)
                .select(FCM::as_select())
                .get_result(conn)
        })
    }
}

3
src/database/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
pub mod schema;
pub mod models;
pub mod driver;

84
src/database/models.rs Normal file
View File

@@ -0,0 +1,84 @@
use actix_macros::ResponderJson;
use diesel::QueryId;
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
/// Role determining a user's permissions; stored as the Postgres
/// `user_role` enum.
#[derive(
    Copy, Clone, PartialEq, Debug, Serialize, Deserialize, diesel_derive_enum::DbEnum, ToSchema,
)]
#[ExistingTypePath = "crate::database::schema::sql_types::UserRole"]
#[DbValueStyle = "UPPERCASE"]
#[serde(rename_all = "UPPERCASE")]
pub enum UserRole {
    Student,
    Teacher,
    Admin,
}
/// Account row of the `users` table.
#[derive(
    Identifiable,
    AsChangeset,
    Queryable,
    QueryId,
    Selectable,
    Serialize,
    Insertable,
    Debug,
    ToSchema,
    ResponderJson,
)]
#[diesel(table_name = crate::database::schema::users)]
#[diesel(treat_none_as_null = true)]
pub struct User {
    /// Account UUID.
    pub id: String,
    /// User name.
    pub username: String,
    /// BCrypt password hash.
    pub password: String,
    /// ID of the linked VK account.
    pub vk_id: Option<i32>,
    /// JWT access token.
    pub access_token: String,
    /// Group.
    pub group: String,
    /// Role.
    pub role: UserRole,
    /// Version of the installed Polytechnic+ application.
    pub version: String,
}

/// Firebase Cloud Messaging registration for a user (`fcm` table,
/// keyed by `user_id`).
#[derive(
    Debug,
    Clone,
    Serialize,
    Identifiable,
    Queryable,
    Selectable,
    Insertable,
    AsChangeset,
    Associations,
    ToSchema,
    ResponderJson,
)]
#[diesel(belongs_to(User))]
#[diesel(table_name = crate::database::schema::fcm)]
#[diesel(primary_key(user_id))]
pub struct FCM {
    /// Account UUID.
    pub user_id: String,
    /// FCM token.
    pub token: String,
    /// List of topics subscribed to by the user.
    pub topics: Vec<Option<String>>,
}

38
src/database/schema.rs Normal file
View File

@@ -0,0 +1,38 @@
// @generated automatically by Diesel CLI.
pub mod sql_types {
#[derive(diesel::query_builder::QueryId, Clone, diesel::sql_types::SqlType)]
#[diesel(postgres_type(name = "user_role"))]
pub struct UserRole;
}
diesel::table! {
fcm (user_id) {
user_id -> Text,
token -> Text,
topics -> Array<Nullable<Text>>,
}
}
diesel::table! {
use diesel::sql_types::*;
use super::sql_types::UserRole;
users (id) {
id -> Text,
username -> Text,
password -> Text,
vk_id -> Nullable<Int4>,
access_token -> Text,
group -> Text,
role -> UserRole,
version -> Text,
}
}
diesel::joinable!(fcm -> users (user_id));
diesel::allow_tables_to_appear_in_same_query!(
fcm,
users,
);

View File

@@ -0,0 +1,110 @@
use crate::app_state::AppState;
use crate::database::driver;
use crate::database::models::{FCM, User};
use crate::extractors::base::{FromRequestSync, SyncExtractor};
use crate::utility::jwt;
use actix_macros::ResponseErrorMessage;
use actix_web::body::BoxBody;
use actix_web::dev::Payload;
use actix_web::http::header;
use actix_web::{FromRequest, HttpRequest, web};
use derive_more::Display;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
/// Authorization failures produced by the Bearer-token extractors.
///
/// Every variant is rendered as an HTTP 401 response.
#[derive(Clone, Debug, Serialize, Deserialize, Display, ResponseErrorMessage)]
#[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum Error {
    /// There is no Authorization header in the request.
    #[display("No Authorization header found")]
    NoHeader,
    /// Unknown authorization type other than Bearer.
    #[display("Bearer token is required")]
    UnknownAuthorizationType,
    /// Invalid or expired access token.
    #[display("Invalid or expired access token")]
    InvalidAccessToken,
    /// The user bound to the token is not found in the database.
    #[display("No user associated with access token")]
    NoUser,
}

impl Error {
    /// Converts this error into a generic [actix_web::Error].
    pub fn into_err(self) -> actix_web::Error {
        actix_web::Error::from(self)
    }
}
/// User extractor from request with Bearer access token.
impl FromRequestSync for User {
    type Error = actix_web::Error;

    fn from_request_sync(req: &HttpRequest, _: &mut Payload) -> Result<Self, Self::Error> {
        // Missing and non-UTF-8 headers are both reported as NoHeader.
        let authorization = req
            .headers()
            .get(header::AUTHORIZATION)
            .ok_or(Error::NoHeader.into_err())?
            .to_str()
            .map_err(|_| Error::NoHeader.into_err())?
            .to_string();

        // Expect exactly "Bearer <token>".
        let parts: Vec<&str> = authorization.split(' ').collect();
        if parts.len() != 2 || parts[0] != "Bearer" {
            return Err(Error::UnknownAuthorizationType.into_err());
        }

        // The JWT payload carries the user id.
        let user_id = jwt::verify_and_decode(&parts[1].to_string())
            .map_err(|_| Error::InvalidAccessToken.into_err())?;

        // AppState is always registered on the app, so unwrap cannot fail
        // in a correctly configured server.
        let app_state = req.app_data::<web::Data<AppState>>().unwrap();

        driver::users::get(&app_state, &user_id).map_err(|_| Error::NoUser.into())
    }
}
/// Extraction result bundling the authorized [User] with optional FCM data.
///
/// The `FCM` const parameter selects at compile time whether the FCM row
/// is loaded alongside the user.
pub struct UserExtractor<const FCM: bool> {
    user: User,
    fcm: Option<FCM>,
}

impl<const FCM: bool> UserExtractor<{ FCM }> {
    /// The authorized user.
    pub fn user(&self) -> &User {
        &self.user
    }

    /// The user's FCM row, if one exists.
    ///
    /// # Panics
    ///
    /// Panics when called on an extractor instantiated with `FCM = false`,
    /// which never loads the data.
    pub fn fcm(&self) -> &Option<FCM> {
        if !FCM {
            panic!("FCM marked as not required, but it has been requested")
        }

        &self.fcm
    }
}

/// Extractor of user and additional parameters from request with Bearer token.
impl<const FCM: bool> FromRequestSync for UserExtractor<{ FCM }> {
    type Error = actix_web::Error;

    fn from_request_sync(req: &HttpRequest, payload: &mut Payload) -> Result<Self, Self::Error> {
        // Authorize first; any failure propagates as a 401.
        let user = SyncExtractor::<User>::from_request(req, payload)
            .into_inner()?
            .into_inner();

        let app_state = req.app_data::<web::Data<AppState>>().unwrap();

        Ok(Self {
            // A missing FCM row is not an error — `fcm` simply stays None.
            fcm: if FCM {
                driver::fcm::from_user(&app_state, &user).ok()
            } else {
                None
            },
            user,
        })
    }
}

151
src/extractors/base.rs Normal file
View File

@@ -0,0 +1,151 @@
use actix_web::dev::Payload;
use actix_web::{FromRequest, HttpRequest};
use futures_util::future::LocalBoxFuture;
use std::future::{Ready, ready};
use std::ops;
/// # Async extractor.
/// Asynchronous object extractor from a query.
pub struct AsyncExtractor<T>(T);

impl<T> AsyncExtractor<T> {
    #[allow(dead_code)]
    /// Retrieve the object extracted with the extractor.
    pub fn into_inner(self) -> T {
        self.0
    }
}

/// Read-only access to the extracted value.
impl<T> ops::Deref for AsyncExtractor<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

/// Mutable access to the extracted value.
impl<T> ops::DerefMut for AsyncExtractor<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

pub trait FromRequestAsync: Sized {
    type Error: Into<actix_web::Error>;

    /// Asynchronous function for extracting data from a query.
    ///
    /// returns: Result<Self, Self::Error>
    ///
    /// # Examples
    ///
    /// ```
    /// struct User {
    ///     pub id: String,
    ///     pub username: String,
    /// }
    ///
    /// // TODO: this extractor is currently unused, so a realistic usage
    /// // example has yet to be written.
    ///
    /// #[get("/")]
    /// fn get_user_async(
    ///     user: web::AsyncExtractor<User>,
    /// ) -> web::Json<User> {
    ///     let user = user.into_inner();
    ///
    ///     web::Json(user)
    /// }
    /// ```
    async fn from_request_async(req: HttpRequest, payload: Payload) -> Result<Self, Self::Error>;
}

/// Bridges [FromRequestAsync] into actix-web's [FromRequest].
impl<T: FromRequestAsync> FromRequest for AsyncExtractor<T> {
    type Error = T::Error;
    type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;

    fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
        // The future must own the request and payload because it may
        // outlive the borrows passed into this method.
        let req = req.clone();
        let payload = payload.take();

        Box::pin(async move {
            T::from_request_async(req, payload)
                .await
                .map(|res| Self(res))
        })
    }
}
/// # Sync extractor.
/// Synchronous object extractor from a query.
pub struct SyncExtractor<T>(T);

impl<T> SyncExtractor<T> {
    /// Retrieving an object extracted with the extractor.
    pub fn into_inner(self) -> T {
        self.0
    }
}

/// Read-only access to the extracted value.
impl<T> ops::Deref for SyncExtractor<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

/// Mutable access to the extracted value.
impl<T> ops::DerefMut for SyncExtractor<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

pub trait FromRequestSync: Sized {
    type Error: Into<actix_web::Error>;

    /// Synchronous function for extracting data from a query.
    ///
    /// returns: Result<Self, Self::Error>
    ///
    /// # Examples
    ///
    /// ```
    /// struct User {
    ///     pub id: String,
    ///     pub username: String,
    /// }
    ///
    /// impl FromRequestSync for User {
    ///     type Error = actix_web::Error;
    ///
    ///     fn from_request_sync(req: &HttpRequest, _: &mut Payload) -> Result<Self, Self::Error> {
    ///         // do magic here.
    ///
    ///         Ok(User {
    ///             id: "qwerty".to_string(),
    ///             username: "n08i40k".to_string()
    ///         })
    ///     }
    /// }
    ///
    /// #[get("/")]
    /// fn get_user_sync(
    ///     user: web::SyncExtractor<User>,
    /// ) -> web::Json<User> {
    ///     let user = user.into_inner();
    ///
    ///     web::Json(user)
    /// }
    /// ```
    fn from_request_sync(req: &HttpRequest, payload: &mut Payload) -> Result<Self, Self::Error>;
}

/// Bridges [FromRequestSync] into actix-web's [FromRequest]; the result is
/// immediately ready since no awaiting is involved.
impl<T: FromRequestSync> FromRequest for SyncExtractor<T> {
    type Error = T::Error;
    type Future = Ready<Result<Self, Self::Error>>;

    fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
        ready(T::from_request_sync(req, payload).map(|res| Self(res)))
    }
}

2
src/extractors/mod.rs Normal file
View File

@@ -0,0 +1,2 @@
pub mod authorized_user;
pub mod base;

View File

@@ -1,15 +1,121 @@
use std::fs;
use std::path::Path;
use schedule_parser::parse_xls;
use crate::app_state::{AppState, app_state};
use crate::middlewares::authorization::JWTAuthorization;
use crate::middlewares::content_type::ContentTypeBootstrap;
use actix_web::dev::{ServiceFactory, ServiceRequest};
use actix_web::{App, Error, HttpServer};
use dotenvy::dotenv;
use std::io;
use utoipa_actix_web::AppExt;
use utoipa_actix_web::scope::Scope;
use utoipa_rapidoc::RapiDoc;
fn main() {
let groups = parse_xls(Path::new("./schedule.xls"));
mod app_state;
fs::write(
"./schedule.json",
serde_json::to_string_pretty(&groups)
.expect("Failed to serialize schedule!")
.as_bytes(),
)
.expect("Failed to write schedule");
mod database;
mod xls_downloader;
mod extractors;
mod middlewares;
mod routes;
mod utility;
mod test_env;
/// Assembles the full API route tree under the given scope (e.g. `/api/v1`).
///
/// * `/auth` — public sign-in/sign-up endpoints.
/// * `/users` — profile management; JWT required.
/// * `/schedule` — schedule access; JWT required except for the public
///   `/group-names` and `/teacher-names` listings.
/// * `/fcm` — push-notification token management; JWT required.
/// * `/vkid` — VK ID OAuth endpoint.
pub fn get_api_scope<
    I: Into<Scope<T>>,
    T: ServiceFactory<ServiceRequest, Config = (), Error = Error, InitError = ()>,
>(
    scope: I,
) -> Scope<T> {
    let auth_scope = utoipa_actix_web::scope("/auth")
        .service(routes::auth::sign_in)
        .service(routes::auth::sign_in_vk)
        .service(routes::auth::sign_up)
        .service(routes::auth::sign_up_vk);

    let users_scope = utoipa_actix_web::scope("/users")
        .wrap(JWTAuthorization::default())
        .service(routes::users::change_group)
        .service(routes::users::change_username)
        .service(routes::users::me);

    let schedule_scope = utoipa_actix_web::scope("/schedule")
        .wrap(JWTAuthorization {
            ignore: &["/group-names", "/teacher-names"],
        })
        .service(routes::schedule::schedule)
        .service(routes::schedule::update_download_url)
        .service(routes::schedule::cache_status)
        .service(routes::schedule::group)
        .service(routes::schedule::group_names)
        .service(routes::schedule::teacher)
        .service(routes::schedule::teacher_names);

    let fcm_scope = utoipa_actix_web::scope("/fcm")
        .wrap(JWTAuthorization::default())
        .service(routes::fcm::update_callback)
        .service(routes::fcm::set_token);

    let vk_id_scope = utoipa_actix_web::scope("/vkid") //
        .service(routes::vk_id::oauth);

    utoipa_actix_web::scope(scope)
        .service(auth_scope)
        .service(users_scope)
        .service(schedule_scope)
        .service(fcm_scope)
        .service(vk_id_scope)
}
/// Builds the shared application state and runs the HTTP server on
/// 0.0.0.0:5050 until it exits.
async fn async_main() -> io::Result<()> {
    println!("Starting server...");

    let app_state = app_state().await;

    HttpServer::new(move || {
        let (app, api) = App::new()
            .into_utoipa_app()
            .app_data(app_state.clone())
            .service(
                get_api_scope("/api/v1")
                    .wrap(sentry_actix::Sentry::new())
                    .wrap(ContentTypeBootstrap),
            )
            .split_for_parts();

        let rapidoc_service = RapiDoc::with_openapi("/api-docs-json", api).path("/api-docs");

        // Serve rapidoc from jsdelivr: the unpkg URL fails with a CORS
        // error when the docs page is opened from a non-localhost origin.
        let patched_rapidoc_html = rapidoc_service.to_html().replace(
            "https://unpkg.com/rapidoc/dist/rapidoc-min.js",
            "https://cdn.jsdelivr.net/npm/rapidoc/dist/rapidoc-min.min.js",
        );

        app.service(rapidoc_service.custom_html(patched_rapidoc_html))
    })
    .workers(4)
    .bind(("0.0.0.0", 5050))?
    .run()
    .await
}
/// Entry point: configures Sentry, environment and logging, then blocks on
/// the actix system until the HTTP server exits.
fn main() -> io::Result<()> {
    // The guard must stay alive for the whole program so buffered Sentry
    // events are flushed on exit.
    let _guard = sentry::init((
        "https://9c33db76e89984b3f009b28a9f4b5954@sentry.n08i40k.ru/8",
        sentry::ClientOptions {
            release: sentry::release_name!(),
            send_default_pii: true,
            ..Default::default()
        },
    ));

    // A missing .env file is not an error: in Docker the variables come
    // from the environment directly, and each required variable is still
    // validated individually at startup.
    dotenv().ok();

    env_logger::init();

    actix_web::rt::System::new().block_on(async_main())
}

View File

@@ -0,0 +1,116 @@
use crate::database::models::User;
use crate::extractors::authorized_user;
use crate::extractors::base::FromRequestSync;
use actix_web::body::{BoxBody, EitherBody};
use actix_web::dev::{Payload, Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
use actix_web::{Error, HttpRequest, ResponseError};
use futures_util::future::LocalBoxFuture;
use std::future::{Ready, ready};
/// Middleware guard working with JWT tokens.
pub struct JWTAuthorization {
    /// List of ignored endpoints.
    pub ignore: &'static [&'static str],
}

/// By default nothing is ignored: every endpoint requires a valid token.
impl Default for JWTAuthorization {
    fn default() -> Self {
        Self { ignore: &[] }
    }
}
/// Wraps a service with [JWTAuthorizationMiddleware], passing the ignore
/// list through; construction never fails.
impl<S, B> Transform<S, ServiceRequest> for JWTAuthorization
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<EitherBody<B, BoxBody>>;
    type Error = Error;
    type Transform = JWTAuthorizationMiddleware<S>;
    type InitError = ();
    type Future = Ready<Result<Self::Transform, Self::InitError>>;

    fn new_transform(&self, service: S) -> Self::Future {
        ready(Ok(JWTAuthorizationMiddleware {
            service,
            ignore: self.ignore,
        }))
    }
}

/// The middleware instance produced by [JWTAuthorization].
pub struct JWTAuthorizationMiddleware<S> {
    service: S,
    /// List of ignored endpoints.
    ignore: &'static [&'static str],
}
impl<S, B> JWTAuthorizationMiddleware<S>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    /// Checking the validity of the token.
    ///
    /// Delegates to the [User] extractor: the request is authorized iff a
    /// user can be resolved from its `Authorization: Bearer` header.
    fn check_authorization(
        &self,
        req: &HttpRequest,
        payload: &mut Payload,
    ) -> Result<(), authorized_user::Error> {
        User::from_request_sync(req, payload)
            .map(|_| ())
            .map_err(|e| e.as_error::<authorized_user::Error>().unwrap().clone())
    }

    /// Returns `true` when the requested path is in the ignore list.
    ///
    /// An entry matches the path itself and anything nested under it
    /// (`/x` matches `/x`, `/x/...`, `/x?...`) but not mere string
    /// prefixes (`/x` must not match `/xy`).
    fn should_skip(&self, req: &ServiceRequest) -> bool {
        let path = req.match_info().unprocessed();

        self.ignore.iter().any(|ignore| {
            if !path.starts_with(ignore) {
                return false;
            }

            // The byte right after the matched prefix decides whether this
            // is an exact segment match.
            match path.as_bytes().get(ignore.len()) {
                Some(next) => [b'?', b'/'].contains(next),
                None => true,
            }
        })
    }
}
// Note: the unused lifetime parameter `'a` from the original impl header
// has been removed.
impl<S, B> Service<ServiceRequest> for JWTAuthorizationMiddleware<S>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<EitherBody<B, BoxBody>>;
    type Error = Error;
    type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;

    forward_ready!(service);

    fn call(&self, req: ServiceRequest) -> Self::Future {
        // Public endpoints bypass the token check entirely.
        if self.should_skip(&req) {
            let fut = self.service.call(req);
            return Box::pin(async move { Ok(fut.await?.map_into_left_body()) });
        }

        // The extractor needs HttpRequest + Payload, so split the request
        // and reassemble it afterwards.
        let (http_req, mut payload) = req.into_parts();

        if let Err(err) = self.check_authorization(&http_req, &mut payload) {
            // Short-circuit with the extractor's 401 response.
            return Box::pin(async move {
                Ok(ServiceResponse::new(
                    http_req,
                    err.error_response().map_into_right_body(),
                ))
            });
        }

        let req = ServiceRequest::from_parts(http_req, payload);
        let fut = self.service.call(req);
        Box::pin(async move { Ok(fut.await?.map_into_left_body()) })
    }
}

View File

@@ -0,0 +1,64 @@
use actix_web::Error;
use actix_web::body::{BoxBody, EitherBody};
use actix_web::dev::{Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
use actix_web::http::header;
use actix_web::http::header::HeaderValue;
use futures_util::future::LocalBoxFuture;
use std::future::{Ready, ready};
/// Middleware to specify the encoding in the Content-Type header.
pub struct ContentTypeBootstrap;

/// Wraps a service with [ContentTypeMiddleware]; construction never fails.
impl<S, B> Transform<S, ServiceRequest> for ContentTypeBootstrap
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<EitherBody<B, BoxBody>>;
    type Error = Error;
    type Transform = ContentTypeMiddleware<S>;
    type InitError = ();
    type Future = Ready<Result<Self::Transform, Self::InitError>>;

    fn new_transform(&self, service: S) -> Self::Future {
        ready(Ok(ContentTypeMiddleware { service }))
    }
}

/// The middleware instance produced by [ContentTypeBootstrap].
pub struct ContentTypeMiddleware<S> {
    service: S,
}
// Note: the unused lifetime parameter `'a` from the original impl header
// has been removed, and the header lookup uses the same
// `header::CONTENT_TYPE` constant as the insertion below.
impl<S, B> Service<ServiceRequest> for ContentTypeMiddleware<S>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<EitherBody<B, BoxBody>>;
    type Error = Error;
    type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;

    forward_ready!(service);

    fn call(&self, req: ServiceRequest) -> Self::Future {
        let fut = self.service.call(req);

        Box::pin(async move {
            let mut response = fut.await?;
            let headers = response.response_mut().headers_mut();

            // Only bare "application/json" responses are rewritten;
            // everything else passes through untouched.
            if let Some(content_type) = headers.get(header::CONTENT_TYPE) {
                if content_type == "application/json" {
                    // NOTE(review): "utf8" is a non-preferred alias of the
                    // IANA charset name "UTF-8"; most clients accept it,
                    // but consider switching to "charset=utf-8".
                    headers.insert(
                        header::CONTENT_TYPE,
                        HeaderValue::from_static("application/json; charset=utf8"),
                    );
                }
            }

            Ok(response.map_into_left_body())
        })
    }
}

2
src/middlewares/mod.rs Normal file
View File

@@ -0,0 +1,2 @@
pub mod authorization;
pub mod content_type;

8
src/routes/auth/mod.rs Normal file
View File

@@ -0,0 +1,8 @@
mod sign_in;
mod sign_up;
mod shared;
pub use sign_in::*;
pub use sign_up::*;
// TODO: change-password

83
src/routes/auth/shared.rs Normal file
View File

@@ -0,0 +1,83 @@
use jsonwebtoken::errors::ErrorKind;
use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
use serde::{Deserialize, Serialize};
// NOTE(review): appears unused in this module — decoding below uses
// `Claims`; confirm before removing.
/// Full claim set of a VK ID token.
#[derive(Deserialize, Serialize)]
struct TokenData {
    iis: String,
    sub: i32,
    app: i32,
    exp: i32,
    iat: i32,
    jti: i32,
}

/// Subset of VK ID token claims actually validated by [parse_vk_id].
#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    /// VK user id.
    sub: i32,
    /// Issuer; expected to be "VK".
    iis: String,
    /// Token type; expected to be 21.
    jti: i32,
    /// VK application id the token was issued for.
    app: i32,
}

/// Everything that can go wrong while validating a VK ID token.
#[derive(Debug, PartialEq)]
pub enum Error {
    /// Any jsonwebtoken error kind not mapped to a variant below.
    JwtError(ErrorKind),
    /// The signature does not match VK's public key.
    InvalidSignature,
    /// The token is malformed.
    InvalidToken,
    /// The token has expired.
    Expired,
    /// `iis` claim was not "VK".
    UnknownIssuer(String),
    /// `jti` claim was not the expected token type.
    UnknownType(i32),
    /// `app` claim did not match our client id.
    UnknownClientId(i32),
}

//noinspection SpellCheckingInspection
/// VK's RSA public key used to verify RS256 signatures of VK ID tokens.
const VK_PUBLIC_KEY: &str = concat!(
    "-----BEGIN PUBLIC KEY-----\n",
    "MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvsvJlhFX9Ju/pvCz1frB\n",
    "DgJs592VjdwQuRAmnlJAItyHkoiDIOEocPzgcUBTbDf1plDcTyO2RCkUt0pz0WK6\n",
    "6HNhpJyIfARjaWHeUlv4TpuHXAJJsBKklkU2gf1cjID+40sWWYjtq5dAkXnSJUVA\n",
    "UR+sq0lJ7GmTdJtAr8hzESqGEcSP15PTs7VUdHZ1nkC2XgkuR8KmKAUb388ji1Q4\n",
    "n02rJNOPQgd9r0ac4N2v/yTAFPXumO78N25bpcuWf5vcL9e8THk/U2zt7wf+aAWL\n",
    "748e0pREqNluTBJNZfmhC79Xx6GHtwqHyyduiqfPmejmiujNM/rqnA4e30Tg86Yn\n",
    "cNZ6vLJyF72Eva1wXchukH/aLispbY+EqNPxxn4zzCWaLKHG87gaCxpVv9Tm0jSD\n",
    "2es22NjrUbtb+2pAGnXbyDp2eGUqw0RrTQFZqt/VcmmSCE45FlcZMT28otrwG1ZB\n",
    "kZAb5Js3wLEch3ZfYL8sjhyNRPBmJBrAvzrd8qa3rdUjkC9sKyjGAaHu2MNmFl1Y\n",
    "JFQ3J54tGpkGgJjD7Kz3w0K6OiPDlVCNQN5sqXm24fCw85Pbi8SJiaLTp/CImrs1\n",
    "Z3nHW5q8hljA7OGmqfOP0nZS/5zW9GHPyepsI1rW6CympYLJ15WeNzePxYS5KEX9\n",
    "EncmkSD9b45ge95hJeJZteUCAwEAAQ==\n",
    "-----END PUBLIC KEY-----"
);
pub fn parse_vk_id(token_str: &String, client_id: i32) -> Result<i32, Error> {
let dkey = DecodingKey::from_rsa_pem(VK_PUBLIC_KEY.as_bytes()).unwrap();
match decode::<Claims>(&token_str, &dkey, &Validation::new(Algorithm::RS256)) {
Ok(token_data) => {
let claims = token_data.claims;
if claims.iis != "VK" {
Err(Error::UnknownIssuer(claims.iis))
} else if claims.jti != 21 {
Err(Error::UnknownType(claims.jti))
} else if claims.app != client_id {
Err(Error::UnknownClientId(claims.app))
} else {
Ok(claims.sub)
}
}
Err(err) => Err(match err.into_kind() {
ErrorKind::InvalidToken => Error::InvalidToken,
ErrorKind::InvalidSignature => Error::InvalidSignature,
ErrorKind::InvalidAlgorithmName => Error::InvalidToken,
ErrorKind::MissingRequiredClaim(_) => Error::InvalidToken,
ErrorKind::ExpiredSignature => Error::Expired,
ErrorKind::InvalidAlgorithm => Error::InvalidToken,
ErrorKind::MissingAlgorithm => Error::InvalidToken,
ErrorKind::Base64(_) => Error::InvalidToken,
ErrorKind::Json(_) => Error::InvalidToken,
ErrorKind::Utf8(_) => Error::InvalidToken,
kind => Error::JwtError(kind),
}),
}
}

231
src/routes/auth/sign_in.rs Normal file
View File

@@ -0,0 +1,231 @@
use self::schema::*;
use crate::database::driver;
use crate::database::models::User;
use crate::routes::auth::shared::parse_vk_id;
use crate::routes::auth::sign_in::schema::SignInData::{Default, Vk};
use crate::routes::schema::user::UserResponse;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use crate::utility::mutex::MutexScope;
use crate::{AppState, utility};
use actix_web::{post, web};
use diesel::SaveChangesDsl;
use web::Json;
/// Shared sign-in flow for both password and VK-based authorization.
///
/// Looks the user up (by username for `Default`, by VK id for `Vk`),
/// verifies the bcrypt password only for the `Default` variant, then
/// issues a fresh JWT access token and persists it on the user row.
///
/// Any lookup or verification failure is reported uniformly as
/// `ErrorCode::IncorrectCredentials`, so callers cannot distinguish
/// "unknown user" from "wrong password".
async fn sign_in_combined(
    data: SignInData,
    app_state: &web::Data<AppState>,
) -> Result<UserResponse, ErrorCode> {
    let user = match &data {
        Default(data) => driver::users::get_by_username(&app_state, &data.username),
        Vk(id) => driver::users::get_by_vk_id(&app_state, *id),
    };

    match user {
        Ok(mut user) => {
            // VK sign-in skips the password check: possession of a valid
            // VK ID token is the proof of identity.
            if let Default(data) = data {
                match bcrypt::verify(&data.password, &user.password) {
                    Ok(result) => {
                        if !result {
                            return Err(ErrorCode::IncorrectCredentials);
                        }
                    }
                    Err(_) => {
                        return Err(ErrorCode::IncorrectCredentials);
                    }
                }
            }

            // Rotate the access token on every successful sign-in.
            user.access_token = utility::jwt::encode(&user.id);

            app_state.database.scope(|conn| {
                user.save_changes::<User>(conn)
                    .expect("Failed to update user")
            });

            Ok(user.into())
        }
        Err(_) => Err(ErrorCode::IncorrectCredentials),
    }
}
/// Password-based sign-in endpoint; delegates to the shared flow.
#[utoipa::path(responses(
    (status = OK, body = UserResponse),
    (status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>)
))]
#[post("/sign-in")]
pub async fn sign_in(data: Json<Request>, app_state: web::Data<AppState>) -> ServiceResponse {
    sign_in_combined(Default(data.into_inner()), &app_state)
        .await
        .into()
}
/// VK ID sign-in endpoint: validates the VK token, then runs the shared flow.
#[utoipa::path(responses(
    (status = OK, body = UserResponse),
    (status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>)
))]
#[post("/sign-in-vk")]
pub async fn sign_in_vk(
    data_json: Json<vk::Request>,
    app_state: web::Data<AppState>,
) -> ServiceResponse {
    let data = data_json.into_inner();

    // Any token-validation failure maps to a single opaque error code.
    let Ok(id) = parse_vk_id(&data.access_token, app_state.vk_id.client_id) else {
        return ErrorCode::InvalidVkAccessToken.into_response();
    };

    sign_in_combined(Vk(id), &app_state).await.into()
}
mod schema {
    use crate::routes::schema::user::UserResponse;
    use actix_macros::{IntoResponseError, StatusCode};
    use serde::{Deserialize, Serialize};
    use utoipa::ToSchema;

    /// Request body for password-based sign-in.
    #[derive(Deserialize, Serialize, ToSchema)]
    #[schema(as = SignIn::Request)]
    pub struct Request {
        /// User name.
        #[schema(examples("n08i40k"))]
        pub username: String,
        /// Password.
        pub password: String,
    }

    pub mod vk {
        use serde::{Deserialize, Serialize};
        use utoipa::ToSchema;

        /// Request body for VK ID sign-in.
        #[derive(Serialize, Deserialize, ToSchema)]
        #[serde(rename_all = "camelCase")]
        #[schema(as = SignInVk::Request)]
        pub struct Request {
            /// VK ID token.
            pub access_token: String,
        }
    }

    pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;

    /// Error codes returned by the sign-in endpoints (served as HTTP 406).
    #[derive(Serialize, ToSchema, Clone, IntoResponseError, StatusCode)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = SignIn::ErrorCode)]
    #[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
    pub enum ErrorCode {
        /// Incorrect username or password.
        IncorrectCredentials,
        /// Invalid VK ID token.
        InvalidVkAccessToken,
    }

    /// Internal
    /// Type of authorization.
    pub enum SignInData {
        /// Username and password.
        Default(Request),
        /// Identifier of the linked VK account.
        Vk(i32),
    }
}
#[cfg(test)]
mod tests {
    use super::schema::*;
    use crate::database::driver;
    use crate::database::models::{User, UserRole};
    use crate::routes::auth::sign_in::sign_in;
    use crate::test_env::tests::{static_app_state, test_app_state, test_env};
    use crate::utility;
    use actix_test::test_app;
    use actix_web::dev::ServiceResponse;
    use actix_web::http::Method;
    use actix_web::http::StatusCode;
    use actix_web::test;
    use sha1::{Digest, Sha1};
    use std::fmt::Write;

    /// Sends a sign-in request to a freshly built test application.
    async fn sign_in_client(data: Request) -> ServiceResponse {
        let app = test_app(test_app_state(Default::default()).await, sign_in).await;

        let req = test::TestRequest::with_uri("/sign-in")
            .method(Method::POST)
            .set_json(data)
            .to_request();

        test::call_service(&app, req).await
    }

    /// Inserts (if missing) a test user with password "example".
    ///
    /// The user id is derived deterministically from the username (first
    /// 12 bytes of its SHA-1, hex-encoded) so reruns are idempotent.
    async fn prepare(username: String) {
        let id = {
            let mut sha = Sha1::new();
            sha.update(&username);

            let result = sha.finalize();
            let bytes = &result[..12];

            let mut hex = String::new();
            for byte in bytes {
                write!(&mut hex, "{:02x}", byte).unwrap();
            }

            hex
        };

        test_env();

        let app_state = static_app_state().await;

        driver::users::insert_or_ignore(
            &app_state,
            &User {
                id: id.clone(),
                username,
                password: bcrypt::hash("example".to_string(), bcrypt::DEFAULT_COST).unwrap(),
                vk_id: None,
                access_token: utility::jwt::encode(&id),
                group: "ИС-214/23".to_string(),
                role: UserRole::Student,
                version: "1.0.0".to_string(),
            },
        )
        .unwrap();
    }

    /// Valid credentials must yield 200 OK.
    #[actix_web::test]
    async fn sign_in_ok() {
        prepare("test::sign_in_ok".to_string()).await;

        let resp = sign_in_client(Request {
            username: "test::sign_in_ok".to_string(),
            password: "example".to_string(),
        })
        .await;

        assert_eq!(resp.status(), StatusCode::OK);
    }

    /// Both a wrong username and a wrong password must yield 406.
    #[actix_web::test]
    async fn sign_in_err() {
        prepare("test::sign_in_err".to_string()).await;

        let invalid_username = sign_in_client(Request {
            username: "test::sign_in_err::username".to_string(),
            password: "example".to_string(),
        })
        .await;

        assert_eq!(invalid_username.status(), StatusCode::NOT_ACCEPTABLE);

        let invalid_password = sign_in_client(Request {
            username: "test::sign_in_err".to_string(),
            password: "bad_password".to_string(),
        })
        .await;

        assert_eq!(invalid_password.status(), StatusCode::NOT_ACCEPTABLE);
    }
}

371
src/routes/auth/sign_up.rs Normal file
View File

@@ -0,0 +1,371 @@
use self::schema::*;
use crate::AppState;
use crate::database::driver;
use crate::database::models::UserRole;
use crate::routes::auth::shared::{Error, parse_vk_id};
use crate::routes::schema::user::UserResponse;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{post, web};
use rand::{Rng, rng};
use web::Json;
/// Shared registration flow for both password and VK-based sign-up.
///
/// Validation order: the role must not be `Admin`; the group must exist
/// in the currently parsed schedule (skipped when no schedule is loaded
/// yet); the username and, if present, the VK id must be unused.
///
/// On success the user is inserted and returned as a `UserResponse`.
async fn sign_up_combined(
    data: SignUpData,
    app_state: &web::Data<AppState>,
) -> Result<UserResponse, ErrorCode> {
    // If user selected forbidden role.
    if data.role == UserRole::Admin {
        return Err(ErrorCode::DisallowedRole);
    }

    // If specified group doesn't exist in schedule.
    // The schedule lock is scoped so it is released before any DB access
    // below (previously the guard lived until the end of the function).
    {
        let schedule_opt = app_state.schedule.lock().unwrap();
        if let Some(schedule) = &*schedule_opt {
            if !schedule.data.groups.contains_key(&data.group) {
                return Err(ErrorCode::InvalidGroupName);
            }
        }
    }

    // If user with specified username already exists.
    if driver::users::contains_by_username(&app_state, &data.username) {
        return Err(ErrorCode::UsernameAlreadyExists);
    }

    // If user with specified VKID already exists.
    if let Some(id) = data.vk_id {
        if driver::users::contains_by_vk_id(&app_state, id) {
            return Err(ErrorCode::VkAlreadyExists);
        }
    }

    let user = data.into();
    driver::users::insert(&app_state, &user).unwrap();

    // `Ok(...)` already has the right type; the old trailing `.into()`
    // was a redundant identity conversion.
    Ok(UserResponse::from(&user))
}
/// Password-based sign-up endpoint; delegates to the shared flow with no
/// VK account attached.
#[utoipa::path(responses(
    (status = OK, body = UserResponse),
    (status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>)
))]
#[post("/sign-up")]
pub async fn sign_up(data_json: Json<Request>, app_state: web::Data<AppState>) -> ServiceResponse {
    let data = data_json.into_inner();

    sign_up_combined(
        SignUpData {
            username: data.username,
            password: data.password,
            vk_id: None,
            group: data.group,
            role: data.role,
            version: data.version,
        },
        &app_state,
    )
    .await
    .into()
}
/// VK ID sign-up endpoint: validates the VK token, then registers the
/// user with a random 16-character alphanumeric password.
#[utoipa::path(responses(
    (status = OK, body = UserResponse),
    (status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>)
))]
#[post("/sign-up-vk")]
pub async fn sign_up_vk(
    data_json: Json<vk::Request>,
    app_state: web::Data<AppState>,
) -> ServiceResponse {
    let data = data_json.into_inner();

    match parse_vk_id(&data.access_token, app_state.vk_id.client_id) {
        Ok(id) => sign_up_combined(
            SignUpData {
                username: data.username,
                // Random throwaway password; VK-registered users are not
                // expected to ever sign in with it.
                password: rng()
                    .sample_iter(&rand::distr::Alphanumeric)
                    .take(16)
                    .map(char::from)
                    .collect(),
                vk_id: Some(id),
                group: data.group,
                role: data.role,
                version: data.version,
            },
            &app_state,
        )
        .await
        .into(),
        Err(err) => {
            // An expired token is a normal client-side condition; anything
            // else is logged for diagnosis.
            if err != Error::Expired {
                eprintln!("Failed to parse vk id token!");
                eprintln!("{:?}", err);
            }

            ErrorCode::InvalidVkAccessToken.into_response()
        }
    }
}
mod schema {
    use crate::database::models::{User, UserRole};
    use crate::routes::schema::user::UserResponse;
    use crate::utility;
    use actix_macros::{IntoResponseError, StatusCode};
    use objectid::ObjectId;
    use serde::{Deserialize, Serialize};

    /// WEB
    /// Request body for password-based sign-up.
    #[derive(Serialize, Deserialize, utoipa::ToSchema)]
    #[schema(as = SignUp::Request)]
    pub struct Request {
        /// User name.
        #[schema(examples("n08i40k"))]
        pub username: String,
        /// Password.
        pub password: String,
        /// Group.
        #[schema(examples("ИС-214/23"))]
        pub group: String,
        /// Role.
        pub role: UserRole,
        /// Version of the installed Polytechnic+ application.
        #[schema(examples("3.0.0"))]
        pub version: String,
    }

    pub mod vk {
        use crate::database::models::UserRole;
        use serde::{Deserialize, Serialize};

        /// Request body for VK ID sign-up.
        #[derive(Serialize, Deserialize, utoipa::ToSchema)]
        #[serde(rename_all = "camelCase")]
        #[schema(as = SignUpVk::Request)]
        pub struct Request {
            /// VK ID token.
            pub access_token: String,
            /// User name.
            #[schema(examples("n08i40k"))]
            pub username: String,
            /// Group.
            #[schema(examples("ИС-214/23"))]
            pub group: String,
            /// Role.
            pub role: UserRole,
            /// Version of the installed Polytechnic+ application.
            #[schema(examples("3.0.0"))]
            pub version: String,
        }
    }

    pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;

    /// Error codes returned by the sign-up endpoints (served as HTTP 406).
    #[derive(Clone, Serialize, utoipa::ToSchema, IntoResponseError, StatusCode)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = SignUp::ErrorCode)]
    #[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
    pub enum ErrorCode {
        /// The forbidden Admin role was requested.
        DisallowedRole,
        /// Unknown group name.
        InvalidGroupName,
        /// A user with this name is already registered.
        UsernameAlreadyExists,
        /// Invalid VK ID token.
        InvalidVkAccessToken,
        /// A user with this VK account is already registered.
        VkAlreadyExists,
    }

    /// Internal
    /// Data for registration.
    pub struct SignUpData {
        /// User name.
        pub username: String,
        /// Password.
        ///
        /// Should be present even if registration occurs using the VK ID token.
        pub password: String,
        /// VK account identifier.
        pub vk_id: Option<i32>,
        /// Group.
        pub group: String,
        /// Role.
        pub role: UserRole,
        /// Version of the installed Polytechnic+ application.
        pub version: String,
    }

    /// Materializes a database `User` from the registration data:
    /// generates an ObjectId-based id, signs a fresh JWT access token and
    /// bcrypt-hashes the password.
    ///
    /// Implemented as `From` (instead of the original `Into`) — the
    /// blanket impl still provides `Into`, so existing `data.into()`
    /// call sites keep working.
    impl From<SignUpData> for User {
        fn from(data: SignUpData) -> Self {
            let id = ObjectId::new().unwrap().to_string();
            let access_token = utility::jwt::encode(&id);

            User {
                id,
                username: data.username,
                password: bcrypt::hash(data.password, bcrypt::DEFAULT_COST).unwrap(),
                vk_id: data.vk_id,
                access_token,
                group: data.group,
                role: data.role,
                version: data.version,
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::database::driver;
    use crate::database::models::UserRole;
    use crate::routes::auth::sign_up::schema::Request;
    use crate::routes::auth::sign_up::sign_up;
    use crate::test_env::tests::{
        TestAppStateParams, TestScheduleType, static_app_state, test_app_state, test_env,
    };
    use actix_test::test_app;
    use actix_web::dev::ServiceResponse;
    use actix_web::http::Method;
    use actix_web::http::StatusCode;
    use actix_web::test;

    /// Subset of the sign-up request relevant to the tests; the password
    /// and version fields are filled with fixed values.
    struct SignUpPartial {
        username: String,
        group: String,
        role: UserRole,
        // Whether the test app should load the local schedule fixture
        // (required for group-name validation to kick in).
        load_schedule: bool,
    }

    /// Sends a sign-up request to a freshly built test application.
    async fn sign_up_client(data: SignUpPartial) -> ServiceResponse {
        let app = test_app(
            test_app_state(TestAppStateParams {
                schedule: if data.load_schedule {
                    TestScheduleType::Local
                } else {
                    TestScheduleType::None
                },
            })
            .await,
            sign_up,
        )
        .await;

        let req = test::TestRequest::with_uri("/sign-up")
            .method(Method::POST)
            .set_json(Request {
                username: data.username.clone(),
                password: "example".to_string(),
                group: data.group.clone(),
                role: data.role.clone(),
                version: "1.0.0".to_string(),
            })
            .to_request();

        test::call_service(&app, req).await
    }

    /// A fresh, valid registration must yield 200 OK.
    #[actix_web::test]
    async fn sign_up_valid() {
        // prepare
        test_env();

        let app_state = static_app_state().await;
        driver::users::delete_by_username(&app_state, &"test::sign_up_valid".to_string());

        // test
        let resp = sign_up_client(SignUpPartial {
            username: "test::sign_up_valid".to_string(),
            group: "ИС-214/23".to_string(),
            role: UserRole::Student,
            load_schedule: false,
        })
        .await;

        assert_eq!(resp.status(), StatusCode::OK);
    }

    /// Registering the same username twice must fail the second time.
    #[actix_web::test]
    async fn sign_up_multiple() {
        // prepare
        test_env();

        let app_state = static_app_state().await;
        driver::users::delete_by_username(&app_state, &"test::sign_up_multiple".to_string());

        let create = sign_up_client(SignUpPartial {
            username: "test::sign_up_multiple".to_string(),
            group: "ИС-214/23".to_string(),
            role: UserRole::Student,
            load_schedule: false,
        })
        .await;

        assert_eq!(create.status(), StatusCode::OK);

        let resp = sign_up_client(SignUpPartial {
            username: "test::sign_up_multiple".to_string(),
            group: "ИС-214/23".to_string(),
            role: UserRole::Student,
            load_schedule: false,
        })
        .await;

        assert_eq!(resp.status(), StatusCode::NOT_ACCEPTABLE);
    }

    /// Requesting the Admin role must be rejected.
    #[actix_web::test]
    async fn sign_up_invalid_role() {
        test_env();

        // test
        let resp = sign_up_client(SignUpPartial {
            username: "test::sign_up_invalid_role".to_string(),
            group: "ИС-214/23".to_string(),
            role: UserRole::Admin,
            load_schedule: false,
        })
        .await;

        assert_eq!(resp.status(), StatusCode::NOT_ACCEPTABLE);
    }

    /// An unknown group name must be rejected when a schedule is loaded.
    #[actix_web::test]
    async fn sign_up_invalid_group() {
        test_env();

        // test
        let resp = sign_up_client(SignUpPartial {
            username: "test::sign_up_invalid_group".to_string(),
            group: "invalid_group".to_string(),
            role: UserRole::Student,
            load_schedule: true,
        })
        .await;

        assert_eq!(resp.status(), StatusCode::NOT_ACCEPTABLE);
    }
}

5
src/routes/fcm/mod.rs Normal file
View File

@@ -0,0 +1,5 @@
// FCM (Firebase Cloud Messaging) route handlers, re-exported flat.
mod update_callback;
mod set_token;

pub use update_callback::*;
pub use set_token::*;

114
src/routes/fcm/set_token.rs Normal file
View File

@@ -0,0 +1,114 @@
use crate::app_state::AppState;
use crate::database;
use crate::database::models::FCM;
use crate::extractors::authorized_user::UserExtractor;
use crate::extractors::base::SyncExtractor;
use crate::utility::mutex::{MutexScope, MutexScopeAsync};
use actix_web::{HttpResponse, Responder, patch, web};
use diesel::{RunQueryDsl, SaveChangesDsl};
use firebase_messaging_rs::FCMClient;
use firebase_messaging_rs::topic::{TopicManagementError, TopicManagementSupport};
use serde::Deserialize;
/// Query parameters for `PATCH /set-token`.
#[derive(Debug, Deserialize)]
struct Params {
    /// New FCM device token.
    pub token: String,
}
/// Returns the user's FCM record with the token replaced.
///
/// If the user already has an FCM record, a copy with the new token is
/// returned (persisting it is left to the caller). Otherwise a fresh
/// record with no topics is inserted into the database and returned.
///
/// # Errors
///
/// Propagates the Diesel error if inserting a new record fails.
async fn get_fcm(
    app_state: &web::Data<AppState>,
    user_data: &UserExtractor<true>,
    token: String,
) -> Result<FCM, diesel::result::Error> {
    match user_data.fcm() {
        Some(fcm) => {
            let mut fcm = fcm.clone();
            fcm.token = token;
            Ok(fcm)
        }
        None => {
            let fcm = FCM {
                user_id: user_data.user().id.clone(),
                token,
                topics: vec![],
            };

            // Insert the new record and hand it back on success
            // (replaces the old `match { Ok(_) => Ok(fcm), Err(e) => Err(e) }`).
            app_state
                .database
                .scope(|conn| {
                    diesel::insert_into(database::schema::fcm::table)
                        .values(&fcm)
                        .execute(conn)
                })
                .map(|_| fcm)
        }
    }
}
/// Stores a new FCM token for the authorized user and (re)subscribes it
/// to the default notification topics.
///
/// Always answers 200 OK: failures are logged but intentionally not
/// surfaced to the client, since token registration is best-effort.
#[utoipa::path(responses((status = OK)))]
#[patch("/set-token")]
pub async fn set_token(
    app_state: web::Data<AppState>,
    web::Query(params): web::Query<Params>,
    user_data: SyncExtractor<UserExtractor<true>>,
) -> impl Responder {
    let user_data = user_data.into_inner();

    // If token not changes - exit.
    if let Some(fcm) = user_data.fcm() {
        if fcm.token == params.token {
            return HttpResponse::Ok();
        }
    }

    // Replaces the old `if let Err` + `fcm.ok().unwrap()` sequence with a
    // single match that binds the success value directly.
    let mut fcm = match get_fcm(&app_state, &user_data, params.token.clone()).await {
        Ok(fcm) => fcm,
        Err(e) => {
            eprintln!("Failed to get FCM: {e}");
            return HttpResponse::Ok();
        }
    };

    // Add default topics.
    if !fcm.topics.contains(&Some("common".to_string())) {
        fcm.topics.push(Some("common".to_string()));
    }

    // Subscribe to default topics.
    if let Some(e) = app_state
        .fcm_client
        .as_ref()
        .unwrap()
        .async_scope(
            async |client: &mut FCMClient| -> Result<(), TopicManagementError> {
                let tokens = vec![fcm.token.clone()];

                // `flatten()` skips the `None` entries of the topics column.
                for topic in fcm.topics.iter().flatten() {
                    client
                        .register_tokens_to_topic(topic.clone(), tokens.clone())
                        .await?;
                }

                Ok(())
            },
        )
        .await
        .err()
    {
        eprintln!("Failed to subscribe token to topic: {:?}", e);
        return HttpResponse::Ok();
    }

    // Write updates to db.
    if let Some(e) = app_state
        .database
        .scope(|conn| fcm.save_changes::<FCM>(conn))
        .err()
    {
        eprintln!("Failed to update FCM object: {e}");
    }

    HttpResponse::Ok()
}

View File

@@ -0,0 +1,32 @@
use crate::app_state::AppState;
use crate::database::models::User;
use crate::extractors::base::SyncExtractor;
use crate::utility::mutex::MutexScope;
use actix_web::{HttpResponse, Responder, post, web};
use diesel::SaveChangesDsl;
/// Records the client application version reported after an update.
///
/// Stores the `{version}` path segment on the authorized user's row;
/// answers 500 if the database write fails.
#[utoipa::path(responses(
    (status = OK),
    (status = INTERNAL_SERVER_ERROR)
))]
#[post("/update-callback/{version}")]
async fn update_callback(
    app_state: web::Data<AppState>,
    version: web::Path<String>,
    user: SyncExtractor<User>,
) -> impl Responder {
    let mut user = user.into_inner();
    user.version = version.into_inner();

    match app_state
        .database
        .scope(|conn| user.save_changes::<User>(conn))
    {
        Ok(_) => HttpResponse::Ok(),
        Err(e) => {
            eprintln!("Failed to update user: {}", e);
            HttpResponse::InternalServerError()
        }
    }
}

6
src/routes/mod.rs Normal file
View File

@@ -0,0 +1,6 @@
// Top-level route modules, grouped by API area.
pub mod auth;
pub mod fcm;
pub mod schedule;
mod schema;
pub mod users;
pub mod vk_id;

View File

@@ -0,0 +1,23 @@
use crate::AppState;
use crate::routes::schedule::schema::CacheStatus;
use actix_web::{get, web};
/// Returns the cached-schedule status.
///
/// When no schedule has been parsed yet, a default "update required"
/// status is returned instead.
#[utoipa::path(responses(
    (status = OK, body = CacheStatus),
))]
#[get("/cache-status")]
pub async fn cache_status(app_state: web::Data<AppState>) -> CacheStatus {
    // The guard is a temporary dropped at the end of this statement, so
    // the lock is released before `CacheStatus::from(&app_state)` locks
    // the schedule again (avoids a self-deadlock).
    let has_schedule = app_state.schedule.lock().unwrap().is_some();

    if has_schedule {
        CacheStatus::from(&app_state)
    } else {
        CacheStatus::default()
    }
}

View File

@@ -0,0 +1,96 @@
use self::schema::*;
use crate::AppState;
use crate::database::models::User;
use crate::extractors::base::SyncExtractor;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web};
/// Returns the schedule for the authorized user's group.
#[utoipa::path(responses(
    (status = OK, body = Response),
    (
        status = SERVICE_UNAVAILABLE,
        body = ResponseError<ErrorCode>,
        example = json!({
            "code": "NO_SCHEDULE",
            "message": "Schedule not parsed yet."
        })
    ),
    (
        status = NOT_FOUND,
        body = ResponseError<ErrorCode>,
        example = json!({
            "code": "NOT_FOUND",
            "message": "Required group not found."
        })
    ),
))]
#[get("/group")]
pub async fn group(user: SyncExtractor<User>, app_state: web::Data<AppState>) -> ServiceResponse {
    // Prevent thread lock
    let schedule_lock = app_state.schedule.lock().unwrap();

    match schedule_lock.as_ref() {
        None => ErrorCode::NoSchedule.into_response(),
        Some(schedule) => match schedule.data.groups.get(&user.into_inner().group) {
            None => ErrorCode::NotFound.into_response(),
            // The entry is cloned out of shared state so the lock can drop.
            Some(entry) => Ok(entry.clone().into()).into(),
        },
    }
}
mod schema {
    use schedule_parser::schema::ScheduleEntry;

    use actix_macros::{IntoResponseErrorNamed, StatusCode};
    use chrono::{DateTime, NaiveDateTime, Utc};
    use derive_more::Display;
    use serde::Serialize;
    use utoipa::ToSchema;

    pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;

    /// Successful payload for `GET /group`.
    #[derive(Serialize, ToSchema)]
    #[schema(as = GetGroup::Response)]
    #[serde(rename_all = "camelCase")]
    pub struct Response {
        /// Group schedule.
        pub group: ScheduleEntry,
        /// ## Outdated variable.
        ///
        /// By default, an empty list is returned.
        #[deprecated = "Will be removed in future versions"]
        pub updated: Vec<i32>,
        /// ## Outdated variable.
        ///
        /// By default, the initial date for unix.
        #[deprecated = "Will be removed in future versions"]
        pub updated_at: DateTime<Utc>,
    }

    // Deprecated fields are still populated for wire compatibility.
    #[allow(deprecated)]
    impl From<ScheduleEntry> for Response {
        fn from(group: ScheduleEntry) -> Self {
            Self {
                group,
                updated: Vec::new(),
                updated_at: NaiveDateTime::default().and_utc(),
            }
        }
    }

    /// Error codes for `GET /group`; each variant carries its own status.
    #[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = GroupSchedule::ErrorCode)]
    pub enum ErrorCode {
        /// Schedules have not yet been parsed.
        #[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
        #[display("Schedule not parsed yet.")]
        NoSchedule,
        /// Group not found.
        #[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
        #[display("Required group not found.")]
        NotFound,
    }
}

View File

@@ -0,0 +1,48 @@
use self::schema::*;
use crate::AppState;
use crate::routes::schedule::schema::ErrorCode;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web};
/// Returns the list of known group names, sorted alphabetically.
#[utoipa::path(responses(
    (status = OK, body = Response),
    (status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>),
))]
#[get("/group-names")]
pub async fn group_names(app_state: web::Data<AppState>) -> ServiceResponse {
    // Prevent thread lock
    let schedule_lock = app_state.schedule.lock().unwrap();

    // Both arms already produce a `ServiceResponse`; the old trailing
    // `.into()` after the match was a redundant identity conversion
    // (the sibling `group` handler uses the same pattern without it).
    match schedule_lock.as_ref() {
        None => ErrorCode::NoSchedule.into_response(),
        Some(schedule) => {
            let mut names: Vec<String> = schedule.data.groups.keys().cloned().collect();
            names.sort();

            Ok(names.into()).into()
        }
    }
}
mod schema {
    use crate::routes::schedule::schema::ErrorCode;
    use serde::Serialize;
    use utoipa::ToSchema;

    pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;

    /// Successful payload for `GET /group-names`.
    #[derive(Serialize, ToSchema)]
    #[schema(as = GetGroupNames::Response)]
    pub struct Response {
        /// List of group names sorted in alphabetical order.
        #[schema(examples(json!(["ИС-214/23"])))]
        pub names: Vec<String>,
    }

    impl From<Vec<String>> for Response {
        fn from(names: Vec<String>) -> Self {
            Self { names }
        }
    }
}

View File

@@ -0,0 +1,16 @@
// Schedule API: one module per endpoint, re-exported flat for registration.
mod cache_status;
mod group;
mod group_names;
mod schedule;
mod schema;
mod teacher;
mod teacher_names;
mod update_download_url;

pub use cache_status::*;
pub use group::*;
pub use group_names::*;
pub use schedule::*;
pub use teacher::*;
pub use teacher_names::*;
pub use update_download_url::*;

View File

@@ -0,0 +1,25 @@
use self::schema::*;
use crate::app_state::AppState;
use crate::routes::schedule::schema::{ErrorCode, ScheduleView};
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web};
/// Returns the full parsed schedule (groups and teachers).
#[utoipa::path(responses(
    (status = OK, body = ScheduleView),
    (status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>)
))]
#[get("/")]
pub async fn schedule(app_state: web::Data<AppState>) -> ServiceResponse {
    match ScheduleView::try_from(&app_state) {
        Ok(view) => Ok(view).into(),
        // Matched exhaustively on purpose: a new error variant must be
        // handled here explicitly.
        Err(ErrorCode::NoSchedule) => ErrorCode::NoSchedule.into_response(),
    }
}
mod schema {
    use crate::routes::schedule::schema::{ErrorCode, ScheduleView};

    // Response alias: the success payload is the shared `ScheduleView`.
    pub type ServiceResponse = crate::routes::schema::Response<ScheduleView, ErrorCode>;
}

View File

@@ -0,0 +1,107 @@
use crate::app_state::{AppState, Schedule};
use schedule_parser::schema::ScheduleEntry;
use actix_macros::{IntoResponseErrorNamed, ResponderJson, StatusCode};
use actix_web::web;
use chrono::{DateTime, Duration, Utc};
use derive_more::Display;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use utoipa::ToSchema;
/// Response from schedule server.
///
/// A point-in-time snapshot of the parsed schedule plus cache metadata.
#[derive(Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ScheduleView {
    /// ETag schedules on polytechnic server.
    etag: String,
    /// Schedule update date on polytechnic website.
    uploaded_at: DateTime<Utc>,
    /// Date last downloaded from the Polytechnic server.
    downloaded_at: DateTime<Utc>,
    /// Groups schedule.
    groups: HashMap<String, ScheduleEntry>,
    /// Teachers schedule.
    teachers: HashMap<String, ScheduleEntry>,
}
/// Error codes shared by the schedule endpoints (served as HTTP 503).
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
#[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = ScheduleShared::ErrorCode)]
pub enum ErrorCode {
    /// Schedules not yet parsed.
    #[display("Schedule not parsed yet.")]
    NoSchedule,
}
/// Builds a `ScheduleView` snapshot from the shared app state.
///
/// Clones the entire parsed schedule out of the mutex, so the lock is
/// held only for the duration of the clone.
impl TryFrom<&web::Data<AppState>> for ScheduleView {
    type Error = ErrorCode;

    fn try_from(app_state: &web::Data<AppState>) -> Result<Self, Self::Error> {
        if let Some(schedule) = app_state.schedule.lock().unwrap().clone() {
            Ok(Self {
                etag: schedule.etag,
                uploaded_at: schedule.updated_at,
                downloaded_at: schedule.parsed_at,
                groups: schedule.data.groups,
                teachers: schedule.data.teachers,
            })
        } else {
            Err(ErrorCode::NoSchedule)
        }
    }
}
/// Cached schedule status.
#[derive(Serialize, Deserialize, ToSchema, ResponderJson)]
#[serde(rename_all = "camelCase")]
pub struct CacheStatus {
    /// Schedule hash.
    pub cache_hash: String,
    /// Whether the schedule reference needs to be updated.
    pub cache_update_required: bool,
    /// Last cache update date.
    pub last_cache_update: i64,
    /// Cached schedule update date.
    ///
    /// Determined by the polytechnic's server.
    pub last_schedule_update: i64,
}
/// Placeholder status used when no schedule has been parsed yet:
/// an all-zero hash with `cache_update_required` set.
///
/// Implemented as the standard `Default` trait (instead of an inherent
/// `default()` that shadowed it, clippy::should_implement_trait);
/// existing `CacheStatus::default()` call sites resolve unchanged.
impl Default for CacheStatus {
    fn default() -> Self {
        CacheStatus {
            cache_hash: "0000000000000000000000000000000000000000".to_string(),
            cache_update_required: true,
            last_cache_update: 0,
            last_schedule_update: 0,
        }
    }
}
impl From<&web::Data<AppState>> for CacheStatus {
    /// Builds the status from the currently cached schedule.
    ///
    /// Panics (on the inner `unwrap`) if no schedule is loaded — callers
    /// must check first, as `cache_status` does before falling back to
    /// the default status.
    fn from(value: &web::Data<AppState>) -> Self {
        let schedule_lock = value.schedule.lock().unwrap();
        let schedule = schedule_lock.as_ref().unwrap();

        CacheStatus::from(schedule)
    }
}
impl From<&Schedule> for CacheStatus {
    fn from(value: &Schedule) -> Self {
        Self {
            cache_hash: value.hash(),
            // A cache fetched more than 5 minutes ago is considered stale.
            cache_update_required: (Utc::now() - value.fetched_at) > Duration::minutes(5),
            last_cache_update: value.fetched_at.timestamp(),
            last_schedule_update: value.updated_at.timestamp(),
        }
    }
}

View File

@@ -0,0 +1,97 @@
use self::schema::*;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use crate::AppState;
use actix_web::{get, web};
/// Returns the schedule for the teacher named by the `{name}` path segment.
#[utoipa::path(responses(
    (status = OK, body = Response),
    (
        status = SERVICE_UNAVAILABLE,
        body = ResponseError<ErrorCode>,
        example = json!({
            "code": "NO_SCHEDULE",
            "message": "Schedule not parsed yet."
        })
    ),
    (
        status = NOT_FOUND,
        body = ResponseError<ErrorCode>,
        example = json!({
            "code": "NOT_FOUND",
            "message": "Required teacher not found."
        })
    ),
))]
#[get("/teacher/{name}")]
pub async fn teacher(
    name: web::Path<String>,
    app_state: web::Data<AppState>,
) -> ServiceResponse {
    // Prevent thread lock
    let schedule_lock = app_state.schedule.lock().unwrap();

    match schedule_lock.as_ref() {
        None => ErrorCode::NoSchedule.into_response(),
        Some(schedule) => match schedule.data.teachers.get(&name.into_inner()) {
            None => ErrorCode::NotFound.into_response(),
            // Cloned out of shared state so the lock can be released.
            Some(entry) => Ok(entry.clone().into()).into(),
        },
    }
}
mod schema {
    use schedule_parser::schema::ScheduleEntry;

    use actix_macros::{IntoResponseErrorNamed, StatusCode};
    use chrono::{DateTime, NaiveDateTime, Utc};
    use derive_more::Display;
    use serde::Serialize;
    use utoipa::ToSchema;

    pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;

    /// Successful payload for `GET /teacher/{name}`.
    #[derive(Serialize, ToSchema)]
    #[schema(as = GetTeacher::Response)]
    #[serde(rename_all = "camelCase")]
    pub struct Response {
        /// Teacher's schedule.
        pub teacher: ScheduleEntry,
        /// ## Deprecated variable.
        ///
        /// By default, an empty list is returned.
        #[deprecated = "Will be removed in future versions"]
        pub updated: Vec<i32>,
        /// ## Deprecated variable.
        ///
        /// Defaults to the Unix start date.
        #[deprecated = "Will be removed in future versions"]
        pub updated_at: DateTime<Utc>,
    }

    // Deprecated fields are still populated for wire compatibility.
    #[allow(deprecated)]
    impl From<ScheduleEntry> for Response {
        fn from(teacher: ScheduleEntry) -> Self {
            Self {
                teacher,
                updated: Vec::new(),
                updated_at: NaiveDateTime::default().and_utc(),
            }
        }
    }

    /// Error codes for `GET /teacher/{name}`; each carries its own status.
    #[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = TeacherSchedule::ErrorCode)]
    pub enum ErrorCode {
        /// Schedules have not yet been parsed.
        #[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
        #[display("Schedule not parsed yet.")]
        NoSchedule,
        /// Teacher not found.
        #[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
        #[display("Required teacher not found.")]
        NotFound,
    }
}

View File

@@ -0,0 +1,48 @@
use self::schema::*;
use crate::AppState;
use crate::routes::schedule::schema::ErrorCode;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web};
/// Returns the list of known teacher names, sorted alphabetically.
#[utoipa::path(responses(
    (status = OK, body = Response),
    (status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>),
))]
#[get("/teacher-names")]
pub async fn teacher_names(app_state: web::Data<AppState>) -> ServiceResponse {
    // Prevent thread lock
    let schedule_lock = app_state.schedule.lock().unwrap();

    // Both arms already produce a `ServiceResponse`; the old trailing
    // `.into()` after the match was a redundant identity conversion.
    match schedule_lock.as_ref() {
        None => ErrorCode::NoSchedule.into_response(),
        Some(schedule) => {
            let mut names: Vec<String> = schedule.data.teachers.keys().cloned().collect();
            names.sort();

            Ok(names.into()).into()
        }
    }
}
mod schema {
    use crate::routes::schedule::schema::ErrorCode;
    use serde::Serialize;
    use utoipa::ToSchema;

    pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;

    /// Successful payload for `GET /teacher-names`.
    #[derive(Serialize, ToSchema)]
    #[schema(as = GetTeacherNames::Response)]
    pub struct Response {
        /// List of teacher names sorted alphabetically.
        #[schema(examples(json!(["Хомченко Н.Е."])))]
        pub names: Vec<String>,
    }

    impl From<Vec<String>> for Response {
        fn from(names: Vec<String>) -> Self {
            Self { names }
        }
    }
}

View File

@@ -0,0 +1,140 @@
use self::schema::*;
use crate::AppState;
use crate::app_state::Schedule;
use schedule_parser::parse_xls;
use crate::routes::schedule::schema::CacheStatus;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use crate::xls_downloader::interface::{FetchError, XLSDownloader};
use actix_web::web::Json;
use actix_web::{patch, web};
use chrono::Utc;
/// Updates the schedule download URL and refreshes the cached schedule.
///
/// Steps: validate the host, short-circuit when the URL is unchanged,
/// fetch the file metadata, reject URLs pointing to an older schedule,
/// then download and parse the XLS and replace the cached schedule.
///
/// NOTE(review): the `downloader` and `schedule` guards appear to be
/// `std::sync` mutex guards held across the `.await` points below —
/// worth confirming this cannot stall the async executor under load.
#[utoipa::path(responses(
    (status = OK, body = CacheStatus),
    (status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>),
))]
#[patch("/update-download-url")]
pub async fn update_download_url(
    data: Json<Request>,
    app_state: web::Data<AppState>,
) -> ServiceResponse {
    // Only the polytechnic's own host is accepted as a schedule source.
    if !data.url.starts_with("https://politehnikum-eng.ru/") {
        return ErrorCode::NonWhitelistedHost.into_response();
    }

    let mut downloader = app_state.downloader.lock().unwrap();

    // Same URL as before: nothing to refresh, report current cache state.
    if let Some(url) = &downloader.url {
        if url.eq(&data.url) {
            return Ok(CacheStatus::from(&app_state)).into();
        }
    }

    match downloader.set_url(data.url.clone()).await {
        Ok(fetch_result) => {
            let mut schedule = app_state.schedule.lock().unwrap();

            // Refuse files older than the schedule we already have.
            if schedule.is_some()
                && fetch_result.uploaded_at < schedule.as_ref().unwrap().updated_at
            {
                return ErrorCode::OutdatedSchedule.into_response();
            }

            match downloader.fetch(false).await {
                Ok(download_result) => match parse_xls(&download_result.data.unwrap()) {
                    Ok(data) => {
                        *schedule = Some(Schedule {
                            etag: download_result.etag,
                            fetched_at: download_result.requested_at,
                            updated_at: download_result.uploaded_at,
                            parsed_at: Utc::now(),
                            data,
                        });

                        Ok(CacheStatus::from(schedule.as_ref().unwrap())).into()
                    }
                    Err(error) => {
                        sentry::capture_error(&error);
                        ErrorCode::InvalidSchedule(error).into_response()
                    }
                },
                Err(error) => {
                    // Only unexpected transport failures go to Sentry.
                    if let FetchError::Unknown(error) = &error {
                        sentry::capture_error(&error);
                    }

                    ErrorCode::DownloadFailed(error).into_response()
                }
            }
        }
        Err(error) => {
            if let FetchError::Unknown(error) = &error {
                sentry::capture_error(&error);
            }

            ErrorCode::FetchFailed(error).into_response()
        }
    }
}
mod schema {
    use schedule_parser::schema::ParseError;

    use crate::routes::schedule::schema::CacheStatus;
    use actix_macros::{IntoResponseErrorNamed, StatusCode};
    use derive_more::Display;
    use serde::{Deserialize, Serialize, Serializer};
    use utoipa::ToSchema;

    use crate::xls_downloader::interface::FetchError;

    pub type ServiceResponse = crate::routes::schema::Response<CacheStatus, ErrorCode>;

    /// Request body: the new schedule download URL.
    #[derive(Serialize, Deserialize, ToSchema)]
    pub struct Request {
        /// Schedule link.
        pub url: String,
    }

    /// Error codes for `PATCH /update-download-url` (served as HTTP 406).
    #[derive(Clone, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
    #[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
    #[schema(as = SetDownloadUrl::ErrorCode)]
    pub enum ErrorCode {
        /// Transferred link with host different from politehnikum-eng.ru.
        #[display("URL with unknown host provided. Provide url with 'politehnikum-eng.ru' host.")]
        NonWhitelistedHost,
        /// Failed to retrieve file metadata.
        #[display("Unable to retrieve metadata from the specified URL: {_0}")]
        FetchFailed(FetchError),
        /// Failed to download the file.
        #[display("Unable to retrieve data from the specified URL: {_0}")]
        DownloadFailed(FetchError),
        /// The link leads to an outdated schedule.
        ///
        /// An outdated schedule refers to a schedule that was published earlier
        /// than is currently available.
        #[display("The schedule is older than it already is.")]
        OutdatedSchedule,
        /// Failed to parse the schedule.
        #[display("{_0}")]
        InvalidSchedule(ParseError),
    }

    /// Manual `Serialize`: variants carry error payloads, but the wire
    /// format is just the SCREAMING_SNAKE_CASE code string.
    impl Serialize for ErrorCode {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            match self {
                ErrorCode::NonWhitelistedHost => serializer.serialize_str("NON_WHITELISTED_HOST"),
                ErrorCode::FetchFailed(_) => serializer.serialize_str("FETCH_FAILED"),
                ErrorCode::DownloadFailed(_) => serializer.serialize_str("DOWNLOAD_FAILED"),
                ErrorCode::OutdatedSchedule => serializer.serialize_str("OUTDATED_SCHEDULE"),
                ErrorCode::InvalidSchedule(_) => serializer.serialize_str("INVALID_SCHEDULE"),
            }
        }
    }
}

174
src/routes/schema.rs Normal file
View File

@@ -0,0 +1,174 @@
use actix_web::body::EitherBody;
use actix_web::error::JsonPayloadError;
use actix_web::http::StatusCode;
use actix_web::{HttpRequest, HttpResponse, Responder};
use serde::{Serialize, Serializer};
use std::convert::Into;
use utoipa::PartialSchema;
/// JSON service response wrapping either a success payload `T` or an error `E`.
pub struct Response<T, E>(pub Result<T, E>)
where
    T: Serialize + PartialSchema,
    E: Serialize + PartialSchema + Clone + PartialStatusCode;

/// Maps an error value to the HTTP status code it should be served with.
pub trait PartialStatusCode {
    fn status_code(&self) -> StatusCode;
}

/// Transform Response<T, E> into Result<T, E>
///
/// NOTE: written as `Into` rather than the usually preferred `From` because
/// the orphan rule rejects `impl From<Response<T, E>> for Result<T, E>`
/// (the foreign `Result`'s generic parameters are uncovered).
impl<T, E> Into<Result<T, E>> for Response<T, E>
where
    T: Serialize + PartialSchema,
    E: Serialize + PartialSchema + Clone + PartialStatusCode,
{
    fn into(self) -> Result<T, E> {
        self.0
    }
}
/// Wrap a plain `Result<T, E>` into a [Response].
impl<T, E> From<Result<T, E>> for Response<T, E>
where
    T: Serialize + PartialSchema,
    E: Serialize + PartialSchema + Clone + PartialStatusCode,
{
    fn from(result: Result<T, E>) -> Self {
        Self(result)
    }
}
/// Serialize Response<T, E>
///
/// `Ok` serializes the payload as-is; `Err` serializes a [ResponseError]
/// envelope built from a clone of the error. `serialize_some` is transparent
/// in JSON, so no extra nesting is introduced.
impl<T, E> Serialize for Response<T, E>
where
    T: Serialize + PartialSchema,
    E: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<E>>,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match &self.0 {
            Ok(ok) => serializer.serialize_some::<T>(&ok),
            Err(err) => serializer
                .serialize_some::<ResponseError<E>>(&ResponseError::<E>::from(err.clone().into())),
        }
    }
}

/// Transform Response<T, E> to HttpResponse<String>
impl<T, E> Responder for Response<T, E>
where
    T: Serialize + PartialSchema,
    E: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<E>>,
{
    type Body = EitherBody<String>;

    // Serializes self to JSON; status is 200 for Ok, the error's own
    // status_code() for Err. Serialization failures are converted into an
    // actix error response via JsonPayloadError::Serialize.
    fn respond_to(self, _: &HttpRequest) -> HttpResponse<Self::Body> {
        match serde_json::to_string(&self) {
            Ok(body) => {
                let code = match &self.0 {
                    Ok(_) => StatusCode::OK,
                    Err(e) => e.status_code(),
                };

                match HttpResponse::build(code)
                    .content_type(mime::APPLICATION_JSON)
                    .message_body(body)
                {
                    Ok(res) => res.map_into_left_body(),
                    Err(err) => HttpResponse::from_error(err).map_into_right_body(),
                }
            }
            Err(err) => {
                HttpResponse::from_error(JsonPayloadError::Serialize(err)).map_into_right_body()
            }
        }
    }
}

/// ResponseError<T>
///
/// Field `message` is optional for backwards compatibility with Android App, that produces error if new fields will be added to JSON response.
#[derive(Serialize, utoipa::ToSchema)]
pub struct ResponseError<T: Serialize + PartialSchema> {
    pub code: T,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}

/// Convenience: turn an error value directly into an error [Response].
pub trait IntoResponseAsError<T>
where
    T: Serialize + PartialSchema,
    Self: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<Self>>,
{
    fn into_response(self) -> Response<T, Self> {
        Response(Err(self))
    }
}
pub mod user {
    use crate::database::models::{User, UserRole};
    use actix_macros::ResponderJson;
    use serde::Serialize;

    //noinspection SpellCheckingInspection
    /// User representation that hides sensitive fields such as the password
    /// hash or the FCM token.
    #[derive(Serialize, utoipa::ToSchema, ResponderJson)]
    #[serde(rename_all = "camelCase")]
    pub struct UserResponse {
        /// UUID
        #[schema(examples("67dcc9a9507b0000772744a2"))]
        id: String,
        /// Username
        #[schema(examples("n08i40k"))]
        username: String,
        /// Group
        #[schema(examples("ИС-214/23"))]
        group: String,
        /// Role
        role: UserRole,
        /// Identifier of the linked VK account
        #[schema(examples(498094647, json!(null)))]
        vk_id: Option<i32>,
        /// JWT access token
        #[schema(examples(
            "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpZCI6IjY3ZGNjOWE5NTA3YjAwMDA3NzI3NDRhMiIsImlhdCI6IjE3NDMxMDgwOTkiLCJleHAiOiIxODY5MjUyMDk5In0.rMgXRb3JbT9AvLK4eiY9HMB5LxgUudkpQyoWKOypZFY"
        ))]
        access_token: String,
    }

    /// Create UserResponse from User ref.
    impl From<&User> for UserResponse {
        fn from(user: &User) -> Self {
            UserResponse {
                id: user.id.clone(),
                username: user.username.clone(),
                group: user.group.clone(),
                role: user.role.clone(),
                // Option<i32> is Copy — no clone needed (was `vk_id.clone()`).
                vk_id: user.vk_id,
                access_token: user.access_token.clone(),
            }
        }
    }

    /// Transform User to UserResponse.
    impl From<User> for UserResponse {
        fn from(user: User) -> Self {
            UserResponse {
                id: user.id,
                username: user.username,
                group: user.group,
                role: user.role,
                vk_id: user.vk_id,
                access_token: user.access_token,
            }
        }
    }
}

View File

@@ -0,0 +1,85 @@
use self::schema::*;
use crate::app_state::AppState;
use crate::database::driver::users::UserSave;
use crate::database::models::User;
use crate::extractors::base::SyncExtractor;
use crate::routes::schema::IntoResponseAsError;
use crate::utility::mutex::MutexScope;
use actix_web::{post, web};
/// Changes the authenticated user's group after validating that the target
/// group exists in the currently parsed schedule.
#[utoipa::path(responses((status = OK)))]
#[post("/change-group")]
pub async fn change_group(
    app_state: web::Data<AppState>,
    user: SyncExtractor<User>,
    data: web::Json<Request>,
) -> ServiceResponse {
    let mut user = user.into_inner();

    // Reject no-op updates early.
    if user.group == data.group {
        return ErrorCode::SameGroup.into_response();
    }

    // Validate the target group against the schedule inside the mutex scope.
    if let Some(e) = app_state.schedule.scope(|schedule| match schedule {
        None => Some(ErrorCode::NoSchedule),
        Some(schedule) if schedule.data.groups.contains_key(&data.group) => None,
        Some(_) => Some(ErrorCode::NotFound),
    }) {
        return e.into_response();
    }

    user.group = data.into_inner().group;

    // Persist; storage failures are logged and reported as an internal error.
    // (Was `if let Some(e) = ….err()` — `if let Err(e)` is the idiomatic form.)
    if let Err(e) = user.save(&app_state) {
        eprintln!("Failed to update user: {e}");
        return ErrorCode::InternalServerError.into_response();
    }

    Ok(()).into()
}
mod schema {
    use actix_macros::{IntoResponseErrorNamed, StatusCode};
    use derive_more::Display;
    use serde::{Deserialize, Serialize};
    use utoipa::ToSchema;

    /// Response type of the change-group endpoint (empty body on success).
    pub type ServiceResponse = crate::routes::schema::Response<(), ErrorCode>;

    #[derive(Serialize, Deserialize, ToSchema)]
    #[schema(as = ChangeGroup::Request)]
    pub struct Request {
        /// Group name.
        pub group: String,
    }

    /// Errors of the change-group endpoint.
    /// Default status is 409 CONFLICT; variants override it where noted.
    #[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = ChangeGroup::ErrorCode)]
    #[status_code = "actix_web::http::StatusCode::CONFLICT"]
    pub enum ErrorCode {
        /// Schedules have not yet been received.
        #[display("Schedule not parsed yet.")]
        #[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
        NoSchedule,
        /// Passed the same group name that is currently there.
        #[display("Passed the same group name as it is at the moment.")]
        SameGroup,
        /// The required group does not exist.
        #[display("The required group does not exist.")]
        #[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
        NotFound,
        /// Server-side error.
        #[display("Internal server error.")]
        #[status_code = "actix_web::http::StatusCode::INTERNAL_SERVER_ERROR"]
        InternalServerError,
    }
}

View File

@@ -0,0 +1,70 @@
use self::schema::*;
use crate::app_state::AppState;
use crate::database::driver;
use crate::database::driver::users::UserSave;
use crate::database::models::User;
use crate::extractors::base::SyncExtractor;
use crate::routes::schema::IntoResponseAsError;
use actix_web::{post, web};
/// Changes the authenticated user's username, enforcing uniqueness.
#[utoipa::path(responses((status = OK)))]
#[post("/change-username")]
pub async fn change_username(
    app_state: web::Data<AppState>,
    user: SyncExtractor<User>,
    data: web::Json<Request>,
) -> ServiceResponse {
    let mut user = user.into_inner();

    // Reject no-op renames early.
    if user.username == data.username {
        return ErrorCode::SameUsername.into_response();
    }

    // Usernames must be unique: an existing record means a conflict.
    if driver::users::get_by_username(&app_state, &data.username).is_ok() {
        return ErrorCode::AlreadyExists.into_response();
    }

    user.username = data.into_inner().username;

    // Persist; storage failures are logged and reported as an internal error.
    // (Was `if let Some(e) = ….err()` — `if let Err(e)` is the idiomatic form.)
    if let Err(e) = user.save(&app_state) {
        eprintln!("Failed to update user: {e}");
        return ErrorCode::InternalServerError.into_response();
    }

    Ok(()).into()
}
mod schema {
    use actix_macros::{IntoResponseErrorNamed, StatusCode};
    use derive_more::Display;
    use serde::{Deserialize, Serialize};
    use utoipa::ToSchema;

    /// Response type of the change-username endpoint (empty body on success).
    pub type ServiceResponse = crate::routes::schema::Response<(), ErrorCode>;

    #[derive(Serialize, Deserialize, ToSchema)]
    #[schema(as = ChangeUsername::Request)]
    pub struct Request {
        /// User name.
        pub username: String,
    }

    /// Errors of the change-username endpoint.
    /// Default status is 409 CONFLICT; variants override it where noted.
    #[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = ChangeUsername::ErrorCode)]
    #[status_code = "actix_web::http::StatusCode::CONFLICT"]
    pub enum ErrorCode {
        /// The same name that is currently present is passed.
        #[display("Passed the same name as it is at the moment.")]
        SameUsername,
        /// A user with this name already exists.
        #[display("A user with this name already exists.")]
        AlreadyExists,
        /// Server-side error.
        #[display("Internal server error.")]
        #[status_code = "actix_web::http::StatusCode::INTERNAL_SERVER_ERROR"]
        InternalServerError,
    }
}

10
src/routes/users/me.rs Normal file
View File

@@ -0,0 +1,10 @@
use crate::database::models::User;
use crate::extractors::base::SyncExtractor;
use actix_web::get;
use crate::routes::schema::user::UserResponse;
/// Returns the authenticated user's profile, converted to [UserResponse]
/// (which omits sensitive fields).
#[utoipa::path(responses((status = OK, body = UserResponse)))]
#[get("/me")]
pub async fn me(user: SyncExtractor<User>) -> UserResponse {
    user.into_inner().into()
}

7
src/routes/users/mod.rs Normal file
View File

@@ -0,0 +1,7 @@
mod change_group;
mod change_username;
mod me;
pub use change_group::*;
pub use change_username::*;
pub use me::*;

3
src/routes/vk_id/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
mod oauth;
pub use oauth::*;

117
src/routes/vk_id/oauth.rs Normal file
View File

@@ -0,0 +1,117 @@
use self::schema::*;
use crate::app_state::AppState;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{post, web};
use serde::Deserialize;
use std::collections::HashMap;
use uuid::Uuid;
#[allow(dead_code)]
/// Token payload returned by VK ID's OAuth2 token endpoint.
/// Fields mirror the VK response JSON; unused ones are kept for completeness.
#[derive(Deserialize)]
struct VkIdAuthResponse {
    refresh_token: String,
    access_token: String,
    id_token: String,
    token_type: String,
    expires_in: i32,
    user_id: i32,
    state: String,
    scope: String,
}

/// Exchanges a VK ID authorization code for a token via the
/// `authorization_code` grant. All failure modes collapse to `VkIdError`.
#[utoipa::path(responses(
    (status = OK, body = Response),
    (
        status = NOT_ACCEPTABLE,
        body = ResponseError<ErrorCode>,
        example = json!({
            "code": "VK_ID_ERROR",
            "message": "VK server returned an error"
        })
    ),
))]
#[post("/oauth")]
async fn oauth(data: web::Json<Request>, app_state: web::Data<AppState>) -> ServiceResponse {
    let data = data.into_inner();

    // Fresh one-shot `state` value for this token-exchange request.
    let state = Uuid::new_v4().simple().to_string();

    let vk_id = &app_state.vk_id;
    let client_id = vk_id.client_id.clone().to_string();

    // Form parameters for the authorization_code grant exchange.
    let mut params = HashMap::new();
    params.insert("grant_type", "authorization_code");
    params.insert("client_id", client_id.as_str());
    params.insert("state", state.as_str());
    params.insert("code_verifier", data.code_verifier.as_str());
    params.insert("code", data.code.as_str());
    params.insert("device_id", data.device_id.as_str());
    params.insert("redirect_uri", vk_id.redirect_url.as_str());

    let client = reqwest::Client::new();

    match client
        .post("https://id.vk.com/oauth2/auth")
        .form(&params)
        .send()
        .await
    {
        Ok(res) => {
            if !res.status().is_success() {
                return ErrorCode::VkIdError.into_response();
            }

            match res.json::<VkIdAuthResponse>().await {
                // NOTE(review): the response's `access_token` field is filled
                // with VK's `id_token`, not its `access_token` — presumably
                // intentional; confirm against the client apps.
                Ok(auth_data) =>
                    Ok(Response {
                        access_token: auth_data.id_token,
                    }).into(),
                Err(error) => {
                    sentry::capture_error(&error);
                    ErrorCode::VkIdError.into_response()
                }
            }
        }
        Err(_) => ErrorCode::VkIdError.into_response(),
    }
}
mod schema {
    use actix_macros::{IntoResponseErrorNamed, StatusCode};
    use derive_more::Display;
    use serde::{Deserialize, Serialize};
    use utoipa::ToSchema;

    /// Response type of the VK ID OAuth endpoint.
    pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;

    #[derive(Deserialize, ToSchema)]
    #[serde(rename_all = "camelCase")]
    #[schema(as = VkIdOAuth::Request)]
    pub struct Request {
        /// Confirmation code (authorization_code).
        pub code: String,
        /// Parameter to protect transmitted data.
        pub code_verifier: String,
        /// Device ID.
        pub device_id: String,
    }

    #[derive(Serialize, ToSchema)]
    #[serde(rename_all = "camelCase")]
    #[schema(as = VkIdOAuth::Response)]
    pub struct Response {
        /// ID token.
        pub access_token: String,
    }

    /// Errors of the VK ID OAuth endpoint (served as 406).
    #[derive(Clone, Serialize, ToSchema, IntoResponseErrorNamed, StatusCode, Display)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = VkIdOAuth::ErrorCode)]
    #[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
    pub enum ErrorCode {
        /// VK server returned an error.
        #[display("VK server returned an error")]
        VkIdError,
    }
}

58
src/test_env.rs Normal file
View File

@@ -0,0 +1,58 @@
#[cfg(test)]
pub(crate) mod tests {
    use crate::app_state::{AppState, Schedule, app_state};
    use schedule_parser::test_utils::test_result;
    use crate::utility::mutex::MutexScope;
    use actix_web::web;
    use std::default::Default;
    use tokio::sync::OnceCell;

    /// Loads test environment variables from `.env.test`.
    pub fn test_env() {
        dotenvy::from_path(".env.test").expect("Failed to load test environment file");
    }

    /// Schedule fixture to preload into a test app state.
    // Derived Default with `#[default]` replaces the former manual
    // `impl Default for TestAppStateParams`.
    #[derive(Default)]
    pub enum TestScheduleType {
        /// No schedule present.
        #[default]
        None,
        /// Locally bundled schedule produced by `test_result()`.
        Local,
    }

    /// Parameters for [test_app_state].
    #[derive(Default)]
    pub struct TestAppStateParams {
        pub schedule: TestScheduleType,
    }

    /// Builds an application state and installs the requested schedule fixture.
    pub async fn test_app_state(params: TestAppStateParams) -> web::Data<AppState> {
        let state = app_state().await;

        state.schedule.scope(|schedule| {
            *schedule = match params.schedule {
                TestScheduleType::None => None,
                TestScheduleType::Local => Some(Schedule {
                    etag: "".to_string(),
                    fetched_at: Default::default(),
                    updated_at: Default::default(),
                    parsed_at: Default::default(),
                    data: test_result().unwrap(),
                }),
            }
        });

        state.clone()
    }

    /// Process-wide shared app state (default params), built exactly once.
    pub async fn static_app_state() -> web::Data<AppState> {
        static STATE: OnceCell<web::Data<AppState>> = OnceCell::const_new();

        STATE
            .get_or_init(|| test_app_state(Default::default()))
            .await
            .clone()
    }
}

19
src/utility/error.rs Normal file
View File

@@ -0,0 +1,19 @@
use std::fmt::{Write};
use std::fmt::Display;
use serde::{Deserialize, Serialize};
/// Server response to errors within Middleware.
#[derive(Serialize, Deserialize)]
pub struct ResponseErrorMessage<T: Display> {
    /// Machine-readable error code.
    code: T,
    /// Human-readable rendering of `code` (its `Display` output).
    message: String,
}

impl<T: Display + Serialize> ResponseErrorMessage<T> {
    /// Builds a message whose `message` field is the `Display` form of `code`.
    pub fn new(code: T) -> Self {
        // `to_string` goes through `Display`; replaces the former manual
        // `fmt::Write` buffer + `write!(..).unwrap()` sequence.
        let message = code.to_string();
        Self { code, message }
    }
}

38
src/utility/hasher.rs Normal file
View File

@@ -0,0 +1,38 @@
use sha1::Digest;
use std::hash::Hasher;
/// Hasher returning a hex-encoded hash from any algorithm implementing `Digest`.
pub struct DigestHasher<D: Digest> {
    digest: D,
}

impl<D> DigestHasher<D>
where
    D: Digest,
{
    /// Obtain hash.
    pub fn finalize(self) -> String {
        // `.0` reaches into the digest output's underlying byte array —
        // NOTE(review): this depends on the `digest` crate's output type;
        // confirm against the pinned `sha1`/`digest` versions.
        hex::encode(self.digest.finalize().0)
    }
}

impl<D> From<D> for DigestHasher<D>
where
    D: Digest,
{
    /// Creating a hash from an algorithm implementing Digest.
    fn from(digest: D) -> Self {
        DigestHasher { digest }
    }
}

impl<D: Digest> Hasher for DigestHasher<D> {
    /// Stopper to prevent calling the standard Hasher result.
    /// Use [DigestHasher::finalize] instead — a full digest does not fit
    /// into the `u64` this trait method must return.
    fn finish(&self) -> u64 {
        unimplemented!("Do not call finish()");
    }

    /// Feed bytes into the underlying digest.
    fn write(&mut self, bytes: &[u8]) {
        self.digest.update(bytes);
    }
}

169
src/utility/jwt.rs Normal file
View File

@@ -0,0 +1,169 @@
use chrono::Duration;
use chrono::Utc;
use jsonwebtoken::errors::ErrorKind;
use jsonwebtoken::{Algorithm, DecodingKey, EncodingKey, Header, Validation, decode};
use serde::{Deserialize, Serialize};
use serde_with::DisplayFromStr;
use serde_with::serde_as;
use std::env;
use std::mem::discriminant;
use std::sync::LazyLock;
/// Key for token verification.
/// Lazily built from `JWT_SECRET`; panics on first use if the variable is missing.
static DECODING_KEY: LazyLock<DecodingKey> = LazyLock::new(|| {
    let secret = env::var("JWT_SECRET").expect("JWT_SECRET must be set");
    DecodingKey::from_secret(secret.as_bytes())
});

/// Key for creating a signed token.
static ENCODING_KEY: LazyLock<EncodingKey> = LazyLock::new(|| {
    let secret = env::var("JWT_SECRET").expect("JWT_SECRET must be set");
    EncodingKey::from_secret(secret.as_bytes())
});

/// Token verification errors.
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error {
    /// The token has a different signature.
    InvalidSignature,
    /// Token reading error.
    InvalidToken(ErrorKind),
    /// Token expired.
    Expired,
}

/// Equality by variant only: `InvalidToken` payloads are ignored, so any two
/// `InvalidToken(_)` values compare equal (relied upon by the tests below).
impl PartialEq for Error {
    fn eq(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}

/// The data the token holds.
///
/// `iat`/`exp` are (de)serialized as strings (`DisplayFromStr`), matching the
/// token format produced by [encode].
#[serde_as]
#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    /// User account UUID.
    id: String,
    /// Token creation date.
    #[serde_as(as = "DisplayFromStr")]
    iat: u64,
    /// Token expiry date.
    #[serde_as(as = "DisplayFromStr")]
    exp: u64,
}

/// Token signing algorithm.
pub(crate) const DEFAULT_ALGORITHM: Algorithm = Algorithm::HS256;
/// Checking the token and extracting the UUID of the user account from it.
pub fn verify_and_decode(token: &String) -> Result<String, Error> {
let mut validation = Validation::new(DEFAULT_ALGORITHM);
validation.required_spec_claims.remove("exp");
validation.validate_exp = false;
let result = decode::<Claims>(&token, &*DECODING_KEY, &validation);
match result {
Ok(token_data) => {
if token_data.claims.exp < Utc::now().timestamp().unsigned_abs() {
Err(Error::Expired)
} else {
Ok(token_data.claims.id)
}
}
Err(err) => Err(match err.into_kind() {
ErrorKind::InvalidSignature => Error::InvalidSignature,
ErrorKind::ExpiredSignature => Error::Expired,
kind => Error::InvalidToken(kind),
}),
}
}
/// Creates a signed user token, valid for roughly four years from now.
// Takes `&str` instead of `&String` (clippy::ptr_arg); existing `&String`
// call sites keep working via deref coercion.
pub fn encode(id: &str) -> String {
    let header = Header {
        typ: Some(String::from("JWT")),
        ..Default::default()
    };

    let iat = Utc::now();
    let exp = iat + Duration::days(365 * 4);

    let claims = Claims {
        id: id.to_owned(),
        iat: iat.timestamp().unsigned_abs(),
        exp: exp.timestamp().unsigned_abs(),
    };

    // Signing in-memory claims with a preloaded key is not expected to fail;
    // a failure here indicates a broken invariant, hence unwrap.
    jsonwebtoken::encode(&header, &claims, &*ENCODING_KEY).unwrap()
}
#[cfg(test)]
mod tests {
    //! Tests use fixed tokens signed with the `.env.test` JWT secret.

    use super::*;
    use crate::test_env::tests::test_env;

    /// Encoding always yields a non-empty token.
    #[test]
    fn test_encode() {
        test_env();

        assert_eq!(encode(&"test".to_string()).is_empty(), false);
    }

    /// An empty string is not a parseable JWT.
    #[test]
    fn test_decode_invalid_token() {
        test_env();

        let token = "".to_string();
        let result = verify_and_decode(&token);

        assert!(result.is_err());
        assert_eq!(
            result.err().unwrap(),
            Error::InvalidToken(ErrorKind::InvalidToken)
        );
    }

    //noinspection SpellCheckingInspection
    /// Token signed with a different secret is rejected.
    #[test]
    fn test_decode_invalid_signature() {
        test_env();

        let token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOiIxNjE2NTI2Mzc2IiwiaWF0IjoiMTQ5MDM4MjM3NiIsImlkIjoiNjdkY2M5YTk1MDdiMDAwMDc3Mjc0NGEyIn0.Qc2LbMJTvl2hWzDM2XyQv4m9lIqR84COAESQAieUxz8".to_string();
        let result = verify_and_decode(&token);

        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), Error::InvalidSignature);
    }

    //noinspection SpellCheckingInspection
    /// Token with `exp` in the past is reported as expired.
    #[test]
    fn test_decode_expired() {
        test_env();

        let token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjY3ZGNjOWE5NTA3YjAwMDA3NzI3NDRhMiIsImlhdCI6IjAiLCJleHAiOiIwIn0.GBsVYvnZIfHXt00t-qmAdUMyHSyWOBtC0Mrxwg1HQOM".to_string();
        let result = verify_and_decode(&token);

        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), Error::Expired);
    }

    //noinspection SpellCheckingInspection
    /// Well-formed, correctly signed, unexpired token decodes successfully.
    #[test]
    fn test_decode_ok() {
        test_env();

        let token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjY3ZGNjOWE5NTA3YjAwMDA3NzI3NDRhMiIsImlhdCI6Ijk5OTk5OTk5OTkiLCJleHAiOiI5OTk5OTk5OTk5In0.o1vN-ze5iaJrnlHqe7WARXMBhhzjxTjTKkjlmTGEnOI".to_string();
        let result = verify_and_decode(&token);

        assert!(result.is_ok());
    }
}

4
src/utility/mod.rs Normal file
View File

@@ -0,0 +1,4 @@
pub mod jwt;
pub mod error;
pub mod hasher;
pub mod mutex;

77
src/utility/mutex.rs Normal file
View File

@@ -0,0 +1,77 @@
use std::ops::DerefMut;
use std::sync::Mutex;
pub trait MutexScope<T, ScopeFn, ScopeFnOutput>
where
    ScopeFn: FnOnce(&mut T) -> ScopeFnOutput,
{
    /// Runs `f` with exclusive access to the value guarded by the mutex,
    /// sparing the caller the explicit lock/unlock dance.
    ///
    /// # Arguments
    ///
    /// * `f`: Function (usually a closure) receiving a mutable reference to
    ///   the value stored in the mutex.
    ///
    /// returns: whatever `f` returns.
    ///
    /// # Examples
    ///
    /// ```
    /// let mtx: Mutex<i32> = Mutex::new(10);
    ///
    /// let res = mtx.scope(|x| { *x = *x * 2; *x });
    /// assert_eq!(res, *mtx.lock().unwrap());
    /// ```
    fn scope(&self, f: ScopeFn) -> ScopeFnOutput;
}

impl<T, ScopeFn, ScopeFnOutput> MutexScope<T, ScopeFn, ScopeFnOutput> for Mutex<T>
where
    ScopeFn: FnOnce(&mut T) -> ScopeFnOutput,
{
    fn scope(&self, f: ScopeFn) -> ScopeFnOutput {
        // Panics on a poisoned mutex, matching a plain `lock().unwrap()`.
        let mut guard = self.lock().unwrap();
        f(&mut *guard)
    }
}
pub trait MutexScopeAsync<T> {
    /// ## Asynchronous variant of [MutexScope::scope][MutexScope::scope].
    ///
    /// Replaces manually creating a mutex lock to perform operations on the data it manages.
    ///
    /// # Arguments
    ///
    /// * `f`: Asynchronous function (mostly lambda) to which a reference to the mutable object stored in the mutex will be passed.
    ///
    /// returns: Return value of `f` function.
    ///
    /// # Examples
    ///
    /// ```
    /// let mtx: Mutex<i32> = Mutex::new(10);
    ///
    /// let res = mtx.async_scope(async |x| { *x = *x * 2; *x }).await;
    /// assert_eq!(res, *mtx.lock().unwrap());
    /// ```
    async fn async_scope<'a, F, FnFut, FnOut>(&'a self, f: F) -> FnOut
    where
        FnFut: Future<Output = FnOut>,
        F: FnOnce(&'a mut T) -> FnFut,
        T: 'a;
}

impl<T> MutexScopeAsync<T> for Mutex<T> {
    async fn async_scope<'a, F, FnFut, FnOut>(&'a self, f: F) -> FnOut
    where
        FnFut: Future<Output = FnOut>,
        F: FnOnce(&'a mut T) -> FnFut,
        T: 'a,
    {
        // WARNING(review): the std `MutexGuard` below is held across the
        // `.await`; if the future suspends while another task on the same
        // executor tries to lock this mutex, this can deadlock. Consider an
        // async-aware mutex if that situation is possible.
        let mut guard = self.lock().unwrap();

        // SAFETY: the borrow of `guard` is extended to `'a` (the borrow of
        // `self`). `guard` is dropped only at the end of this function, i.e.
        // after the last use of `ptr` inside `f(ptr).await`, so the reference
        // never dangles. Exclusivity relies on the lock being held for the
        // entire call.
        let ptr: &'a mut T = unsafe { &mut *(guard.deref_mut() as *mut _) };
        f(ptr).await
    }
}

View File

@@ -0,0 +1,216 @@
use crate::xls_downloader::interface::{FetchError, FetchOk, FetchResult, XLSDownloader};
use chrono::{DateTime, Utc};
use std::env;
use std::sync::Arc;
/// Schedule XLS downloader backed by `reqwest`.
pub struct BasicXlsDownloader {
    // Currently configured schedule URL; `None` until `set_url` succeeds.
    pub url: Option<String>,
    // Value sent in the `User-Agent` header (read from `REQWEST_USER_AGENT`).
    user_agent: String,
}
async fn fetch_specified(url: &String, user_agent: &String, head: bool) -> FetchResult {
let client = reqwest::Client::new();
let response = if head {
client.head(url)
} else {
client.get(url)
}
.header("User-Agent", user_agent.clone())
.send()
.await;
match response {
Ok(r) => {
if r.status().as_u16() != 200 {
return Err(FetchError::BadStatusCode(r.status().as_u16()));
}
let headers = r.headers();
let content_type = headers.get("Content-Type");
let etag = headers.get("etag");
let last_modified = headers.get("last-modified");
let date = headers.get("date");
if content_type.is_none() {
Err(FetchError::BadHeaders("Content-Type".to_string()))
} else if etag.is_none() {
Err(FetchError::BadHeaders("ETag".to_string()))
} else if last_modified.is_none() {
Err(FetchError::BadHeaders("Last-Modified".to_string()))
} else if date.is_none() {
Err(FetchError::BadHeaders("Date".to_string()))
} else if content_type.unwrap() != "application/vnd.ms-excel" {
Err(FetchError::BadContentType(
content_type.unwrap().to_str().unwrap().to_string(),
))
} else {
let etag = etag.unwrap().to_str().unwrap().to_string();
let last_modified =
DateTime::parse_from_rfc2822(&last_modified.unwrap().to_str().unwrap())
.unwrap()
.with_timezone(&Utc);
Ok(if head {
FetchOk::head(etag, last_modified)
} else {
FetchOk::get(etag, last_modified, r.bytes().await.unwrap().to_vec())
})
}
}
Err(error) => Err(FetchError::Unknown(Arc::new(error))),
}
}
impl BasicXlsDownloader {
    /// Creates a downloader with no URL configured.
    ///
    /// # Panics
    ///
    /// Panics if the `REQWEST_USER_AGENT` environment variable is missing.
    pub fn new() -> Self {
        BasicXlsDownloader {
            url: None,
            // Fixed: the panic message previously said "USER_AGENT must be
            // set" while the variable actually read is REQWEST_USER_AGENT.
            user_agent: env::var("REQWEST_USER_AGENT").expect("REQWEST_USER_AGENT must be set"),
        }
    }
}
impl XLSDownloader for BasicXlsDownloader {
    /// Fetches from the configured URL; errors with [FetchError::NoUrlProvided]
    /// when no URL has been set yet.
    // `match` replaces the former `is_none()` + `unwrap()` pair.
    async fn fetch(&self, head: bool) -> FetchResult {
        match &self.url {
            Some(url) => fetch_specified(url, &self.user_agent, head).await,
            None => Err(FetchError::NoUrlProvided),
        }
    }

    /// Validates `url` with a HEAD-style fetch and stores it only on success.
    async fn set_url(&mut self, url: String) -> FetchResult {
        let result = fetch_specified(&url, &self.user_agent, true).await;

        // `is_ok()` replaces the former `if let Ok(_)` (redundant pattern match).
        if result.is_ok() {
            self.url = Some(url);
        }

        result
    }
}
#[cfg(test)]
mod tests {
    //! Integration tests for the downloader. They hit live HTTP endpoints
    //! and therefore require network access to pass.

    use crate::xls_downloader::basic_impl::{fetch_specified, BasicXlsDownloader};
    use crate::xls_downloader::interface::{FetchError, XLSDownloader};

    /// Malformed URL: both HEAD and GET fetches must fail.
    #[tokio::test]
    async fn bad_url() {
        let url = "bad_url".to_string();
        let user_agent = String::new();

        let results = [
            fetch_specified(&url, &user_agent, true).await,
            fetch_specified(&url, &user_agent, false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());
    }

    /// Non-200 responses map to BadStatusCode (variant-only comparison).
    #[tokio::test]
    async fn bad_status_code() {
        let url = "https://www.google.com/not-found".to_string();
        let user_agent = String::new();

        let results = [
            fetch_specified(&url, &user_agent, true).await,
            fetch_specified(&url, &user_agent, false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());

        let expected_error = FetchError::BadStatusCode(404);

        assert_eq!(*results[0].as_ref().err().unwrap(), expected_error);
        assert_eq!(*results[1].as_ref().err().unwrap(), expected_error);
    }

    /// Endpoint lacking the required ETag header maps to BadHeaders.
    #[tokio::test]
    async fn bad_headers() {
        let url = "https://www.google.com/favicon.ico".to_string();
        let user_agent = String::new();

        let results = [
            fetch_specified(&url, &user_agent, true).await,
            fetch_specified(&url, &user_agent, false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());

        let expected_error = FetchError::BadHeaders("ETag".to_string());

        assert_eq!(*results[0].as_ref().err().unwrap(), expected_error);
        assert_eq!(*results[1].as_ref().err().unwrap(), expected_error);
    }

    /// Non-XLS content type is rejected.
    #[tokio::test]
    async fn bad_content_type() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb59fd46303008396ac96%2Fexample.txt".to_string();
        let user_agent = String::new();

        let results = [
            fetch_specified(&url, &user_agent, true).await,
            fetch_specified(&url, &user_agent, false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());
    }

    /// Valid XLS endpoint: both HEAD and GET fetches succeed.
    #[tokio::test]
    async fn ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();
        let user_agent = String::new();

        let results = [
            fetch_specified(&url, &user_agent, true).await,
            fetch_specified(&url, &user_agent, false).await,
        ];

        assert!(results[0].is_ok());
        assert!(results[1].is_ok());
    }

    /// set_url accepts a valid schedule URL.
    #[tokio::test]
    async fn downloader_set_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();
        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_ok());
    }

    /// set_url rejects a malformed URL.
    #[tokio::test]
    async fn downloader_set_err() {
        let url = "bad_url".to_string();
        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_err());
    }

    /// Full flow: set a valid URL, then fetch the file body.
    #[tokio::test]
    async fn downloader_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();
        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_ok());
        assert!(downloader.fetch(false).await.is_ok());
    }

    /// Fetching without a configured URL yields NoUrlProvided.
    #[tokio::test]
    async fn downloader_no_url_provided() {
        let downloader = BasicXlsDownloader::new();
        let result = downloader.fetch(false).await;

        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), FetchError::NoUrlProvided);
    }
}

View File

@@ -0,0 +1,83 @@
use chrono::{DateTime, Utc};
use derive_more::Display;
use std::mem::discriminant;
use std::sync::Arc;
use utoipa::ToSchema;
/// XLS data retrieval errors.
#[derive(Clone, Debug, ToSchema, Display)]
pub enum FetchError {
    /// File url is not set.
    #[display("The link to the timetable was not provided earlier.")]
    NoUrlProvided,
    /// Unknown error.
    /// Wrapped in `Arc` so the enum stays `Clone` despite the inner error.
    #[display("An unknown error occurred while downloading the file.")]
    #[schema(value_type = String)]
    Unknown(Arc<reqwest::Error>),
    /// Server returned a status code different from 200.
    #[display("Server returned a status code {_0}.")]
    BadStatusCode(u16),
    /// The url leads to a file of a different type.
    #[display("The link leads to a file of type '{_0}'.")]
    BadContentType(String),
    /// Server doesn't return expected headers.
    #[display("Server doesn't return expected header(s) '{_0}'.")]
    BadHeaders(String),
}

/// Equality by variant only — payloads are ignored, so e.g.
/// `BadStatusCode(404) == BadStatusCode(500)`. Sufficient for the tests,
/// which compare variants rather than payloads.
impl PartialEq for FetchError {
    fn eq(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}

/// Result of XLS data retrieval.
pub struct FetchOk {
    /// ETag object.
    pub etag: String,
    /// File upload date.
    pub uploaded_at: DateTime<Utc>,
    /// Date data received.
    pub requested_at: DateTime<Utc>,
    /// File data.
    /// `None` for metadata-only (HEAD) fetches.
    pub data: Option<Vec<u8>>,
}
impl FetchOk {
    /// Metadata-only result (HEAD request): no file body attached.
    pub fn head(etag: String, uploaded_at: DateTime<Utc>) -> Self {
        Self {
            etag,
            uploaded_at,
            requested_at: Utc::now(),
            data: None,
        }
    }

    /// Complete result (GET request): metadata plus the downloaded bytes.
    pub fn get(etag: String, uploaded_at: DateTime<Utc>, data: Vec<u8>) -> Self {
        Self {
            data: Some(data),
            ..Self::head(etag, uploaded_at)
        }
    }
}
/// Outcome of a fetch: metadata (and optionally a body) or a [FetchError].
pub type FetchResult = Result<FetchOk, FetchError>;

/// Downloader abstraction over the schedule XLS source.
pub trait XLSDownloader {
    /// Get data about the file, and optionally its content.
    /// With `head == true` only metadata is retrieved (no body).
    async fn fetch(&self, head: bool) -> FetchResult;

    /// Setting the file link; implementations are expected to validate the
    /// URL before storing it.
    async fn set_url(&mut self, url: String) -> FetchResult;
}

View File

@@ -0,0 +1,2 @@
pub mod basic_impl;
pub mod interface;