21 Commits

Author SHA1 Message Date
dependabot[bot]
b31e5a82c0 Bump tokio from 1.44.1 to 1.44.2
Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.44.1 to 1.44.2.
- [Release notes](https://github.com/tokio-rs/tokio/releases)
- [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.44.1...tokio-1.44.2)

---
updated-dependencies:
- dependency-name: tokio
  dependency-version: 1.44.2
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-04-07 19:27:52 +00:00
680419ea78 0.8.0
Реализованы все требуемые эндпоинты schedule.

Улучшена документация.
2025-03-28 23:24:37 +04:00
30c985a3d7 Добавлена возможность создания ResponseError с описанием ошибки.
Добавлен макрос для трансформации ErrorCode в Response, а также для имплементации треита PartialStatusCode.
2025-03-28 15:42:45 +04:00
70a7480ea3 0.7.0
Добавлена OpenAPI документация эндпоинтов и структур с интерфейсом RapiDoc.

Добавлены derive макросы для преобразования структуры в HttpResponse с помощью ResponderJson и IResponse<T> с помощью IntoIResponse.

Ревью кода эндпоинтов связанных с авторизацией.

Эндпоинт users/me теперь возвращает объект пользователя в требуемом виде.
2025-03-28 01:21:49 +04:00
1add903f36 0.6.0
Добавлена проверка токена пользователя перед обработкой запроса.
2025-03-27 20:03:35 +04:00
f703cc8326 0.5.0
Возвращена реализация сериализации в json для IResponse

Добавлены типы для экстракции данных из запросов средствами actix-web

Добавлен экстрактор для получения пользователя по токену доступа передаваемому в запросе

Добавлен макрос для автоматической реализации ResponseError для ошибок экстракторов

Добавлен эндпоинт users/me

Из главного проекта исключена зависимость actix-http посредством переноса части тестового функционала в отдельный crate
2025-03-26 08:05:22 +04:00
ab1cbd795e 0.4.0
Авторизация через токен вк

Слияние schedule_parser с проектом

Перенос схемы запросов/ответов в файлы эндпоинтов

Переход с библиотеки jwt на jsonwebtokens
2025-03-25 02:05:27 +04:00
0316f58592 Обновление workflow тестов 2025-03-23 06:19:51 +04:00
a95494d3be Регистрация и тесты эндпоинтов 2025-03-23 06:11:13 +04:00
844c89a365 Тесты JWT
Имплементация PartialEq для utils::jwt::VerifyError

Замена устаревшего changeset_options на diesel

Удалена проверка на ошибку создания токена, так как вероятность её появления близка к нулю
2025-03-22 23:14:14 +04:00
ba86dfc3fe Полностью рабочая авторизация 2025-03-22 22:44:52 +04:00
9f7460973e Подключение к Postgres и тестовый эндпоинт авторизации 2025-03-22 03:20:55 +04:00
Nikita
3cf42eea8a Create LICENSE 2025-03-22 00:31:03 +04:00
Nikita
d19b6c1069 Create CODE_OF_CONDUCT.md 2025-03-22 00:30:18 +04:00
126ba23001 Скачивание XLS документа по ссылке 2025-03-21 23:55:16 +04:00
d75d3fbc97 Установка разрешений для Workflow 2025-03-21 21:03:28 +04:00
Nikita
627cf1a74e Create dependabot.yml 2025-03-21 20:59:40 +04:00
b508db693e Добавлена конвертация расписания групп в расписание преподавателей 2025-03-21 20:54:52 +04:00
436d08a56a Добавление README 2025-03-21 07:39:56 +04:00
aa2618c5f5 Action для тестирования 2025-03-21 07:36:39 +04:00
f0a951ad38 Удаление неиспользуемых зависимостей 2025-03-21 07:28:37 +04:00
73 changed files with 6450 additions and 260 deletions

6
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "weekly"

30
.github/workflows/test.yml vendored Normal file
View File

@@ -0,0 +1,30 @@
name: Tests
on:
push:
branches: [ "master" ]
pull_request:
branches: [ "master" ]
permissions:
contents: read
env:
CARGO_TERM_COLOR: always
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Build
run: cargo build
- name: Create .env.test
run: touch .env.test
- name: Run tests
run: cargo test
env:
DATABASE_URL: ${{ secrets.TEST_DATABASE_URL }}
JWT_SECRET: "test-secret-at-least-256-bits-used"

3
.gitignore vendored
View File

@@ -1,3 +1,6 @@
/target
.~*.xls
schedule.json
teachers.json
.env*

12
.idea/dataSources.xml generated Normal file
View File

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DataSourceManagerImpl" format="xml" multifile-model="true">
<data-source source="LOCAL" name="sp@localhost" uuid="28502a90-08bf-4cc0-8494-10dc74e37189">
<driver-ref>postgresql</driver-ref>
<synchronize>true</synchronize>
<jdbc-driver>org.postgresql.Driver</jdbc-driver>
<jdbc-url>jdbc:postgresql://localhost:5432/sp</jdbc-url>
<working-dir>$ProjectFileDir$</working-dir>
</data-source>
</component>
</project>

14
.idea/discord.xml generated Normal file
View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DiscordProjectSettings">
<option name="show" value="PROJECT_FILES" />
<option name="description" value="" />
<option name="applicationTheme" value="default" />
<option name="iconsTheme" value="default" />
<option name="button1Title" value="" />
<option name="button1Url" value="" />
<option name="button2Title" value="" />
<option name="button2Url" value="" />
<option name="customApplicationId" value="" />
</component>
</project>

View File

@@ -2,9 +2,13 @@
<module type="EMPTY_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/lib/schedule_parser/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/lib/schedule_parser/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/actix-macros/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/actix-test/src" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/actix-macros/target" />
<excludeFolder url="file://$MODULE_DIR$/actix-test/target" />
<excludeFolder url="file://$MODULE_DIR$/target" />
</content>
<orderEntry type="inheritedJdk" />

9
.idea/sqldialects.xml generated Normal file
View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="SqlDialectMappings">
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-211822_create_user_role/down.sql" dialect="PostgreSQL" />
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-212111_create_users/up.sql" dialect="PostgreSQL" />
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/down.sql" dialect="PostgreSQL" />
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/up.sql" dialect="PostgreSQL" />
</component>
</project>

128
CODE_OF_CONDUCT.md Normal file
View File

@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
email.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

1470
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,15 +1,46 @@
[workspace]
members = ["lib/schedule_parser"]
members = ["actix-macros", "actix-test"]
[package]
name = "schedule-parser-rusted"
version = "0.1.0"
version = "0.8.0"
edition = "2024"
publish = false
[dependencies]
actix-web = "4.10.2"
actix-macros = { path = "actix-macros" }
bcrypt = "0.17.0"
calamine = "0.26.1"
chrono = { version = "0.4.40", features = ["serde"] }
derive_more = "2.0.1"
diesel = { version = "2.2.8", features = ["postgres"] }
diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
dotenvy = "0.15.7"
env_logger = "0.11.7"
futures-util = "0.3.31"
fuzzy-matcher = "0.3.7"
jsonwebtoken = { version = "9.3.1", features = ["use_pem"] }
hex = "0.4.3"
mime = "0.3.17"
objectid = "0.2.0"
regex = "1.11.1"
reqwest = "0.12.15"
serde = { version = "1.0.219", features = ["derive"] }
serde_repr = "0.1.20"
serde_json = "1.0.140"
schedule_parser = { path = "./lib/schedule_parser" }
serde_with = "3.12.0"
serde_repr = "0.1.20"
sha1 = "0.11.0-pre.5"
tokio = { version = "1.44.2", features = ["macros", "rt-multi-thread"] }
rand = "0.9.0"
utoipa = { version = "5", features = ["actix_extras", "chrono"] }
utoipa-rapidoc = { version = "6.0.0", features = ["actix-web"] }
utoipa-actix-web = "0.1"
[dev-dependencies]
actix-test = { path = "actix-test" }
criterion = "0.5.1"
[[bench]]
name = "parse"
harness = false

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2025 Nikita
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

3
README.md Normal file
View File

@@ -0,0 +1,3 @@
# API для получения расписания политехникума
[![Rust](https://github.com/n08i40k/schedule-parser-rusted/actions/workflows/test.yml/badge.svg)](https://github.com/n08i40k/schedule-parser-rusted/actions/workflows/test.yml)

1
actix-macros/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

7
actix-macros/Cargo.lock generated Normal file
View File

@@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "actix-utility-macros"
version = "0.1.0"

12
actix-macros/Cargo.toml Normal file
View File

@@ -0,0 +1,12 @@
[package]
name = "actix-macros"
version = "0.1.0"
edition = "2024"
[dependencies]
syn = "2.0.100"
quote = "1.0.40"
proc-macro2 = "1.0.94"
[lib]
proc-macro = true

209
actix-macros/src/lib.rs Normal file
View File

@@ -0,0 +1,209 @@
extern crate proc_macro;
use proc_macro::TokenStream;
mod shared {
    use quote::{ToTokens, quote};
    use syn::{Attribute, DeriveInput};

    /// Extracts the value of a `#[status_code = "..."]` attribute, if present.
    ///
    /// A purely numeric value (e.g. `"404"`) becomes a runtime
    /// `StatusCode::from_u16` call; anything else is emitted verbatim as a
    /// token stream (e.g. a path such as
    /// `actix_web::http::StatusCode::UNAUTHORIZED`).
    pub fn find_status_code(attrs: &[Attribute]) -> Option<proc_macro2::TokenStream> {
        attrs.iter().find_map(|attr| {
            if !attr.path().is_ident("status_code") {
                return None;
            }

            let meta = attr.meta.require_name_value().ok()?;
            let code = meta.value.to_token_stream().to_string();
            let trimmed_code = code.trim_matches('"');

            if let Ok(numeric_code) = trimmed_code.parse::<u16>() {
                Some(quote! { actix_web::http::StatusCode::from_u16(#numeric_code).unwrap() })
            } else {
                // Non-numeric: assume the attribute holds a valid Rust expression/path.
                let string_code: proc_macro2::TokenStream =
                    trimmed_code.to_string().parse().unwrap();
                Some(quote! { #string_code })
            }
        })
    }

    /// Builds the `match` arms mapping each enum variant to its status code.
    ///
    /// Variants without their own `#[status_code]` attribute share one
    /// catch-all arm that uses the enum-level attribute, falling back to
    /// `INTERNAL_SERVER_ERROR` when none is given.
    ///
    /// # Panics
    /// Panics when the derive input is not an enum.
    pub fn get_arms(ast: &DeriveInput) -> Vec<proc_macro2::TokenStream> {
        let name = &ast.ident;

        let variants = if let syn::Data::Enum(data) = &ast.data {
            &data.variants
        } else {
            panic!("Only enums are supported");
        };

        // Arms for variants carrying an explicit #[status_code] attribute.
        let mut status_code_arms: Vec<proc_macro2::TokenStream> = variants
            .iter()
            .filter_map(|v| {
                let status_code = find_status_code(&v.attrs)?;
                let variant_name = &v.ident;

                Some(quote! { #name::#variant_name => #status_code, })
            })
            .collect();

        // Some variants had no attribute: cover them with a single catch-all arm.
        if status_code_arms.len() < variants.len() {
            let status_code = find_status_code(&ast.attrs)
                .unwrap_or_else(|| quote! { ::actix_web::http::StatusCode::INTERNAL_SERVER_ERROR });

            status_code_arms.push(quote! { _ => #status_code });
        }

        status_code_arms
    }
}
mod response_error_message {
    use proc_macro::TokenStream;
    use quote::quote;

    /// Generates an `actix_web::ResponseError` impl for an error enum.
    ///
    /// `status_code` is resolved per variant via `#[status_code]` attributes
    /// (see `super::shared::get_arms`); `error_response` serializes the error
    /// as JSON through the crate's `ResponseErrorMessage` wrapper.
    pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
        let name = &ast.ident;
        let status_code_arms = super::shared::get_arms(ast);

        TokenStream::from(quote! {
            impl ::actix_web::ResponseError for #name {
                fn status_code(&self) -> ::actix_web::http::StatusCode {
                    match self {
                        #(#status_code_arms)*
                    }
                }

                // Fully qualify BoxBody: the previous expansion emitted a bare
                // `BoxBody`, which forced every deriving module to import
                // actix_web::body::BoxBody itself.
                fn error_response(&self) -> ::actix_web::HttpResponse<::actix_web::body::BoxBody> {
                    ::actix_web::HttpResponse::build(self.status_code())
                        .json(crate::utility::error::ResponseErrorMessage::new(self.clone()))
                }
            }
        })
    }
}
mod status_code {
    use proc_macro::TokenStream;
    use quote::quote;

    /// Generates an impl of the crate-local `PartialStatusCode` trait,
    /// mapping every enum variant to its `#[status_code]` attribute value.
    pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
        let ident = &ast.ident;
        let arms = super::shared::get_arms(ast);

        let expanded = quote! {
            impl crate::routes::schema::PartialStatusCode for #ident {
                fn status_code(&self) -> ::actix_web::http::StatusCode {
                    match self {
                        #(#arms)*
                    }
                }
            }
        };

        expanded.into()
    }
}
mod responder_json {
    use proc_macro::TokenStream;
    use quote::quote;

    /// Generates an `actix_web::Responder` impl that answers with `200 OK`
    /// and the value serialized as a JSON body.
    pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
        let ident = &ast.ident;

        let expanded = quote! {
            impl ::actix_web::Responder for #ident {
                type Body = ::actix_web::body::EitherBody<::actix_web::body::BoxBody>;

                fn respond_to(self, _: &::actix_web::HttpRequest) -> ::actix_web::HttpResponse<Self::Body> {
                    ::actix_web::HttpResponse::Ok()
                        .json(self)
                        .map_into_left_body()
                }
            }
        };

        expanded.into()
    }
}
mod into_response_error {
use proc_macro::TokenStream;
use quote::quote;
pub fn fmt(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
TokenStream::from(quote! {
impl ::core::convert::Into<crate::routes::schema::ResponseError<#name>> for #name {
fn into(self) -> crate::routes::schema::ResponseError<#name> {
crate::routes::schema::ResponseError {
code: self,
message: ::core::option::Option::None,
}
}
}
impl<T> crate::routes::schema::IntoResponseAsError<T> for #name
where
T: ::serde::ser::Serialize + ::utoipa::PartialSchema {}
})
}
pub fn fmt_named(ast: &syn::DeriveInput) -> TokenStream {
let name = &ast.ident;
TokenStream::from(quote! {
impl ::core::convert::Into<crate::routes::schema::ResponseError<#name>> for #name {
fn into(self) -> crate::routes::schema::ResponseError<#name> {
crate::routes::schema::ResponseError {
message: ::core::option::Option::Some(format!("{}", self)),
code: self,
}
}
}
impl<T> crate::routes::schema::IntoResponseAsError<T> for #name
where
T: ::serde::ser::Serialize + ::utoipa::PartialSchema {}
})
}
}
/// Derives `actix_web::ResponseError` for an error enum; per-variant status
/// codes come from `#[status_code = "..."]` attributes.
#[proc_macro_derive(ResponseErrorMessage, attributes(status_code))]
pub fn rem_derive(input: TokenStream) -> TokenStream {
    let ast = syn::parse(input).unwrap();
    response_error_message::fmt(&ast)
}

/// Derives `actix_web::Responder`, answering with the value as JSON.
// NOTE(review): fn name has a typo ("responser"); renaming is safe because
// only the derive name `ResponderJson` is part of the public interface.
#[proc_macro_derive(ResponderJson)]
pub fn responser_json_derive(input: TokenStream) -> TokenStream {
    let ast = syn::parse(input).unwrap();
    responder_json::fmt(&ast)
}

/// Derives conversion into the crate's `ResponseError<T>` with no message.
#[proc_macro_derive(IntoResponseError)]
pub fn into_response_error_derive(input: TokenStream) -> TokenStream {
    let ast = syn::parse(input).unwrap();
    into_response_error::fmt(&ast)
}

/// Derives conversion into `ResponseError<T>` carrying the `Display` output
/// as the message.
#[proc_macro_derive(IntoResponseErrorNamed)]
pub fn into_response_error_named_derive(input: TokenStream) -> TokenStream {
    let ast = syn::parse(input).unwrap();
    into_response_error::fmt_named(&ast)
}

/// Derives the crate-local `PartialStatusCode` trait from `#[status_code]`
/// attributes.
#[proc_macro_derive(StatusCode, attributes(status_code))]
pub fn status_code_derive(input: TokenStream) -> TokenStream {
    let ast = syn::parse(input).unwrap();
    status_code::fmt(&ast)
}

1
actix-test/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

1520
actix-test/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

8
actix-test/Cargo.toml Normal file
View File

@@ -0,0 +1,8 @@
[package]
name = "actix-test"
version = "0.1.0"
edition = "2024"
[dependencies]
actix-http = "3.10.0"
actix-web = "4.10.2"

12
actix-test/src/lib.rs Normal file
View File

@@ -0,0 +1,12 @@
use actix_web::dev::{HttpServiceFactory, Service, ServiceResponse};
use actix_web::{App, test, web};

/// Builds an initialized actix-web test service from shared app state plus a
/// single service factory (e.g. a scope or a handler).
///
/// The returned service can then be driven with `actix_web::test` request
/// helpers.
pub async fn test_app<F, A: 'static>(
    app_state: web::Data<A>,
    factory: F,
) -> impl Service<actix_http::Request, Response = ServiceResponse, Error = actix_web::Error>
where
    F: HttpServiceFactory + 'static,
{
    test::init_service(App::new().app_data(app_state).service(factory)).await
}

12
benches/parse.rs Normal file
View File

@@ -0,0 +1,12 @@
use criterion::{Criterion, criterion_group, criterion_main};
use schedule_parser_rusted::parser::parse_xls;

/// Benchmarks XLS schedule parsing on the bundled `schedule.xls` fixture.
/// The file is embedded at compile time, so disk I/O does not skew timings.
pub fn bench_parse_xls(c: &mut Criterion) {
    let buffer: Vec<u8> = include_bytes!("../schedule.xls").to_vec();
    c.bench_function("parse_xls", |b| b.iter(|| parse_xls(&buffer).unwrap()));
}

criterion_group!(benches, bench_parse_xls);
criterion_main!(benches);

9
diesel.toml Normal file
View File

@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/database/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "./migrations"

View File

@@ -1,23 +0,0 @@
[package]
name = "schedule_parser"
version = "0.1.0"
edition = "2024"
[lib]
name = "schedule_parser"
path = "src/lib/lib.rs"
[dependencies]
serde = { version = "1.0.219", features = ["derive"] }
serde_repr = "0.1.20"
chrono = { version = "0.4.40", features = ["serde"] }
calamine = "0.26.1"
regex = "1.11.1"
fuzzy-matcher = "0.3.7"
[dev-dependencies]
criterion = "0.5.1"
[[bench]]
name = "parse"
harness = false

View File

@@ -1,12 +0,0 @@
use criterion::{Criterion, criterion_group, criterion_main};
use schedule_parser::parse_xls;
use std::path::Path;
pub fn bench_parse_xls(c: &mut Criterion) {
c.bench_function("parse_xls", |b| {
b.iter(|| parse_xls(Path::new("../../schedule.xls")))
});
}
criterion_group!(benches, bench_parse_xls);
criterion_main!(benches);

View File

@@ -1,97 +0,0 @@
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use serde_repr::{Deserialize_repr, Serialize_repr};
#[derive(Serialize, Deserialize, Debug)]
pub struct LessonTime {
pub start: DateTime<Utc>,
pub end: DateTime<Utc>,
}
#[derive(Serialize_repr, Deserialize_repr, Debug, PartialEq, Clone)]
#[repr(u8)]
pub enum LessonType {
Default = 0, // Обычная
Additional, // Допы
Break, // Перемена
Consultation, // Консультация
IndependentWork, // Самостоятельная работа
Exam, // Зачёт
ExamWithGrade, // Зачет с оценкой
ExamDefault, // Экзамен
}
#[derive(Serialize, Deserialize, Debug)]
pub struct LessonSubGroup {
pub number: u8,
pub cabinet: Option<String>,
pub teacher: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Lesson {
/**
* Тип занятия
*/
#[serde(rename = "type")]
pub lesson_type: LessonType,
/**
* Индексы пар, если присутствуют
*/
#[serde(rename = "defaultRange")]
pub default_range: Option<[u8; 2]>,
/**
* Название занятия
*/
pub name: Option<String>,
/**
* Начало и конец занятия
*/
pub time: LessonTime,
/**
* Подгруппы
*/
#[serde(rename = "subGroups")]
pub subgroups: Option<Vec<LessonSubGroup>>,
/**
* Группа (только для расписания преподавателей)
*/
pub group: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Day {
pub name: String,
pub street: Option<String>,
pub date: DateTime<Utc>,
pub lessons: Vec<Lesson>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Group {
pub name: String,
pub days: Vec<Day>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Schedule {
#[serde(rename = "updatedAt")]
pub updated_at: DateTime<Utc>,
pub groups: HashMap<String, Group>,
#[serde(rename = "updatedGroups")]
pub updated_groups: Vec<Vec<usize>>,
}

0
migrations/.keep Normal file
View File

View File

@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

View File

@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View File

@@ -0,0 +1 @@
DROP TYPE user_role;

View File

@@ -0,0 +1,4 @@
CREATE TYPE user_role AS ENUM (
'STUDENT',
'TEACHER',
'ADMIN');

View File

@@ -0,0 +1 @@
DROP TABLE users;

View File

@@ -0,0 +1,11 @@
CREATE TABLE users
(
id text PRIMARY KEY NOT NULL,
username text UNIQUE NOT NULL,
"password" text NOT NULL,
vk_id int4 NULL,
access_token text UNIQUE NOT NULL,
"group" text NOT NULL,
role user_role NOT NULL,
version text NOT NULL
);

View File

@@ -0,0 +1 @@
DROP TABLE fcm;

View File

@@ -0,0 +1,11 @@
CREATE TABLE fcm
(
user_id text PRIMARY KEY NOT NULL,
token text NOT NULL,
topics text[] NULL
);
CREATE UNIQUE INDEX fcm_user_id_key ON fcm USING btree (user_id);
ALTER TABLE fcm
ADD CONSTRAINT fcm_user_id_fkey FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE RESTRICT ON UPDATE CASCADE;

60
src/app_state.rs Normal file
View File

@@ -0,0 +1,60 @@
use crate::parser::schema::ParseResult;
use crate::utility::hasher::DigestHasher;
use crate::xls_downloader::basic_impl::BasicXlsDownloader;
use actix_web::web;
use chrono::{DateTime, Utc};
use diesel::{Connection, PgConnection};
use sha1::{Digest, Sha1};
use std::env;
use std::hash::Hash;
use std::sync::{Mutex, MutexGuard};
/// A parsed schedule snapshot held in `AppState`.
#[derive(Clone)]
pub struct Schedule {
    /// ETag of the source XLS document (folded into `Schedule::hash`).
    pub etag: String,
    // NOTE(review): the three timestamps below are presumably
    // download / source-document / parse times — confirm against the updater code.
    pub fetched_at: DateTime<Utc>,
    pub updated_at: DateTime<Utc>,
    pub parsed_at: DateTime<Utc>,
    /// Parsed schedule contents.
    pub data: ParseResult,
}

impl Schedule {
    /// Computes a SHA-1 digest over the ETag plus every teacher and group
    /// entry, finalized into a `String` by `DigestHasher::finalize`.
    pub fn hash(&self) -> String {
        let mut hasher = DigestHasher::from(Sha1::new());
        self.etag.hash(&mut hasher);
        self.data.teachers.iter().for_each(|e| e.hash(&mut hasher));
        self.data.groups.iter().for_each(|e| e.hash(&mut hasher));
        hasher.finalize()
    }
}
/// Shared state passed to the endpoints.
pub struct AppState {
    pub downloader: Mutex<BasicXlsDownloader>,
    pub schedule: Mutex<Option<Schedule>>,
    pub database: Mutex<PgConnection>,
}

impl AppState {
    /// Acquires the guard of the PostgreSQL connection mutex
    /// (blocks until the mutex is free; panics if it is poisoned).
    pub fn connection(&self) -> MutexGuard<PgConnection> {
        self.database.lock().unwrap()
    }
}
/// Creates a fresh `web::Data<AppState>`.
///
/// Reads `DATABASE_URL` from the environment and eagerly opens the
/// PostgreSQL connection.
///
/// # Panics
/// Panics when `DATABASE_URL` is unset or the connection cannot be
/// established.
pub fn app_state() -> web::Data<AppState> {
    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");

    web::Data::new(AppState {
        downloader: Mutex::new(BasicXlsDownloader::new()),
        schedule: Mutex::new(None),
        database: Mutex::new(
            PgConnection::establish(&database_url)
                // Include the underlying diesel error instead of discarding it.
                .unwrap_or_else(|e| panic!("Error connecting to {database_url}: {e}")),
        ),
    })
}

103
src/database/driver.rs Normal file
View File

@@ -0,0 +1,103 @@
pub mod users {
    //! Thin synchronous query helpers over the `users` table.
    //! Every function locks the shared connection mutex for the duration of
    //! a single query.

    use crate::database::models::User;
    use crate::database::schema::users::dsl::users;
    use crate::database::schema::users::dsl::*;
    use diesel::{ExpressionMethods, QueryResult, insert_into};
    use diesel::{PgConnection, SelectableHelper};
    use diesel::{QueryDsl, RunQueryDsl};
    use std::ops::DerefMut;
    use std::sync::Mutex;

    /// Fetches the user with the given id.
    pub fn get(connection: &Mutex<PgConnection>, _id: &str) -> QueryResult<User> {
        let mut lock = connection.lock().unwrap();
        let con = lock.deref_mut();

        users
            .filter(id.eq(_id))
            .select(User::as_select())
            .first(con)
    }

    /// Fetches the user with the given username.
    pub fn get_by_username(connection: &Mutex<PgConnection>, _username: &str) -> QueryResult<User> {
        let mut lock = connection.lock().unwrap();
        let con = lock.deref_mut();

        users
            .filter(username.eq(_username))
            .select(User::as_select())
            .first(con)
    }

    /// Fetches the user linked to the given VK account id.
    pub fn get_by_vk_id(connection: &Mutex<PgConnection>, _vk_id: i32) -> QueryResult<User> {
        let mut lock = connection.lock().unwrap();
        let con = lock.deref_mut();

        users
            .filter(vk_id.eq(_vk_id))
            .select(User::as_select())
            .first(con)
    }

    /// Returns whether a user with the given username exists.
    /// Query errors are treated as "not found" (deliberate best-effort).
    pub fn contains_by_username(connection: &Mutex<PgConnection>, _username: &str) -> bool {
        let mut lock = connection.lock().unwrap();
        let con = lock.deref_mut();

        users
            .filter(username.eq(_username))
            .count()
            .get_result::<i64>(con)
            .map(|count| count > 0)
            .unwrap_or(false)
    }

    /// Returns whether a user linked to the given VK id exists.
    /// Query errors are treated as "not found" (deliberate best-effort).
    pub fn contains_by_vk_id(connection: &Mutex<PgConnection>, _vk_id: i32) -> bool {
        let mut lock = connection.lock().unwrap();
        let con = lock.deref_mut();

        users
            .filter(vk_id.eq(_vk_id))
            .count()
            .get_result::<i64>(con)
            .map(|count| count > 0)
            .unwrap_or(false)
    }

    /// Inserts a new user row, returning the number of affected rows.
    pub fn insert(connection: &Mutex<PgConnection>, user: &User) -> QueryResult<usize> {
        let mut lock = connection.lock().unwrap();
        let con = lock.deref_mut();

        insert_into(users).values(user).execute(con)
    }

    /// Test helper: deletes a user by username, reporting whether a row was removed.
    #[cfg(test)]
    pub fn delete_by_username(connection: &Mutex<PgConnection>, _username: &str) -> bool {
        let mut lock = connection.lock().unwrap();
        let con = lock.deref_mut();

        diesel::delete(users.filter(username.eq(_username)))
            .execute(con)
            .map(|count| count > 0)
            .unwrap_or(false)
    }

    /// Test helper: inserts a user, silently ignoring a conflicting row.
    #[cfg(test)]
    pub fn insert_or_ignore(connection: &Mutex<PgConnection>, user: &User) -> QueryResult<usize> {
        let mut lock = connection.lock().unwrap();
        let con = lock.deref_mut();

        insert_into(users)
            .values(user)
            .on_conflict_do_nothing()
            .execute(con)
    }
}

3
src/database/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
pub mod schema;
pub mod models;
pub mod driver;

61
src/database/models.rs Normal file
View File

@@ -0,0 +1,61 @@
use actix_macros::ResponderJson;
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
/// User role, backed by the Postgres `user_role` enum
/// (UPPERCASE both in the database and in JSON).
#[derive(
    diesel_derive_enum::DbEnum,
    Serialize,
    Deserialize,
    Debug,
    Clone,
    Copy,
    PartialEq,
    utoipa::ToSchema,
)]
#[ExistingTypePath = "crate::database::schema::sql_types::UserRole"]
#[DbValueStyle = "UPPERCASE"]
#[serde(rename_all = "UPPERCASE")]
pub enum UserRole {
    Student,
    Teacher,
    Admin,
}
/// A row of the `users` table.
#[derive(
    Identifiable,
    AsChangeset,
    Queryable,
    Selectable,
    Serialize,
    Insertable,
    Debug,
    utoipa::ToSchema,
    ResponderJson,
)]
#[diesel(table_name = crate::database::schema::users)]
#[diesel(treat_none_as_null = true)]
pub struct User {
    /// Account UUID
    pub id: String,
    /// Username
    pub username: String,
    /// BCrypt password hash
    pub password: String,
    /// Identifier of the linked VK account
    pub vk_id: Option<i32>,
    /// JWT access token
    pub access_token: String,
    /// Group
    pub group: String,
    /// Role
    pub role: UserRole,
    /// Version of the installed Polytechnic+ application
    pub version: String,
}

38
src/database/schema.rs Normal file
View File

@@ -0,0 +1,38 @@
// @generated automatically by Diesel CLI.
pub mod sql_types {
    /// Marker type for the Postgres enum `user_role`.
    #[derive(diesel::query_builder::QueryId, Clone, diesel::sql_types::SqlType)]
    #[diesel(postgres_type(name = "user_role"))]
    pub struct UserRole;
}

// FCM push-notification tokens, one row per user.
diesel::table! {
    fcm (user_id) {
        user_id -> Text,
        token -> Text,
        topics -> Nullable<Array<Nullable<Text>>>,
    }
}

// User accounts; `role` uses the custom `user_role` Postgres enum above.
diesel::table! {
    use diesel::sql_types::*;
    use super::sql_types::UserRole;

    users (id) {
        id -> Text,
        username -> Text,
        password -> Text,
        vk_id -> Nullable<Int4>,
        access_token -> Text,
        group -> Text,
        role -> UserRole,
        version -> Text,
    }
}

diesel::joinable!(fcm -> users (user_id));

diesel::allow_tables_to_appear_in_same_query!(
    fcm,
    users,
);

View File

@@ -0,0 +1,68 @@
use crate::app_state::AppState;
use crate::database::driver;
use crate::database::models::User;
use crate::extractors::base::FromRequestSync;
use crate::utility::jwt;
use actix_macros::ResponseErrorMessage;
use actix_web::body::BoxBody;
use actix_web::dev::Payload;
use actix_web::http::header;
use actix_web::{HttpRequest, web};
use derive_more::Display;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
/// Errors returned while extracting the authorized user from a request.
/// All variants map to HTTP 401 Unauthorized.
#[derive(Clone, Debug, Serialize, Deserialize, Display, ResponseErrorMessage)]
#[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum Error {
    /// The request has no Authorization header.
    #[display("No Authorization header found")]
    NoHeader,
    /// Authorization scheme other than Bearer was supplied.
    #[display("Bearer token is required")]
    UnknownAuthorizationType,
    /// The token is invalid or has expired.
    #[display("Invalid or expired access token")]
    InvalidAccessToken,
    /// No user in the database is associated with the token.
    #[display("No user associated with access token")]
    NoUser,
}
impl Error {
    /// Converts this extractor error into a generic `actix_web::Error`.
    pub fn into_err(self) -> actix_web::Error {
        self.into()
    }
}
/// Экстрактор пользователя из запроса с токеном
impl FromRequestSync for User {
type Error = actix_web::Error;
fn from_request_sync(req: &HttpRequest, _: &mut Payload) -> Result<Self, Self::Error> {
let authorization = req
.headers()
.get(header::AUTHORIZATION)
.ok_or(Error::NoHeader.into_err())?
.to_str()
.map_err(|_| Error::NoHeader.into_err())?
.to_string();
let parts: Vec<&str> = authorization.split(' ').collect();
if parts.len() != 2 || parts[0] != "Bearer" {
return Err(Error::UnknownAuthorizationType.into_err());
}
let user_id = jwt::verify_and_decode(&parts[1].to_string())
.map_err(|_| Error::InvalidAccessToken.into_err())?;
let app_state = req.app_data::<web::Data<AppState>>().unwrap();
driver::users::get(&app_state.database, &user_id).map_err(|_| Error::NoUser.into())
}
}

65
src/extractors/base.rs Normal file
View File

@@ -0,0 +1,65 @@
use actix_web::dev::Payload;
use actix_web::{FromRequest, HttpRequest};
use futures_util::future::LocalBoxFuture;
use std::future::{Ready, ready};
/// Asynchronous extractor wrapper around a value pulled out of a request.
pub struct AsyncExtractor<T>(T);

impl<T> AsyncExtractor<T> {
    #[allow(dead_code)]
    /// Consumes the wrapper and yields the extracted value.
    pub fn into_inner(self) -> T {
        let Self(value) = self;
        value
    }
}
/// Contract for types that can be extracted from a request asynchronously.
pub trait FromRequestAsync: Sized {
    /// Error type; must be convertible into an `actix_web::Error`.
    type Error: Into<actix_web::Error>;

    /// Asynchronously extracts a value of `Self` from the request.
    async fn from_request_async(req: HttpRequest, payload: Payload) -> Result<Self, Self::Error>;
}
/// `FromRequest` implementation shared by every asynchronous extractor.
impl<T: FromRequestAsync> FromRequest for AsyncExtractor<T> {
    type Error = T::Error;
    type Future = LocalBoxFuture<'static, Result<Self, Self::Error>>;

    fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
        // Take owned copies so the future can outlive the borrowed arguments.
        let owned_req = req.clone();
        let owned_payload = payload.take();

        Box::pin(async move {
            T::from_request_async(owned_req, owned_payload)
                .await
                .map(Self)
        })
    }
}
/// Synchronous extractor wrapper around a value pulled out of a request.
pub struct SyncExtractor<T>(T);

impl<T> SyncExtractor<T> {
    /// Consumes the wrapper and yields the extracted value.
    pub fn into_inner(self) -> T {
        let Self(value) = self;
        value
    }
}
/// Contract for types that can be extracted from a request synchronously.
pub trait FromRequestSync: Sized {
    /// Error type; must be convertible into an `actix_web::Error`.
    type Error: Into<actix_web::Error>;

    /// Synchronously extracts a value of `Self` from the request.
    fn from_request_sync(req: &HttpRequest, payload: &mut Payload) -> Result<Self, Self::Error>;
}
/// `FromRequest` implementation shared by every synchronous extractor.
impl<T: FromRequestSync> FromRequest for SyncExtractor<T> {
    type Error = T::Error;
    type Future = Ready<Result<Self, Self::Error>>;

    fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
        let outcome = T::from_request_sync(req, payload);
        ready(outcome.map(Self))
    }
}

2
src/extractors/mod.rs Normal file
View File

@@ -0,0 +1,2 @@
pub mod authorized_user;
pub mod base;

1
src/lib.rs Normal file
View File

@@ -0,0 +1 @@
pub mod parser;

View File

@@ -1,15 +1,90 @@
use std::fs;
use std::path::Path;
use schedule_parser::parse_xls;
use crate::app_state::{AppState, app_state};
use crate::middlewares::authorization::JWTAuthorization;
use crate::routes::auth::sign_in::{sign_in_default, sign_in_vk};
use crate::routes::auth::sign_up::{sign_up_default, sign_up_vk};
use crate::routes::schedule::get_cache_status::get_cache_status;
use crate::routes::schedule::get_group::get_group;
use crate::routes::schedule::get_group_names::get_group_names;
use crate::routes::schedule::get_schedule::get_schedule;
use crate::routes::schedule::get_teacher::get_teacher;
use crate::routes::schedule::get_teacher_names::get_teacher_names;
use crate::routes::schedule::update_download_url::update_download_url;
use crate::routes::users::me::me;
use actix_web::{App, HttpServer};
use dotenvy::dotenv;
use utoipa_actix_web::AppExt;
use utoipa_rapidoc::RapiDoc;
fn main() {
let groups = parse_xls(Path::new("./schedule.xls"));
mod app_state;
fs::write(
"./schedule.json",
serde_json::to_string_pretty(&groups)
.expect("Failed to serialize schedule!")
.as_bytes(),
)
.expect("Failed to write schedule");
mod database;
mod parser;
mod xls_downloader;
mod extractors;
mod middlewares;
mod routes;
mod utility;
mod test_env;
#[actix_web::main]
async fn main() {
dotenv().ok();
unsafe { std::env::set_var("RUST_LOG", "debug") };
env_logger::init();
let app_state = app_state();
HttpServer::new(move || {
let auth_scope = utoipa_actix_web::scope("/auth")
.service(sign_in_default)
.service(sign_in_vk)
.service(sign_up_default)
.service(sign_up_vk);
let users_scope = utoipa_actix_web::scope("/users")
.wrap(JWTAuthorization)
.service(me);
let schedule_scope = utoipa_actix_web::scope("/schedule")
.wrap(JWTAuthorization)
.service(get_schedule)
.service(update_download_url)
.service(get_cache_status)
.service(get_group)
.service(get_group_names)
.service(get_teacher)
.service(get_teacher_names);
let api_scope = utoipa_actix_web::scope("/api/v1")
.service(auth_scope)
.service(users_scope)
.service(schedule_scope);
let (app, api) = App::new()
.into_utoipa_app()
.app_data(app_state.clone())
.service(api_scope)
.split_for_parts();
let rapidoc_service = RapiDoc::with_openapi("/api-docs-json", api).path("/api-docs");
// Because CORS error on non-localhost
let patched_rapidoc_html = rapidoc_service.to_html().replace(
"https://unpkg.com/rapidoc/dist/rapidoc-min.js",
"https://cdn.jsdelivr.net/npm/rapidoc/dist/rapidoc-min.min.js",
);
app.service(rapidoc_service.custom_html(patched_rapidoc_html))
})
.workers(4)
.bind(("0.0.0.0", 8080))
.unwrap()
.run()
.await
.unwrap();
}

View File

@@ -0,0 +1,81 @@
use crate::database::models::User;
use crate::extractors::authorized_user;
use crate::extractors::base::FromRequestSync;
use actix_web::body::{BoxBody, EitherBody};
use actix_web::dev::{Payload, Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
use actix_web::{Error, HttpRequest, ResponseError};
use futures_util::future::LocalBoxFuture;
use std::future::{Ready, ready};
/// Middleware guard that validates JWT Bearer tokens before the handler runs.
pub struct JWTAuthorization;

impl<S, B> Transform<S, ServiceRequest> for JWTAuthorization
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    // EitherBody: left = inner service's body, right = our own error response body.
    type Response = ServiceResponse<EitherBody<B, BoxBody>>;
    type Error = Error;
    type Transform = JWTAuthorizationMiddleware<S>;
    type InitError = ();
    type Future = Ready<Result<Self::Transform, Self::InitError>>;

    /// Wraps the inner service into the JWT-checking middleware.
    fn new_transform(&self, service: S) -> Self::Future {
        ready(Ok(JWTAuthorizationMiddleware { service }))
    }
}

/// Middleware instance holding the wrapped inner service.
pub struct JWTAuthorizationMiddleware<S> {
    service: S,
}
/// Verifies that the request carries a valid token bound to an existing user.
impl<S, B> JWTAuthorizationMiddleware<S>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    /// Runs the `User` extractor and keeps only the verdict, discarding the user.
    pub fn check_authorization(
        &self,
        req: &HttpRequest,
        payload: &mut Payload,
    ) -> Result<(), authorized_user::Error> {
        User::from_request_sync(req, payload)
            .map(|_| ())
            // NOTE(review): assumes the extractor only ever fails with
            // `authorized_user::Error`; the `unwrap` panics otherwise — confirm.
            .map_err(|e| e.as_error::<authorized_user::Error>().unwrap().clone())
    }
}
impl<S, B> Service<ServiceRequest> for JWTAuthorizationMiddleware<S>
where
    S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Response = ServiceResponse<EitherBody<B, BoxBody>>;
    type Error = Error;
    type Future = LocalBoxFuture<'static, Result<Self::Response, Self::Error>>;

    forward_ready!(service);

    fn call(&self, req: ServiceRequest) -> Self::Future {
        // Split the request so the synchronous extractor can borrow its parts.
        let (http_req, mut payload) = req.into_parts();

        // Unauthorized: short-circuit with the extractor's own error response
        // (right body) and never invoke the inner service.
        if let Err(err) = self.check_authorization(&http_req, &mut payload) {
            return Box::pin(async move {
                Ok(ServiceResponse::new(
                    http_req,
                    err.error_response().map_into_right_body(),
                ))
            });
        }

        // Token is valid: reassemble the request and delegate to the inner service.
        let req = ServiceRequest::from_parts(http_req, payload);
        let fut = self.service.call(req);

        Box::pin(async move { Ok(fut.await?.map_into_left_body()) })
    }
}

1
src/middlewares/mod.rs Normal file
View File

@@ -0,0 +1 @@
pub mod authorization;

View File

@@ -1,26 +1,25 @@
use crate::LessonParseResult::{Lessons, Street};
use crate::schema::LessonType::Break;
use crate::schema::{Day, Group, Lesson, LessonSubGroup, LessonTime, LessonType};
use calamine::{Reader, Xls, open_workbook};
use crate::parser::LessonParseResult::{Lessons, Street};
use crate::parser::schema::LessonType::Break;
use crate::parser::schema::{
Day, Lesson, LessonSubGroup, LessonTime, LessonType, ParseError, ParseResult, ScheduleEntry,
};
use calamine::{Reader, Xls, open_workbook_from_rs};
use chrono::{Duration, NaiveDateTime};
use fuzzy_matcher::FuzzyMatcher;
use fuzzy_matcher::skim::SkimMatcherV2;
use regex::Regex;
use std::collections::HashMap;
use std::path::Path;
use std::io::Cursor;
use std::sync::LazyLock;
mod schema;
pub mod schema;
/// Данные ячейке хранящей строку
struct InternalId {
/**
* Индекс строки
*/
/// Индекс строки
row: u32,
/**
* Индекс столбца
*/
/// Индекс столбца
column: u32,
/**
@@ -29,30 +28,25 @@ struct InternalId {
name: String,
}
/// Данные о времени проведения пар из второй колонки расписания
struct InternalTime {
/**
* Временной отрезок проведения пары
*/
/// Временной отрезок проведения пары
time_range: LessonTime,
/**
* Тип пары
*/
/// Тип пары
lesson_type: LessonType,
/**
* Индекс пары
*/
/// Индекс пары
default_index: Option<u32>,
/**
* Рамка ячейки
*/
/// Рамка ячейки
xls_range: ((u32, u32), (u32, u32)),
}
/// Сокращение типа рабочего листа
type WorkSheet = calamine::Range<calamine::Data>;
/// Получение строки из требуемой ячейки
fn get_string_from_cell(worksheet: &WorkSheet, row: u32, col: u32) -> Option<String> {
let cell_data = if let Some(data) = worksheet.get((row as usize, col as usize)) {
data.to_string()
@@ -80,6 +74,7 @@ fn get_string_from_cell(worksheet: &WorkSheet, row: u32, col: u32) -> Option<Str
}
}
/// Получение границ ячейки по её верхней левой координате
fn get_merge_from_start(worksheet: &WorkSheet, row: u32, column: u32) -> ((u32, u32), (u32, u32)) {
let worksheet_end = worksheet.end().unwrap();
@@ -114,7 +109,8 @@ fn get_merge_from_start(worksheet: &WorkSheet, row: u32, column: u32) -> ((u32,
((row, column), (row_end, column_end))
}
fn parse_skeleton(worksheet: &WorkSheet) -> (Vec<InternalId>, Vec<InternalId>) {
/// Получение "скелета" расписания из рабочего листа
fn parse_skeleton(worksheet: &WorkSheet) -> Result<(Vec<InternalId>, Vec<InternalId>), ParseError> {
let range = &worksheet;
let mut is_parsed = false;
@@ -122,8 +118,8 @@ fn parse_skeleton(worksheet: &WorkSheet) -> (Vec<InternalId>, Vec<InternalId>) {
let mut groups: Vec<InternalId> = Vec::new();
let mut days: Vec<InternalId> = Vec::new();
let start = range.start().expect("Could not find start");
let end = range.end().expect("Could not find end");
let start = range.start().ok_or(ParseError::UnknownWorkSheetRange)?;
let end = range.end().ok_or(ParseError::UnknownWorkSheetRange)?;
let mut row = start.0;
while row < end.0 {
@@ -168,15 +164,22 @@ fn parse_skeleton(worksheet: &WorkSheet) -> (Vec<InternalId>, Vec<InternalId>) {
}
}
(days, groups)
Ok((days, groups))
}
/// Результат получения пары из ячейки
enum LessonParseResult {
/// Список пар длинной от одного до двух
///
/// Количество пар будет равно одному, если пара первая за день, иначе будет возвращен список из шаблона перемены и самой пары
Lessons(Vec<Lesson>),
/// Улица на которой находится корпус политехникума
Street(String),
}
trait StringInnerSlice {
/// Получения отрезка строки из строки по начальному и конечному индексу
fn inner_slice(&self, from: usize, to: usize) -> Self;
}
@@ -189,6 +192,7 @@ impl StringInnerSlice for String {
}
}
/// Получение нестандартного типа пары по названию
fn guess_lesson_type(name: &String) -> Option<(String, LessonType)> {
let map: HashMap<String, LessonType> = HashMap::from([
("(консультация)".to_string(), LessonType::Consultation),
@@ -230,19 +234,20 @@ fn guess_lesson_type(name: &String) -> Option<(String, LessonType)> {
}
}
/// Получение пары или улицы из ячейки
fn parse_lesson(
worksheet: &WorkSheet,
day: &mut Day,
day_times: &Vec<InternalTime>,
time: &InternalTime,
column: u32,
) -> LessonParseResult {
) -> Result<LessonParseResult, ParseError> {
let row = time.xls_range.0.0;
let (name, lesson_type) = {
let raw_name_opt = get_string_from_cell(&worksheet, row, column);
if raw_name_opt.is_none() {
return Lessons(Vec::new());
return Ok(Lessons(Vec::new()));
}
let raw_name = raw_name_opt.unwrap();
@@ -251,7 +256,7 @@ fn parse_lesson(
LazyLock::new(|| Regex::new(r"^[А-Я][а-я]+,?\s?[0-9]+$").unwrap());
if OTHER_STREET_RE.is_match(&raw_name) {
return Street(raw_name);
return Ok(Street(raw_name));
}
if let Some(guess) = guess_lesson_type(&raw_name) {
@@ -261,7 +266,7 @@ fn parse_lesson(
}
};
let (default_range, lesson_time): (Option<[u8; 2]>, LessonTime) = {
let (default_range, lesson_time) = || -> Result<(Option<[u8; 2]>, LessonTime), ParseError> {
// check if multi-lesson
let cell_range = get_merge_from_start(worksheet, row, column);
@@ -270,7 +275,7 @@ fn parse_lesson(
.filter(|time| time.xls_range.1.0 == cell_range.1.0)
.collect::<Vec<&InternalTime>>();
let end_time = end_time_arr.first().expect("Unable to find lesson time!");
let end_time = end_time_arr.first().ok_or(ParseError::LessonTimeNotFound)?;
let range: Option<[u8; 2]> = if time.default_index != None {
let default = time.default_index.unwrap() as u8;
@@ -284,10 +289,10 @@ fn parse_lesson(
end: end_time.time_range.end,
};
(range, time)
};
Ok((range, time))
}()?;
let (name, mut subgroups) = parse_name_and_subgroups(&name);
let (name, mut subgroups) = parse_name_and_subgroups(&name)?;
{
let cabinets: Vec<String> = parse_cabinets(worksheet, row, column + 1);
@@ -343,12 +348,12 @@ fn parse_lesson(
};
let prev_lesson = if day.lessons.len() == 0 {
return Lessons(Vec::from([lesson]));
return Ok(Lessons(Vec::from([lesson])));
} else {
&day.lessons[day.lessons.len() - 1]
};
Lessons(Vec::from([
Ok(Lessons(Vec::from([
Lesson {
lesson_type: Break,
default_range: None,
@@ -361,9 +366,10 @@ fn parse_lesson(
group: None,
},
lesson,
]))
])))
}
/// Получение списка кабинетов справа от ячейки пары
fn parse_cabinets(worksheet: &WorkSheet, row: u32, column: u32) -> Vec<String> {
let mut cabinets: Vec<String> = Vec::new();
@@ -381,15 +387,16 @@ fn parse_cabinets(worksheet: &WorkSheet, row: u32, column: u32) -> Vec<String> {
cabinets
}
fn parse_name_and_subgroups(name: &String) -> (String, Vec<LessonSubGroup>) {
/// Получение "чистого" названия пары и списка преподавателей из текста ячейки пары
fn parse_name_and_subgroups(name: &String) -> Result<(String, Vec<LessonSubGroup>), ParseError> {
static LESSON_RE: LazyLock<Regex, fn() -> Regex> =
LazyLock::new(|| Regex::new(r"(?:[А-Я][а-я]+[А-Я]{2}(?:\([0-9][а-я]+\))?)+$").unwrap());
static TEACHER_RE: LazyLock<Regex, fn() -> Regex> =
LazyLock::new(|| Regex::new(r"([А-Я][а-я]+)([А-Я])([А-Я])(?:\(([0-9])[а-я]+\))?").unwrap());
static CLEAN_RE: LazyLock<Regex, fn() -> Regex> =
LazyLock::new(|| Regex::new(r"[\s.,]+").unwrap());
static NAME_CLEAN_RE: LazyLock<Regex, fn() -> Regex> =
LazyLock::new(|| Regex::new(r"\.\s+$").unwrap());
static END_CLEAN_RE: LazyLock<Regex, fn() -> Regex> =
LazyLock::new(|| Regex::new(r"[.\s]+$").unwrap());
let (teachers, lesson_name) = {
let clean_name = CLEAN_RE.replace_all(&name, "").to_string();
@@ -400,11 +407,13 @@ fn parse_name_and_subgroups(name: &String) -> (String, Vec<LessonSubGroup>) {
let capture_name: String = capture_str.chars().take(5).collect();
(
NAME_CLEAN_RE.replace(&capture_str, "").to_string(),
name[0..name.find(&*capture_name).unwrap()].to_string(),
END_CLEAN_RE.replace(&capture_str, "").to_string(),
END_CLEAN_RE
.replace(&name[0..name.find(&*capture_name).unwrap()], "")
.to_string(),
)
} else {
return (NAME_CLEAN_RE.replace(&name, "").to_string(), Vec::new());
return Ok((END_CLEAN_RE.replace(&name, "").to_string(), Vec::new()));
}
};
@@ -419,7 +428,7 @@ fn parse_name_and_subgroups(name: &String) -> (String, Vec<LessonSubGroup>) {
.as_str()
.to_string()
.parse::<u8>()
.expect("Unable to read subgroup index!")
.map_err(|_| ParseError::SubgroupIndexParsingFailed)?
} else {
0
},
@@ -430,7 +439,7 @@ fn parse_name_and_subgroups(name: &String) -> (String, Vec<LessonSubGroup>) {
captures.get(2).unwrap().as_str().to_string(),
captures.get(3).unwrap().as_str().to_string()
),
})
});
}
// фикс, если у кого-то отсутствует индекс подгруппы
@@ -467,28 +476,108 @@ fn parse_name_and_subgroups(name: &String) -> (String, Vec<LessonSubGroup>) {
subgroups.reverse()
}
(lesson_name, subgroups)
Ok((lesson_name, subgroups))
}
pub fn parse_xls(path: &Path) -> HashMap<String, Group> {
let mut workbook: Xls<_> = open_workbook(path).expect("Can't open workbook");
/// Converts the per-group schedules into per-teacher schedules.
///
/// Each teacher gets the same day layout as the groups, populated with every
/// non-break lesson they teach (annotated with the group name) and sorted by
/// the lesson's second default index.
fn convert_groups_to_teachers(
    groups: &HashMap<String, ScheduleEntry>,
) -> HashMap<String, ScheduleEntry> {
    let mut teachers: HashMap<String, ScheduleEntry> = HashMap::new();

    // Template of empty days taken from any group; all groups share the same
    // day layout. Previously `.next().unwrap()` panicked on an empty map.
    let empty_days: Vec<Day> = match groups.values().next() {
        Some(entry) => entry
            .days
            .iter()
            .map(|day| Day {
                name: day.name.clone(),
                street: day.street.clone(),
                date: day.date.clone(),
                lessons: vec![],
            })
            .collect(),
        None => return teachers, // no groups -> no teachers
    };

    for group in groups.values() {
        for (index, day) in group.days.iter().enumerate() {
            for group_lesson in &day.lessons {
                if group_lesson.lesson_type == Break {
                    continue;
                }

                let Some(subgroups) = group_lesson.subgroups.as_ref() else {
                    continue;
                };

                for subgroup in subgroups {
                    // Placeholder teacher used for schedule errors — skip it.
                    if subgroup.teacher == "Ошибка в расписании" {
                        continue;
                    }

                    // Entry API: one lookup instead of contains_key + insert + get_mut.
                    let teacher_entry = teachers
                        .entry(subgroup.teacher.clone())
                        .or_insert_with(|| ScheduleEntry {
                            name: subgroup.teacher.clone(),
                            days: empty_days.to_vec(),
                        });

                    // `index` is valid: every entry's days are cloned from the template.
                    let teacher_day = teacher_entry.days.get_mut(index).unwrap();

                    teacher_day.lessons.push({
                        let mut lesson = group_lesson.clone();
                        lesson.group = Some(group.name.clone());
                        lesson
                    });
                }
            }
        }
    }

    // Sort each day's lessons by the second default index. Lessons without a
    // range sort first instead of panicking (the old code unwrapped the Option).
    teachers.values_mut().for_each(|teacher| {
        teacher.days.iter_mut().for_each(|day| {
            day.lessons
                .sort_by_key(|lesson| lesson.default_range.as_ref().map(|range| range[1]));
        })
    });

    teachers
}
/// Чтение XLS документа из буфера и преобразование его в готовые к использованию расписания
pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
let cursor = Cursor::new(&buffer);
let mut workbook: Xls<_> =
open_workbook_from_rs(cursor).map_err(|e| ParseError::BadXLS(std::sync::Arc::new(e)))?;
let worksheet: WorkSheet = workbook
.worksheets()
.first()
.expect("No worksheet found")
.ok_or(ParseError::NoWorkSheets)?
.1
.to_owned();
let (days_markup, groups_markup) = parse_skeleton(&worksheet);
let (days_markup, groups_markup) = parse_skeleton(&worksheet)?;
let mut groups: HashMap<String, Group> = HashMap::new();
let mut groups: HashMap<String, ScheduleEntry> = HashMap::new();
let mut days_times: Vec<Vec<InternalTime>> = Vec::new();
let saturday_end_row = worksheet.end().unwrap().0;
for group_markup in groups_markup {
let mut group = Group {
let mut group = ScheduleEntry {
name: group_markup.name,
days: Vec::new(),
};
@@ -560,9 +649,7 @@ pub fn parse_xls(path: &Path) -> HashMap<String, Group> {
static TIME_RE: LazyLock<Regex, fn() -> Regex> =
LazyLock::new(|| Regex::new(r"(\d+\.\d+)-(\d+\.\d+)").unwrap());
let parse_res = TIME_RE
.captures(&time)
.expect("Unable to obtain lesson start and end!");
let parse_res = TIME_RE.captures(&time).ok_or(ParseError::GlobalTime)?;
let start_match = parse_res.get(1).unwrap().as_str();
let start_parts: Vec<&str> = start_match.split(".").collect();
@@ -600,7 +687,7 @@ pub fn parse_xls(path: &Path) -> HashMap<String, Group> {
&day_times,
&time,
group_markup.column,
) {
)? {
Lessons(l) => day.lessons.append(l),
Street(s) => day.street = Some(s.to_owned()),
}
@@ -612,16 +699,27 @@ pub fn parse_xls(path: &Path) -> HashMap<String, Group> {
groups.insert(group.name.clone(), group);
}
groups
Ok(ParseResult {
teachers: convert_groups_to_teachers(&groups),
groups,
})
}
#[cfg(test)]
mod tests {
pub mod tests {
use super::*;
pub fn test_result() -> Result<ParseResult, ParseError> {
parse_xls(&include_bytes!("../../schedule.xls").to_vec())
}
#[test]
fn it_works() {
let result = parse_xls(Path::new("../../schedule.xls"));
assert_ne!(result.len(), 0);
fn read() {
let result = test_result();
assert!(result.is_ok());
assert_ne!(result.as_ref().unwrap().groups.len(), 0);
assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
}
}

162
src/parser/schema.rs Normal file
View File

@@ -0,0 +1,162 @@
use chrono::{DateTime, Utc};
use derive_more::Display;
use serde::{Deserialize, Serialize, Serializer};
use serde_repr::{Deserialize_repr, Serialize_repr};
use std::collections::HashMap;
use std::sync::Arc;
use utoipa::ToSchema;
/// Time span of a single lesson.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonTime {
    /// Lesson start
    pub start: DateTime<Utc>,
    /// Lesson end
    pub end: DateTime<Utc>,
}
/// Kind of a schedule entry; serialized as its numeric discriminant.
// NOTE(review): with `Serialize_repr`/`Deserialize_repr` the enum is
// (de)serialized as a number, so `serde(rename_all)` below appears to have
// no effect — confirm and consider removing.
#[derive(Clone, Hash, PartialEq, Debug, Serialize_repr, Deserialize_repr, ToSchema)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[repr(u8)]
pub enum LessonType {
    /// Regular lesson
    Default = 0,
    /// Extra classes
    Additional,
    /// Break between lessons
    Break,
    /// Consultation
    Consultation,
    /// Independent work
    IndependentWork,
    /// Credit test
    Exam,
    /// Graded credit test
    ExamWithGrade,
    /// Examination
    ExamDefault,
}
/// One subgroup of a lesson: its number, room and teacher.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonSubGroup {
    /// Subgroup number
    pub number: u8,
    /// Room, when present
    pub cabinet: Option<String>,
    /// Teacher's full name
    pub teacher: String,
}
/// A single lesson within a day.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct Lesson {
    /// Kind of the lesson
    #[serde(rename = "type")]
    pub lesson_type: LessonType,
    /// Lesson index range, when present
    pub default_range: Option<[u8; 2]>,
    /// Lesson name
    pub name: Option<String>,
    /// Start and end of the lesson
    pub time: LessonTime,
    /// Subgroup list
    #[serde(rename = "subGroups")]
    pub subgroups: Option<Vec<LessonSubGroup>>,
    /// Group name, set when this lesson belongs to a teacher's schedule
    pub group: Option<String>,
}
/// One day of a schedule.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct Day {
    /// Day-of-week name
    pub name: String,
    /// Address of the other campus building, when lessons are held there
    pub street: Option<String>,
    /// Calendar date
    pub date: DateTime<Utc>,
    /// Lessons held on this day
    pub lessons: Vec<Lesson>,
}
/// Schedule of a single group or teacher.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct ScheduleEntry {
    /// Group name or teacher's full name
    pub name: String,
    /// List of six days
    pub days: Vec<Day>,
}
/// Result of parsing an XLS schedule: both group and teacher views.
#[derive(Clone)]
pub struct ParseResult {
    /// Schedules keyed by group name
    pub groups: HashMap<String, ScheduleEntry>,
    /// Schedules keyed by teacher name
    pub teachers: HashMap<String, ScheduleEntry>,
}
/// Errors produced while parsing the XLS schedule document.
#[derive(Debug, Display, Clone, ToSchema)]
pub enum ParseError {
    /// Failure while reading the XLS file itself.
    // Fix: the placeholder must interpolate the field `_0`; the previous
    // `#[display("{}: ...", "_0")]` formatted the literal string "_0".
    #[display("{_0}: Failed to read XLS file.")]
    #[schema(value_type = String)]
    BadXLS(Arc<calamine::XlsError>),
    /// The workbook contains no work sheets.
    #[display("No work sheets found.")]
    NoWorkSheets,
    /// The work sheet is missing boundary data.
    #[display("There is no data on work sheet boundaries.")]
    UnknownWorkSheetRange,
    /// Failed to read lesson start/end times from the header string.
    #[display("Failed to read lesson start and end times from string.")]
    GlobalTime,
    /// No start/end time matching the lesson was found.
    #[display("No start and end times matching the lesson was found.")]
    LessonTimeNotFound,
    /// Failed to parse a subgroup index.
    #[display("Failed to read subgroup index.")]
    SubgroupIndexParsingFailed,
}
impl Serialize for ParseError {
    /// Serializes the error as its SCREAMING_SNAKE_CASE code string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let code = match self {
            ParseError::BadXLS(_) => "BAD_XLS",
            ParseError::NoWorkSheets => "NO_WORK_SHEETS",
            ParseError::UnknownWorkSheetRange => "UNKNOWN_WORK_SHEET_RANGE",
            ParseError::GlobalTime => "GLOBAL_TIME",
            ParseError::LessonTimeNotFound => "LESSON_TIME_NOT_FOUND",
            ParseError::SubgroupIndexParsingFailed => "SUBGROUP_INDEX_PARSING_FAILED",
        };

        serializer.serialize_str(code)
    }
}

3
src/routes/auth/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
pub mod sign_in;
pub mod sign_up;
mod shared;

96
src/routes/auth/shared.rs Normal file
View File

@@ -0,0 +1,96 @@
use crate::utility::jwt::DEFAULT_ALGORITHM;
use jsonwebtoken::errors::ErrorKind;
use jsonwebtoken::{decode, DecodingKey, Validation};
use serde::{Deserialize, Serialize};
use std::env;
use std::sync::LazyLock;
/// Full claim set of a VK ID token.
// NOTE(review): `iis` looks like a VK-specific issuer claim; the standard JWT
// claim is spelled `iss` — confirm against a real VK ID token.
#[derive(Deserialize, Serialize)]
struct TokenData {
    iis: String, // issuer
    sub: i32,    // subject (VK user id)
    app: i32,    // client application id
    exp: i32,    // expiration timestamp
    iat: i32,    // issued-at timestamp
    jti: i32,    // token identifier / type
}
/// Subset of VK ID token claims actually validated by `parse_vk_id`.
#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    sub: String, // subject (VK user id as a string)
    iis: String, // issuer; expected to be "VK"
    jti: i32,    // token type; expected to be 21
    app: i32,    // client application id; must match VK_ID_CLIENT_ID
}
/// Errors produced while validating a VK ID token.
#[derive(Debug, PartialEq)]
pub enum Error {
    /// Unclassified JWT library error.
    JwtError(ErrorKind),
    /// The token signature does not verify against VK's public key.
    InvalidSignature,
    /// The token is malformed or fails validation.
    InvalidToken,
    /// The token has expired.
    Expired,
    /// Issuer claim is not "VK".
    UnknownIssuer(String),
    /// Unexpected token type (`jti`).
    UnknownType(i32),
    /// Token was issued for a different client application.
    UnknownClientId(i32),
}
//noinspection SpellCheckingInspection
/// RSA public key VK ID uses to sign its tokens (PEM).
const VK_PUBLIC_KEY: &str = concat!(
    "-----BEGIN PUBLIC KEY-----\n",
    "MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAvsvJlhFX9Ju/pvCz1frB\n",
    "DgJs592VjdwQuRAmnlJAItyHkoiDIOEocPzgcUBTbDf1plDcTyO2RCkUt0pz0WK6\n",
    "6HNhpJyIfARjaWHeUlv4TpuHXAJJsBKklkU2gf1cjID+40sWWYjtq5dAkXnSJUVA\n",
    "UR+sq0lJ7GmTdJtAr8hzESqGEcSP15PTs7VUdHZ1nkC2XgkuR8KmKAUb388ji1Q4\n",
    "n02rJNOPQgd9r0ac4N2v/yTAFPXumO78N25bpcuWf5vcL9e8THk/U2zt7wf+aAWL\n",
    "748e0pREqNluTBJNZfmhC79Xx6GHtwqHyyduiqfPmejmiujNM/rqnA4e30Tg86Yn\n",
    "cNZ6vLJyF72Eva1wXchukH/aLispbY+EqNPxxn4zzCWaLKHG87gaCxpVv9Tm0jSD\n",
    "2es22NjrUbtb+2pAGnXbyDp2eGUqw0RrTQFZqt/VcmmSCE45FlcZMT28otrwG1ZB\n",
    "kZAb5Js3wLEch3ZfYL8sjhyNRPBmJBrAvzrd8qa3rdUjkC9sKyjGAaHu2MNmFl1Y\n",
    "JFQ3J54tGpkGgJjD7Kz3w0K6OiPDlVCNQN5sqXm24fCw85Pbi8SJiaLTp/CImrs1\n",
    "Z3nHW5q8hljA7OGmqfOP0nZS/5zW9GHPyepsI1rW6CympYLJ15WeNzePxYS5KEX9\n",
    "EncmkSD9b45ge95hJeJZteUCAwEAAQ==\n",
    "-----END PUBLIC KEY-----"
);
/// VK ID application (client) id, read lazily from the environment.
/// Panics on first access if `VK_ID_CLIENT_ID` is unset or not an i32.
static VK_ID_CLIENT_ID: LazyLock<i32> = LazyLock::new(|| {
    env::var("VK_ID_CLIENT_ID")
        .expect("VK_ID_CLIENT_ID must be set")
        .parse::<i32>()
        .expect("VK_ID_CLIENT_ID must be i32")
});
pub fn parse_vk_id(token_str: &String) -> Result<i32, Error> {
let dkey = DecodingKey::from_rsa_pem(VK_PUBLIC_KEY.as_bytes()).unwrap();
match decode::<Claims>(&token_str, &dkey, &Validation::new(DEFAULT_ALGORITHM)) {
Ok(token_data) => {
let claims = token_data.claims;
if claims.iis != "VK" {
Err(Error::UnknownIssuer(claims.iis))
} else if claims.jti != 21 {
Err(Error::UnknownType(claims.jti))
} else if claims.app != *VK_ID_CLIENT_ID {
Err(Error::UnknownClientId(claims.app))
} else {
match claims.sub.parse::<i32>() {
Ok(sub) => Ok(sub),
Err(_) => Err(Error::InvalidToken),
}
}
}
Err(err) => Err(match err.into_kind() {
ErrorKind::InvalidToken => Error::InvalidToken,
ErrorKind::InvalidSignature => Error::InvalidSignature,
ErrorKind::InvalidAlgorithmName => Error::InvalidToken,
ErrorKind::MissingRequiredClaim(_) => Error::InvalidToken,
ErrorKind::ExpiredSignature => Error::Expired,
ErrorKind::InvalidAlgorithm => Error::InvalidToken,
ErrorKind::MissingAlgorithm => Error::InvalidToken,
ErrorKind::Base64(_) => Error::InvalidToken,
ErrorKind::Json(_) => Error::InvalidToken,
ErrorKind::Utf8(_) => Error::InvalidToken,
kind => Error::JwtError(kind),
}),
}
}

227
src/routes/auth/sign_in.rs Normal file
View File

@@ -0,0 +1,227 @@
use self::schema::*;
use crate::database::driver;
use crate::database::models::User;
use crate::routes::auth::shared::parse_vk_id;
use crate::routes::auth::sign_in::schema::SignInData::{Default, Vk};
use crate::routes::schema::user::UserResponse;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use crate::{utility, AppState};
use actix_web::{post, web};
use diesel::SaveChangesDsl;
use std::ops::DerefMut;
use web::Json;
/// Shared sign-in flow for both the password and the VK authorization paths.
///
/// Looks the user up by the supplied credentials, verifies the password for
/// the default flow, then rotates and persists a fresh access token.
async fn sign_in(
    data: SignInData,
    app_state: &web::Data<AppState>,
) -> Result<UserResponse, ErrorCode> {
    let user = match &data {
        Default(data) => driver::users::get_by_username(&app_state.database, &data.username),
        Vk(id) => driver::users::get_by_vk_id(&app_state.database, *id),
    };

    // Unknown user and wrong password are deliberately indistinguishable.
    let Ok(mut user) = user else {
        return Err(ErrorCode::IncorrectCredentials);
    };

    if let Default(data) = data {
        // A bcrypt failure (e.g. malformed stored hash) is treated the same
        // as a password mismatch, exactly like the previous nested match.
        if !bcrypt::verify(&data.password, &user.password).unwrap_or(false) {
            return Err(ErrorCode::IncorrectCredentials);
        }
    }

    let mut lock = app_state.connection();
    let conn = lock.deref_mut();

    // Issue a fresh JWT on every successful sign-in.
    user.access_token = utility::jwt::encode(&user.id);
    user.save_changes::<User>(conn)
        .expect("Failed to update user");

    Ok(user.into())
}
/// Sign-in with username and password.
#[utoipa::path(responses(
    (status = OK, body = UserResponse),
    (status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>)
))]
#[post("/sign-in")]
pub async fn sign_in_default(data: Json<Request>, app_state: web::Data<AppState>) -> ServiceResponse {
    sign_in(Default(data.into_inner()), &app_state).await.into()
}
/// Sign-in with a VK ID access token.
#[utoipa::path(responses(
    (status = OK, body = UserResponse),
    (status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>)
))]
#[post("/sign-in-vk")]
pub async fn sign_in_vk(data_json: Json<vk::Request>, app_state: web::Data<AppState>) -> ServiceResponse {
    let data = data_json.into_inner();

    // Validate the VK token first; only then resolve the linked account.
    match parse_vk_id(&data.access_token) {
        Ok(id) => sign_in(Vk(id), &app_state).await.into(),
        Err(_) => ErrorCode::InvalidVkAccessToken.into_response(),
    }
}
mod schema {
    use crate::routes::schema::user::UserResponse;
    use actix_macros::{IntoResponseError, StatusCode};
    use serde::{Deserialize, Serialize};
    use utoipa::ToSchema;

    /// Request body for password sign-in.
    #[derive(Deserialize, Serialize, ToSchema)]
    #[schema(as = SignIn::Request)]
    pub struct Request {
        /// Username
        #[schema(examples("n08i40k"))]
        pub username: String,
        /// Password
        pub password: String,
    }

    pub mod vk {
        use serde::{Deserialize, Serialize};
        use utoipa::ToSchema;

        /// Request body for VK ID sign-in.
        #[derive(Serialize, Deserialize, ToSchema)]
        #[serde(rename_all = "camelCase")]
        #[schema(as = SignInVk::Request)]
        pub struct Request {
            /// VK ID access token
            pub access_token: String,
        }
    }

    /// Response type shared by both sign-in endpoints.
    pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;

    /// Error codes returned by the sign-in endpoints (HTTP 406).
    #[derive(Serialize, ToSchema, Clone, IntoResponseError, StatusCode)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = SignIn::ErrorCode)]
    #[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
    pub enum ErrorCode {
        /// Incorrect username or password
        IncorrectCredentials,
        /// Invalid VK ID token
        InvalidVkAccessToken,
    }

    /// Internal
    /// Credentials variant selecting the sign-in flow.
    pub enum SignInData {
        /// Username and password
        Default(Request),
        /// Linked VK account identifier
        Vk(i32),
    }
}
#[cfg(test)]
mod tests {
    use super::schema::*;
    use crate::database::driver;
    use crate::database::models::{User, UserRole};
    use crate::routes::auth::sign_in::sign_in_default;
    use crate::test_env::tests::{static_app_state, test_app_state, test_env};
    use crate::utility;
    use actix_test::test_app;
    use actix_web::dev::ServiceResponse;
    use actix_web::http::Method;
    use actix_web::http::StatusCode;
    use actix_web::test;
    use sha1::{Digest, Sha1};
    use std::fmt::Write;

    /// Posts the given credentials to /sign-in on a fresh test app.
    async fn sign_in_client(data: Request) -> ServiceResponse {
        let app = test_app(test_app_state(), sign_in_default).await;

        let request = test::TestRequest::with_uri("/sign-in")
            .method(Method::POST)
            .set_json(data)
            .to_request();

        test::call_service(&app, request).await
    }

    /// Inserts a user whose id is deterministically derived from the
    /// username (first 12 bytes of its SHA-1 digest, hex-encoded — the
    /// same width as an ObjectId).
    fn prepare(username: String) {
        let id = {
            let digest = {
                let mut sha = Sha1::new();
                sha.update(&username);
                sha.finalize()
            };

            let mut hex = String::with_capacity(24);
            for byte in &digest[..12] {
                write!(&mut hex, "{:02x}", byte).unwrap();
            }
            hex
        };

        test_env();

        let app_state = static_app_state();

        driver::users::insert_or_ignore(
            &app_state.database,
            &User {
                id: id.clone(),
                username,
                password: bcrypt::hash("example".to_string(), bcrypt::DEFAULT_COST).unwrap(),
                vk_id: None,
                access_token: utility::jwt::encode(&id),
                group: "ИС-214/23".to_string(),
                role: UserRole::Student,
                version: "1.0.0".to_string(),
            },
        )
        .unwrap();
    }

    #[actix_web::test]
    async fn sign_in_ok() {
        prepare("test::sign_in_ok".to_string());

        let response = sign_in_client(Request {
            username: "test::sign_in_ok".to_string(),
            password: "example".to_string(),
        })
        .await;

        assert_eq!(response.status(), StatusCode::OK);
    }

    #[actix_web::test]
    async fn sign_in_err() {
        prepare("test::sign_in_err".to_string());

        // Unknown username must be rejected.
        let invalid_username = sign_in_client(Request {
            username: "test::sign_in_err::username".to_string(),
            password: "example".to_string(),
        })
        .await;
        assert_eq!(invalid_username.status(), StatusCode::NOT_ACCEPTABLE);

        // Known username with a wrong password must be rejected too.
        let invalid_password = sign_in_client(Request {
            username: "test::sign_in_err".to_string(),
            password: "bad_password".to_string(),
        })
        .await;
        assert_eq!(invalid_password.status(), StatusCode::NOT_ACCEPTABLE);
    }
}

358
src/routes/auth/sign_up.rs Normal file
View File

@@ -0,0 +1,358 @@
use self::schema::*;
use crate::AppState;
use crate::database::driver;
use crate::database::models::UserRole;
use crate::routes::auth::shared::{Error, parse_vk_id};
use crate::routes::schema::user::UserResponse;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{post, web};
use rand::{Rng, rng};
use web::Json;
/// Shared registration flow for both the password and the VK endpoints.
///
/// Validates role, group, username and (optionally) VK-account uniqueness,
/// then persists the new user and returns its public representation.
async fn sign_up(
    data: SignUpData,
    app_state: &web::Data<AppState>,
) -> Result<UserResponse, ErrorCode> {
    // Privileged roles cannot be self-assigned at registration.
    if data.role == UserRole::Admin {
        return Err(ErrorCode::DisallowedRole);
    }

    // Validate the group against the parsed schedule, if one is available.
    // The lock is scoped so it is released before the database calls below.
    {
        let schedule_opt = app_state.schedule.lock().unwrap();
        if let Some(schedule) = &*schedule_opt {
            if !schedule.data.groups.contains_key(&data.group) {
                return Err(ErrorCode::InvalidGroupName);
            }
        }
    }

    // Reject duplicate usernames.
    if driver::users::contains_by_username(&app_state.database, &data.username) {
        return Err(ErrorCode::UsernameAlreadyExists);
    }

    // Reject VK accounts that are already linked to another user.
    if let Some(id) = data.vk_id {
        if driver::users::contains_by_vk_id(&app_state.database, id) {
            return Err(ErrorCode::VkAlreadyExists);
        }
    }

    let user = data.into();

    driver::users::insert(&app_state.database, &user).expect("Failed to insert new user");

    // The redundant `.into()` on the Ok value (an identity conversion)
    // has been removed.
    Ok(UserResponse::from(&user))
}
#[utoipa::path(responses(
(status = OK, body = UserResponse),
(status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>)
))]
#[post("/sign-up")]
pub async fn sign_up_default(
data_json: Json<Request>,
app_state: web::Data<AppState>,
) -> ServiceResponse {
let data = data_json.into_inner();
sign_up(
SignUpData {
username: data.username,
password: data.password,
vk_id: None,
group: data.group,
role: data.role,
version: data.version,
},
&app_state,
)
.await
.into()
}
#[utoipa::path(responses(
    (status = OK, body = UserResponse),
    (status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>)
))]
#[post("/sign-up-vk")]
pub async fn sign_up_vk(
    data_json: Json<vk::Request>,
    app_state: web::Data<AppState>,
) -> ServiceResponse {
    // VK-based registration: the client never supplies a password, so a
    // random 16-character alphanumeric one is generated server-side.
    let data = data_json.into_inner();

    let vk_user_id = match parse_vk_id(&data.access_token) {
        Ok(id) => id,
        Err(err) => {
            // An expired token is an expected client-side condition;
            // anything else is logged for investigation.
            if err != Error::Expired {
                eprintln!("Failed to parse vk id token!");
                eprintln!("{:?}", err);
            }
            return ErrorCode::InvalidVkAccessToken.into_response();
        }
    };

    let random_password: String = rng()
        .sample_iter(&rand::distr::Alphanumeric)
        .take(16)
        .map(char::from)
        .collect();

    sign_up(
        SignUpData {
            username: data.username,
            password: random_password,
            vk_id: Some(vk_user_id),
            group: data.group,
            role: data.role,
            version: data.version,
        },
        &app_state,
    )
    .await
    .into()
}
mod schema {
    // Request schema, error codes and internal data types for sign-up.
    // NOTE: the Russian `///` doc comments are emitted into the
    // utoipa-generated OpenAPI description at runtime and are kept verbatim.
    use crate::database::models::{User, UserRole};
    use crate::routes::schema::user::UserResponse;
    use crate::utility;
    use actix_macros::{IntoResponseError, StatusCode};
    use objectid::ObjectId;
    use serde::{Deserialize, Serialize};

    /// WEB
    // Body of POST /sign-up.
    #[derive(Serialize, Deserialize, utoipa::ToSchema)]
    #[schema(as = SignUp::Request)]
    pub struct Request {
        /// Имя пользователя
        #[schema(examples("n08i40k"))]
        pub username: String,
        /// Пароль
        pub password: String,
        /// Группа
        #[schema(examples("ИС-214/23"))]
        pub group: String,
        /// Роль
        pub role: UserRole,
        /// Версия установленного приложения Polytechnic+
        #[schema(examples("3.0.0"))]
        pub version: String,
    }

    pub mod vk {
        // Body of POST /sign-up-vk: no password — the server generates one.
        use crate::database::models::UserRole;
        use serde::{Deserialize, Serialize};

        #[derive(Serialize, Deserialize, utoipa::ToSchema)]
        #[serde(rename_all = "camelCase")]
        #[schema(as = SignUpVk::Request)]
        pub struct Request {
            /// Токен VK ID
            pub access_token: String,
            /// Имя пользователя
            #[schema(examples("n08i40k"))]
            pub username: String,
            /// Группа
            #[schema(examples("ИС-214/23"))]
            pub group: String,
            /// Роль
            pub role: UserRole,
            /// Версия установленного приложения Polytechnic+
            #[schema(examples("3.0.0"))]
            pub version: String,
        }
    }

    // Unified handler return type: Ok -> user payload, Err -> error code.
    pub type ServiceResponse = crate::routes::schema::Response<UserResponse, ErrorCode>;

    // Every sign-up failure is served as 406 NOT_ACCEPTABLE (see attribute).
    #[derive(Clone, Serialize, utoipa::ToSchema, IntoResponseError, StatusCode)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = SignUp::ErrorCode)]
    #[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
    pub enum ErrorCode {
        /// Передана роль ADMIN
        DisallowedRole,
        /// Неизвестное название группы
        InvalidGroupName,
        /// Пользователь с таким именем уже зарегистрирован
        UsernameAlreadyExists,
        /// Недействительный токен VK ID
        InvalidVkAccessToken,
        /// Пользователь с таким аккаунтом VK уже зарегистрирован
        VkAlreadyExists,
    }

    /// Internal
    /// Данные для регистрации
    pub struct SignUpData {
        /// Имя пользователя
        pub username: String,
        /// Пароль
        ///
        /// Должен присутствовать даже если регистрация происходит с помощью токена VK ID
        pub password: String,
        /// Идентификатор аккаунта VK
        pub vk_id: Option<i32>,
        /// Группа
        pub group: String,
        /// Роль
        pub role: UserRole,
        /// Версия установленного приложения Polytechnic+
        pub version: String,
    }

    // `From` is preferred over a hand-written `Into`: the standard blanket
    // impl derives `Into<User> for SignUpData` from it automatically, so
    // existing `data.into()` call sites keep working unchanged.
    impl From<SignUpData> for User {
        fn from(data: SignUpData) -> Self {
            // A fresh ObjectId doubles as the user id and as the subject of
            // the initial JWT access token.
            let id = ObjectId::new().unwrap().to_string();
            let access_token = utility::jwt::encode(&id);

            User {
                id,
                username: data.username,
                password: bcrypt::hash(data.password, bcrypt::DEFAULT_COST).unwrap(),
                vk_id: data.vk_id,
                access_token,
                group: data.group,
                role: data.role,
                version: data.version,
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::database::driver;
    use crate::database::models::UserRole;
    use crate::routes::auth::sign_up::schema::Request;
    use crate::routes::auth::sign_up::sign_up_default;
    use crate::test_env::tests::{static_app_state, test_app_state, test_env};
    use actix_test::test_app;
    use actix_web::dev::ServiceResponse;
    use actix_web::http::Method;
    use actix_web::http::StatusCode;
    use actix_web::test;

    /// Subset of `Request` that varies between tests; password and version
    /// are fixed by `sign_up_client`.
    struct SignUpPartial {
        username: String,
        group: String,
        role: UserRole,
    }

    /// Posts a registration request to /sign-up on a fresh test app.
    async fn sign_up_client(data: SignUpPartial) -> ServiceResponse {
        let app = test_app(test_app_state(), sign_up_default).await;

        // `data` is owned here, so its fields are moved into the request
        // body directly — the `.clone()` calls in the original were redundant.
        let req = test::TestRequest::with_uri("/sign-up")
            .method(Method::POST)
            .set_json(Request {
                username: data.username,
                password: "example".to_string(),
                group: data.group,
                role: data.role,
                version: "1.0.0".to_string(),
            })
            .to_request();

        test::call_service(&app, req).await
    }

    #[actix_web::test]
    async fn sign_up_valid() {
        // prepare: make sure the username is free.
        test_env();

        let app_state = static_app_state();

        driver::users::delete_by_username(&app_state.database, &"test::sign_up_valid".to_string());

        // test
        let resp = sign_up_client(SignUpPartial {
            username: "test::sign_up_valid".to_string(),
            group: "ИС-214/23".to_string(),
            role: UserRole::Student,
        })
        .await;

        assert_eq!(resp.status(), StatusCode::OK);
    }

    #[actix_web::test]
    async fn sign_up_multiple() {
        // prepare: make sure the username is free.
        test_env();

        let app_state = static_app_state();

        driver::users::delete_by_username(
            &app_state.database,
            &"test::sign_up_multiple".to_string(),
        );

        // First registration must succeed…
        let create = sign_up_client(SignUpPartial {
            username: "test::sign_up_multiple".to_string(),
            group: "ИС-214/23".to_string(),
            role: UserRole::Student,
        })
        .await;

        assert_eq!(create.status(), StatusCode::OK);

        // …and a duplicate username must be rejected.
        let resp = sign_up_client(SignUpPartial {
            username: "test::sign_up_multiple".to_string(),
            group: "ИС-214/23".to_string(),
            role: UserRole::Student,
        })
        .await;

        assert_eq!(resp.status(), StatusCode::NOT_ACCEPTABLE);
    }

    #[actix_web::test]
    async fn sign_up_invalid_role() {
        test_env();

        // test: self-assigned ADMIN role is forbidden.
        let resp = sign_up_client(SignUpPartial {
            username: "test::sign_up_invalid_role".to_string(),
            group: "ИС-214/23".to_string(),
            role: UserRole::Admin,
        })
        .await;

        assert_eq!(resp.status(), StatusCode::NOT_ACCEPTABLE);
    }

    #[actix_web::test]
    async fn sign_up_invalid_group() {
        test_env();

        // test: a group absent from the schedule is rejected.
        let resp = sign_up_client(SignUpPartial {
            username: "test::sign_up_invalid_group".to_string(),
            group: "invalid_group".to_string(),
            role: UserRole::Student,
        })
        .await;

        assert_eq!(resp.status(), StatusCode::NOT_ACCEPTABLE);
    }
}

4
src/routes/mod.rs Normal file
View File

@@ -0,0 +1,4 @@
pub mod auth;
pub mod users;
pub mod schedule;
mod schema;

View File

@@ -0,0 +1,23 @@
use crate::AppState;
use crate::routes::schedule::schema::CacheStatus;
use actix_web::{get, web};
#[utoipa::path(responses(
    (status = OK, body = CacheStatus),
))]
#[get("/cache-status")]
pub async fn get_cache_status(app_state: web::Data<AppState>) -> CacheStatus {
    // Check for a parsed schedule and release the lock immediately: the
    // `CacheStatus::from(&app_state)` conversion below re-locks it, so the
    // guard must not outlive this statement.
    // (Simplified from `lock().as_ref().map(|res| res.is_some()).unwrap()`,
    // which mapped over the lock Result to the same effect.)
    let has_schedule = app_state.schedule.lock().unwrap().is_some();

    // The trailing identity `.into()` of the original has been dropped.
    match has_schedule {
        true => CacheStatus::from(&app_state),
        false => CacheStatus::default(),
    }
}

View File

@@ -0,0 +1,99 @@
use self::schema::*;
use crate::AppState;
use crate::database::models::User;
use crate::extractors::base::SyncExtractor;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web};
#[utoipa::path(responses(
    (status = OK, body = Response),
    (
        status = SERVICE_UNAVAILABLE,
        body = ResponseError<ErrorCode>,
        example = json!({
            "code": "NO_SCHEDULE",
            "message": "Schedule not parsed yet."
        })
    ),
    (
        status = NOT_FOUND,
        body = ResponseError<ErrorCode>,
        example = json!({
            "code": "NOT_FOUND",
            "message": "Required group not found."
        })
    ),
))]
#[get("/group")]
pub async fn get_group(
    user: SyncExtractor<User>,
    app_state: web::Data<AppState>,
) -> ServiceResponse {
    // Prevent thread lock
    let schedule_lock = app_state.schedule.lock().unwrap();

    // Bail out early while no schedule has been parsed yet.
    let Some(schedule) = schedule_lock.as_ref() else {
        return ErrorCode::NoSchedule.into_response();
    };

    // Look up the schedule entry for the authenticated user's group.
    match schedule.data.groups.get(&user.into_inner().group) {
        Some(entry) => Ok(entry.clone().into()).into(),
        None => ErrorCode::NotFound.into_response(),
    }
}
mod schema {
    // Response/error schema for GET /group.
    // NOTE: the Russian `///` doc comments feed the utoipa-generated
    // OpenAPI description at runtime and are kept verbatim.
    use crate::parser::schema::ScheduleEntry;
    use actix_macros::{IntoResponseErrorNamed, StatusCode};
    use chrono::{DateTime, NaiveDateTime, Utc};
    use derive_more::Display;
    use serde::Serialize;
    use utoipa::ToSchema;

    pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;

    #[derive(Serialize, ToSchema)]
    #[schema(as = GetGroup::Response)]
    #[serde(rename_all = "camelCase")]
    pub struct Response {
        /// Расписание группы
        pub group: ScheduleEntry,
        /// Устаревшая переменная
        ///
        /// По умолчанию возвращается пустой список
        #[deprecated = "Will be removed in future versions"]
        pub updated: Vec<i32>,
        /// Устаревшая переменная
        ///
        /// По умолчанию начальная дата по Unix
        #[deprecated = "Will be removed in future versions"]
        pub updated_at: DateTime<Utc>,
    }

    // The deprecated fields are still populated (with neutral defaults)
    // for backwards compatibility with older clients.
    #[allow(deprecated)]
    impl From<ScheduleEntry> for Response {
        fn from(group: ScheduleEntry) -> Self {
            Self {
                group,
                updated: Vec::new(),
                // Unix epoch as a placeholder "last updated" timestamp.
                updated_at: NaiveDateTime::default().and_utc(),
            }
        }
    }

    // Per-variant status codes: 503 while no schedule exists, 404 for an
    // unknown group.
    #[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = GroupSchedule::ErrorCode)]
    pub enum ErrorCode {
        /// Расписания ещё не получены
        #[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
        #[display("Schedule not parsed yet.")]
        NoSchedule,
        /// Группа не найдена
        #[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
        #[display("Required group not found.")]
        NotFound,
    }
}

View File

@@ -0,0 +1,48 @@
use self::schema::*;
use crate::AppState;
use crate::routes::schedule::schema::ErrorCode;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web};
#[utoipa::path(responses(
    (status = OK, body = Response),
    (status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>),
))]
#[get("/group-names")]
pub async fn get_group_names(app_state: web::Data<AppState>) -> ServiceResponse {
    // Prevent thread lock
    let schedule_lock = app_state.schedule.lock().unwrap();

    // Both arms already produce a ServiceResponse; the trailing `.into()`
    // of the original was an identity conversion (clippy: useless_conversion)
    // and has been removed.
    match schedule_lock.as_ref() {
        None => ErrorCode::NoSchedule.into_response(),
        Some(schedule) => {
            // Group names are returned sorted alphabetically.
            let mut names: Vec<String> = schedule.data.groups.keys().cloned().collect();
            names.sort();

            Ok(names.into()).into()
        }
    }
}
mod schema {
    // Response schema for GET /group-names; errors reuse the shared
    // schedule ErrorCode (503 only).
    use crate::routes::schedule::schema::ErrorCode;
    use serde::Serialize;
    use utoipa::ToSchema;

    pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;

    #[derive(Serialize, ToSchema)]
    #[schema(as = GetGroupNames::Response)]
    pub struct Response {
        /// Список названий групп отсортированный в алфавитном порядке
        #[schema(examples(json!(["ИС-214/23"])))]
        pub names: Vec<String>,
    }

    // Convenience wrapper so the handler can call `names.into()`.
    impl From<Vec<String>> for Response {
        fn from(names: Vec<String>) -> Self {
            Self { names }
        }
    }
}

View File

@@ -0,0 +1,25 @@
use self::schema::*;
use crate::app_state::AppState;
use crate::routes::schedule::schema::{ErrorCode, ScheduleView};
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web};
#[utoipa::path(responses(
    (status = OK, body = ScheduleView),
    (status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>)
))]
#[get("/")]
pub async fn get_schedule(app_state: web::Data<AppState>) -> ServiceResponse {
    // `ScheduleView::try_from` snapshots the currently cached schedule; its
    // only failure mode is an absent (not yet parsed) schedule. The original
    // re-matched the single-variant error enum redundantly — converting the
    // error code directly is equivalent.
    match ScheduleView::try_from(&app_state) {
        Ok(view) => Ok(view).into(),
        Err(code) => code.into_response(),
    }
}
mod schema {
    // Handler return type: Ok -> full schedule view, Err -> shared error code.
    use crate::routes::schedule::schema::{ErrorCode, ScheduleView};

    pub type ServiceResponse = crate::routes::schema::Response<ScheduleView, ErrorCode>;
}

View File

@@ -0,0 +1,97 @@
use self::schema::*;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use crate::AppState;
use actix_web::{get, web};
#[utoipa::path(responses(
    (status = OK, body = Response),
    (
        status = SERVICE_UNAVAILABLE,
        body = ResponseError<ErrorCode>,
        example = json!({
            "code": "NO_SCHEDULE",
            "message": "Schedule not parsed yet."
        })
    ),
    (
        status = NOT_FOUND,
        body = ResponseError<ErrorCode>,
        example = json!({
            "code": "NOT_FOUND",
            "message": "Required teacher not found."
        })
    ),
))]
#[get("/teacher/{name}")]
pub async fn get_teacher(
    name: web::Path<String>,
    app_state: web::Data<AppState>,
) -> ServiceResponse {
    // Prevent thread lock
    let schedule_lock = app_state.schedule.lock().unwrap();

    // Bail out early while no schedule has been parsed yet.
    let Some(schedule) = schedule_lock.as_ref() else {
        return ErrorCode::NoSchedule.into_response();
    };

    // Look up the schedule entry for the requested teacher name.
    match schedule.data.teachers.get(&name.into_inner()) {
        Some(entry) => Ok(entry.clone().into()).into(),
        None => ErrorCode::NotFound.into_response(),
    }
}
mod schema {
    // Response/error schema for GET /teacher/{name}.
    // NOTE: the Russian `///` doc comments feed the utoipa-generated
    // OpenAPI description at runtime and are kept verbatim.
    use crate::parser::schema::ScheduleEntry;
    use actix_macros::{IntoResponseErrorNamed, StatusCode};
    use chrono::{DateTime, NaiveDateTime, Utc};
    use derive_more::Display;
    use serde::Serialize;
    use utoipa::ToSchema;

    pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;

    #[derive(Serialize, ToSchema)]
    #[schema(as = GetTeacher::Response)]
    #[serde(rename_all = "camelCase")]
    pub struct Response {
        /// Расписание преподавателя
        pub teacher: ScheduleEntry,
        /// Устаревшая переменная
        ///
        /// По умолчанию возвращается пустой список
        #[deprecated = "Will be removed in future versions"]
        pub updated: Vec<i32>,
        /// Устаревшая переменная
        ///
        /// По умолчанию начальная дата по Unix
        #[deprecated = "Will be removed in future versions"]
        pub updated_at: DateTime<Utc>,
    }

    // The deprecated fields are still populated (with neutral defaults)
    // for backwards compatibility with older clients.
    #[allow(deprecated)]
    impl From<ScheduleEntry> for Response {
        fn from(teacher: ScheduleEntry) -> Self {
            Self {
                teacher,
                updated: Vec::new(),
                // Unix epoch as a placeholder "last updated" timestamp.
                updated_at: NaiveDateTime::default().and_utc(),
            }
        }
    }

    // Per-variant status codes: 503 while no schedule exists, 404 for an
    // unknown teacher.
    #[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
    #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
    #[schema(as = TeacherSchedule::ErrorCode)]
    pub enum ErrorCode {
        /// Расписания ещё не получены
        #[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
        #[display("Schedule not parsed yet.")]
        NoSchedule,
        /// Преподаватель не найден
        #[status_code = "actix_web::http::StatusCode::NOT_FOUND"]
        #[display("Required teacher not found.")]
        NotFound,
    }
}

View File

@@ -0,0 +1,48 @@
use self::schema::*;
use crate::AppState;
use crate::routes::schedule::schema::ErrorCode;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use actix_web::{get, web};
#[utoipa::path(responses(
    (status = OK, body = Response),
    (status = SERVICE_UNAVAILABLE, body = ResponseError<ErrorCode>),
))]
#[get("/teacher-names")]
pub async fn get_teacher_names(app_state: web::Data<AppState>) -> ServiceResponse {
    // Prevent thread lock
    let schedule_lock = app_state.schedule.lock().unwrap();

    // Both arms already produce a ServiceResponse; the trailing `.into()`
    // of the original was an identity conversion (clippy: useless_conversion)
    // and has been removed.
    match schedule_lock.as_ref() {
        None => ErrorCode::NoSchedule.into_response(),
        Some(schedule) => {
            // Teacher names are returned sorted alphabetically.
            let mut names: Vec<String> = schedule.data.teachers.keys().cloned().collect();
            names.sort();

            Ok(names.into()).into()
        }
    }
}
mod schema {
    // Response schema for GET /teacher-names; errors reuse the shared
    // schedule ErrorCode (503 only).
    use crate::routes::schedule::schema::ErrorCode;
    use serde::Serialize;
    use utoipa::ToSchema;

    pub type ServiceResponse = crate::routes::schema::Response<Response, ErrorCode>;

    #[derive(Serialize, ToSchema)]
    #[schema(as = GetTeacherNames::Response)]
    pub struct Response {
        /// Список имён преподавателей отсортированный в алфавитном порядке
        #[schema(examples(json!(["Хомченко Н.Е."])))]
        pub names: Vec<String>,
    }

    // Convenience wrapper so the handler can call `names.into()`.
    impl From<Vec<String>> for Response {
        fn from(names: Vec<String>) -> Self {
            Self { names }
        }
    }
}

View File

@@ -0,0 +1,8 @@
pub mod get_cache_status;
pub mod get_schedule;
pub mod get_group;
pub mod get_group_names;
pub mod get_teacher;
pub mod get_teacher_names;
mod schema;
pub mod update_download_url;

View File

@@ -0,0 +1,107 @@
use crate::app_state::{AppState, Schedule};
use crate::parser::schema::ScheduleEntry;
use actix_macros::{IntoResponseErrorNamed, ResponderJson, StatusCode};
use actix_web::web;
use chrono::{DateTime, Duration, Utc};
use derive_more::Display;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use utoipa::ToSchema;
/// Ответ от сервера с расписаниями
// Owned snapshot of the cached schedule, serialized camelCase for clients.
// Fields are private: the struct is only built via TryFrom below.
#[derive(Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ScheduleView {
    /// ETag расписания на сервере политехникума
    etag: String,
    /// Дата обновления расписания на сайте политехникума
    uploaded_at: DateTime<Utc>,
    /// Дата последнего скачивания расписания с сервера политехникума
    downloaded_at: DateTime<Utc>,
    /// Расписание групп
    groups: HashMap<String, ScheduleEntry>,
    /// Расписание преподавателей
    teachers: HashMap<String, ScheduleEntry>,
}
// Shared schedule error: every variant is served as 503 SERVICE_UNAVAILABLE.
#[derive(Clone, Serialize, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
#[status_code = "actix_web::http::StatusCode::SERVICE_UNAVAILABLE"]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = ScheduleShared::ErrorCode)]
pub enum ErrorCode {
    /// Расписания ещё не получены
    #[display("Schedule not parsed yet.")]
    NoSchedule,
}
impl TryFrom<&web::Data<AppState>> for ScheduleView {
    type Error = ErrorCode;

    /// Snapshots the cached schedule into an owned view, or fails with
    /// `NoSchedule` while nothing has been parsed yet.
    fn try_from(app_state: &web::Data<AppState>) -> Result<Self, Self::Error> {
        // Clone under the lock so the guard is released before the view
        // is assembled.
        let snapshot = app_state.schedule.lock().unwrap().clone();

        match snapshot {
            Some(schedule) => Ok(Self {
                etag: schedule.etag,
                uploaded_at: schedule.updated_at,
                downloaded_at: schedule.parsed_at,
                groups: schedule.data.groups,
                teachers: schedule.data.teachers,
            }),
            None => Err(ErrorCode::NoSchedule),
        }
    }
}
/// Статус кешированного расписаний
// Lightweight status payload; timestamps are Unix seconds (i64).
#[derive(Serialize, Deserialize, ToSchema, ResponderJson)]
#[serde(rename_all = "camelCase")]
pub struct CacheStatus {
    /// Хеш расписаний
    pub cache_hash: String,
    /// Требуется ли обновить ссылку на расписание
    pub cache_update_required: bool,
    /// Дата последнего обновления кеша
    pub last_cache_update: i64,
    /// Дата обновления кешированного расписания
    ///
    /// Определяется сервером политехникума
    pub last_schedule_update: i64,
}
// Implementing the standard `Default` trait (instead of an inherent
// `default` method that shadowed it) keeps every existing
// `CacheStatus::default()` call site working while also enabling generic
// `T: Default` usage.
impl Default for CacheStatus {
    /// Placeholder status served while no schedule has been parsed yet:
    /// an all-zero hash, epoch timestamps, and an update flagged as required.
    fn default() -> Self {
        CacheStatus {
            cache_hash: "0000000000000000000000000000000000000000".to_string(),
            cache_update_required: true,
            last_cache_update: 0,
            last_schedule_update: 0,
        }
    }
}
impl From<&web::Data<AppState>> for CacheStatus {
    /// Builds the status from the globally cached schedule.
    ///
    /// Panics if no schedule has been parsed yet — callers are expected to
    /// check availability first (see `get_cache_status`).
    fn from(value: &web::Data<AppState>) -> Self {
        let schedule_lock = value.schedule.lock().unwrap();
        let schedule = schedule_lock.as_ref().unwrap();

        CacheStatus::from(schedule)
    }
}
impl From<&Schedule> for CacheStatus {
    /// Derives the client-facing cache status from a cached schedule.
    fn from(value: &Schedule) -> Self {
        Self {
            cache_hash: value.hash(),
            // A cache fetched more than five minutes ago should be refreshed.
            // NOTE(review): the original computed `fetched_at - Utc::now()`,
            // which is never positive for a past fetch, so the flag was
            // always false; the operands are swapped here to implement the
            // staleness check the field name describes.
            cache_update_required: (Utc::now() - value.fetched_at) > Duration::minutes(5),
            last_cache_update: value.fetched_at.timestamp(),
            last_schedule_update: value.updated_at.timestamp(),
        }
    }
}

View File

@@ -0,0 +1,132 @@
use self::schema::*;
use crate::AppState;
use crate::app_state::Schedule;
use crate::parser::parse_xls;
use crate::routes::schedule::schema::CacheStatus;
use crate::routes::schema::{IntoResponseAsError, ResponseError};
use crate::xls_downloader::interface::XLSDownloader;
use actix_web::web::Json;
use actix_web::{patch, web};
use chrono::Utc;
#[utoipa::path(responses(
    (status = OK, body = CacheStatus),
    (status = NOT_ACCEPTABLE, body = ResponseError<ErrorCode>),
))]
#[patch("/update-download-url")]
pub async fn update_download_url(
    data: Json<Request>,
    app_state: web::Data<AppState>,
) -> ServiceResponse {
    // Only URLs on the college's own host are accepted.
    if !data.url.starts_with("https://politehnikum-eng.ru/") {
        return ErrorCode::NonWhitelistedHost.into_response();
    }

    // NOTE(review): this std::sync::Mutex guard is held across the `.await`s
    // below, which makes the handler's future non-Send; presumably this works
    // because actix-web runs handlers on single-threaded workers — confirm
    // before moving this code to a multi-threaded executor.
    let mut downloader = app_state.downloader.lock().unwrap();

    // Same URL as currently configured: nothing to re-download,
    // just report the current cache state.
    if let Some(url) = &downloader.url {
        if url.eq(&data.url) {
            return Ok(CacheStatus::from(&app_state)).into();
        }
    }

    match downloader.set_url(data.url.clone()).await {
        Ok(fetch_result) => {
            let mut schedule = app_state.schedule.lock().unwrap();

            // Refuse URLs pointing to a schedule older than the cached one.
            if schedule.is_some()
                && fetch_result.uploaded_at < schedule.as_ref().unwrap().updated_at
            {
                return ErrorCode::OutdatedSchedule.into_response();
            }

            // Download the XLS body and replace the cached schedule on success.
            match downloader.fetch(false).await {
                Ok(download_result) => match parse_xls(download_result.data.as_ref().unwrap()) {
                    Ok(data) => {
                        *schedule = Some(Schedule {
                            etag: download_result.etag,
                            fetched_at: download_result.requested_at,
                            updated_at: download_result.uploaded_at,
                            parsed_at: Utc::now(),
                            data,
                        });

                        Ok(CacheStatus::from(schedule.as_ref().unwrap())).into()
                    }
                    Err(error) => ErrorCode::InvalidSchedule(error).into_response(),
                },
                Err(error) => {
                    eprintln!("Unknown url provided {}", data.url);
                    eprintln!("{:?}", error);

                    ErrorCode::DownloadFailed.into_response()
                }
            }
        }
        Err(error) => {
            eprintln!("Unknown url provided {}", data.url);
            eprintln!("{:?}", error);

            ErrorCode::FetchFailed.into_response()
        }
    }
}
mod schema {
    // Request/error schema for PATCH /update-download-url.
    use crate::parser::schema::ParseError;
    use crate::routes::schedule::schema::CacheStatus;
    use actix_macros::{IntoResponseErrorNamed, StatusCode};
    use derive_more::Display;
    use serde::{Deserialize, Serialize, Serializer};
    use utoipa::ToSchema;

    pub type ServiceResponse = crate::routes::schema::Response<CacheStatus, ErrorCode>;

    #[derive(Serialize, Deserialize, ToSchema)]
    pub struct Request {
        /// Ссылка на расписание
        pub url: String,
    }

    // Every variant is served as 406 NOT_ACCEPTABLE.
    #[derive(Clone, ToSchema, StatusCode, Display, IntoResponseErrorNamed)]
    #[status_code = "actix_web::http::StatusCode::NOT_ACCEPTABLE"]
    #[schema(as = SetDownloadUrl::ErrorCode)]
    pub enum ErrorCode {
        /// Передана ссылка с хостом отличающимся от politehnikum-eng.ru
        #[display("URL with unknown host provided. Provide url with politehnikum-eng.ru host.")]
        NonWhitelistedHost,
        /// Не удалось получить мета-данные файла
        #[display("Unable to retrieve metadata from the specified URL.")]
        FetchFailed,
        /// Не удалось скачать файл
        #[display("Unable to retrieve data from the specified URL.")]
        DownloadFailed,
        /// Ссылка ведёт на устаревшее расписание
        ///
        /// Под устаревшим расписанием подразумевается расписание, которое было опубликовано раньше, чем уже имеется на данный момент
        #[display("The schedule is older than it already is.")]
        OutdatedSchedule,
        /// Не удалось преобразовать расписание
        // NOTE(review): with derive_more 1.x semantics the quoted
        // "_0.display()" argument is a string literal, so the rendered
        // message would be the literal text `_0.display()` rather than the
        // ParseError description — confirm against the derive_more version
        // in Cargo.toml (0.99 accepted quoted expression strings).
        #[display("{}", "_0.display()")]
        InvalidSchedule(ParseError),
    }

    // Manual Serialize: the payload of InvalidSchedule must not leak into
    // the JSON error code, so every variant serializes to a bare
    // SCREAMING_SNAKE_CASE tag.
    impl Serialize for ErrorCode {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            match self {
                ErrorCode::NonWhitelistedHost => serializer.serialize_str("NON_WHITELISTED_HOST"),
                ErrorCode::FetchFailed => serializer.serialize_str("FETCH_FAILED"),
                ErrorCode::DownloadFailed => serializer.serialize_str("DOWNLOAD_FAILED"),
                ErrorCode::OutdatedSchedule => serializer.serialize_str("OUTDATED_SCHEDULE"),
                ErrorCode::InvalidSchedule(_) => serializer.serialize_str("INVALID_SCHEDULE"),
            }
        }
    }
}

173
src/routes/schema.rs Normal file
View File

@@ -0,0 +1,173 @@
use actix_web::body::EitherBody;
use actix_web::error::JsonPayloadError;
use actix_web::http::StatusCode;
use actix_web::{HttpRequest, HttpResponse, Responder};
use serde::{Serialize, Serializer};
use std::convert::Into;
use utoipa::PartialSchema;
/// Newtype over `Result<T, E>` that knows how to serialize itself and
/// respond over HTTP (see the `Serialize` and `Responder` impls below).
pub struct Response<T, E>(pub Result<T, E>)
where
    T: Serialize + PartialSchema,
    E: Serialize + PartialSchema + Clone + PartialStatusCode;

/// Maps an error value to the HTTP status code it should be served with.
pub trait PartialStatusCode {
    fn status_code(&self) -> StatusCode;
}
/// Transform Response<T, E> into Result<T, E>
///
/// Implemented as `From` (rather than a hand-written `Into`): the standard
/// blanket impl derives `Into<Result<T, E>>` automatically, so existing
/// `.into()` call sites keep working unchanged.
impl<T, E> From<Response<T, E>> for Result<T, E>
where
    T: Serialize + PartialSchema,
    E: Serialize + PartialSchema + Clone + PartialStatusCode,
{
    fn from(value: Response<T, E>) -> Result<T, E> {
        value.0
    }
}
/// Transform T into Response<T, E>
impl<T, E> From<Result<T, E>> for Response<T, E>
where
    T: Serialize + PartialSchema,
    E: Serialize + PartialSchema + Clone + PartialStatusCode,
{
    /// Wraps a plain `Result` in the responder newtype.
    fn from(result: Result<T, E>) -> Self {
        Self(result)
    }
}
/// Serialize Response<T, E>
impl<T, E> Serialize for Response<T, E>
where
T: Serialize + PartialSchema,
E: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<E>>,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match &self.0 {
Ok(ok) => serializer.serialize_some::<T>(&ok),
Err(err) => serializer
.serialize_some::<ResponseError<E>>(&ResponseError::<E>::from(err.clone().into())),
}
}
}
/// Transform Response<T, E> to HttpResponse<String>
// Serializes the payload to JSON up front, then picks the status code from
// the Ok/Err state. `EitherBody` lets both the happy path (left: the JSON
// string) and any internal failure (right: actix's error body) share one
// body type.
impl<T, E> Responder for Response<T, E>
where
    T: Serialize + PartialSchema,
    E: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<E>>,
{
    type Body = EitherBody<String>;

    fn respond_to(self, _: &HttpRequest) -> HttpResponse<Self::Body> {
        match serde_json::to_string(&self) {
            Ok(body) => {
                // 200 for success; otherwise whatever the error code maps to.
                let code = match &self.0 {
                    Ok(_) => StatusCode::OK,
                    Err(e) => e.status_code(),
                };

                match HttpResponse::build(code)
                    .content_type(mime::APPLICATION_JSON)
                    .message_body(body)
                {
                    Ok(res) => res.map_into_left_body(),
                    Err(err) => HttpResponse::from_error(err).map_into_right_body(),
                }
            }
            // Serialization failure is surfaced as an actix JSON payload error.
            Err(err) => {
                HttpResponse::from_error(JsonPayloadError::Serialize(err)).map_into_right_body()
            }
        }
    }
}
/// ResponseError<T>
///
/// Field `message` is optional for backwards compatibility with Android App, that produces error if new fields will be added to JSON response.
#[derive(Serialize, utoipa::ToSchema)]
pub struct ResponseError<T: Serialize + PartialSchema> {
    // Machine-readable error code (typically a SCREAMING_SNAKE_CASE tag).
    pub code: T,
    // Optional human-readable description; omitted from JSON when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Convenience trait for error codes: `code.into_response()` wraps the code
/// in the `Err` arm of a `Response`, ready to be returned from a handler.
pub trait IntoResponseAsError<T>
where
    T: Serialize + PartialSchema,
    Self: Serialize + PartialSchema + Clone + PartialStatusCode + Into<ResponseError<Self>>,
{
    fn into_response(self) -> Response<T, Self> {
        Response(Err(self))
    }
}
pub mod user {
    // Public user representation shared by the auth and users endpoints.
    // NOTE: the Russian `///` doc comments feed the utoipa-generated
    // OpenAPI description at runtime and are kept verbatim.
    use crate::database::models::{User, UserRole};
    use actix_macros::ResponderJson;
    use serde::Serialize;

    /// Используется для скрытия чувствительных полей, таких как хеш пароля или FCM
    #[derive(Serialize, utoipa::ToSchema, ResponderJson)]
    #[serde(rename_all = "camelCase")]
    pub struct UserResponse {
        /// UUID
        #[schema(examples("67dcc9a9507b0000772744a2"))]
        id: String,
        /// Имя пользователя
        #[schema(examples("n08i40k"))]
        username: String,
        /// Группа
        #[schema(examples("ИС-214/23"))]
        group: String,
        /// Роль
        role: UserRole,
        /// Идентификатор прявязанного аккаунта VK
        #[schema(examples(498094647, json!(null)))]
        vk_id: Option<i32>,
        /// JWT токен доступа
        #[schema(examples(
            "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpZCI6IjY3ZGNjOWE5NTA3YjAwMDA3NzI3NDRhMiIsImlhdCI6IjE3NDMxMDgwOTkiLCJleHAiOiIxODY5MjUyMDk5In0.rMgXRb3JbT9AvLK4eiY9HMB5LxgUudkpQyoWKOypZFY"
        ))]
        access_token: String,
    }

    /// Create UserResponse from User ref.
    impl From<&User> for UserResponse {
        fn from(user: &User) -> Self {
            UserResponse {
                id: user.id.clone(),
                username: user.username.clone(),
                group: user.group.clone(),
                role: user.role.clone(),
                // Option<i32> is Copy — the original `.clone()` was
                // redundant (clippy: clone_on_copy).
                vk_id: user.vk_id,
                access_token: user.access_token.clone(),
            }
        }
    }

    /// Transform User to UserResponse.
    impl From<User> for UserResponse {
        fn from(user: User) -> Self {
            UserResponse {
                id: user.id,
                username: user.username,
                group: user.group,
                role: user.role,
                vk_id: user.vk_id,
                access_token: user.access_token,
            }
        }
    }
}

10
src/routes/users/me.rs Normal file
View File

@@ -0,0 +1,10 @@
use crate::database::models::User;
use crate::extractors::base::SyncExtractor;
use actix_web::get;
use crate::routes::schema::user::UserResponse;
#[utoipa::path(responses((status = OK, body = UserResponse)))]
#[get("/me")]
pub async fn me(user: SyncExtractor<User>) -> UserResponse {
user.into_inner().into()
}

1
src/routes/users/mod.rs Normal file
View File

@@ -0,0 +1 @@
pub mod me;

32
src/test_env.rs Normal file
View File

@@ -0,0 +1,32 @@
#[cfg(test)]
pub(crate) mod tests {
    use crate::app_state::{app_state, AppState, Schedule};
    use crate::parser::tests::test_result;
    use actix_web::web;
    use std::sync::LazyLock;

    /// Loads `.env.test` so tests run against the test configuration.
    pub fn test_env() {
        dotenvy::from_path(".env.test").expect("Failed to load test environment file");
    }

    /// Builds a fresh application state pre-populated with the reference
    /// parsed schedule, so handlers under test see a "ready" cache.
    pub fn test_app_state() -> web::Data<AppState> {
        let state = app_state();

        let mut schedule_lock = state.schedule.lock().unwrap();
        *schedule_lock = Some(Schedule {
            etag: "".to_string(),
            fetched_at: Default::default(),
            updated_at: Default::default(),
            parsed_at: Default::default(),
            data: test_result().unwrap(),
        });

        state.clone()
    }

    /// Process-wide shared state for tests that must observe the same
    /// application instance (e.g. to see rows inserted by `prepare`).
    pub fn static_app_state() -> web::Data<AppState> {
        // The function is passed directly as the initializer; the closure
        // wrapper in the original was redundant (clippy: redundant_closure).
        static STATE: LazyLock<web::Data<AppState>> = LazyLock::new(test_app_state);
        STATE.clone()
    }
}

19
src/utility/error.rs Normal file
View File

@@ -0,0 +1,19 @@
use std::fmt::{Write};
use std::fmt::Display;
use serde::{Deserialize, Serialize};
/// Server response body for errors raised inside middleware.
#[derive(Serialize, Deserialize)]
pub struct ResponseErrorMessage<T: Display> {
    // Machine-readable error code.
    code: T,
    // Human-readable text rendered from the code's Display impl (see `new`).
    message: String,
}
impl<T: Display + Serialize> ResponseErrorMessage<T> {
    /// Builds the payload, deriving `message` from the code's `Display` impl.
    pub fn new(code: T) -> Self {
        // `to_string` replaces the original's manual `write!` into a fresh
        // buffer — same result via the same Display impl, less ceremony.
        let message = code.to_string();
        Self { code, message }
    }
}

38
src/utility/hasher.rs Normal file
View File

@@ -0,0 +1,38 @@
use sha1::Digest;
use std::hash::Hasher;
/// Хешер возвращающий хеш из алгоритма реализующего Digest
pub struct DigestHasher<D: Digest> {
digest: D,
}
impl<D> DigestHasher<D>
where
D: Digest,
{
/// Получение хеша
pub fn finalize(self) -> String {
hex::encode(self.digest.finalize().0)
}
}
impl<D> From<D> for DigestHasher<D>
where
D: Digest,
{
/// Создания хешера из алгоритма реализующего Digest
fn from(digest: D) -> Self {
DigestHasher { digest }
}
}
impl<D: Digest> Hasher for DigestHasher<D> {
/// Заглушка для предотвращения вызова стандартного результата Hasher
fn finish(&self) -> u64 {
unimplemented!("Do not call finish()");
}
fn write(&mut self, bytes: &[u8]) {
self.digest.update(bytes);
}
}

166
src/utility/jwt.rs Normal file
View File

@@ -0,0 +1,166 @@
use chrono::Duration;
use chrono::Utc;
use jsonwebtoken::errors::ErrorKind;
use jsonwebtoken::{Algorithm, DecodingKey, EncodingKey, Header, Validation, decode};
use serde::{Deserialize, Serialize};
use serde_with::DisplayFromStr;
use serde_with::serde_as;
use std::env;
use std::mem::discriminant;
use std::sync::LazyLock;
/// Key used to verify token signatures; read once from `JWT_SECRET`.
static DECODING_KEY: LazyLock<DecodingKey> = LazyLock::new(|| {
    let secret = env::var("JWT_SECRET").expect("JWT_SECRET must be set");
    DecodingKey::from_secret(secret.as_bytes())
});
/// Key used to create signed tokens; read once from `JWT_SECRET`.
static ENCODING_KEY: LazyLock<EncodingKey> = LazyLock::new(|| {
    let secret = env::var("JWT_SECRET").expect("JWT_SECRET must be set");
    EncodingKey::from_secret(secret.as_bytes())
});
/// Token verification errors.
#[allow(dead_code)]
#[derive(Debug)]
pub enum Error {
    /// The token was signed with a different key
    InvalidSignature,
    /// The token could not be parsed/read
    InvalidToken(ErrorKind),
    /// The token has expired
    Expired,
}
impl PartialEq for Error {
    // NOTE: equality compares only the enum variant; the `ErrorKind`
    // payload inside `InvalidToken` is deliberately ignored
    // (discriminant-based comparison).
    fn eq(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}
/// Data carried inside the token.
#[serde_as]
#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    /// UUID of the user account
    id: String,
    /// Token issue timestamp; serialized as a string via `DisplayFromStr`
    #[serde_as(as = "DisplayFromStr")]
    iat: u64,
    /// Token expiry timestamp; serialized as a string via `DisplayFromStr`
    #[serde_as(as = "DisplayFromStr")]
    exp: u64,
}
/// Signing algorithm used for all tokens.
pub(crate) const DEFAULT_ALGORITHM: Algorithm = Algorithm::HS256;
/// Проверка токена и извлечение из него UUID аккаунта пользователя
pub fn verify_and_decode(token: &String) -> Result<String, Error> {
let mut validation = Validation::new(DEFAULT_ALGORITHM);
validation.required_spec_claims.remove("exp");
validation.validate_exp = false;
let result = decode::<Claims>(&token, &*DECODING_KEY, &validation);
match result {
Ok(token_data) => {
if token_data.claims.exp < Utc::now().timestamp().unsigned_abs() {
Err(Error::Expired)
} else {
Ok(token_data.claims.id)
}
}
Err(err) => Err(match err.into_kind() {
ErrorKind::InvalidSignature => Error::InvalidSignature,
ErrorKind::ExpiredSignature => Error::Expired,
kind => Error::InvalidToken(kind),
}),
}
}
/// Creates a signed token for the given user account UUID.
///
/// The token expiry is set roughly four years after issuance.
pub fn encode(id: &str) -> String {
    let header = Header {
        typ: Some(String::from("JWT")),
        ..Default::default()
    };
    let iat = Utc::now();
    let exp = iat + Duration::days(365 * 4);
    let claims = Claims {
        id: id.to_owned(),
        iat: iat.timestamp().unsigned_abs(),
        exp: exp.timestamp().unsigned_abs(),
    };
    // Signing only fails on key/serialization misconfiguration, which is a
    // programming error here, hence `expect`.
    jsonwebtoken::encode(&header, &claims, &*ENCODING_KEY)
        .expect("JWT encoding must not fail with a valid key and claims")
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_env::tests::test_env;

    // Encoding any id must yield a non-empty JWT string.
    #[test]
    fn test_encode() {
        test_env();
        assert_eq!(encode(&"test".to_string()).is_empty(), false);
    }

    // An empty string is not a parseable JWT → InvalidToken.
    #[test]
    fn test_decode_invalid_token() {
        test_env();
        let token = "".to_string();
        let result = verify_and_decode(&token);
        assert!(result.is_err());
        // PartialEq on Error compares variants only, so any InvalidToken
        // payload matches here.
        assert_eq!(
            result.err().unwrap(),
            Error::InvalidToken(ErrorKind::InvalidToken)
        );
    }

    // Fixture token presumably signed with a different secret than the one
    // in .env.test — TODO confirm; expected to fail signature verification.
    #[test]
    fn test_decode_invalid_signature() {
        test_env();
        let token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOiIxNjE2NTI2Mzc2IiwiaWF0IjoiMTQ5MDM4MjM3NiIsImlkIjoiNjdkY2M5YTk1MDdiMDAwMDc3Mjc0NGEyIn0.Qc2LbMJTvl2hWzDM2XyQv4m9lIqR84COAESQAieUxz8".to_string();
        let result = verify_and_decode(&token);
        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), Error::InvalidSignature);
    }

    // Fixture token with exp = "0" (in the past) → Expired.
    #[test]
    fn test_decode_expired() {
        test_env();
        let token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjY3ZGNjOWE5NTA3YjAwMDA3NzI3NDRhMiIsImlhdCI6IjAiLCJleHAiOiIwIn0.GBsVYvnZIfHXt00t-qmAdUMyHSyWOBtC0Mrxwg1HQOM".to_string();
        let result = verify_and_decode(&token);
        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), Error::Expired);
    }

    // Fixture token with exp = "9999999999" (far future) → decodes OK.
    #[test]
    fn test_decode_ok() {
        test_env();
        let token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjY3ZGNjOWE5NTA3YjAwMDA3NzI3NDRhMiIsImlhdCI6Ijk5OTk5OTk5OTkiLCJleHAiOiI5OTk5OTk5OTk5In0.o1vN-ze5iaJrnlHqe7WARXMBhhzjxTjTKkjlmTGEnOI".to_string();
        let result = verify_and_decode(&token);
        assert!(result.is_ok());
    }
}

3
src/utility/mod.rs Normal file
View File

@@ -0,0 +1,3 @@
pub mod jwt;
pub mod error;
pub mod hasher;

View File

@@ -0,0 +1,219 @@
use crate::xls_downloader::interface::{FetchError, FetchOk, FetchResult, XLSDownloader};
use chrono::{DateTime, Utc};
/// Basic `XLSDownloader` implementation backed by `reqwest`.
pub struct BasicXlsDownloader {
    // URL of the schedule file; `None` until `set_url` succeeds.
    pub url: Option<String>,
}
/// Fetches metadata about the XLS file at `url` and, for a GET request,
/// its content as well.
///
/// Sends a HEAD request when `head` is true, otherwise a GET. The server
/// must respond with status 200, `Content-Type: application/vnd.ms-excel`,
/// and the `ETag`, `Last-Modified` and `Date` headers present; otherwise
/// the corresponding `FetchError` is returned.
async fn fetch_specified(url: &str, user_agent: String, head: bool) -> FetchResult {
    let client = reqwest::Client::new();

    let request = if head { client.head(url) } else { client.get(url) };
    let response = request
        .header("User-Agent", user_agent)
        .send()
        .await
        .map_err(|_| FetchError::Unknown)?;

    if response.status().as_u16() != 200 {
        return Err(FetchError::BadStatusCode);
    }

    let headers = response.headers();
    // All four headers must be present; `date` is only checked for presence.
    let (Some(content_type), Some(etag), Some(last_modified), Some(_)) = (
        headers.get("Content-Type"),
        headers.get("etag"),
        headers.get("last-modified"),
        headers.get("date"),
    ) else {
        return Err(FetchError::BadHeaders);
    };

    if content_type != "application/vnd.ms-excel" {
        return Err(FetchError::BadContentType);
    }

    // Convert header values into owned data before (potentially) consuming
    // the response body. Malformed values now yield BadHeaders instead of
    // panicking as the previous `unwrap`s did.
    let etag = etag
        .to_str()
        .map_err(|_| FetchError::BadHeaders)?
        .to_string();
    let uploaded_at = last_modified
        .to_str()
        .ok()
        .and_then(|value| DateTime::parse_from_rfc2822(value).ok())
        .ok_or(FetchError::BadHeaders)?
        .with_timezone(&Utc);

    if head {
        Ok(FetchOk::head(etag, uploaded_at))
    } else {
        let data = response
            .bytes()
            .await
            .map_err(|_| FetchError::Unknown)?
            .to_vec();
        Ok(FetchOk::get(etag, uploaded_at, data))
    }
}
impl BasicXlsDownloader {
    /// Creates a downloader with no URL configured.
    pub fn new() -> Self {
        BasicXlsDownloader { url: None }
    }
}

impl Default for BasicXlsDownloader {
    /// Equivalent to [`BasicXlsDownloader::new`]; satisfies the Rust
    /// convention (clippy `new_without_default`) that `new()` with no
    /// arguments is accompanied by a `Default` impl.
    fn default() -> Self {
        Self::new()
    }
}
/// User agent sent with every request made by this downloader.
const USER_AGENT: &str = "t.me/polytechnic_next";

impl XLSDownloader for BasicXlsDownloader {
    /// Fetches the configured file; fails with `NoUrlProvided` when no URL
    /// has been set yet.
    async fn fetch(&self, head: bool) -> FetchResult {
        match &self.url {
            None => Err(FetchError::NoUrlProvided),
            Some(url) => fetch_specified(url, USER_AGENT.to_string(), head).await,
        }
    }

    /// Validates `url` with a HEAD request and stores it only on success.
    async fn set_url(&mut self, url: String) -> FetchResult {
        let result = fetch_specified(&url, USER_AGENT.to_string(), true).await;
        if result.is_ok() {
            self.url = Some(url);
        }
        result
    }
}
#[cfg(test)]
mod tests {
    use crate::xls_downloader::basic_impl::{BasicXlsDownloader, fetch_specified};
    use crate::xls_downloader::interface::{FetchError, XLSDownloader};

    // NOTE(review): these tests make real network requests (google.com and
    // an S3-compatible storage), so they need internet access and depend on
    // those external services staying available and unchanged.

    // A syntactically invalid URL must fail for both HEAD and GET.
    #[tokio::test]
    async fn bad_url() {
        let url = "bad_url".to_string();
        let user_agent = String::new();
        let results = [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent.clone(), false).await,
        ];
        assert!(results[0].is_err());
        assert!(results[1].is_err());
    }

    // A non-200 response must map to BadStatusCode for HEAD and GET.
    #[tokio::test]
    async fn bad_status_code() {
        let url = "https://www.google.com/not-found".to_string();
        let user_agent = String::new();
        let results = [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent.clone(), false).await,
        ];
        assert!(results[0].is_err());
        assert!(results[1].is_err());
        assert_eq!(
            *results[0].as_ref().err().unwrap(),
            FetchError::BadStatusCode
        );
        assert_eq!(
            *results[1].as_ref().err().unwrap(),
            FetchError::BadStatusCode
        );
    }

    // A 200 response lacking the expected headers must map to BadHeaders.
    #[tokio::test]
    async fn bad_headers() {
        let url = "https://www.google.com/favicon.ico".to_string();
        let user_agent = String::new();
        let results = [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent.clone(), false).await,
        ];
        assert!(results[0].is_err());
        assert!(results[1].is_err());
        assert_eq!(*results[0].as_ref().err().unwrap(), FetchError::BadHeaders);
        assert_eq!(*results[1].as_ref().err().unwrap(), FetchError::BadHeaders);
    }

    // A file served with a non-XLS Content-Type must map to BadContentType.
    #[tokio::test]
    async fn bad_content_type() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb59fd46303008396ac96%2Fexample.txt".to_string();
        let user_agent = String::new();
        let results = [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent.clone(), false).await,
        ];
        assert!(results[0].is_err());
        assert!(results[1].is_err());
        assert_eq!(
            *results[0].as_ref().err().unwrap(),
            FetchError::BadContentType
        );
        assert_eq!(
            *results[1].as_ref().err().unwrap(),
            FetchError::BadContentType
        );
    }

    // A well-formed XLS URL must succeed for both HEAD and GET.
    #[tokio::test]
    async fn ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();
        let user_agent = String::new();
        let results = [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent.clone(), false).await,
        ];
        assert!(results[0].is_ok());
        assert!(results[1].is_ok());
    }

    // set_url succeeds when the validating HEAD request succeeds.
    #[tokio::test]
    async fn downloader_set_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();
        let mut downloader = BasicXlsDownloader::new();
        assert!(downloader.set_url(url).await.is_ok());
    }

    // set_url propagates the fetch error for an invalid URL.
    #[tokio::test]
    async fn downloader_set_err() {
        let url = "bad_url".to_string();
        let mut downloader = BasicXlsDownloader::new();
        assert!(downloader.set_url(url).await.is_err());
    }

    // After a successful set_url, a full fetch should succeed too.
    #[tokio::test]
    async fn downloader_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();
        let mut downloader = BasicXlsDownloader::new();
        assert!(downloader.set_url(url).await.is_ok());
        assert!(downloader.fetch(false).await.is_ok());
    }

    // fetch without a configured URL must fail with NoUrlProvided.
    #[tokio::test]
    async fn downloader_no_url_provided() {
        let downloader = BasicXlsDownloader::new();
        let result = downloader.fetch(false).await;
        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), FetchError::NoUrlProvided);
    }
}

View File

@@ -0,0 +1,67 @@
use chrono::{DateTime, Utc};
/// Errors that can occur while fetching the XLS data.
#[derive(PartialEq, Debug)]
pub enum FetchError {
    /// No file URL has been set
    NoUrlProvided,
    /// Unknown error (e.g. the request itself failed)
    Unknown,
    /// The server returned a status code other than 200
    BadStatusCode,
    /// The URL points to a file of a different type
    BadContentType,
    /// The server did not return the expected headers
    BadHeaders,
}
/// Successful result of fetching the XLS data.
pub struct FetchOk {
    /// ETag of the object
    pub etag: String,
    /// When the file was uploaded
    pub uploaded_at: DateTime<Utc>,
    /// When the data was requested
    pub requested_at: DateTime<Utc>,
    /// File content; `None` for metadata-only (HEAD) fetches
    pub data: Option<Vec<u8>>,
}
impl FetchOk {
    /// Builds a metadata-only result with no file content.
    pub fn head(etag: String, uploaded_at: DateTime<Utc>) -> Self {
        Self {
            etag,
            uploaded_at,
            requested_at: Utc::now(),
            data: None,
        }
    }

    /// Builds a full result including the file content.
    pub fn get(etag: String, uploaded_at: DateTime<Utc>, data: Vec<u8>) -> Self {
        // Reuse `head` for the shared fields; only `data` differs.
        Self {
            data: Some(data),
            ..Self::head(etag, uploaded_at)
        }
    }
}
/// Result type shared by all fetch operations.
pub type FetchResult = Result<FetchOk, FetchError>;
pub trait XLSDownloader {
    /// Fetches file metadata and, when `head` is false, its content as well
    async fn fetch(&self, head: bool) -> FetchResult;
    /// Sets the file URL
    async fn set_url(&mut self, url: String) -> FetchResult;
}

View File

@@ -0,0 +1,2 @@
pub mod basic_impl;
pub mod interface;