30 Commits

Author SHA1 Message Date
b664ba578d chore(clippy): fix all clippy warnings 2025-09-25 03:42:34 +04:00
983967f8b0 chore(downloader): suppress unused_assignments warning 2025-09-25 03:27:55 +04:00
e5760120e2 chore(release): bump version to 1.3.0 2025-09-25 03:17:14 +04:00
a28fb66dd4 feat(downloader): add retry-mechanism for querying uri from yandex-cloud api (#18) 2025-09-25 03:15:36 +04:00
3780fb3136 feat(downloader): implement etag-based difference check for schedule 2025-09-25 03:14:39 +04:00
6c71bc19f5 chore(parser): fix crash caused by another mistype in schedule 2025-09-25 02:50:43 +04:00
2d0041dc8b feat(schedule): add practice lesson type 2025-09-25 02:49:23 +04:00
b5d372e109 feat(ci): build and push image to docker registry on every push to master 2025-09-10 20:05:11 +04:00
84dca02c34 fix(database): use migrator and change connection options 2025-09-10 20:04:19 +04:00
6c9d3b3b31 chore(release): bump version to 1.2.2 2025-09-08 07:16:53 +04:00
a348b1b99b refactor(auth): interpret failure to find user as an error 2025-09-08 07:15:22 +04:00
ff12ee5da2 chore(release): bump version to 1.2.1 2025-09-06 21:25:26 +04:00
35f707901f chore(clippy): fix all clippy warnings 2025-09-06 21:24:52 +04:00
edea6c5424 chore(release): bump version to 1.2.0 2025-09-06 20:33:17 +04:00
fdbb872fc3 refactor(dev): move tracing to feature 2025-09-06 20:17:14 +04:00
dbc800fef1 feat(database)!: switch from diesel to sea-orm 2025-09-06 20:09:04 +04:00
e729d84c93 fix: fix typo in error description 2025-09-06 18:42:23 +04:00
cc7adf10ed fix(env): make dotenv file optional 2025-09-06 18:40:04 +04:00
57c1699c9a chore(release): bump version to 1.1.1 2025-09-03 07:13:37 +04:00
298c4f4dd3 feat(auth): send raw token on telegram auth 2025-09-03 07:08:51 +04:00
e3904a255b chore(release): bump version to 1.1.0 2025-09-02 09:23:29 +04:00
829c1cf68d chore(deps): bump calamine version 2025-09-02 09:20:02 +04:00
6a535f8d73 chore(ci): remove creating .env.test 2025-09-02 09:20:02 +04:00
5e5cd53f46 chore(ci): remove creating .env.test 2025-09-02 09:11:30 +04:00
8d59e37976 refactor(android)!: remove FCM support 2025-09-02 09:08:17 +04:00
5e39fc9acc feat(schedule)!: move schedule parser, downloader, and updater to external library
This can be used to support more schedule formats in the future.
2025-09-02 08:59:59 +04:00
7c973bfda0 refactor(middlewares): move MiddlewareError from crate::utility to crate::middlewares 2025-09-02 08:52:15 +04:00
8fba0fc709 feat(env): add ability to set custom .env file for testing on local machine 2025-06-13 01:03:06 +04:00
983ff4fa5e feat(env): expose .env.test 2025-06-13 01:02:08 +04:00
fb6f3fc05f chore(deps): upgrade dependencies 2025-06-13 00:56:58 +04:00
106 changed files with 4534 additions and 4569 deletions

26 .env.test Normal file

@@ -0,0 +1,26 @@
# Schedule
# SCHEDULE_INIT_URL=
SCHEDULE_DISABLE_AUTO_UPDATE=1
# Basic authorization
JWT_SECRET="test-secret-at-least-256-bits-used"
# VKID
VK_ID_CLIENT_ID=0
VK_ID_REDIRECT_URI="vk0://vk.com/blank.html"
# Telegram Mini-App
TELEGRAM_BOT_ID=0
TELEGRAM_MINI_APP_HOST=example.com
TELEGRAM_TEST_DC=false
# Yandex Cloud
YANDEX_CLOUD_API_KEY=""
YANDEX_CLOUD_FUNC_ID=""
# Firebase
# GOOGLE_APPLICATION_CREDENTIALS=
# LOGGING
RUST_BACKTRACE=1
# RUST_LOG=debug
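
A minimal sketch of how these variables might be consumed at startup, assuming the dotenvy crate already present in Cargo.toml (the loader function itself is hypothetical):

use std::env;

fn load_test_env() {
    // The dotenv file is optional (see commit cc7adf10ed), so a missing file is not fatal.
    let _ = dotenvy::from_filename(".env.test");

    // dotenvy does not override variables that are already exported.
    let jwt_secret = env::var("JWT_SECRET").expect("JWT_SECRET must be set");
    assert!(jwt_secret.len() * 8 >= 256, "JWT_SECRET must be at least 256 bits");

    let auto_update_disabled = env::var("SCHEDULE_DISABLE_AUTO_UPDATE").is_ok();
    let _ = auto_update_disabled;
}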

142 .github/workflows/build.yml vendored Normal file

@@ -0,0 +1,142 @@
name: build
on:
push:
branches: [ "master" ]
tags-ignore: [ "release/v*" ]
permissions:
contents: write
env:
CARGO_TERM_COLOR: always
BINARY_NAME: schedule-parser-rusted
TEST_DB: ${{ secrets.TEST_DATABASE_URL }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
DOCKER_IMAGE_NAME: ${{ github.repository }}
DOCKER_REGISTRY_HOST: registry.n08i40k.ru
DOCKER_REGISTRY_USERNAME: ${{ github.repository_owner }}
DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
jobs:
test:
name: Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
with:
toolchain: stable
- name: Test
run: |
cargo test --verbose
env:
DATABASE_URL: ${{ env.TEST_DB }}
SCHEDULE_DISABLE_AUTO_UPDATE: 1
JWT_SECRET: "test-secret-at-least-256-bits-used"
VK_ID_CLIENT_ID: 0
VK_ID_REDIRECT_URI: "vk0://vk.com/blank.html"
TELEGRAM_BOT_ID: 0
TELEGRAM_MINI_APP_HOST: example.com
TELEGRAM_TEST_DC: false
YANDEX_CLOUD_API_KEY: ""
YANDEX_CLOUD_FUNC_ID: ""
build:
name: Build
runs-on: ubuntu-latest
needs: test
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Rust
uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
with:
toolchain: stable
- name: Build
run: cargo build --release --verbose
- name: Extract debug symbols
run: |
objcopy --only-keep-debug target/release/${{ env.BINARY_NAME }}{,.d}
objcopy --strip-debug --strip-unneeded target/release/${{ env.BINARY_NAME }}
objcopy --add-gnu-debuglink target/release/${{ env.BINARY_NAME }}{.d,}
- name: Setup sentry-cli
uses: matbour/setup-sentry-cli@v2.0.0
with:
version: latest
token: ${{ env.SENTRY_AUTH_TOKEN }}
organization: ${{ env.SENTRY_ORG }}
project: ${{ env.SENTRY_PROJECT }}
- name: Upload debug symbols to Sentry
run: |
sentry-cli debug-files upload --include-sources .
- name: Upload build binary artifact
uses: actions/upload-artifact@v4
with:
name: release-binary
path: target/release/${{ env.BINARY_NAME }}
- name: Upload build debug symbols artifact
uses: actions/upload-artifact@v4
with:
name: release-symbols
path: target/release/${{ env.BINARY_NAME }}.d
docker:
name: Build & Push Docker Image
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v4
- name: Download build artifacts
uses: actions/download-artifact@v4
with:
name: release-binary
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v3.10.0
- name: Login to Registry
uses: docker/login-action@v3.4.0
with:
registry: ${{ env.DOCKER_REGISTRY_HOST }}
username: ${{ env.DOCKER_REGISTRY_USERNAME }}
password: ${{ env.DOCKER_REGISTRY_PASSWORD }}
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@v5.7.0
with:
images: ${{ env.DOCKER_REGISTRY_HOST }}/${{ env.DOCKER_IMAGE_NAME }}
- name: Build and push Docker image
id: build-and-push
uses: docker/build-push-action@v6.15.0
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
build-args: |
"BINARY_NAME=${{ env.BINARY_NAME }}"

View File

@@ -40,7 +40,6 @@ jobs:
- name: Test
run: |
touch .env.test
cargo test --verbose
env:
DATABASE_URL: ${{ env.TEST_DB }}

View File

@@ -2,7 +2,7 @@ name: cargo test
on:
push:
branches: [ "master" ]
branches: [ "development" ]
tags-ignore: [ "release/v*" ]
permissions:
@@ -18,10 +18,10 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Build
run: cargo build
- name: Create .env.test
run: touch .env.test
- name: Run tests
run: cargo test
env:

View File

@@ -8,6 +8,9 @@
<sourceFolder url="file://$MODULE_DIR$/actix-test/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/schedule-parser/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/schedule-parser/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/providers/base/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/providers/provider-engels-polytechnic/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/providers/src" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/actix-macros/target" />
<excludeFolder url="file://$MODULE_DIR$/actix-test/target" />
<excludeFolder url="file://$MODULE_DIR$/target" />

3568 Cargo.lock generated

File diff suppressed because it is too large.

View File

@@ -1,50 +1,81 @@
[workspace]
members = ["actix-macros", "actix-test", "schedule-parser"]
members = ["actix-macros", "actix-test", "providers"]
[package]
name = "schedule-parser-rusted"
version = "1.0.5"
version = "1.3.0"
edition = "2024"
publish = false
[profile.release]
debug = true
[features]
trace = ["tracing", "console-subscriber"]
[dependencies]
actix-web = "4.10.2"
providers = { path = "providers" }
actix-macros = { path = "actix-macros" }
schedule-parser = { path = "schedule-parser", features = ["test-utils"] }
bcrypt = "0.17.0"
chrono = { version = "0.4.40", features = ["serde"] }
derive_more = { version = "2", features = ["full"] }
diesel = { version = "2.2.8", features = ["postgres"] }
diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
# serve api
actix-web = "4.11.0"
# basic
chrono = { version = "0.4.42", features = ["serde"] }
derive_more = { version = "2.0.1", features = ["full"] }
dotenvy = "0.15.7"
env_logger = "0.11.7"
firebase-messaging-rs = { git = "https://github.com/i10416/firebase-messaging-rs.git" }
# sql
database = { path = "database" }
# logging
env_logger = "0.11.8"
# async
tokio = { version = "1.47.1", features = ["macros", "rt-multi-thread"] }
tokio-util = "0.7.16"
futures-util = "0.3.31"
# authorization
bcrypt = "0.17.1"
jsonwebtoken = { version = "9.3.1", features = ["use_pem"] }
hex = "0.4.3"
mime = "0.3.17"
# creating users
objectid = "0.2.0"
reqwest = { version = "0.12.15", features = ["json"] }
sentry = "0.38"
sentry-actix = "0.38"
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140"
serde_with = "3.12.0"
sha1 = "0.11.0-pre.5"
tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] }
utoipa = { version = "5", features = ["actix_extras", "chrono"] }
# schedule downloader
reqwest = { version = "0.12.23", features = ["json"] }
mime = "0.3.17"
# error handling
sentry = "0.43.0"
sentry-actix = "0.43.0"
# [de]serializing
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_with = "3.14"
sha1 = "0.11.0-rc.2"
# documentation
utoipa = { version = "5.4.0", features = ["actix_extras", "chrono"] }
utoipa-rapidoc = { version = "6.0.0", features = ["actix-web"] }
utoipa-actix-web = "0.1"
uuid = { version = "1.16.0", features = ["v4"] }
ed25519-dalek = "2.1.1"
hex-literal = "1.0.0"
log = "0.4.26"
utoipa-actix-web = "0.1.2"
uuid = { version = "1.18.1", features = ["v4"] }
hex-literal = "1"
log = "0.4.28"
# telegram web data decoding and verification
base64 = "0.22.1"
percent-encoding = "2.3.1"
ua_generator = "0.5.16"
percent-encoding = "2.3.2"
ed25519-dalek = "3.0.0-pre.1"
# development tracing
console-subscriber = { version = "0.4.1", optional = true }
tracing = { version = "0.1.41", optional = true }
[dev-dependencies]
providers = { path = "providers", features = ["test"] }
actix-test = { path = "actix-test" }

View File

@@ -1,7 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
[[package]]
name = "actix-utility-macros"
version = "0.1.0"

View File

@@ -4,9 +4,9 @@ version = "0.1.0"
edition = "2024"
[dependencies]
syn = "2.0.100"
syn = "2.0.106"
quote = "1.0.40"
proc-macro2 = "1.0.94"
proc-macro2 = "1.0.101"
[lib]
proc-macro = true

View File

@@ -6,7 +6,7 @@ mod shared {
use quote::{ToTokens, quote};
use syn::{Attribute, DeriveInput};
pub fn find_status_code(attrs: &Vec<Attribute>) -> Option<proc_macro2::TokenStream> {
pub fn find_status_code(attrs: &[Attribute]) -> Option<proc_macro2::TokenStream> {
attrs
.iter()
.find_map(|attr| -> Option<proc_macro2::TokenStream> {
@@ -41,14 +41,12 @@ mod shared {
let mut status_code_arms: Vec<proc_macro2::TokenStream> = variants
.iter()
.map(|v| -> Option<proc_macro2::TokenStream> {
.filter_map(|v| -> Option<proc_macro2::TokenStream> {
let status_code = find_status_code(&v.attrs)?;
let variant_name = &v.ident;
Some(quote! { #name::#variant_name => #status_code, })
})
.filter(|v| v.is_some())
.map(|v| v.unwrap())
.collect();
if status_code_arms.len() < variants.len() {
@@ -81,7 +79,7 @@ mod middleware_error {
fn error_response(&self) -> ::actix_web::HttpResponse<BoxBody> {
::actix_web::HttpResponse::build(self.status_code())
.json(crate::utility::error::MiddlewareError::new(self.clone()))
.json(crate::middlewares::error::MiddlewareError::new(self.clone()))
}
}
})

1520 actix-test/Cargo.lock generated

File diff suppressed because it is too large.

View File

@@ -4,5 +4,5 @@ version = "0.1.0"
edition = "2024"
[dependencies]
actix-http = "3.10.0"
actix-web = "4.10.2"
actix-http = "3.11.1"
actix-web = "4.11.0"

11 database/Cargo.toml Normal file

@@ -0,0 +1,11 @@
[package]
name = "database"
version = "0.1.0"
edition = "2024"
[dependencies]
migration = { path = "migration" }
entity = { path = "entity" }
sea-orm = { version = "2.0.0-rc.6", features = ["sqlx-postgres", "runtime-tokio"] }
paste = "1.0.15"

1 database/entity/.gitignore vendored Normal file

@@ -0,0 +1 @@
/target

View File

@@ -0,0 +1,9 @@
[package]
name = "entity"
version = "0.1.0"
edition = "2024"
[dependencies]
sea-orm = "2.0.0-rc.6"
serde = { version = "1.0.219", features = ["derive"] }
utoipa = "5.4.0"

View File

@@ -0,0 +1,6 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
pub mod prelude;
pub mod sea_orm_active_enums;
pub mod user;

View File

@@ -0,0 +1,3 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
pub use super::user::Entity as User;

View File

@@ -0,0 +1,25 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
use sea_orm::entity::prelude::*;
#[derive(
Debug,
Clone,
PartialEq,
Eq,
EnumIter,
DeriveActiveEnum,
:: serde :: Serialize,
:: serde :: Deserialize,
:: utoipa :: ToSchema,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "user_role")]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum UserRole {
#[sea_orm(string_value = "student")]
Student,
#[sea_orm(string_value = "teacher")]
Teacher,
#[sea_orm(string_value = "admin")]
Admin,
}

View File

@@ -0,0 +1,25 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
use super::sea_orm_active_enums::UserRole;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "user")]
pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub id: String,
#[sea_orm(unique)]
pub username: String,
pub password: Option<String>,
pub vk_id: Option<i32>,
pub group: Option<String>,
pub role: UserRole,
pub android_version: Option<String>,
#[sea_orm(unique)]
pub telegram_id: Option<i64>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

1 database/migration/.gitignore vendored Normal file

@@ -0,0 +1 @@
/target

View File

@@ -0,0 +1,22 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
name = "migration"
path = "src/lib.rs"
[dependencies]
async-std = { version = "1", features = ["attributes", "tokio1"] }
[dependencies.sea-orm-migration]
version = "2.0.0-rc.6"
features = [
# Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
# View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
# e.g.
"runtime-tokio", # `ASYNC_RUNTIME` feature
"sqlx-postgres", # `DATABASE_DRIVER` feature
]

View File

@@ -0,0 +1,16 @@
pub use sea_orm_migration::prelude::MigratorTrait;
use sea_orm_migration::prelude::*;
mod m20250904_024854_init;
pub struct Migrator;
#[async_trait::async_trait]
impl MigratorTrait for Migrator {
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
vec![
Box::new(m20250904_024854_init::Migration),
]
}
}
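
A sketch of applying these migrations programmatically, in line with commit 84dca02c34 ("use migrator"); the wrapper function and connection handle are assumptions:

use migration::{Migrator, MigratorTrait};
use sea_orm_migration::sea_orm::{DbConn, DbErr};

async fn migrate(db: &DbConn) -> Result<(), DbErr> {
    // Applies every pending migration; currently just m20250904_024854_init.
    Migrator::up(db, None).await
}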

View File

@@ -0,0 +1,70 @@
use sea_orm_migration::prelude::extension::postgres::Type;
use sea_orm_migration::sea_orm::{EnumIter, Iterable};
use sea_orm_migration::{prelude::*, schema::*};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_type(
Type::create()
.as_enum(UserRole)
.values(UserRoleVariants::iter())
.to_owned(),
)
.await?;
manager
.create_table(
Table::create()
.table(User::Table)
.if_not_exists()
.col(string_uniq(User::Id).primary_key().not_null())
.col(string_uniq(User::Username).not_null())
.col(string_null(User::Password))
.col(integer_null(User::VkId))
.col(string_null(User::Group))
.col(enumeration(User::Role, UserRole, UserRoleVariants::iter()))
.col(string_null(User::AndroidVersion))
.col(big_integer_null(User::TelegramId).unique_key())
.to_owned(),
)
.await
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(User::Table).to_owned())
.await?;
manager
.drop_type(Type::drop().name(UserRole).to_owned())
.await
}
}
#[derive(DeriveIden)]
struct UserRole;
#[derive(DeriveIden, EnumIter)]
enum UserRoleVariants {
Student,
Teacher,
Admin,
}
#[derive(DeriveIden)]
enum User {
Table,
Id,
Username,
Password,
VkId,
Group,
Role,
AndroidVersion,
TelegramId,
}

View File

@@ -0,0 +1,6 @@
use sea_orm_migration::prelude::*;
#[async_std::main]
async fn main() {
cli::run_cli(migration::Migrator).await;
}

10 database/src/lib.rs Normal file

@@ -0,0 +1,10 @@
pub mod query;
pub use migration;
pub use sea_orm;
pub mod entity {
pub use entity::*;
pub use entity::user::{ActiveModel as ActiveUser, Model as User, Entity as UserEntity, Column as UserColumn};
}

63 database/src/query.rs Normal file

@@ -0,0 +1,63 @@
use paste::paste;
use sea_orm::ColumnTrait;
use sea_orm::EntityTrait;
use sea_orm::QueryFilter;
pub struct Query;
macro_rules! ref_type {
(String) => {
&String
};
(str) => {
&str
};
($other:ty) => {
$other
};
}
macro_rules! define_is_exists {
($entity: ident, $by: ident, $by_type: ident, $by_column: ident) => {
paste! {
pub async fn [<is_ $entity _exists_by_ $by>](
db: &::sea_orm::DbConn,
$by: ref_type!($by_type)
) -> Result<bool, ::sea_orm::DbErr> {
::entity::$entity::Entity::find()
.filter(::entity::$entity::Column::$by_column.eq($by))
.one(db)
.await
.map(|x| x.is_some())
}
}
};
}
macro_rules! define_find_by {
($entity: ident, $by: ident, $by_type: ident, $by_column: ident) => {
paste! {
pub async fn [<find_ $entity _by_ $by>](
db: &::sea_orm::DbConn,
$by: ref_type!($by_type)
) -> Result<Option<::entity::$entity::Model>, ::sea_orm::DbErr> {
::entity::$entity::Entity::find()
.filter(::entity::$entity::Column::$by_column.eq($by))
.one(db)
.await
}
}
};
}
impl Query {
define_find_by!(user, id, str, Id);
define_find_by!(user, telegram_id, i64, TelegramId);
define_find_by!(user, vk_id, i32, VkId);
define_find_by!(user, username, str, Username);
define_is_exists!(user, id, str, Id);
define_is_exists!(user, username, str, Username);
define_is_exists!(user, telegram_id, i64, TelegramId);
define_is_exists!(user, vk_id, i32, VkId);
}
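
A hedged usage sketch of the helpers these two macros expand to (the function names follow the paste-generated pattern above; the call site and username are hypothetical):

use database::query::Query;
use database::sea_orm::{DbConn, DbErr};

async fn demo(db: &DbConn) -> Result<(), DbErr> {
    // Expanded from define_is_exists!(user, username, str, Username).
    if Query::is_user_exists_by_username(db, "n08i40k").await? {
        // Expanded from define_find_by!(user, username, str, Username).
        let user = Query::find_user_by_username(db, "n08i40k").await?;
        println!("found user id: {:?}", user.map(|u| u.id));
    }
    Ok(())
}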

View File

@@ -1,9 +0,0 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/database/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "./migrations"

View File

View File

@@ -1,6 +0,0 @@
-- This file was automatically created by Diesel to set up helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

View File

@@ -1,36 +0,0 @@
-- This file was automatically created by Diesel to set up helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View File

@@ -1 +0,0 @@
DROP TYPE user_role;

View File

@@ -1,4 +0,0 @@
CREATE TYPE user_role AS ENUM (
'STUDENT',
'TEACHER',
'ADMIN');

View File

@@ -1 +0,0 @@
DROP TABLE users;

View File

@@ -1,11 +0,0 @@
CREATE TABLE users
(
id text PRIMARY KEY NOT NULL,
username text UNIQUE NOT NULL,
password text NOT NULL,
vk_id int4 NULL,
access_token text UNIQUE NOT NULL,
"group" text NOT NULL,
role user_role NOT NULL,
version text NOT NULL
);

View File

@@ -1 +0,0 @@
DROP TABLE fcm;

View File

@@ -1,6 +0,0 @@
CREATE TABLE fcm
(
user_id text PRIMARY KEY NOT NULL REFERENCES users (id),
token text NOT NULL,
topics text[] NOT NULL CHECK ( array_position(topics, null) is null )
);

View File

@@ -1,2 +0,0 @@
ALTER TABLE users DROP CONSTRAINT users_telegram_id_key;
ALTER TABLE users DROP COLUMN telegram_id;

View File

@@ -1,2 +0,0 @@
ALTER TABLE users ADD telegram_id int8 NULL;
ALTER TABLE users ADD CONSTRAINT users_telegram_id_key UNIQUE (telegram_id);

View File

@@ -1,2 +0,0 @@
UPDATE users SET "password" = '' WHERE "password" IS NULL;
ALTER TABLE users ALTER COLUMN "password" SET NOT NULL;

View File

@@ -1 +0,0 @@
ALTER TABLE users ALTER COLUMN "password" DROP NOT NULL;

View File

@@ -1,3 +0,0 @@
UPDATE users SET "android_version" = '' WHERE "android_version" IS NULL;
ALTER TABLE users ALTER COLUMN "android_version" SET NOT NULL;
ALTER TABLE users RENAME COLUMN android_version TO "version";

View File

@@ -1,2 +0,0 @@
ALTER TABLE users RENAME COLUMN "version" TO android_version;
ALTER TABLE users ALTER COLUMN android_version DROP NOT NULL;

View File

@@ -1,2 +0,0 @@
UPDATE users SET "group" = '' WHERE "group" IS NULL;
ALTER TABLE users ALTER COLUMN "group" SET NOT NULL;

View File

@@ -1 +0,0 @@
ALTER TABLE users ALTER COLUMN "group" DROP NOT NULL;

View File

@@ -1,2 +0,0 @@
UPDATE users SET "access_token" = '' WHERE "access_token" IS NULL;
ALTER TABLE users ALTER COLUMN "access_token" SET NOT NULL;

View File

@@ -1 +0,0 @@
ALTER TABLE users ALTER COLUMN "access_token" DROP NOT NULL;

12 providers/Cargo.toml Normal file

@@ -0,0 +1,12 @@
[package]
name = "providers"
version = "0.1.0"
edition = "2024"
[features]
test = ["provider-engels-polytechnic/test"]
[dependencies]
base = { path = "base" }
provider-engels-polytechnic = { path = "provider-engels-polytechnic" }

17 providers/base/Cargo.toml Normal file

@@ -0,0 +1,17 @@
[package]
name = "base"
version = "0.1.0"
edition = "2024"
[dependencies]
tokio-util = "0.7.16"
async-trait = "0.1.89"
chrono = { version = "0.4.41", features = ["serde"] }
serde = { version = "1.0.219", features = ["derive"] }
serde_repr = "0.1.20"
utoipa = { version = "5.4.0", features = ["macros", "chrono"] }
sha1 = "0.11.0-rc.2"

View File

@@ -1,4 +1,6 @@
use sha1::Digest;
use sha1::digest::OutputSizeUser;
use sha1::digest::typenum::Unsigned;
use std::hash::Hasher;
/// Hasher returning a hash from the algorithm implementing Digest
@@ -12,7 +14,20 @@ where
{
/// Obtain hash.
pub fn finalize(self) -> String {
hex::encode(self.digest.finalize().0)
static ALPHABET: [char; 16] = [
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F',
];
let mut hex = String::with_capacity(<D as OutputSizeUser>::OutputSize::USIZE * 2);
for byte in self.digest.finalize().0.into_iter() {
let byte: u8 = byte;
hex.push(ALPHABET[(byte >> 4) as usize]);
hex.push(ALPHABET[(byte & 0xF) as usize]);
}
hex
}
}
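
A hedged usage sketch of the hasher above; constructing it via DigestHasher::from(Sha1::new()) mirrors ScheduleSnapshot::hash, while the wrapper function is hypothetical:

use sha1::Sha1;
use std::hash::{Hash, Hasher};

fn hash_value<T: Hash>(value: &T) -> String {
    let mut hasher = DigestHasher::from(Sha1::new());
    value.hash(&mut hasher);
    // Yields 40 uppercase hex characters for SHA-1 (20 bytes, 2 chars each).
    hasher.finalize()
}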

292 providers/base/src/lib.rs Normal file

@@ -0,0 +1,292 @@
use crate::hasher::DigestHasher;
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};
use sha1::{Digest, Sha1};
use std::collections::HashMap;
use std::hash::Hash;
use std::sync::Arc;
use tokio_util::sync::CancellationToken;
use utoipa::ToSchema;
mod hasher;
// pub(crate) mod internal {
// use super::{LessonBoundaries, LessonType};
// use chrono::{DateTime, Utc};
//
// /// Data cell storing the group name.
// pub struct GroupCellInfo {
// /// Column index.
// pub column: u32,
//
// /// Text in the cell.
// pub name: String,
// }
//
// /// Data cell storing the day row.
// pub struct DayCellInfo {
// /// Row index.
// pub row: u32,
//
// /// Column index.
// pub column: u32,
//
// /// Day name.
// pub name: String,
//
// /// Date of the day.
// pub date: DateTime<Utc>,
// }
//
// /// Data on lesson times from the second column of the schedule.
// pub struct BoundariesCellInfo {
// /// Time span of the lesson.
// pub time_range: LessonBoundaries,
//
// /// Type of lesson.
// pub lesson_type: LessonType,
//
// /// The lesson index.
// pub default_index: Option<u32>,
//
// /// The cell's range in the XLS sheet.
// pub xls_range: ((u32, u32), (u32, u32)),
// }
// }
/// The beginning and end of the lesson.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonBoundaries {
/// The beginning of a lesson.
pub start: DateTime<Utc>,
/// The end of the lesson.
pub end: DateTime<Utc>,
}
/// Type of lesson.
#[derive(Clone, Hash, PartialEq, Debug, Serialize_repr, Deserialize_repr, ToSchema)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[repr(u8)]
pub enum LessonType {
/// Regular lesson.
Default = 0,
/// Additional classes.
Additional,
/// Break.
Break,
/// Consultation.
Consultation,
/// Independent work.
IndependentWork,
/// Credit test.
Exam,
/// Graded credit test.
ExamWithGrade,
/// Exam.
ExamDefault,
/// Course project.
CourseProject,
/// Course project defense.
CourseProjectDefense,
/// Practice lesson.
Practice
}
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonSubGroup {
/// Cabinet, if present.
pub cabinet: Option<String>,
/// Full name of the teacher.
pub teacher: Option<String>,
}
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct Lesson {
/// Type.
#[serde(rename = "type")]
pub lesson_type: LessonType,
/// Lesson indexes, if present.
pub range: Option<[u8; 2]>,
/// Name.
pub name: Option<String>,
/// The beginning and end.
pub time: LessonBoundaries,
/// List of subgroups.
#[serde(rename = "subgroups")]
pub subgroups: Option<Vec<Option<LessonSubGroup>>>,
/// Group name, if this is a schedule for teachers.
pub group: Option<String>,
}
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct Day {
/// Day of the week.
pub name: String,
/// Address of the other campus building, if any.
pub street: Option<String>,
/// Date.
pub date: DateTime<Utc>,
/// List of lessons on this day.
pub lessons: Vec<Lesson>,
}
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct ScheduleEntry {
/// The name of the group or name of the teacher.
pub name: String,
/// List of six days.
pub days: Vec<Day>,
}
#[derive(Clone)]
pub struct ParsedSchedule {
/// List of groups.
pub groups: HashMap<String, ScheduleEntry>,
/// List of teachers.
pub teachers: HashMap<String, ScheduleEntry>,
}
/// Represents a snapshot of the schedule parsed from an XLS file.
#[derive(Clone)]
pub struct ScheduleSnapshot {
/// Timestamp when the Polytechnic website was queried for the schedule.
pub fetched_at: DateTime<Utc>,
/// Timestamp indicating when the schedule was last updated on the Polytechnic website.
///
/// <note>
/// This value is determined by the website's content and does not depend on the application.
/// </note>
pub updated_at: DateTime<Utc>,
/// URL pointing to the XLS file containing the source schedule data.
pub url: String,
/// Parsed schedule data in the application's internal representation.
pub data: ParsedSchedule,
}
impl ScheduleSnapshot {
/// Computes a hash of the schedule data.
/// ### Important!
/// The hash does not depend on the dates.
/// If the application is restarted but the source schedule file remains unchanged, the hash will not change.
pub fn hash(&self) -> String {
let mut hasher = DigestHasher::from(Sha1::new());
self.data.teachers.iter().for_each(|e| e.hash(&mut hasher));
self.data.groups.iter().for_each(|e| e.hash(&mut hasher));
hasher.finalize()
}
/// Simply updates the value of [`ScheduleSnapshot::fetched_at`].
/// Used for auto-updates.
pub fn update(&mut self) {
self.fetched_at = Utc::now();
}
}
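
A sketch of using the hash for change detection between two snapshots, relying on the date-independence noted above (the comparison function is hypothetical):

fn schedule_changed(old: &ScheduleSnapshot, new: &ScheduleSnapshot) -> bool {
    // fetched_at / updated_at are not hashed, so only content changes are reported.
    old.hash() != new.hash()
}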
// #[derive(Clone, Debug, Display, Error, ToSchema)]
// #[display("row {row}, column {column}")]
// pub struct ErrorCellPos {
// pub row: u32,
// pub column: u32,
// }
//
// #[derive(Clone, Debug, Display, Error, ToSchema)]
// #[display("'{data}' at {pos}")]
// pub struct ErrorCell {
// pub pos: ErrorCellPos,
// pub data: String,
// }
//
// impl ErrorCell {
// pub fn new(row: u32, column: u32, data: String) -> Self {
// Self {
// pos: ErrorCellPos { row, column },
// data,
// }
// }
// }
// #[derive(Clone, Debug, Display, Error, ToSchema)]
// pub enum ParseError {
// /// Errors related to reading XLS file.
// #[display("{_0:?}: Failed to read XLS file.")]
// #[schema(value_type = String)]
// BadXLS(Arc<calamine::XlsError>),
//
// /// Not a single sheet was found.
// #[display("No work sheets found.")]
// NoWorkSheets,
//
// /// There is no data on the worksheet boundaries.
// #[display("There is no data on work sheet boundaries.")]
// UnknownWorkSheetRange,
//
// /// Failed to read the lesson's start and end from the cell.
// #[display("Failed to read lesson start and end from {_0}.")]
// LessonBoundaries(ErrorCell),
//
// /// No start and end times corresponding to the lesson were found.
// #[display("No start and end times matching the lesson (at {_0}) was found.")]
// LessonTimeNotFound(ErrorCellPos),
// }
//
// impl Serialize for ParseError {
// fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
// where
// S: Serializer,
// {
// match self {
// ParseError::BadXLS(_) => serializer.serialize_str("BAD_XLS"),
// ParseError::NoWorkSheets => serializer.serialize_str("NO_WORK_SHEETS"),
// ParseError::UnknownWorkSheetRange => {
// serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE")
// }
// ParseError::LessonBoundaries(_) => serializer.serialize_str("GLOBAL_TIME"),
// ParseError::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"),
// }
// }
// }
#[async_trait]
pub trait ScheduleProvider
where
Self: Sync + Send,
{
/// Returns `Ok` when the task has been cancelled.
/// Returns `Err` when an error occurred while downloading or parsing the schedule.
async fn start_auto_update_task(
&self,
cancellation_token: CancellationToken,
) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>>;
async fn get_schedule(&self) -> Arc<ScheduleSnapshot>;
}
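
A hedged sketch of driving this trait from a consumer, assuming a tokio runtime and any Arc<dyn ScheduleProvider>:

use std::sync::Arc;
use tokio_util::sync::CancellationToken;

async fn run(provider: Arc<dyn ScheduleProvider>) {
    let token = CancellationToken::new();
    let task = {
        let provider = provider.clone();
        let token = token.clone();
        tokio::spawn(async move { provider.start_auto_update_task(token).await })
    };

    let snapshot = provider.get_schedule().await;
    println!("schedule fetched at {}", snapshot.fetched_at);

    token.cancel(); // ask the auto-update task to stop
    let _ = task.await; // Ok(Ok(())) on clean cancellation
}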

View File

@@ -0,0 +1,32 @@
[package]
name = "provider-engels-polytechnic"
version = "0.2.0"
edition = "2024"
[features]
test = []
[dependencies]
base = { path = "../base" }
tokio = { version = "1.47.1", features = ["sync", "macros", "time"] }
tokio-util = "0.7.16"
chrono = { version = "0.4.41", features = ["serde"] }
serde = { version = "1.0.219", features = ["derive"] }
derive_more = { version = "2.0.1", features = ["error", "display"] }
utoipa = { version = "5.4.0", features = ["macros", "chrono"] }
calamine = "0.30"
async-trait = "0.1.89"
reqwest = "0.12.23"
ua_generator = "0.5.22"
regex = "1.11.2"
strsim = "0.11.1"
log = "0.4.27"
sentry = "0.43.0"

View File

@@ -0,0 +1,85 @@
use crate::updater::Updater;
use async_trait::async_trait;
use base::{ScheduleProvider, ScheduleSnapshot};
use std::ops::DerefMut;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::RwLock;
use tokio::time::interval;
use tokio_util::sync::CancellationToken;
pub use crate::updater::UpdateSource;
mod parser;
mod updater;
mod xls_downloader;
#[cfg(feature = "test")]
pub mod test_utils {
pub use crate::parser::test_utils::test_result;
}
pub struct EngelsPolytechnicProvider {
updater: Updater,
snapshot: Arc<ScheduleSnapshot>,
}
impl EngelsPolytechnicProvider {
pub async fn get(
update_source: UpdateSource,
) -> Result<Arc<dyn ScheduleProvider>, crate::updater::error::Error> {
let (updater, snapshot) = Updater::new(update_source).await?;
Ok(Arc::new(Wrapper {
inner: RwLock::new(Self {
updater,
snapshot: Arc::new(snapshot),
}),
}))
}
}
pub struct Wrapper {
inner: RwLock<EngelsPolytechnicProvider>,
}
#[async_trait]
impl ScheduleProvider for Wrapper {
async fn start_auto_update_task(
&self,
cancellation_token: CancellationToken,
) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
let mut ticker = interval(Duration::from_secs(60 * 30));
ticker.tick().await; // the first tick completes immediately; skip it, since the provider already holds the latest schedule from instantiation
loop {
tokio::select! {
_ = ticker.tick() => {
let mut lock = self.inner.write().await;
let this = lock.deref_mut();
log::info!("Updating schedule...");
match this.updater.update(&this.snapshot).await {
Ok(snapshot) => {
this.snapshot = Arc::new(snapshot);
},
Err(err) => {
cancellation_token.cancel();
return Err(err.into());
}
}
}
_ = cancellation_token.cancelled() => {
return Ok(());
}
}
}
}
async fn get_schedule(&self) -> Arc<ScheduleSnapshot> {
self.inner.read().await.snapshot.clone()
}
}
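
A usage sketch of constructing the provider; the URL is a placeholder and UpdateSource::Url is one of the three variants declared in the updater:

use base::ScheduleProvider;
use std::sync::Arc;

async fn make_provider() -> Result<Arc<dyn ScheduleProvider>, Box<dyn std::error::Error + Send + Sync>> {
    let provider = EngelsPolytechnicProvider::get(UpdateSource::Url(
        "https://politehnikum-eng.ru/schedule.xls".to_string(), // placeholder URL
    ))
    .await?;
    Ok(provider)
}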

View File

@@ -1,12 +1,12 @@
use crate::LessonParseResult::{Lessons, Street};
use crate::schema::LessonType::Break;
use crate::schema::internal::{BoundariesCellInfo, DayCellInfo, GroupCellInfo};
use crate::schema::{
Day, ErrorCell, ErrorCellPos, Lesson, LessonBoundaries, LessonSubGroup, LessonType, ParseError,
ParseResult, ScheduleEntry,
use crate::or_continue;
use crate::parser::error::{Error, ErrorCell, ErrorCellPos};
use crate::parser::worksheet::WorkSheet;
use crate::parser::LessonParseResult::{Lessons, Street};
use base::LessonType::Break;
use base::{
Day, Lesson, LessonBoundaries, LessonSubGroup, LessonType, ParsedSchedule, ScheduleEntry,
};
use crate::worksheet::WorkSheet;
use calamine::{Reader, Xls, open_workbook_from_rs};
use calamine::{open_workbook_from_rs, Reader, Xls};
use chrono::{DateTime, Duration, NaiveDate, NaiveTime, Utc};
use regex::Regex;
use std::collections::HashMap;
@@ -14,18 +14,128 @@ use std::io::Cursor;
use std::sync::LazyLock;
mod macros;
pub mod schema;
mod worksheet;
pub mod error {
use derive_more::{Display, Error};
use serde::{Serialize, Serializer};
use std::sync::Arc;
use utoipa::ToSchema;
#[derive(Clone, Debug, Display, Error, ToSchema)]
#[display("row {row}, column {column}")]
pub struct ErrorCellPos {
pub row: u32,
pub column: u32,
}
#[derive(Clone, Debug, Display, Error, ToSchema)]
#[display("'{data}' at {pos}")]
pub struct ErrorCell {
pub pos: ErrorCellPos,
pub data: String,
}
impl ErrorCell {
pub fn new(row: u32, column: u32, data: String) -> Self {
Self {
pos: ErrorCellPos { row, column },
data,
}
}
}
#[derive(Clone, Debug, Display, Error, ToSchema)]
pub enum Error {
/// Errors related to reading XLS file.
#[display("{_0:?}: Failed to read XLS file.")]
#[schema(value_type = String)]
BadXLS(Arc<calamine::XlsError>),
/// Not a single worksheet was found.
#[display("No work sheets found.")]
NoWorkSheets,
/// There is no data on the worksheet boundaries.
#[display("There is no data on work sheet boundaries.")]
UnknownWorkSheetRange,
/// Failed to read the lesson's start and end from the cell.
#[display("Failed to read lesson start and end from {_0}.")]
LessonBoundaries(ErrorCell),
/// No start and end times corresponding to the lesson were found.
#[display("No start and end times matching the lesson (at {_0}) was found.")]
LessonTimeNotFound(ErrorCellPos),
}
impl Serialize for Error {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Error::BadXLS(_) => serializer.serialize_str("BAD_XLS"),
Error::NoWorkSheets => serializer.serialize_str("NO_WORK_SHEETS"),
Error::UnknownWorkSheetRange => {
serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE")
}
Error::LessonBoundaries(_) => serializer.serialize_str("GLOBAL_TIME"),
Error::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"),
}
}
}
}
/// Data cell storing the group name.
pub struct GroupCellInfo {
/// Column index.
pub column: u32,
/// Text in the cell.
pub name: String,
}
/// Data cell storing the day row.
pub struct DayCellInfo {
/// Row index.
pub row: u32,
/// Column index.
pub column: u32,
/// Day name.
pub name: String,
/// Date of the day.
pub date: DateTime<Utc>,
}
/// Data on lesson times from the second column of the schedule.
pub struct BoundariesCellInfo {
/// Time span of the lesson.
pub time_range: LessonBoundaries,
/// Type of lesson.
pub lesson_type: LessonType,
/// The lesson index.
pub default_index: Option<u32>,
/// The cell's range in the XLS sheet.
pub xls_range: ((u32, u32), (u32, u32)),
}
/// Obtaining a "skeleton" schedule from the working sheet.
fn parse_skeleton(
worksheet: &WorkSheet,
) -> Result<(Vec<DayCellInfo>, Vec<GroupCellInfo>), ParseError> {
) -> Result<(Vec<DayCellInfo>, Vec<GroupCellInfo>), crate::parser::error::Error> {
let mut groups: Vec<GroupCellInfo> = Vec::new();
let mut days: Vec<(u32, String, Option<DateTime<Utc>>)> = Vec::new();
let worksheet_start = worksheet.start().ok_or(ParseError::UnknownWorkSheetRange)?;
let worksheet_end = worksheet.end().ok_or(ParseError::UnknownWorkSheetRange)?;
let worksheet_start = worksheet
.start()
.ok_or(error::Error::UnknownWorkSheetRange)?;
let worksheet_end = worksheet.end().ok_or(error::Error::UnknownWorkSheetRange)?;
let mut row = worksheet_start.0;
@@ -42,7 +152,8 @@ fn parse_skeleton(
for column in (worksheet_start.1 + 2)..=worksheet_end.1 {
groups.push(GroupCellInfo {
column,
name: or_continue!(worksheet.get_string_from_cell(row, column)),
name: or_continue!(worksheet.get_string_from_cell(row, column))
.replace(" ", ""),
});
}
@@ -119,7 +230,7 @@ enum LessonParseResult {
// noinspection GrazieInspection
/// Obtaining a non-standard type of lesson by name.
fn guess_lesson_type(text: &String) -> Option<LessonType> {
fn guess_lesson_type(text: &str) -> Option<LessonType> {
static MAP: LazyLock<HashMap<&str, LessonType>> = LazyLock::new(|| {
HashMap::from([
("консультация", LessonType::Consultation),
@@ -129,30 +240,27 @@ fn guess_lesson_type(text: &String) -> Option<LessonType> {
("экзамен", LessonType::ExamDefault),
("курсовой проект", LessonType::CourseProject),
("защита курсового проекта", LessonType::CourseProjectDefense),
("практическое занятие", LessonType::Practice),
])
});
let name_lower = text.to_lowercase();
match MAP
.iter()
.map(|(text, lesson_type)| (lesson_type, strsim::levenshtein(text, &*name_lower)))
MAP.iter()
.map(|(text, lesson_type)| (lesson_type, strsim::levenshtein(text, &name_lower)))
.filter(|x| x.1 <= 4)
.min_by_key(|(_, score)| *score)
{
None => None,
Some(v) => Some(v.0.clone()),
}
.map(|v| v.0.clone())
}
/// Getting a lesson or street from a cell.
fn parse_lesson(
worksheet: &WorkSheet,
day: &Day,
day_boundaries: &Vec<BoundariesCellInfo>,
day_boundaries: &[BoundariesCellInfo],
lesson_boundaries: &BoundariesCellInfo,
group_column: u32,
) -> Result<LessonParseResult, ParseError> {
) -> Result<LessonParseResult, crate::parser::error::Error> {
let row = lesson_boundaries.xls_range.0.0;
let name = {
@@ -179,15 +287,14 @@ fn parse_lesson(
.filter(|time| time.xls_range.1.0 == cell_range.1.0)
.collect::<Vec<&BoundariesCellInfo>>();
let end_time =
end_time_arr
let end_time = end_time_arr
.first()
.ok_or(ParseError::LessonTimeNotFound(ErrorCellPos {
.ok_or(error::Error::LessonTimeNotFound(ErrorCellPos {
row,
column: group_column,
}))?;
let range: Option<[u8; 2]> = if lesson_boundaries.default_index != None {
let range: Option<[u8; 2]> = if lesson_boundaries.default_index.is_some() {
let default = lesson_boundaries.default_index.unwrap() as u8;
Some([default, end_time.default_index.unwrap() as u8])
} else {
@@ -202,7 +309,11 @@ fn parse_lesson(
Ok((range, time))
}?;
let (name, mut subgroups, lesson_type) = parse_name_and_subgroups(&name)?;
let ParsedLessonName {
name,
mut subgroups,
r#type: lesson_type,
} = parse_name_and_subgroups(&name)?;
{
let cabinets: Vec<String> = parse_cabinets(
@@ -215,13 +326,11 @@ fn parse_lesson(
if cab_count == 1 {
// Assign this cabinet to all subgroups
let cab = Some(cabinets.get(0).unwrap().clone());
let cab = Some(cabinets.first().unwrap().clone());
for subgroup in &mut subgroups {
if let Some(subgroup) = subgroup {
for subgroup in subgroups.iter_mut().flatten() {
subgroup.cabinet = cab.clone()
}
}
} else if cab_count == 2 {
while subgroups.len() < cab_count {
subgroups.push(subgroups.last().unwrap_or(&None).clone());
@@ -251,10 +360,7 @@ fn parse_lesson(
range: default_range,
name: Some(name),
time: lesson_time,
subgroups: if subgroups.len() == 2
&& subgroups.get(0).unwrap().is_none()
&& subgroups.get(1).unwrap().is_none()
{
subgroups: if subgroups.len() == 2 && subgroups.iter().all(|x| x.is_none()) {
None
} else {
Some(subgroups)
@@ -306,11 +412,15 @@ fn parse_cabinets(worksheet: &WorkSheet, row_range: (u32, u32), column: u32) ->
cabinets
}
struct ParsedLessonName {
name: String,
subgroups: Vec<Option<LessonSubGroup>>,
r#type: Option<LessonType>,
}
//noinspection GrazieInspection
/// Getting the "pure" name of the lesson and list of teachers from the text of the lesson cell.
fn parse_name_and_subgroups(
text: &String,
) -> Result<(String, Vec<Option<LessonSubGroup>>, Option<LessonType>), ParseError> {
fn parse_name_and_subgroups(text: &str) -> Result<ParsedLessonName, Error> {
// Parts of a lesson name:
// 1. The name itself.
// 2. The list of teachers and subgroups.
@@ -338,7 +448,7 @@ fn parse_name_and_subgroups(
static NAMES_REGEX: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r"(?:[А-Я][а-я]+\s?(?:[А-Я][\s.]*){2}(?:\(\s*\d\s*[а-я\s]+\))?(?:[\s,]+)?){1,2}+[\s.,]*",
r"(?:[А-Я][а-я]+\s?(?:[А-Я][\s.]*){2}(?:\(?\s*\d\s*[а-я\s]+\)?)?(?:[\s,.]+)?){1,2}+[\s.,]*",
)
.unwrap()
});
@@ -347,7 +457,7 @@ fn parse_name_and_subgroups(
static CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\s\n\t]+").unwrap());
let text = CLEAN_RE
.replace(&text.replace(&[' ', '\t', '\n'], " "), " ")
.replace(&text.replace([' ', '\t', '\n'], " ").replace(",", ""), " ")
.to_string();
let (lesson_name, subgroups, lesson_type) = match NAMES_REGEX.captures(&text) {
@@ -355,19 +465,21 @@ fn parse_name_and_subgroups(
let capture = captures.get(0).unwrap();
let subgroups: Vec<Option<LessonSubGroup>> = {
let src = capture.as_str().replace(&[' ', '.'], "");
let src = capture.as_str().replace([' ', '.'], "");
let mut shared_subgroup = false;
let mut subgroups: [Option<LessonSubGroup>; 2] = [None, None];
for name in src.split(',') {
let open_bracket_index = name.find('(');
let digit_index = name.find(|c: char| c.is_ascii_digit());
let number: u8 = open_bracket_index
.map_or(0, |index| name[(index + 1)..(index + 2)].parse().unwrap());
let number: u8 =
digit_index.map_or(0, |index| name[(index)..(index + 1)].parse().unwrap());
let teacher_name = {
let name_end = open_bracket_index.unwrap_or_else(|| name.len());
let name_end = name
.find(|c: char| !c.is_alphabetic())
.unwrap_or(name.len());
// It took me embarrassingly long to figure this out.
format!(
@@ -416,7 +528,7 @@ fn parse_name_and_subgroups(
if result.is_none() {
#[cfg(not(debug_assertions))]
sentry::capture_message(
&*format!("Не удалось угадать тип пары '{}'!", extra),
&format!("Не удалось угадать тип пары '{}'!", extra),
sentry::Level::Warning,
);
@@ -434,7 +546,11 @@ fn parse_name_and_subgroups(
None => (text, Vec::new(), None),
};
Ok((lesson_name, subgroups, lesson_type))
Ok(ParsedLessonName {
name: lesson_name,
subgroups,
r#type: lesson_type,
})
}
/// Getting the start and end of a lesson from a cell in the first column of the document.
@@ -443,18 +559,11 @@ fn parse_name_and_subgroups(
///
/// * `cell_data`: text in cell.
/// * `date`: date of the current day.
fn parse_lesson_boundaries_cell(
cell_data: &String,
date: DateTime<Utc>,
) -> Option<LessonBoundaries> {
fn parse_lesson_boundaries_cell(cell_data: &str, date: DateTime<Utc>) -> Option<LessonBoundaries> {
static TIME_RE: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"(\d+\.\d+)-(\d+\.\d+)").unwrap());
let parse_res = if let Some(captures) = TIME_RE.captures(cell_data) {
captures
} else {
return None;
};
let parse_res = TIME_RE.captures(cell_data)?;
let start_match = parse_res.get(1).unwrap().as_str();
let start_parts: Vec<&str> = start_match.split(".").collect();
@@ -468,7 +577,7 @@ fn parse_lesson_boundaries_cell(
};
Some(LessonBoundaries {
start: GET_TIME(date.clone(), &start_parts),
start: GET_TIME(date, &start_parts),
end: GET_TIME(date, &end_parts),
})
}
@@ -486,7 +595,7 @@ fn parse_day_boundaries(
date: DateTime<Utc>,
row_range: (u32, u32),
column: u32,
) -> Result<Vec<BoundariesCellInfo>, ParseError> {
) -> Result<Vec<BoundariesCellInfo>, crate::parser::error::Error> {
let mut day_times: Vec<BoundariesCellInfo> = Vec::new();
for row in row_range.0..row_range.1 {
@@ -496,8 +605,8 @@ fn parse_day_boundaries(
continue;
};
let lesson_time = parse_lesson_boundaries_cell(&time_cell, date.clone()).ok_or(
ParseError::LessonBoundaries(ErrorCell::new(row, column, time_cell.clone())),
let lesson_time = parse_lesson_boundaries_cell(&time_cell, date).ok_or(
error::Error::LessonBoundaries(ErrorCell::new(row, column, time_cell.clone())),
)?;
// type
@@ -541,8 +650,8 @@ fn parse_day_boundaries(
/// * `week_markup`: markup of the current week.
fn parse_week_boundaries(
worksheet: &WorkSheet,
week_markup: &Vec<DayCellInfo>,
) -> Result<Vec<Vec<BoundariesCellInfo>>, ParseError> {
week_markup: &[DayCellInfo],
) -> Result<Vec<Vec<BoundariesCellInfo>>, crate::parser::error::Error> {
let mut result: Vec<Vec<BoundariesCellInfo>> = Vec::new();
let worksheet_end_row = worksheet.end().unwrap().0;
@@ -560,8 +669,8 @@ fn parse_week_boundaries(
};
let day_boundaries = parse_day_boundaries(
&worksheet,
day_markup.date.clone(),
worksheet,
day_markup.date,
(day_markup.row, end_row),
lesson_time_column,
)?;
@@ -587,7 +696,7 @@ fn convert_groups_to_teachers(
.map(|day| Day {
name: day.name.clone(),
street: day.street.clone(),
date: day.date.clone(),
date: day.date,
lessons: vec![],
})
.collect();
@@ -662,35 +771,22 @@ fn convert_groups_to_teachers(
///
/// * `buffer`: XLS data containing schedule.
///
/// returns: Result<ParseResult, ParseError>
///
/// # Examples
///
/// ```
/// use schedule_parser::parse_xls;
///
/// let result = parse_xls(&include_bytes!("../../schedule.xls").to_vec());
///
/// assert!(result.is_ok(), "{}", result.err().unwrap());
///
/// assert_ne!(result.as_ref().unwrap().groups.len(), 0);
/// assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
/// ```
pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
/// returns: Result<ParsedSchedule, crate::parser::error::Error>
pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule, crate::parser::error::Error> {
let cursor = Cursor::new(&buffer);
let mut workbook: Xls<_> =
open_workbook_from_rs(cursor).map_err(|e| ParseError::BadXLS(std::sync::Arc::new(e)))?;
open_workbook_from_rs(cursor).map_err(|e| error::Error::BadXLS(std::sync::Arc::new(e)))?;
let worksheet = {
let (worksheet_name, worksheet) = workbook
.worksheets()
.first()
.ok_or(ParseError::NoWorkSheets)?
.ok_or(error::Error::NoWorkSheets)?
.clone();
let worksheet_merges = workbook
.worksheet_merge_cells(&*worksheet_name)
.ok_or(ParseError::NoWorkSheets)?;
.worksheet_merge_cells(&worksheet_name)
.ok_or(error::Error::NoWorkSheets)?;
WorkSheet {
data: worksheet,
@@ -709,7 +805,7 @@ pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
days: Vec::new(),
};
for day_index in 0..(&week_markup).len() {
for day_index in 0..week_markup.len() {
let day_markup = &week_markup[day_index];
let mut day = Day {
@@ -725,8 +821,8 @@ pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
match &mut parse_lesson(
&worksheet,
&day,
&day_boundaries,
&lesson_boundaries,
day_boundaries,
lesson_boundaries,
group_markup.column,
)? {
Lessons(lesson) => day.lessons.append(lesson),
@@ -740,18 +836,19 @@ pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
groups.insert(group.name.clone(), group);
}
Ok(ParseResult {
Ok(ParsedSchedule {
teachers: convert_groups_to_teachers(&groups),
groups,
})
}
#[cfg(any(test, feature = "test-utils"))]
#[cfg(any(test, feature = "test"))]
pub mod test_utils {
use super::*;
use base::ParsedSchedule;
pub fn test_result() -> Result<ParseResult, ParseError> {
parse_xls(&include_bytes!("../../schedule.xls").to_vec())
pub fn test_result() -> Result<ParsedSchedule, crate::parser::error::Error> {
parse_xls(&include_bytes!("../../../../test-data/engels-polytechnic.xls").to_vec())
}
}

View File

@@ -0,0 +1,291 @@
use crate::parser::parse_xls;
use crate::updater::error::{Error, QueryUrlError, SnapshotCreationError};
use crate::xls_downloader::{FetchError, XlsDownloader};
use base::ScheduleSnapshot;
pub enum UpdateSource {
Prepared(ScheduleSnapshot),
Url(String),
GrabFromSite {
yandex_api_key: String,
yandex_func_id: String,
},
}
pub struct Updater {
downloader: XlsDownloader,
update_source: UpdateSource,
}
pub mod error {
use crate::xls_downloader::FetchError;
use derive_more::{Display, Error};
#[derive(Debug, Display, Error)]
pub enum Error {
/// An error occurred while querying the Yandex Cloud API for a URL.
///
/// This may result from network failures, invalid API credentials, or issues with the Yandex Cloud Function invocation.
/// See [`QueryUrlError`] for more details about specific causes.
QueryUrlFailed(QueryUrlError),
/// The schedule snapshot creation process failed.
///
/// This can happen due to URL conflicts (same URL already in use), failed network requests,
/// download errors, or invalid XLS file content. See [`SnapshotCreationError`] for details.
SnapshotCreationFailed(SnapshotCreationError),
}
/// Errors that may occur when querying the Yandex Cloud API to retrieve a URL.
#[derive(Debug, Display, Error)]
pub enum QueryUrlError {
/// Occurs when the request to the Yandex Cloud API fails.
///
/// This may be due to network issues, invalid API key, incorrect function ID, or other
/// problems with the Yandex Cloud Function invocation.
#[display("An error occurred during the request to the Yandex Cloud API: {_0}")]
RequestFailed(reqwest::Error),
#[display("Unable to fetch Uri in 3 retries")]
UriFetchFailed,
}
/// Errors that may occur during the creation of a schedule snapshot.
#[derive(Debug, Display, Error)]
pub enum SnapshotCreationError {
/// The ETag is the same (no update needed).
#[display("The ETag is the same.")]
Same,
/// The URL query for the XLS file failed to execute, either due to network issues or invalid API parameters.
#[display("Failed to fetch URL: {_0}")]
FetchFailed(FetchError),
/// Downloading the XLS file content failed after successfully obtaining the URL.
#[display("Download failed: {_0}")]
DownloadFailed(FetchError),
/// The XLS file could not be parsed into a valid schedule format.
#[display("Schedule data is invalid: {_0}")]
InvalidSchedule(crate::parser::error::Error),
}
}
impl Updater {
/// Constructs a new `ScheduleSnapshot` by downloading and parsing schedule data from the specified URL.
///
/// This method updates the downloader's URL, performs a HEAD request, and compares the returned ETag with the stored one.
/// If the ETag is unchanged, [`SnapshotCreationError::Same`] is returned; otherwise the XLS content is fetched, parsed, and wrapped in a snapshot.
/// Errors are also returned for network issues, download failures, or invalid data.
///
/// # Arguments
///
/// * `downloader`: A mutable reference to the `XlsDownloader` used to fetch the schedule data.
/// * `url`: The source URL pointing to the XLS file containing schedule data.
///
/// returns: Result<ScheduleSnapshot, SnapshotCreationError>
pub async fn new_snapshot(
downloader: &mut XlsDownloader,
url: String,
) -> Result<ScheduleSnapshot, SnapshotCreationError> {
let head_result = downloader.set_url(&url).await.map_err(|error| {
if let FetchError::Unknown(error) = &error {
sentry::capture_error(&error);
}
SnapshotCreationError::FetchFailed(error)
})?;
if downloader.etag == Some(head_result.etag) {
return Err(SnapshotCreationError::Same);
}
let xls_data = downloader
.fetch(false)
.await
.map_err(|error| {
if let FetchError::Unknown(error) = &error {
sentry::capture_error(&error);
}
SnapshotCreationError::DownloadFailed(error)
})?
.data
.unwrap();
let parse_result = parse_xls(&xls_data).map_err(|error| {
sentry::capture_error(&error);
SnapshotCreationError::InvalidSchedule(error)
})?;
Ok(ScheduleSnapshot {
fetched_at: head_result.requested_at,
updated_at: head_result.uploaded_at,
url,
data: parse_result,
})
}
/// Queries the Yandex Cloud Function (FaaS) to obtain a URL for the schedule file.
///
/// This sends a POST request to the specified Yandex Cloud Function endpoint,
/// using the provided API key for authentication. The returned URI is combined
/// with the "https://politehnikum-eng.ru" base domain to form the complete URL.
///
/// # Arguments
///
/// * `api_key` - Authentication token for Yandex Cloud API
/// * `func_id` - ID of the target Yandex Cloud Function to invoke
///
/// # Returns
///
/// Result containing:
/// - `Ok(String)` - Complete URL constructed from the Function's response
/// - `Err(QueryUrlError)` - If the request or response processing fails
async fn query_url(api_key: &str, func_id: &str) -> Result<String, QueryUrlError> {
let client = reqwest::Client::new();
let uri = {
// I wish Rust had named scopes like Kotlin,
// so I wouldn't have to resort to hacks like this.
#[allow(unused_assignments)]
let mut uri = String::new();
let mut counter = 0;
loop {
if counter == 3 {
return Err(QueryUrlError::UriFetchFailed);
}
counter += 1;
uri = client
.post(format!(
"https://functions.yandexcloud.net/{}?integration=raw",
func_id
))
.header("Authorization", format!("Api-Key {}", api_key))
.send()
.await
.map_err(QueryUrlError::RequestFailed)?
.text()
.await
.map_err(QueryUrlError::RequestFailed)?;
if uri.is_empty() {
log::warn!("[{}] Unable to get uri! Retrying...", counter);
continue;
}
break;
}
uri
};
Ok(format!("https://politehnikum-eng.ru{}", uri.trim()))
}
/// Initializes the updater by resolving the schedule URL from the update source (a fixed URL or the
/// Yandex Cloud Function (FaaS)) and creating a [`ScheduleSnapshot`] with the downloaded data.
///
/// # Arguments
///
/// * `update_source`: Where the schedule comes from: a prepared snapshot, a fixed URL, or Yandex Cloud credentials
///
/// # Returns
///
/// Returns the updater together with the initial snapshot, or an `Error` if:
/// - URL query to Yandex Cloud failed ([`QueryUrlError`])
/// - Schedule snapshot creation failed ([`SnapshotCreationError`])
pub async fn new(update_source: UpdateSource) -> Result<(Self, ScheduleSnapshot), Error> {
let mut this = Updater {
downloader: XlsDownloader::new(),
update_source,
};
if let UpdateSource::Prepared(snapshot) = &this.update_source {
let snapshot = snapshot.clone();
return Ok((this, snapshot));
}
let url = match &this.update_source {
UpdateSource::Url(url) => {
log::info!("The default link {} will be used", url);
url.clone()
}
UpdateSource::GrabFromSite {
yandex_api_key,
yandex_func_id,
} => {
log::info!("Obtaining a link using FaaS...");
Self::query_url(yandex_api_key, yandex_func_id)
.await
.map_err(Error::QueryUrlFailed)?
}
_ => unreachable!(),
};
log::info!("For the initial setup, a link {} will be used", url);
let snapshot = Self::new_snapshot(&mut this.downloader, url)
.await
.map_err(Error::SnapshotCreationFailed)?;
log::info!("Schedule snapshot successfully created!");
Ok((this, snapshot))
}
/// Updates the schedule snapshot by querying the latest URL from the update source and checking for changes.
/// If the remote ETag hasn't changed, only the [`ScheduleSnapshot::fetched_at`] timestamp is refreshed;
/// otherwise, the new schedule data is downloaded and parsed.
///
/// # Arguments
///
/// * `current_snapshot`: The currently held snapshot, reused (with a refreshed timestamp) when the remote file is unchanged
///
/// returns: `Result<ScheduleSnapshot, Error>` - Returns an error if the URL query fails or schedule parsing encounters issues
pub async fn update(
&mut self,
current_snapshot: &ScheduleSnapshot,
) -> Result<ScheduleSnapshot, Error> {
if let UpdateSource::Prepared(snapshot) = &self.update_source {
let mut snapshot = snapshot.clone();
snapshot.update();
return Ok(snapshot);
}
let url = match &self.update_source {
UpdateSource::Url(url) => url.clone(),
UpdateSource::GrabFromSite {
yandex_api_key,
yandex_func_id,
} => Self::query_url(yandex_api_key.as_str(), yandex_func_id.as_str())
.await
.map_err(Error::QueryUrlFailed)?,
_ => unreachable!(),
};
let snapshot = match Self::new_snapshot(&mut self.downloader, url).await {
Ok(snapshot) => snapshot,
Err(SnapshotCreationError::Same) => {
let mut clone = current_snapshot.clone();
clone.update();
clone
}
Err(error) => return Err(Error::SnapshotCreationFailed(error)),
};
Ok(snapshot)
}
}


@@ -0,0 +1,253 @@
use chrono::{DateTime, Utc};
use derive_more::{Display, Error};
use std::mem::discriminant;
use std::sync::Arc;
use utoipa::ToSchema;
/// XLS data retrieval errors.
#[derive(Clone, Debug, ToSchema, Display, Error)]
pub enum FetchError {
/// File url is not set.
#[display("The link to the timetable was not provided earlier.")]
NoUrlProvided,
/// Unknown error.
#[display("An unknown error occurred while downloading the file.")]
#[schema(value_type = String)]
Unknown(Arc<reqwest::Error>),
/// Server returned a status code different from 200.
#[display("Server returned a status code {status_code}.")]
BadStatusCode { status_code: u16 },
/// The url leads to a file of a different type.
#[display("The link leads to a file of type '{content_type}'.")]
BadContentType { content_type: String },
/// Server didn't return an expected header.
#[display("Server didn't return expected header '{expected_header}'.")]
BadHeaders { expected_header: String },
}
impl FetchError {
pub fn unknown(error: Arc<reqwest::Error>) -> Self {
Self::Unknown(error)
}
pub fn bad_status_code(status_code: u16) -> Self {
Self::BadStatusCode { status_code }
}
pub fn bad_content_type(content_type: &str) -> Self {
Self::BadContentType {
content_type: content_type.to_string(),
}
}
pub fn bad_headers(expected_header: &str) -> Self {
Self::BadHeaders {
expected_header: expected_header.to_string(),
}
}
}
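// Equality compares only the enum variant (via `discriminant`); payload fields such as
// status codes or header names are ignored, which is what the tests below rely on.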
impl PartialEq for FetchError {
fn eq(&self, other: &Self) -> bool {
discriminant(self) == discriminant(other)
}
}
/// Result of XLS data retrieval.
#[derive(Debug, PartialEq)]
pub struct FetchOk {
/// File upload date.
pub uploaded_at: DateTime<Utc>,
/// Date the data was received.
pub requested_at: DateTime<Utc>,
/// Etag.
pub etag: String,
/// File data.
pub data: Option<Vec<u8>>,
}
impl FetchOk {
/// Result without file content.
pub fn head(uploaded_at: DateTime<Utc>, etag: String) -> Self {
FetchOk {
uploaded_at,
requested_at: Utc::now(),
etag,
data: None,
}
}
/// Full result.
pub fn get(uploaded_at: DateTime<Utc>, etag: String, data: Vec<u8>) -> Self {
FetchOk {
uploaded_at,
requested_at: Utc::now(),
etag,
data: Some(data),
}
}
}
pub type FetchResult = Result<FetchOk, FetchError>;
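/// Downloads the schedule XLS file over HTTP, remembering the URL and the last seen ETag.
///
/// # Examples
///
/// A minimal usage sketch (the URL below is a placeholder): `set_url` validates the link
/// with a HEAD request and stores it, then `fetch(false)` downloads the full file.
///
/// ```
/// let mut downloader = XlsDownloader::new();
/// downloader.set_url("https://example.com/schedule.xls").await?;
/// let fetched = downloader.fetch(false).await?;
/// ```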
pub struct XlsDownloader {
pub url: Option<String>,
pub etag: Option<String>,
}
impl XlsDownloader {
pub fn new() -> Self {
XlsDownloader {
url: None,
etag: None,
}
}
async fn fetch_specified(url: &str, head: bool) -> FetchResult {
let client = reqwest::Client::new();
let response = if head {
client.head(url)
} else {
client.get(url)
}
.header("User-Agent", ua_generator::ua::spoof_chrome_ua())
.send()
.await
.map_err(|e| FetchError::unknown(Arc::new(e)))?;
if response.status().as_u16() != 200 {
return Err(FetchError::bad_status_code(response.status().as_u16()));
}
let headers = response.headers();
let content_type = headers
.get("Content-Type")
.ok_or(FetchError::bad_headers("Content-Type"))?;
let etag = headers
.get("etag")
.ok_or(FetchError::bad_headers("etag"))?
.to_str()
.or(Err(FetchError::bad_headers("etag")))?
.to_string();
let last_modified = headers
.get("last-modified")
.ok_or(FetchError::bad_headers("last-modified"))?;
if content_type != "application/vnd.ms-excel" {
return Err(FetchError::bad_content_type(content_type.to_str().unwrap()));
}
let last_modified = DateTime::parse_from_rfc2822(last_modified.to_str().unwrap())
.unwrap()
.with_timezone(&Utc);
Ok(if head {
FetchOk::head(last_modified, etag)
} else {
FetchOk::get(
last_modified,
etag,
response.bytes().await.unwrap().to_vec(),
)
})
}
pub async fn fetch(&self, head: bool) -> FetchResult {
if self.url.is_none() {
Err(FetchError::NoUrlProvided)
} else {
Self::fetch_specified(self.url.as_ref().unwrap(), head).await
}
}
pub async fn set_url(&mut self, url: &str) -> FetchResult {
let result = Self::fetch_specified(url, true).await;
if result.is_ok() {
self.url = Some(url.to_string());
}
result
}
}
#[cfg(test)]
mod tests {
use crate::xls_downloader::{FetchError, XlsDownloader};
#[tokio::test]
async fn bad_url() {
let url = "bad_url";
let mut downloader = XlsDownloader::new();
assert!(downloader.set_url(url).await.is_err());
}
#[tokio::test]
async fn bad_status_code() {
let url = "https://www.google.com/not-found";
let mut downloader = XlsDownloader::new();
assert_eq!(
downloader.set_url(url).await,
Err(FetchError::bad_status_code(404))
);
}
#[tokio::test]
async fn bad_headers() {
let url = "https://www.google.com/favicon.ico";
let mut downloader = XlsDownloader::new();
assert_eq!(
downloader.set_url(url).await,
Err(FetchError::BadHeaders {
expected_header: "ETag".to_string(),
})
);
}
#[tokio::test]
async fn bad_content_type() {
let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb59fd46303008396ac96%2Fexample.txt";
let mut downloader = XlsDownloader::new();
assert!(downloader.set_url(url).await.is_err());
}
#[tokio::test]
async fn ok() {
let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";
let mut downloader = XlsDownloader::new();
assert!(downloader.set_url(url).await.is_ok());
}
#[tokio::test]
async fn downloader_ok() {
let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";
let mut downloader = XlsDownloader::new();
assert!(downloader.set_url(url).await.is_ok());
assert!(downloader.fetch(false).await.is_ok());
}
#[tokio::test]
async fn downloader_no_url_provided() {
let downloader = XlsDownloader::new();
let result = downloader.fetch(false).await;
assert_eq!(result, Err(FetchError::NoUrlProvided));
}
}

providers/src/lib.rs Normal file

@@ -0,0 +1,9 @@
pub use base;
pub use provider_engels_polytechnic::EngelsPolytechnicProvider;
pub use provider_engels_polytechnic::UpdateSource as EngelsPolytechnicUpdateSource;
#[cfg(feature = "test")]
pub mod test_utils {
pub use provider_engels_polytechnic::test_utils as engels_polytechnic;
}
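// A consumer enables these re-exported helpers via this crate's `test` feature,
// e.g. in Cargo.toml (a sketch, path is illustrative):
// providers = { path = "../providers", features = ["test"] }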


@@ -1,26 +0,0 @@
[package]
name = "schedule-parser"
version = "0.1.0"
edition = "2024"
[features]
test-utils = []
[dependencies]
calamine = "0.26"
chrono = { version = "0.4", features = ["serde"] }
derive_more = { version = "2", features = ["full"] }
sentry = "0.38"
serde = { version = "1.0.219", features = ["derive"] }
serde_repr = "0.1.20"
regex = "1.11.1"
utoipa = { version = "5", features = ["chrono"] }
strsim = "0.11.1"
log = "0.4.26"
[dev-dependencies]
criterion = "0.6"
[[bench]]
name = "parse"
harness = false


@@ -1,12 +0,0 @@
use criterion::{Criterion, criterion_group, criterion_main};
use schedule_parser::parse_xls;
pub fn bench_parse_xls(c: &mut Criterion) {
let buffer: Vec<u8> = include_bytes!("../../schedule.xls").to_vec();
c.bench_function("parse_xls", |b| b.iter(|| parse_xls(&buffer).unwrap()));
}
criterion_group!(benches, bench_parse_xls);
criterion_main!(benches);


@@ -1,227 +0,0 @@
use chrono::{DateTime, Utc};
use derive_more::{Display, Error};
use serde::{Deserialize, Serialize, Serializer};
use serde_repr::{Deserialize_repr, Serialize_repr};
use std::collections::HashMap;
use std::sync::Arc;
use utoipa::ToSchema;
pub(crate) mod internal {
use crate::schema::{LessonBoundaries, LessonType};
use chrono::{DateTime, Utc};
/// Data cell storing the group name.
pub struct GroupCellInfo {
/// Column index.
pub column: u32,
/// Text in the cell.
pub name: String,
}
/// Data cell storing a day row.
pub struct DayCellInfo {
/// Row index.
pub row: u32,
/// Column index.
pub column: u32,
/// Day name.
pub name: String,
/// Date of the day.
pub date: DateTime<Utc>,
}
/// Data on the time of lessons from the second column of the schedule.
pub struct BoundariesCellInfo {
/// The time range of the lesson.
pub time_range: LessonBoundaries,
/// Type of lesson.
pub lesson_type: LessonType,
/// The default lesson index, if present.
pub default_index: Option<u32>,
/// The bounding range of the cell in the sheet.
pub xls_range: ((u32, u32), (u32, u32)),
}
}
/// The beginning and end of the lesson.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonBoundaries {
/// The beginning of a lesson.
pub start: DateTime<Utc>,
/// The end of the lesson.
pub end: DateTime<Utc>,
}
/// Type of lesson.
#[derive(Clone, Hash, PartialEq, Debug, Serialize_repr, Deserialize_repr, ToSchema)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[repr(u8)]
pub enum LessonType {
/// Regular lesson.
Default = 0,
/// Additional (extracurricular) classes.
Additional,
/// Break.
Break,
/// Consultation.
Consultation,
/// Independent work.
IndependentWork,
/// Credit (pass/fail test).
Exam,
/// Graded credit test.
ExamWithGrade,
/// Exam.
ExamDefault,
/// Course project.
CourseProject,
/// Course project defense.
CourseProjectDefense,
}
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonSubGroup {
/// Cabinet, if present.
pub cabinet: Option<String>,
/// Full name of the teacher.
pub teacher: Option<String>,
}
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct Lesson {
/// Type.
#[serde(rename = "type")]
pub lesson_type: LessonType,
/// Lesson indexes, if present.
pub range: Option<[u8; 2]>,
/// Name.
pub name: Option<String>,
/// The beginning and end.
pub time: LessonBoundaries,
/// List of subgroups.
#[serde(rename = "subgroups")]
pub subgroups: Option<Vec<Option<LessonSubGroup>>>,
/// Group name, if this is a schedule for teachers.
pub group: Option<String>,
}
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct Day {
/// Day of the week.
pub name: String,
/// Address of another campus building, if lessons are held there.
pub street: Option<String>,
/// Date.
pub date: DateTime<Utc>,
/// List of lessons on this day.
pub lessons: Vec<Lesson>,
}
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct ScheduleEntry {
/// The name of the group or name of the teacher.
pub name: String,
/// List of six days.
pub days: Vec<Day>,
}
#[derive(Clone)]
pub struct ParseResult {
/// List of groups.
pub groups: HashMap<String, ScheduleEntry>,
/// List of teachers.
pub teachers: HashMap<String, ScheduleEntry>,
}
#[derive(Clone, Debug, Display, Error, ToSchema)]
#[display("row {row}, column {column}")]
pub struct ErrorCellPos {
pub row: u32,
pub column: u32,
}
#[derive(Clone, Debug, Display, Error, ToSchema)]
#[display("'{data}' at {pos}")]
pub struct ErrorCell {
pub pos: ErrorCellPos,
pub data: String,
}
impl ErrorCell {
pub fn new(row: u32, column: u32, data: String) -> Self {
Self {
pos: ErrorCellPos { row, column },
data,
}
}
}
#[derive(Clone, Debug, Display, Error, ToSchema)]
pub enum ParseError {
/// Errors related to reading XLS file.
#[display("{_0:?}: Failed to read XLS file.")]
#[schema(value_type = String)]
BadXLS(Arc<calamine::XlsError>),
/// Not a single sheet was found.
#[display("No work sheets found.")]
NoWorkSheets,
/// There is no data on the worksheet boundaries.
#[display("There is no data on work sheet boundaries.")]
UnknownWorkSheetRange,
/// Failed to read the beginning and end of the lesson from the cell.
#[display("Failed to read lesson start and end from {_0}.")]
LessonBoundaries(ErrorCell),
/// No start and end times matching the lesson were found.
#[display("No start and end times matching the lesson (at {_0}) were found.")]
LessonTimeNotFound(ErrorCellPos),
}
impl Serialize for ParseError {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
ParseError::BadXLS(_) => serializer.serialize_str("BAD_XLS"),
ParseError::NoWorkSheets => serializer.serialize_str("NO_WORK_SHEETS"),
ParseError::UnknownWorkSheetRange => {
serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE")
}
ParseError::LessonBoundaries(_) => serializer.serialize_str("GLOBAL_TIME"),
ParseError::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"),
}
}
}


@@ -1,164 +0,0 @@
pub mod users {
use crate::database::models::User;
use crate::database::schema::users::dsl::users;
use crate::database::schema::users::dsl::*;
use crate::state::AppState;
use actix_web::web;
use diesel::{ExpressionMethods, QueryResult, insert_into};
use diesel::{QueryDsl, RunQueryDsl};
use diesel::{SaveChangesDsl, SelectableHelper};
use std::ops::DerefMut;
pub async fn get(state: &web::Data<AppState>, _id: &String) -> QueryResult<User> {
users
.filter(id.eq(_id))
.select(User::as_select())
.first(state.get_database().await.deref_mut())
}
pub async fn get_by_username(
state: &web::Data<AppState>,
_username: &String,
) -> QueryResult<User> {
users
.filter(username.eq(_username))
.select(User::as_select())
.first(state.get_database().await.deref_mut())
}
//noinspection RsTraitObligations
pub async fn get_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> QueryResult<User> {
users
.filter(vk_id.eq(_vk_id))
.select(User::as_select())
.first(state.get_database().await.deref_mut())
}
//noinspection RsTraitObligations
pub async fn get_by_telegram_id(
state: &web::Data<AppState>,
_telegram_id: i64,
) -> QueryResult<User> {
users
.filter(telegram_id.eq(_telegram_id))
.select(User::as_select())
.first(state.get_database().await.deref_mut())
}
//noinspection DuplicatedCode
pub async fn contains_by_username(state: &web::Data<AppState>, _username: &String) -> bool {
// How is this supposed to be shortened? There are no examples anywhere and nothing works.
// Rust is really wearing me out.
match users
.filter(username.eq(_username))
.count()
.get_result::<i64>(state.get_database().await.deref_mut())
{
Ok(count) => count > 0,
Err(_) => false,
}
}
//noinspection DuplicatedCode
//noinspection RsTraitObligations
pub async fn contains_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> bool {
match users
.filter(vk_id.eq(_vk_id))
.count()
.get_result::<i64>(state.get_database().await.deref_mut())
{
Ok(count) => count > 0,
Err(_) => false,
}
}
pub async fn insert(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> {
insert_into(users)
.values(user)
.execute(state.get_database().await.deref_mut())
}
/// Function declaration [User::save][UserSave::save].
pub trait UserSave {
/// Saves the user's changes to the database.
///
/// # Arguments
///
/// * `state`: The state of the actix-web application that stores the mutex of the [connection][diesel::PgConnection].
///
/// returns: `QueryResult<User>`
///
/// # Examples
///
/// ```
/// use crate::database::driver::users;
///
/// #[derive(Deserialize)]
/// struct Params {
/// pub username: String,
/// }
///
/// #[patch("/")]
/// async fn patch_user(
/// app_state: web::Data<AppState>,
/// user: SyncExtractor<User>,
/// web::Query(params): web::Query<Params>,
/// ) -> web::Json<User> {
/// let mut user = user.into_inner();
///
/// user.username = params.username;
///
/// match user.save(&app_state).await {
/// Ok(user) => web::Json(user),
/// Err(e) => {
/// eprintln!("Failed to save user: {e}");
/// panic!();
/// }
/// }
/// }
/// ```
async fn save(&self, state: &web::Data<AppState>) -> QueryResult<User>;
}
/// Implementation of [UserSave][UserSave] trait.
impl UserSave for User {
async fn save(&self, state: &web::Data<AppState>) -> QueryResult<User> {
self.save_changes::<Self>(state.get_database().await.deref_mut())
}
}
#[cfg(test)]
pub async fn delete_by_username(state: &web::Data<AppState>, _username: &String) -> bool {
match diesel::delete(users.filter(username.eq(_username)))
.execute(state.get_database().await.deref_mut())
{
Ok(count) => count > 0,
Err(_) => false,
}
}
#[cfg(test)]
pub async fn insert_or_ignore(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> {
insert_into(users)
.values(user)
.on_conflict_do_nothing()
.execute(state.get_database().await.deref_mut())
}
}
pub mod fcm {
use crate::database::models::{FCM, User};
use crate::state::AppState;
use actix_web::web;
use diesel::QueryDsl;
use diesel::RunQueryDsl;
use diesel::{BelongingToDsl, QueryResult, SelectableHelper};
use std::ops::DerefMut;
pub async fn from_user(state: &web::Data<AppState>, user: &User) -> QueryResult<FCM> {
FCM::belonging_to(&user)
.select(FCM::as_select())
.get_result(state.get_database().await.deref_mut())
}
}


@@ -1,3 +0,0 @@
pub mod driver;
pub mod models;
pub mod schema;


@@ -1,87 +0,0 @@
use actix_macros::ResponderJson;
use diesel::QueryId;
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
#[derive(
Copy, Clone, PartialEq, Debug, Serialize, Deserialize, diesel_derive_enum::DbEnum, ToSchema,
)]
#[ExistingTypePath = "crate::database::schema::sql_types::UserRole"]
#[DbValueStyle = "UPPERCASE"]
#[serde(rename_all = "UPPERCASE")]
pub enum UserRole {
Student,
Teacher,
Admin,
}
#[derive(
Identifiable,
AsChangeset,
Queryable,
QueryId,
Selectable,
Serialize,
Insertable,
Debug,
ToSchema,
ResponderJson,
)]
#[diesel(table_name = crate::database::schema::users)]
#[diesel(treat_none_as_null = true)]
pub struct User {
/// Account UUID.
pub id: String,
/// User name.
pub username: String,
/// BCrypt password hash.
pub password: Option<String>,
/// ID of the linked VK account.
pub vk_id: Option<i32>,
/// JWT access token.
pub access_token: Option<String>,
/// Group.
pub group: Option<String>,
/// Role.
pub role: UserRole,
/// Version of the installed Polytechnic+ application.
pub android_version: Option<String>,
/// ID of the linked Telegram account.
pub telegram_id: Option<i64>,
}
#[derive(
Debug,
Clone,
Serialize,
Identifiable,
Queryable,
Selectable,
Insertable,
AsChangeset,
Associations,
ToSchema,
ResponderJson,
)]
#[diesel(belongs_to(User))]
#[diesel(table_name = crate::database::schema::fcm)]
#[diesel(primary_key(user_id))]
pub struct FCM {
/// Account UUID.
pub user_id: String,
/// FCM token.
pub token: String,
/// List of topics subscribed to by the user.
pub topics: Vec<Option<String>>,
}


@@ -1,39 +0,0 @@
// @generated automatically by Diesel CLI.
pub mod sql_types {
#[derive(diesel::query_builder::QueryId, Clone, diesel::sql_types::SqlType)]
#[diesel(postgres_type(name = "user_role"))]
pub struct UserRole;
}
diesel::table! {
fcm (user_id) {
user_id -> Text,
token -> Text,
topics -> Array<Nullable<Text>>,
}
}
diesel::table! {
use diesel::sql_types::*;
use super::sql_types::UserRole;
users (id) {
id -> Text,
username -> Text,
password -> Nullable<Text>,
vk_id -> Nullable<Int4>,
access_token -> Nullable<Text>,
group -> Nullable<Text>,
role -> UserRole,
android_version -> Nullable<Text>,
telegram_id -> Nullable<Int8>,
}
}
diesel::joinable!(fcm -> users (user_id));
diesel::allow_tables_to_appear_in_same_query!(
fcm,
users,
);


@@ -1,13 +1,13 @@
use crate::database::driver;
use crate::database::models::{FCM, User};
use crate::extractors::base::{AsyncExtractor, FromRequestAsync};
use crate::extractors::base::FromRequestAsync;
use crate::state::AppState;
use crate::utility::jwt;
use actix_macros::MiddlewareError;
use actix_web::body::BoxBody;
use actix_web::dev::Payload;
use actix_web::http::header;
use actix_web::{FromRequest, HttpRequest, web};
use actix_web::{web, HttpRequest};
use database::entity::User;
use database::query::Query;
use derive_more::Display;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
@@ -88,55 +88,20 @@ impl FromRequestAsync for User {
let user_id = jwt::verify_and_decode(&access_token)
.map_err(|_| Error::InvalidAccessToken.into_err())?;
let app_state = req.app_data::<web::Data<AppState>>().unwrap();
let db = req
.app_data::<web::Data<AppState>>()
.unwrap()
.get_database();
driver::users::get(app_state, &user_id)
Query::find_user_by_id(db, &user_id)
.await
.map_err(|_| Error::NoUser.into())
}
}
pub struct UserExtractor<const FCM: bool> {
user: User,
fcm: Option<FCM>,
}
impl<const FCM: bool> UserExtractor<{ FCM }> {
pub fn user(&self) -> &User {
&self.user
}
pub fn fcm(&self) -> &Option<FCM> {
if !FCM {
panic!("FCM marked as not required, but it has been requested")
}
&self.fcm
}
}
/// Extractor of user and additional parameters from request with Bearer token.
impl<const FCM: bool> FromRequestAsync for UserExtractor<{ FCM }> {
type Error = actix_web::Error;
async fn from_request_async(
req: &HttpRequest,
payload: &mut Payload,
) -> Result<Self, Self::Error> {
let user = AsyncExtractor::<User>::from_request(req, payload)
.await?
.into_inner();
let app_state = req.app_data::<web::Data<AppState>>().unwrap();
Ok(Self {
fcm: if FCM {
driver::fcm::from_user(&app_state, &user).await.ok()
} else {
None
},
user,
})
}
}
.and_then(|user| {
if let Some(user) = user {
Ok(user)
} else {
Err(actix_web::Error::from(Error::NoUser))
}
})


@@ -5,7 +5,6 @@ use std::future::{Ready, ready};
use std::ops;
/// # Async extractor.
/// Asynchronous object extractor from a query.
pub struct AsyncExtractor<T>(T);
@@ -80,7 +79,6 @@ impl<T: FromRequestAsync> FromRequest for AsyncExtractor<T> {
}
/// # Sync extractor.
/// Synchronous object extractor from a query.
pub struct SyncExtractor<T>(T);


@@ -1,21 +1,17 @@
use crate::middlewares::authorization::JWTAuthorization;
use crate::middlewares::content_type::ContentTypeBootstrap;
use crate::state::{AppState, new_app_state};
use crate::state::{new_app_state, AppState};
use actix_web::dev::{ServiceFactory, ServiceRequest};
use actix_web::{App, Error, HttpServer};
use dotenvy::dotenv;
use log::info;
use std::io;
use utoipa_actix_web::AppExt;
use utoipa_actix_web::scope::Scope;
use utoipa_actix_web::AppExt;
use utoipa_rapidoc::RapiDoc;
mod state;
mod database;
mod xls_downloader;
mod extractors;
mod middlewares;
mod routes;
@@ -53,11 +49,6 @@ pub fn get_api_scope<
.service(routes::schedule::teacher)
.service(routes::schedule::teacher_names);
let fcm_scope = utoipa_actix_web::scope("/fcm")
.wrap(JWTAuthorization::default())
.service(routes::fcm::update_callback)
.service(routes::fcm::set_token);
let flow_scope = utoipa_actix_web::scope("/flow")
.wrap(JWTAuthorization {
ignore: &["/telegram-auth"],
@@ -72,7 +63,6 @@ pub fn get_api_scope<
.service(auth_scope)
.service(users_scope)
.service(schedule_scope)
.service(fcm_scope)
.service(flow_scope)
.service(vk_id_scope)
}
@@ -80,7 +70,7 @@ pub fn get_api_scope<
async fn async_main() -> io::Result<()> {
info!("Запуск сервера...");
let app_state = new_app_state().await.unwrap();
let app_state = new_app_state(None).await.unwrap();
HttpServer::new(move || {
let (app, api) = App::new()
@@ -119,7 +109,7 @@ fn main() -> io::Result<()> {
},
));
dotenv().unwrap();
let _ = dotenv();
env_logger::init();


@@ -1,25 +1,20 @@
use crate::database::models::User;
use crate::extractors::authorized_user;
use crate::extractors::base::FromRequestAsync;
use actix_web::body::{BoxBody, EitherBody};
use actix_web::dev::{Payload, Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
use actix_web::dev::{forward_ready, Payload, Service, ServiceRequest, ServiceResponse, Transform};
use actix_web::{Error, HttpRequest, ResponseError};
use database::entity::User;
use futures_util::future::LocalBoxFuture;
use std::future::{Ready, ready};
use std::future::{ready, Ready};
use std::rc::Rc;
/// Middleware guard working with JWT tokens.
#[derive(Default)]
pub struct JWTAuthorization {
/// List of ignored endpoints.
pub ignore: &'static [&'static str],
}
impl Default for JWTAuthorization {
fn default() -> Self {
Self { ignore: &[] }
}
}
impl<S, B> Transform<S, ServiceRequest> for JWTAuthorization
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
@@ -70,8 +65,8 @@ where
return false;
}
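// An ignored prefix must end on a path-segment boundary: the byte right after the
// prefix has to be '?' or '/', so "/telegram-auth" also matches "/telegram-auth?x=1"
// and "/telegram-auth/...", but not "/telegram-auth2".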
if let Some(other) = path.as_bytes().iter().nth(ignore.len()) {
return ['?' as u8, '/' as u8].contains(other);
if let Some(other) = path.as_bytes().get(ignore.len()) {
return [b'?', b'/'].contains(other);
}
true


@@ -1,10 +1,10 @@
use actix_web::Error;
use actix_web::body::{BoxBody, EitherBody};
use actix_web::dev::{Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
use actix_web::dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform};
use actix_web::http::header;
use actix_web::http::header::HeaderValue;
use actix_web::Error;
use futures_util::future::LocalBoxFuture;
use std::future::{Ready, ready};
use std::future::{ready, Ready};
/// Middleware to specify the encoding in the Content-Type header.
pub struct ContentTypeBootstrap;
@@ -30,7 +30,7 @@ pub struct ContentTypeMiddleware<S> {
service: S,
}
impl<'a, S, B> Service<ServiceRequest> for ContentTypeMiddleware<S>
impl<S, B> Service<ServiceRequest> for ContentTypeMiddleware<S>
where
S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
S::Future: 'static,
@@ -49,14 +49,15 @@ where
let mut response = fut.await?;
let headers = response.response_mut().headers_mut();
if let Some(content_type) = headers.get("Content-Type") {
if content_type == "application/json" {
if let Some(content_type) = headers.get("Content-Type")
&& content_type == "application/json"
{
headers.insert(
header::CONTENT_TYPE,
HeaderValue::from_static("application/json; charset=utf8"),
);
}
}
Ok(response.map_into_left_body())
})


@@ -1,2 +1,4 @@
pub mod error;
pub mod authorization;
pub mod content_type;


@@ -2,16 +2,6 @@ use jsonwebtoken::errors::ErrorKind;
use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode};
use serde::{Deserialize, Serialize};
#[derive(Deserialize, Serialize)]
struct TokenData {
iis: String,
sub: i32,
app: i32,
exp: i32,
iat: i32,
jti: i32,
}
#[derive(Debug, Serialize, Deserialize)]
struct Claims {
sub: i32,
@@ -22,7 +12,7 @@ struct Claims {
#[derive(Debug, PartialEq)]
pub enum Error {
JwtError(ErrorKind),
Jwt(ErrorKind),
InvalidSignature,
InvalidToken,
Expired,
@@ -49,10 +39,10 @@ const VK_PUBLIC_KEY: &str = concat!(
"-----END PUBLIC KEY-----"
);
pub fn parse_vk_id(token_str: &String, client_id: i32) -> Result<i32, Error> {
pub fn parse_vk_id(token_str: &str, client_id: i32) -> Result<i32, Error> {
let dkey = DecodingKey::from_rsa_pem(VK_PUBLIC_KEY.as_bytes()).unwrap();
match decode::<Claims>(&token_str, &dkey, &Validation::new(Algorithm::RS256)) {
match decode::<Claims>(token_str, &dkey, &Validation::new(Algorithm::RS256)) {
Ok(token_data) => {
let claims = token_data.claims;
@@ -77,7 +67,7 @@ pub fn parse_vk_id(token_str: &String, client_id: i32) -> Result<i32, Error> {
ErrorKind::Base64(_) => Error::InvalidToken,
ErrorKind::Json(_) => Error::InvalidToken,
ErrorKind::Utf8(_) => Error::InvalidToken,
kind => Error::JwtError(kind),
kind => Error::Jwt(kind),
}),
}
}


@@ -1,31 +1,34 @@
use self::schema::*;
use crate::database::driver;
use crate::database::driver::users::UserSave;
use crate::routes::auth::shared::parse_vk_id;
use crate::routes::auth::sign_in::schema::SignInData::{Default, VkOAuth};
use crate::routes::schema::ResponseError;
use crate::routes::schema::user::UserResponse;
use crate::{AppState, utility};
use crate::routes::schema::ResponseError;
use crate::{utility, AppState};
use actix_web::{post, web};
use database::query::Query;
use web::Json;
async fn sign_in_combined(
data: SignInData,
app_state: &web::Data<AppState>,
) -> Result<UserResponse, ErrorCode> {
let db = app_state.get_database();
let user = match &data {
Default(data) => driver::users::get_by_username(&app_state, &data.username).await,
VkOAuth(id) => driver::users::get_by_vk_id(&app_state, *id).await,
};
Default(data) => Query::find_user_by_username(db, &data.username).await,
VkOAuth(id) => Query::find_user_by_vk_id(db, *id).await,
}
.ok()
.flatten();
match user {
Ok(mut user) => {
Some(user) => {
if let Default(data) = data {
if user.password.is_none() {
return Err(ErrorCode::IncorrectCredentials);
}
match bcrypt::verify(&data.password, &user.password.as_ref().unwrap()) {
match bcrypt::verify(&data.password, user.password.as_ref().unwrap()) {
Ok(result) => {
if !result {
return Err(ErrorCode::IncorrectCredentials);
@@ -37,14 +40,11 @@ async fn sign_in_combined(
}
}
user.access_token = Some(utility::jwt::encode(&user.id));
user.save(&app_state).await.expect("Failed to update user");
Ok(user.into())
let access_token = utility::jwt::encode(&user.id);
Ok(UserResponse::from_user_with_token(user, access_token))
}
Err(_) => Err(ErrorCode::IncorrectCredentials),
None => Err(ErrorCode::IncorrectCredentials),
}
}
@@ -124,8 +124,6 @@ mod schema {
InvalidVkAccessToken,
}
/// Internal
/// Type of authorization.
pub enum SignInData {
/// User and password name and password.
@@ -139,16 +137,16 @@ mod schema {
#[cfg(test)]
mod tests {
use super::schema::*;
use crate::database::driver;
use crate::database::models::{User, UserRole};
use crate::routes::auth::sign_in::sign_in;
use crate::test_env::tests::{static_app_state, test_app_state, test_env};
use crate::utility;
use actix_test::test_app;
use actix_web::dev::ServiceResponse;
use actix_web::http::Method;
use actix_web::http::StatusCode;
use actix_web::test;
use database::entity::sea_orm_active_enums::UserRole;
use database::entity::ActiveUser;
use database::sea_orm::{ActiveModelTrait, Set};
use sha1::{Digest, Sha1};
use std::fmt::Write;
@@ -182,22 +180,24 @@ mod tests {
test_env();
let app_state = static_app_state().await;
driver::users::insert_or_ignore(
&app_state,
&User {
id: id.clone(),
username,
password: Some(bcrypt::hash("example".to_string(), bcrypt::DEFAULT_COST).unwrap()),
vk_id: None,
telegram_id: None,
access_token: Some(utility::jwt::encode(&id)),
group: Some("ИС-214/23".to_string()),
role: UserRole::Student,
android_version: None,
},
)
let active_user = ActiveUser {
id: Set(id.clone()),
username: Set(username),
password: Set(Some(
bcrypt::hash("example", bcrypt::DEFAULT_COST).unwrap(),
)),
vk_id: Set(None),
telegram_id: Set(None),
group: Set(Some("ИС-214/23".to_string())),
role: Set(UserRole::Student),
android_version: Set(None),
};
active_user
.save(app_state.get_database())
.await
.unwrap();
.expect("Failed to save user");
}
#[actix_web::test]


@@ -1,11 +1,13 @@
use self::schema::*;
use crate::AppState;
use crate::database::driver;
use crate::database::models::UserRole;
use crate::routes::auth::shared::parse_vk_id;
use crate::routes::schema::ResponseError;
use crate::routes::schema::user::UserResponse;
use crate::routes::schema::ResponseError;
use crate::{utility, AppState};
use actix_web::{post, web};
use database::entity::sea_orm_active_enums::UserRole;
use database::entity::ActiveUser;
use database::query::Query;
use database::sea_orm::ActiveModelTrait;
use web::Json;
async fn sign_up_combined(
@@ -18,8 +20,9 @@ async fn sign_up_combined(
}
if !app_state
.get_schedule_snapshot()
.get_schedule_snapshot("eng_polytechnic")
.await
.unwrap()
.data
.groups
.contains_key(&data.group)
@@ -27,22 +30,30 @@ async fn sign_up_combined(
return Err(ErrorCode::InvalidGroupName);
}
// If user with specified username already exists.
if driver::users::contains_by_username(&app_state, &data.username).await {
let db = app_state.get_database();
// If user with specified username already exists.
if Query::find_user_by_username(db, &data.username)
.await
.is_ok_and(|user| user.is_some())
{
return Err(ErrorCode::UsernameAlreadyExists);
}
// If user with specified VKID already exists.
if let Some(id) = data.vk_id {
if driver::users::contains_by_vk_id(&app_state, id).await {
if let Some(id) = data.vk_id
&& Query::is_user_exists_by_vk_id(db, id)
.await
.expect("Failed to check user existence")
{
return Err(ErrorCode::VkAlreadyExists);
}
}
let user = data.into();
driver::users::insert(&app_state, &user).await.unwrap();
let active_user: ActiveUser = data.into();
let user = active_user.insert(db).await.unwrap();
let access_token = utility::jwt::encode(&user.id);
Ok(UserResponse::from(&user)).into()
Ok(UserResponse::from_user_with_token(user, access_token))
}
#[utoipa::path(responses(
@@ -100,10 +111,11 @@ pub async fn sign_up_vk(
}
mod schema {
use crate::database::models::{User, UserRole};
use crate::routes::schema::user::UserResponse;
use crate::utility;
use actix_macros::ErrResponse;
use database::entity::sea_orm_active_enums::UserRole;
use database::entity::ActiveUser;
use database::sea_orm::Set;
use derive_more::Display;
use objectid::ObjectId;
use serde::{Deserialize, Serialize};
@@ -133,7 +145,7 @@ mod schema {
}
pub mod vk {
use crate::database::models::UserRole;
use database::entity::sea_orm_active_enums::UserRole;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, utoipa::ToSchema)]
@@ -188,8 +200,6 @@ mod schema {
VkAlreadyExists,
}
/// Internal
/// Data for registration.
pub struct SignUpData {
// TODO: enforce minimum and maximum length limits on registration and on change.
@@ -214,25 +224,21 @@ mod schema {
pub version: String,
}
impl Into<User> for SignUpData {
fn into(self) -> User {
assert_ne!(self.password.is_some(), self.vk_id.is_some());
impl From<SignUpData> for ActiveUser {
fn from(value: SignUpData) -> Self {
assert_ne!(value.password.is_some(), value.vk_id.is_some());
let id = ObjectId::new().unwrap().to_string();
let access_token = Some(utility::jwt::encode(&id));
User {
id,
username: self.username,
password: self
ActiveUser {
id: Set(ObjectId::new().unwrap().to_string()),
username: Set(value.username),
password: Set(value
.password
.map(|x| bcrypt::hash(x, bcrypt::DEFAULT_COST).unwrap()),
vk_id: self.vk_id,
telegram_id: None,
access_token,
group: Some(self.group),
role: self.role,
android_version: Some(self.version),
.map(|x| bcrypt::hash(x, bcrypt::DEFAULT_COST).unwrap())),
vk_id: Set(value.vk_id),
telegram_id: Set(None),
group: Set(Some(value.group)),
role: Set(value.role),
android_version: Set(Some(value.version)),
}
}
}
@@ -240,8 +246,6 @@ mod schema {
#[cfg(test)]
mod tests {
use crate::database::driver;
use crate::database::models::UserRole;
use crate::routes::auth::sign_up::schema::Request;
use crate::routes::auth::sign_up::sign_up;
use crate::test_env::tests::{static_app_state, test_app_state, test_env};
@@ -250,6 +254,10 @@ mod tests {
use actix_web::http::Method;
use actix_web::http::StatusCode;
use actix_web::test;
use database::entity::sea_orm_active_enums::UserRole;
use database::entity::{UserColumn, UserEntity};
use database::sea_orm::ColumnTrait;
use database::sea_orm::{EntityTrait, QueryFilter};
struct SignUpPartial<'a> {
username: &'a str,
@@ -281,7 +289,12 @@ mod tests {
test_env();
let app_state = static_app_state().await;
driver::users::delete_by_username(&app_state, &"test::sign_up_valid".to_string()).await;
UserEntity::delete_many()
.filter(UserColumn::Username.eq("test::sign_up_valid"))
.exec(app_state.get_database())
.await
.expect("Failed to delete user");
// test
@@ -302,7 +315,12 @@ mod tests {
test_env();
let app_state = static_app_state().await;
driver::users::delete_by_username(&app_state, &"test::sign_up_multiple".to_string()).await;
UserEntity::delete_many()
.filter(UserColumn::Username.eq("test::sign_up_multiple"))
.exec(app_state.get_database())
.await
.expect("Failed to delete user");
let create = sign_up_client(SignUpPartial {
username: "test::sign_up_multiple",


@@ -1,5 +0,0 @@
mod set_token;
mod update_callback;
pub use set_token::*;
pub use update_callback::*;


@@ -1,94 +0,0 @@
use crate::database;
use crate::database::models::FCM;
use crate::extractors::authorized_user::UserExtractor;
use crate::extractors::base::AsyncExtractor;
use crate::state::AppState;
use actix_web::{HttpResponse, Responder, patch, web};
use diesel::{RunQueryDsl, SaveChangesDsl};
use firebase_messaging_rs::topic::TopicManagementSupport;
use serde::Deserialize;
use std::ops::DerefMut;
#[derive(Debug, Deserialize)]
struct Params {
pub token: String,
}
async fn get_fcm(
app_state: &web::Data<AppState>,
user_data: &UserExtractor<true>,
token: String,
) -> Result<FCM, diesel::result::Error> {
match user_data.fcm() {
Some(fcm) => {
let mut fcm = fcm.clone();
fcm.token = token;
Ok(fcm)
}
None => {
let fcm = FCM {
user_id: user_data.user().id.clone(),
token,
topics: vec![],
};
match diesel::insert_into(database::schema::fcm::table)
.values(&fcm)
.execute(app_state.get_database().await.deref_mut())
{
Ok(_) => Ok(fcm),
Err(e) => Err(e),
}
}
}
}
#[utoipa::path(responses((status = OK)))]
#[patch("/set-token")]
pub async fn set_token(
app_state: web::Data<AppState>,
web::Query(params): web::Query<Params>,
user_data: AsyncExtractor<UserExtractor<true>>,
) -> impl Responder {
let user_data = user_data.into_inner();
// If the token hasn't changed, exit.
if let Some(fcm) = user_data.fcm() {
if fcm.token == params.token {
return HttpResponse::Ok();
}
}
let fcm = get_fcm(&app_state, &user_data, params.token.clone()).await;
if let Err(e) = fcm {
eprintln!("Failed to get FCM: {e}");
return HttpResponse::Ok();
}
let mut fcm = fcm.ok().unwrap();
// Add default topics.
if !fcm.topics.contains(&Some("common".to_string())) {
fcm.topics.push(Some("common".to_string()));
}
fcm.save_changes::<FCM>(app_state.get_database().await.deref_mut())
.unwrap();
let fcm_client = app_state.get_fcm_client().await.unwrap();
for topic in fcm.topics.clone() {
if let Some(topic) = topic {
if let Err(error) = fcm_client
.register_token_to_topic(&*topic, &*fcm.token)
.await
{
eprintln!("Failed to subscribe token to topic: {:?}", error);
return HttpResponse::Ok();
}
}
}
HttpResponse::Ok()
}


@@ -1,24 +0,0 @@
use crate::database::driver::users::UserSave;
use crate::database::models::User;
use crate::extractors::base::AsyncExtractor;
use crate::state::AppState;
use actix_web::{HttpResponse, Responder, post, web};
#[utoipa::path(responses(
(status = OK),
(status = INTERNAL_SERVER_ERROR)
))]
#[post("/update-callback/{version}")]
async fn update_callback(
app_state: web::Data<AppState>,
version: web::Path<String>,
user: AsyncExtractor<User>,
) -> impl Responder {
let mut user = user.into_inner();
user.android_version = Some(version.into_inner());
user.save(&app_state).await.unwrap();
HttpResponse::Ok()
}


@@ -1,12 +1,13 @@
use self::schema::*;
use crate::database::driver;
use crate::database::driver::users::UserSave;
use crate::database::models::{User, UserRole};
use crate::routes::schema::ResponseError;
use crate::utility::telegram::{WebAppInitDataMap, WebAppUser};
use crate::{AppState, utility};
use crate::{utility, AppState};
use actix_web::{post, web};
use chrono::{DateTime, Duration, Utc};
use database::entity::sea_orm_active_enums::UserRole;
use database::entity::ActiveUser;
use database::query::Query;
use database::sea_orm::{ActiveModelTrait, Set};
use objectid::ObjectId;
use std::sync::Arc;
use web::Json;
@@ -22,10 +23,6 @@ pub async fn telegram_auth(
) -> ServiceResponse {
let init_data = WebAppInitDataMap::from_str(data_json.into_inner().init_data);
// for (key, value) in &init_data.data_map {
// println!("key: {} | value: {}", key, value);
// }
{
let env = &app_state.get_env().telegram;
@@ -52,39 +49,32 @@ pub async fn telegram_auth(
let web_app_user =
serde_json::from_str::<WebAppUser>(init_data.data_map.get("user").unwrap()).unwrap();
let mut user = {
match driver::users::get_by_telegram_id(&app_state, web_app_user.id).await {
Ok(value) => Ok(value),
Err(_) => {
let new_user = User {
id: ObjectId::new().unwrap().to_string(),
username: format!("telegram_{}", web_app_user.id), // можно оставить, а можно поменять
password: None, // ибо нехуй
vk_id: None,
telegram_id: Some(web_app_user.id),
access_token: None, // установится ниже
group: None,
role: UserRole::Student, // TODO: validate the data at registration
android_version: None,
};
driver::users::insert(&app_state, &new_user)
let user = match Query::find_user_by_telegram_id(app_state.get_database(), web_app_user.id)
.await
.map(|_| new_user)
}
}
.expect("Failed to get or add user")
.expect("Failed to find user by telegram id")
{
Some(value) => value,
None => {
let new_user = ActiveUser {
id: Set(ObjectId::new().unwrap().to_string()),
username: Set(format!("telegram_{}", web_app_user.id)), // can keep this or change it later
password: Set(None), // no password for Telegram-only accounts
vk_id: Set(None),
telegram_id: Set(Some(web_app_user.id)),
group: Set(None),
role: Set(UserRole::Student), // TODO: validate the data at registration
android_version: Set(None),
};
user.access_token = Some(utility::jwt::encode(&user.id));
new_user
.insert(app_state.get_database())
.await
.expect("Failed to insert user")
}
};
user.save(&app_state).await.expect("Failed to update user");
Ok(Response::new(
&*user.access_token.unwrap(),
user.group.is_some(),
))
.into()
let access_token = utility::jwt::encode(&user.id);
Ok(Response::new(&access_token, user.group.is_some())).into()
}
mod schema {
@@ -93,9 +83,9 @@ mod schema {
use crate::utility::telegram::VerifyError;
use actix_macros::ErrResponse;
use actix_web::body::EitherBody;
use actix_web::cookie::CookieBuilder;
use actix_web::cookie::time::OffsetDateTime;
use actix_web::{HttpRequest, HttpResponse, web};
use actix_web::cookie::CookieBuilder;
use actix_web::{web, HttpRequest, HttpResponse};
use derive_more::Display;
use serde::{Deserialize, Serialize, Serializer};
use std::ops::Add;
@@ -114,8 +104,8 @@ mod schema {
#[serde(rename_all = "camelCase")]
#[schema(as = Flow::TelegramAuth::Response)]
pub struct Response {
#[serde(skip)]
#[schema(ignore)]
// #[serde(skip)] // TODO: haven't figured out yet how to avoid returning the raw token in the response
// #[schema(ignore)]
access_token: String,
pub completed: bool,
@@ -135,7 +125,7 @@ mod schema {
&mut self,
request: &HttpRequest,
response: &mut HttpResponse<EitherBody<String>>,
) -> () {
) {
let access_token = &self.access_token;
let app_state = request.app_data::<web::Data<AppState>>().unwrap();


@@ -1,11 +1,11 @@
use self::schema::*;
use crate::AppState;
use crate::database::driver;
use crate::database::driver::users::UserSave;
use crate::database::models::User;
use crate::extractors::base::AsyncExtractor;
use crate::routes::schema::ResponseError;
use crate::AppState;
use actix_web::{post, web};
use database::entity::User;
use database::query::Query;
use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
use web::Json;
#[utoipa::path(responses(
@@ -20,7 +20,7 @@ pub async fn telegram_complete(
app_state: web::Data<AppState>,
user: AsyncExtractor<User>,
) -> ServiceResponse {
let mut user = user.into_inner();
let user = user.into_inner();
// check against overwriting data that already exists
if user.group.is_some() {
@@ -29,19 +29,26 @@ pub async fn telegram_complete(
let data = data.into_inner();
let db = app_state.get_database();
let mut active_user = user.clone().into_active_model();
// replace the existing username if it differs
if user.username != data.username {
if driver::users::contains_by_username(&app_state, &data.username).await {
if Query::is_user_exists_by_username(db, &data.username)
.await
.unwrap()
{
return Err(ErrorCode::UsernameAlreadyExists).into();
}
user.username = data.username;
active_user.username = Set(data.username);
}
// check that the group exists
if !app_state
.get_schedule_snapshot()
.get_schedule_snapshot("eng_polytechnic")
.await
.unwrap()
.data
.groups
.contains_key(&data.group)
@@ -49,9 +56,12 @@ pub async fn telegram_complete(
return Err(ErrorCode::InvalidGroupName).into();
}
user.group = Some(data.group);
active_user.group = Set(Some(data.group));
user.save(&app_state).await.expect("Failed to update user");
active_user
.update(db)
.await
.expect("Failed to update user");
Ok(()).into()
}
@@ -79,11 +89,11 @@ mod schema {
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[schema(as = Flow::TelegramFill::ErrorCode)]
pub enum ErrorCode {
#[display("This flow already completed.")]
#[display("This flow is already completed.")]
#[status_code = "actix_web::http::StatusCode::CONFLICT"]
AlreadyCompleted,
#[display("Username is already exists.")]
#[display("User with that name already exists.")]
#[status_code = "actix_web::http::StatusCode::BAD_REQUEST"]
UsernameAlreadyExists,


@@ -1,5 +1,4 @@
pub mod auth;
pub mod fcm;
pub mod flow;
pub mod schedule;
mod schema;


@@ -1,11 +1,17 @@
use crate::AppState;
use crate::routes::schedule::schema::CacheStatus;
use crate::AppState;
use actix_web::{get, web};
use std::ops::Deref;
#[utoipa::path(responses(
(status = OK, body = CacheStatus),
))]
#[get("/cache-status")]
pub async fn cache_status(app_state: web::Data<AppState>) -> CacheStatus {
CacheStatus::from(&app_state).await.into()
app_state
.get_schedule_snapshot("eng_polytechnic")
.await
.unwrap()
.deref()
.into()
}


@@ -1,10 +1,10 @@
use self::schema::*;
use crate::AppState;
use crate::database::models::User;
use crate::extractors::base::AsyncExtractor;
use crate::routes::schedule::schema::ScheduleEntryResponse;
use crate::routes::schema::ResponseError;
use actix_web::{get, web};
use database::entity::User;
#[utoipa::path(responses(
(status = OK, body = ScheduleEntryResponse),
@@ -31,8 +31,9 @@ pub async fn group(user: AsyncExtractor<User>, app_state: web::Data<AppState>) -
None => Err(ErrorCode::SignUpNotCompleted),
Some(group) => match app_state
.get_schedule_snapshot()
.get_schedule_snapshot("eng_polytechnic")
.await
.unwrap()
.data
.groups
.get(group)


@@ -6,8 +6,9 @@ use actix_web::{get, web};
#[get("/group-names")]
pub async fn group_names(app_state: web::Data<AppState>) -> Response {
let mut names: Vec<String> = app_state
.get_schedule_snapshot()
.get_schedule_snapshot("eng_polytechnic")
.await
.unwrap()
.data
.groups
.keys()


@@ -1,7 +1,7 @@
mod cache_status;
mod group;
mod group_names;
mod schedule;
mod get;
mod schema;
mod teacher;
mod teacher_names;
@@ -9,6 +9,6 @@ mod teacher_names;
pub use cache_status::*;
pub use group::*;
pub use group_names::*;
pub use schedule::*;
pub use get::*;
pub use teacher::*;
pub use teacher_names::*;


@@ -1,7 +1,7 @@
use crate::state::{AppState, ScheduleSnapshot};
use crate::state::AppState;
use actix_macros::{OkResponse, ResponderJson};
use actix_web::web;
use schedule_parser::schema::ScheduleEntry;
use providers::base::{ScheduleEntry, ScheduleSnapshot};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::ops::Deref;
@@ -32,7 +32,12 @@ impl From<ScheduleEntry> for ScheduleEntryResponse {
impl ScheduleView {
pub async fn from(app_state: &web::Data<AppState>) -> Self {
let schedule = app_state.get_schedule_snapshot().await.clone();
let schedule = app_state
.get_schedule_snapshot("eng_polytechnic")
.await
.unwrap()
.deref()
.clone();
Self {
url: schedule.url,
@@ -58,12 +63,6 @@ pub struct CacheStatus {
pub updated_at: i64,
}
impl CacheStatus {
pub async fn from(value: &web::Data<AppState>) -> Self {
From::<&ScheduleSnapshot>::from(value.get_schedule_snapshot().await.deref())
}
}
impl From<&ScheduleSnapshot> for CacheStatus {
fn from(value: &ScheduleSnapshot) -> Self {
Self {


@@ -2,7 +2,7 @@ use self::schema::*;
use crate::AppState;
use crate::routes::schema::ResponseError;
use actix_web::{get, web};
use schedule_parser::schema::ScheduleEntry;
use providers::base::ScheduleEntry;
#[utoipa::path(responses(
(status = OK, body = ScheduleEntry),
@@ -18,8 +18,9 @@ use schedule_parser::schema::ScheduleEntry;
#[get("/teacher/{name}")]
pub async fn teacher(name: web::Path<String>, app_state: web::Data<AppState>) -> ServiceResponse {
match app_state
.get_schedule_snapshot()
.get_schedule_snapshot("eng_polytechnic")
.await
.unwrap()
.data
.teachers
.get(&name.into_inner())


@@ -6,8 +6,9 @@ use actix_web::{get, web};
#[get("/teacher-names")]
pub async fn teacher_names(app_state: web::Data<AppState>) -> Response {
let mut names: Vec<String> = app_state
.get_schedule_snapshot()
.get_schedule_snapshot("eng_polytechnic")
.await
.unwrap()
.data
.teachers
.keys()


@@ -13,13 +13,13 @@ where
E: Serialize + PartialSchema + Display + PartialErrResponse;
/// Transform Response<T, E> into Result<T, E>
impl<T, E> Into<Result<T, E>> for Response<T, E>
impl<T, E> From<Response<T, E>> for Result<T, E>
where
T: Serialize + PartialSchema + PartialOkResponse,
E: Serialize + PartialSchema + Display + PartialErrResponse,
{
fn into(self) -> Result<T, E> {
self.0
fn from(value: Response<T, E>) -> Self {
value.0
}
}
@@ -46,7 +46,7 @@ where
{
match &self.0 {
Ok(ok) => serializer.serialize_some(&ok),
Err(err) => serializer.serialize_some(&ResponseError::<E>::from(err.clone().into())),
Err(err) => serializer.serialize_some(&err.clone().into()),
}
}
}
@@ -95,7 +95,7 @@ pub trait PartialOkResponse {
&mut self,
_request: &HttpRequest,
_response: &mut HttpResponse<EitherBody<String>>,
) -> () {
) {
}
}
@@ -126,12 +126,13 @@ where
}
pub mod user {
use crate::database::models::{User, UserRole};
use actix_macros::{OkResponse, ResponderJson};
use database::entity::sea_orm_active_enums::UserRole;
use database::entity::User;
use serde::Serialize;
//noinspection SpellCheckingInspection
/// Used to hide sensitive fields such as the password hash or FCM data
/// Used to hide sensitive fields such as the password hash
#[derive(Serialize, utoipa::ToSchema, ResponderJson, OkResponse)]
#[serde(rename_all = "camelCase")]
pub struct UserResponse {
@@ -165,17 +166,31 @@ pub mod user {
pub access_token: Option<String>,
}
/// Create UserResponse from User ref.
impl From<&User> for UserResponse {
fn from(user: &User) -> Self {
UserResponse {
impl UserResponse {
pub fn from_user_with_token(user: User, access_token: String) -> Self {
Self {
id: user.id.clone(),
username: user.username.clone(),
group: user.group.clone(),
role: user.role.clone(),
vk_id: user.vk_id.clone(),
telegram_id: user.telegram_id.clone(),
access_token: user.access_token.clone(),
vk_id: user.vk_id,
telegram_id: user.telegram_id,
access_token: Some(access_token),
}
}
}
/// Create UserResponse from User ref.
impl From<&User> for UserResponse {
fn from(user: &User) -> Self {
Self {
id: user.id.clone(),
username: user.username.clone(),
group: user.group.clone(),
role: user.role.clone(),
vk_id: user.vk_id,
telegram_id: user.telegram_id,
access_token: None,
}
}
}
@@ -183,14 +198,14 @@ pub mod user {
/// Transform User to UserResponse.
impl From<User> for UserResponse {
fn from(user: User) -> Self {
UserResponse {
Self {
id: user.id,
username: user.username,
group: user.group,
role: user.role,
vk_id: user.vk_id,
telegram_id: user.telegram_id,
access_token: user.access_token,
access_token: None,
}
}
}


@@ -1,9 +1,9 @@
use self::schema::*;
use crate::database::driver::users::UserSave;
use crate::database::models::User;
use crate::extractors::base::AsyncExtractor;
use crate::state::AppState;
use actix_web::{post, web};
use database::entity::User;
use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
#[utoipa::path(responses((status = OK)))]
#[post("/change-group")]
@@ -12,15 +12,20 @@ pub async fn change_group(
user: AsyncExtractor<User>,
data: web::Json<Request>,
) -> ServiceResponse {
let mut user = user.into_inner();
let user = user.into_inner();
if user.group.is_some_and(|group| group == data.group) {
if user
.group
.as_ref()
.is_some_and(|group| group.eq(&data.group))
{
return Ok(()).into();
}
if !app_state
.get_schedule_snapshot()
.get_schedule_snapshot("eng_polytechnic")
.await
.unwrap()
.data
.groups
.contains_key(&data.group)
@@ -28,8 +33,10 @@ pub async fn change_group(
return Err(ErrorCode::NotFound).into();
}
user.group = Some(data.into_inner().group);
user.save(&app_state).await.unwrap();
let mut active_user = user.clone().into_active_model();
active_user.group = Set(Some(data.into_inner().group));
active_user.update(app_state.get_database()).await.unwrap();
Ok(()).into()
}


@@ -1,10 +1,10 @@
use self::schema::*;
use crate::database::driver;
use crate::database::driver::users::UserSave;
use crate::database::models::User;
use crate::extractors::base::AsyncExtractor;
use crate::state::AppState;
use actix_web::{post, web};
use database::entity::User;
use database::query::Query;
use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
#[utoipa::path(responses((status = OK)))]
#[post("/change-username")]
@@ -13,21 +13,24 @@ pub async fn change_username(
user: AsyncExtractor<User>,
data: web::Json<Request>,
) -> ServiceResponse {
let mut user = user.into_inner();
let user = user.into_inner();
if user.username == data.username {
return Ok(()).into();
}
if driver::users::get_by_username(&app_state, &data.username)
let db = app_state.get_database();
if Query::is_user_exists_by_username(db, &data.username)
.await
.is_ok()
.unwrap()
{
return Err(ErrorCode::AlreadyExists).into();
}
user.username = data.into_inner().username;
user.save(&app_state).await.unwrap();
let mut active_user = user.into_active_model();
active_user.username = Set(data.into_inner().username);
active_user.update(db).await.unwrap();
Ok(()).into()
}


@@ -1,7 +1,7 @@
use crate::database::models::User;
use crate::extractors::base::AsyncExtractor;
use crate::routes::schema::user::UserResponse;
use actix_web::get;
use database::entity::User;
#[utoipa::path(responses((status = OK, body = UserResponse)))]
#[get("/me")]


@@ -1,11 +1,15 @@
pub mod schedule;
pub mod telegram;
pub mod vk_id;
#[cfg(not(test))]
pub mod yandex_cloud;
pub use self::schedule::ScheduleEnvData;
pub use self::telegram::TelegramEnvData;
pub use self::vk_id::VkIdEnvData;
#[cfg(not(test))]
pub use self::yandex_cloud::YandexCloudEnvData;
#[derive(Default)]
@@ -13,5 +17,7 @@ pub struct AppEnv {
pub schedule: ScheduleEnvData,
pub telegram: TelegramEnvData,
pub vk_id: VkIdEnvData,
#[cfg(not(test))]
pub yandex_cloud: YandexCloudEnvData,
}


@@ -2,6 +2,7 @@ use std::env;
#[derive(Clone)]
pub struct ScheduleEnvData {
#[cfg(not(test))]
pub url: Option<String>,
pub auto_update: bool,
}
@@ -9,6 +10,7 @@ pub struct ScheduleEnvData {
impl Default for ScheduleEnvData {
fn default() -> Self {
Self {
#[cfg(not(test))]
url: env::var("SCHEDULE_INIT_URL").ok(),
auto_update: !env::var("SCHEDULE_DISABLE_AUTO_UPDATE")
.is_ok_and(|v| v.eq("1") || v.eq("true")),


@@ -1,15 +0,0 @@
use firebase_messaging_rs::FCMClient;
use std::env;
use tokio::sync::Mutex;
#[derive(Clone)]
pub struct FCMClientData;
impl FCMClientData {
pub async fn new() -> Option<Mutex<FCMClient>> {
match env::var("GOOGLE_APPLICATION_CREDENTIALS") {
Ok(_) => Some(Mutex::new(FCMClient::new().await.unwrap())),
Err(_) => None,
}
}
}


@@ -1,88 +1,117 @@
mod env;
mod fcm_client;
mod schedule;
use crate::state::fcm_client::FCMClientData;
use crate::xls_downloader::basic_impl::BasicXlsDownloader;
use actix_web::web;
use diesel::{Connection, PgConnection};
use firebase_messaging_rs::FCMClient;
use std::ops::DerefMut;
use tokio::sync::{MappedMutexGuard, Mutex, MutexGuard};
pub use self::schedule::{Schedule, ScheduleSnapshot};
pub use crate::state::env::AppEnv;
use actix_web::web;
use database::migration::{Migrator, MigratorTrait};
use database::sea_orm::{ConnectOptions, Database, DatabaseConnection};
use providers::base::{ScheduleProvider, ScheduleSnapshot};
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
use tokio_util::sync::CancellationToken;
/// Common data provided to endpoints.
pub struct AppState {
database: Mutex<PgConnection>,
downloader: Mutex<BasicXlsDownloader>,
schedule: Mutex<Schedule>,
cancel_token: CancellationToken,
database: DatabaseConnection,
providers: HashMap<String, Arc<dyn ScheduleProvider>>,
env: AppEnv,
fcm_client: Option<Mutex<FCMClient>>,
}
impl AppState {
pub async fn new() -> Result<Self, self::schedule::Error> {
pub async fn new(
database: Option<DatabaseConnection>,
) -> Result<Self, Box<dyn std::error::Error>> {
let env = AppEnv::default();
let providers: HashMap<String, Arc<dyn ScheduleProvider>> = HashMap::from([(
"eng_polytechnic".to_string(),
providers::EngelsPolytechnicProvider::get({
#[cfg(test)]
{
providers::EngelsPolytechnicUpdateSource::Prepared(ScheduleSnapshot {
url: "".to_string(),
fetched_at: chrono::DateTime::default(),
updated_at: chrono::DateTime::default(),
data: providers::test_utils::engels_polytechnic::test_result().unwrap(),
})
}
#[cfg(not(test))]
{
if let Some(url) = &env.schedule.url {
providers::EngelsPolytechnicUpdateSource::Url(url.clone())
} else {
providers::EngelsPolytechnicUpdateSource::GrabFromSite {
yandex_api_key: env.yandex_cloud.api_key.clone(),
yandex_func_id: env.yandex_cloud.func_id.clone(),
}
}
}
})
.await?,
)]);
let this = Self {
cancel_token: CancellationToken::new(),
database: if let Some(database) = database {
database
} else {
let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
let mut _self = Self {
downloader: Mutex::new(BasicXlsDownloader::new()),
schedule: Mutex::new(Schedule::default()),
database: Mutex::new(
PgConnection::establish(&database_url)
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
),
env: AppEnv::default(),
fcm_client: FCMClientData::new().await,
};
let mut opt = ConnectOptions::new(database_url.clone());
opt.max_connections(4)
.min_connections(2)
.connect_timeout(Duration::from_secs(10))
.idle_timeout(Duration::from_secs(8))
.sqlx_logging(true);
let database = Database::connect(opt)
.await
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url));
Migrator::up(&database, None)
.await
.expect("Failed to run database migrations");
database
},
env,
providers,
};
if _self.env.schedule.auto_update {
_self
.get_schedule()
.await
.init(_self.get_downloader().await.deref_mut(), &_self.env)
.await?;
if this.env.schedule.auto_update {
for provider in this.providers.values() {
let provider = provider.clone();
let cancel_token = this.cancel_token.clone();
tokio::spawn(async move { provider.start_auto_update_task(cancel_token).await });
}
}
Ok(_self)
Ok(this)
}
pub async fn get_downloader(&'_ self) -> MutexGuard<'_, BasicXlsDownloader> {
self.downloader.lock().await
pub async fn get_schedule_snapshot(&'_ self, provider: &str) -> Option<Arc<ScheduleSnapshot>> {
if let Some(provider) = self.providers.get(provider) {
return Some(provider.get_schedule().await);
}
pub async fn get_schedule(&'_ self) -> MutexGuard<'_, Schedule> {
self.schedule.lock().await
None
}
pub async fn get_schedule_snapshot(&'_ self) -> MappedMutexGuard<'_, ScheduleSnapshot> {
let snapshot =
MutexGuard::<'_, Schedule>::map(self.schedule.lock().await, |schedule| unsafe {
schedule.snapshot.assume_init_mut()
});
snapshot
}
pub async fn get_database(&'_ self) -> MutexGuard<'_, PgConnection> {
self.database.lock().await
pub fn get_database(&'_ self) -> &DatabaseConnection {
&self.database
}
pub fn get_env(&self) -> &AppEnv {
&self.env
}
pub async fn get_fcm_client(&'_ self) -> Option<MutexGuard<'_, FCMClient>> {
match &self.fcm_client {
Some(client) => Some(client.lock().await),
None => None,
}
}
}
/// Create a new object web::Data<AppState>.
pub async fn new_app_state() -> Result<web::Data<AppState>, self::schedule::Error> {
Ok(web::Data::new(AppState::new().await?))
pub async fn new_app_state(
database: Option<DatabaseConnection>,
) -> Result<web::Data<AppState>, Box<dyn std::error::Error>> {
Ok(web::Data::new(AppState::new(database).await?))
}
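
As a usage sketch (not part of this diff): with the reworked AppState, a handler looks up a snapshot by provider key instead of locking a global Schedule. "eng_polytechnic" is the only key registered in AppState::new above; the route and handler names below are hypothetical, and the code assumes it lives in the same crate as AppState.

use crate::state::AppState;
use actix_web::{get, web, HttpResponse, Responder};

#[get("/schedule/source")]
async fn schedule_source(state: web::Data<AppState>) -> impl Responder {
    // Unknown provider keys yield None rather than an error.
    match state.get_schedule_snapshot("eng_polytechnic").await {
        Some(snapshot) => HttpResponse::Ok().body(snapshot.url.clone()),
        None => HttpResponse::NotFound().finish(),
    }
}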

View File

@@ -1,290 +0,0 @@
use crate::state::env::AppEnv;
use crate::utility::hasher::DigestHasher;
use chrono::{DateTime, Utc};
use derive_more::{Display, Error};
use schedule_parser::parse_xls;
use schedule_parser::schema::{ParseError, ParseResult};
use sha1::{Digest, Sha1};
use std::hash::Hash;
use std::mem::MaybeUninit;
use crate::xls_downloader::basic_impl::BasicXlsDownloader;
use crate::xls_downloader::interface::{FetchError, XLSDownloader};
/// Represents errors that can occur during schedule-related operations.
#[derive(Debug, Display, Error)]
pub enum Error {
/// An error occurred while querying the Yandex Cloud API for a URL.
///
/// This may result from network failures, invalid API credentials, or issues with the Yandex Cloud Function invocation.
/// See [`QueryUrlError`] for more details about specific causes.
QueryUrlFailed(QueryUrlError),
/// The schedule snapshot creation process failed.
///
/// This can happen due to URL conflicts (same URL already in use), failed network requests,
/// download errors, or invalid XLS file content. See [`SnapshotCreationError`] for details.
SnapshotCreationFailed(SnapshotCreationError),
}
/// Errors that may occur when querying the Yandex Cloud API to retrieve a URL.
#[derive(Debug, Display, Error)]
pub enum QueryUrlError {
/// Occurs when the request to the Yandex Cloud API fails.
///
/// This may be due to network issues, invalid API key, incorrect function ID, or other
/// problems with the Yandex Cloud Function invocation.
#[display("An error occurred during the request to the Yandex Cloud API: {_0}")]
RequestFailed(reqwest::Error),
}
/// Errors that may occur during the creation of a schedule snapshot.
#[derive(Debug, Display, Error)]
pub enum SnapshotCreationError {
/// The URL is the same as the one already being used (no update needed).
#[display("The URL is the same as the one already being used.")]
SameUrl,
/// The URL query for the XLS file failed to execute, either due to network issues or invalid API parameters.
#[display("Failed to fetch URL: {_0}")]
FetchFailed(FetchError),
/// Downloading the XLS file content failed after successfully obtaining the URL.
#[display("Download failed: {_0}")]
DownloadFailed(FetchError),
/// The XLS file could not be parsed into a valid schedule format.
#[display("Schedule data is invalid: {_0}")]
InvalidSchedule(ParseError),
}
/// Represents a snapshot of the schedule parsed from an XLS file.
#[derive(Clone)]
pub struct ScheduleSnapshot {
/// Timestamp when the Polytechnic website was queried for the schedule.
pub fetched_at: DateTime<Utc>,
/// Timestamp indicating when the schedule was last updated on the Polytechnic website.
///
/// <note>
/// This value is determined by the website's content and does not depend on the application.
/// </note>
pub updated_at: DateTime<Utc>,
/// URL pointing to the XLS file containing the source schedule data.
pub url: String,
/// Parsed schedule data in the application's internal representation.
pub data: ParseResult,
}
impl ScheduleSnapshot {
/// Converts the schedule data into a hash.
/// ### Important!
/// The hash does not depend on the dates.
/// If the application is restarted but the source schedule file remains unchanged, the hash will not change.
pub fn hash(&self) -> String {
let mut hasher = DigestHasher::from(Sha1::new());
self.data.teachers.iter().for_each(|e| e.hash(&mut hasher));
self.data.groups.iter().for_each(|e| e.hash(&mut hasher));
hasher.finalize()
}
/// Simply updates the value of [`ScheduleSnapshot::fetched_at`].
/// Used for auto-updates.
pub fn update(&mut self) {
self.fetched_at = Utc::now();
}
/// Constructs a new `ScheduleSnapshot` by downloading and parsing schedule data from the specified URL.
///
/// This method first checks if the provided URL is the same as the one already configured in the downloader.
/// If different, it updates the downloader's URL, fetches the XLS content, parses it, and creates a snapshot.
/// Errors are returned for URL conflicts, network issues, download failures, or invalid data.
///
/// # Arguments
///
/// * `downloader`: A mutable reference to an `XLSDownloader` implementation used to fetch and parse the schedule data.
/// * `url`: The source URL pointing to the XLS file containing schedule data.
///
/// returns: Result<ScheduleSnapshot, SnapshotCreationError>
pub async fn new(
downloader: &mut BasicXlsDownloader,
url: String,
) -> Result<Self, SnapshotCreationError> {
if downloader.url.as_ref().is_some_and(|_url| _url.eq(&url)) {
return Err(SnapshotCreationError::SameUrl);
}
let head_result = downloader.set_url(&*url).await.map_err(|error| {
if let FetchError::Unknown(error) = &error {
sentry::capture_error(&error);
}
SnapshotCreationError::FetchFailed(error)
})?;
let xls_data = downloader
.fetch(false)
.await
.map_err(|error| {
if let FetchError::Unknown(error) = &error {
sentry::capture_error(&error);
}
SnapshotCreationError::DownloadFailed(error)
})?
.data
.unwrap();
let parse_result = parse_xls(&xls_data).map_err(|error| {
sentry::capture_error(&error);
SnapshotCreationError::InvalidSchedule(error)
})?;
Ok(ScheduleSnapshot {
fetched_at: head_result.requested_at,
updated_at: head_result.uploaded_at,
url,
data: parse_result,
})
}
}
pub struct Schedule {
pub snapshot: MaybeUninit<ScheduleSnapshot>,
}
impl Default for Schedule {
fn default() -> Self {
Self {
snapshot: MaybeUninit::uninit(),
}
}
}
impl Schedule {
/// Queries the Yandex Cloud Function (FaaS) to obtain a URL for the schedule file.
///
/// This sends a POST request to the specified Yandex Cloud Function endpoint,
/// using the provided API key for authentication. The returned URI is combined
/// with the "https://politehnikum-eng.ru" base domain to form the complete URL.
///
/// # Arguments
///
/// * `api_key` - Authentication token for Yandex Cloud API
/// * `func_id` - ID of the target Yandex Cloud Function to invoke
///
/// # Returns
///
/// Result containing:
/// - `Ok(String)` - Complete URL constructed from the Function's response
/// - `Err(QueryUrlError)` - If the request or response processing fails
async fn query_url(api_key: &str, func_id: &str) -> Result<String, QueryUrlError> {
let client = reqwest::Client::new();
let uri = client
.post(format!(
"https://functions.yandexcloud.net/{}?integration=raw",
func_id
))
.header("Authorization", format!("Api-Key {}", api_key))
.send()
.await
.map_err(|error| QueryUrlError::RequestFailed(error))?
.text()
.await
.map_err(|error| QueryUrlError::RequestFailed(error))?;
Ok(format!("https://politehnikum-eng.ru{}", uri.trim()))
}
/// Initializes the schedule by fetching the URL from the environment or Yandex Cloud Function (FaaS)
/// and creating a [`ScheduleSnapshot`] with the downloaded data.
///
/// # Arguments
///
/// * `downloader`: Mutable reference to an `XLSDownloader` implementation used to fetch and parse the schedule
/// * `app_env`: Reference to the application environment containing either a predefined URL or Yandex Cloud credentials
///
/// # Returns
///
/// Returns `Ok(())` if the snapshot was successfully initialized, or an `Error` if:
/// - URL query to Yandex Cloud failed ([`QueryUrlError`])
/// - Schedule snapshot creation failed ([`SnapshotCreationError`])
pub async fn init(
&mut self,
downloader: &mut BasicXlsDownloader,
app_env: &AppEnv,
) -> Result<(), Error> {
let url = if let Some(url) = &app_env.schedule.url {
log::info!("The default link {} will be used", url);
url.clone()
} else {
log::info!("Obtaining a link using FaaS...");
Self::query_url(
&*app_env.yandex_cloud.api_key,
&*app_env.yandex_cloud.func_id,
)
.await
.map_err(|error| Error::QueryUrlFailed(error))?
};
log::info!("For the initial setup, a link {} will be used", url);
let snapshot = ScheduleSnapshot::new(downloader, url)
.await
.map_err(|error| Error::SnapshotCreationFailed(error))?;
log::info!("Schedule snapshot successfully created!");
self.snapshot.write(snapshot);
Ok(())
}
/// Updates the schedule snapshot by querying the latest URL from FaaS and checking for changes.
/// If the URL hasn't changed, only updates the [`fetched_at`] timestamp. If changed, downloads
/// and parses the new schedule data.
///
/// # Arguments
///
/// * `downloader`: XLS file downloader used to fetch and parse the schedule data
/// * `app_env`: Application environment containing Yandex Cloud configuration and auto-update settings
///
/// returns: `Result<(), Error>` - Returns error if URL query fails or schedule parsing encounters issues
///
/// # Safety
///
/// Uses `unsafe` to access the initialized snapshot, guaranteed valid by prior `init()` call
#[allow(unused)] // TODO: implement auto-update
pub async fn update(
&mut self,
downloader: &mut BasicXlsDownloader,
app_env: &AppEnv,
) -> Result<(), Error> {
assert!(app_env.schedule.auto_update);
let url = Self::query_url(
&*app_env.yandex_cloud.api_key,
&*app_env.yandex_cloud.func_id,
)
.await
.map_err(|error| Error::QueryUrlFailed(error))?;
let snapshot = match ScheduleSnapshot::new(downloader, url).await {
Ok(snapshot) => snapshot,
Err(SnapshotCreationError::SameUrl) => {
unsafe { self.snapshot.assume_init_mut() }.update();
return Ok(());
}
Err(error) => return Err(Error::SnapshotCreationFailed(error)),
};
self.snapshot.write(snapshot);
Ok(())
}
}

View File

@@ -1,26 +1,22 @@
 #[cfg(test)]
 pub(crate) mod tests {
-    use crate::state::{AppState, ScheduleSnapshot, new_app_state};
+    use crate::state::{new_app_state, AppState};
     use actix_web::web;
     use log::info;
-    use schedule_parser::test_utils::test_result;
-    use std::default::Default;
     use tokio::sync::OnceCell;

     pub fn test_env() {
         info!("Loading test environment file...");

-        dotenvy::from_path(".env.test").expect("Failed to load test environment file");
+        dotenvy::from_filename(".env.test.local")
+            .or_else(|_| dotenvy::from_filename(".env.test"))
+            .expect("Failed to load test environment file");
     }

     pub async fn test_app_state() -> web::Data<AppState> {
-        let state = new_app_state().await.unwrap();
-
-        state.get_schedule().await.snapshot.write(ScheduleSnapshot {
-            fetched_at: Default::default(),
-            updated_at: Default::default(),
-            url: "".to_string(),
-            data: test_result().unwrap(),
-        });
+        let state = new_app_state(Some(static_app_state().await.get_database().clone()))
+            .await
+            .unwrap();

         state.clone()
     }
@@ -28,6 +24,14 @@ pub(crate) mod tests {
     pub async fn static_app_state() -> web::Data<AppState> {
         static STATE: OnceCell<web::Data<AppState>> = OnceCell::const_new();

-        STATE.get_or_init(|| test_app_state()).await.clone()
+        STATE
+            .get_or_init(async || -> web::Data<AppState> {
+                #[cfg(feature = "trace")]
+                console_subscriber::init();
+
+                new_app_state(None).await.unwrap()
+            })
+            .await
+            .clone()
     }
 }
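
The get_or_init change above uses async-closure syntax; the same once-only initialisation pattern can also be written on stable Rust with a closure returning an async block. A generic standalone sketch (names are illustrative, not from the codebase):

use tokio::sync::OnceCell;

async fn shared_config() -> String {
    // Initialised exactly once across all callers, then cloned out.
    static CONFIG: OnceCell<String> = OnceCell::const_new();
    CONFIG
        .get_or_init(|| async { "expensive one-time init".to_string() })
        .await
        .clone()
}

#[tokio::main]
async fn main() {
    assert_eq!(shared_config().await, shared_config().await);
}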

View File

@@ -24,14 +24,13 @@ static ENCODING_KEY: LazyLock<EncodingKey> = LazyLock::new(|| {
 });

 /// Token verification errors.
 #[allow(dead_code)]
 #[derive(Debug)]
 pub enum Error {
     /// The token has a different signature.
     InvalidSignature,

     /// Token reading error.
-    InvalidToken(ErrorKind),
+    InvalidToken,

     /// Token expired.
     Expired,
@@ -63,13 +62,13 @@ struct Claims {
 pub(crate) const DEFAULT_ALGORITHM: Algorithm = Algorithm::HS256;

 /// Checking the token and extracting the UUID of the user account from it.
-pub fn verify_and_decode(token: &String) -> Result<String, Error> {
+pub fn verify_and_decode(token: &str) -> Result<String, Error> {
     let mut validation = Validation::new(DEFAULT_ALGORITHM);
     validation.required_spec_claims.remove("exp");
     validation.validate_exp = false;

-    let result = decode::<Claims>(&token, &*DECODING_KEY, &validation);
+    let result = decode::<Claims>(token, &DECODING_KEY, &validation);

     match result {
         Ok(token_data) => {
@@ -82,13 +81,13 @@ pub fn verify_and_decode(token: &String) -> Result<String, Error> {
         Err(err) => Err(match err.into_kind() {
             ErrorKind::InvalidSignature => Error::InvalidSignature,
             ErrorKind::ExpiredSignature => Error::Expired,
-            kind => Error::InvalidToken(kind),
+            _ => Error::InvalidToken,
         }),
     }
 }

 /// Creating a user token.
-pub fn encode(id: &String) -> String {
+pub fn encode(id: &str) -> String {
     let header = Header {
         typ: Some(String::from("JWT")),
         ..Default::default()
@@ -98,12 +97,12 @@ pub fn encode(id: &String) -> String {
     let exp = iat + Duration::days(365 * 4);

     let claims = Claims {
-        id: id.clone(),
+        id: id.to_string(),
         iat: iat.timestamp().unsigned_abs(),
         exp: exp.timestamp().unsigned_abs(),
     };

-    jsonwebtoken::encode(&header, &claims, &*ENCODING_KEY).unwrap()
+    jsonwebtoken::encode(&header, &claims, &ENCODING_KEY).unwrap()
 }

 #[cfg(test)]
@@ -115,7 +114,7 @@ mod tests {
     fn test_encode() {
         test_env();

-        assert_eq!(encode(&"test".to_string()).is_empty(), false);
+        assert!(!encode("test").is_empty());
     }

     #[test]
@@ -128,7 +127,7 @@ mod tests {
         assert!(result.is_err());
         assert_eq!(
             result.err().unwrap(),
-            Error::InvalidToken(ErrorKind::InvalidToken)
+            Error::InvalidToken
         );
     }
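
A round-trip usage sketch for the reworked &str signatures (assumes the module above is in scope and JWT_SECRET is set, e.g. via .env.test; the user id below is a placeholder):

fn jwt_roundtrip() {
    let token = encode("00000000-0000-0000-0000-000000000000");
    // A freshly issued token must verify and decode back to the same id.
    match verify_and_decode(&token) {
        Ok(id) => assert_eq!(id, "00000000-0000-0000-0000-000000000000"),
        Err(_) => panic!("freshly issued token failed to verify"),
    }
}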

Some files were not shown because too many files have changed in this diff.