38 Commits

Author SHA1 Message Date
cdc89b5bcd fix(parser): fix sentry error sending 2025-10-10 03:00:47 +04:00
ad86f6cd64 feat(parser): limit names regex to maximum 2 elements
This lets us stop worrying about subgroup array index overflows and enables better handling of non-standard cases.
2025-10-10 01:39:54 +04:00
a3b4a501db feat(parser): improve names regex to exclude some non-standard cases
Like "Название ФАмилия. И.О.".
In that case regex will grab "Название ФА", instead of "Амилия. И. О." (we can't add 'Ф', bc it will make regex checks way more complex).

Now it will ignore "Название ФА" if after that lower or upper char is placed.
Previously only lower chars are excluded and check won't exclude "Название ФА" and grabs "Название Ф" bc after 'Ф' uppercase char is present.
2025-10-10 01:37:52 +04:00
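Illustration only, not part of the repository: the behaviour these two commits describe can be sketched against the NAME_RE pattern that appears later in this diff (fancy-regex is needed because the plain regex crate lacks lookahead support):

// Hypothetical standalone sketch of the lookahead check described above.
use fancy_regex::Regex;

fn main() {
    let name_re = Regex::new(
        r"([А-Я][а-я]+(?:[\s.]*[А-Я]){1,2})(?=[^А-Яа-я])[.\s]*(?:\(?(\d)[\sа-я]*\)?)?",
    )
    .unwrap();

    // For "Название ФАмилия. И. О." the (?=[^А-Яа-я]) lookahead rejects both
    // "Название ФА" (lowercase 'м' follows) and "Название Ф" (uppercase 'А'
    // follows), so the match no longer starts at "Название".
    println!("{:?}", name_re.find("Название ФАмилия. И. О."));
}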
df0e99a4d0 feat(parser): make lesson cell range less strict to support upcoming split-lessons 2025-10-10 01:31:55 +04:00
a8cf8fb0f5 feat(parser): improve street regex 2025-10-10 01:30:56 +04:00
7ed866138e feat(error): add error for unknown lesson type 2025-10-10 01:30:30 +04:00
7bac48f8fc feat(error): add more intuitive CellPos formatting and get rid of ErrorCell 2025-10-10 01:27:05 +04:00
191ec36fef chore: remove useless commented code 2025-10-10 01:25:12 +04:00
f121a04f1b refactor: refactor providers code 2025-10-02 07:55:07 +04:00
df74ab03a1 chore(ci): make building non-verbose 2025-10-02 07:54:09 +04:00
1b79d1cf1e chore: bump provider version to 0.2.2 2025-10-02 07:44:35 +04:00
2b9b1ea66b chore(deps): update dependencies 2025-10-02 07:43:37 +04:00
ca713d8d51 refactor: prevent updater stop because of errors 2025-10-02 07:40:44 +04:00
69df538467 refactor(updater): don't cancel token when uri fetch error occurred 2025-09-29 08:27:25 +04:00
aa019f8fcf fix(parser): rework teacher name parsing 2025-09-29 08:27:24 +04:00
b664ba578d chore(clippy): fix all clippy warnings 2025-09-25 03:42:34 +04:00
983967f8b0 chore(downloader): suppress unused_assignments warning 2025-09-25 03:27:55 +04:00
e5760120e2 chore(release): bump version to 1.3.0 2025-09-25 03:17:14 +04:00
a28fb66dd4 feat(downloader): add retry-mechanism for querying uri from yandex-cloud api (#18) 2025-09-25 03:15:36 +04:00
3780fb3136 feat(downloader): implement etag-based difference check for schedule 2025-09-25 03:14:39 +04:00
6c71bc19f5 chore(parser): fix crash caused by another mistype in schedule 2025-09-25 02:50:43 +04:00
2d0041dc8b feat(schedule): add practice lesson type 2025-09-25 02:49:23 +04:00
b5d372e109 feat(ci): build and push image to docker registry on every push to master 2025-09-10 20:05:11 +04:00
84dca02c34 fix(database): use migrator and change connection options 2025-09-10 20:04:19 +04:00
6c9d3b3b31 chore(release): bump version to 1.2.2 2025-09-08 07:16:53 +04:00
a348b1b99b refactor(auth): interpret failure to find user as an error 2025-09-08 07:15:22 +04:00
ff12ee5da2 chore(release): bump version to 1.2.1 2025-09-06 21:25:26 +04:00
35f707901f chore(clippy): fix all clippy warnings 2025-09-06 21:24:52 +04:00
edea6c5424 chore(release): bump version to 1.2.0 2025-09-06 20:33:17 +04:00
fdbb872fc3 refactor(dev): move tracing to feature 2025-09-06 20:17:14 +04:00
dbc800fef1 feat(database)!: switch from diesel to sea-orm 2025-09-06 20:09:04 +04:00
e729d84c93 fix: fix typo in error description 2025-09-06 18:42:23 +04:00
cc7adf10ed fix(env): make dotenv file optional 2025-09-06 18:40:04 +04:00
57c1699c9a chore(release): bump version to 1.1.1 2025-09-03 07:13:37 +04:00
298c4f4dd3 feat(auth): send raw token on telegram auth 2025-09-03 07:08:51 +04:00
e3904a255b chore(release): bump version to 1.1.0 2025-09-02 09:23:29 +04:00
829c1cf68d chore(deps): bump calamine version 2025-09-02 09:20:02 +04:00
6a535f8d73 chore(ci): remove creating .env.test 2025-09-02 09:20:02 +04:00
78 changed files with 3710 additions and 1635 deletions

.github/workflows/build.yml vendored Normal file

@@ -0,0 +1,142 @@
name: build

on:
  push:
    branches: [ "master" ]
    tags-ignore: [ "release/v*" ]

permissions:
  contents: write

env:
  CARGO_TERM_COLOR: always
  BINARY_NAME: schedule-parser-rusted
  TEST_DB: ${{ secrets.TEST_DATABASE_URL }}
  SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
  SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
  SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
  DOCKER_IMAGE_NAME: ${{ github.repository }}
  DOCKER_REGISTRY_HOST: registry.n08i40k.ru
  DOCKER_REGISTRY_USERNAME: ${{ github.repository_owner }}
  DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}

jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Setup Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: stable
      - name: Test
        run: |
          cargo test
        env:
          DATABASE_URL: ${{ env.TEST_DB }}
          SCHEDULE_DISABLE_AUTO_UPDATE: 1
          JWT_SECRET: "test-secret-at-least-256-bits-used"
          VK_ID_CLIENT_ID: 0
          VK_ID_REDIRECT_URI: "vk0://vk.com/blank.html"
          TELEGRAM_BOT_ID: 0
          TELEGRAM_MINI_APP_HOST: example.com
          TELEGRAM_TEST_DC: false
          YANDEX_CLOUD_API_KEY: ""
          YANDEX_CLOUD_FUNC_ID: ""

  build:
    name: Build
    runs-on: ubuntu-latest
    needs: test
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Setup Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: stable
      - name: Build
        run: cargo build --release
      - name: Extract debug symbols
        run: |
          objcopy --only-keep-debug target/release/${{ env.BINARY_NAME }}{,.d}
          objcopy --strip-debug --strip-unneeded target/release/${{ env.BINARY_NAME }}
          objcopy --add-gnu-debuglink target/release/${{ env.BINARY_NAME }}{.d,}
      - name: Setup sentry-cli
        uses: matbour/setup-sentry-cli@v2.0.0
        with:
          version: latest
          token: ${{ env.SENTRY_AUTH_TOKEN }}
          organization: ${{ env.SENTRY_ORG }}
          project: ${{ env.SENTRY_PROJECT }}
      - name: Upload debug symbols to Sentry
        run: |
          sentry-cli debug-files upload --include-sources .
      - name: Upload build binary artifact
        uses: actions/upload-artifact@v4
        with:
          name: release-binary
          path: target/release/${{ env.BINARY_NAME }}
      - name: Upload build debug symbols artifact
        uses: actions/upload-artifact@v4
        with:
          name: release-symbols
          path: target/release/${{ env.BINARY_NAME }}.d

  docker:
    name: Build & Push Docker Image
    runs-on: ubuntu-latest
    needs: build
    steps:
      - uses: actions/checkout@v4
      - name: Download build artifacts
        uses: actions/download-artifact@v4
        with:
          name: release-binary
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3.10.0
      - name: Login to Registry
        uses: docker/login-action@v3.4.0
        with:
          registry: ${{ env.DOCKER_REGISTRY_HOST }}
          username: ${{ env.DOCKER_REGISTRY_USERNAME }}
          password: ${{ env.DOCKER_REGISTRY_PASSWORD }}
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v5.7.0
        with:
          images: ${{ env.DOCKER_REGISTRY_HOST }}/${{ env.DOCKER_IMAGE_NAME }}
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v6.15.0
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          build-args: |
            "BINARY_NAME=${{ env.BINARY_NAME }}"


@@ -2,7 +2,7 @@ name: cargo test
 on:
   push:
-    branches: [ "master" ]
+    branches: [ "development" ]
     tags-ignore: [ "release/v*" ]
 permissions:
@@ -18,10 +18,10 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - name: Build
        run: cargo build
-      - name: Create .env.test
-        run: touch .env.test
      - name: Run tests
        run: cargo test
        env:

Cargo.lock generated (file diff suppressed because it is too large)


@@ -3,72 +3,78 @@ members = ["actix-macros", "actix-test", "providers"]
 [package]
 name = "schedule-parser-rusted"
-version = "1.0.5"
+version = "1.3.1"
 edition = "2024"
 publish = false

 [profile.release]
 debug = true

+[features]
+trace = ["tracing", "console-subscriber"]
+
 [dependencies]
 providers = { path = "providers" }
 actix-macros = { path = "actix-macros" }

 # serve api
-actix-web = "4"
+actix-web = "4.11.0"

 # basic
-chrono = { version = "0.4.40", features = ["serde"] }
-derive_more = { version = "2", features = ["full"] }
+chrono = { version = "0.4.42", features = ["serde"] }
+derive_more = { version = "2.0.1", features = ["full"] }
 dotenvy = "0.15.7"

 # sql
-diesel = { version = "2.2.8", features = ["postgres"] }
-diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
+database = { path = "database" }

 # logging
-env_logger = "0.11.7"
+env_logger = "0.11.8"

 # async
-tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] }
+tokio = { version = "1.47.1", features = ["macros", "rt-multi-thread"] }
 tokio-util = "0.7.16"
 futures-util = "0.3.31"

 # authorization
-bcrypt = "0.17.0"
+bcrypt = "0.17.1"
 jsonwebtoken = { version = "9.3.1", features = ["use_pem"] }

 # creating users
 objectid = "0.2.0"

 # schedule downloader
-reqwest = { version = "0.12.15", features = ["json"] }
+reqwest = { version = "0.12.23", features = ["json"] }
 mime = "0.3.17"

 # error handling
-sentry = "0.42.0"
-sentry-actix = "0.42.0"
+sentry = "0.43.0"
+sentry-actix = "0.43.0"

 # [de]serializing
-serde = { version = "1.0.219", features = ["derive"] }
-serde_json = "1.0.140"
-serde_with = "3.12.0"
-sha1 = "0.11.0-rc.0"
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+serde_with = "3.14"
+sha1 = "0.11.0-rc.2"

 # documentation
-utoipa = { version = "5", features = ["actix_extras", "chrono"] }
-utoipa-rapidoc = { version = "6", features = ["actix-web"] }
-utoipa-actix-web = "0.1"
-uuid = { version = "1", features = ["v4"] }
+utoipa = { version = "5.4.0", features = ["actix_extras", "chrono"] }
+utoipa-rapidoc = { version = "6.0.0", features = ["actix-web"] }
+utoipa-actix-web = "0.1.2"
+uuid = { version = "1.18.1", features = ["v4"] }
 hex-literal = "1"
-log = "0.4"
+log = "0.4.28"

 # telegram webdata deciding and verify
-base64 = "0.22"
-percent-encoding = "2.3"
-ed25519-dalek = "2"
+base64 = "0.22.1"
+percent-encoding = "2.3.2"
+ed25519-dalek = "3.0.0-pre.1"
+
+# development tracing
+console-subscriber = { version = "0.4.1", optional = true }
+tracing = { version = "0.1.41", optional = true }

 [dev-dependencies]
 providers = { path = "providers", features = ["test"] }


@@ -4,9 +4,9 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
syn = "2" syn = "2.0.106"
quote = "1" quote = "1.0.40"
proc-macro2 = "1" proc-macro2 = "1.0.101"
[lib] [lib]
proc-macro = true proc-macro = true


@@ -6,7 +6,7 @@ mod shared {
     use quote::{ToTokens, quote};
     use syn::{Attribute, DeriveInput};

-    pub fn find_status_code(attrs: &Vec<Attribute>) -> Option<proc_macro2::TokenStream> {
+    pub fn find_status_code(attrs: &[Attribute]) -> Option<proc_macro2::TokenStream> {
         attrs
             .iter()
             .find_map(|attr| -> Option<proc_macro2::TokenStream> {
@@ -41,14 +41,12 @@ mod shared {
         let mut status_code_arms: Vec<proc_macro2::TokenStream> = variants
             .iter()
-            .map(|v| -> Option<proc_macro2::TokenStream> {
+            .filter_map(|v| -> Option<proc_macro2::TokenStream> {
                 let status_code = find_status_code(&v.attrs)?;
                 let variant_name = &v.ident;

                 Some(quote! { #name::#variant_name => #status_code, })
             })
-            .filter(|v| v.is_some())
-            .map(|v| v.unwrap())
             .collect();

         if status_code_arms.len() < variants.len() {

@@ -4,5 +4,5 @@ version = "0.1.0"
edition = "2024" edition = "2024"
[dependencies] [dependencies]
actix-http = "3" actix-http = "3.11.1"
actix-web = "4" actix-web = "4.11.0"

database/Cargo.toml Normal file

@@ -0,0 +1,11 @@
[package]
name = "database"
version = "0.1.0"
edition = "2024"
[dependencies]
migration = { path = "migration" }
entity = { path = "entity" }
sea-orm = { version = "2.0.0-rc.6", features = ["sqlx-postgres", "runtime-tokio"] }
paste = "1.0.15"

database/entity/.gitignore vendored Normal file

@@ -0,0 +1 @@
/target


@@ -0,0 +1,9 @@
[package]
name = "entity"
version = "0.1.0"
edition = "2024"
[dependencies]
sea-orm = "2.0.0-rc.6"
serde = { version = "1.0.219", features = ["derive"] }
utoipa = "5.4.0"


@@ -0,0 +1,6 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
pub mod prelude;
pub mod sea_orm_active_enums;
pub mod user;


@@ -0,0 +1,3 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
pub use super::user::Entity as User;


@@ -0,0 +1,25 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
use sea_orm::entity::prelude::*;
#[derive(
    Debug,
    Clone,
    PartialEq,
    Eq,
    EnumIter,
    DeriveActiveEnum,
    :: serde :: Serialize,
    :: serde :: Deserialize,
    :: utoipa :: ToSchema,
)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "user_role")]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum UserRole {
    #[sea_orm(string_value = "student")]
    Student,
    #[sea_orm(string_value = "teacher")]
    Teacher,
    #[sea_orm(string_value = "admin")]
    Admin,
}


@@ -0,0 +1,25 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.12
use super::sea_orm_active_enums::UserRole;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq)]
#[sea_orm(table_name = "user")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: String,
    #[sea_orm(unique)]
    pub username: String,
    pub password: Option<String>,
    pub vk_id: Option<i32>,
    pub group: Option<String>,
    pub role: UserRole,
    pub android_version: Option<String>,
    #[sea_orm(unique)]
    pub telegram_id: Option<i64>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

database/migration/.gitignore vendored Normal file

@@ -0,0 +1 @@
/target


@@ -0,0 +1,22 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
name = "migration"
path = "src/lib.rs"
[dependencies]
async-std = { version = "1", features = ["attributes", "tokio1"] }
[dependencies.sea-orm-migration]
version = "2.0.0-rc.6"
features = [
# Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
# View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
# e.g.
"runtime-tokio", # `ASYNC_RUNTIME` feature
"sqlx-postgres", # `DATABASE_DRIVER` feature
]


@@ -0,0 +1,16 @@
pub use sea_orm_migration::prelude::MigratorTrait;
use sea_orm_migration::prelude::*;

mod m20250904_024854_init;

pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![
            Box::new(m20250904_024854_init::Migration),
        ]
    }
}
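
For context, commit 84dca02c34 above switches the service to this migrator. Applying the pending migrations at startup would look roughly like the following sketch (the connection string is illustrative; the real service reads DATABASE_URL from the environment):

use migration::{Migrator, MigratorTrait};
use sea_orm::Database;

#[async_std::main]
async fn main() -> Result<(), sea_orm::DbErr> {
    // Illustrative DSN only.
    let db = Database::connect("postgres://localhost/schedule").await?;
    // Apply every migration that has not been run yet.
    Migrator::up(&db, None).await
}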


@@ -0,0 +1,70 @@
use sea_orm_migration::prelude::extension::postgres::Type;
use sea_orm_migration::sea_orm::{EnumIter, Iterable};
use sea_orm_migration::{prelude::*, schema::*};

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_type(
                Type::create()
                    .as_enum(UserRole)
                    .values(UserRoleVariants::iter())
                    .to_owned(),
            )
            .await?;

        manager
            .create_table(
                Table::create()
                    .table(User::Table)
                    .if_not_exists()
                    .col(string_uniq(User::Id).primary_key().not_null())
                    .col(string_uniq(User::Username).not_null())
                    .col(string_null(User::Password))
                    .col(integer_null(User::VkId))
                    .col(string_null(User::Group))
                    .col(enumeration(User::Role, UserRole, UserRoleVariants::iter()))
                    .col(string_null(User::AndroidVersion))
                    .col(big_integer_null(User::TelegramId).unique_key())
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(User::Table).to_owned())
            .await?;

        manager
            .drop_type(Type::drop().name(UserRole).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
struct UserRole;

#[derive(DeriveIden, EnumIter)]
enum UserRoleVariants {
    Student,
    Teacher,
    Admin,
}

#[derive(DeriveIden)]
enum User {
    Table,
    Id,
    Username,
    Password,
    VkId,
    Group,
    Role,
    AndroidVersion,
    TelegramId,
}


@@ -0,0 +1,6 @@
use sea_orm_migration::prelude::*;

#[async_std::main]
async fn main() {
    cli::run_cli(migration::Migrator).await;
}

database/src/lib.rs Normal file

@@ -0,0 +1,10 @@
pub mod query;

pub use migration;
pub use sea_orm;

pub mod entity {
    pub use entity::*;
    pub use entity::user::{ActiveModel as ActiveUser, Model as User, Entity as UserEntity, Column as UserColumn};
}

database/src/query.rs Normal file

@@ -0,0 +1,63 @@
use paste::paste;
use sea_orm::ColumnTrait;
use sea_orm::EntityTrait;
use sea_orm::QueryFilter;

pub struct Query;

macro_rules! ref_type {
    (String) => {
        &String
    };
    (str) => {
        &str
    };
    ($other:ty) => {
        $other
    };
}

macro_rules! define_is_exists {
    ($entity: ident, $by: ident, $by_type: ident, $by_column: ident) => {
        paste! {
            pub async fn [<is_ $entity _exists_by_ $by>](
                db: &::sea_orm::DbConn,
                $by: ref_type!($by_type)
            ) -> Result<bool, ::sea_orm::DbErr> {
                ::entity::$entity::Entity::find()
                    .filter(::entity::$entity::Column::$by_column.eq($by))
                    .one(db)
                    .await
                    .map(|x| x.is_some())
            }
        }
    };
}

macro_rules! define_find_by {
    ($entity: ident, $by: ident, $by_type: ident, $by_column: ident) => {
        paste! {
            pub async fn [<find_ $entity _by_ $by>](
                db: &::sea_orm::DbConn,
                $by: ref_type!($by_type)
            ) -> Result<Option<::entity::$entity::Model>, ::sea_orm::DbErr> {
                ::entity::$entity::Entity::find()
                    .filter(::entity::$entity::Column::$by_column.eq($by))
                    .one(db)
                    .await
            }
        }
    };
}

impl Query {
    define_find_by!(user, id, str, Id);
    define_find_by!(user, telegram_id, i64, TelegramId);
    define_find_by!(user, vk_id, i32, VkId);
    define_find_by!(user, username, str, Username);

    define_is_exists!(user, id, str, Id);
    define_is_exists!(user, username, str, Username);
    define_is_exists!(user, telegram_id, i64, TelegramId);
    define_is_exists!(user, vk_id, i32, VkId);
}
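
Each define_find_by!/define_is_exists! invocation above expands, via paste!, into a concrete async function on Query. A hedged usage sketch (the connection string and values are illustrative, not from this repo):

use database::query::Query;
use database::sea_orm::{Database, DbErr};

#[tokio::main]
async fn main() -> Result<(), DbErr> {
    let db = Database::connect("postgres://localhost/schedule").await?;

    // Generated by define_find_by!(user, username, str, Username):
    let user = Query::find_user_by_username(&db, "some-username").await?;
    println!("found: {:?}", user.map(|u| u.id));

    // Generated by define_is_exists!(user, telegram_id, i64, TelegramId):
    let exists = Query::is_user_exists_by_telegram_id(&db, 42).await?;
    println!("exists: {exists}");

    Ok(())
}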


@@ -1,9 +0,0 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/database/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "./migrations"


@@ -1,6 +0,0 @@
-- This file was automatically created by Diesel to set up helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();


@@ -1,36 +0,0 @@
-- This file was automatically created by Diesel to set up helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
    IF (
        NEW IS DISTINCT FROM OLD AND
        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
    ) THEN
        NEW.updated_at := current_timestamp;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;


@@ -1 +0,0 @@
DROP TYPE user_role;


@@ -1,4 +0,0 @@
CREATE TYPE user_role AS ENUM (
'STUDENT',
'TEACHER',
'ADMIN');


@@ -1 +0,0 @@
DROP TABLE users;


@@ -1,11 +0,0 @@
CREATE TABLE users
(
    id text PRIMARY KEY NOT NULL,
    username text UNIQUE NOT NULL,
    password text NOT NULL,
    vk_id int4 NULL,
    access_token text UNIQUE NOT NULL,
    "group" text NOT NULL,
    role user_role NOT NULL,
    version text NOT NULL
);


@@ -1 +0,0 @@
DROP TABLE fcm;


@@ -1,6 +0,0 @@
CREATE TABLE fcm
(
    user_id text PRIMARY KEY NOT NULL REFERENCES users (id),
    token text NOT NULL,
    topics text[] NOT NULL CHECK ( array_position(topics, null) is null )
);


@@ -1,2 +0,0 @@
ALTER TABLE users DROP CONSTRAINT users_telegram_id_key;
ALTER TABLE users DROP COLUMN telegram_id;


@@ -1,2 +0,0 @@
ALTER TABLE users ADD telegram_id int8 NULL;
ALTER TABLE users ADD CONSTRAINT users_telegram_id_key UNIQUE (telegram_id);


@@ -1,2 +0,0 @@
UPDATE users SET "password" = '' WHERE "password" IS NULL;
ALTER TABLE users ALTER COLUMN "password" SET NOT NULL;


@@ -1 +0,0 @@
ALTER TABLE users ALTER COLUMN "password" DROP NOT NULL;


@@ -1,3 +0,0 @@
UPDATE users SET "android_version" = '' WHERE "android_version" IS NULL;
ALTER TABLE users ALTER COLUMN "android_version" SET NOT NULL;
ALTER TABLE users RENAME COLUMN android_version TO "version";


@@ -1,2 +0,0 @@
ALTER TABLE users RENAME COLUMN "version" TO android_version;
ALTER TABLE users ALTER COLUMN android_version DROP NOT NULL;


@@ -1,2 +0,0 @@
UPDATE users SET "group" = '' WHERE "group" IS NULL;
ALTER TABLE users ALTER COLUMN "group" SET NOT NULL;


@@ -1 +0,0 @@
ALTER TABLE users ALTER COLUMN "group" DROP NOT NULL;


@@ -1,2 +0,0 @@
UPDATE users SET "access_token" = '' WHERE "access_token" IS NULL;
ALTER TABLE users ALTER COLUMN "access_token" SET NOT NULL;


@@ -1 +0,0 @@
ALTER TABLE users ALTER COLUMN "access_token" DROP NOT NULL;


@@ -14,4 +14,4 @@ serde_repr = "0.1.20"
 utoipa = { version = "5.4.0", features = ["macros", "chrono"] }
-sha1 = "0.11.0-rc.0"
+sha1 = "0.11.0-rc.2"


@@ -100,6 +100,9 @@ pub enum LessonType {
     /// Защита курсового проекта.
     CourseProjectDefense,
+
+    /// Практическое занятие.
+    Practice,
 }

 #[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
@@ -209,70 +212,6 @@ impl ScheduleSnapshot {
     }
 }

-// #[derive(Clone, Debug, Display, Error, ToSchema)]
-// #[display("row {row}, column {column}")]
-// pub struct ErrorCellPos {
-//     pub row: u32,
-//     pub column: u32,
-// }
-//
-// #[derive(Clone, Debug, Display, Error, ToSchema)]
-// #[display("'{data}' at {pos}")]
-// pub struct ErrorCell {
-//     pub pos: ErrorCellPos,
-//     pub data: String,
-// }
-//
-// impl ErrorCell {
-//     pub fn new(row: u32, column: u32, data: String) -> Self {
-//         Self {
-//             pos: ErrorCellPos { row, column },
-//             data,
-//         }
-//     }
-// }
-//
-// #[derive(Clone, Debug, Display, Error, ToSchema)]
-// pub enum ParseError {
-//     /// Errors related to reading XLS file.
-//     #[display("{_0:?}: Failed to read XLS file.")]
-//     #[schema(value_type = String)]
-//     BadXLS(Arc<calamine::XlsError>),
-//
-//     /// Not a single sheet was found.
-//     #[display("No work sheets found.")]
-//     NoWorkSheets,
-//
-//     /// There are no data on the boundaries of the sheet.
-//     #[display("There is no data on work sheet boundaries.")]
-//     UnknownWorkSheetRange,
-//
-//     /// Failed to read the beginning and end of the lesson from the cell
-//     #[display("Failed to read lesson start and end from {_0}.")]
-//     LessonBoundaries(ErrorCell),
-//
-//     /// Not found the beginning and the end corresponding to the lesson.
-//     #[display("No start and end times matching the lesson (at {_0}) was found.")]
-//     LessonTimeNotFound(ErrorCellPos),
-// }
-//
-// impl Serialize for ParseError {
-//     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-//     where
-//         S: Serializer,
-//     {
-//         match self {
-//             ParseError::BadXLS(_) => serializer.serialize_str("BAD_XLS"),
-//             ParseError::NoWorkSheets => serializer.serialize_str("NO_WORK_SHEETS"),
-//             ParseError::UnknownWorkSheetRange => {
-//                 serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE")
-//             }
-//             ParseError::LessonBoundaries(_) => serializer.serialize_str("GLOBAL_TIME"),
-//             ParseError::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"),
-//         }
-//     }
-// }
-
 #[async_trait]
 pub trait ScheduleProvider
 where


@@ -1,6 +1,6 @@
 [package]
 name = "provider-engels-polytechnic"
-version = "0.1.0"
+version = "0.2.3"
 edition = "2024"

 [features]
@@ -14,19 +14,18 @@ tokio-util = "0.7.16"
 chrono = { version = "0.4.41", features = ["serde"] }
-serde = { version = "1.0.219", features = ["derive"] }
-derive_more = { version = "2.0.1", features = ["error", "display"] }
+derive_more = { version = "2.0.1", features = ["error", "display", "from"] }
 utoipa = { version = "5.4.0", features = ["macros", "chrono"] }
-calamine = { git = "https://github.com/prophittcorey/calamine.git", branch = "fix/zip-3.0" }
+calamine = "0.31"
 async-trait = "0.1.89"
 reqwest = "0.12.23"
 ua_generator = "0.5.22"
-regex = "1.11.1"
+regex = "1.11.2"
 strsim = "0.11.1"
 log = "0.4.27"
-sentry = "0.42.0"
+sentry = "0.43.0"
+fancy-regex = "0.16.2"


@@ -1,4 +1,4 @@
-use crate::updater::Updater;
+pub use crate::updater::{UpdateSource, Updater};
 use async_trait::async_trait;
 use base::{ScheduleProvider, ScheduleSnapshot};
 use std::ops::DerefMut;
@@ -8,8 +8,6 @@ use tokio::sync::RwLock;
 use tokio::time::interval;
 use tokio_util::sync::CancellationToken;

-pub use crate::updater::UpdateSource;
-
 mod parser;
 mod updater;
 mod xls_downloader;
@@ -25,9 +23,9 @@ pub struct EngelsPolytechnicProvider {
 }

 impl EngelsPolytechnicProvider {
-    pub async fn new(
+    pub async fn get(
         update_source: UpdateSource,
-    ) -> Result<Arc<dyn ScheduleProvider>, crate::updater::error::Error> {
+    ) -> Result<Arc<dyn ScheduleProvider>, crate::updater::Error> {
         let (updater, snapshot) = Updater::new(update_source).await?;

         Ok(Arc::new(Wrapper {
@@ -60,14 +58,15 @@ impl ScheduleProvider for Wrapper {
                 log::info!("Updating schedule...");

-                match this.updater.update(&mut this.snapshot).await {
+                match this.updater.update(&this.snapshot).await {
                     Ok(snapshot) => {
                         this.snapshot = Arc::new(snapshot);
                     },
+                    Err(updater::Error::EmptyUri) => {},
                     Err(err) => {
-                        cancellation_token.cancel();
-                        return Err(err.into());
+                        sentry::capture_error(&err);
                     }
                 }
             }


@@ -0,0 +1,25 @@
use crate::parser::worksheet::CellPos;
use derive_more::{Display, Error, From};

#[derive(Debug, Display, Error, From)]
pub enum Error {
    #[from]
    BadXls(calamine::XlsError),

    #[display("No work sheets found.")]
    NoWorkSheets,

    #[display("There is no data on work sheet boundaries.")]
    UnknownWorkSheetRange,

    #[display("Failed to read lesson start and end of lesson at {_0}.")]
    NoLessonBoundaries(CellPos),

    #[display("No start and end times matching the lesson (at {_0}) was found.")]
    LessonTimeNotFound(CellPos),

    #[display("Unknown lesson type `{type}` at {pos}")]
    UnknownLessonType { pos: CellPos, r#type: String },
}

pub type Result<T> = core::result::Result<T, Error>;


@@ -1,6 +1,6 @@
+pub use self::error::{Error, Result};
 use crate::or_continue;
-use crate::parser::error::{ErrorCell, ErrorCellPos};
-use crate::parser::worksheet::WorkSheet;
+use crate::parser::worksheet::{CellPos, CellRange, WorkSheet};
 use crate::parser::LessonParseResult::{Lessons, Street};
 use base::LessonType::Break;
 use base::{
@@ -13,82 +13,12 @@ use std::collections::HashMap;
 use std::io::Cursor;
 use std::sync::LazyLock;

+mod error;
 mod macros;
 mod worksheet;

-pub mod error {
-    use derive_more::{Display, Error};
-    use serde::{Serialize, Serializer};
-    use std::sync::Arc;
-    use utoipa::ToSchema;
-
-    #[derive(Clone, Debug, Display, Error, ToSchema)]
-    #[display("row {row}, column {column}")]
-    pub struct ErrorCellPos {
-        pub row: u32,
-        pub column: u32,
-    }
-
-    #[derive(Clone, Debug, Display, Error, ToSchema)]
-    #[display("'{data}' at {pos}")]
-    pub struct ErrorCell {
-        pub pos: ErrorCellPos,
-        pub data: String,
-    }
-
-    impl ErrorCell {
-        pub fn new(row: u32, column: u32, data: String) -> Self {
-            Self {
-                pos: ErrorCellPos { row, column },
-                data,
-            }
-        }
-    }
-
-    #[derive(Clone, Debug, Display, Error, ToSchema)]
-    pub enum Error {
-        /// Errors related to reading XLS file.
-        #[display("{_0:?}: Failed to read XLS file.")]
-        #[schema(value_type = String)]
-        BadXLS(Arc<calamine::XlsError>),
-
-        /// Not a single sheet was found.
-        #[display("No work sheets found.")]
-        NoWorkSheets,
-
-        /// There are no data on the boundaries of the sheet.
-        #[display("There is no data on work sheet boundaries.")]
-        UnknownWorkSheetRange,
-
-        /// Failed to read the beginning and end of the lesson from the cell
-        #[display("Failed to read lesson start and end from {_0}.")]
-        LessonBoundaries(ErrorCell),
-
-        /// Not found the beginning and the end corresponding to the lesson.
-        #[display("No start and end times matching the lesson (at {_0}) was found.")]
-        LessonTimeNotFound(ErrorCellPos),
-    }
-
-    impl Serialize for Error {
-        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-        where
-            S: Serializer,
-        {
-            match self {
-                Error::BadXLS(_) => serializer.serialize_str("BAD_XLS"),
-                Error::NoWorkSheets => serializer.serialize_str("NO_WORK_SHEETS"),
-                Error::UnknownWorkSheetRange => {
-                    serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE")
-                }
-                Error::LessonBoundaries(_) => serializer.serialize_str("GLOBAL_TIME"),
-                Error::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"),
-            }
-        }
-    }
-}
-
 /// Data cell storing the group name.
-pub struct GroupCellInfo {
+pub struct GroupMarkup {
     /// Column index.
     pub column: u32,
@@ -97,7 +27,7 @@ pub struct GroupCellInfo {
 }

 /// Data cell storing the line.
-pub struct DayCellInfo {
+pub struct DayMarkup {
     /// Line index.
     pub row: u32,
@@ -111,8 +41,13 @@ pub struct DayCellInfo {
     pub date: DateTime<Utc>,
 }

+pub struct WorkSheetMarkup {
+    days: Box<[DayMarkup]>,
+    groups: Box<[GroupMarkup]>,
+}
+
 /// Data on the time of lessons from the second column of the schedule.
-pub struct BoundariesCellInfo {
+pub struct BoundariesData {
     /// Temporary segment of the lesson.
     pub time_range: LessonBoundaries,
@@ -123,23 +58,26 @@ pub struct BoundariesCellInfo {
     pub default_index: Option<u32>,

     /// The frame of the cell.
-    pub xls_range: ((u32, u32), (u32, u32)),
+    pub range: CellRange,
 }

 /// Obtaining a "skeleton" schedule from the working sheet.
-fn parse_skeleton(
-    worksheet: &WorkSheet,
-) -> Result<(Vec<DayCellInfo>, Vec<GroupCellInfo>), crate::parser::error::Error> {
-    let mut groups: Vec<GroupCellInfo> = Vec::new();
-    let mut days: Vec<(u32, String, Option<DateTime<Utc>>)> = Vec::new();
-
-    let worksheet_start = worksheet
-        .start()
-        .ok_or(error::Error::UnknownWorkSheetRange)?;
-    let worksheet_end = worksheet.end().ok_or(error::Error::UnknownWorkSheetRange)?;
-
-    let mut row = worksheet_start.0;
-    while row < worksheet_end.0 {
+fn parse_markup(worksheet: &WorkSheet) -> Result<WorkSheetMarkup> {
+    struct PartialDayMarkup {
+        row: u32,
+        name: String,
+        date: Option<DateTime<Utc>>,
+    }
+
+    let mut groups: Vec<GroupMarkup> = Vec::new();
+    let mut days: Vec<PartialDayMarkup> = Vec::new();
+
+    let (start_row, start_col) = worksheet.start().ok_or(Error::UnknownWorkSheetRange)?;
+    let (end_row, end_col) = worksheet.end().ok_or(Error::UnknownWorkSheetRange)?;
+
+    let mut row = start_row;
+    while row < end_row {
         row += 1;

         let day_full_name = or_continue!(worksheet.get_string_from_cell(row, 0));
@@ -149,8 +87,8 @@ fn parse_skeleton(
             // переход на предыдущую строку
             row -= 1;

-            for column in (worksheet_start.1 + 2)..=worksheet_end.1 {
-                groups.push(GroupCellInfo {
+            for column in (start_col + 2)..=end_col {
+                groups.push(GroupMarkup {
                     column,
                     name: or_continue!(worksheet.get_string_from_cell(row, column))
                         .replace(" ", ""),
@@ -183,37 +121,44 @@ fn parse_skeleton(
             (name, date)
         };

-        days.push((row, day_name, day_date));
+        days.push(PartialDayMarkup {
+            row,
+            name: day_name,
+            date: day_date,
+        });
     }

     // fix unparsable day dates
     let days_max = days.len().min(5);

     for i in 0..days_max {
-        if days[i].2.is_none() && days[i + 1].2.is_some() {
-            days[i].2 = Some(days[i + 1].2.unwrap() - Duration::days(1));
+        if days[i].date.is_none() && days[i + 1].date.is_some() {
+            days[i].date = Some(days[i + 1].date.unwrap() - Duration::days(1));
         }
     }

     for i in 0..days_max {
         let i = days_max - i;

-        if days[i - 1].2.is_none() && days[i].2.is_some() {
-            days[i - 1].2 = Some(days[i].2.unwrap() - Duration::days(1));
+        if days[i - 1].date.is_none() && days[i].date.is_some() {
+            days[i - 1].date = Some(days[i].date.unwrap() - Duration::days(1));
         }
     }

     let days = days
         .into_iter()
-        .map(|day| DayCellInfo {
-            row: day.0,
+        .map(|day| DayMarkup {
+            row: day.row,
             column: 0,
-            name: day.1,
-            date: day.2.unwrap(),
+            name: day.name,
+            date: day.date.unwrap(),
         })
         .collect();

-    Ok((days, groups))
+    Ok(WorkSheetMarkup {
+        days,
+        groups: groups.into_boxed_slice(),
+    })
 }

 /// The result of obtaining a lesson from the cell.
@@ -230,9 +175,10 @@ enum LessonParseResult {
 // noinspection GrazieInspection
 /// Obtaining a non-standard type of lesson by name.
-fn guess_lesson_type(text: &String) -> Option<LessonType> {
+fn guess_lesson_type(text: &str) -> Option<LessonType> {
     static MAP: LazyLock<HashMap<&str, LessonType>> = LazyLock::new(|| {
         HashMap::from([
+            ("о важном", LessonType::Additional),
             ("консультация", LessonType::Consultation),
             ("самостоятельная работа", LessonType::IndependentWork),
             ("зачет", LessonType::Exam),
@@ -240,31 +186,28 @@ fn guess_lesson_type(text: &String) -> Option<LessonType> {
             ("экзамен", LessonType::ExamDefault),
             ("курсовой проект", LessonType::CourseProject),
             ("защита курсового проекта", LessonType::CourseProjectDefense),
+            ("практическое занятие", LessonType::Practice),
         ])
     });

     let name_lower = text.to_lowercase();

-    match MAP
-        .iter()
-        .map(|(text, lesson_type)| (lesson_type, strsim::levenshtein(text, &*name_lower)))
+    MAP.iter()
+        .map(|(text, lesson_type)| (lesson_type, strsim::levenshtein(text, &name_lower)))
         .filter(|x| x.1 <= 4)
         .min_by_key(|(_, score)| *score)
-    {
-        None => None,
-        Some(v) => Some(v.0.clone()),
-    }
+        .map(|v| v.0.clone())
 }

 /// Getting a pair or street from a cell.
 fn parse_lesson(
     worksheet: &WorkSheet,
     day: &Day,
-    day_boundaries: &Vec<BoundariesCellInfo>,
-    lesson_boundaries: &BoundariesCellInfo,
+    day_boundaries: &[BoundariesData],
+    lesson_boundaries: &BoundariesData,
     group_column: u32,
-) -> Result<LessonParseResult, crate::parser::error::Error> {
-    let row = lesson_boundaries.xls_range.0.0;
+) -> Result<LessonParseResult> {
+    let row = lesson_boundaries.range.start.row;

     let name = {
         let cell_data = match worksheet.get_string_from_cell(row, group_column) {
@@ -273,7 +216,7 @@ fn parse_lesson(
         };

         static OTHER_STREET_RE: LazyLock<Regex> =
-            LazyLock::new(|| Regex::new(r"^[А-Я][а-я]+[,\s]\d+$").unwrap());
+            LazyLock::new(|| Regex::new(r"^[А-Я][а-я]+[,\s]+д\.\s\d+$").unwrap());

         if OTHER_STREET_RE.is_match(&cell_data) {
             return Ok(Street(cell_data));
@@ -282,22 +225,24 @@ fn parse_lesson(
         cell_data
     };

-    let cell_range = worksheet.get_merge_from_start(row, group_column);
+    let lesson_cell_range = worksheet.get_merge_from_start(row, group_column);

     let (default_range, lesson_time) = {
         let end_time_arr = day_boundaries
             .iter()
-            .filter(|time| time.xls_range.1.0 == cell_range.1.0)
-            .collect::<Vec<&BoundariesCellInfo>>();
+            .filter(
+                |BoundariesData {
+                     range: CellRange { end, .. },
+                     ..
+                 }| { lesson_cell_range.end.row <= end.row },
+            )
+            .collect::<Vec<&BoundariesData>>();

         let end_time = end_time_arr
             .first()
-            .ok_or(error::Error::LessonTimeNotFound(ErrorCellPos {
-                row,
-                column: group_column,
-            }))?;
+            .ok_or(Error::LessonTimeNotFound(CellPos::new(row, group_column)))?;

-        let range: Option<[u8; 2]> = if lesson_boundaries.default_index != None {
+        let range: Option<[u8; 2]> = if lesson_boundaries.default_index.is_some() {
             let default = lesson_boundaries.default_index.unwrap() as u8;
             Some([default, end_time.default_index.unwrap() as u8])
         } else {
@@ -309,15 +254,19 @@ fn parse_lesson(
             end: end_time.time_range.end,
         };

-        Ok((range, time))
-    }?;
+        (range, time)
+    };

-    let (name, mut subgroups, lesson_type) = parse_name_and_subgroups(&name)?;
+    let ParsedLessonName {
+        name,
+        mut subgroups,
+        r#type: lesson_type,
+    } = parse_name_and_subgroups(&name, row, group_column)?;

     {
         let cabinets: Vec<String> = parse_cabinets(
             worksheet,
-            (cell_range.0.0, cell_range.1.0),
+            (lesson_cell_range.start.row, lesson_cell_range.end.row),
             group_column + 1,
         );
@@ -325,13 +274,11 @@ fn parse_lesson(
         if cab_count == 1 {
             // Назначаем этот кабинет всем подгруппам
-            let cab = Some(cabinets.get(0).unwrap().clone());
+            let cab = Some(cabinets.first().unwrap().clone());

-            for subgroup in &mut subgroups {
-                if let Some(subgroup) = subgroup {
-                    subgroup.cabinet = cab.clone()
-                }
+            for subgroup in subgroups.iter_mut().flatten() {
+                subgroup.cabinet = cab.clone()
             }
         } else if cab_count == 2 {
             while subgroups.len() < cab_count {
                 subgroups.push(subgroups.last().unwrap_or(&None).clone());
@@ -361,10 +308,7 @@ fn parse_lesson(
         range: default_range,
         name: Some(name),
         time: lesson_time,
-        subgroups: if subgroups.len() == 2
-            && subgroups.get(0).unwrap().is_none()
-            && subgroups.get(1).unwrap().is_none()
-        {
+        subgroups: if subgroups.len() == 2 && subgroups.iter().all(|x| x.is_none()) {
             None
         } else {
             Some(subgroups)
@@ -416,123 +360,144 @@ fn parse_cabinets(worksheet: &WorkSheet, row_range: (u32, u32), column: u32) ->
     cabinets
 }

+struct ParsedLessonName {
+    name: String,
+    subgroups: Vec<Option<LessonSubGroup>>,
+    r#type: Option<LessonType>,
+}
+
 //noinspection GrazieInspection
 /// Getting the "pure" name of the lesson and list of teachers from the text of the lesson cell.
-fn parse_name_and_subgroups(
-    text: &String,
-) -> Result<(String, Vec<Option<LessonSubGroup>>, Option<LessonType>), crate::parser::error::Error>
-{
+fn parse_name_and_subgroups(text: &str, row: u32, column: u32) -> Result<ParsedLessonName> {
     // Части названия пары:
     // 1. Само название.
     // 2. Список преподавателей и подгрупп.
     // 3. "Модификатор" (чаще всего).
     //
     // Регулярное выражение для получения ФИО преподавателей и номеров подгрупп (aka. второй части).
-    // (?:[А-Я][а-я]+\s?(?:[А-Я][\s.]*){2}(?:\(\d\s?[а-я]+\))?(?:, )?)+[\s.]*
-    //
-    // Подробнее:
-    // (?:
-    //     [А-Я][а-я]+        - Фамилия.
-    //     \s?                - Кто знает, будет ли там пробел.
-    //     (?:[А-Я][\s.]*){2} - Имя и отчество с учётом случайных пробелов и точек.
-    //     (?:
-    //         \(             - Открытие подгруппы.
-    //         \s?            - Кто знает, будет ли там пробел.
-    //         \d             - Номер подгруппы.
-    //         \s?            - Кто знает, будет ли там пробел.
-    //         [а-я\s]+       - Слово "подгруппа" с учётов ошибок.
-    //         \)             - Закрытие подгруппы.
-    //     )?                 - Явное указание подгруппы может отсутствовать по понятным причинам.
-    //     (?:, )?            - Разделители между отдельными частями.
-    // )+
-    // [\s.]* - Забираем с собой всякий мусор, что бы не передать его в третью часть.
-    static NAMES_REGEX: LazyLock<Regex> = LazyLock::new(|| {
-        Regex::new(
-            r"(?:[А-Я][а-я]+\s?(?:[А-Я][\s.]*){2}(?:\(\s*\d\s*[а-я\s]+\))?(?:[\s,]+)?){1,2}+[\s.,]*",
-        )
+    static NAME_RE: LazyLock<fancy_regex::Regex> = LazyLock::new(|| {
+        fancy_regex::Regex::new(
+            r"([А-Я][а-я]+(?:[\s.]*[А-Я]){1,2})(?=[^А-Яа-я])[.\s]*(?:\(?(\d)[\sа-я]*\)?)?",
+        )
         .unwrap()
     });

-    // Отчистка
-    static CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\s\n\t]+").unwrap());
-
-    let text = CLEAN_RE
-        .replace(&text.replace(&[' ', '\t', '\n'], " "), " ")
-        .to_string();
+    let text = text
+        .chars()
+        .filter(|c: &char| {
+            c.is_whitespace()
+                || c.is_ascii_digit()
+                || (*c >= 'а' && *c <= 'я')
+                || (*c >= 'А' && *c <= 'Я')
+                || *c == '.'
+                || *c == '-'
+        })
+        .collect::<String>()
+        .replace(r"\s+", " ");

-    let (lesson_name, subgroups, lesson_type) = match NAMES_REGEX.captures(&text) {
-        Some(captures) => {
-            let capture = captures.get(0).unwrap();
-
-            let subgroups: Vec<Option<LessonSubGroup>> = {
-                let src = capture.as_str().replace(&[' ', '.'], "");
-                let mut shared_subgroup = false;
-
-                let mut subgroups: [Option<LessonSubGroup>; 2] = [None, None];
-
-                for name in src.split(',') {
-                    let open_bracket_index = name.find('(');
-
-                    let number: u8 = open_bracket_index
-                        .map_or(0, |index| name[(index + 1)..(index + 2)].parse().unwrap());
+    let mut lesson_name: Option<&str> = None;
+    let mut extra: Option<&str> = None;
+
+    let mut shared_subgroup = true;
+    let mut subgroups: [Option<LessonSubGroup>; 2] = [None, None];
+
+    for capture in NAME_RE.captures_iter(&text).take(2) {
+        let capture = capture.unwrap();
+
+        if lesson_name.is_none() {
+            lesson_name = Some(&text[..capture.get(0).unwrap().start()]);
+        }
+        extra = Some(&text[capture.get(0).unwrap().end()..]);

         let teacher_name = {
-            let name_end = open_bracket_index.unwrap_or_else(|| name.len());
-
-            // Я ебал. Как же я долго до этого доходил.
-            format!(
-                "{} {}.{}.",
-                name.get(..name_end - 4).unwrap(),
-                name.get(name_end - 4..name_end - 2).unwrap(),
-                name.get(name_end - 2..name_end).unwrap(),
-            )
+            let clean = capture
+                .get(1)
+                .unwrap()
+                .as_str()
+                .chars()
+                .filter(|c| c.is_alphabetic())
+                .collect::<Vec<char>>();
+
+            if clean.get(clean.len() - 2).is_some_and(|c| c.is_uppercase()) {
+                let (name, remaining) = clean.split_at(clean.len() - 2);
+                format!(
+                    "{} {}.{}.",
+                    name.iter().collect::<String>(),
+                    remaining[0],
+                    remaining[1]
+                )
+            } else {
+                let (remaining, name) = clean.split_last().unwrap();
+                format!("{} {}.", name.iter().collect::<String>(), remaining)
+            }
         };

-        let lesson = Some(LessonSubGroup {
+        let subgroup_index = capture.get(2).map(|m| m.as_str().parse::<u32>().unwrap());
+        let subgroup = Some(LessonSubGroup {
             cabinet: None,
             teacher: Some(teacher_name),
         });

-        match number {
-            0 => {
-                subgroups[0] = lesson;
-                subgroups[1] = None;
-                shared_subgroup = true;
-                break;
-            }
-            num => {
+        match subgroup_index {
+            None => {
+                // we have only 2 matches max so more than 2 subgroups we cant have 100%
+                *subgroups.iter_mut().find(|x| x.is_none()).unwrap() = subgroup;
+            }
+            Some(num) => {
+                // bc we have indexed subgroup
+                shared_subgroup = false;
                 // 1 - 1 = 0 | 2 - 1 = 1 | 3 - 1 = 2 (schedule index to array index)
                 // 0 % 2 = 0 | 1 % 2 = 1 | 2 % 2 = 0 (clamp)
-                let normalised = (num - 1) % 2;
-                subgroups[normalised as usize] = lesson;
+                let subgroup_index = ((num - 1) % 2) as usize;
+                // if we have subgroup in that index (probably non-indexed, we change it index to free)
+                if subgroups[subgroup_index].is_some() {
+                    subgroups.swap(0, 1);
+                }
+                subgroups[subgroup_index] = subgroup;
             }
         }
     }

-    if shared_subgroup {
-        Vec::from([subgroups[0].take()])
-    } else {
-        Vec::from(subgroups)
-    }
-    };
+    let subgroups = if lesson_name.is_none() {
+        Vec::new()
+    } else if shared_subgroup {
+        Vec::from([subgroups.into_iter().next().unwrap()])
+    } else {
+        Vec::from(subgroups)
+    };

-    let name = text[..capture.start()].trim().to_string();
-    let extra = text[capture.end()..].trim().to_string();
+    if extra.is_none() {
+        extra = text
+            .rfind(" ")
+            .and_then(|i| text[..i].rfind(" "))
+            .map(|i| &text[i + 1..]);
+    }

-    let lesson_type = if extra.len() > 4 {
-        let result = guess_lesson_type(&extra);
+    let lesson_type = if let Some(extra) = extra
+        && extra.len() > 4
+    {
+        let result = guess_lesson_type(extra);

         if result.is_none() {
             #[cfg(not(debug_assertions))]
-            sentry::capture_message(
-                &*format!("Не удалось угадать тип пары '{}'!", extra),
-                sentry::Level::Warning,
-            );
+            sentry::capture_error(&Error::UnknownLessonType {
+                r#type: extra.to_string(),
+                pos: CellPos::new(row, column),
+            });
             #[cfg(debug_assertions)]
-            log::warn!("Не удалось угадать тип пары '{}'!", extra);
+            log::warn!(
+                "{}",
+                Error::UnknownLessonType {
+                    r#type: extra.to_string(),
+                    pos: CellPos::new(row, column),
+                }
+            );
         }

         result
@@ -540,12 +505,11 @@ fn parse_name_and_subgroups(
     } else {
         None
     };

-            (name, subgroups, lesson_type)
-        }
-        None => (text, Vec::new(), None),
-    };
-
-    Ok((lesson_name, subgroups, lesson_type))
+    Ok(ParsedLessonName {
+        name: lesson_name.unwrap_or(&text).to_string(),
+        subgroups,
+        r#type: lesson_type,
+    })
 }

 /// Getting the start and end of a pair from a cell in the first column of a document.
@@ -554,18 +518,11 @@ fn parse_name_and_subgroups(
 ///
 /// * `cell_data`: text in cell.
 /// * `date`: date of the current day.
-fn parse_lesson_boundaries_cell(
-    cell_data: &String,
-    date: DateTime<Utc>,
-) -> Option<LessonBoundaries> {
+fn parse_lesson_boundaries_cell(cell_data: &str, date: DateTime<Utc>) -> Option<LessonBoundaries> {
     static TIME_RE: LazyLock<Regex> =
         LazyLock::new(|| Regex::new(r"(\d+\.\d+)-(\d+\.\d+)").unwrap());

-    let parse_res = if let Some(captures) = TIME_RE.captures(cell_data) {
-        captures
-    } else {
-        return None;
-    };
+    let parse_res = TIME_RE.captures(cell_data)?;

     let start_match = parse_res.get(1).unwrap().as_str();
     let start_parts: Vec<&str> = start_match.split(".").collect();
@@ -579,7 +536,7 @@ fn parse_lesson_boundaries_cell(
     };

     Some(LessonBoundaries {
-        start: GET_TIME(date.clone(), &start_parts),
+        start: GET_TIME(date, &start_parts),
         end: GET_TIME(date, &end_parts),
     })
 }
@@ -597,8 +554,8 @@ fn parse_day_boundaries(
     date: DateTime<Utc>,
     row_range: (u32, u32),
     column: u32,
-) -> Result<Vec<BoundariesCellInfo>, crate::parser::error::Error> {
-    let mut day_times: Vec<BoundariesCellInfo> = Vec::new();
+) -> Result<Vec<BoundariesData>> {
+    let mut day_times: Vec<BoundariesData> = Vec::new();

     for row in row_range.0..row_range.1 {
         let time_cell = if let Some(str) = worksheet.get_string_from_cell(row, column) {
@@ -607,9 +564,8 @@ fn parse_day_boundaries(
             continue;
         };

-        let lesson_time = parse_lesson_boundaries_cell(&time_cell, date.clone()).ok_or(
-            error::Error::LessonBoundaries(ErrorCell::new(row, column, time_cell.clone())),
-        )?;
+        let lesson_time = parse_lesson_boundaries_cell(&time_cell, date)
+            .ok_or(Error::NoLessonBoundaries(CellPos::new(row, column)))?;

         // type
         let lesson_type = if time_cell.contains("пара") {
@@ -633,11 +589,11 @@ fn parse_day_boundaries(
             None
         };

-        day_times.push(BoundariesCellInfo {
+        day_times.push(BoundariesData {
             time_range: lesson_time,
             lesson_type,
             default_index,
-            xls_range: worksheet.get_merge_from_start(row, column),
+            range: worksheet.get_merge_from_start(row, column),
         });
     }
@@ -652,9 +608,9 @@ fn parse_day_boundaries(
 /// * `week_markup`: markup of the current week.
 fn parse_week_boundaries(
     worksheet: &WorkSheet,
-    week_markup: &Vec<DayCellInfo>,
-) -> Result<Vec<Vec<BoundariesCellInfo>>, crate::parser::error::Error> {
-    let mut result: Vec<Vec<BoundariesCellInfo>> = Vec::new();
+    week_markup: &[DayMarkup],
+) -> Result<Vec<Vec<BoundariesData>>> {
+    let mut result: Vec<Vec<BoundariesData>> = Vec::new();

     let worksheet_end_row = worksheet.end().unwrap().0;
     let lesson_time_column = week_markup[0].column + 1;
@@ -671,8 +627,8 @@ fn parse_week_boundaries(
         };

         let day_boundaries = parse_day_boundaries(
-            &worksheet,
-            day_markup.date.clone(),
+            worksheet,
+            day_markup.date,
             (day_markup.row, end_row),
             lesson_time_column,
         )?;
@@ -698,7 +654,7 @@ fn convert_groups_to_teachers(
         .map(|day| Day {
             name: day.name.clone(),
             street: day.street.clone(),
-            date: day.date.clone(),
+            date: day.date,
             lessons: vec![],
         })
         .collect();
@@ -773,35 +729,21 @@ fn convert_groups_to_teachers(
 ///
 /// * `buffer`: XLS data containing schedule.
 ///
-/// returns: Result<ParseResult, crate::parser::error::Error>
-///
-/// # Examples
-///
-/// ```
-/// use schedule_parser::parse_xls;
-///
-/// let result = parse_xls(&include_bytes!("../../schedule.xls").to_vec());
-///
-/// assert!(result.is_ok(), "{}", result.err().unwrap());
-///
-/// assert_ne!(result.as_ref().unwrap().groups.len(), 0);
-/// assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
-/// ```
-pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule, crate::parser::error::Error> {
+/// returns: Result<ParseResult, Error>
+pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule> {
     let cursor = Cursor::new(&buffer);
-    let mut workbook: Xls<_> =
-        open_workbook_from_rs(cursor).map_err(|e| error::Error::BadXLS(std::sync::Arc::new(e)))?;
+    let mut workbook: Xls<_> = open_workbook_from_rs(cursor)?;

     let worksheet = {
         let (worksheet_name, worksheet) = workbook
             .worksheets()
             .first()
-            .ok_or(error::Error::NoWorkSheets)?
+            .ok_or(Error::NoWorkSheets)?
             .clone();

         let worksheet_merges = workbook
-            .worksheet_merge_cells(&*worksheet_name)
-            .ok_or(error::Error::NoWorkSheets)?;
+            .worksheet_merge_cells(&worksheet_name)
+            .ok_or(Error::NoWorkSheets)?;

         WorkSheet {
             data: worksheet,
@@ -809,7 +751,11 @@ pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule, crate::parser::erro
         }
     };

-    let (week_markup, groups_markup) = parse_skeleton(&worksheet)?;
+    let WorkSheetMarkup {
+        days: week_markup,
+        groups: groups_markup,
+    } = parse_markup(&worksheet)?;
+
     let week_boundaries = parse_week_boundaries(&worksheet, &week_markup)?;

     let mut groups: HashMap<String, ScheduleEntry> = HashMap::new();
@@ -820,7 +766,7 @@ pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule, crate::parser::erro
         days: Vec::new(),
     };

-    for day_index in 0..(&week_markup).len() {
+    for day_index in 0..week_markup.len() {
         let day_markup = &week_markup[day_index];

         let mut day = Day {
@@ -836,8 +782,8 @@ pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule, crate::parser::erro
             match &mut parse_lesson(
                 &worksheet,
                 &day,
-                &day_boundaries,
-                &lesson_boundaries,
+                day_boundaries,
+                lesson_boundaries,
                 group_markup.column,
             )? {
                 Lessons(lesson) => day.lessons.append(lesson),
@@ -862,7 +808,7 @@ pub mod test_utils {
     use super::*;
     use base::ParsedSchedule;

-    pub fn test_result() -> Result<ParsedSchedule, crate::parser::error::Error> {
+    pub fn test_result() -> Result<ParsedSchedule> {
         parse_xls(&include_bytes!("../../../../test-data/engels-polytechnic.xls").to_vec())
     }
 }


@@ -1,4 +1,5 @@
 use regex::Regex;
+use std::fmt::{Display, Formatter};
 use std::ops::Deref;
 use std::sync::LazyLock;
@@ -8,6 +9,40 @@ pub struct WorkSheet {
     pub merges: Vec<calamine::Dimensions>,
 }

+#[derive(Clone, Debug, derive_more::Error)]
+pub struct CellPos {
+    pub row: u32,
+    pub column: u32,
+}
+
+fn format_column_index(index: u32) -> String {
+    // https://stackoverflow.com/a/297214
+    let quotient = index / 26;
+    let char = char::from((65 + (index % 26)) as u8);
+
+    if quotient > 0 {
+        return format!("{}{}", format_column_index(quotient - 1), char);
+    }
+
+    return char.to_string();
+}
+
+impl Display for CellPos {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        f.write_fmt(format_args!(
+            "column {}, row {}",
+            format_column_index(self.column),
+            self.row + 1,
+        ))
+    }
+}
+
+pub struct CellRange {
+    pub start: CellPos,
+    pub end: CellPos,
+}
+
 impl Deref for WorkSheet {
     type Target = calamine::Range<calamine::Data>;
@@ -45,14 +80,26 @@ impl WorkSheet {
     }

     /// Obtaining the boundaries of the cell along its upper left coordinate.
-    pub fn get_merge_from_start(&self, row: u32, column: u32) -> ((u32, u32), (u32, u32)) {
+    pub fn get_merge_from_start(&self, row: u32, column: u32) -> CellRange {
         match self
             .merges
             .iter()
             .find(|merge| merge.start.0 == row && merge.start.1 == column)
         {
-            Some(merge) => (merge.start, (merge.end.0 + 1, merge.end.1 + 1)),
-            None => ((row, column), (row + 1, column + 1)),
+            Some(merge) => CellRange {
+                start: CellPos::new(merge.start.0, merge.start.1),
+                end: CellPos::new(merge.end.0 + 1, merge.end.1 + 1),
+            },
+            None => CellRange {
+                start: CellPos::new(row, column),
+                end: CellPos::new(row + 1, column + 1),
+            },
         }
     }
 }
+
+impl CellPos {
+    pub fn new(row: u32, column: u32) -> Self {
+        Self { row, column }
+    }
+}
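
For reference, the recursive base-26 conversion above maps zero-based column indices to spreadsheet letters. A standalone copy (nothing assumed beyond the code in this diff) behaves like this:

```rust
// Self-contained copy of the helper for a quick sanity check:
// 0 -> "A", 25 -> "Z", 26 -> "AA", 701 -> "ZZ", 702 -> "AAA".
fn format_column_index(index: u32) -> String {
    let quotient = index / 26;
    let ch = char::from(b'A' + (index % 26) as u8);
    if quotient > 0 {
        return format!("{}{}", format_column_index(quotient - 1), ch);
    }
    ch.to_string()
}

fn main() {
    for (index, expected) in [(0, "A"), (25, "Z"), (26, "AA"), (701, "ZZ"), (702, "AAA")] {
        assert_eq!(format_column_index(index), expected);
    }
    // So CellPos { row: 0, column: 27 } displays as "column AB, row 1".
}
```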

View File

@@ -0,0 +1,33 @@
use crate::xls_downloader::FetchError;
use derive_more::{Display, Error, From};
#[derive(Debug, Display, Error, From)]
pub enum Error {
/// Occurs when the request to the Yandex Cloud API fails.
///
/// This may be due to network issues, invalid API key, incorrect function ID, or other
/// problems with the Yandex Cloud Function invocation.
#[display("An error occurred during the request to the Yandex Cloud API: {_0}")]
Reqwest(reqwest::Error),
#[display("Unable to get URI in 3 retries")]
EmptyUri,
/// The ETag is the same (no update needed).
#[display("The ETag is the same.")]
SameETag,
/// The URL query for the XLS file failed to execute, either due to network issues or invalid API parameters.
#[display("Failed to fetch URL: {_0}")]
ScheduleFetchFailed(FetchError),
/// Downloading the XLS file content failed after successfully obtaining the URL.
#[display("Download failed: {_0}")]
ScheduleDownloadFailed(FetchError),
/// The XLS file could not be parsed into a valid schedule format.
#[from]
InvalidSchedule(crate::parser::Error),
}
pub type Result<T> = core::result::Result<T, Error>;
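
The `#[from]` on `InvalidSchedule` plus the module-wide `Result<T>` alias is what lets the callers below drop their `map_err` chains in favor of plain `?`. A minimal sketch of the same derive_more pattern (illustrative names, assuming derive_more's `Display`/`Error`/`From` derives as used in this repo):

```rust
use derive_more::{Display, Error, From};

#[derive(Debug, Display, Error, From)]
enum Error {
    // `#[from]` generates `From<ParseIntError> for Error`,
    // so `?` converts the inner error automatically.
    #[display("not a number: {_0}")]
    #[from]
    BadNumber(std::num::ParseIntError),
}

type Result<T> = core::result::Result<T, Error>;

fn parse(s: &str) -> Result<u32> {
    Ok(s.parse::<u32>()?) // ParseIntError -> Error::BadNumber via #[from]
}

fn main() {
    assert_eq!(parse("42").unwrap(), 42);
    assert!(parse("nope").is_err());
}
```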

View File

@@ -1,7 +1,8 @@
+pub use self::error::{Error, Result};
 use crate::parser::parse_xls;
-use crate::updater::error::{Error, QueryUrlError, SnapshotCreationError};
 use crate::xls_downloader::{FetchError, XlsDownloader};
 use base::ScheduleSnapshot;
+
+mod error;

 pub enum UpdateSource {
     Prepared(ScheduleSnapshot),
@@ -19,56 +20,6 @@ pub struct Updater {
     update_source: UpdateSource,
 }

-pub mod error {
-    use crate::xls_downloader::FetchError;
-    use derive_more::{Display, Error};
-
-    #[derive(Debug, Display, Error)]
-    pub enum Error {
-        /// An error occurred while querying the Yandex Cloud API for a URL.
-        ///
-        /// This may result from network failures, invalid API credentials, or issues with the Yandex Cloud Function invocation.
-        /// See [`QueryUrlError`] for more details about specific causes.
-        QueryUrlFailed(QueryUrlError),
-
-        /// The schedule snapshot creation process failed.
-        ///
-        /// This can happen due to URL conflicts (same URL already in use), failed network requests,
-        /// download errors, or invalid XLS file content. See [`SnapshotCreationError`] for details.
-        SnapshotCreationFailed(SnapshotCreationError),
-    }
-
-    /// Errors that may occur when querying the Yandex Cloud API to retrieve a URL.
-    #[derive(Debug, Display, Error)]
-    pub enum QueryUrlError {
-        /// Occurs when the request to the Yandex Cloud API fails.
-        ///
-        /// This may be due to network issues, invalid API key, incorrect function ID, or other
-        /// problems with the Yandex Cloud Function invocation.
-        #[display("An error occurred during the request to the Yandex Cloud API: {_0}")]
-        RequestFailed(reqwest::Error),
-    }
-
-    /// Errors that may occur during the creation of a schedule snapshot.
-    #[derive(Debug, Display, Error)]
-    pub enum SnapshotCreationError {
-        /// The URL is the same as the one already being used (no update needed).
-        #[display("The URL is the same as the one already being used.")]
-        SameUrl,
-
-        /// The URL query for the XLS file failed to execute, either due to network issues or invalid API parameters.
-        #[display("Failed to fetch URL: {_0}")]
-        FetchFailed(FetchError),
-
-        /// Downloading the XLS file content failed after successfully obtaining the URL.
-        #[display("Download failed: {_0}")]
-        DownloadFailed(FetchError),
-
-        /// The XLS file could not be parsed into a valid schedule format.
-        #[display("Schedule data is invalid: {_0}")]
-        InvalidSchedule(crate::parser::error::Error),
-    }
-}
-
 impl Updater {
     /// Constructs a new `ScheduleSnapshot` by downloading and parsing schedule data from the specified URL.
     ///
@@ -82,40 +33,33 @@ impl Updater {
     /// * `url`: The source URL pointing to the XLS file containing schedule data.
     ///
     /// returns: Result<ScheduleSnapshot, SnapshotCreationError>
-    pub async fn new_snapshot(
-        downloader: &mut XlsDownloader,
-        url: String,
-    ) -> Result<ScheduleSnapshot, SnapshotCreationError> {
-        if downloader.url.as_ref().is_some_and(|_url| _url.eq(&url)) {
-            return Err(SnapshotCreationError::SameUrl);
-        }
-
-        let head_result = downloader.set_url(&*url).await.map_err(|error| {
-            if let FetchError::Unknown(error) = &error {
+    async fn new_snapshot(downloader: &mut XlsDownloader, url: String) -> Result<ScheduleSnapshot> {
+        let head_result = downloader.set_url(&url).await.map_err(|error| {
+            if let FetchError::Reqwest(error) = &error {
                 sentry::capture_error(&error);
             }
-            SnapshotCreationError::FetchFailed(error)
+            Error::ScheduleFetchFailed(error)
        })?;

+        if downloader.etag == Some(head_result.etag) {
+            return Err(Error::SameETag);
+        }
+
         let xls_data = downloader
             .fetch(false)
             .await
             .map_err(|error| {
-                if let FetchError::Unknown(error) = &error {
+                if let FetchError::Reqwest(error) = &error {
                     sentry::capture_error(&error);
                 }
-                SnapshotCreationError::DownloadFailed(error)
+                Error::ScheduleDownloadFailed(error)
             })?
             .data
             .unwrap();

-        let parse_result = parse_xls(&xls_data).map_err(|error| {
-            sentry::capture_error(&error);
-            SnapshotCreationError::InvalidSchedule(error)
-        })?;
+        let parse_result = parse_xls(&xls_data)?;

         Ok(ScheduleSnapshot {
             fetched_at: head_result.requested_at,
@@ -141,10 +85,24 @@ impl Updater {
     /// Result containing:
     /// - `Ok(String)` - Complete URL constructed from the Function's response
     /// - `Err(QueryUrlError)` - If the request or response processing fails
-    async fn query_url(api_key: &str, func_id: &str) -> Result<String, QueryUrlError> {
+    async fn query_url(api_key: &str, func_id: &str) -> Result<String> {
         let client = reqwest::Client::new();

-        let uri = client
+        let uri = {
+            // wish they'd add named scopes like in Kotlin,
+            // so I wouldn't have to suffer through junk like this.
+            #[allow(unused_assignments)]
+            let mut uri = String::new();
+
+            let mut counter = 0;
+            loop {
+                if counter == 3 {
+                    return Err(Error::EmptyUri);
+                }
+                counter += 1;
+
+                uri = client
                     .post(format!(
                         "https://functions.yandexcloud.net/{}?integration=raw",
                         func_id
@@ -152,10 +110,21 @@ impl Updater {
                     .header("Authorization", format!("Api-Key {}", api_key))
                     .send()
                     .await
-                    .map_err(|error| QueryUrlError::RequestFailed(error))?
+                    .map_err(Error::Reqwest)?
                     .text()
                     .await
-                    .map_err(|error| QueryUrlError::RequestFailed(error))?;
+                    .map_err(Error::Reqwest)?;
+
+                if uri.is_empty() {
+                    log::warn!("[{}] Unable to get uri! Retrying in 5 seconds...", counter);
+                    continue;
+                }
+
+                break;
+            }
+
+            uri
+        };

         Ok(format!("https://politehnikum-eng.ru{}", uri.trim()))
     }
@@ -173,7 +142,7 @@ impl Updater {
     /// Returns `Ok(())` if the snapshot was successfully initialized, or an `Error` if:
     /// - URL query to Yandex Cloud failed ([`QueryUrlError`])
     /// - Schedule snapshot creation failed ([`SnapshotCreationError`])
-    pub async fn new(update_source: UpdateSource) -> Result<(Self, ScheduleSnapshot), Error> {
+    pub async fn new(update_source: UpdateSource) -> Result<(Self, ScheduleSnapshot)> {
         let mut this = Updater {
             downloader: XlsDownloader::new(),
             update_source,
@@ -194,19 +163,14 @@ impl Updater {
                 yandex_func_id,
             } => {
                 log::info!("Obtaining a link using FaaS...");
-                Self::query_url(yandex_api_key, yandex_func_id)
-                    .await
-                    .map_err(|error| Error::QueryUrlFailed(error))?
+                Self::query_url(yandex_api_key, yandex_func_id).await?
             }
             _ => unreachable!(),
         };

         log::info!("For the initial setup, a link {} will be used", url);

-        let snapshot = Self::new_snapshot(&mut this.downloader, url)
-            .await
-            .map_err(|error| Error::SnapshotCreationFailed(error))?;
+        let snapshot = Self::new_snapshot(&mut this.downloader, url).await?;

         log::info!("Schedule snapshot successfully created!");
         Ok((this, snapshot))
@@ -229,7 +193,7 @@ impl Updater {
     pub async fn update(
         &mut self,
         current_snapshot: &ScheduleSnapshot,
-    ) -> Result<ScheduleSnapshot, Error> {
+    ) -> Result<ScheduleSnapshot> {
         if let UpdateSource::Prepared(snapshot) = &self.update_source {
             let mut snapshot = snapshot.clone();
             snapshot.update();
@@ -241,21 +205,19 @@ impl Updater {
             UpdateSource::GrabFromSite {
                 yandex_api_key,
                 yandex_func_id,
-            } => Self::query_url(yandex_api_key.as_str(), yandex_func_id.as_str())
-                .await
-                .map_err(|error| Error::QueryUrlFailed(error))?,
+            } => Self::query_url(yandex_api_key.as_str(), yandex_func_id.as_str()).await?,
             _ => unreachable!(),
         };

         let snapshot = match Self::new_snapshot(&mut self.downloader, url).await {
             Ok(snapshot) => snapshot,
-            Err(SnapshotCreationError::SameUrl) => {
+            Err(Error::SameETag) => {
                 let mut clone = current_snapshot.clone();
                 clone.update();
                 clone
             }
-            Err(error) => return Err(Error::SnapshotCreationFailed(error)),
+            Err(error) => return Err(error),
         };

         Ok(snapshot)
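
Aside: the "named scopes" the comment wishes for do exist in Rust as loop labels; `break 'label value` can yield the URI straight out of the retry loop and removes the need for the `#[allow(unused_assignments)]` placeholder. A sketch with a hypothetical fetcher, not the project's code:

```rust
// `break 'retry uri` both exits the labeled loop and makes the loop
// expression evaluate to `uri`, so no pre-declared mutable binding is needed.
fn query_with_retries(mut fetch: impl FnMut() -> String) -> Result<String, &'static str> {
    let uri = 'retry: loop {
        for attempt in 1..=3 {
            let uri = fetch();
            if uri.is_empty() {
                eprintln!("[{attempt}] Unable to get uri! Retrying...");
                continue;
            }
            break 'retry uri;
        }
        return Err("Unable to get URI in 3 retries");
    };
    Ok(uri)
}

fn main() {
    let mut calls = 0;
    // Hypothetical fetcher that fails twice, then succeeds.
    let result = query_with_retries(|| {
        calls += 1;
        if calls < 3 { String::new() } else { "/uploads/schedule.xls".into() }
    });
    assert_eq!(result.unwrap(), "/uploads/schedule.xls");
}
```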

View File

@@ -14,7 +14,7 @@ pub enum FetchError {
     /// Unknown error.
     #[display("An unknown error occurred while downloading the file.")]
     #[schema(value_type = String)]
-    Unknown(Arc<reqwest::Error>),
+    Reqwest(Arc<reqwest::Error>),

     /// Server returned a status code different from 200.
     #[display("Server returned a status code {status_code}.")]
@@ -31,7 +31,7 @@ pub enum FetchError {
 impl FetchError {
     pub fn unknown(error: Arc<reqwest::Error>) -> Self {
-        Self::Unknown(error)
+        Self::Reqwest(error)
     }

     pub fn bad_status_code(status_code: u16) -> Self {
@@ -66,25 +66,30 @@ pub struct FetchOk {
     /// Date data received.
     pub requested_at: DateTime<Utc>,

+    /// Etag.
+    pub etag: String,
+
     /// File data.
     pub data: Option<Vec<u8>>,
 }

 impl FetchOk {
     /// Result without file content.
-    pub fn head(uploaded_at: DateTime<Utc>) -> Self {
+    pub fn head(uploaded_at: DateTime<Utc>, etag: String) -> Self {
         FetchOk {
             uploaded_at,
             requested_at: Utc::now(),
+            etag,
             data: None,
         }
     }

     /// Full result.
-    pub fn get(uploaded_at: DateTime<Utc>, data: Vec<u8>) -> Self {
+    pub fn get(uploaded_at: DateTime<Utc>, etag: String, data: Vec<u8>) -> Self {
         FetchOk {
             uploaded_at,
             requested_at: Utc::now(),
+            etag,
             data: Some(data),
         }
     }
@@ -94,11 +99,15 @@ pub type FetchResult = Result<FetchOk, FetchError>;

 pub struct XlsDownloader {
     pub url: Option<String>,
+    pub etag: Option<String>,
 }

 impl XlsDownloader {
     pub fn new() -> Self {
-        XlsDownloader { url: None }
+        XlsDownloader {
+            url: None,
+            etag: None,
+        }
     }

     async fn fetch_specified(url: &str, head: bool) -> FetchResult {
@@ -124,9 +133,12 @@ impl XlsDownloader {
             .get("Content-Type")
             .ok_or(FetchError::bad_headers("Content-Type"))?;

-        if !headers.contains_key("etag") {
-            return Err(FetchError::bad_headers("etag"));
-        }
+        let etag = headers
+            .get("etag")
+            .ok_or(FetchError::bad_headers("etag"))?
+            .to_str()
+            .or(Err(FetchError::bad_headers("etag")))?
+            .to_string();

         let last_modified = headers
             .get("last-modified")
@@ -136,14 +148,18 @@ impl XlsDownloader {
             return Err(FetchError::bad_content_type(content_type.to_str().unwrap()));
         }

-        let last_modified = DateTime::parse_from_rfc2822(&last_modified.to_str().unwrap())
+        let last_modified = DateTime::parse_from_rfc2822(last_modified.to_str().unwrap())
             .unwrap()
             .with_timezone(&Utc);

         Ok(if head {
-            FetchOk::head(last_modified)
+            FetchOk::head(last_modified, etag)
         } else {
-            FetchOk::get(last_modified, response.bytes().await.unwrap().to_vec())
+            FetchOk::get(
+                last_modified,
+                etag,
+                response.bytes().await.unwrap().to_vec(),
+            )
         })
     }

@@ -151,14 +167,14 @@ impl XlsDownloader {
         if self.url.is_none() {
             Err(FetchError::NoUrlProvided)
         } else {
-            Self::fetch_specified(&*self.url.as_ref().unwrap(), head).await
+            Self::fetch_specified(self.url.as_ref().unwrap(), head).await
         }
     }

     pub async fn set_url(&mut self, url: &str) -> FetchResult {
         let result = Self::fetch_specified(url, true).await;

-        if let Ok(_) = result {
+        if result.is_ok() {
             self.url = Some(url.to_string());
         }
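
Taken together, the ETag flow amounts to a HEAD request whose `etag` header is compared against the stored value before any download happens. A minimal reqwest sketch of that idea (illustrative names, not the project's API):

```rust
use reqwest::header::ETAG;

// Issue a HEAD request, read the ETag, and report whether it differs from
// the previously stored one. A missing ETag is treated as "changed" so the
// caller falls back to a full download.
async fn schedule_changed(
    client: &reqwest::Client,
    url: &str,
    known_etag: Option<&str>,
) -> Result<bool, reqwest::Error> {
    let response = client.head(url).send().await?;
    let etag = response
        .headers()
        .get(ETAG)
        .and_then(|value| value.to_str().ok())
        .map(str::to_owned);

    Ok(match (known_etag, etag.as_deref()) {
        (Some(old), Some(new)) => old != new,
        _ => true,
    })
}
```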

View File

@@ -1,148 +0,0 @@
-pub mod users {
-    use crate::database::models::User;
-    use crate::database::schema::users::dsl::users;
-    use crate::database::schema::users::dsl::*;
-    use crate::state::AppState;
-    use actix_web::web;
-    use diesel::{insert_into, ExpressionMethods, QueryResult};
-    use diesel::{QueryDsl, RunQueryDsl};
-    use diesel::{SaveChangesDsl, SelectableHelper};
-    use std::ops::DerefMut;
-
-    pub async fn get(state: &web::Data<AppState>, _id: &String) -> QueryResult<User> {
-        users
-            .filter(id.eq(_id))
-            .select(User::as_select())
-            .first(state.get_database().await.deref_mut())
-    }
-
-    pub async fn get_by_username(
-        state: &web::Data<AppState>,
-        _username: &String,
-    ) -> QueryResult<User> {
-        users
-            .filter(username.eq(_username))
-            .select(User::as_select())
-            .first(state.get_database().await.deref_mut())
-    }
-
-    //noinspection RsTraitObligations
-    pub async fn get_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> QueryResult<User> {
-        users
-            .filter(vk_id.eq(_vk_id))
-            .select(User::as_select())
-            .first(state.get_database().await.deref_mut())
-    }
-
-    //noinspection RsTraitObligations
-    pub async fn get_by_telegram_id(
-        state: &web::Data<AppState>,
-        _telegram_id: i64,
-    ) -> QueryResult<User> {
-        users
-            .filter(telegram_id.eq(_telegram_id))
-            .select(User::as_select())
-            .first(state.get_database().await.deref_mut())
-    }
-
-    //noinspection DuplicatedCode
-    pub async fn contains_by_username(state: &web::Data<AppState>, _username: &String) -> bool {
-        // and how the hell am I supposed to shorten this? there are no examples and nothing works
-        // this Rust is really wearing me out
-        match users
-            .filter(username.eq(_username))
-            .count()
-            .get_result::<i64>(state.get_database().await.deref_mut())
-        {
-            Ok(count) => count > 0,
-            Err(_) => false,
-        }
-    }
-
-    //noinspection DuplicatedCode
-    //noinspection RsTraitObligations
-    pub async fn contains_by_vk_id(state: &web::Data<AppState>, _vk_id: i32) -> bool {
-        match users
-            .filter(vk_id.eq(_vk_id))
-            .count()
-            .get_result::<i64>(state.get_database().await.deref_mut())
-        {
-            Ok(count) => count > 0,
-            Err(_) => false,
-        }
-    }
-
-    pub async fn insert(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> {
-        insert_into(users)
-            .values(user)
-            .execute(state.get_database().await.deref_mut())
-    }
-
-    /// Function declaration [User::save][UserSave::save].
-    pub trait UserSave {
-        /// Saves the user's changes to the database.
-        ///
-        /// # Arguments
-        ///
-        /// * `state`: The state of the actix-web application that stores the mutex of the [connection][diesel::PgConnection].
-        ///
-        /// returns: `QueryResult<User>`
-        ///
-        /// # Examples
-        ///
-        /// ```
-        /// use crate::database::driver::users;
-        ///
-        /// #[derive(Deserialize)]
-        /// struct Params {
-        ///     pub username: String,
-        /// }
-        ///
-        /// #[patch("/")]
-        /// async fn patch_user(
-        ///     app_state: web::Data<AppState>,
-        ///     user: SyncExtractor<User>,
-        ///     web::Query(params): web::Query<Params>,
-        /// ) -> web::Json<User> {
-        ///     let mut user = user.into_inner();
-        ///
-        ///     user.username = params.username;
-        ///
-        ///     match user.save(&app_state) {
-        ///         Ok(user) => web::Json(user),
-        ///         Err(e) => {
-        ///             eprintln!("Failed to save user: {e}");
-        ///             panic!();
-        ///         }
-        ///     }
-        /// }
-        /// ```
-        async fn save(&self, state: &web::Data<AppState>) -> QueryResult<User>;
-    }
-
-    /// Implementation of [UserSave][UserSave] trait.
-    impl UserSave for User {
-        async fn save(&self, state: &web::Data<AppState>) -> QueryResult<User> {
-            self.save_changes::<Self>(state.get_database().await.deref_mut())
-        }
-    }
-
-    #[cfg(test)]
-    pub async fn delete_by_username(state: &web::Data<AppState>, _username: &String) -> bool {
-        match diesel::delete(users.filter(username.eq(_username)))
-            .execute(state.get_database().await.deref_mut())
-        {
-            Ok(count) => count > 0,
-            Err(_) => false,
-        }
-    }
-
-    #[cfg(test)]
-    pub async fn insert_or_ignore(state: &web::Data<AppState>, user: &User) -> QueryResult<usize> {
-        insert_into(users)
-            .values(user)
-            .on_conflict_do_nothing()
-            .execute(state.get_database().await.deref_mut())
-    }
-}

View File

@@ -1,3 +0,0 @@
-pub mod driver;
-pub mod models;
-pub mod schema;

View File

@@ -1,87 +0,0 @@
-use actix_macros::ResponderJson;
-use diesel::QueryId;
-use diesel::prelude::*;
-use serde::{Deserialize, Serialize};
-use utoipa::ToSchema;
-
-#[derive(
-    Copy, Clone, PartialEq, Debug, Serialize, Deserialize, diesel_derive_enum::DbEnum, ToSchema,
-)]
-#[ExistingTypePath = "crate::database::schema::sql_types::UserRole"]
-#[DbValueStyle = "UPPERCASE"]
-#[serde(rename_all = "UPPERCASE")]
-pub enum UserRole {
-    Student,
-    Teacher,
-    Admin,
-}
-
-#[derive(
-    Identifiable,
-    AsChangeset,
-    Queryable,
-    QueryId,
-    Selectable,
-    Serialize,
-    Insertable,
-    Debug,
-    ToSchema,
-    ResponderJson,
-)]
-#[diesel(table_name = crate::database::schema::users)]
-#[diesel(treat_none_as_null = true)]
-pub struct User {
-    /// Account UUID.
-    pub id: String,
-
-    /// User name.
-    pub username: String,
-
-    /// BCrypt password hash.
-    pub password: Option<String>,
-
-    /// ID of the linked VK account.
-    pub vk_id: Option<i32>,
-
-    /// JWT access token.
-    pub access_token: Option<String>,
-
-    /// Group.
-    pub group: Option<String>,
-
-    /// Role.
-    pub role: UserRole,
-
-    /// Version of the installed Polytechnic+ application.
-    pub android_version: Option<String>,
-
-    /// ID of the linked Telegram account.
-    pub telegram_id: Option<i64>,
-}
-
-#[derive(
-    Debug,
-    Clone,
-    Serialize,
-    Identifiable,
-    Queryable,
-    Selectable,
-    Insertable,
-    AsChangeset,
-    Associations,
-    ToSchema,
-    ResponderJson,
-)]
-#[diesel(belongs_to(User))]
-#[diesel(table_name = crate::database::schema::fcm)]
-#[diesel(primary_key(user_id))]
-pub struct FCM {
-    /// Account UUID.
-    pub user_id: String,
-
-    /// FCM token.
-    pub token: String,
-
-    /// List of topics subscribed to by the user.
-    pub topics: Vec<Option<String>>,
-}

View File

@@ -1,39 +0,0 @@
-// @generated automatically by Diesel CLI.
-
-pub mod sql_types {
-    #[derive(diesel::query_builder::QueryId, Clone, diesel::sql_types::SqlType)]
-    #[diesel(postgres_type(name = "user_role"))]
-    pub struct UserRole;
-}
-
-diesel::table! {
-    fcm (user_id) {
-        user_id -> Text,
-        token -> Text,
-        topics -> Array<Nullable<Text>>,
-    }
-}
-
-diesel::table! {
-    use diesel::sql_types::*;
-    use super::sql_types::UserRole;
-
-    users (id) {
-        id -> Text,
-        username -> Text,
-        password -> Nullable<Text>,
-        vk_id -> Nullable<Int4>,
-        access_token -> Nullable<Text>,
-        group -> Nullable<Text>,
-        role -> UserRole,
-        android_version -> Nullable<Text>,
-        telegram_id -> Nullable<Int8>,
-    }
-}
-
-diesel::joinable!(fcm -> users (user_id));
-
-diesel::allow_tables_to_appear_in_same_query!(
-    fcm,
-    users,
-);
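
The deleted Diesel schema's role is taken over by SeaORM entities in the separate `database` crate (`database::entity::User`, `ActiveUser`, `UserColumn` in the hunks below). That crate is not part of this diff, so the following is only a rough sketch of what the replacement entity could look like; note that the `access_token` column disappears, matching the `ActiveUser` initializers later in this commit:

```rust
use sea_orm::entity::prelude::*;

// Hypothetical SeaORM counterpart of the old `users` table.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "users")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: String,
    pub username: String,
    pub password: Option<String>,
    pub vk_id: Option<i32>,
    pub group: Option<String>,
    pub role: UserRole,
    pub android_version: Option<String>,
    pub telegram_id: Option<i64>,
}

// Mirrors the old UPPERCASE Postgres enum values.
#[derive(Clone, Debug, PartialEq, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "user_role")]
pub enum UserRole {
    #[sea_orm(string_value = "STUDENT")]
    Student,
    #[sea_orm(string_value = "TEACHER")]
    Teacher,
    #[sea_orm(string_value = "ADMIN")]
    Admin,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
```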

View File

@@ -1,5 +1,3 @@
-use crate::database::driver;
-use crate::database::models::User;
 use crate::extractors::base::FromRequestAsync;
 use crate::state::AppState;
 use crate::utility::jwt;
@@ -8,6 +6,8 @@ use actix_web::body::BoxBody;
 use actix_web::dev::Payload;
 use actix_web::http::header;
 use actix_web::{web, HttpRequest};
+use database::entity::User;
+use database::query::Query;
 use derive_more::Display;
 use serde::{Deserialize, Serialize};
 use std::fmt::Debug;
@@ -88,10 +88,20 @@ impl FromRequestAsync for User {
         let user_id = jwt::verify_and_decode(&access_token)
             .map_err(|_| Error::InvalidAccessToken.into_err())?;

-        let app_state = req.app_data::<web::Data<AppState>>().unwrap();
+        let db = req
+            .app_data::<web::Data<AppState>>()
+            .unwrap()
+            .get_database();

-        driver::users::get(app_state, &user_id)
+        Query::find_user_by_id(db, &user_id)
             .await
             .map_err(|_| Error::NoUser.into())
+            .and_then(|user| {
+                if let Some(user) = user {
+                    Ok(user)
+                } else {
+                    Err(actix_web::Error::from(Error::NoUser))
+                }
+            })
     }
 }

View File

@@ -5,7 +5,6 @@ use std::future::{Ready, ready};
 use std::ops;

 /// # Async extractor.
-///
 /// Asynchronous object extractor from a query.
 pub struct AsyncExtractor<T>(T);
@@ -80,7 +79,6 @@ impl<T: FromRequestAsync> FromRequest for AsyncExtractor<T> {
 }

 /// # Sync extractor.
-///
 /// Synchronous object extractor from a query.
 pub struct SyncExtractor<T>(T);

View File

@@ -12,8 +12,6 @@ use utoipa_rapidoc::RapiDoc;

 mod state;
-
-mod database;
 mod extractors;
 mod middlewares;
 mod routes;
@@ -72,7 +70,7 @@ pub fn get_api_scope<
 async fn async_main() -> io::Result<()> {
     info!("Запуск сервера...");

-    let app_state = new_app_state().await.unwrap();
+    let app_state = new_app_state(None).await.unwrap();

     HttpServer::new(move || {
         let (app, api) = App::new()
@@ -111,7 +109,7 @@ fn main() -> io::Result<()> {
         },
     ));

-    dotenv().unwrap();
+    let _ = dotenv();
     env_logger::init();

View File

@@ -1,25 +1,20 @@
-use crate::database::models::User;
 use crate::extractors::authorized_user;
 use crate::extractors::base::FromRequestAsync;
 use actix_web::body::{BoxBody, EitherBody};
-use actix_web::dev::{Payload, Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
+use actix_web::dev::{forward_ready, Payload, Service, ServiceRequest, ServiceResponse, Transform};
 use actix_web::{Error, HttpRequest, ResponseError};
+use database::entity::User;
 use futures_util::future::LocalBoxFuture;
-use std::future::{Ready, ready};
+use std::future::{ready, Ready};
 use std::rc::Rc;

 /// Middleware guard working with JWT tokens.
+#[derive(Default)]
 pub struct JWTAuthorization {
     /// List of ignored endpoints.
     pub ignore: &'static [&'static str],
 }

-impl Default for JWTAuthorization {
-    fn default() -> Self {
-        Self { ignore: &[] }
-    }
-}
-
 impl<S, B> Transform<S, ServiceRequest> for JWTAuthorization
 where
     S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
@@ -70,8 +65,8 @@ where
         return false;
     }

-    if let Some(other) = path.as_bytes().iter().nth(ignore.len()) {
-        return ['?' as u8, '/' as u8].contains(other);
+    if let Some(other) = path.as_bytes().get(ignore.len()) {
+        return [b'?', b'/'].contains(other);
     }

     true
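
The rewritten check depends on the surrounding ignore-prefix logic; the `starts_with` part sits outside this hunk, so it is assumed in the sketch below. In isolation, the boundary rule says a path counts as ignored only when the byte right after the prefix starts a new segment or a query string:

```rust
// "/auth/sign-in" and "/auth?next=1" match the "/auth" prefix,
// but "/authors" does not.
fn is_ignored(path: &str, ignore: &str) -> bool {
    if !path.starts_with(ignore) {
        return false;
    }
    if let Some(other) = path.as_bytes().get(ignore.len()) {
        return [b'?', b'/'].contains(other);
    }
    true // exact match, nothing after the prefix
}

fn main() {
    assert!(is_ignored("/auth", "/auth"));
    assert!(is_ignored("/auth/sign-in", "/auth"));
    assert!(is_ignored("/auth?next=1", "/auth"));
    assert!(!is_ignored("/authors", "/auth"));
}
```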

View File

@@ -1,10 +1,10 @@
-use actix_web::Error;
 use actix_web::body::{BoxBody, EitherBody};
-use actix_web::dev::{Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
+use actix_web::dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform};
 use actix_web::http::header;
 use actix_web::http::header::HeaderValue;
+use actix_web::Error;
 use futures_util::future::LocalBoxFuture;
-use std::future::{Ready, ready};
+use std::future::{ready, Ready};

 /// Middleware to specify the encoding in the Content-Type header.
 pub struct ContentTypeBootstrap;
@@ -30,7 +30,7 @@ pub struct ContentTypeMiddleware<S> {
     service: S,
 }

-impl<'a, S, B> Service<ServiceRequest> for ContentTypeMiddleware<S>
+impl<S, B> Service<ServiceRequest> for ContentTypeMiddleware<S>
 where
     S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
     S::Future: 'static,
@@ -49,14 +49,15 @@ where
             let mut response = fut.await?;
             let headers = response.response_mut().headers_mut();

-            if let Some(content_type) = headers.get("Content-Type") {
-                if content_type == "application/json" {
-                    headers.insert(
-                        header::CONTENT_TYPE,
-                        HeaderValue::from_static("application/json; charset=utf8"),
-                    );
-                }
-            }
+            if let Some(content_type) = headers.get("Content-Type")
+                && content_type == "application/json"
+            {
+                headers.insert(
+                    header::CONTENT_TYPE,
+                    HeaderValue::from_static("application/json; charset=utf8"),
+                );
+            }

             Ok(response.map_into_left_body())
         })
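
The collapsed condition is a let-chain (`if let ... && ...`), which to my knowledge requires a recent toolchain (stabilized around Rust 1.88 on the 2024 edition). An equivalent standalone sketch of the same flattening:

```rust
// One level of nesting removed: the pattern match and the comparison
// share a single `if`.
fn content_type_is_json(headers: &[(&str, &str)]) -> bool {
    let lookup = |name: &str| headers.iter().find(|(k, _)| *k == name).map(|(_, v)| *v);

    if let Some(content_type) = lookup("Content-Type")
        && content_type == "application/json"
    {
        return true;
    }
    false
}

fn main() {
    assert!(content_type_is_json(&[("Content-Type", "application/json")]));
    assert!(!content_type_is_json(&[("Content-Type", "text/html")]));
}
```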

View File

@@ -2,16 +2,6 @@ use jsonwebtoken::errors::ErrorKind;
 use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode};
 use serde::{Deserialize, Serialize};

-#[derive(Deserialize, Serialize)]
-struct TokenData {
-    iis: String,
-    sub: i32,
-    app: i32,
-    exp: i32,
-    iat: i32,
-    jti: i32,
-}
-
 #[derive(Debug, Serialize, Deserialize)]
 struct Claims {
     sub: i32,
@@ -22,7 +12,7 @@ struct Claims {
 #[derive(Debug, PartialEq)]
 pub enum Error {
-    JwtError(ErrorKind),
+    Jwt(ErrorKind),
     InvalidSignature,
     InvalidToken,
     Expired,
@@ -49,10 +39,10 @@ const VK_PUBLIC_KEY: &str = concat!(
     "-----END PUBLIC KEY-----"
 );

-pub fn parse_vk_id(token_str: &String, client_id: i32) -> Result<i32, Error> {
+pub fn parse_vk_id(token_str: &str, client_id: i32) -> Result<i32, Error> {
     let dkey = DecodingKey::from_rsa_pem(VK_PUBLIC_KEY.as_bytes()).unwrap();

-    match decode::<Claims>(&token_str, &dkey, &Validation::new(Algorithm::RS256)) {
+    match decode::<Claims>(token_str, &dkey, &Validation::new(Algorithm::RS256)) {
         Ok(token_data) => {
             let claims = token_data.claims;
@@ -77,7 +67,7 @@ pub fn parse_vk_id(token_str: &String, client_id: i32) -> Result<i32, Error> {
             ErrorKind::Base64(_) => Error::InvalidToken,
             ErrorKind::Json(_) => Error::InvalidToken,
             ErrorKind::Utf8(_) => Error::InvalidToken,
-            kind => Error::JwtError(kind),
+            kind => Error::Jwt(kind),
         }),
     }
 }
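
For context, the renamed `Jwt` variant is the catch-all in a mapping from jsonwebtoken's `ErrorKind` into the module's own error type; the kinds the route actually distinguishes collapse into dedicated variants. A sketch of that mapping, mirroring the enum in this diff:

```rust
use jsonwebtoken::errors::ErrorKind;

#[derive(Debug, PartialEq)]
enum Error {
    Jwt(ErrorKind), // anything not handled explicitly
    InvalidSignature,
    InvalidToken,
    Expired,
}

// Collapse decoding failures the API treats identically into one variant,
// and keep signature/expiry errors distinct for better error responses.
fn map_error(kind: ErrorKind) -> Error {
    match kind {
        ErrorKind::InvalidSignature => Error::InvalidSignature,
        ErrorKind::ExpiredSignature => Error::Expired,
        ErrorKind::InvalidToken
        | ErrorKind::Base64(_)
        | ErrorKind::Json(_)
        | ErrorKind::Utf8(_) => Error::InvalidToken,
        kind => Error::Jwt(kind),
    }
}
```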

View File

@@ -1,31 +1,34 @@
 use self::schema::*;
-use crate::database::driver;
-use crate::database::driver::users::UserSave;
 use crate::routes::auth::shared::parse_vk_id;
 use crate::routes::auth::sign_in::schema::SignInData::{Default, VkOAuth};
-use crate::routes::schema::ResponseError;
 use crate::routes::schema::user::UserResponse;
-use crate::{AppState, utility};
+use crate::routes::schema::ResponseError;
+use crate::{utility, AppState};
 use actix_web::{post, web};
+use database::query::Query;
 use web::Json;

 async fn sign_in_combined(
     data: SignInData,
     app_state: &web::Data<AppState>,
 ) -> Result<UserResponse, ErrorCode> {
+    let db = app_state.get_database();
+
     let user = match &data {
-        Default(data) => driver::users::get_by_username(&app_state, &data.username).await,
-        VkOAuth(id) => driver::users::get_by_vk_id(&app_state, *id).await,
-    };
+        Default(data) => Query::find_user_by_username(db, &data.username).await,
+        VkOAuth(id) => Query::find_user_by_vk_id(db, *id).await,
+    }
+    .ok()
+    .flatten();

     match user {
-        Ok(mut user) => {
+        Some(user) => {
             if let Default(data) = data {
                 if user.password.is_none() {
                     return Err(ErrorCode::IncorrectCredentials);
                 }

-                match bcrypt::verify(&data.password, &user.password.as_ref().unwrap()) {
+                match bcrypt::verify(&data.password, user.password.as_ref().unwrap()) {
                     Ok(result) => {
                         if !result {
                             return Err(ErrorCode::IncorrectCredentials);
@@ -37,14 +40,11 @@ async fn sign_in_combined(
                 }
             }

-            user.access_token = Some(utility::jwt::encode(&user.id));
-
-            user.save(&app_state).await.expect("Failed to update user");
-
-            Ok(user.into())
+            let access_token = utility::jwt::encode(&user.id);
+            Ok(UserResponse::from_user_with_token(user, access_token))
         }
-        Err(_) => Err(ErrorCode::IncorrectCredentials),
+        None => Err(ErrorCode::IncorrectCredentials),
     }
 }
@@ -124,8 +124,6 @@ mod schema {
         InvalidVkAccessToken,
     }

-    /// Internal
-    ///
     /// Type of authorization.
     pub enum SignInData {
         /// User and password name and password.
@@ -139,16 +137,16 @@ mod schema {
 #[cfg(test)]
 mod tests {
     use super::schema::*;
-    use crate::database::driver;
-    use crate::database::models::{User, UserRole};
     use crate::routes::auth::sign_in::sign_in;
     use crate::test_env::tests::{static_app_state, test_app_state, test_env};
-    use crate::utility;
     use actix_test::test_app;
     use actix_web::dev::ServiceResponse;
     use actix_web::http::Method;
     use actix_web::http::StatusCode;
     use actix_web::test;
+    use database::entity::sea_orm_active_enums::UserRole;
+    use database::entity::ActiveUser;
+    use database::sea_orm::{ActiveModelTrait, Set};
     use sha1::{Digest, Sha1};
     use std::fmt::Write;
@@ -182,22 +180,24 @@ mod tests {
         test_env();
         let app_state = static_app_state().await;

-        driver::users::insert_or_ignore(
-            &app_state,
-            &User {
-                id: id.clone(),
-                username,
-                password: Some(bcrypt::hash("example".to_string(), bcrypt::DEFAULT_COST).unwrap()),
-                vk_id: None,
-                telegram_id: None,
-                access_token: Some(utility::jwt::encode(&id)),
-                group: Some("ИС-214/23".to_string()),
-                role: UserRole::Student,
-                android_version: None,
-            },
-        )
-        .await
-        .unwrap();
+        let active_user = ActiveUser {
+            id: Set(id.clone()),
+            username: Set(username),
+            password: Set(Some(
+                bcrypt::hash("example", bcrypt::DEFAULT_COST).unwrap(),
+            )),
+            vk_id: Set(None),
+            telegram_id: Set(None),
+            group: Set(Some("ИС-214/23".to_string())),
+            role: Set(UserRole::Student),
+            android_version: Set(None),
+        };
+
+        active_user
+            .save(app_state.get_database())
+            .await
+            .expect("Failed to save user");
     }

     #[actix_web::test]
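
`Query::find_user_by_username` and friends live in the external `database` crate and are not part of this diff. Under SeaORM they plausibly reduce to a one-liner like the following (assumed shape; returning `Ok(None)` for a missing row is exactly why the handler above calls `.ok().flatten()` to treat errors and missing users the same way):

```rust
use database::entity::{User, UserColumn, UserEntity}; // assumed re-exports
use sea_orm::{ColumnTrait, DatabaseConnection, DbErr, EntityTrait, QueryFilter};

pub struct Query;

impl Query {
    // SELECT * FROM users WHERE username = $1 LIMIT 1
    pub async fn find_user_by_username(
        db: &DatabaseConnection,
        username: &str,
    ) -> Result<Option<User>, DbErr> {
        UserEntity::find()
            .filter(UserColumn::Username.eq(username))
            .one(db)
            .await
    }
}
```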

View File

@@ -1,11 +1,13 @@
 use self::schema::*;
-use crate::AppState;
-use crate::database::driver;
-use crate::database::models::UserRole;
 use crate::routes::auth::shared::parse_vk_id;
-use crate::routes::schema::ResponseError;
 use crate::routes::schema::user::UserResponse;
+use crate::routes::schema::ResponseError;
+use crate::{utility, AppState};
 use actix_web::{post, web};
+use database::entity::sea_orm_active_enums::UserRole;
+use database::entity::ActiveUser;
+use database::query::Query;
+use database::sea_orm::ActiveModelTrait;
 use web::Json;

 async fn sign_up_combined(
@@ -28,22 +30,30 @@ async fn sign_up_combined(
         return Err(ErrorCode::InvalidGroupName);
     }

-    // If user with specified username already exists.
-    if driver::users::contains_by_username(&app_state, &data.username).await {
+    let db = app_state.get_database();
+
+    // If user with specified username already exists.
+    if Query::find_user_by_username(db, &data.username)
+        .await
+        .is_ok_and(|user| user.is_some())
+    {
         return Err(ErrorCode::UsernameAlreadyExists);
     }

     // If user with specified VKID already exists.
-    if let Some(id) = data.vk_id {
-        if driver::users::contains_by_vk_id(&app_state, id).await {
-            return Err(ErrorCode::VkAlreadyExists);
-        }
-    }
+    if let Some(id) = data.vk_id
+        && Query::is_user_exists_by_vk_id(db, id)
+            .await
+            .expect("Failed to check user existence")
+    {
+        return Err(ErrorCode::VkAlreadyExists);
+    }

-    let user = data.into();
-    driver::users::insert(&app_state, &user).await.unwrap();
+    let active_user: ActiveUser = data.into();
+    let user = active_user.insert(db).await.unwrap();
+    let access_token = utility::jwt::encode(&user.id);

-    Ok(UserResponse::from(&user)).into()
+    Ok(UserResponse::from_user_with_token(user, access_token))
 }

 #[utoipa::path(responses(
@@ -101,10 +111,11 @@ pub async fn sign_up_vk(
 }

 mod schema {
-    use crate::database::models::{User, UserRole};
     use crate::routes::schema::user::UserResponse;
-    use crate::utility;
     use actix_macros::ErrResponse;
+    use database::entity::sea_orm_active_enums::UserRole;
+    use database::entity::ActiveUser;
+    use database::sea_orm::Set;
     use derive_more::Display;
     use objectid::ObjectId;
     use serde::{Deserialize, Serialize};
@@ -134,7 +145,7 @@ mod schema {
     }

     pub mod vk {
-        use crate::database::models::UserRole;
+        use database::entity::sea_orm_active_enums::UserRole;
         use serde::{Deserialize, Serialize};

         #[derive(Serialize, Deserialize, utoipa::ToSchema)]
@@ -189,8 +200,6 @@ mod schema {
         VkAlreadyExists,
     }

-    /// Internal
-    ///
     /// Data for registration.
     pub struct SignUpData {
         // TODO: enforce minimum and maximum length on registration and on change.
@@ -215,25 +224,21 @@ mod schema {
         pub version: String,
     }

-    impl Into<User> for SignUpData {
-        fn into(self) -> User {
-            assert_ne!(self.password.is_some(), self.vk_id.is_some());
-
-            let id = ObjectId::new().unwrap().to_string();
-            let access_token = Some(utility::jwt::encode(&id));
-
-            User {
-                id,
-                username: self.username,
-                password: self
-                    .password
-                    .map(|x| bcrypt::hash(x, bcrypt::DEFAULT_COST).unwrap()),
-                vk_id: self.vk_id,
-                telegram_id: None,
-                access_token,
-                group: Some(self.group),
-                role: self.role,
-                android_version: Some(self.version),
-            }
-        }
-    }
+    impl From<SignUpData> for ActiveUser {
+        fn from(value: SignUpData) -> Self {
+            assert_ne!(value.password.is_some(), value.vk_id.is_some());
+
+            ActiveUser {
+                id: Set(ObjectId::new().unwrap().to_string()),
+                username: Set(value.username),
+                password: Set(value
+                    .password
+                    .map(|x| bcrypt::hash(x, bcrypt::DEFAULT_COST).unwrap())),
+                vk_id: Set(value.vk_id),
+                telegram_id: Set(None),
+                group: Set(Some(value.group)),
+                role: Set(value.role),
+                android_version: Set(Some(value.version)),
+            }
+        }
+    }
@@ -241,8 +246,6 @@ mod schema {

 #[cfg(test)]
 mod tests {
-    use crate::database::driver;
-    use crate::database::models::UserRole;
     use crate::routes::auth::sign_up::schema::Request;
     use crate::routes::auth::sign_up::sign_up;
     use crate::test_env::tests::{static_app_state, test_app_state, test_env};
@@ -251,6 +254,10 @@ mod tests {
     use actix_web::http::Method;
     use actix_web::http::StatusCode;
     use actix_web::test;
+    use database::entity::sea_orm_active_enums::UserRole;
+    use database::entity::{UserColumn, UserEntity};
+    use database::sea_orm::ColumnTrait;
+    use database::sea_orm::{EntityTrait, QueryFilter};

     struct SignUpPartial<'a> {
         username: &'a str,
@@ -282,7 +289,12 @@ mod tests {
         test_env();
         let app_state = static_app_state().await;

-        driver::users::delete_by_username(&app_state, &"test::sign_up_valid".to_string()).await;
+        UserEntity::delete_many()
+            .filter(UserColumn::Username.eq("test::sign_up_valid"))
+            .exec(app_state.get_database())
+            .await
+            .expect("Failed to delete user");

         // test
@@ -303,7 +315,12 @@ mod tests {
         test_env();
         let app_state = static_app_state().await;

-        driver::users::delete_by_username(&app_state, &"test::sign_up_multiple".to_string()).await;
+        UserEntity::delete_many()
+            .filter(UserColumn::Username.eq("test::sign_up_multiple"))
+            .exec(app_state.get_database())
+            .await
+            .expect("Failed to delete user");

         let create = sign_up_client(SignUpPartial {
             username: "test::sign_up_multiple",
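
`Query::is_user_exists_by_vk_id` is likewise external to this diff; a plausible SeaORM shape is a filtered count rather than a full model fetch (names assumed from the imports above):

```rust
use database::entity::{UserColumn, UserEntity}; // assumed re-exports
use sea_orm::{ColumnTrait, DatabaseConnection, DbErr, EntityTrait, PaginatorTrait, QueryFilter};

// SELECT COUNT(*) FROM users WHERE vk_id = $1
pub async fn is_user_exists_by_vk_id(
    db: &DatabaseConnection,
    vk_id: i32,
) -> Result<bool, DbErr> {
    let count = UserEntity::find()
        .filter(UserColumn::VkId.eq(vk_id))
        .count(db)
        .await?;
    Ok(count > 0)
}
```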

View File

@@ -1,12 +1,13 @@
 use self::schema::*;
-use crate::database::driver;
-use crate::database::driver::users::UserSave;
-use crate::database::models::{User, UserRole};
 use crate::routes::schema::ResponseError;
 use crate::utility::telegram::{WebAppInitDataMap, WebAppUser};
-use crate::{AppState, utility};
+use crate::{utility, AppState};
 use actix_web::{post, web};
 use chrono::{DateTime, Duration, Utc};
+use database::entity::sea_orm_active_enums::UserRole;
+use database::entity::ActiveUser;
+use database::query::Query;
+use database::sea_orm::{ActiveModelTrait, Set};
 use objectid::ObjectId;
 use std::sync::Arc;
 use web::Json;
@@ -22,10 +23,6 @@ pub async fn telegram_auth(
 ) -> ServiceResponse {
     let init_data = WebAppInitDataMap::from_str(data_json.into_inner().init_data);

-    // for (key, value) in &init_data.data_map {
-    //     println!("key: {} | value: {}", key, value);
-    // }
-
     {
         let env = &app_state.get_env().telegram;
@@ -52,39 +49,32 @@ pub async fn telegram_auth(
     let web_app_user =
         serde_json::from_str::<WebAppUser>(init_data.data_map.get("user").unwrap()).unwrap();

-    let mut user = {
-        match driver::users::get_by_telegram_id(&app_state, web_app_user.id).await {
-            Ok(value) => Ok(value),
-            Err(_) => {
-                let new_user = User {
-                    id: ObjectId::new().unwrap().to_string(),
-                    username: format!("telegram_{}", web_app_user.id), // can be kept as-is or changed later
-                    password: None, // none on purpose
-                    vk_id: None,
-                    telegram_id: Some(web_app_user.id),
-                    access_token: None, // set below
-                    group: None,
-                    role: UserRole::Student, // TODO: validate the data at registration
-                    android_version: None,
-                };
-
-                driver::users::insert(&app_state, &new_user)
-                    .await
-                    .map(|_| new_user)
-            }
-        }
-        .expect("Failed to get or add user")
-    };
-
-    user.access_token = Some(utility::jwt::encode(&user.id));
-
-    user.save(&app_state).await.expect("Failed to update user");
-
-    Ok(Response::new(
-        &*user.access_token.unwrap(),
-        user.group.is_some(),
-    ))
-    .into()
+    let user = match Query::find_user_by_telegram_id(app_state.get_database(), web_app_user.id)
+        .await
+        .expect("Failed to find user by telegram id")
+    {
+        Some(value) => value,
+        None => {
+            let new_user = ActiveUser {
+                id: Set(ObjectId::new().unwrap().to_string()),
+                username: Set(format!("telegram_{}", web_app_user.id)), // can be kept as-is or changed later
+                password: Set(None), // none on purpose
+                vk_id: Set(None),
+                telegram_id: Set(Some(web_app_user.id)),
+                group: Set(None),
+                role: Set(UserRole::Student), // TODO: validate the data at registration
+                android_version: Set(None),
+            };
+
+            new_user
+                .insert(app_state.get_database())
+                .await
+                .expect("Failed to insert user")
+        }
+    };
+
+    let access_token = utility::jwt::encode(&user.id);
+    Ok(Response::new(&access_token, user.group.is_some())).into()
 }

 mod schema {
@@ -93,9 +83,9 @@ mod schema {
     use crate::utility::telegram::VerifyError;
     use actix_macros::ErrResponse;
     use actix_web::body::EitherBody;
-    use actix_web::cookie::CookieBuilder;
     use actix_web::cookie::time::OffsetDateTime;
-    use actix_web::{HttpRequest, HttpResponse, web};
+    use actix_web::cookie::CookieBuilder;
+    use actix_web::{web, HttpRequest, HttpResponse};
     use derive_more::Display;
     use serde::{Deserialize, Serialize, Serializer};
     use std::ops::Add;
@@ -114,8 +104,8 @@ mod schema {
     #[serde(rename_all = "camelCase")]
     #[schema(as = Flow::TelegramAuth::Response)]
     pub struct Response {
-        #[serde(skip)]
-        #[schema(ignore)]
+        // #[serde(skip)] // TODO: haven't figured out yet how to avoid returning the raw token in the response
+        // #[schema(ignore)]
         access_token: String,

         pub completed: bool,
@@ -135,7 +125,7 @@ mod schema {
         &mut self,
         request: &HttpRequest,
         response: &mut HttpResponse<EitherBody<String>>,
-    ) -> () {
+    ) {
         let access_token = &self.access_token;
         let app_state = request.app_data::<web::Data<AppState>>().unwrap();

View File

@@ -1,11 +1,11 @@
 use self::schema::*;
-use crate::AppState;
-use crate::database::driver;
-use crate::database::driver::users::UserSave;
-use crate::database::models::User;
 use crate::extractors::base::AsyncExtractor;
 use crate::routes::schema::ResponseError;
+use crate::AppState;
 use actix_web::{post, web};
+use database::entity::User;
+use database::query::Query;
+use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
 use web::Json;

 #[utoipa::path(responses(
@@ -20,7 +20,7 @@ pub async fn telegram_complete(
     app_state: web::Data<AppState>,
     user: AsyncExtractor<User>,
 ) -> ServiceResponse {
-    let mut user = user.into_inner();
+    let user = user.into_inner();

     // guard against overwriting data that is already present
     if user.group.is_some() {
@@ -29,13 +29,19 @@ pub async fn telegram_complete(

     let data = data.into_inner();

+    let db = app_state.get_database();
+    let mut active_user = user.clone().into_active_model();
+
     // replace the existing username if it differs
     if user.username != data.username {
-        if driver::users::contains_by_username(&app_state, &data.username).await {
+        if Query::is_user_exists_by_username(db, &data.username)
+            .await
+            .unwrap()
+        {
             return Err(ErrorCode::UsernameAlreadyExists).into();
         }

-        user.username = data.username;
+        active_user.username = Set(data.username);
     }

     // check that the group exists
@@ -50,9 +56,12 @@ pub async fn telegram_complete(
         return Err(ErrorCode::InvalidGroupName).into();
     }

-    user.group = Some(data.group);
-    user.save(&app_state).await.expect("Failed to update user");
+    active_user.group = Set(Some(data.group));
+    active_user
+        .update(db)
+        .await
+        .expect("Failed to update user");

     Ok(()).into()
 }
@@ -80,11 +89,11 @@ mod schema {
     #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
     #[schema(as = Flow::TelegramFill::ErrorCode)]
     pub enum ErrorCode {
-        #[display("This flow already completed.")]
+        #[display("This flow is already completed.")]
         #[status_code = "actix_web::http::StatusCode::CONFLICT"]
         AlreadyCompleted,

-        #[display("Username is already exists.")]
+        #[display("User with that name already exists.")]
         #[status_code = "actix_web::http::StatusCode::BAD_REQUEST"]
         UsernameAlreadyExists,

View File

@@ -1,11 +1,17 @@
-use crate::AppState;
 use crate::routes::schedule::schema::CacheStatus;
+use crate::AppState;
 use actix_web::{get, web};
+use std::ops::Deref;

 #[utoipa::path(responses(
     (status = OK, body = CacheStatus),
 ))]
 #[get("/cache-status")]
 pub async fn cache_status(app_state: web::Data<AppState>) -> CacheStatus {
-    CacheStatus::from(&app_state).await.into()
+    app_state
+        .get_schedule_snapshot("eng_polytechnic")
+        .await
+        .unwrap()
+        .deref()
+        .into()
 }

View File

@@ -1,10 +1,10 @@
 use self::schema::*;
 use crate::AppState;
-use crate::database::models::User;
 use crate::extractors::base::AsyncExtractor;
 use crate::routes::schedule::schema::ScheduleEntryResponse;
 use crate::routes::schema::ResponseError;
 use actix_web::{get, web};
+use database::entity::User;

 #[utoipa::path(responses(
     (status = OK, body = ScheduleEntryResponse),

View File

@@ -1,7 +1,7 @@
 mod cache_status;
 mod group;
 mod group_names;
-mod schedule;
+mod get;
 mod schema;
 mod teacher;
 mod teacher_names;
@@ -9,6 +9,6 @@ mod teacher_names;
 pub use cache_status::*;
 pub use group::*;
 pub use group_names::*;
-pub use schedule::*;
+pub use get::*;
 pub use teacher::*;
 pub use teacher_names::*;

View File

@@ -63,18 +63,6 @@ pub struct CacheStatus {
     pub updated_at: i64,
 }

-impl CacheStatus {
-    pub async fn from(value: &web::Data<AppState>) -> Self {
-        From::<&ScheduleSnapshot>::from(
-            value
-                .get_schedule_snapshot("eng_polytechnic")
-                .await
-                .unwrap()
-                .deref(),
-        )
-    }
-}
-
 impl From<&ScheduleSnapshot> for CacheStatus {
     fn from(value: &ScheduleSnapshot) -> Self {
         Self {

View File

@@ -13,13 +13,13 @@ where
     E: Serialize + PartialSchema + Display + PartialErrResponse;

 /// Transform Response<T, E> into Result<T, E>
-impl<T, E> Into<Result<T, E>> for Response<T, E>
+impl<T, E> From<Response<T, E>> for Result<T, E>
 where
     T: Serialize + PartialSchema + PartialOkResponse,
     E: Serialize + PartialSchema + Display + PartialErrResponse,
 {
-    fn into(self) -> Result<T, E> {
-        self.0
+    fn from(value: Response<T, E>) -> Self {
+        value.0
     }
 }
@@ -46,7 +46,7 @@ where
 {
     match &self.0 {
         Ok(ok) => serializer.serialize_some(&ok),
-        Err(err) => serializer.serialize_some(&ResponseError::<E>::from(err.clone().into())),
+        Err(err) => serializer.serialize_some(&err.clone().into()),
     }
 }
@@ -95,7 +95,7 @@ pub trait PartialOkResponse {
         &mut self,
         _request: &HttpRequest,
         _response: &mut HttpResponse<EitherBody<String>>,
-    ) -> () {
+    ) {
     }
 }
@@ -126,8 +126,9 @@ where
 }

 pub mod user {
-    use crate::database::models::{User, UserRole};
     use actix_macros::{OkResponse, ResponderJson};
+    use database::entity::sea_orm_active_enums::UserRole;
+    use database::entity::User;
     use serde::Serialize;

     //noinspection SpellCheckingInspection
@@ -165,17 +166,31 @@ pub mod user {
         pub access_token: Option<String>,
     }

+    impl UserResponse {
+        pub fn from_user_with_token(user: User, access_token: String) -> Self {
+            Self {
+                id: user.id.clone(),
+                username: user.username.clone(),
+                group: user.group.clone(),
+                role: user.role.clone(),
+                vk_id: user.vk_id,
+                telegram_id: user.telegram_id,
+                access_token: Some(access_token),
+            }
+        }
+    }
+
     /// Create UserResponse from User ref.
     impl From<&User> for UserResponse {
         fn from(user: &User) -> Self {
-            UserResponse {
+            Self {
                 id: user.id.clone(),
                 username: user.username.clone(),
                 group: user.group.clone(),
                 role: user.role.clone(),
-                vk_id: user.vk_id.clone(),
-                telegram_id: user.telegram_id.clone(),
-                access_token: user.access_token.clone(),
+                vk_id: user.vk_id,
+                telegram_id: user.telegram_id,
+                access_token: None,
             }
         }
     }
@@ -183,14 +198,14 @@ pub mod user {
     /// Transform User to UserResponse.
     impl From<User> for UserResponse {
         fn from(user: User) -> Self {
-            UserResponse {
+            Self {
                 id: user.id,
                 username: user.username,
                 group: user.group,
                 role: user.role,
                 vk_id: user.vk_id,
                 telegram_id: user.telegram_id,
-                access_token: user.access_token,
+                access_token: None,
             }
         }
     }

View File

@@ -1,9 +1,9 @@
 use self::schema::*;
-use crate::database::driver::users::UserSave;
-use crate::database::models::User;
 use crate::extractors::base::AsyncExtractor;
 use crate::state::AppState;
 use actix_web::{post, web};
+use database::entity::User;
+use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};

 #[utoipa::path(responses((status = OK)))]
 #[post("/change-group")]
@@ -12,9 +12,13 @@ pub async fn change_group(
     user: AsyncExtractor<User>,
     data: web::Json<Request>,
 ) -> ServiceResponse {
-    let mut user = user.into_inner();
+    let user = user.into_inner();

-    if user.group.is_some_and(|group| group == data.group) {
+    if user
+        .group
+        .as_ref()
+        .is_some_and(|group| group.eq(&data.group))
+    {
         return Ok(()).into();
     }
@@ -29,8 +33,10 @@ pub async fn change_group(
         return Err(ErrorCode::NotFound).into();
     }

-    user.group = Some(data.into_inner().group);
-    user.save(&app_state).await.unwrap();
+    let mut active_user = user.clone().into_active_model();
+    active_user.group = Set(Some(data.into_inner().group));
+    active_user.update(app_state.get_database()).await.unwrap();

     Ok(()).into()
 }

View File

@@ -1,10 +1,10 @@
use self::schema::*; use self::schema::*;
use crate::database::driver;
use crate::database::driver::users::UserSave;
use crate::database::models::User;
use crate::extractors::base::AsyncExtractor; use crate::extractors::base::AsyncExtractor;
use crate::state::AppState; use crate::state::AppState;
use actix_web::{post, web}; use actix_web::{post, web};
use database::entity::User;
use database::query::Query;
use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
#[utoipa::path(responses((status = OK)))] #[utoipa::path(responses((status = OK)))]
#[post("/change-username")] #[post("/change-username")]
@@ -13,21 +13,24 @@ pub async fn change_username(
     user: AsyncExtractor<User>,
     data: web::Json<Request>,
 ) -> ServiceResponse {
-    let mut user = user.into_inner();
+    let user = user.into_inner();
     if user.username == data.username {
         return Ok(()).into();
     }
 
-    if driver::users::get_by_username(&app_state, &data.username)
+    let db = app_state.get_database();
+    if Query::is_user_exists_by_username(db, &data.username)
         .await
-        .is_ok()
+        .unwrap()
     {
         return Err(ErrorCode::AlreadyExists).into();
     }
 
-    user.username = data.into_inner().username;
-    user.save(&app_state).await.unwrap();
+    let mut active_user = user.into_active_model();
+    active_user.username = Set(data.into_inner().username);
+    active_user.update(db).await.unwrap();
 
     Ok(()).into()
 }
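Note: Query::is_user_exists_by_username replaces the old "fetch and check is_ok()" probe with an explicit boolean, so a database error no longer masquerades as "user not found". A plausible shape for that helper under sea-orm (the user entity module and Username column names are assumptions):

    use database::entity::user;
    use database::sea_orm::{ColumnTrait, DatabaseConnection, DbErr, EntityTrait, PaginatorTrait, QueryFilter};

    pub async fn is_user_exists_by_username(db: &DatabaseConnection, username: &str) -> Result<bool, DbErr> {
        let count = user::Entity::find()
            .filter(user::Column::Username.eq(username))
            .count(db)
            .await?; // DbErr propagates instead of being folded into "exists"
        Ok(count > 0)
    }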

View File

@@ -1,7 +1,7 @@
-use crate::database::models::User;
 use crate::extractors::base::AsyncExtractor;
 use crate::routes::schema::user::UserResponse;
 use actix_web::get;
+use database::entity::User;
 
 #[utoipa::path(responses((status = OK, body = UserResponse)))]
 #[get("/me")]

View File

@@ -2,29 +2,30 @@ mod env;
 pub use crate::state::env::AppEnv;
 use actix_web::web;
-use diesel::{Connection, PgConnection};
+use database::migration::{Migrator, MigratorTrait};
+use database::sea_orm::{ConnectOptions, Database, DatabaseConnection};
 use providers::base::{ScheduleProvider, ScheduleSnapshot};
 use std::collections::HashMap;
 use std::sync::Arc;
-use tokio::sync::{Mutex, MutexGuard};
+use std::time::Duration;
 use tokio_util::sync::CancellationToken;
 
 /// Common data provided to endpoints.
 pub struct AppState {
     cancel_token: CancellationToken,
-    database: Mutex<PgConnection>,
+    database: DatabaseConnection,
     providers: HashMap<String, Arc<dyn ScheduleProvider>>,
     env: AppEnv,
 }
 
 impl AppState {
-    pub async fn new() -> Result<Self, Box<dyn std::error::Error>> {
-        let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
+    pub async fn new(
+        database: Option<DatabaseConnection>,
+    ) -> Result<Self, Box<dyn std::error::Error>> {
         let env = AppEnv::default();
         let providers: HashMap<String, Arc<dyn ScheduleProvider>> = HashMap::from([(
             "eng_polytechnic".to_string(),
-            providers::EngelsPolytechnicProvider::new({
+            providers::EngelsPolytechnicProvider::get({
                 #[cfg(test)]
                 {
                     providers::EngelsPolytechnicUpdateSource::Prepared(ScheduleSnapshot {
@@ -52,16 +53,35 @@ impl AppState {
         let this = Self {
             cancel_token: CancellationToken::new(),
-            database: Mutex::new(
-                PgConnection::establish(&database_url)
-                    .unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
-            ),
+            database: if let Some(database) = database {
+                database
+            } else {
+                let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
+                let mut opt = ConnectOptions::new(database_url.clone());
+                opt.max_connections(4)
+                    .min_connections(2)
+                    .connect_timeout(Duration::from_secs(10))
+                    .idle_timeout(Duration::from_secs(8))
+                    .sqlx_logging(true);
+
+                let database = Database::connect(opt)
+                    .await
+                    .unwrap_or_else(|_| panic!("Error connecting to {}", database_url));
+
+                Migrator::up(&database, None)
+                    .await
+                    .expect("Failed to run database migrations");
+
+                database
+            },
             env,
             providers,
         };
 
         if this.env.schedule.auto_update {
-            for (_, provider) in &this.providers {
+            for provider in this.providers.values() {
                 let provider = provider.clone();
                 let cancel_token = this.cancel_token.clone();
@@ -80,8 +100,8 @@ impl AppState {
         None
     }
 
-    pub async fn get_database(&'_ self) -> MutexGuard<'_, PgConnection> {
-        self.database.lock().await
+    pub fn get_database(&'_ self) -> &DatabaseConnection {
+        &self.database
     }
 
     pub fn get_env(&self) -> &AppEnv {
@@ -90,6 +110,8 @@ impl AppState {
 }
 
 /// Create a new object web::Data<AppState>.
-pub async fn new_app_state() -> Result<web::Data<AppState>, Box<dyn std::error::Error>> {
-    Ok(web::Data::new(AppState::new().await?))
+pub async fn new_app_state(
+    database: Option<DatabaseConnection>,
+) -> Result<web::Data<AppState>, Box<dyn std::error::Error>> {
+    Ok(web::Data::new(AppState::new(database).await?))
 }
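Note: AppState::new now accepts an optional pre-built connection, so tests can share one pool while production still connects and migrates from DATABASE_URL. A minimal sketch of both call sites (the surrounding wiring and the shared variable are assumptions):

    // Production startup: resolve DATABASE_URL, connect, run migrations.
    let state = new_app_state(None).await?;

    // Tests: inject an existing connection; DatabaseConnection wraps a pool,
    // so cloning it is cheap and skips reconnecting and re-migrating.
    let state = new_app_state(Some(shared.get_database().clone())).await?;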

View File

@@ -7,13 +7,16 @@ pub(crate) mod tests {
     pub fn test_env() {
         info!("Loading test environment file...");
         dotenvy::from_filename(".env.test.local")
             .or_else(|_| dotenvy::from_filename(".env.test"))
             .expect("Failed to load test environment file");
     }
 
     pub async fn test_app_state() -> web::Data<AppState> {
-        let state = new_app_state().await.unwrap();
+        let state = new_app_state(Some(static_app_state().await.get_database().clone()))
+            .await
+            .unwrap();
         state.clone()
     }
@@ -21,6 +24,14 @@ pub(crate) mod tests {
     pub async fn static_app_state() -> web::Data<AppState> {
         static STATE: OnceCell<web::Data<AppState>> = OnceCell::const_new();
-        STATE.get_or_init(|| test_app_state()).await.clone()
+        STATE
+            .get_or_init(async || -> web::Data<AppState> {
+                #[cfg(feature = "trace")]
+                console_subscriber::init();
+
+                new_app_state(None).await.unwrap()
+            })
+            .await
+            .clone()
     }
 }
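Note: with this split, static_app_state owns the single real connection (and the optional console_subscriber tracing init), while each test_app_state call builds a fresh AppState around a clone of that pool. A hedged usage sketch, with a hypothetical test name:

    #[actix_web::test]
    async fn builds_test_state() {
        test_env();
        let state = test_app_state().await; // fresh AppState over the shared test pool
        let _db = state.get_database();     // same underlying pool as static_app_state()
    }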

View File

@@ -24,14 +24,13 @@ static ENCODING_KEY: LazyLock<EncodingKey> = LazyLock::new(|| {
 });
 
 /// Token verification errors.
-#[allow(dead_code)]
 #[derive(Debug)]
 pub enum Error {
     /// The token has a different signature.
     InvalidSignature,
     /// Token reading error.
-    InvalidToken(ErrorKind),
+    InvalidToken,
     /// Token expired.
     Expired,
@@ -63,13 +62,13 @@ struct Claims {
 pub(crate) const DEFAULT_ALGORITHM: Algorithm = Algorithm::HS256;
 
 /// Checking the token and extracting the UUID of the user account from it.
-pub fn verify_and_decode(token: &String) -> Result<String, Error> {
+pub fn verify_and_decode(token: &str) -> Result<String, Error> {
     let mut validation = Validation::new(DEFAULT_ALGORITHM);
     validation.required_spec_claims.remove("exp");
     validation.validate_exp = false;
 
-    let result = decode::<Claims>(&token, &*DECODING_KEY, &validation);
+    let result = decode::<Claims>(token, &DECODING_KEY, &validation);
 
     match result {
         Ok(token_data) => {
@@ -82,13 +81,13 @@ pub fn verify_and_decode(token: &String) -> Result<String, Error> {
         Err(err) => Err(match err.into_kind() {
             ErrorKind::InvalidSignature => Error::InvalidSignature,
             ErrorKind::ExpiredSignature => Error::Expired,
-            kind => Error::InvalidToken(kind),
+            _ => Error::InvalidToken,
         }),
     }
 }
 
 /// Creating a user token.
-pub fn encode(id: &String) -> String {
+pub fn encode(id: &str) -> String {
     let header = Header {
         typ: Some(String::from("JWT")),
         ..Default::default()
@@ -98,12 +97,12 @@ pub fn encode(id: &String) -> String {
     let exp = iat + Duration::days(365 * 4);
 
     let claims = Claims {
-        id: id.clone(),
+        id: id.to_string(),
         iat: iat.timestamp().unsigned_abs(),
         exp: exp.timestamp().unsigned_abs(),
     };
 
-    jsonwebtoken::encode(&header, &claims, &*ENCODING_KEY).unwrap()
+    jsonwebtoken::encode(&header, &claims, &ENCODING_KEY).unwrap()
 }
 
 #[cfg(test)]
@@ -115,7 +114,7 @@ mod tests {
     fn test_encode() {
         test_env();
-        assert_eq!(encode(&"test".to_string()).is_empty(), false);
+        assert!(!encode("test").is_empty());
     }
 
     #[test]

@@ -128,7 +127,7 @@ mod tests {
         assert!(result.is_err());
         assert_eq!(
             result.err().unwrap(),
-            Error::InvalidToken(ErrorKind::InvalidToken)
+            Error::InvalidToken
         );
     }
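Note: since verify_and_decode returns the UUID that encode embeds, a round-trip makes a natural extra test. A minimal sketch in the same test module (not part of this diff; the test name and sample id are assumptions):

    #[test]
    fn test_roundtrip() {
        test_env();
        let token = encode("some-uuid");
        // verify_and_decode should hand back exactly the id that was encoded.
        assert_eq!(verify_and_decode(&token).unwrap(), "some-uuid");
    }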

View File

@@ -33,7 +33,7 @@ impl WebAppInitDataMap {
     };
 
     data.split('&')
-        .map(|kv| kv.split_once('=').unwrap_or_else(|| (kv, "")))
+        .map(|kv| kv.split_once('=').unwrap_or((kv, "")))
     .for_each(|(key, value)| {
         this.data_map.insert(key.to_string(), value.to_string());
     });
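Note: unwrap_or suits a constant fallback better than unwrap_or_else (clippy's or_fun_call lint); a key with no '=' still lands in the map with an empty value. A self-contained check of that behavior (the sample init-data string is an assumption):

    let data = "query_id=abc&flag";
    let pairs: Vec<(&str, &str)> = data
        .split('&')
        .map(|kv| kv.split_once('=').unwrap_or((kv, "")))
        .collect();
    // "flag" has no '=', so it maps to an empty value rather than being dropped.
    assert_eq!(pairs, vec![("query_id", "abc"), ("flag", "")]);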