mirror of
https://github.com/n08i40k/schedule-parser-rusted.git
synced 2025-12-06 17:57:47 +03:00
Compare commits
4 Commits
d75d3fbc97
...
9f7460973e
| Author | SHA1 | Date | |
|---|---|---|---|
|
9f7460973e
|
|||
|
|
3cf42eea8a | ||
|
|
d19b6c1069 | ||
|
126ba23001
|
2
.github/workflows/test.yml
vendored
2
.github/workflows/test.yml
vendored
@@ -22,4 +22,4 @@ jobs:
|
|||||||
- name: Build
|
- name: Build
|
||||||
run: cargo build --verbose
|
run: cargo build --verbose
|
||||||
- name: Run tests
|
- name: Run tests
|
||||||
run: cargo test --verbose -p schedule_parser
|
run: cargo test --verbose -p schedule-parser-rusted -p schedule_parser
|
||||||
|
|||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -1,4 +1,6 @@
|
|||||||
/target
|
/target
|
||||||
.~*.xls
|
.~*.xls
|
||||||
schedule.json
|
schedule.json
|
||||||
teachers.json
|
teachers.json
|
||||||
|
|
||||||
|
.env*
|
||||||
12
.idea/dataSources.xml
generated
Normal file
12
.idea/dataSources.xml
generated
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="DataSourceManagerImpl" format="xml" multifile-model="true">
|
||||||
|
<data-source source="LOCAL" name="sp@localhost" uuid="28502a90-08bf-4cc0-8494-10dc74e37189">
|
||||||
|
<driver-ref>postgresql</driver-ref>
|
||||||
|
<synchronize>true</synchronize>
|
||||||
|
<jdbc-driver>org.postgresql.Driver</jdbc-driver>
|
||||||
|
<jdbc-url>jdbc:postgresql://localhost:5432/sp</jdbc-url>
|
||||||
|
<working-dir>$ProjectFileDir$</working-dir>
|
||||||
|
</data-source>
|
||||||
|
</component>
|
||||||
|
</project>
|
||||||
9
.idea/sqldialects.xml
generated
Normal file
9
.idea/sqldialects.xml
generated
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="SqlDialectMappings">
|
||||||
|
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-211822_create_user_role/down.sql" dialect="PostgreSQL" />
|
||||||
|
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-212111_create_users/up.sql" dialect="PostgreSQL" />
|
||||||
|
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/down.sql" dialect="PostgreSQL" />
|
||||||
|
<file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/up.sql" dialect="PostgreSQL" />
|
||||||
|
</component>
|
||||||
|
</project>
|
||||||
128
CODE_OF_CONDUCT.md
Normal file
128
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
# Contributor Covenant Code of Conduct
|
||||||
|
|
||||||
|
## Our Pledge
|
||||||
|
|
||||||
|
We as members, contributors, and leaders pledge to make participation in our
|
||||||
|
community a harassment-free experience for everyone, regardless of age, body
|
||||||
|
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||||
|
identity and expression, level of experience, education, socio-economic status,
|
||||||
|
nationality, personal appearance, race, religion, or sexual identity
|
||||||
|
and orientation.
|
||||||
|
|
||||||
|
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||||
|
diverse, inclusive, and healthy community.
|
||||||
|
|
||||||
|
## Our Standards
|
||||||
|
|
||||||
|
Examples of behavior that contributes to a positive environment for our
|
||||||
|
community include:
|
||||||
|
|
||||||
|
* Demonstrating empathy and kindness toward other people
|
||||||
|
* Being respectful of differing opinions, viewpoints, and experiences
|
||||||
|
* Giving and gracefully accepting constructive feedback
|
||||||
|
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||||
|
and learning from the experience
|
||||||
|
* Focusing on what is best not just for us as individuals, but for the
|
||||||
|
overall community
|
||||||
|
|
||||||
|
Examples of unacceptable behavior include:
|
||||||
|
|
||||||
|
* The use of sexualized language or imagery, and sexual attention or
|
||||||
|
advances of any kind
|
||||||
|
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||||
|
* Public or private harassment
|
||||||
|
* Publishing others' private information, such as a physical or email
|
||||||
|
address, without their explicit permission
|
||||||
|
* Other conduct which could reasonably be considered inappropriate in a
|
||||||
|
professional setting
|
||||||
|
|
||||||
|
## Enforcement Responsibilities
|
||||||
|
|
||||||
|
Community leaders are responsible for clarifying and enforcing our standards of
|
||||||
|
acceptable behavior and will take appropriate and fair corrective action in
|
||||||
|
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||||
|
or harmful.
|
||||||
|
|
||||||
|
Community leaders have the right and responsibility to remove, edit, or reject
|
||||||
|
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||||
|
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||||
|
decisions when appropriate.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
This Code of Conduct applies within all community spaces, and also applies when
|
||||||
|
an individual is officially representing the community in public spaces.
|
||||||
|
Examples of representing our community include using an official e-mail address,
|
||||||
|
posting via an official social media account, or acting as an appointed
|
||||||
|
representative at an online or offline event.
|
||||||
|
|
||||||
|
## Enforcement
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported to the community leaders responsible for enforcement at
|
||||||
|
email.
|
||||||
|
All complaints will be reviewed and investigated promptly and fairly.
|
||||||
|
|
||||||
|
All community leaders are obligated to respect the privacy and security of the
|
||||||
|
reporter of any incident.
|
||||||
|
|
||||||
|
## Enforcement Guidelines
|
||||||
|
|
||||||
|
Community leaders will follow these Community Impact Guidelines in determining
|
||||||
|
the consequences for any action they deem in violation of this Code of Conduct:
|
||||||
|
|
||||||
|
### 1. Correction
|
||||||
|
|
||||||
|
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||||
|
unprofessional or unwelcome in the community.
|
||||||
|
|
||||||
|
**Consequence**: A private, written warning from community leaders, providing
|
||||||
|
clarity around the nature of the violation and an explanation of why the
|
||||||
|
behavior was inappropriate. A public apology may be requested.
|
||||||
|
|
||||||
|
### 2. Warning
|
||||||
|
|
||||||
|
**Community Impact**: A violation through a single incident or series
|
||||||
|
of actions.
|
||||||
|
|
||||||
|
**Consequence**: A warning with consequences for continued behavior. No
|
||||||
|
interaction with the people involved, including unsolicited interaction with
|
||||||
|
those enforcing the Code of Conduct, for a specified period of time. This
|
||||||
|
includes avoiding interactions in community spaces as well as external channels
|
||||||
|
like social media. Violating these terms may lead to a temporary or
|
||||||
|
permanent ban.
|
||||||
|
|
||||||
|
### 3. Temporary Ban
|
||||||
|
|
||||||
|
**Community Impact**: A serious violation of community standards, including
|
||||||
|
sustained inappropriate behavior.
|
||||||
|
|
||||||
|
**Consequence**: A temporary ban from any sort of interaction or public
|
||||||
|
communication with the community for a specified period of time. No public or
|
||||||
|
private interaction with the people involved, including unsolicited interaction
|
||||||
|
with those enforcing the Code of Conduct, is allowed during this period.
|
||||||
|
Violating these terms may lead to a permanent ban.
|
||||||
|
|
||||||
|
### 4. Permanent Ban
|
||||||
|
|
||||||
|
**Community Impact**: Demonstrating a pattern of violation of community
|
||||||
|
standards, including sustained inappropriate behavior, harassment of an
|
||||||
|
individual, or aggression toward or disparagement of classes of individuals.
|
||||||
|
|
||||||
|
**Consequence**: A permanent ban from any sort of public interaction within
|
||||||
|
the community.
|
||||||
|
|
||||||
|
## Attribution
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||||
|
version 2.0, available at
|
||||||
|
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
|
||||||
|
|
||||||
|
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||||
|
enforcement ladder](https://github.com/mozilla/diversity).
|
||||||
|
|
||||||
|
[homepage]: https://www.contributor-covenant.org
|
||||||
|
|
||||||
|
For answers to common questions about this code of conduct, see the FAQ at
|
||||||
|
https://www.contributor-covenant.org/faq. Translations are available at
|
||||||
|
https://www.contributor-covenant.org/translations.
|
||||||
2020
Cargo.lock
generated
2020
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
12
Cargo.toml
12
Cargo.toml
@@ -3,11 +3,17 @@ members = ["lib/schedule_parser"]
|
|||||||
|
|
||||||
[package]
|
[package]
|
||||||
name = "schedule-parser-rusted"
|
name = "schedule-parser-rusted"
|
||||||
version = "0.2.0"
|
version = "0.3.0"
|
||||||
edition = "2024"
|
edition = "2024"
|
||||||
publish = false
|
publish = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
diesel = { version = "2.2.8", features = ["postgres"] }
|
||||||
|
diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
|
||||||
|
dotenvy = "0.15.7"
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
serde_json = "1.0.140"
|
schedule_parser = { path = "./lib/schedule_parser" }
|
||||||
schedule_parser = { path = "./lib/schedule_parser" }
|
chrono = "0.4.40"
|
||||||
|
reqwest = "0.12.15"
|
||||||
|
tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] }
|
||||||
|
actix-web = "4.10.2"
|
||||||
|
|||||||
21
LICENSE
Normal file
21
LICENSE
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2025 Nikita
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
9
diesel.toml
Normal file
9
diesel.toml
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# For documentation on how to configure this file,
|
||||||
|
# see https://diesel.rs/guides/configuring-diesel-cli
|
||||||
|
|
||||||
|
[print_schema]
|
||||||
|
file = "src/database/schema.rs"
|
||||||
|
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
|
||||||
|
|
||||||
|
[migrations_directory]
|
||||||
|
dir = "./migrations"
|
||||||
@@ -1,11 +1,10 @@
|
|||||||
use criterion::{Criterion, criterion_group, criterion_main};
|
use criterion::{Criterion, criterion_group, criterion_main};
|
||||||
use schedule_parser::parse_xls;
|
use schedule_parser::parse_xls;
|
||||||
use std::path::Path;
|
|
||||||
|
|
||||||
pub fn bench_parse_xls(c: &mut Criterion) {
|
pub fn bench_parse_xls(c: &mut Criterion) {
|
||||||
c.bench_function("parse_xls", |b| {
|
let buffer: Vec<u8> = include_bytes!("../../../schedule.xls").to_vec();
|
||||||
b.iter(|| parse_xls(Path::new("../../schedule.xls")))
|
|
||||||
});
|
c.bench_function("parse_xls", |b| b.iter(|| parse_xls(&buffer)));
|
||||||
}
|
}
|
||||||
|
|
||||||
criterion_group!(benches, bench_parse_xls);
|
criterion_group!(benches, bench_parse_xls);
|
||||||
|
|||||||
@@ -1,16 +1,16 @@
|
|||||||
use crate::LessonParseResult::{Lessons, Street};
|
|
||||||
use crate::schema::LessonType::Break;
|
use crate::schema::LessonType::Break;
|
||||||
use crate::schema::{Day, Lesson, LessonSubGroup, LessonTime, LessonType, ScheduleEntity};
|
use crate::schema::{Day, Lesson, LessonSubGroup, LessonTime, LessonType, ScheduleEntity};
|
||||||
use calamine::{Reader, Xls, open_workbook};
|
use crate::LessonParseResult::{Lessons, Street};
|
||||||
|
use calamine::{open_workbook_from_rs, Reader, Xls};
|
||||||
use chrono::{Duration, NaiveDateTime};
|
use chrono::{Duration, NaiveDateTime};
|
||||||
use fuzzy_matcher::FuzzyMatcher;
|
|
||||||
use fuzzy_matcher::skim::SkimMatcherV2;
|
use fuzzy_matcher::skim::SkimMatcherV2;
|
||||||
|
use fuzzy_matcher::FuzzyMatcher;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::path::Path;
|
use std::io::Cursor;
|
||||||
use std::sync::LazyLock;
|
use std::sync::LazyLock;
|
||||||
|
|
||||||
mod schema;
|
pub mod schema;
|
||||||
|
|
||||||
struct InternalId {
|
struct InternalId {
|
||||||
/**
|
/**
|
||||||
@@ -539,12 +539,13 @@ fn convert_groups_to_teachers(
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_xls(
|
pub fn parse_xls(
|
||||||
path: &Path,
|
buffer: &Vec<u8>,
|
||||||
) -> (
|
) -> (
|
||||||
HashMap<String, ScheduleEntity>,
|
HashMap<String, ScheduleEntity>,
|
||||||
HashMap<String, ScheduleEntity>,
|
HashMap<String, ScheduleEntity>,
|
||||||
) {
|
) {
|
||||||
let mut workbook: Xls<_> = open_workbook(path).expect("Can't open workbook");
|
let cursor = Cursor::new(&buffer);
|
||||||
|
let mut workbook: Xls<_> = open_workbook_from_rs(cursor).expect("Can't open workbook");
|
||||||
|
|
||||||
let worksheet: WorkSheet = workbook
|
let worksheet: WorkSheet = workbook
|
||||||
.worksheets()
|
.worksheets()
|
||||||
@@ -693,9 +694,10 @@ mod tests {
|
|||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn it_works() {
|
fn read() {
|
||||||
let result = parse_xls(Path::new("../../schedule.xls"));
|
let buffer: Vec<u8> = include_bytes!("../../../../schedule.xls").to_vec();
|
||||||
|
let result = parse_xls(&buffer);
|
||||||
|
|
||||||
assert_ne!(result.0.len(), 0);
|
assert_ne!(result.0.len(), 0);
|
||||||
assert_ne!(result.1.len(), 0);
|
assert_ne!(result.1.len(), 0);
|
||||||
}
|
}
|
||||||
|
|||||||
0
migrations/.keep
Normal file
0
migrations/.keep
Normal file
6
migrations/00000000000000_diesel_initial_setup/down.sql
Normal file
6
migrations/00000000000000_diesel_initial_setup/down.sql
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
-- This file was automatically created by Diesel to setup helper functions
|
||||||
|
-- and other internal bookkeeping. This file is safe to edit, any future
|
||||||
|
-- changes will be added to existing projects as new migrations.
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
|
||||||
|
DROP FUNCTION IF EXISTS diesel_set_updated_at();
|
||||||
36
migrations/00000000000000_diesel_initial_setup/up.sql
Normal file
36
migrations/00000000000000_diesel_initial_setup/up.sql
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
-- This file was automatically created by Diesel to setup helper functions
|
||||||
|
-- and other internal bookkeeping. This file is safe to edit, any future
|
||||||
|
-- changes will be added to existing projects as new migrations.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
-- Sets up a trigger for the given table to automatically set a column called
|
||||||
|
-- `updated_at` whenever the row is modified (unless `updated_at` was included
|
||||||
|
-- in the modified columns)
|
||||||
|
--
|
||||||
|
-- # Example
|
||||||
|
--
|
||||||
|
-- ```sql
|
||||||
|
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
|
||||||
|
--
|
||||||
|
-- SELECT diesel_manage_updated_at('users');
|
||||||
|
-- ```
|
||||||
|
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
|
||||||
|
BEGIN
|
||||||
|
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
|
||||||
|
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
|
||||||
|
BEGIN
|
||||||
|
IF (
|
||||||
|
NEW IS DISTINCT FROM OLD AND
|
||||||
|
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
|
||||||
|
) THEN
|
||||||
|
NEW.updated_at := current_timestamp;
|
||||||
|
END IF;
|
||||||
|
RETURN NEW;
|
||||||
|
END;
|
||||||
|
$$ LANGUAGE plpgsql;
|
||||||
1
migrations/2025-03-21-211822_create_user_role/down.sql
Normal file
1
migrations/2025-03-21-211822_create_user_role/down.sql
Normal file
@@ -0,0 +1 @@
|
|||||||
|
DROP TYPE user_role;
|
||||||
4
migrations/2025-03-21-211822_create_user_role/up.sql
Normal file
4
migrations/2025-03-21-211822_create_user_role/up.sql
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
CREATE TYPE user_role AS ENUM (
|
||||||
|
'STUDENT',
|
||||||
|
'TEACHER',
|
||||||
|
'ADMIN');
|
||||||
1
migrations/2025-03-21-212111_create_users/down.sql
Normal file
1
migrations/2025-03-21-212111_create_users/down.sql
Normal file
@@ -0,0 +1 @@
|
|||||||
|
DROP TABLE users;
|
||||||
11
migrations/2025-03-21-212111_create_users/up.sql
Normal file
11
migrations/2025-03-21-212111_create_users/up.sql
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
CREATE TABLE users
|
||||||
|
(
|
||||||
|
id text PRIMARY KEY NOT NULL,
|
||||||
|
username text UNIQUE NOT NULL,
|
||||||
|
"password" text NOT NULL,
|
||||||
|
vk_id int4 NULL,
|
||||||
|
access_token text UNIQUE NOT NULL,
|
||||||
|
"group" text NOT NULL,
|
||||||
|
role user_role NOT NULL,
|
||||||
|
version text NOT NULL
|
||||||
|
);
|
||||||
1
migrations/2025-03-21-212723_create_fcm/down.sql
Normal file
1
migrations/2025-03-21-212723_create_fcm/down.sql
Normal file
@@ -0,0 +1 @@
|
|||||||
|
DROP TABLE fcm;
|
||||||
11
migrations/2025-03-21-212723_create_fcm/up.sql
Normal file
11
migrations/2025-03-21-212723_create_fcm/up.sql
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
CREATE TABLE fcm
|
||||||
|
(
|
||||||
|
user_id text PRIMARY KEY NOT NULL,
|
||||||
|
token text NOT NULL,
|
||||||
|
topics text[] NULL
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE UNIQUE INDEX fcm_user_id_key ON fcm USING btree (user_id);
|
||||||
|
|
||||||
|
ALTER TABLE fcm
|
||||||
|
ADD CONSTRAINT fcm_user_id_fkey FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE RESTRICT ON UPDATE CASCADE;
|
||||||
2
src/database/mod.rs
Normal file
2
src/database/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
pub mod schema;
|
||||||
|
pub mod models;
|
||||||
26
src/database/models.rs
Normal file
26
src/database/models.rs
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
use diesel::prelude::*;
|
||||||
|
use serde::Serialize;
|
||||||
|
|
||||||
|
#[derive(diesel_derive_enum::DbEnum, Serialize, Debug)]
|
||||||
|
#[ExistingTypePath = "crate::database::schema::sql_types::UserRole"]
|
||||||
|
#[DbValueStyle = "UPPERCASE"]
|
||||||
|
#[serde(rename_all = "UPPERCASE")]
|
||||||
|
pub enum UserRole {
|
||||||
|
Student,
|
||||||
|
Teacher,
|
||||||
|
Admin,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Queryable, Selectable, Serialize)]
|
||||||
|
#[diesel(table_name = crate::database::schema::users)]
|
||||||
|
#[diesel(check_for_backend(diesel::pg::Pg))]
|
||||||
|
pub struct User {
|
||||||
|
pub id: String,
|
||||||
|
pub username: String,
|
||||||
|
pub password: String,
|
||||||
|
pub vk_id: Option<i32>,
|
||||||
|
pub access_token: String,
|
||||||
|
pub group: String,
|
||||||
|
pub role: UserRole,
|
||||||
|
pub version: String,
|
||||||
|
}
|
||||||
38
src/database/schema.rs
Normal file
38
src/database/schema.rs
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
// @generated automatically by Diesel CLI.
|
||||||
|
|
||||||
|
pub mod sql_types {
|
||||||
|
#[derive(diesel::query_builder::QueryId, Clone, diesel::sql_types::SqlType)]
|
||||||
|
#[diesel(postgres_type(name = "user_role"))]
|
||||||
|
pub struct UserRole;
|
||||||
|
}
|
||||||
|
|
||||||
|
diesel::table! {
|
||||||
|
fcm (user_id) {
|
||||||
|
user_id -> Text,
|
||||||
|
token -> Text,
|
||||||
|
topics -> Nullable<Array<Nullable<Text>>>,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
diesel::table! {
|
||||||
|
use diesel::sql_types::*;
|
||||||
|
use super::sql_types::UserRole;
|
||||||
|
|
||||||
|
users (id) {
|
||||||
|
id -> Text,
|
||||||
|
username -> Text,
|
||||||
|
password -> Text,
|
||||||
|
vk_id -> Nullable<Int4>,
|
||||||
|
access_token -> Text,
|
||||||
|
group -> Text,
|
||||||
|
role -> UserRole,
|
||||||
|
version -> Text,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
diesel::joinable!(fcm -> users (user_id));
|
||||||
|
|
||||||
|
diesel::allow_tables_to_appear_in_same_query!(
|
||||||
|
fcm,
|
||||||
|
users,
|
||||||
|
);
|
||||||
74
src/main.rs
74
src/main.rs
@@ -1,23 +1,57 @@
|
|||||||
use std::fs;
|
use crate::routes::auth::sign_in::sign_in;
|
||||||
use std::path::Path;
|
use crate::xls_downloader::basic_impl::BasicXlsDownloader;
|
||||||
use schedule_parser::parse_xls;
|
use actix_web::{web, App, HttpServer};
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use diesel::{Connection, PgConnection};
|
||||||
|
use dotenvy::dotenv;
|
||||||
|
use schedule_parser::schema::ScheduleEntity;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use std::sync::Mutex;
|
||||||
|
use std::env;
|
||||||
|
|
||||||
fn main() {
|
mod database;
|
||||||
let (teachers, groups) = parse_xls(Path::new("./schedule.xls"));
|
mod routes;
|
||||||
|
mod xls_downloader;
|
||||||
|
|
||||||
fs::write(
|
pub struct AppState {
|
||||||
"./schedule.json",
|
downloader: Mutex<BasicXlsDownloader>,
|
||||||
serde_json::to_string_pretty(&groups)
|
schedule: Mutex<
|
||||||
.expect("Failed to serialize schedule")
|
Option<(
|
||||||
.as_bytes(),
|
String,
|
||||||
)
|
DateTime<Utc>,
|
||||||
.expect("Failed to write schedule");
|
(
|
||||||
|
HashMap<String, ScheduleEntity>,
|
||||||
fs::write(
|
HashMap<String, ScheduleEntity>,
|
||||||
"./teachers.json",
|
),
|
||||||
serde_json::to_string_pretty(&teachers)
|
)>,
|
||||||
.expect("Failed to serialize teachers schedule")
|
>,
|
||||||
.as_bytes(),
|
database: Mutex<PgConnection>,
|
||||||
)
|
}
|
||||||
.expect("Failed to write teachers schedule");
|
|
||||||
|
#[actix_web::main]
|
||||||
|
async fn main() {
|
||||||
|
dotenv().ok();
|
||||||
|
|
||||||
|
let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
|
||||||
|
|
||||||
|
let data = web::Data::new(AppState {
|
||||||
|
downloader: Mutex::new(BasicXlsDownloader::new()),
|
||||||
|
schedule: Mutex::new(None),
|
||||||
|
database: Mutex::new(
|
||||||
|
PgConnection::establish(&database_url)
|
||||||
|
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
|
||||||
|
),
|
||||||
|
});
|
||||||
|
|
||||||
|
HttpServer::new(move || {
|
||||||
|
let schedule_scope = web::scope("/auth").service(sign_in);
|
||||||
|
let api_scope = web::scope("/api/v1").service(schedule_scope);
|
||||||
|
|
||||||
|
App::new().app_data(data.clone()).service(api_scope)
|
||||||
|
})
|
||||||
|
.bind(("127.0.0.1", 8080))
|
||||||
|
.unwrap()
|
||||||
|
.run()
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
}
|
}
|
||||||
|
|||||||
2
src/routes/auth/mod.rs
Normal file
2
src/routes/auth/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
pub mod sign_in;
|
||||||
|
mod schema;
|
||||||
56
src/routes/auth/schema.rs
Normal file
56
src/routes/auth/schema.rs
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
use crate::database::models::User;
|
||||||
|
use serde::{Deserialize, Serialize, Serializer};
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
pub struct SignInDto {
|
||||||
|
pub username: String,
|
||||||
|
pub password: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct SignInResult(Result<SignInOk, SignInErr>);
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
|
pub struct SignInOk {
|
||||||
|
id: String,
|
||||||
|
access_token: String,
|
||||||
|
group: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
pub struct SignInErr {
|
||||||
|
code: SignInErrCode,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize)]
|
||||||
|
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||||
|
pub enum SignInErrCode {
|
||||||
|
IncorrectCredentials,
|
||||||
|
InvalidVkAccessToken,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SignInResult {
|
||||||
|
pub fn ok(user: &User) -> Self {
|
||||||
|
Self(Ok(SignInOk {
|
||||||
|
id: user.id.clone(),
|
||||||
|
access_token: user.access_token.clone(),
|
||||||
|
group: user.group.clone(),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn err(code: SignInErrCode) -> SignInResult {
|
||||||
|
Self(Err(SignInErr { code }))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Serialize for SignInResult {
|
||||||
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: Serializer,
|
||||||
|
{
|
||||||
|
match &self.0 {
|
||||||
|
Ok(ok) => serializer.serialize_some(&ok),
|
||||||
|
Err(err) => serializer.serialize_some(&err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
26
src/routes/auth/sign_in.rs
Normal file
26
src/routes/auth/sign_in.rs
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
use crate::database::models::User;
|
||||||
|
use crate::routes::auth::schema::SignInErrCode::IncorrectCredentials;
|
||||||
|
use crate::routes::auth::schema::{SignInDto, SignInResult};
|
||||||
|
use crate::AppState;
|
||||||
|
use actix_web::{post, web};
|
||||||
|
use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper};
|
||||||
|
use std::ops::DerefMut;
|
||||||
|
use web::Json;
|
||||||
|
|
||||||
|
#[post("/sign-in")]
|
||||||
|
pub async fn sign_in(data: Json<SignInDto>, app_state: web::Data<AppState>) -> Json<SignInResult> {
|
||||||
|
use crate::database::schema::users::dsl::*;
|
||||||
|
|
||||||
|
match {
|
||||||
|
let mut lock = app_state.database.lock().unwrap();
|
||||||
|
let connection = lock.deref_mut();
|
||||||
|
|
||||||
|
users
|
||||||
|
.filter(username.eq(data.username.clone()))
|
||||||
|
.select(User::as_select())
|
||||||
|
.first(connection)
|
||||||
|
} {
|
||||||
|
Ok(user) => Json(SignInResult::ok(&user)),
|
||||||
|
Err(_) => Json(SignInResult::err(IncorrectCredentials)),
|
||||||
|
}
|
||||||
|
}
|
||||||
1
src/routes/mod.rs
Normal file
1
src/routes/mod.rs
Normal file
@@ -0,0 +1 @@
|
|||||||
|
pub mod auth;
|
||||||
219
src/xls_downloader/basic_impl.rs
Normal file
219
src/xls_downloader/basic_impl.rs
Normal file
@@ -0,0 +1,219 @@
|
|||||||
|
use crate::xls_downloader::interface::{FetchError, FetchOk, FetchResult, XLSDownloader};
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
|
||||||
|
pub struct BasicXlsDownloader {
|
||||||
|
url: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn fetch_specified(url: &String, user_agent: String, head: bool) -> FetchResult {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
|
||||||
|
let response = if head {
|
||||||
|
client.head(url)
|
||||||
|
} else {
|
||||||
|
client.get(url)
|
||||||
|
}
|
||||||
|
.header("User-Agent", user_agent)
|
||||||
|
.send()
|
||||||
|
.await;
|
||||||
|
|
||||||
|
match response {
|
||||||
|
Ok(r) => {
|
||||||
|
if r.status().as_u16() != 200 {
|
||||||
|
return Err(FetchError::BadStatusCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
let headers = r.headers();
|
||||||
|
|
||||||
|
let content_type = headers.get("Content-Type");
|
||||||
|
let etag = headers.get("etag");
|
||||||
|
let last_modified = headers.get("last-modified");
|
||||||
|
let date = headers.get("date");
|
||||||
|
|
||||||
|
if content_type.is_none() || etag.is_none() || last_modified.is_none() || date.is_none()
|
||||||
|
{
|
||||||
|
Err(FetchError::BadHeaders)
|
||||||
|
} else if content_type.unwrap() != "application/vnd.ms-excel" {
|
||||||
|
Err(FetchError::BadContentType)
|
||||||
|
} else {
|
||||||
|
let etag = etag.unwrap().to_str().unwrap().to_string();
|
||||||
|
let last_modified =
|
||||||
|
DateTime::parse_from_rfc2822(&last_modified.unwrap().to_str().unwrap())
|
||||||
|
.unwrap()
|
||||||
|
.with_timezone(&Utc);
|
||||||
|
|
||||||
|
Ok(if head {
|
||||||
|
FetchOk::head(etag, last_modified)
|
||||||
|
} else {
|
||||||
|
FetchOk::get(etag, last_modified, r.bytes().await.unwrap().to_vec())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(_) => Err(FetchError::Unknown),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BasicXlsDownloader {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
BasicXlsDownloader { url: None }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl XLSDownloader for BasicXlsDownloader {
|
||||||
|
async fn fetch(&self, head: bool) -> FetchResult {
|
||||||
|
if self.url.is_none() {
|
||||||
|
Err(FetchError::NoUrlProvided)
|
||||||
|
} else {
|
||||||
|
fetch_specified(
|
||||||
|
self.url.as_ref().unwrap(),
|
||||||
|
"t.me/polytechnic_next".to_string(),
|
||||||
|
head,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn set_url(&mut self, url: String) -> Result<(), FetchError> {
|
||||||
|
let result = fetch_specified(&url, "t.me/polytechnic_next".to_string(), true).await;
|
||||||
|
|
||||||
|
if let Ok(_) = result {
|
||||||
|
Ok(self.url = Some(url))
|
||||||
|
} else {
|
||||||
|
Err(result.err().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use crate::xls_downloader::basic_impl::{BasicXlsDownloader, fetch_specified};
    use crate::xls_downloader::interface::{FetchError, FetchResult, XLSDownloader};

    /// Fetches `url` twice — once head-only and once with the body — so each
    /// test exercises both code paths of `fetch_specified`.
    ///
    /// NOTE(review): these tests hit live external hosts (google.com and an
    /// s3 bucket), so they are network-dependent and may be flaky offline.
    async fn fetch_both(url: &str) -> [FetchResult; 2] {
        let url = url.to_string();
        let user_agent = String::new();

        [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent, false).await,
        ]
    }

    /// A syntactically invalid URL must fail in both modes.
    #[tokio::test]
    async fn bad_url() {
        for result in fetch_both("bad_url").await {
            assert!(result.is_err());
        }
    }

    /// A reachable host returning a non-success status maps to `BadStatusCode`.
    #[tokio::test]
    async fn bad_status_code() {
        for result in fetch_both("https://www.google.com/not-found").await {
            assert!(matches!(result, Err(FetchError::BadStatusCode)));
        }
    }

    /// A response lacking the headers the downloader requires maps to
    /// `BadHeaders`.
    #[tokio::test]
    async fn bad_headers() {
        for result in fetch_both("https://www.google.com/favicon.ico").await {
            assert!(matches!(result, Err(FetchError::BadHeaders)));
        }
    }

    /// A non-XLS document (plain text) maps to `BadContentType`.
    #[tokio::test]
    async fn bad_content_type() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb59fd46303008396ac96%2Fexample.txt";

        for result in fetch_both(url).await {
            assert!(matches!(result, Err(FetchError::BadContentType)));
        }
    }

    /// A valid XLS document succeeds in both modes.
    #[tokio::test]
    async fn ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";

        for result in fetch_both(url).await {
            assert!(result.is_ok());
        }
    }

    /// `set_url` accepts a URL that probes successfully.
    #[tokio::test]
    async fn downloader_set_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();

        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_ok());
    }

    /// `set_url` rejects an invalid URL.
    #[tokio::test]
    async fn downloader_set_err() {
        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url("bad_url".to_string()).await.is_err());
    }

    /// After a successful `set_url`, `fetch` succeeds as well.
    #[tokio::test]
    async fn downloader_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();

        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_ok());
        assert!(downloader.fetch(false).await.is_ok());
    }

    /// Fetching before any URL was set yields `NoUrlProvided`.
    #[tokio::test]
    async fn downloader_no_url_provided() {
        let downloader = BasicXlsDownloader::new();

        assert!(matches!(
            downloader.fetch(false).await,
            Err(FetchError::NoUrlProvided)
        ));
    }
}
|
||||||
44
src/xls_downloader/interface.rs
Normal file
44
src/xls_downloader/interface.rs
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
|
||||||
|
/// Error produced while downloading or validating a remote XLS document.
#[derive(PartialEq, Debug)]
pub enum FetchError {
    /// No URL has been set on the downloader yet.
    NoUrlProvided,
    /// The request failed for an unclassified reason (e.g. a transport error).
    Unknown,
    /// The server answered with a non-success HTTP status.
    BadStatusCode,
    /// The response body is not an XLS document.
    BadContentType,
    /// A required response header is missing or malformed.
    BadHeaders,
}

impl std::fmt::Display for FetchError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Stable, user-presentable messages; Debug remains the diagnostic form.
        let msg = match self {
            FetchError::NoUrlProvided => "no URL provided",
            FetchError::Unknown => "unknown fetch error",
            FetchError::BadStatusCode => "bad status code",
            FetchError::BadContentType => "bad content type",
            FetchError::BadHeaders => "bad headers",
        };

        f.write_str(msg)
    }
}

// Allows `FetchError` to flow through `Box<dyn Error>` / `anyhow`-style callers.
impl std::error::Error for FetchError {}
|
||||||
|
|
||||||
|
pub struct FetchOk {
|
||||||
|
pub etag: String,
|
||||||
|
pub uploaded_at: DateTime<Utc>,
|
||||||
|
pub requested_at: DateTime<Utc>,
|
||||||
|
pub data: Option<Vec<u8>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FetchOk {
|
||||||
|
pub fn head(etag: String, uploaded_at: DateTime<Utc>) -> Self {
|
||||||
|
FetchOk {
|
||||||
|
etag,
|
||||||
|
uploaded_at,
|
||||||
|
requested_at: Utc::now(),
|
||||||
|
data: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get(etag: String, uploaded_at: DateTime<Utc>, data: Vec<u8>) -> Self {
|
||||||
|
FetchOk {
|
||||||
|
etag,
|
||||||
|
uploaded_at,
|
||||||
|
requested_at: Utc::now(),
|
||||||
|
data: Some(data),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type FetchResult = Result<FetchOk, FetchError>;
|
||||||
|
|
||||||
|
/// Downloader of XLS schedule documents from a configurable URL.
pub trait XLSDownloader {
    /// Fetches from the currently stored URL. When `head` is `true` only
    /// metadata is requested; otherwise the document body is downloaded too.
    async fn fetch(&self, head: bool) -> FetchResult;

    /// Stores `url` as the source for subsequent fetches; implementations
    /// may validate the URL first and return a [`FetchError`] on failure.
    async fn set_url(&mut self, url: String) -> Result<(), FetchError>;
}
|
||||||
2
src/xls_downloader/mod.rs
Normal file
2
src/xls_downloader/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
pub mod basic_impl;
|
||||||
|
pub mod interface;
|
||||||
Reference in New Issue
Block a user