Mirror of https://github.com/n08i40k/schedule-parser-rusted.git, synced 2025-12-06 17:57:47 +03:00.
Compare commits: release/v0...844c89a365 (12 commits)

844c89a365
ba86dfc3fe
9f7460973e
3cf42eea8a
d19b6c1069
126ba23001
d75d3fbc97
627cf1a74e
b508db693e
436d08a56a
aa2618c5f5
f0a951ad38
.github/dependabot.yml (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
version: 2
updates:
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "weekly"
.github/workflows/test.yml (vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
name: Tests

on:
  push:
    branches: [ "master" ]
  pull_request:
    branches: [ "master" ]

permissions:
  contents: read

env:
  CARGO_TERM_COLOR: always

jobs:
  build:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v4
    - name: Build
      run: cargo build --verbose
    - name: Run tests
      run: cargo test --verbose -p schedule-parser-rusted -p schedule_parser
.gitignore (vendored, modified)
@@ -1,3 +1,6 @@
 /target
 .~*.xls
 schedule.json
+teachers.json
+
+.env*
.idea/dataSources.xml (generated, new file, 12 lines)
@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="DataSourceManagerImpl" format="xml" multifile-model="true">
    <data-source source="LOCAL" name="sp@localhost" uuid="28502a90-08bf-4cc0-8494-10dc74e37189">
      <driver-ref>postgresql</driver-ref>
      <synchronize>true</synchronize>
      <jdbc-driver>org.postgresql.Driver</jdbc-driver>
      <jdbc-url>jdbc:postgresql://localhost:5432/sp</jdbc-url>
      <working-dir>$ProjectFileDir$</working-dir>
    </data-source>
  </component>
</project>
.idea/sqldialects.xml (generated, new file, 9 lines)
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="SqlDialectMappings">
    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-211822_create_user_role/down.sql" dialect="PostgreSQL" />
    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-212111_create_users/up.sql" dialect="PostgreSQL" />
    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/down.sql" dialect="PostgreSQL" />
    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/up.sql" dialect="PostgreSQL" />
  </component>
</project>
CODE_OF_CONDUCT.md (new file, 128 lines)
@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our
community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
  and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
  overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or
  advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
  address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
  professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.

Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
email.
All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the
reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series
of actions.

**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within
the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
Cargo.lock (generated, 929 changed lines): diff suppressed because it is too large.
Cargo.toml (modified)
@@ -3,13 +3,21 @@ members = ["lib/schedule_parser"]
 
 [package]
 name = "schedule-parser-rusted"
-version = "0.1.0"
+version = "0.3.0"
 edition = "2024"
 publish = false
 
 [dependencies]
-actix-web = "4.10.2"
+bcrypt = "0.17.0"
+jwt = "0.16.0"
+hmac = "0.12.1"
+sha2 = "0.10.8"
+diesel = { version = "2.2.8", features = ["postgres"] }
+diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
+dotenvy = "0.15.7"
 serde = { version = "1.0.219", features = ["derive"] }
-serde_repr = "0.1.20"
-serde_json = "1.0.140"
 schedule_parser = { path = "./lib/schedule_parser" }
+chrono = "0.4.40"
+reqwest = "0.12.15"
+tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] }
+actix-web = "4.10.2"
LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Nikita

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
README.md (new file, 3 lines)
@@ -0,0 +1,3 @@
# API for fetching the polytechnic schedule

[![Tests](https://github.com/n08i40k/schedule-parser-rusted/actions/workflows/test.yml/badge.svg)](https://github.com/n08i40k/schedule-parser-rusted/actions/workflows/test.yml)
diesel.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "src/database/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]

[migrations_directory]
dir = "./migrations"
lib/schedule_parser/Cargo.toml (modified)
@@ -1,6 +1,6 @@
 [package]
 name = "schedule_parser"
-version = "0.1.0"
+version = "0.2.0"
 edition = "2024"
 
 [lib]
lib/schedule_parser benchmark (modified)
@@ -1,11 +1,10 @@
 use criterion::{Criterion, criterion_group, criterion_main};
 use schedule_parser::parse_xls;
-use std::path::Path;
 
 pub fn bench_parse_xls(c: &mut Criterion) {
-    c.bench_function("parse_xls", |b| {
-        b.iter(|| parse_xls(Path::new("../../schedule.xls")))
-    });
+    let buffer: Vec<u8> = include_bytes!("../../../schedule.xls").to_vec();
+
+    c.bench_function("parse_xls", |b| b.iter(|| parse_xls(&buffer)));
 }
 
 criterion_group!(benches, bench_parse_xls);
lib/schedule_parser/src/lib.rs (modified)
@@ -1,16 +1,16 @@
-use crate::LessonParseResult::{Lessons, Street};
 use crate::schema::LessonType::Break;
-use crate::schema::{Day, Group, Lesson, LessonSubGroup, LessonTime, LessonType};
-use calamine::{Reader, Xls, open_workbook};
+use crate::schema::{Day, Lesson, LessonSubGroup, LessonTime, LessonType, ScheduleEntity};
+use crate::LessonParseResult::{Lessons, Street};
+use calamine::{open_workbook_from_rs, Reader, Xls};
 use chrono::{Duration, NaiveDateTime};
-use fuzzy_matcher::FuzzyMatcher;
 use fuzzy_matcher::skim::SkimMatcherV2;
+use fuzzy_matcher::FuzzyMatcher;
 use regex::Regex;
 use std::collections::HashMap;
-use std::path::Path;
+use std::io::Cursor;
 use std::sync::LazyLock;
 
-mod schema;
+pub mod schema;
 
 struct InternalId {
     /**
@@ -470,8 +470,82 @@ fn parse_name_and_subgroups(name: &String) -> (String, Vec<LessonSubGroup>) {
     (lesson_name, subgroups)
 }
 
-pub fn parse_xls(path: &Path) -> HashMap<String, Group> {
-    let mut workbook: Xls<_> = open_workbook(path).expect("Can't open workbook");
+fn convert_groups_to_teachers(
+    groups: &HashMap<String, ScheduleEntity>,
+) -> HashMap<String, ScheduleEntity> {
+    let mut teachers: HashMap<String, ScheduleEntity> = HashMap::new();
+
+    let empty_days: Vec<Day> = groups
+        .values()
+        .next()
+        .unwrap()
+        .days
+        .iter()
+        .map(|day| Day {
+            name: day.name.clone(),
+            street: day.street.clone(),
+            date: day.date.clone(),
+            lessons: vec![],
+        })
+        .collect();
+
+    for group in groups.values() {
+        for (index, day) in group.days.iter().enumerate() {
+            for group_lesson in &day.lessons {
+                if group_lesson.lesson_type == Break {
+                    continue;
+                }
+
+                if group_lesson.subgroups.is_none() {
+                    continue;
+                }
+
+                let subgroups = group_lesson.subgroups.as_ref().unwrap();
+
+                for subgroup in subgroups {
+                    if subgroup.teacher == "Ошибка в расписании" {
+                        continue;
+                    }
+
+                    if !teachers.contains_key(&subgroup.teacher) {
+                        teachers.insert(
+                            subgroup.teacher.clone(),
+                            ScheduleEntity {
+                                name: subgroup.teacher.clone(),
+                                days: empty_days.to_vec(),
+                            },
+                        );
+                    }
+
+                    let teacher_day = teachers
+                        .get_mut(&subgroup.teacher)
+                        .unwrap()
+                        .days
+                        .get_mut(index)
+                        .unwrap();
+
+                    teacher_day.lessons.push({
+                        let mut lesson = group_lesson.clone();
+                        lesson.group = Some(group.name.clone());
+
+                        lesson
+                    });
+                }
+            }
+        }
+    }
+
+    teachers
+}
+
+pub fn parse_xls(
+    buffer: &Vec<u8>,
+) -> (
+    HashMap<String, ScheduleEntity>,
+    HashMap<String, ScheduleEntity>,
+) {
+    let cursor = Cursor::new(&buffer);
+    let mut workbook: Xls<_> = open_workbook_from_rs(cursor).expect("Can't open workbook");
 
     let worksheet: WorkSheet = workbook
         .worksheets()
@@ -482,13 +556,13 @@ pub fn parse_xls(path: &Path) -> HashMap<String, Group> {
 
     let (days_markup, groups_markup) = parse_skeleton(&worksheet);
 
-    let mut groups: HashMap<String, Group> = HashMap::new();
+    let mut groups: HashMap<String, ScheduleEntity> = HashMap::new();
     let mut days_times: Vec<Vec<InternalTime>> = Vec::new();
 
     let saturday_end_row = worksheet.end().unwrap().0;
 
     for group_markup in groups_markup {
-        let mut group = Group {
+        let mut group = ScheduleEntity {
             name: group_markup.name,
             days: Vec::new(),
         };
@@ -612,7 +686,7 @@ pub fn parse_xls(path: &Path) -> HashMap<String, Group> {
         groups.insert(group.name.clone(), group);
     }
 
-    groups
+    (convert_groups_to_teachers(&groups), groups)
 }
 
 #[cfg(test)]
@@ -620,8 +694,11 @@ mod tests {
     use super::*;
 
     #[test]
-    fn it_works() {
-        let result = parse_xls(Path::new("../../schedule.xls"));
-        assert_ne!(result.len(), 0);
+    fn read() {
+        let buffer: Vec<u8> = include_bytes!("../../../../schedule.xls").to_vec();
+        let result = parse_xls(&buffer);
+
+        assert_ne!(result.0.len(), 0);
+        assert_ne!(result.1.len(), 0);
     }
 }
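The entry point now takes the workbook bytes and returns both views of the timetable, teachers first and groups second. A minimal caller sketch (not part of the diff; the on-disk path is hypothetical):

use schedule_parser::parse_xls;

fn main() {
    // Read the workbook into memory, then parse it.
    let buffer: Vec<u8> = std::fs::read("schedule.xls").expect("read schedule.xls");
    let (teachers, groups) = parse_xls(&buffer);

    println!("{} teachers, {} groups", teachers.len(), groups.len());
}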
lib/schedule_parser/src/schema.rs (modified)
@@ -1,9 +1,9 @@
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
 use serde_repr::{Deserialize_repr, Serialize_repr};
+use std::collections::HashMap;
 
-#[derive(Serialize, Deserialize, Debug)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct LessonTime {
     pub start: DateTime<Utc>,
     pub end: DateTime<Utc>,
@@ -22,7 +22,7 @@ pub enum LessonType {
     ExamDefault, // Exam
 }
 
-#[derive(Serialize, Deserialize, Debug)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct LessonSubGroup {
     pub number: u8,
 
@@ -31,7 +31,7 @@ pub struct LessonSubGroup {
     pub teacher: String,
 }
 
-#[derive(Serialize, Deserialize, Debug)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct Lesson {
     /**
      * Lesson type
@@ -67,7 +67,7 @@ pub struct Lesson {
     pub group: Option<String>,
 }
 
-#[derive(Serialize, Deserialize, Debug)]
+#[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct Day {
     pub name: String,
 
@@ -79,7 +79,7 @@ pub struct Day {
 }
 
 #[derive(Serialize, Deserialize, Debug)]
-pub struct Group {
+pub struct ScheduleEntity {
     pub name: String,
 
     pub days: Vec<Day>,
@@ -90,8 +90,18 @@ pub struct Schedule {
     #[serde(rename = "updatedAt")]
     pub updated_at: DateTime<Utc>,
 
-    pub groups: HashMap<String, Group>,
+    pub groups: HashMap<String, ScheduleEntity>,
 
     #[serde(rename = "updatedGroups")]
     pub updated_groups: Vec<Vec<usize>>,
 }
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct TeacherSchedule {
+    #[serde(rename = "updatedAt")]
+    pub updated_at: DateTime<Utc>,
+
+    pub teacher: ScheduleEntity,
+
+    pub updated: Vec<usize>,
+}
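For reference, the serde renames mean a serialized TeacherSchedule carries camelCase keys. A construction sketch (illustrative only; the teacher name is made up):

use chrono::Utc;
use schedule_parser::schema::{ScheduleEntity, TeacherSchedule};

fn teacher_schedule_demo() -> TeacherSchedule {
    // Serializes with the keys "updatedAt", "teacher", and "updated".
    TeacherSchedule {
        updated_at: Utc::now(),
        teacher: ScheduleEntity {
            name: "Ivanov I. I.".to_string(),
            days: vec![],
        },
        updated: vec![],
    }
}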
migrations/.keep (new file, empty)
migrations/00000000000000_diesel_initial_setup/down.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();
migrations/00000000000000_diesel_initial_setup/up.sql (new file, 36 lines)
@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.




-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
    IF (
        NEW IS DISTINCT FROM OLD AND
        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
    ) THEN
        NEW.updated_at := current_timestamp;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
migrations/2025-03-21-211822_create_user_role/down.sql (new file, 1 line)
@@ -0,0 +1 @@
DROP TYPE user_role;
migrations/2025-03-21-211822_create_user_role/up.sql (new file, 4 lines)
@@ -0,0 +1,4 @@
CREATE TYPE user_role AS ENUM (
    'STUDENT',
    'TEACHER',
    'ADMIN');
migrations/2025-03-21-212111_create_users/down.sql (new file, 1 line)
@@ -0,0 +1 @@
DROP TABLE users;
migrations/2025-03-21-212111_create_users/up.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
CREATE TABLE users
(
    id           text PRIMARY KEY NOT NULL,
    username     text UNIQUE NOT NULL,
    "password"   text NOT NULL,
    vk_id        int4 NULL,
    access_token text UNIQUE NOT NULL,
    "group"      text NOT NULL,
    role         user_role NOT NULL,
    version      text NOT NULL
);
migrations/2025-03-21-212723_create_fcm/down.sql (new file, 1 line)
@@ -0,0 +1 @@
DROP TABLE fcm;
migrations/2025-03-21-212723_create_fcm/up.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
CREATE TABLE fcm
(
    user_id text PRIMARY KEY NOT NULL,
    token   text NOT NULL,
    topics  text[] NULL
);

CREATE UNIQUE INDEX fcm_user_id_key ON fcm USING btree (user_id);

ALTER TABLE fcm
    ADD CONSTRAINT fcm_user_id_fkey FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE RESTRICT ON UPDATE CASCADE;
src/database/driver.rs (new file, 34 lines)
@@ -0,0 +1,34 @@
pub mod users {
    use crate::database::models::User;
    use crate::database::schema::fcm::user_id;
    use crate::database::schema::users::dsl::users;
    use crate::database::schema::users::dsl::*;
    use diesel::{ExpressionMethods, QueryResult};
    use diesel::{PgConnection, SelectableHelper};
    use diesel::{QueryDsl, RunQueryDsl};
    use std::ops::DerefMut;
    use std::sync::Mutex;

    pub fn get(connection: &Mutex<PgConnection>, _id: String) -> QueryResult<User> {
        let mut lock = connection.lock().unwrap();
        let con = lock.deref_mut();

        users
            .filter(id.eq(_id))
            .select(User::as_select())
            .first(con)
    }

    pub fn get_by_username(
        connection: &Mutex<PgConnection>,
        _username: String,
    ) -> QueryResult<User> {
        let mut lock = connection.lock().unwrap();
        let con = lock.deref_mut();

        users
            .filter(username.eq(_username))
            .select(User::as_select())
            .first(con)
    }
}
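A usage sketch for these helpers from inside the crate, assuming DATABASE_URL points at the migrated database (the username is hypothetical):

use crate::database::driver;
use diesel::{Connection, PgConnection};
use std::sync::Mutex;

fn lookup_demo() {
    // Establish a connection and wrap it the way the helpers expect.
    let url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    let connection = Mutex::new(PgConnection::establish(&url).expect("connect"));

    match driver::users::get_by_username(&connection, "some_user".to_string()) {
        Ok(user) => println!("found {} with role {:?}", user.username, user.role),
        Err(_) => println!("no such user (or the query failed)"),
    }
}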
src/database/mod.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
pub mod schema;
pub mod models;
pub mod driver;
src/database/models.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
use diesel::prelude::*;
use serde::Serialize;

#[derive(diesel_derive_enum::DbEnum, Serialize, Debug, Clone, Copy, PartialEq)]
#[ExistingTypePath = "crate::database::schema::sql_types::UserRole"]
#[DbValueStyle = "UPPERCASE"]
#[serde(rename_all = "UPPERCASE")]
pub enum UserRole {
    Student,
    Teacher,
    Admin,
}

#[derive(Identifiable, AsChangeset, Queryable, Selectable, Serialize)]
#[diesel(table_name = crate::database::schema::users)]
#[diesel(treat_none_as_null = true)]
pub struct User {
    pub id: String,
    pub username: String,
    pub password: String,
    pub vk_id: Option<i32>,
    pub access_token: String,
    pub group: String,
    pub role: UserRole,
    pub version: String,
}
src/database/schema.rs (generated, new file, 38 lines)
@@ -0,0 +1,38 @@
// @generated automatically by Diesel CLI.

pub mod sql_types {
    #[derive(diesel::query_builder::QueryId, Clone, diesel::sql_types::SqlType)]
    #[diesel(postgres_type(name = "user_role"))]
    pub struct UserRole;
}

diesel::table! {
    fcm (user_id) {
        user_id -> Text,
        token -> Text,
        topics -> Nullable<Array<Nullable<Text>>>,
    }
}

diesel::table! {
    use diesel::sql_types::*;
    use super::sql_types::UserRole;

    users (id) {
        id -> Text,
        username -> Text,
        password -> Text,
        vk_id -> Nullable<Int4>,
        access_token -> Text,
        group -> Text,
        role -> UserRole,
        version -> Text,
    }
}

diesel::joinable!(fcm -> users (user_id));

diesel::allow_tables_to_appear_in_same_query!(
    fcm,
    users,
);
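The joinable! and allow_tables_to_appear_in_same_query! declarations enable cross-table queries. A sketch of one, assuming a &mut PgConnection is at hand:

use crate::database::schema::{fcm, users};
use diesel::prelude::*;

fn fcm_tokens_demo(conn: &mut PgConnection) -> Vec<(String, String)> {
    // One row per user with an FCM record, joined via fcm.user_id -> users.id.
    users::table
        .inner_join(fcm::table)
        .select((users::username, fcm::token))
        .load(conn)
        .expect("load user/token pairs")
}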
src/main.rs (modified)
@@ -1,15 +1,64 @@
-use std::fs;
-use std::path::Path;
-use schedule_parser::parse_xls;
+use crate::routes::auth::sign_in::sign_in;
+use crate::xls_downloader::basic_impl::BasicXlsDownloader;
+use actix_web::{App, HttpServer, web};
+use chrono::{DateTime, Utc};
+use diesel::{Connection, PgConnection};
+use dotenvy::dotenv;
+use schedule_parser::schema::ScheduleEntity;
+use std::collections::HashMap;
+use std::env;
+use std::sync::{Mutex, MutexGuard};
 
-fn main() {
-    let groups = parse_xls(Path::new("./schedule.xls"));
+mod database;
+mod routes;
+mod utility;
+mod xls_downloader;
 
-    fs::write(
-        "./schedule.json",
-        serde_json::to_string_pretty(&groups)
-            .expect("Failed to serialize schedule!")
-            .as_bytes(),
-    )
-    .expect("Failed to write schedule");
+pub struct AppState {
+    downloader: Mutex<BasicXlsDownloader>,
+    schedule: Mutex<
+        Option<(
+            String,
+            DateTime<Utc>,
+            (
+                HashMap<String, ScheduleEntity>,
+                HashMap<String, ScheduleEntity>,
+            ),
+        )>,
+    >,
+    database: Mutex<PgConnection>,
+}
+
+impl AppState {
+    pub fn connection(&self) -> MutexGuard<PgConnection> {
+        self.database.lock().unwrap()
+    }
+}
+
+#[actix_web::main]
+async fn main() {
+    dotenv().ok();
+
+    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
+
+    let data = web::Data::new(AppState {
+        downloader: Mutex::new(BasicXlsDownloader::new()),
+        schedule: Mutex::new(None),
+        database: Mutex::new(
+            PgConnection::establish(&database_url)
+                .unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
+        ),
+    });
+
+    HttpServer::new(move || {
+        let schedule_scope = web::scope("/auth").service(sign_in);
+        let api_scope = web::scope("/api/v1").service(schedule_scope);
+
+        App::new().app_data(data.clone()).service(api_scope)
+    })
+    .bind(("127.0.0.1", 8080))
+    .unwrap()
+    .run()
+    .await
+    .unwrap();
 }
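Handlers receive the shared state through web::Data. A sketch of an extra endpoint built on the AppState above (the /health route is hypothetical, not part of the diff):

use actix_web::{get, web, HttpResponse};

#[get("/health")]
async fn health(app_state: web::Data<AppState>) -> HttpResponse {
    // Ready once a schedule has been downloaded and parsed.
    let ready = app_state.schedule.lock().unwrap().is_some();
    HttpResponse::Ok().body(if ready { "ready" } else { "no schedule yet" })
}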
src/routes/auth/mod.rs (new file, 2 lines)
@@ -0,0 +1,2 @@
pub mod sign_in;
mod schema;
src/routes/auth/schema.rs (new file, 56 lines)
@@ -0,0 +1,56 @@
use crate::database::models::User;
use serde::{Deserialize, Serialize, Serializer};

#[derive(Deserialize)]
pub struct SignInDto {
    pub username: String,
    pub password: String,
}

pub struct SignInResult(Result<SignInOk, SignInErr>);

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SignInOk {
    id: String,
    access_token: String,
    group: String,
}

#[derive(Serialize)]
pub struct SignInErr {
    code: SignInErrCode,
}

#[derive(Serialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum SignInErrCode {
    IncorrectCredentials,
    InvalidVkAccessToken,
}

impl SignInResult {
    pub fn ok(user: &User) -> Self {
        Self(Ok(SignInOk {
            id: user.id.clone(),
            access_token: user.access_token.clone(),
            group: user.group.clone(),
        }))
    }

    pub fn err(code: SignInErrCode) -> SignInResult {
        Self(Err(SignInErr { code }))
    }
}

impl Serialize for SignInResult {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match &self.0 {
            Ok(ok) => serializer.serialize_some(&ok),
            Err(err) => serializer.serialize_some(&err),
        }
    }
}
src/routes/auth/sign_in.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
use crate::database::driver;
use crate::database::models::User;
use crate::routes::auth::schema::SignInErrCode::IncorrectCredentials;
use crate::routes::auth::schema::{SignInDto, SignInResult};
use crate::{AppState, utility};
use actix_web::{post, web};
use diesel::SaveChangesDsl;
use std::ops::DerefMut;
use web::Json;

#[post("/sign-in")]
pub async fn sign_in(data: Json<SignInDto>, app_state: web::Data<AppState>) -> Json<SignInResult> {
    let result = match driver::users::get_by_username(&app_state.database, data.username.clone()) {
        Ok(mut user) => match bcrypt::verify(&data.password, &user.password) {
            Ok(true) => {
                let mut lock = app_state.connection();
                let conn = lock.deref_mut();

                user.access_token = utility::jwt::encode(&user.id);

                user.save_changes::<User>(conn)
                    .expect("Failed to update user");

                SignInResult::ok(&user)
            }
            Ok(false) | Err(_) => SignInResult::err(IncorrectCredentials),
        },

        Err(_) => SignInResult::err(IncorrectCredentials),
    };

    Json(result)
}
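Combined with the scopes in main.rs, this handler is reachable at POST /api/v1/auth/sign-in. A client-side sketch using reqwest, which is already a dependency (the credentials are made up):

async fn sign_in_demo() {
    let client = reqwest::Client::new();

    let response = client
        .post("http://127.0.0.1:8080/api/v1/auth/sign-in")
        .header("Content-Type", "application/json")
        .body(r#"{"username":"some_user","password":"secret"}"#)
        .send()
        .await
        .expect("request failed");

    // On success the body carries id/accessToken/group; on failure, a code field.
    println!("{}", response.text().await.expect("read body"));
}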
src/routes/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
pub mod auth;
src/utility/jwt.rs (new file, 140 lines)
@@ -0,0 +1,140 @@
use chrono::DateTime;
use chrono::Duration;
use chrono::TimeZone;
use chrono::Utc;
use hmac::{Hmac, Mac};
use jwt::{SignWithKey, Token, VerifyWithKey};
use sha2::Sha256;
use std::collections::BTreeMap;
use std::env;
use std::mem::discriminant;
use std::sync::LazyLock;

static JWT_SECRET: LazyLock<Hmac<Sha256>> = LazyLock::new(|| {
    let secret = env::var("JWT_SECRET").expect("JWT_SECRET must be set");

    Hmac::new_from_slice(secret.as_bytes()).expect("Hmac::new_from_slice failed")
});

#[derive(Debug)]
pub enum VerifyError {
    JwtError(jwt::Error),
    InvalidSignature,
    InvalidToken,
    Expired,
}

impl PartialEq for VerifyError {
    fn eq(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}

pub fn verify_and_decode(token: &String) -> Result<String, VerifyError> {
    let jwt = &*JWT_SECRET;

    let result: Result<BTreeMap<String, String>, jwt::Error> = token.verify_with_key(jwt);

    match result {
        Ok(claims) => {
            let exp = claims.get("exp").unwrap();
            let exp_date = DateTime::from_timestamp(exp.parse::<i64>().unwrap(), 0)
                .expect("Failed to parse expiration time");

            if Utc::now() > exp_date {
                return Err(VerifyError::Expired);
            }

            Ok(claims.get("id").cloned().unwrap())
        }
        Err(err) => Err(match err {
            jwt::Error::InvalidSignature => VerifyError::InvalidSignature,
            jwt::Error::Format | jwt::Error::Base64(_) | jwt::Error::NoClaimsComponent => {
                VerifyError::InvalidToken
            }

            _ => VerifyError::JwtError(err),
        }),
    }
}

pub fn encode(id: &String) -> String {
    let header = jwt::Header {
        type_: Some(jwt::header::HeaderType::JsonWebToken),
        ..Default::default()
    };

    let mut claims = BTreeMap::new();

    let iat = Utc::now();
    let exp = iat + Duration::days(365 * 4);

    let iat_str = iat.timestamp().to_string();
    let exp_str = exp.timestamp().to_string();

    claims.insert("id", id.as_str());
    claims.insert("iat", iat_str.as_str());
    claims.insert("exp", exp_str.as_str());

    Token::new(header, claims)
        .sign_with_key(&*JWT_SECRET)
        .unwrap()
        .as_str()
        .to_string()
}

#[cfg(test)]
mod tests {
    use super::*;
    use dotenvy::dotenv;

    #[test]
    fn test_encode() {
        dotenv().unwrap();

        assert_eq!(encode(&"test".to_string()).is_empty(), false);
    }

    #[test]
    fn test_decode_invalid_token() {
        dotenv().unwrap();

        let token = "".to_string();
        let result = verify_and_decode(&token);

        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), VerifyError::InvalidToken);
    }

    #[test]
    fn test_decode_invalid_signature() {
        dotenv().unwrap();

        let token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOiIxODY4ODEyOTI4IiwiaWF0IjoiMTc0MjY2ODkyOCIsImlkIjoiNjdkY2M5YTk1MDdiMDAwMDc3Mjc0NGEyIn0.DQYFYF-3DoJgCLOVdAWa47nUaCJAh16DXj-ChNSSmWz".to_string();
        let result = verify_and_decode(&token);

        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), VerifyError::InvalidToken);
    }

    #[test]
    fn test_decode_expired() {
        dotenv().unwrap();

        let token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOiIxNjE2NTI2Mzc2IiwiaWF0IjoiMTQ5MDM4MjM3NiIsImlkIjoiNjdkY2M5YTk1MDdiMDAwMDc3Mjc0NGEyIn0.Qc2LbMJTvl2hWzDM2XyQv4m9lIqR84COAESQAieUxz8".to_string();
        let result = verify_and_decode(&token);

        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), VerifyError::Expired);
    }

    #[test]
    fn test_decode_ok() {
        dotenv().unwrap();

        let token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOiIxODY4ODEyOTI4IiwiaWF0IjoiMTc0MjY2ODkyOCIsImlkIjoiNjdkY2M5YTk1MDdiMDAwMDc3Mjc0NGEyIn0.DQYFYF-3DoJgCLOVdAWa47nUaCJAh16DXj-ChNSSmWw".to_string();
        let result = verify_and_decode(&token);

        assert!(result.is_ok());
    }
}
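A round-trip sketch for this module, assuming JWT_SECRET is set in the environment (the user id is made up):

fn jwt_roundtrip_demo() {
    // Issue a token for an id, then verify it and recover the same id.
    let token = encode(&"example-user-id".to_string());

    match verify_and_decode(&token) {
        Ok(id) => println!("token is valid for {id}"),
        Err(e) => eprintln!("token rejected: {e:?}"),
    }
}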
src/utility/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
pub mod jwt;
src/xls_downloader/basic_impl.rs (new file, 219 lines)
@@ -0,0 +1,219 @@
use crate::xls_downloader::interface::{FetchError, FetchOk, FetchResult, XLSDownloader};
use chrono::{DateTime, Utc};

pub struct BasicXlsDownloader {
    url: Option<String>,
}

async fn fetch_specified(url: &String, user_agent: String, head: bool) -> FetchResult {
    let client = reqwest::Client::new();

    let response = if head {
        client.head(url)
    } else {
        client.get(url)
    }
    .header("User-Agent", user_agent)
    .send()
    .await;

    match response {
        Ok(r) => {
            if r.status().as_u16() != 200 {
                return Err(FetchError::BadStatusCode);
            }

            let headers = r.headers();

            let content_type = headers.get("Content-Type");
            let etag = headers.get("etag");
            let last_modified = headers.get("last-modified");
            let date = headers.get("date");

            if content_type.is_none() || etag.is_none() || last_modified.is_none() || date.is_none()
            {
                Err(FetchError::BadHeaders)
            } else if content_type.unwrap() != "application/vnd.ms-excel" {
                Err(FetchError::BadContentType)
            } else {
                let etag = etag.unwrap().to_str().unwrap().to_string();
                let last_modified =
                    DateTime::parse_from_rfc2822(&last_modified.unwrap().to_str().unwrap())
                        .unwrap()
                        .with_timezone(&Utc);

                Ok(if head {
                    FetchOk::head(etag, last_modified)
                } else {
                    FetchOk::get(etag, last_modified, r.bytes().await.unwrap().to_vec())
                })
            }
        }
        Err(_) => Err(FetchError::Unknown),
    }
}

impl BasicXlsDownloader {
    pub fn new() -> Self {
        BasicXlsDownloader { url: None }
    }
}

impl XLSDownloader for BasicXlsDownloader {
    async fn fetch(&self, head: bool) -> FetchResult {
        if self.url.is_none() {
            Err(FetchError::NoUrlProvided)
        } else {
            fetch_specified(
                self.url.as_ref().unwrap(),
                "t.me/polytechnic_next".to_string(),
                head,
            )
            .await
        }
    }

    async fn set_url(&mut self, url: String) -> Result<(), FetchError> {
        let result = fetch_specified(&url, "t.me/polytechnic_next".to_string(), true).await;

        if let Ok(_) = result {
            Ok(self.url = Some(url))
        } else {
            Err(result.err().unwrap())
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::xls_downloader::basic_impl::{BasicXlsDownloader, fetch_specified};
    use crate::xls_downloader::interface::{FetchError, XLSDownloader};

    #[tokio::test]
    async fn bad_url() {
        let url = "bad_url".to_string();
        let user_agent = String::new();

        let results = [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent.clone(), false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());
    }

    #[tokio::test]
    async fn bad_status_code() {
        let url = "https://www.google.com/not-found".to_string();
        let user_agent = String::new();

        let results = [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent.clone(), false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());

        assert_eq!(
            *results[0].as_ref().err().unwrap(),
            FetchError::BadStatusCode
        );
        assert_eq!(
            *results[1].as_ref().err().unwrap(),
            FetchError::BadStatusCode
        );
    }

    #[tokio::test]
    async fn bad_headers() {
        let url = "https://www.google.com/favicon.ico".to_string();
        let user_agent = String::new();

        let results = [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent.clone(), false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());

        assert_eq!(*results[0].as_ref().err().unwrap(), FetchError::BadHeaders);
        assert_eq!(*results[1].as_ref().err().unwrap(), FetchError::BadHeaders);
    }

    #[tokio::test]
    async fn bad_content_type() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb59fd46303008396ac96%2Fexample.txt".to_string();
        let user_agent = String::new();

        let results = [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent.clone(), false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());

        assert_eq!(
            *results[0].as_ref().err().unwrap(),
            FetchError::BadContentType
        );
        assert_eq!(
            *results[1].as_ref().err().unwrap(),
            FetchError::BadContentType
        );
    }

    #[tokio::test]
    async fn ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();
        let user_agent = String::new();

        let results = [
            fetch_specified(&url, user_agent.clone(), true).await,
            fetch_specified(&url, user_agent.clone(), false).await,
        ];

        assert!(results[0].is_ok());
        assert!(results[1].is_ok());
    }

    #[tokio::test]
    async fn downloader_set_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();

        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_ok());
    }

    #[tokio::test]
    async fn downloader_set_err() {
        let url = "bad_url".to_string();

        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_err());
    }

    #[tokio::test]
    async fn downloader_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls".to_string();

        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_ok());
        assert!(downloader.fetch(false).await.is_ok());
    }

    #[tokio::test]
    async fn downloader_no_url_provided() {
        let downloader = BasicXlsDownloader::new();
        let result = downloader.fetch(false).await;

        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), FetchError::NoUrlProvided);
    }
}
src/xls_downloader/interface.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
use chrono::{DateTime, Utc};

#[derive(PartialEq, Debug)]
pub enum FetchError {
    NoUrlProvided,
    Unknown,
    BadStatusCode,
    BadContentType,
    BadHeaders,
}

pub struct FetchOk {
    pub etag: String,
    pub uploaded_at: DateTime<Utc>,
    pub requested_at: DateTime<Utc>,
    pub data: Option<Vec<u8>>,
}

impl FetchOk {
    pub fn head(etag: String, uploaded_at: DateTime<Utc>) -> Self {
        FetchOk {
            etag,
            uploaded_at,
            requested_at: Utc::now(),
            data: None,
        }
    }

    pub fn get(etag: String, uploaded_at: DateTime<Utc>, data: Vec<u8>) -> Self {
        FetchOk {
            etag,
            uploaded_at,
            requested_at: Utc::now(),
            data: Some(data),
        }
    }
}

pub type FetchResult = Result<FetchOk, FetchError>;

pub trait XLSDownloader {
    async fn fetch(&self, head: bool) -> FetchResult;
    async fn set_url(&mut self, url: String) -> Result<(), FetchError>;
}
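The trait relies on async fn in traits, stable in the 2024 edition. A stub implementation sketch, e.g. for tests (FixedXlsDownloader is hypothetical):

use chrono::Utc;

// Serves a fixed byte buffer instead of hitting the network.
struct FixedXlsDownloader {
    data: Vec<u8>,
}

impl XLSDownloader for FixedXlsDownloader {
    async fn fetch(&self, head: bool) -> FetchResult {
        let now = Utc::now();
        Ok(if head {
            FetchOk::head("stub-etag".to_string(), now)
        } else {
            FetchOk::get("stub-etag".to_string(), now, self.data.clone())
        })
    }

    async fn set_url(&mut self, _url: String) -> Result<(), FetchError> {
        Ok(())
    }
}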
src/xls_downloader/mod.rs (new file, 2 lines)
@@ -0,0 +1,2 @@
pub mod basic_impl;
pub mod interface;