Mirror of https://github.com/n08i40k/schedule-parser-rusted.git (synced 2025-12-06 17:57:47 +03:00)

Compare commits (7 commits)
e64011ba16 ... development

| Author | SHA1 | Date |
|---|---|---|
|  | 5e5cd53f46 |  |
|  | 8d59e37976 |  |
|  | 5e39fc9acc |  |
|  | 7c973bfda0 |  |
|  | 8fba0fc709 |  |
|  | 983ff4fa5e |  |
|  | fb6f3fc05f |  |
.env.test (new file, +26)
@@ -0,0 +1,26 @@
# Schedule
# SCHEDULE_INIT_URL=
SCHEDULE_DISABLE_AUTO_UPDATE=1

# Basic authorization
JWT_SECRET="test-secret-at-least-256-bits-used"

# VKID
VK_ID_CLIENT_ID=0
VK_ID_REDIRECT_URI="vk0://vk.com/blank.html"

# Telegram Mini-App
TELEGRAM_BOT_ID=0
TELEGRAM_MINI_APP_HOST=example.com
TELEGRAM_TEST_DC=false

# Yandex Cloud
YANDEX_CLOUD_API_KEY=""
YANDEX_CLOUD_FUNC_ID=""

# Firebase
# GOOGLE_APPLICATION_CREDENTIALS=

# LOGGING
RUST_BACKTRACE=1
# RUST_LOG=debug
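A minimal sketch of how a test binary might load this file, assuming the dotenvy crate declared in Cargo.toml below; the bootstrap function is hypothetical, only `JWT_SECRET` comes from the file above:

```rust
// Hypothetical test bootstrap; mirrors the CI step that creates .env.test.
fn main() {
    // Tolerate a missing file, just like `touch .env.test` does in the release workflow.
    dotenvy::from_filename(".env.test").ok();

    let jwt_secret = std::env::var("JWT_SECRET").expect("JWT_SECRET must be set");
    assert!(!jwt_secret.is_empty());
}
```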
.github/workflows/release.yml (vendored, 1 change)
@@ -40,7 +40,6 @@ jobs:
      - name: Test
        run: |
          touch .env.test
          cargo test --verbose
        env:
          DATABASE_URL: ${{ env.TEST_DB }}
.idea/schedule-parser-rusted.iml (generated, 3 changes)
@@ -8,6 +8,9 @@
<sourceFolder url="file://$MODULE_DIR$/actix-test/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/schedule-parser/benches" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/schedule-parser/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/providers/base/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/providers/provider-engels-polytechnic/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/providers/src" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/actix-macros/target" />
<excludeFolder url="file://$MODULE_DIR$/actix-test/target" />
<excludeFolder url="file://$MODULE_DIR$/target" />
Cargo.lock (generated, 1942 changes): diff suppressed because it is too large.
Cargo.toml (65 changes)
@@ -1,5 +1,5 @@
[workspace]
members = ["actix-macros", "actix-test", "schedule-parser"]
members = ["actix-macros", "actix-test", "providers"]

[package]
name = "schedule-parser-rusted"
@@ -11,40 +11,65 @@ publish = false
debug = true

[dependencies]
actix-web = "4.10.2"
providers = { path = "providers" }
actix-macros = { path = "actix-macros" }
schedule-parser = { path = "schedule-parser", features = ["test-utils"] }
bcrypt = "0.17.0"

# serve api
actix-web = "4"

# basic
chrono = { version = "0.4.40", features = ["serde"] }
derive_more = { version = "2", features = ["full"] }
dotenvy = "0.15.7"

# sql
diesel = { version = "2.2.8", features = ["postgres"] }
diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
dotenvy = "0.15.7"

# logging
env_logger = "0.11.7"
firebase-messaging-rs = { git = "https://github.com/i10416/firebase-messaging-rs.git" }

# async
tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] }
tokio-util = "0.7.16"
futures-util = "0.3.31"

# authorization
bcrypt = "0.17.0"
jsonwebtoken = { version = "9.3.1", features = ["use_pem"] }
hex = "0.4.3"
mime = "0.3.17"

# creating users
objectid = "0.2.0"

# schedule downloader
reqwest = { version = "0.12.15", features = ["json"] }
sentry = "0.38"
sentry-actix = "0.38"
mime = "0.3.17"

# error handling
sentry = "0.42.0"
sentry-actix = "0.42.0"

# [de]serializing
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.140"
serde_with = "3.12.0"
sha1 = "0.11.0-pre.5"
tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] }

sha1 = "0.11.0-rc.0"

# documentation
utoipa = { version = "5", features = ["actix_extras", "chrono"] }
utoipa-rapidoc = { version = "6.0.0", features = ["actix-web"] }
utoipa-rapidoc = { version = "6", features = ["actix-web"] }
utoipa-actix-web = "0.1"
uuid = { version = "1.16.0", features = ["v4"] }
ed25519-dalek = "2.1.1"
hex-literal = "1.0.0"
log = "0.4.26"
base64 = "0.22.1"
percent-encoding = "2.3.1"
ua_generator = "0.5.16"

uuid = { version = "1", features = ["v4"] }
hex-literal = "1"
log = "0.4"

# telegram webdata decoding and verification
base64 = "0.22"
percent-encoding = "2.3"
ed25519-dalek = "2"

[dev-dependencies]
providers = { path = "providers", features = ["test"] }
actix-test = { path = "actix-test" }
actix-macros/Cargo.lock (generated, 7 changes)
@@ -1,7 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4

[[package]]
name = "actix-utility-macros"
version = "0.1.0"

@@ -4,9 +4,9 @@ version = "0.1.0"
edition = "2024"

[dependencies]
syn = "2.0.100"
quote = "1.0.40"
proc-macro2 = "1.0.94"
syn = "2"
quote = "1"
proc-macro2 = "1"

[lib]
proc-macro = true

@@ -81,7 +81,7 @@ mod middleware_error {

        fn error_response(&self) -> ::actix_web::HttpResponse<BoxBody> {
            ::actix_web::HttpResponse::build(self.status_code())
                .json(crate::utility::error::MiddlewareError::new(self.clone()))
                .json(crate::middlewares::error::MiddlewareError::new(self.clone()))
        }
    }
})
actix-test/Cargo.lock (generated, 1520 changes): diff suppressed because it is too large.

@@ -4,5 +4,5 @@
edition = "2024"

[dependencies]
actix-http = "3.10.0"
actix-web = "4.10.2"
actix-http = "3"
actix-web = "4"
providers/Cargo.toml (new file, +12)
@@ -0,0 +1,12 @@
[package]
name = "providers"
version = "0.1.0"
edition = "2024"

[features]
test = ["provider-engels-polytechnic/test"]

[dependencies]
base = { path = "base" }

provider-engels-polytechnic = { path = "provider-engels-polytechnic" }
providers/base/Cargo.toml (new file, +17)
@@ -0,0 +1,17 @@
[package]
name = "base"
version = "0.1.0"
edition = "2024"

[dependencies]
tokio-util = "0.7.16"
async-trait = "0.1.89"

chrono = { version = "0.4.41", features = ["serde"] }

serde = { version = "1.0.219", features = ["derive"] }
serde_repr = "0.1.20"

utoipa = { version = "5.4.0", features = ["macros", "chrono"] }

sha1 = "0.11.0-rc.0"
@@ -1,4 +1,6 @@
use sha1::Digest;
use sha1::digest::OutputSizeUser;
use sha1::digest::typenum::Unsigned;
use std::hash::Hasher;

/// Hasher returning a hash from any algorithm implementing Digest.
@@ -12,7 +14,20 @@
{
    /// Obtain hash.
    pub fn finalize(self) -> String {
        hex::encode(self.digest.finalize().0)
        static ALPHABET: [char; 16] = [
            '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F',
        ];

        let mut hex = String::with_capacity(<D as OutputSizeUser>::OutputSize::USIZE * 2);

        for byte in self.digest.finalize().0.into_iter() {
            let byte: u8 = byte;

            hex.push(ALPHABET[(byte >> 4) as usize]);
            hex.push(ALPHABET[(byte & 0xF) as usize]);
        }

        hex
    }
}
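The new finalize() replaces hex::encode (which emits lowercase and pulls in the hex crate) with a nibble-lookup loop that emits uppercase digits. A standalone sketch of the same technique; the function name is ours, not part of the crate:

```rust
/// Nibble-lookup hex encoding, mirroring finalize() above.
fn to_upper_hex(bytes: &[u8]) -> String {
    const ALPHABET: [char; 16] = [
        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F',
    ];

    let mut hex = String::with_capacity(bytes.len() * 2);
    for &byte in bytes {
        hex.push(ALPHABET[(byte >> 4) as usize]); // high nibble
        hex.push(ALPHABET[(byte & 0xF) as usize]); // low nibble
    }
    hex
}

fn main() {
    assert_eq!(to_upper_hex(&[0x0F, 0xA0, 0xDE]), "0FA0DE");
}
```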
providers/base/src/lib.rs (new file, +289)
@@ -0,0 +1,289 @@
use crate::hasher::DigestHasher;
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};
use sha1::{Digest, Sha1};
use std::collections::HashMap;
use std::hash::Hash;
use std::sync::Arc;
use tokio_util::sync::CancellationToken;
use utoipa::ToSchema;

mod hasher;

// pub(crate) mod internal {
//     use super::{LessonBoundaries, LessonType};
//     use chrono::{DateTime, Utc};
//
//     /// Data cell storing the group name.
//     pub struct GroupCellInfo {
//         /// Column index.
//         pub column: u32,
//
//         /// Text in the cell.
//         pub name: String,
//     }
//
//     /// Data cell storing the day row.
//     pub struct DayCellInfo {
//         /// Row index.
//         pub row: u32,
//
//         /// Column index.
//         pub column: u32,
//
//         /// Day name.
//         pub name: String,
//
//         /// Date of the day.
//         pub date: DateTime<Utc>,
//     }
//
//     /// Data on lesson times from the second column of the schedule.
//     pub struct BoundariesCellInfo {
//         /// Time segment of the lesson.
//         pub time_range: LessonBoundaries,
//
//         /// Type of lesson.
//         pub lesson_type: LessonType,
//
//         /// The lesson index.
//         pub default_index: Option<u32>,
//
//         /// The frame of the cell.
//         pub xls_range: ((u32, u32), (u32, u32)),
//     }
// }

/// The beginning and end of the lesson.
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonBoundaries {
    /// The beginning of the lesson.
    pub start: DateTime<Utc>,

    /// The end of the lesson.
    pub end: DateTime<Utc>,
}

/// Type of lesson.
#[derive(Clone, Hash, PartialEq, Debug, Serialize_repr, Deserialize_repr, ToSchema)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
#[repr(u8)]
pub enum LessonType {
    /// Regular lesson.
    Default = 0,

    /// Extra classes.
    Additional,

    /// Break.
    Break,

    /// Consultation.
    Consultation,

    /// Independent work.
    IndependentWork,

    /// Credit test (pass/fail).
    Exam,

    /// Graded credit test.
    ExamWithGrade,

    /// Exam.
    ExamDefault,

    /// Course project.
    CourseProject,

    /// Course project defense.
    CourseProjectDefense,
}

#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonSubGroup {
    /// Cabinet, if present.
    pub cabinet: Option<String>,

    /// Full name of the teacher.
    pub teacher: Option<String>,
}

#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct Lesson {
    /// Type.
    #[serde(rename = "type")]
    pub lesson_type: LessonType,

    /// Lesson indexes, if present.
    pub range: Option<[u8; 2]>,

    /// Name.
    pub name: Option<String>,

    /// The beginning and end.
    pub time: LessonBoundaries,

    /// List of subgroups.
    #[serde(rename = "subgroups")]
    pub subgroups: Option<Vec<Option<LessonSubGroup>>>,

    /// Group name, if this is a schedule for teachers.
    pub group: Option<String>,
}

#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct Day {
    /// Day of the week.
    pub name: String,

    /// Address of the other campus building, if any.
    pub street: Option<String>,

    /// Date.
    pub date: DateTime<Utc>,

    /// List of lessons on this day.
    pub lessons: Vec<Lesson>,
}

#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct ScheduleEntry {
    /// The name of the group or the name of the teacher.
    pub name: String,

    /// List of six days.
    pub days: Vec<Day>,
}

#[derive(Clone)]
pub struct ParsedSchedule {
    /// Map of groups.
    pub groups: HashMap<String, ScheduleEntry>,

    /// Map of teachers.
    pub teachers: HashMap<String, ScheduleEntry>,
}

/// Represents a snapshot of the schedule parsed from an XLS file.
#[derive(Clone)]
pub struct ScheduleSnapshot {
    /// Timestamp when the Polytechnic website was queried for the schedule.
    pub fetched_at: DateTime<Utc>,

    /// Timestamp indicating when the schedule was last updated on the Polytechnic website.
    ///
    /// <note>
    /// This value is determined by the website's content and does not depend on the application.
    /// </note>
    pub updated_at: DateTime<Utc>,

    /// URL pointing to the XLS file containing the source schedule data.
    pub url: String,

    /// Parsed schedule data in the application's internal representation.
    pub data: ParsedSchedule,
}

impl ScheduleSnapshot {
    /// Converts the schedule data into a hash.
    /// ### Important!
    /// The hash does not depend on the dates.
    /// If the application is restarted but the source schedule file remains unchanged, the hash will not change.
    pub fn hash(&self) -> String {
        let mut hasher = DigestHasher::from(Sha1::new());

        self.data.teachers.iter().for_each(|e| e.hash(&mut hasher));
        self.data.groups.iter().for_each(|e| e.hash(&mut hasher));

        hasher.finalize()
    }

    /// Simply updates the value of [`ScheduleSnapshot::fetched_at`].
    /// Used for auto-updates.
    pub fn update(&mut self) {
        self.fetched_at = Utc::now();
    }
}

// #[derive(Clone, Debug, Display, Error, ToSchema)]
// #[display("row {row}, column {column}")]
// pub struct ErrorCellPos {
//     pub row: u32,
//     pub column: u32,
// }
//
// #[derive(Clone, Debug, Display, Error, ToSchema)]
// #[display("'{data}' at {pos}")]
// pub struct ErrorCell {
//     pub pos: ErrorCellPos,
//     pub data: String,
// }
//
// impl ErrorCell {
//     pub fn new(row: u32, column: u32, data: String) -> Self {
//         Self {
//             pos: ErrorCellPos { row, column },
//             data,
//         }
//     }
// }

// #[derive(Clone, Debug, Display, Error, ToSchema)]
// pub enum ParseError {
//     /// Errors related to reading the XLS file.
//     #[display("{_0:?}: Failed to read XLS file.")]
//     #[schema(value_type = String)]
//     BadXLS(Arc<calamine::XlsError>),
//
//     /// Not a single sheet was found.
//     #[display("No work sheets found.")]
//     NoWorkSheets,
//
//     /// There is no data on the boundaries of the sheet.
//     #[display("There is no data on work sheet boundaries.")]
//     UnknownWorkSheetRange,
//
//     /// Failed to read the beginning and end of the lesson from the cell.
//     #[display("Failed to read lesson start and end from {_0}.")]
//     LessonBoundaries(ErrorCell),
//
//     /// The beginning and end corresponding to the lesson were not found.
//     #[display("No start and end times matching the lesson (at {_0}) was found.")]
//     LessonTimeNotFound(ErrorCellPos),
// }
//
// impl Serialize for ParseError {
//     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
//     where
//         S: Serializer,
//     {
//         match self {
//             ParseError::BadXLS(_) => serializer.serialize_str("BAD_XLS"),
//             ParseError::NoWorkSheets => serializer.serialize_str("NO_WORK_SHEETS"),
//             ParseError::UnknownWorkSheetRange => {
//                 serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE")
//             }
//             ParseError::LessonBoundaries(_) => serializer.serialize_str("GLOBAL_TIME"),
//             ParseError::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"),
//         }
//     }
// }

#[async_trait]
pub trait ScheduleProvider
where
    Self: Sync + Send,
{
    /// Returns `Ok` when the task has been cancelled.
    /// Returns `Err` when an error occurred while trying to download or parse the schedule.
    async fn start_auto_update_task(
        &self,
        cancellation_token: CancellationToken,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>>;

    async fn get_schedule(&self) -> Arc<ScheduleSnapshot>;
}
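A minimal consumer sketch for the trait above, assuming only the items in this file plus the tokio and tokio-util crates already declared in the workspace; `run` is a hypothetical name:

```rust
use std::sync::Arc;
use tokio_util::sync::CancellationToken;

async fn run(provider: Arc<dyn ScheduleProvider>) {
    let token = CancellationToken::new();

    // Drive the provider's auto-update loop in the background;
    // it resolves to Ok(()) once the token is cancelled.
    let background = {
        let provider = provider.clone();
        let token = token.clone();
        tokio::spawn(async move { provider.start_auto_update_task(token).await })
    };

    // Readers take a cheap Arc clone of the current snapshot.
    let snapshot = provider.get_schedule().await;
    println!("schedule hash: {}", snapshot.hash());

    token.cancel();
    let _ = background.await;
}
```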
providers/provider-engels-polytechnic/Cargo.toml (new file, +32)
@@ -0,0 +1,32 @@
[package]
name = "provider-engels-polytechnic"
version = "0.1.0"
edition = "2024"

[features]
test = []

[dependencies]
base = { path = "../base" }

tokio = { version = "1.47.1", features = ["sync", "macros", "time"] }
tokio-util = "0.7.16"

chrono = { version = "0.4.41", features = ["serde"] }

serde = { version = "1.0.219", features = ["derive"] }

derive_more = { version = "2.0.1", features = ["error", "display"] }

utoipa = { version = "5.4.0", features = ["macros", "chrono"] }

calamine = { git = "https://github.com/prophittcorey/calamine.git", branch = "fix/zip-3.0" }
async-trait = "0.1.89"

reqwest = "0.12.23"
ua_generator = "0.5.22"
regex = "1.11.1"
strsim = "0.11.1"
log = "0.4.27"
sentry = "0.42.0"
providers/provider-engels-polytechnic/src/lib.rs (new file, +85)
@@ -0,0 +1,85 @@
use crate::updater::Updater;
use async_trait::async_trait;
use base::{ScheduleProvider, ScheduleSnapshot};
use std::ops::DerefMut;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::RwLock;
use tokio::time::interval;
use tokio_util::sync::CancellationToken;

pub use crate::updater::UpdateSource;

mod parser;
mod updater;
mod xls_downloader;

#[cfg(feature = "test")]
pub mod test_utils {
    pub use crate::parser::test_utils::test_result;
}

pub struct EngelsPolytechnicProvider {
    updater: Updater,
    snapshot: Arc<ScheduleSnapshot>,
}

impl EngelsPolytechnicProvider {
    pub async fn new(
        update_source: UpdateSource,
    ) -> Result<Arc<dyn ScheduleProvider>, crate::updater::error::Error> {
        let (updater, snapshot) = Updater::new(update_source).await?;

        Ok(Arc::new(Wrapper {
            inner: RwLock::new(Self {
                updater,
                snapshot: Arc::new(snapshot),
            }),
        }))
    }
}

pub struct Wrapper {
    inner: RwLock<EngelsPolytechnicProvider>,
}

#[async_trait]
impl ScheduleProvider for Wrapper {
    async fn start_auto_update_task(
        &self,
        cancellation_token: CancellationToken,
    ) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {
        let mut ticker = interval(Duration::from_secs(60 * 30));
        ticker.tick().await; // the latest schedule was already fetched when the provider was instantiated

        loop {
            tokio::select! {
                _ = ticker.tick() => {
                    let mut lock = self.inner.write().await;
                    let this = lock.deref_mut();

                    log::info!("Updating schedule...");

                    match this.updater.update(&mut this.snapshot).await {
                        Ok(snapshot) => {
                            this.snapshot = Arc::new(snapshot);
                        },

                        Err(err) => {
                            cancellation_token.cancel();
                            return Err(err.into());
                        }
                    }
                }

                _ = cancellation_token.cancelled() => {
                    return Ok(());
                }
            }
        }
    }

    async fn get_schedule(&self) -> Arc<ScheduleSnapshot> {
        self.inner.read().await.snapshot.clone()
    }
}
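A sketch of constructing this provider at startup, assuming the re-exported UpdateSource above; the URL is a placeholder and errors are boxed for brevity:

```rust
use provider_engels_polytechnic::{EngelsPolytechnicProvider, UpdateSource};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // UpdateSource::Url skips the Yandex Cloud lookup;
    // GrabFromSite would query the FaaS endpoint instead.
    let provider = EngelsPolytechnicProvider::new(UpdateSource::Url(
        "https://example.com/schedule.xls".to_string(),
    ))
    .await?;

    let snapshot = provider.get_schedule().await;
    println!("fetched at {}", snapshot.fetched_at);
    Ok(())
}
```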
@@ -1,12 +1,12 @@
use crate::LessonParseResult::{Lessons, Street};
use crate::schema::LessonType::Break;
use crate::schema::internal::{BoundariesCellInfo, DayCellInfo, GroupCellInfo};
use crate::schema::{
    Day, ErrorCell, ErrorCellPos, Lesson, LessonBoundaries, LessonSubGroup, LessonType, ParseError,
    ParseResult, ScheduleEntry,
use crate::or_continue;
use crate::parser::error::{ErrorCell, ErrorCellPos};
use crate::parser::worksheet::WorkSheet;
use crate::parser::LessonParseResult::{Lessons, Street};
use base::LessonType::Break;
use base::{
    Day, Lesson, LessonBoundaries, LessonSubGroup, LessonType, ParsedSchedule, ScheduleEntry,
};
use crate::worksheet::WorkSheet;
use calamine::{Reader, Xls, open_workbook_from_rs};
use calamine::{open_workbook_from_rs, Reader, Xls};
use chrono::{DateTime, Duration, NaiveDate, NaiveTime, Utc};
use regex::Regex;
use std::collections::HashMap;
@@ -14,18 +14,128 @@ use std::io::Cursor;
use std::sync::LazyLock;

mod macros;
pub mod schema;
mod worksheet;

pub mod error {
    use derive_more::{Display, Error};
    use serde::{Serialize, Serializer};
    use std::sync::Arc;
    use utoipa::ToSchema;

    #[derive(Clone, Debug, Display, Error, ToSchema)]
    #[display("row {row}, column {column}")]
    pub struct ErrorCellPos {
        pub row: u32,
        pub column: u32,
    }

    #[derive(Clone, Debug, Display, Error, ToSchema)]
    #[display("'{data}' at {pos}")]
    pub struct ErrorCell {
        pub pos: ErrorCellPos,
        pub data: String,
    }

    impl ErrorCell {
        pub fn new(row: u32, column: u32, data: String) -> Self {
            Self {
                pos: ErrorCellPos { row, column },
                data,
            }
        }
    }

    #[derive(Clone, Debug, Display, Error, ToSchema)]
    pub enum Error {
        /// Errors related to reading the XLS file.
        #[display("{_0:?}: Failed to read XLS file.")]
        #[schema(value_type = String)]
        BadXLS(Arc<calamine::XlsError>),

        /// Not a single sheet was found.
        #[display("No work sheets found.")]
        NoWorkSheets,

        /// There is no data on the boundaries of the sheet.
        #[display("There is no data on work sheet boundaries.")]
        UnknownWorkSheetRange,

        /// Failed to read the beginning and end of the lesson from the cell.
        #[display("Failed to read lesson start and end from {_0}.")]
        LessonBoundaries(ErrorCell),

        /// The beginning and end corresponding to the lesson were not found.
        #[display("No start and end times matching the lesson (at {_0}) was found.")]
        LessonTimeNotFound(ErrorCellPos),
    }

    impl Serialize for Error {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            match self {
                Error::BadXLS(_) => serializer.serialize_str("BAD_XLS"),
                Error::NoWorkSheets => serializer.serialize_str("NO_WORK_SHEETS"),
                Error::UnknownWorkSheetRange => {
                    serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE")
                }
                Error::LessonBoundaries(_) => serializer.serialize_str("GLOBAL_TIME"),
                Error::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"),
            }
        }
    }
}

/// Data cell storing the group name.
pub struct GroupCellInfo {
    /// Column index.
    pub column: u32,

    /// Text in the cell.
    pub name: String,
}

/// Data cell storing the day row.
pub struct DayCellInfo {
    /// Row index.
    pub row: u32,

    /// Column index.
    pub column: u32,

    /// Day name.
    pub name: String,

    /// Date of the day.
    pub date: DateTime<Utc>,
}

/// Data on lesson times from the second column of the schedule.
pub struct BoundariesCellInfo {
    /// Time segment of the lesson.
    pub time_range: LessonBoundaries,

    /// Type of lesson.
    pub lesson_type: LessonType,

    /// The lesson index.
    pub default_index: Option<u32>,

    /// The frame of the cell.
    pub xls_range: ((u32, u32), (u32, u32)),
}

/// Obtaining a "skeleton" schedule from the work sheet.
fn parse_skeleton(
    worksheet: &WorkSheet,
) -> Result<(Vec<DayCellInfo>, Vec<GroupCellInfo>), ParseError> {
) -> Result<(Vec<DayCellInfo>, Vec<GroupCellInfo>), crate::parser::error::Error> {
    let mut groups: Vec<GroupCellInfo> = Vec::new();
    let mut days: Vec<(u32, String, Option<DateTime<Utc>>)> = Vec::new();

    let worksheet_start = worksheet.start().ok_or(ParseError::UnknownWorkSheetRange)?;
    let worksheet_end = worksheet.end().ok_or(ParseError::UnknownWorkSheetRange)?;
    let worksheet_start = worksheet
        .start()
        .ok_or(error::Error::UnknownWorkSheetRange)?;
    let worksheet_end = worksheet.end().ok_or(error::Error::UnknownWorkSheetRange)?;

    let mut row = worksheet_start.0;

@@ -42,7 +152,8 @@ fn parse_skeleton(
    for column in (worksheet_start.1 + 2)..=worksheet_end.1 {
        groups.push(GroupCellInfo {
            column,
            name: or_continue!(worksheet.get_string_from_cell(row, column)),
            name: or_continue!(worksheet.get_string_from_cell(row, column))
                .replace(" ", ""),
        });
    }

@@ -152,7 +263,7 @@ fn parse_lesson(
    day_boundaries: &Vec<BoundariesCellInfo>,
    lesson_boundaries: &BoundariesCellInfo,
    group_column: u32,
) -> Result<LessonParseResult, ParseError> {
) -> Result<LessonParseResult, crate::parser::error::Error> {
    let row = lesson_boundaries.xls_range.0.0;

    let name = {
@@ -179,13 +290,12 @@ fn parse_lesson(
        .filter(|time| time.xls_range.1.0 == cell_range.1.0)
        .collect::<Vec<&BoundariesCellInfo>>();

    let end_time =
        end_time_arr
            .first()
            .ok_or(ParseError::LessonTimeNotFound(ErrorCellPos {
                row,
                column: group_column,
            }))?;
    let end_time = end_time_arr
        .first()
        .ok_or(error::Error::LessonTimeNotFound(ErrorCellPos {
            row,
            column: group_column,
        }))?;

    let range: Option<[u8; 2]> = if lesson_boundaries.default_index != None {
        let default = lesson_boundaries.default_index.unwrap() as u8;
@@ -310,7 +420,8 @@ fn parse_cabinets(worksheet: &WorkSheet, row_range: (u32, u32), column: u32) ->
/// Getting the "pure" name of the lesson and the list of teachers from the text of the lesson cell.
fn parse_name_and_subgroups(
    text: &String,
) -> Result<(String, Vec<Option<LessonSubGroup>>, Option<LessonType>), ParseError> {
) -> Result<(String, Vec<Option<LessonSubGroup>>, Option<LessonType>), crate::parser::error::Error>
{
    // Parts of the lesson name:
    // 1. The name itself.
    // 2. The list of teachers and subgroups.
@@ -486,7 +597,7 @@ fn parse_day_boundaries(
    date: DateTime<Utc>,
    row_range: (u32, u32),
    column: u32,
) -> Result<Vec<BoundariesCellInfo>, ParseError> {
) -> Result<Vec<BoundariesCellInfo>, crate::parser::error::Error> {
    let mut day_times: Vec<BoundariesCellInfo> = Vec::new();

    for row in row_range.0..row_range.1 {
@@ -497,7 +608,7 @@ fn parse_day_boundaries(
    };

    let lesson_time = parse_lesson_boundaries_cell(&time_cell, date.clone()).ok_or(
        ParseError::LessonBoundaries(ErrorCell::new(row, column, time_cell.clone())),
        error::Error::LessonBoundaries(ErrorCell::new(row, column, time_cell.clone())),
    )?;

    // type
@@ -542,7 +653,7 @@ fn parse_day_boundaries(
fn parse_week_boundaries(
    worksheet: &WorkSheet,
    week_markup: &Vec<DayCellInfo>,
) -> Result<Vec<Vec<BoundariesCellInfo>>, ParseError> {
) -> Result<Vec<Vec<BoundariesCellInfo>>, crate::parser::error::Error> {
    let mut result: Vec<Vec<BoundariesCellInfo>> = Vec::new();

    let worksheet_end_row = worksheet.end().unwrap().0;
@@ -662,7 +773,7 @@ fn convert_groups_to_teachers(
///
/// * `buffer`: XLS data containing the schedule.
///
/// returns: Result<ParseResult, ParseError>
/// returns: Result<ParseResult, crate::parser::error::Error>
///
/// # Examples
///
@@ -676,21 +787,21 @@ fn convert_groups_to_teachers(
/// assert_ne!(result.as_ref().unwrap().groups.len(), 0);
/// assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
/// ```
pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule, crate::parser::error::Error> {
    let cursor = Cursor::new(&buffer);
    let mut workbook: Xls<_> =
        open_workbook_from_rs(cursor).map_err(|e| ParseError::BadXLS(std::sync::Arc::new(e)))?;
        open_workbook_from_rs(cursor).map_err(|e| error::Error::BadXLS(std::sync::Arc::new(e)))?;

    let worksheet = {
        let (worksheet_name, worksheet) = workbook
            .worksheets()
            .first()
            .ok_or(ParseError::NoWorkSheets)?
            .ok_or(error::Error::NoWorkSheets)?
            .clone();

        let worksheet_merges = workbook
            .worksheet_merge_cells(&*worksheet_name)
            .ok_or(ParseError::NoWorkSheets)?;
            .ok_or(error::Error::NoWorkSheets)?;

        WorkSheet {
            data: worksheet,
@@ -740,18 +851,19 @@ pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParseResult, ParseError> {
        groups.insert(group.name.clone(), group);
    }

    Ok(ParseResult {
    Ok(ParsedSchedule {
        teachers: convert_groups_to_teachers(&groups),
        groups,
    })
}

#[cfg(any(test, feature = "test-utils"))]
#[cfg(any(test, feature = "test"))]
pub mod test_utils {
    use super::*;
    use base::ParsedSchedule;

    pub fn test_result() -> Result<ParseResult, ParseError> {
        parse_xls(&include_bytes!("../../schedule.xls").to_vec())
    pub fn test_result() -> Result<ParsedSchedule, crate::parser::error::Error> {
        parse_xls(&include_bytes!("../../../../test-data/engels-polytechnic.xls").to_vec())
    }
}
providers/provider-engels-polytechnic/src/updater.rs (new file, +263)
@@ -0,0 +1,263 @@
use crate::parser::parse_xls;
use crate::updater::error::{Error, QueryUrlError, SnapshotCreationError};
use crate::xls_downloader::{FetchError, XlsDownloader};
use base::ScheduleSnapshot;

pub enum UpdateSource {
    Prepared(ScheduleSnapshot),

    Url(String),

    GrabFromSite {
        yandex_api_key: String,
        yandex_func_id: String,
    },
}

pub struct Updater {
    downloader: XlsDownloader,
    update_source: UpdateSource,
}

pub mod error {
    use crate::xls_downloader::FetchError;
    use derive_more::{Display, Error};

    #[derive(Debug, Display, Error)]
    pub enum Error {
        /// An error occurred while querying the Yandex Cloud API for a URL.
        ///
        /// This may result from network failures, invalid API credentials, or issues with the Yandex Cloud Function invocation.
        /// See [`QueryUrlError`] for more details about specific causes.
        QueryUrlFailed(QueryUrlError),

        /// The schedule snapshot creation process failed.
        ///
        /// This can happen due to URL conflicts (same URL already in use), failed network requests,
        /// download errors, or invalid XLS file content. See [`SnapshotCreationError`] for details.
        SnapshotCreationFailed(SnapshotCreationError),
    }

    /// Errors that may occur when querying the Yandex Cloud API to retrieve a URL.
    #[derive(Debug, Display, Error)]
    pub enum QueryUrlError {
        /// Occurs when the request to the Yandex Cloud API fails.
        ///
        /// This may be due to network issues, an invalid API key, an incorrect function ID, or other
        /// problems with the Yandex Cloud Function invocation.
        #[display("An error occurred during the request to the Yandex Cloud API: {_0}")]
        RequestFailed(reqwest::Error),
    }

    /// Errors that may occur during the creation of a schedule snapshot.
    #[derive(Debug, Display, Error)]
    pub enum SnapshotCreationError {
        /// The URL is the same as the one already being used (no update needed).
        #[display("The URL is the same as the one already being used.")]
        SameUrl,

        /// The URL query for the XLS file failed to execute, either due to network issues or invalid API parameters.
        #[display("Failed to fetch URL: {_0}")]
        FetchFailed(FetchError),

        /// Downloading the XLS file content failed after successfully obtaining the URL.
        #[display("Download failed: {_0}")]
        DownloadFailed(FetchError),

        /// The XLS file could not be parsed into a valid schedule format.
        #[display("Schedule data is invalid: {_0}")]
        InvalidSchedule(crate::parser::error::Error),
    }
}

impl Updater {
    /// Constructs a new `ScheduleSnapshot` by downloading and parsing schedule data from the specified URL.
    ///
    /// This method first checks whether the provided URL is the same as the one already configured in the downloader.
    /// If it differs, it updates the downloader's URL, fetches the XLS content, parses it, and creates a snapshot.
    /// Errors are returned for URL conflicts, network issues, download failures, or invalid data.
    ///
    /// # Arguments
    ///
    /// * `downloader`: A mutable reference to the `XlsDownloader` used to fetch the schedule data.
    /// * `url`: The source URL pointing to the XLS file containing schedule data.
    ///
    /// returns: Result<ScheduleSnapshot, SnapshotCreationError>
    pub async fn new_snapshot(
        downloader: &mut XlsDownloader,
        url: String,
    ) -> Result<ScheduleSnapshot, SnapshotCreationError> {
        if downloader.url.as_ref().is_some_and(|_url| _url.eq(&url)) {
            return Err(SnapshotCreationError::SameUrl);
        }

        let head_result = downloader.set_url(&*url).await.map_err(|error| {
            if let FetchError::Unknown(error) = &error {
                sentry::capture_error(&error);
            }

            SnapshotCreationError::FetchFailed(error)
        })?;

        let xls_data = downloader
            .fetch(false)
            .await
            .map_err(|error| {
                if let FetchError::Unknown(error) = &error {
                    sentry::capture_error(&error);
                }

                SnapshotCreationError::DownloadFailed(error)
            })?
            .data
            .unwrap();

        let parse_result = parse_xls(&xls_data).map_err(|error| {
            sentry::capture_error(&error);

            SnapshotCreationError::InvalidSchedule(error)
        })?;

        Ok(ScheduleSnapshot {
            fetched_at: head_result.requested_at,
            updated_at: head_result.uploaded_at,
            url,
            data: parse_result,
        })
    }

    /// Queries the Yandex Cloud Function (FaaS) to obtain a URL for the schedule file.
    ///
    /// This sends a POST request to the specified Yandex Cloud Function endpoint,
    /// using the provided API key for authentication. The returned URI is combined
    /// with the "https://politehnikum-eng.ru" base domain to form the complete URL.
    ///
    /// # Arguments
    ///
    /// * `api_key` - Authentication token for the Yandex Cloud API
    /// * `func_id` - ID of the target Yandex Cloud Function to invoke
    ///
    /// # Returns
    ///
    /// Result containing:
    /// - `Ok(String)` - Complete URL constructed from the Function's response
    /// - `Err(QueryUrlError)` - If the request or response processing fails
    async fn query_url(api_key: &str, func_id: &str) -> Result<String, QueryUrlError> {
        let client = reqwest::Client::new();

        let uri = client
            .post(format!(
                "https://functions.yandexcloud.net/{}?integration=raw",
                func_id
            ))
            .header("Authorization", format!("Api-Key {}", api_key))
            .send()
            .await
            .map_err(|error| QueryUrlError::RequestFailed(error))?
            .text()
            .await
            .map_err(|error| QueryUrlError::RequestFailed(error))?;

        Ok(format!("https://politehnikum-eng.ru{}", uri.trim()))
    }

    /// Initializes the schedule by resolving the URL from the given update source (a prepared snapshot,
    /// a fixed URL, or the Yandex Cloud Function) and creating a [`ScheduleSnapshot`] with the downloaded data.
    ///
    /// # Arguments
    ///
    /// * `update_source`: Where the schedule comes from: a prepared snapshot, a fixed URL, or Yandex Cloud credentials.
    ///
    /// # Returns
    ///
    /// Returns the `Updater` together with the initial snapshot, or an `Error` if:
    /// - the URL query to Yandex Cloud failed ([`QueryUrlError`])
    /// - schedule snapshot creation failed ([`SnapshotCreationError`])
    pub async fn new(update_source: UpdateSource) -> Result<(Self, ScheduleSnapshot), Error> {
        let mut this = Updater {
            downloader: XlsDownloader::new(),
            update_source,
        };

        if let UpdateSource::Prepared(snapshot) = &this.update_source {
            let snapshot = snapshot.clone();
            return Ok((this, snapshot));
        }

        let url = match &this.update_source {
            UpdateSource::Url(url) => {
                log::info!("The default link {} will be used", url);
                url.clone()
            }
            UpdateSource::GrabFromSite {
                yandex_api_key,
                yandex_func_id,
            } => {
                log::info!("Obtaining a link using FaaS...");
                Self::query_url(yandex_api_key, yandex_func_id)
                    .await
                    .map_err(|error| Error::QueryUrlFailed(error))?
            }
            _ => unreachable!(),
        };

        log::info!("For the initial setup, the link {} will be used", url);

        let snapshot = Self::new_snapshot(&mut this.downloader, url)
            .await
            .map_err(|error| Error::SnapshotCreationFailed(error))?;

        log::info!("Schedule snapshot successfully created!");

        Ok((this, snapshot))
    }

    /// Updates the schedule snapshot by querying the latest URL and checking for changes.
    /// If the URL hasn't changed, only the [`ScheduleSnapshot::fetched_at`] timestamp is refreshed.
    /// If it has changed, the new schedule data is downloaded and parsed.
    ///
    /// # Arguments
    ///
    /// * `current_snapshot`: The snapshot currently held by the caller; cloned when only the timestamp changes.
    ///
    /// returns: `Result<ScheduleSnapshot, Error>` - Returns an error if the URL query fails or schedule parsing encounters issues
    pub async fn update(
        &mut self,
        current_snapshot: &ScheduleSnapshot,
    ) -> Result<ScheduleSnapshot, Error> {
        if let UpdateSource::Prepared(snapshot) = &self.update_source {
            let mut snapshot = snapshot.clone();
            snapshot.update();
            return Ok(snapshot);
        }

        let url = match &self.update_source {
            UpdateSource::Url(url) => url.clone(),
            UpdateSource::GrabFromSite {
                yandex_api_key,
                yandex_func_id,
            } => Self::query_url(yandex_api_key.as_str(), yandex_func_id.as_str())
                .await
                .map_err(|error| Error::QueryUrlFailed(error))?,
            _ => unreachable!(),
        };

        let snapshot = match Self::new_snapshot(&mut self.downloader, url).await {
            Ok(snapshot) => snapshot,
            Err(SnapshotCreationError::SameUrl) => {
                let mut clone = current_snapshot.clone();
                clone.update();

                clone
            }
            Err(error) => return Err(Error::SnapshotCreationFailed(error)),
        };

        Ok(snapshot)
    }
}
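A minimal sketch of driving the updater directly from inside this crate, assuming the types above; the URL is a placeholder:

```rust
async fn refresh_once() -> Result<(), Box<dyn std::error::Error>> {
    // Updater::new builds the initial snapshot eagerly.
    let (mut updater, snapshot) =
        Updater::new(UpdateSource::Url("https://example.com/schedule.xls".into())).await?;

    // A later call either re-downloads (URL changed) or, on SameUrl,
    // returns a clone of the current snapshot with fetched_at bumped.
    let fresh = updater.update(&snapshot).await?;
    assert!(fresh.fetched_at >= snapshot.fetched_at);
    Ok(())
}
```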
providers/provider-engels-polytechnic/src/xls_downloader.rs (new file, +237)
@@ -0,0 +1,237 @@
use chrono::{DateTime, Utc};
use derive_more::{Display, Error};
use std::mem::discriminant;
use std::sync::Arc;
use utoipa::ToSchema;

/// XLS data retrieval errors.
#[derive(Clone, Debug, ToSchema, Display, Error)]
pub enum FetchError {
    /// The file URL is not set.
    #[display("The link to the timetable was not provided earlier.")]
    NoUrlProvided,

    /// Unknown error.
    #[display("An unknown error occurred while downloading the file.")]
    #[schema(value_type = String)]
    Unknown(Arc<reqwest::Error>),

    /// The server returned a status code other than 200.
    #[display("Server returned a status code {status_code}.")]
    BadStatusCode { status_code: u16 },

    /// The URL leads to a file of a different type.
    #[display("The link leads to a file of type '{content_type}'.")]
    BadContentType { content_type: String },

    /// The server does not return the expected headers.
    #[display("Server doesn't return expected header(s) '{expected_header}'.")]
    BadHeaders { expected_header: String },
}

impl FetchError {
    pub fn unknown(error: Arc<reqwest::Error>) -> Self {
        Self::Unknown(error)
    }

    pub fn bad_status_code(status_code: u16) -> Self {
        Self::BadStatusCode { status_code }
    }

    pub fn bad_content_type(content_type: &str) -> Self {
        Self::BadContentType {
            content_type: content_type.to_string(),
        }
    }

    pub fn bad_headers(expected_header: &str) -> Self {
        Self::BadHeaders {
            expected_header: expected_header.to_string(),
        }
    }
}

impl PartialEq for FetchError {
    fn eq(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}

/// Result of XLS data retrieval.
#[derive(Debug, PartialEq)]
pub struct FetchOk {
    /// File upload date.
    pub uploaded_at: DateTime<Utc>,

    /// Date the data was received.
    pub requested_at: DateTime<Utc>,

    /// File data.
    pub data: Option<Vec<u8>>,
}

impl FetchOk {
    /// Result without file content.
    pub fn head(uploaded_at: DateTime<Utc>) -> Self {
        FetchOk {
            uploaded_at,
            requested_at: Utc::now(),
            data: None,
        }
    }

    /// Full result.
    pub fn get(uploaded_at: DateTime<Utc>, data: Vec<u8>) -> Self {
        FetchOk {
            uploaded_at,
            requested_at: Utc::now(),
            data: Some(data),
        }
    }
}

pub type FetchResult = Result<FetchOk, FetchError>;

pub struct XlsDownloader {
    pub url: Option<String>,
}

impl XlsDownloader {
    pub fn new() -> Self {
        XlsDownloader { url: None }
    }

    async fn fetch_specified(url: &str, head: bool) -> FetchResult {
        let client = reqwest::Client::new();

        let response = if head {
            client.head(url)
        } else {
            client.get(url)
        }
        .header("User-Agent", ua_generator::ua::spoof_chrome_ua())
        .send()
        .await
        .map_err(|e| FetchError::unknown(Arc::new(e)))?;

        if response.status().as_u16() != 200 {
            return Err(FetchError::bad_status_code(response.status().as_u16()));
        }

        let headers = response.headers();

        let content_type = headers
            .get("Content-Type")
            .ok_or(FetchError::bad_headers("Content-Type"))?;

        if !headers.contains_key("etag") {
            return Err(FetchError::bad_headers("etag"));
        }

        let last_modified = headers
            .get("last-modified")
            .ok_or(FetchError::bad_headers("last-modified"))?;

        if content_type != "application/vnd.ms-excel" {
            return Err(FetchError::bad_content_type(content_type.to_str().unwrap()));
        }

        let last_modified = DateTime::parse_from_rfc2822(&last_modified.to_str().unwrap())
            .unwrap()
            .with_timezone(&Utc);

        Ok(if head {
            FetchOk::head(last_modified)
        } else {
            FetchOk::get(last_modified, response.bytes().await.unwrap().to_vec())
        })
    }

    pub async fn fetch(&self, head: bool) -> FetchResult {
        if self.url.is_none() {
            Err(FetchError::NoUrlProvided)
        } else {
            Self::fetch_specified(&*self.url.as_ref().unwrap(), head).await
        }
    }

    pub async fn set_url(&mut self, url: &str) -> FetchResult {
        let result = Self::fetch_specified(url, true).await;

        if let Ok(_) = result {
            self.url = Some(url.to_string());
        }

        result
    }
}

#[cfg(test)]
mod tests {
    use crate::xls_downloader::{FetchError, XlsDownloader};

    #[tokio::test]
    async fn bad_url() {
        let url = "bad_url";

        let mut downloader = XlsDownloader::new();
        assert!(downloader.set_url(url).await.is_err());
    }

    #[tokio::test]
    async fn bad_status_code() {
        let url = "https://www.google.com/not-found";

        let mut downloader = XlsDownloader::new();
        assert_eq!(
            downloader.set_url(url).await,
            Err(FetchError::bad_status_code(404))
        );
    }

    #[tokio::test]
    async fn bad_headers() {
        let url = "https://www.google.com/favicon.ico";

        let mut downloader = XlsDownloader::new();
        assert_eq!(
            downloader.set_url(url).await,
            Err(FetchError::BadHeaders {
                expected_header: "ETag".to_string(),
            })
        );
    }

    #[tokio::test]
    async fn bad_content_type() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb59fd46303008396ac96%2Fexample.txt";

        let mut downloader = XlsDownloader::new();
        assert!(downloader.set_url(url).await.is_err());
    }

    #[tokio::test]
    async fn ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";

        let mut downloader = XlsDownloader::new();
        assert!(downloader.set_url(url).await.is_ok());
    }

    #[tokio::test]
    async fn downloader_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";

        let mut downloader = XlsDownloader::new();
        assert!(downloader.set_url(url).await.is_ok());
        assert!(downloader.fetch(false).await.is_ok());
    }

    #[tokio::test]
    async fn downloader_no_url_provided() {
        let downloader = XlsDownloader::new();

        let result = downloader.fetch(false).await;
        assert_eq!(result, Err(FetchError::NoUrlProvided));
    }
}
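Because set_url performs the validating HEAD request itself, callers get URL validation and the final download as two separate steps. A hedged usage sketch with a placeholder URL:

```rust
async fn download_xls() -> Result<Vec<u8>, Box<dyn std::error::Error>> {
    let mut downloader = XlsDownloader::new();

    // HEAD: checks the status code, Content-Type, and the
    // etag/last-modified headers before the URL is remembered.
    downloader.set_url("https://example.com/schedule.xls").await?;

    // GET: on this path FetchOk::get always carries Some(data).
    let ok = downloader.fetch(false).await?;
    Ok(ok.data.expect("GET result carries file data"))
}
```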
providers/src/lib.rs (new file, +9)
@@ -0,0 +1,9 @@
pub use base;

pub use provider_engels_polytechnic::EngelsPolytechnicProvider;
pub use provider_engels_polytechnic::UpdateSource as EngelsPolytechnicUpdateSource;

#[cfg(feature = "test")]
pub mod test_utils {
    pub use provider_engels_polytechnic::test_utils as engels_polytechnic;
}
@@ -1,26 +0,0 @@
[package]
name = "schedule-parser"
version = "0.1.0"
edition = "2024"

[features]
test-utils = []

[dependencies]
calamine = "0.26"
chrono = { version = "0.4", features = ["serde"] }
derive_more = { version = "2", features = ["full"] }
sentry = "0.38"
serde = { version = "1.0.219", features = ["derive"] }
serde_repr = "0.1.20"
regex = "1.11.1"
utoipa = { version = "5", features = ["chrono"] }
strsim = "0.11.1"
log = "0.4.26"

[dev-dependencies]
criterion = "0.6"

[[bench]]
name = "parse"
harness = false

@@ -1,12 +0,0 @@
use criterion::{Criterion, criterion_group, criterion_main};

use schedule_parser::parse_xls;

pub fn bench_parse_xls(c: &mut Criterion) {
    let buffer: Vec<u8> = include_bytes!("../../schedule.xls").to_vec();

    c.bench_function("parse_xls", |b| b.iter(|| parse_xls(&buffer).unwrap()));
}

criterion_group!(benches, bench_parse_xls);
criterion_main!(benches);
@@ -1,227 +0,0 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use derive_more::{Display, Error};
|
||||
use serde::{Deserialize, Serialize, Serializer};
|
||||
use serde_repr::{Deserialize_repr, Serialize_repr};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
pub(crate) mod internal {
|
||||
use crate::schema::{LessonBoundaries, LessonType};
|
||||
use chrono::{DateTime, Utc};
|
||||
|
||||
/// Data cell storing the group name.
|
||||
pub struct GroupCellInfo {
|
||||
/// Column index.
|
||||
pub column: u32,
|
||||
|
||||
/// Text in the cell.
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
/// Data cell storing the line.
|
||||
pub struct DayCellInfo {
|
||||
/// Line index.
|
||||
pub row: u32,
|
||||
|
||||
/// Column index.
|
||||
pub column: u32,
|
||||
|
||||
/// Day name.
|
||||
pub name: String,
|
||||
|
||||
/// Date of the day.
|
||||
pub date: DateTime<Utc>,
|
||||
}
|
||||
|
||||
/// Data on the time of lessons from the second column of the schedule.
|
||||
pub struct BoundariesCellInfo {
|
||||
/// Temporary segment of the lesson.
|
||||
pub time_range: LessonBoundaries,
|
||||
|
||||
/// Type of lesson.
|
||||
pub lesson_type: LessonType,
|
||||
|
||||
/// The lesson index.
|
||||
pub default_index: Option<u32>,
|
||||
|
||||
/// The frame of the cell.
|
||||
pub xls_range: ((u32, u32), (u32, u32)),
|
||||
}
|
||||
}
|
||||
|
||||
/// The beginning and end of the lesson.
|
||||
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
|
||||
pub struct LessonBoundaries {
|
||||
/// The beginning of a lesson.
|
||||
pub start: DateTime<Utc>,
|
||||
|
||||
/// The end of the lesson.
|
||||
pub end: DateTime<Utc>,
|
||||
}
|
||||
|
||||
/// Type of lesson.
|
||||
#[derive(Clone, Hash, PartialEq, Debug, Serialize_repr, Deserialize_repr, ToSchema)]
|
||||
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
|
||||
#[repr(u8)]
|
||||
pub enum LessonType {
|
||||
/// Обычная.
|
||||
Default = 0,
|
||||
|
||||
/// Допы.
|
||||
Additional,
|
||||
|
||||
/// Перемена.
|
||||
Break,
|
||||
|
||||
/// Консультация.
|
||||
Consultation,
|
||||
|
||||
/// Самостоятельная работа.
|
||||
IndependentWork,
|
||||
|
||||
/// Зачёт.
|
||||
Exam,
|
||||
|
||||
/// Зачёт с оценкой.
|
||||
ExamWithGrade,
|
||||
|
||||
/// Экзамен.
|
||||
ExamDefault,
|
||||
|
||||
/// Курсовой проект.
|
||||
CourseProject,
|
||||
|
||||
/// Защита курсового проекта.
|
||||
    CourseProjectDefense,
}

#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct LessonSubGroup {
    /// Cabinet, if present.
    pub cabinet: Option<String>,

    /// Full name of the teacher.
    pub teacher: Option<String>,
}

#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct Lesson {
    /// Lesson type.
    #[serde(rename = "type")]
    pub lesson_type: LessonType,

    /// Lesson indexes, if present.
    pub range: Option<[u8; 2]>,

    /// Lesson name.
    pub name: Option<String>,

    /// Start and end times of the lesson.
    pub time: LessonBoundaries,

    /// List of subgroups.
    #[serde(rename = "subgroups")]
    pub subgroups: Option<Vec<Option<LessonSubGroup>>>,

    /// Group name, if this is a schedule for teachers.
    pub group: Option<String>,
}

#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct Day {
    /// Day of the week.
    pub name: String,

    /// Address of another building, if present.
    pub street: Option<String>,

    /// Date.
    pub date: DateTime<Utc>,

    /// List of lessons on this day.
    pub lessons: Vec<Lesson>,
}

#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]
pub struct ScheduleEntry {
    /// Name of the group or teacher.
    pub name: String,

    /// List of six days.
    pub days: Vec<Day>,
}

#[derive(Clone)]
pub struct ParseResult {
    /// List of groups.
    pub groups: HashMap<String, ScheduleEntry>,

    /// List of teachers.
    pub teachers: HashMap<String, ScheduleEntry>,
}

#[derive(Clone, Debug, Display, Error, ToSchema)]
#[display("row {row}, column {column}")]
pub struct ErrorCellPos {
    pub row: u32,
    pub column: u32,
}

#[derive(Clone, Debug, Display, Error, ToSchema)]
#[display("'{data}' at {pos}")]
pub struct ErrorCell {
    pub pos: ErrorCellPos,
    pub data: String,
}

impl ErrorCell {
    pub fn new(row: u32, column: u32, data: String) -> Self {
        Self {
            pos: ErrorCellPos { row, column },
            data,
        }
    }
}

#[derive(Clone, Debug, Display, Error, ToSchema)]
pub enum ParseError {
    /// Errors related to reading the XLS file.
    #[display("{_0:?}: Failed to read XLS file.")]
    #[schema(value_type = String)]
    BadXLS(Arc<calamine::XlsError>),

    /// No worksheets were found.
    #[display("No work sheets found.")]
    NoWorkSheets,

    /// No data on the worksheet boundaries.
    #[display("There is no data on work sheet boundaries.")]
    UnknownWorkSheetRange,

    /// Failed to read the lesson start and end from the cell.
    #[display("Failed to read lesson start and end from {_0}.")]
    LessonBoundaries(ErrorCell),

    /// No start and end times matching the lesson were found.
    #[display("No start and end times matching the lesson (at {_0}) were found.")]
    LessonTimeNotFound(ErrorCellPos),
}

impl Serialize for ParseError {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            ParseError::BadXLS(_) => serializer.serialize_str("BAD_XLS"),
            ParseError::NoWorkSheets => serializer.serialize_str("NO_WORK_SHEETS"),
            ParseError::UnknownWorkSheetRange => {
                serializer.serialize_str("UNKNOWN_WORK_SHEET_RANGE")
            }
            ParseError::LessonBoundaries(_) => serializer.serialize_str("GLOBAL_TIME"),
            ParseError::LessonTimeNotFound(_) => serializer.serialize_str("LESSON_TIME_NOT_FOUND"),
        }
    }
}
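The custom Serialize impl above deliberately flattens ParseError to a bare string code, so API clients can match on stable identifiers instead of display text. A minimal sketch of what that looks like on the wire, assuming serde_json is available as a dev-dependency (the test module name is illustrative, not repository code):

#[cfg(test)]
mod parse_error_wire_format {
    use super::*;

    #[test]
    fn parse_error_serializes_to_a_string_code() {
        // The Display text ("No work sheets found.") is dropped; only the code remains.
        let json = serde_json::to_string(&ParseError::NoWorkSheets).unwrap();
        assert_eq!(json, "\"NO_WORK_SHEETS\"");
    }
}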
@@ -4,7 +4,7 @@ pub mod users {
    use crate::database::schema::users::dsl::*;
    use crate::state::AppState;
    use actix_web::web;
    use diesel::{ExpressionMethods, QueryResult, insert_into};
    use diesel::{insert_into, ExpressionMethods, QueryResult};
    use diesel::{QueryDsl, RunQueryDsl};
    use diesel::{SaveChangesDsl, SelectableHelper};
    use std::ops::DerefMut;
@@ -146,19 +146,3 @@ pub mod users {
            .execute(state.get_database().await.deref_mut())
    }
}

pub mod fcm {
    use crate::database::models::{FCM, User};
    use crate::state::AppState;
    use actix_web::web;
    use diesel::QueryDsl;
    use diesel::RunQueryDsl;
    use diesel::{BelongingToDsl, QueryResult, SelectableHelper};
    use std::ops::DerefMut;

    pub async fn from_user(state: &web::Data<AppState>, user: &User) -> QueryResult<FCM> {
        FCM::belonging_to(&user)
            .select(FCM::as_select())
            .get_result(state.get_database().await.deref_mut())
    }
}
@@ -1,13 +1,13 @@
use crate::database::driver;
use crate::database::models::{FCM, User};
use crate::extractors::base::{AsyncExtractor, FromRequestAsync};
use crate::database::models::User;
use crate::extractors::base::FromRequestAsync;
use crate::state::AppState;
use crate::utility::jwt;
use actix_macros::MiddlewareError;
use actix_web::body::BoxBody;
use actix_web::dev::Payload;
use actix_web::http::header;
use actix_web::{FromRequest, HttpRequest, web};
use actix_web::{web, HttpRequest};
use derive_more::Display;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
@@ -95,48 +95,3 @@ impl FromRequestAsync for User {
        .map_err(|_| Error::NoUser.into())
    }
}

pub struct UserExtractor<const FCM: bool> {
    user: User,

    fcm: Option<FCM>,
}

impl<const FCM: bool> UserExtractor<{ FCM }> {
    pub fn user(&self) -> &User {
        &self.user
    }

    pub fn fcm(&self) -> &Option<FCM> {
        if !FCM {
            panic!("FCM marked as not required, but it has been requested")
        }

        &self.fcm
    }
}

/// Extracts the user and optional extras from a request carrying a Bearer token.
impl<const FCM: bool> FromRequestAsync for UserExtractor<{ FCM }> {
    type Error = actix_web::Error;

    async fn from_request_async(
        req: &HttpRequest,
        payload: &mut Payload,
    ) -> Result<Self, Self::Error> {
        let user = AsyncExtractor::<User>::from_request(req, payload)
            .await?
            .into_inner();

        let app_state = req.app_data::<web::Data<AppState>>().unwrap();

        Ok(Self {
            fcm: if FCM {
                driver::fcm::from_user(&app_state, &user).await.ok()
            } else {
                None
            },
            user,
        })
    }
}
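Before its removal, the const-generic extractor was consumed in handlers along the lines of the sketch below (hypothetical route and response text, not repository code); `UserExtractor<true>` additionally loads the user's FCM row, while `UserExtractor<false>` skips the query:

// Hypothetical handler sketch; the `/whoami` path and output format are assumptions.
#[actix_web::get("/whoami")]
async fn whoami(user_data: AsyncExtractor<UserExtractor<true>>) -> String {
    let user_data = user_data.into_inner();

    format!(
        "user {} (fcm token on file: {})",
        user_data.user().id,
        user_data.fcm().is_some()
    )
}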
12
src/main.rs
@@ -1,21 +1,19 @@
use crate::middlewares::authorization::JWTAuthorization;
use crate::middlewares::content_type::ContentTypeBootstrap;
use crate::state::{AppState, new_app_state};
use crate::state::{new_app_state, AppState};
use actix_web::dev::{ServiceFactory, ServiceRequest};
use actix_web::{App, Error, HttpServer};
use dotenvy::dotenv;
use log::info;
use std::io;
use utoipa_actix_web::AppExt;
use utoipa_actix_web::scope::Scope;
use utoipa_actix_web::AppExt;
use utoipa_rapidoc::RapiDoc;

mod state;

mod database;

mod xls_downloader;

mod extractors;
mod middlewares;
mod routes;
@@ -53,11 +51,6 @@ pub fn get_api_scope<
        .service(routes::schedule::teacher)
        .service(routes::schedule::teacher_names);

    let fcm_scope = utoipa_actix_web::scope("/fcm")
        .wrap(JWTAuthorization::default())
        .service(routes::fcm::update_callback)
        .service(routes::fcm::set_token);

    let flow_scope = utoipa_actix_web::scope("/flow")
        .wrap(JWTAuthorization {
            ignore: &["/telegram-auth"],
@@ -72,7 +65,6 @@ pub fn get_api_scope<
        .service(auth_scope)
        .service(users_scope)
        .service(schedule_scope)
        .service(fcm_scope)
        .service(flow_scope)
        .service(vk_id_scope)
}
@@ -1,2 +1,4 @@
pub mod error;

pub mod authorization;
pub mod content_type;
@@ -18,8 +18,9 @@ async fn sign_up_combined(
    }

    if !app_state
        .get_schedule_snapshot()
        .get_schedule_snapshot("eng_polytechnic")
        .await
        .unwrap()
        .data
        .groups
        .contains_key(&data.group)
@@ -1,5 +0,0 @@
mod set_token;
mod update_callback;

pub use set_token::*;
pub use update_callback::*;
@@ -1,94 +0,0 @@
use crate::database;
use crate::database::models::FCM;
use crate::extractors::authorized_user::UserExtractor;
use crate::extractors::base::AsyncExtractor;
use crate::state::AppState;
use actix_web::{HttpResponse, Responder, patch, web};
use diesel::{RunQueryDsl, SaveChangesDsl};
use firebase_messaging_rs::topic::TopicManagementSupport;
use serde::Deserialize;
use std::ops::DerefMut;

#[derive(Debug, Deserialize)]
struct Params {
    pub token: String,
}

async fn get_fcm(
    app_state: &web::Data<AppState>,
    user_data: &UserExtractor<true>,
    token: String,
) -> Result<FCM, diesel::result::Error> {
    match user_data.fcm() {
        Some(fcm) => {
            let mut fcm = fcm.clone();
            fcm.token = token;

            Ok(fcm)
        }
        None => {
            let fcm = FCM {
                user_id: user_data.user().id.clone(),
                token,
                topics: vec![],
            };

            match diesel::insert_into(database::schema::fcm::table)
                .values(&fcm)
                .execute(app_state.get_database().await.deref_mut())
            {
                Ok(_) => Ok(fcm),
                Err(e) => Err(e),
            }
        }
    }
}

#[utoipa::path(responses((status = OK)))]
#[patch("/set-token")]
pub async fn set_token(
    app_state: web::Data<AppState>,
    web::Query(params): web::Query<Params>,
    user_data: AsyncExtractor<UserExtractor<true>>,
) -> impl Responder {
    let user_data = user_data.into_inner();

    // If the token hasn't changed, exit early.
    if let Some(fcm) = user_data.fcm() {
        if fcm.token == params.token {
            return HttpResponse::Ok();
        }
    }

    let fcm = get_fcm(&app_state, &user_data, params.token.clone()).await;
    if let Err(e) = fcm {
        eprintln!("Failed to get FCM: {e}");
        return HttpResponse::Ok();
    }

    let mut fcm = fcm.ok().unwrap();

    // Add default topics.
    if !fcm.topics.contains(&Some("common".to_string())) {
        fcm.topics.push(Some("common".to_string()));
    }

    fcm.save_changes::<FCM>(app_state.get_database().await.deref_mut())
        .unwrap();

    let fcm_client = app_state.get_fcm_client().await.unwrap();

    for topic in fcm.topics.clone() {
        if let Some(topic) = topic {
            if let Err(error) = fcm_client
                .register_token_to_topic(&*topic, &*fcm.token)
                .await
            {
                eprintln!("Failed to subscribe token to topic: {:?}", error);
                return HttpResponse::Ok();
            }
        }
    }

    HttpResponse::Ok()
}
@@ -1,24 +0,0 @@
use crate::database::driver::users::UserSave;
use crate::database::models::User;
use crate::extractors::base::AsyncExtractor;
use crate::state::AppState;
use actix_web::{HttpResponse, Responder, post, web};

#[utoipa::path(responses(
    (status = OK),
    (status = INTERNAL_SERVER_ERROR)
))]
#[post("/update-callback/{version}")]
async fn update_callback(
    app_state: web::Data<AppState>,
    version: web::Path<String>,
    user: AsyncExtractor<User>,
) -> impl Responder {
    let mut user = user.into_inner();

    user.android_version = Some(version.into_inner());

    user.save(&app_state).await.unwrap();

    HttpResponse::Ok()
}
@@ -40,8 +40,9 @@ pub async fn telegram_complete(

    // Check that the group exists.
    if !app_state
        .get_schedule_snapshot()
        .get_schedule_snapshot("eng_polytechnic")
        .await
        .unwrap()
        .data
        .groups
        .contains_key(&data.group)
@@ -1,5 +1,4 @@
pub mod auth;
pub mod fcm;
pub mod flow;
pub mod schedule;
mod schema;
@@ -31,8 +31,9 @@ pub async fn group(user: AsyncExtractor<User>, app_state: web::Data<AppState>) -
    None => Err(ErrorCode::SignUpNotCompleted),

    Some(group) => match app_state
        .get_schedule_snapshot()
        .get_schedule_snapshot("eng_polytechnic")
        .await
        .unwrap()
        .data
        .groups
        .get(group)
@@ -6,8 +6,9 @@ use actix_web::{get, web};
#[get("/group-names")]
pub async fn group_names(app_state: web::Data<AppState>) -> Response {
    let mut names: Vec<String> = app_state
        .get_schedule_snapshot()
        .get_schedule_snapshot("eng_polytechnic")
        .await
        .unwrap()
        .data
        .groups
        .keys()
@@ -1,7 +1,7 @@
use crate::state::{AppState, ScheduleSnapshot};
use crate::state::AppState;
use actix_macros::{OkResponse, ResponderJson};
use actix_web::web;
use schedule_parser::schema::ScheduleEntry;
use providers::base::{ScheduleEntry, ScheduleSnapshot};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::ops::Deref;
@@ -32,7 +32,12 @@ impl From<ScheduleEntry> for ScheduleEntryResponse {

impl ScheduleView {
    pub async fn from(app_state: &web::Data<AppState>) -> Self {
        let schedule = app_state.get_schedule_snapshot().await.clone();
        let schedule = app_state
            .get_schedule_snapshot("eng_polytechnic")
            .await
            .unwrap()
            .deref()
            .clone();

        Self {
            url: schedule.url,
@@ -60,7 +65,13 @@ pub struct CacheStatus {

impl CacheStatus {
    pub async fn from(value: &web::Data<AppState>) -> Self {
        From::<&ScheduleSnapshot>::from(value.get_schedule_snapshot().await.deref())
        From::<&ScheduleSnapshot>::from(
            value
                .get_schedule_snapshot("eng_polytechnic")
                .await
                .unwrap()
                .deref(),
        )
    }
}
@@ -2,7 +2,7 @@ use self::schema::*;
use crate::AppState;
use crate::routes::schema::ResponseError;
use actix_web::{get, web};
use schedule_parser::schema::ScheduleEntry;
use providers::base::ScheduleEntry;

#[utoipa::path(responses(
    (status = OK, body = ScheduleEntry),
@@ -18,8 +18,9 @@ use schedule_parser::schema::ScheduleEntry;
#[get("/teacher/{name}")]
pub async fn teacher(name: web::Path<String>, app_state: web::Data<AppState>) -> ServiceResponse {
    match app_state
        .get_schedule_snapshot()
        .get_schedule_snapshot("eng_polytechnic")
        .await
        .unwrap()
        .data
        .teachers
        .get(&name.into_inner())
@@ -6,8 +6,9 @@ use actix_web::{get, web};
#[get("/teacher-names")]
pub async fn teacher_names(app_state: web::Data<AppState>) -> Response {
    let mut names: Vec<String> = app_state
        .get_schedule_snapshot()
        .get_schedule_snapshot("eng_polytechnic")
        .await
        .unwrap()
        .data
        .teachers
        .keys()
@@ -131,7 +131,7 @@ pub mod user {
    use serde::Serialize;

    //noinspection SpellCheckingInspection
    /// Used to hide sensitive fields, such as the password hash or FCM.
    /// Used to hide sensitive fields, such as the password hash.
    #[derive(Serialize, utoipa::ToSchema, ResponderJson, OkResponse)]
    #[serde(rename_all = "camelCase")]
    pub struct UserResponse {
@@ -19,8 +19,9 @@ pub async fn change_group(
    }

    if !app_state
        .get_schedule_snapshot()
        .get_schedule_snapshot("eng_polytechnic")
        .await
        .unwrap()
        .data
        .groups
        .contains_key(&data.group)
6
src/state/env/mod.rs
vendored
@@ -1,11 +1,15 @@
pub mod schedule;
pub mod telegram;
pub mod vk_id;

#[cfg(not(test))]
pub mod yandex_cloud;

pub use self::schedule::ScheduleEnvData;
pub use self::telegram::TelegramEnvData;
pub use self::vk_id::VkIdEnvData;

#[cfg(not(test))]
pub use self::yandex_cloud::YandexCloudEnvData;

#[derive(Default)]
@@ -13,5 +17,7 @@ pub struct AppEnv {
    pub schedule: ScheduleEnvData,
    pub telegram: TelegramEnvData,
    pub vk_id: VkIdEnvData,

    #[cfg(not(test))]
    pub yandex_cloud: YandexCloudEnvData,
}
2
src/state/env/schedule.rs
vendored
@@ -2,6 +2,7 @@ use std::env;

#[derive(Clone)]
pub struct ScheduleEnvData {
    #[cfg(not(test))]
    pub url: Option<String>,
    pub auto_update: bool,
}
@@ -9,6 +10,7 @@ pub struct ScheduleEnvData {
impl Default for ScheduleEnvData {
    fn default() -> Self {
        Self {
            #[cfg(not(test))]
            url: env::var("SCHEDULE_INIT_URL").ok(),
            auto_update: !env::var("SCHEDULE_DISABLE_AUTO_UPDATE")
                .is_ok_and(|v| v.eq("1") || v.eq("true")),
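Note the negation: auto-update stays enabled unless the variable is set to exactly `1` or `true`. A sketch of the same rule as a pure function, with the resulting truth table (hypothetical helper, not repository code):

// Mirrors the Default impl above: "flag present and truthy" disables updates.
fn auto_update_enabled(var: Result<String, std::env::VarError>) -> bool {
    !var.is_ok_and(|v| v == "1" || v == "true")
}

// unset         -> enabled
// "1" or "true" -> disabled
// anything else -> enabled (e.g. "0" or "TRUE" do not disable; matching is case-sensitive)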
@@ -1,15 +0,0 @@
use firebase_messaging_rs::FCMClient;
use std::env;
use tokio::sync::Mutex;

#[derive(Clone)]
pub struct FCMClientData;

impl FCMClientData {
    pub async fn new() -> Option<Mutex<FCMClient>> {
        match env::var("GOOGLE_APPLICATION_CREDENTIALS") {
            Ok(_) => Some(Mutex::new(FCMClient::new().await.unwrap())),
            Err(_) => None,
        }
    }
}
103
src/state/mod.rs
@@ -1,69 +1,83 @@
mod env;
mod fcm_client;
mod schedule;

use crate::state::fcm_client::FCMClientData;
use crate::xls_downloader::basic_impl::BasicXlsDownloader;
pub use crate::state::env::AppEnv;
use actix_web::web;
use diesel::{Connection, PgConnection};
use firebase_messaging_rs::FCMClient;
use std::ops::DerefMut;
use tokio::sync::{MappedMutexGuard, Mutex, MutexGuard};

pub use self::schedule::{Schedule, ScheduleSnapshot};
pub use crate::state::env::AppEnv;
use providers::base::{ScheduleProvider, ScheduleSnapshot};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::{Mutex, MutexGuard};
use tokio_util::sync::CancellationToken;

/// Common data provided to endpoints.
pub struct AppState {
    cancel_token: CancellationToken,
    database: Mutex<PgConnection>,
    downloader: Mutex<BasicXlsDownloader>,
    schedule: Mutex<Schedule>,
    providers: HashMap<String, Arc<dyn ScheduleProvider>>,
    env: AppEnv,
    fcm_client: Option<Mutex<FCMClient>>,
}

impl AppState {
    pub async fn new() -> Result<Self, self::schedule::Error> {
    pub async fn new() -> Result<Self, Box<dyn std::error::Error>> {
        let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");

        let mut _self = Self {
            downloader: Mutex::new(BasicXlsDownloader::new()),
        let env = AppEnv::default();
        let providers: HashMap<String, Arc<dyn ScheduleProvider>> = HashMap::from([(
            "eng_polytechnic".to_string(),
            providers::EngelsPolytechnicProvider::new({
                #[cfg(test)]
                {
                    providers::EngelsPolytechnicUpdateSource::Prepared(ScheduleSnapshot {
                        url: "".to_string(),
                        fetched_at: chrono::DateTime::default(),
                        updated_at: chrono::DateTime::default(),
                        data: providers::test_utils::engels_polytechnic::test_result().unwrap(),
                    })
                }

            schedule: Mutex::new(Schedule::default()),
                #[cfg(not(test))]
                {
                    if let Some(url) = &env.schedule.url {
                        providers::EngelsPolytechnicUpdateSource::Url(url.clone())
                    } else {
                        providers::EngelsPolytechnicUpdateSource::GrabFromSite {
                            yandex_api_key: env.yandex_cloud.api_key.clone(),
                            yandex_func_id: env.yandex_cloud.func_id.clone(),
                        }
                    }
                }
            })
            .await?,
        )]);

        let this = Self {
            cancel_token: CancellationToken::new(),
            database: Mutex::new(
                PgConnection::establish(&database_url)
                    .unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
            ),
            env: AppEnv::default(),
            fcm_client: FCMClientData::new().await,
            env,
            providers,
        };

        if _self.env.schedule.auto_update {
            _self
                .get_schedule()
                .await
                .init(_self.get_downloader().await.deref_mut(), &_self.env)
                .await?;
        if this.env.schedule.auto_update {
            for (_, provider) in &this.providers {
                let provider = provider.clone();
                let cancel_token = this.cancel_token.clone();

                tokio::spawn(async move { provider.start_auto_update_task(cancel_token).await });
            }
        }

        Ok(_self)
        Ok(this)
    }

    pub async fn get_downloader(&'_ self) -> MutexGuard<'_, BasicXlsDownloader> {
        self.downloader.lock().await
    }
    pub async fn get_schedule_snapshot(&'_ self, provider: &str) -> Option<Arc<ScheduleSnapshot>> {
        if let Some(provider) = self.providers.get(provider) {
            return Some(provider.get_schedule().await);
        }

    pub async fn get_schedule(&'_ self) -> MutexGuard<'_, Schedule> {
        self.schedule.lock().await
    }

    pub async fn get_schedule_snapshot(&'_ self) -> MappedMutexGuard<'_, ScheduleSnapshot> {
        let snapshot =
            MutexGuard::<'_, Schedule>::map(self.schedule.lock().await, |schedule| unsafe {
                schedule.snapshot.assume_init_mut()
            });

        snapshot
        None
    }

    pub async fn get_database(&'_ self) -> MutexGuard<'_, PgConnection> {
@@ -73,16 +87,9 @@ impl AppState {
    pub fn get_env(&self) -> &AppEnv {
        &self.env
    }

    pub async fn get_fcm_client(&'_ self) -> Option<MutexGuard<'_, FCMClient>> {
        match &self.fcm_client {
            Some(client) => Some(client.lock().await),
            None => None,
        }
    }
}

/// Creates a new web::Data<AppState> object.
pub async fn new_app_state() -> Result<web::Data<AppState>, self::schedule::Error> {
pub async fn new_app_state() -> Result<web::Data<AppState>, Box<dyn std::error::Error>> {
    Ok(web::Data::new(AppState::new().await?))
}
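Every call site now repeats the `get_schedule_snapshot("eng_polytechnic").await.unwrap()` chain. One way to centralise the lookup and replace the `unwrap()` with a proper HTTP error would be a small helper along these lines (a sketch; `snapshot_or_500` and its error mapping are assumptions, not repository code):

use actix_web::error::ErrorInternalServerError;

pub async fn snapshot_or_500(
    state: &web::Data<AppState>,
    provider: &str,
) -> actix_web::Result<Arc<ScheduleSnapshot>> {
    // None here means the provider key is unknown; surface it as a 500
    // instead of panicking inside a request handler.
    state
        .get_schedule_snapshot(provider)
        .await
        .ok_or_else(|| ErrorInternalServerError(format!("unknown provider '{provider}'")))
}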
@@ -1,290 +0,0 @@
use crate::state::env::AppEnv;
use crate::utility::hasher::DigestHasher;
use chrono::{DateTime, Utc};
use derive_more::{Display, Error};
use schedule_parser::parse_xls;
use schedule_parser::schema::{ParseError, ParseResult};
use sha1::{Digest, Sha1};
use std::hash::Hash;
use std::mem::MaybeUninit;

use crate::xls_downloader::basic_impl::BasicXlsDownloader;
use crate::xls_downloader::interface::{FetchError, XLSDownloader};

/// Represents errors that can occur during schedule-related operations.
#[derive(Debug, Display, Error)]
pub enum Error {
    /// An error occurred while querying the Yandex Cloud API for a URL.
    ///
    /// This may result from network failures, invalid API credentials, or issues with the Yandex Cloud Function invocation.
    /// See [`QueryUrlError`] for more details about specific causes.
    QueryUrlFailed(QueryUrlError),

    /// The schedule snapshot creation process failed.
    ///
    /// This can happen due to URL conflicts (same URL already in use), failed network requests,
    /// download errors, or invalid XLS file content. See [`SnapshotCreationError`] for details.
    SnapshotCreationFailed(SnapshotCreationError),
}

/// Errors that may occur when querying the Yandex Cloud API to retrieve a URL.
#[derive(Debug, Display, Error)]
pub enum QueryUrlError {
    /// Occurs when the request to the Yandex Cloud API fails.
    ///
    /// This may be due to network issues, an invalid API key, an incorrect function ID, or other
    /// problems with the Yandex Cloud Function invocation.
    #[display("An error occurred during the request to the Yandex Cloud API: {_0}")]
    RequestFailed(reqwest::Error),
}

/// Errors that may occur during the creation of a schedule snapshot.
#[derive(Debug, Display, Error)]
pub enum SnapshotCreationError {
    /// The URL is the same as the one already being used (no update needed).
    #[display("The URL is the same as the one already being used.")]
    SameUrl,

    /// The URL query for the XLS file failed to execute, either due to network issues or invalid API parameters.
    #[display("Failed to fetch URL: {_0}")]
    FetchFailed(FetchError),

    /// Downloading the XLS file content failed after successfully obtaining the URL.
    #[display("Download failed: {_0}")]
    DownloadFailed(FetchError),

    /// The XLS file could not be parsed into a valid schedule format.
    #[display("Schedule data is invalid: {_0}")]
    InvalidSchedule(ParseError),
}

/// Represents a snapshot of the schedule parsed from an XLS file.
#[derive(Clone)]
pub struct ScheduleSnapshot {
    /// Timestamp when the Polytechnic website was queried for the schedule.
    pub fetched_at: DateTime<Utc>,

    /// Timestamp indicating when the schedule was last updated on the Polytechnic website.
    ///
    /// <note>
    /// This value is determined by the website's content and does not depend on the application.
    /// </note>
    pub updated_at: DateTime<Utc>,

    /// URL pointing to the XLS file containing the source schedule data.
    pub url: String,

    /// Parsed schedule data in the application's internal representation.
    pub data: ParseResult,
}

impl ScheduleSnapshot {
    /// Converts the schedule data into a hash.
    /// ### Important!
    /// The hash does not depend on the dates.
    /// If the application is restarted but the source schedule file remains unchanged, the hash will not change.
    pub fn hash(&self) -> String {
        let mut hasher = DigestHasher::from(Sha1::new());

        self.data.teachers.iter().for_each(|e| e.hash(&mut hasher));
        self.data.groups.iter().for_each(|e| e.hash(&mut hasher));

        hasher.finalize()
    }

    /// Simply updates the value of [`ScheduleSnapshot::fetched_at`].
    /// Used for auto-updates.
    pub fn update(&mut self) {
        self.fetched_at = Utc::now();
    }

    /// Constructs a new `ScheduleSnapshot` by downloading and parsing schedule data from the specified URL.
    ///
    /// This method first checks whether the provided URL is the same as the one already configured in the downloader.
    /// If different, it updates the downloader's URL, fetches the XLS content, parses it, and creates a snapshot.
    /// Errors are returned for URL conflicts, network issues, download failures, or invalid data.
    ///
    /// # Arguments
    ///
    /// * `downloader`: A mutable reference to an `XLSDownloader` implementation used to fetch and parse the schedule data.
    /// * `url`: The source URL pointing to the XLS file containing schedule data.
    ///
    /// returns: Result<ScheduleSnapshot, SnapshotCreationError>
    pub async fn new(
        downloader: &mut BasicXlsDownloader,
        url: String,
    ) -> Result<Self, SnapshotCreationError> {
        if downloader.url.as_ref().is_some_and(|_url| _url.eq(&url)) {
            return Err(SnapshotCreationError::SameUrl);
        }

        let head_result = downloader.set_url(&*url).await.map_err(|error| {
            if let FetchError::Unknown(error) = &error {
                sentry::capture_error(&error);
            }

            SnapshotCreationError::FetchFailed(error)
        })?;

        let xls_data = downloader
            .fetch(false)
            .await
            .map_err(|error| {
                if let FetchError::Unknown(error) = &error {
                    sentry::capture_error(&error);
                }

                SnapshotCreationError::DownloadFailed(error)
            })?
            .data
            .unwrap();

        let parse_result = parse_xls(&xls_data).map_err(|error| {
            sentry::capture_error(&error);

            SnapshotCreationError::InvalidSchedule(error)
        })?;

        Ok(ScheduleSnapshot {
            fetched_at: head_result.requested_at,
            updated_at: head_result.uploaded_at,
            url,
            data: parse_result,
        })
    }
}

pub struct Schedule {
    pub snapshot: MaybeUninit<ScheduleSnapshot>,
}

impl Default for Schedule {
    fn default() -> Self {
        Self {
            snapshot: MaybeUninit::uninit(),
        }
    }
}

impl Schedule {
    /// Queries the Yandex Cloud Function (FaaS) to obtain a URL for the schedule file.
    ///
    /// This sends a POST request to the specified Yandex Cloud Function endpoint,
    /// using the provided API key for authentication. The returned URI is combined
    /// with the "https://politehnikum-eng.ru" base domain to form the complete URL.
    ///
    /// # Arguments
    ///
    /// * `api_key` - Authentication token for the Yandex Cloud API
    /// * `func_id` - ID of the target Yandex Cloud Function to invoke
    ///
    /// # Returns
    ///
    /// Result containing:
    /// - `Ok(String)` - Complete URL constructed from the Function's response
    /// - `Err(QueryUrlError)` - If the request or response processing fails
    async fn query_url(api_key: &str, func_id: &str) -> Result<String, QueryUrlError> {
        let client = reqwest::Client::new();

        let uri = client
            .post(format!(
                "https://functions.yandexcloud.net/{}?integration=raw",
                func_id
            ))
            .header("Authorization", format!("Api-Key {}", api_key))
            .send()
            .await
            .map_err(|error| QueryUrlError::RequestFailed(error))?
            .text()
            .await
            .map_err(|error| QueryUrlError::RequestFailed(error))?;

        Ok(format!("https://politehnikum-eng.ru{}", uri.trim()))
    }

    /// Initializes the schedule by fetching the URL from the environment or the Yandex Cloud Function (FaaS)
    /// and creating a [`ScheduleSnapshot`] with the downloaded data.
    ///
    /// # Arguments
    ///
    /// * `downloader`: Mutable reference to an `XLSDownloader` implementation used to fetch and parse the schedule
    /// * `app_env`: Reference to the application environment containing either a predefined URL or Yandex Cloud credentials
    ///
    /// # Returns
    ///
    /// Returns `Ok(())` if the snapshot was successfully initialized, or an `Error` if:
    /// - The URL query to Yandex Cloud failed ([`QueryUrlError`])
    /// - Schedule snapshot creation failed ([`SnapshotCreationError`])
    pub async fn init(
        &mut self,
        downloader: &mut BasicXlsDownloader,
        app_env: &AppEnv,
    ) -> Result<(), Error> {
        let url = if let Some(url) = &app_env.schedule.url {
            log::info!("The default link {} will be used", url);
            url.clone()
        } else {
            log::info!("Obtaining a link using FaaS...");
            Self::query_url(
                &*app_env.yandex_cloud.api_key,
                &*app_env.yandex_cloud.func_id,
            )
            .await
            .map_err(|error| Error::QueryUrlFailed(error))?
        };

        log::info!("For the initial setup, the link {} will be used", url);

        let snapshot = ScheduleSnapshot::new(downloader, url)
            .await
            .map_err(|error| Error::SnapshotCreationFailed(error))?;

        log::info!("Schedule snapshot successfully created!");

        self.snapshot.write(snapshot);

        Ok(())
    }

    /// Updates the schedule snapshot by querying the latest URL from FaaS and checking for changes.
    /// If the URL hasn't changed, only updates the [`fetched_at`] timestamp. If changed, downloads
    /// and parses the new schedule data.
    ///
    /// # Arguments
    ///
    /// * `downloader`: XLS file downloader used to fetch and parse the schedule data
    /// * `app_env`: Application environment containing Yandex Cloud configuration and auto-update settings
    ///
    /// returns: `Result<(), Error>` - Returns an error if the URL query fails or schedule parsing encounters issues
    ///
    /// # Safety
    ///
    /// Uses `unsafe` to access the initialized snapshot, guaranteed valid by a prior `init()` call
    #[allow(unused)] // TODO: implement auto-update
    pub async fn update(
        &mut self,
        downloader: &mut BasicXlsDownloader,
        app_env: &AppEnv,
    ) -> Result<(), Error> {
        assert!(app_env.schedule.auto_update);

        let url = Self::query_url(
            &*app_env.yandex_cloud.api_key,
            &*app_env.yandex_cloud.func_id,
        )
        .await
        .map_err(|error| Error::QueryUrlFailed(error))?;

        let snapshot = match ScheduleSnapshot::new(downloader, url).await {
            Ok(snapshot) => snapshot,
            Err(SnapshotCreationError::SameUrl) => {
                unsafe { self.snapshot.assume_init_mut() }.update();
                return Ok(());
            }
            Err(error) => return Err(Error::SnapshotCreationFailed(error)),
        };

        self.snapshot.write(snapshot);

        Ok(())
    }
}
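The `hash()` method above is documented to cover only the parsed teachers and groups, never the timestamps. A sketch of the guarantee that gives callers (hypothetical helper, not repository code; assumes two snapshots with equal parsed data):

fn assert_hash_ignores_timestamps(a: &ScheduleSnapshot, b: &ScheduleSnapshot) {
    // Equal parsed data hashes identically even across restarts, so clients
    // polling the hash observe "no change" when only fetched_at has moved.
    assert_eq!(a.hash(), b.hash());
}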
@@ -1,33 +1,26 @@
#[cfg(test)]
pub(crate) mod tests {
    use crate::state::{AppState, ScheduleSnapshot, new_app_state};
    use crate::state::{new_app_state, AppState};
    use actix_web::web;
    use log::info;
    use schedule_parser::test_utils::test_result;
    use std::default::Default;
    use tokio::sync::OnceCell;

    pub fn test_env() {
        info!("Loading test environment file...");
        dotenvy::from_path(".env.test").expect("Failed to load test environment file");
        dotenvy::from_filename(".env.test.local")
            .or_else(|_| dotenvy::from_filename(".env.test"))
            .expect("Failed to load test environment file");
    }

    pub async fn test_app_state() -> web::Data<AppState> {
        let state = new_app_state().await.unwrap();

        state.get_schedule().await.snapshot.write(ScheduleSnapshot {
            fetched_at: Default::default(),
            updated_at: Default::default(),
            url: "".to_string(),
            data: test_result().unwrap(),
        });

        state.clone()
    }

    pub async fn static_app_state() -> web::Data<AppState> {
        static STATE: OnceCell<web::Data<AppState>> = OnceCell::const_new();

        STATE.get_or_init(|| test_app_state()).await.clone()
    }
}
@@ -1,4 +1,2 @@
pub mod error;
pub mod hasher;
pub mod jwt;
pub mod telegram;
@@ -1,199 +0,0 @@
use crate::xls_downloader::interface::{FetchError, FetchOk, FetchResult, XLSDownloader};
use chrono::{DateTime, Utc};
use std::sync::Arc;

pub struct BasicXlsDownloader {
    pub url: Option<String>,
}

async fn fetch_specified(url: &str, head: bool) -> FetchResult {
    let client = reqwest::Client::new();

    let response = if head {
        client.head(url)
    } else {
        client.get(url)
    }
    .header("User-Agent", ua_generator::ua::spoof_chrome_ua())
    .send()
    .await
    .map_err(|e| FetchError::unknown(Arc::new(e)))?;

    if response.status().as_u16() != 200 {
        return Err(FetchError::bad_status_code(response.status().as_u16()));
    }

    let headers = response.headers();

    let content_type = headers
        .get("Content-Type")
        .ok_or(FetchError::bad_headers("Content-Type"))?;

    if !headers.contains_key("etag") {
        return Err(FetchError::bad_headers("etag"));
    }

    let last_modified = headers
        .get("last-modified")
        .ok_or(FetchError::bad_headers("last-modified"))?;

    if content_type != "application/vnd.ms-excel" {
        return Err(FetchError::bad_content_type(content_type.to_str().unwrap()));
    }

    let last_modified = DateTime::parse_from_rfc2822(&last_modified.to_str().unwrap())
        .unwrap()
        .with_timezone(&Utc);

    Ok(if head {
        FetchOk::head(last_modified)
    } else {
        FetchOk::get(last_modified, response.bytes().await.unwrap().to_vec())
    })
}

impl BasicXlsDownloader {
    pub fn new() -> Self {
        BasicXlsDownloader { url: None }
    }
}

impl XLSDownloader for BasicXlsDownloader {
    async fn fetch(&self, head: bool) -> FetchResult {
        if self.url.is_none() {
            Err(FetchError::NoUrlProvided)
        } else {
            fetch_specified(&*self.url.as_ref().unwrap(), head).await
        }
    }

    async fn set_url(&mut self, url: &str) -> FetchResult {
        let result = fetch_specified(url, true).await;

        if let Ok(_) = result {
            self.url = Some(url.to_string());
        }

        result
    }
}

#[cfg(test)]
mod tests {
    use crate::xls_downloader::basic_impl::{BasicXlsDownloader, fetch_specified};
    use crate::xls_downloader::interface::{FetchError, XLSDownloader};

    #[tokio::test]
    async fn bad_url() {
        let url = "bad_url";

        let results = [
            fetch_specified(url, true).await,
            fetch_specified(url, false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());
    }

    #[tokio::test]
    async fn bad_status_code() {
        let url = "https://www.google.com/not-found";

        let results = [
            fetch_specified(url, true).await,
            fetch_specified(url, false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());

        let expected_error = FetchError::BadStatusCode { status_code: 404 };

        assert_eq!(*results[0].as_ref().err().unwrap(), expected_error);
        assert_eq!(*results[1].as_ref().err().unwrap(), expected_error);
    }

    #[tokio::test]
    async fn bad_headers() {
        let url = "https://www.google.com/favicon.ico";

        let results = [
            fetch_specified(url, true).await,
            fetch_specified(url, false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());

        let expected_error = FetchError::BadHeaders {
            expected_header: "ETag".to_string(),
        };

        assert_eq!(*results[0].as_ref().err().unwrap(), expected_error);
        assert_eq!(*results[1].as_ref().err().unwrap(), expected_error);
    }

    #[tokio::test]
    async fn bad_content_type() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb59fd46303008396ac96%2Fexample.txt";

        let results = [
            fetch_specified(url, true).await,
            fetch_specified(url, false).await,
        ];

        assert!(results[0].is_err());
        assert!(results[1].is_err());
    }

    #[tokio::test]
    async fn ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";

        let results = [
            fetch_specified(url, true).await,
            fetch_specified(url, false).await,
        ];

        assert!(results[0].is_ok());
        assert!(results[1].is_ok());
    }

    #[tokio::test]
    async fn downloader_set_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";

        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_ok());
    }

    #[tokio::test]
    async fn downloader_set_err() {
        let url = "bad_url";

        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_err());
    }

    #[tokio::test]
    async fn downloader_ok() {
        let url = "https://s3.aero-storage.ldragol.ru/679e5d1145a6ad00843ad3f1/67ddb5fad46303008396ac97%2Fschedule.xls";

        let mut downloader = BasicXlsDownloader::new();

        assert!(downloader.set_url(url).await.is_ok());
        assert!(downloader.fetch(false).await.is_ok());
    }

    #[tokio::test]
    async fn downloader_no_url_provided() {
        let downloader = BasicXlsDownloader::new();
        let result = downloader.fetch(false).await;

        assert!(result.is_err());
        assert_eq!(result.err().unwrap(), FetchError::NoUrlProvided);
    }
}
@@ -1,100 +0,0 @@
use chrono::{DateTime, Utc};
use derive_more::{Display, Error};
use std::mem::discriminant;
use std::sync::Arc;
use utoipa::ToSchema;

/// XLS data retrieval errors.
#[derive(Clone, Debug, ToSchema, Display, Error)]
pub enum FetchError {
    /// The file URL is not set.
    #[display("The link to the timetable was not provided earlier.")]
    NoUrlProvided,

    /// Unknown error.
    #[display("An unknown error occurred while downloading the file.")]
    #[schema(value_type = String)]
    Unknown(Arc<reqwest::Error>),

    /// The server returned a status code other than 200.
    #[display("Server returned a status code {status_code}.")]
    BadStatusCode { status_code: u16 },

    /// The URL leads to a file of a different type.
    #[display("The link leads to a file of type '{content_type}'.")]
    BadContentType { content_type: String },

    /// The server didn't return the expected headers.
    #[display("Server doesn't return expected header(s) '{expected_header}'.")]
    BadHeaders { expected_header: String },
}

impl FetchError {
    pub fn unknown(error: Arc<reqwest::Error>) -> Self {
        Self::Unknown(error)
    }

    pub fn bad_status_code(status_code: u16) -> Self {
        Self::BadStatusCode { status_code }
    }

    pub fn bad_content_type(content_type: &str) -> Self {
        Self::BadContentType {
            content_type: content_type.to_string(),
        }
    }

    pub fn bad_headers(expected_header: &str) -> Self {
        Self::BadHeaders {
            expected_header: expected_header.to_string(),
        }
    }
}

impl PartialEq for FetchError {
    fn eq(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}

/// Result of XLS data retrieval.
pub struct FetchOk {
    /// File upload date.
    pub uploaded_at: DateTime<Utc>,

    /// Date the data was received.
    pub requested_at: DateTime<Utc>,

    /// File data.
    pub data: Option<Vec<u8>>,
}

impl FetchOk {
    /// Result without file content.
    pub fn head(uploaded_at: DateTime<Utc>) -> Self {
        FetchOk {
            uploaded_at,
            requested_at: Utc::now(),
            data: None,
        }
    }

    /// Full result.
    pub fn get(uploaded_at: DateTime<Utc>, data: Vec<u8>) -> Self {
        FetchOk {
            uploaded_at,
            requested_at: Utc::now(),
            data: Some(data),
        }
    }
}

pub type FetchResult = Result<FetchOk, FetchError>;

pub trait XLSDownloader {
    /// Gets data about the file and, optionally, its content.
    async fn fetch(&self, head: bool) -> FetchResult;

    /// Sets the file URL.
    async fn set_url(&mut self, url: &str) -> FetchResult;
}
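Because the trait uses plain `async fn` methods, a test double is trivial to write; a sketch (not repository code) of an in-memory implementation for exercising parse logic without the network:

// Serves fixed bytes; `set_url` validates nothing and always succeeds.
struct StaticDownloader {
    bytes: Vec<u8>,
}

impl XLSDownloader for StaticDownloader {
    async fn fetch(&self, head: bool) -> FetchResult {
        Ok(if head {
            FetchOk::head(Utc::now())
        } else {
            FetchOk::get(Utc::now(), self.bytes.clone())
        })
    }

    async fn set_url(&mut self, _url: &str) -> FetchResult {
        Ok(FetchOk::head(Utc::now()))
    }
}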
@@ -1,2 +0,0 @@
pub mod basic_impl;
pub mod interface;