chore(clippy): fix all clippy warnings

2025-09-06 21:24:52 +04:00
parent edea6c5424
commit 35f707901f
25 changed files with 126 additions and 167 deletions

View File

@@ -6,7 +6,7 @@ mod shared {
     use quote::{ToTokens, quote};
     use syn::{Attribute, DeriveInput};

-    pub fn find_status_code(attrs: &Vec<Attribute>) -> Option<proc_macro2::TokenStream> {
+    pub fn find_status_code(attrs: &[Attribute]) -> Option<proc_macro2::TokenStream> {
         attrs
             .iter()
             .find_map(|attr| -> Option<proc_macro2::TokenStream> {
@@ -41,14 +41,12 @@ mod shared {
         let mut status_code_arms: Vec<proc_macro2::TokenStream> = variants
             .iter()
-            .map(|v| -> Option<proc_macro2::TokenStream> {
+            .filter_map(|v| -> Option<proc_macro2::TokenStream> {
                 let status_code = find_status_code(&v.attrs)?;
                 let variant_name = &v.ident;

                 Some(quote! { #name::#variant_name => #status_code, })
             })
-            .filter(|v| v.is_some())
-            .map(|v| v.unwrap())
             .collect();

         if status_code_arms.len() < variants.len() {
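The hunks above fix two lints: `clippy::ptr_arg` (take `&[T]` instead of `&Vec<T>`, so callers can pass any slice and pay no double indirection) and a `.map(..)` + `.filter(|v| v.is_some())` + `.map(|v| v.unwrap())` chain, which collapses into a single `.filter_map(..)`. A standalone sketch of both patterns, with illustrative names not taken from this repository:

```rust
// `clippy::ptr_arg`: accept a slice; `&Vec<i32>` would force callers to own a Vec.
fn sum_even(numbers: &[i32]) -> i32 {
    numbers.iter().filter(|n| *n % 2 == 0).sum()
}

// map + filter(is_some) + map(unwrap) == filter_map: one pass, no unwrap.
fn parse_all(raw: &[&str]) -> Vec<i32> {
    raw.iter().filter_map(|s| s.parse().ok()).collect()
}

fn main() {
    assert_eq!(sum_even(&[1, 2, 3, 4]), 6);
    assert_eq!(parse_all(&["1", "x", "3"]), vec![1, 3]);
}
```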

View File

@@ -59,4 +59,5 @@ impl Query {
     define_is_exists!(user, id, str, Id);
     define_is_exists!(user, username, str, Username);
     define_is_exists!(user, telegram_id, i64, TelegramId);
+    define_is_exists!(user, vk_id, i32, VkId);
 }

View File

@@ -25,7 +25,7 @@ pub struct EngelsPolytechnicProvider {
 }

 impl EngelsPolytechnicProvider {
-    pub async fn new(
+    pub async fn get(
         update_source: UpdateSource,
     ) -> Result<Arc<dyn ScheduleProvider>, crate::updater::error::Error> {
         let (updater, snapshot) = Updater::new(update_source).await?;
@@ -60,7 +60,7 @@ impl ScheduleProvider for Wrapper {
         log::info!("Updating schedule...");

-        match this.updater.update(&mut this.snapshot).await {
+        match this.updater.update(&this.snapshot).await {
             Ok(snapshot) => {
                 this.snapshot = Arc::new(snapshot);
             },

View File

@@ -1,5 +1,5 @@
use crate::or_continue; use crate::or_continue;
use crate::parser::error::{ErrorCell, ErrorCellPos}; use crate::parser::error::{Error, ErrorCell, ErrorCellPos};
use crate::parser::worksheet::WorkSheet; use crate::parser::worksheet::WorkSheet;
use crate::parser::LessonParseResult::{Lessons, Street}; use crate::parser::LessonParseResult::{Lessons, Street};
use base::LessonType::Break; use base::LessonType::Break;
@@ -230,7 +230,7 @@ enum LessonParseResult {
// noinspection GrazieInspection // noinspection GrazieInspection
/// Obtaining a non-standard type of lesson by name. /// Obtaining a non-standard type of lesson by name.
fn guess_lesson_type(text: &String) -> Option<LessonType> { fn guess_lesson_type(text: &str) -> Option<LessonType> {
static MAP: LazyLock<HashMap<&str, LessonType>> = LazyLock::new(|| { static MAP: LazyLock<HashMap<&str, LessonType>> = LazyLock::new(|| {
HashMap::from([ HashMap::from([
("консультация", LessonType::Consultation), ("консультация", LessonType::Consultation),
@@ -245,22 +245,18 @@ fn guess_lesson_type(text: &String) -> Option<LessonType> {
let name_lower = text.to_lowercase(); let name_lower = text.to_lowercase();
match MAP MAP.iter()
.iter() .map(|(text, lesson_type)| (lesson_type, strsim::levenshtein(text, &name_lower)))
.map(|(text, lesson_type)| (lesson_type, strsim::levenshtein(text, &*name_lower)))
.filter(|x| x.1 <= 4) .filter(|x| x.1 <= 4)
.min_by_key(|(_, score)| *score) .min_by_key(|(_, score)| *score)
{ .map(|v| v.0.clone())
None => None,
Some(v) => Some(v.0.clone()),
}
} }
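The rewrite above is the `manual_map` shape: a `match` over an `Option` whose arms only forward `None` and re-wrap the `Some` value is exactly what a combinator like `Option::map` expresses. A minimal sketch with made-up names:

```rust
// A `match` that only unwraps and re-wraps is `Option::map`.
fn double(value: Option<i32>) -> Option<i32> {
    // Before: match value { None => None, Some(v) => Some(v * 2) }
    value.map(|v| v * 2)
}

fn main() {
    assert_eq!(double(Some(21)), Some(42));
    assert_eq!(double(None), None);
}
```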
 /// Getting a pair or street from a cell.
 fn parse_lesson(
     worksheet: &WorkSheet,
     day: &Day,
-    day_boundaries: &Vec<BoundariesCellInfo>,
+    day_boundaries: &[BoundariesCellInfo],
     lesson_boundaries: &BoundariesCellInfo,
     group_column: u32,
 ) -> Result<LessonParseResult, crate::parser::error::Error> {
@@ -297,7 +293,7 @@ fn parse_lesson(
             column: group_column,
         }))?;

-        let range: Option<[u8; 2]> = if lesson_boundaries.default_index != None {
+        let range: Option<[u8; 2]> = if lesson_boundaries.default_index.is_some() {
             let default = lesson_boundaries.default_index.unwrap() as u8;
             Some([default, end_time.default_index.unwrap() as u8])
         } else {
@@ -312,7 +308,11 @@ fn parse_lesson(
         Ok((range, time))
     }?;

-    let (name, mut subgroups, lesson_type) = parse_name_and_subgroups(&name)?;
+    let ParsedLessonName {
+        name,
+        mut subgroups,
+        r#type: lesson_type,
+    } = parse_name_and_subgroups(&name)?;

     {
         let cabinets: Vec<String> = parse_cabinets(
@@ -325,12 +325,10 @@ fn parse_lesson(
         if cab_count == 1 {
             // Assign this cabinet to all subgroups
-            let cab = Some(cabinets.get(0).unwrap().clone());
+            let cab = Some(cabinets.first().unwrap().clone());

-            for subgroup in &mut subgroups {
-                if let Some(subgroup) = subgroup {
-                    subgroup.cabinet = cab.clone()
-                }
+            for subgroup in subgroups.iter_mut().flatten() {
+                subgroup.cabinet = cab.clone()
             }
         } else if cab_count == 2 {
             while subgroups.len() < cab_count {
@@ -361,10 +359,7 @@ fn parse_lesson(
             range: default_range,
             name: Some(name),
             time: lesson_time,
-            subgroups: if subgroups.len() == 2
-                && subgroups.get(0).unwrap().is_none()
-                && subgroups.get(1).unwrap().is_none()
-            {
+            subgroups: if subgroups.len() == 2 && subgroups.iter().all(|x| x.is_none()) {
                 None
             } else {
                 Some(subgroups)
@@ -416,12 +411,15 @@ fn parse_cabinets(worksheet: &WorkSheet, row_range: (u32, u32), column: u32) ->
     cabinets
 }

+struct ParsedLessonName {
+    name: String,
+    subgroups: Vec<Option<LessonSubGroup>>,
+    r#type: Option<LessonType>,
+}
+
 //noinspection GrazieInspection
 /// Getting the "pure" name of the lesson and list of teachers from the text of the lesson cell.
-fn parse_name_and_subgroups(
-    text: &String,
-) -> Result<(String, Vec<Option<LessonSubGroup>>, Option<LessonType>), crate::parser::error::Error>
-{
+fn parse_name_and_subgroups(text: &str) -> Result<ParsedLessonName, Error> {
     // Parts of the lesson name:
     // 1. The name itself.
     // 2. The list of teachers and subgroups.
@@ -458,7 +456,7 @@ fn parse_name_and_subgroups(
     static CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\s\n\t]+").unwrap());

     let text = CLEAN_RE
-        .replace(&text.replace(&[' ', '\t', '\n'], " "), " ")
+        .replace(&text.replace([' ', '\t', '\n'], " "), " ")
         .to_string();

     let (lesson_name, subgroups, lesson_type) = match NAMES_REGEX.captures(&text) {
@@ -466,7 +464,7 @@ fn parse_name_and_subgroups(
             let capture = captures.get(0).unwrap();

             let subgroups: Vec<Option<LessonSubGroup>> = {
-                let src = capture.as_str().replace(&[' ', '.'], "");
+                let src = capture.as_str().replace([' ', '.'], "");

                 let mut shared_subgroup = false;
                 let mut subgroups: [Option<LessonSubGroup>; 2] = [None, None];
@@ -478,7 +476,7 @@ fn parse_name_and_subgroups(
                         .map_or(0, |index| name[(index + 1)..(index + 2)].parse().unwrap());

                     let teacher_name = {
-                        let name_end = open_bracket_index.unwrap_or_else(|| name.len());
+                        let name_end = open_bracket_index.unwrap_or(name.len());

                         // Damn, it took me way too long to figure this out.
                         format!(
@@ -545,7 +543,11 @@ fn parse_name_and_subgroups(
         None => (text, Vec::new(), None),
     };

-    Ok((lesson_name, subgroups, lesson_type))
+    Ok(ParsedLessonName {
+        name: lesson_name,
+        subgroups,
+        r#type: lesson_type,
+    })
 }
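Returning a three-element tuple is the pattern behind clippy's `type_complexity` warning; the commit replaces it with the named `ParsedLessonName` struct, so call sites destructure by field name instead of by position. A hypothetical sketch of the same refactor:

```rust
// Instead of `-> (String, Vec<u32>, Option<bool>)`, name the fields.
struct Parsed {
    name: String,
    ids: Vec<u32>,
    flag: Option<bool>,
}

fn parse(input: &str) -> Parsed {
    Parsed { name: input.to_string(), ids: vec![1, 2], flag: None }
}

fn main() {
    // The destructuring stays self-describing at the call site.
    let Parsed { name, ids, flag } = parse("demo");
    assert_eq!((name.as_str(), ids.len(), flag), ("demo", 2, None));
}
```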
 /// Getting the start and end of a pair from a cell in the first column of a document.
@@ -554,18 +556,11 @@ fn parse_name_and_subgroups(
 ///
 /// * `cell_data`: text in cell.
 /// * `date`: date of the current day.
-fn parse_lesson_boundaries_cell(
-    cell_data: &String,
-    date: DateTime<Utc>,
-) -> Option<LessonBoundaries> {
+fn parse_lesson_boundaries_cell(cell_data: &str, date: DateTime<Utc>) -> Option<LessonBoundaries> {
     static TIME_RE: LazyLock<Regex> =
         LazyLock::new(|| Regex::new(r"(\d+\.\d+)-(\d+\.\d+)").unwrap());

-    let parse_res = if let Some(captures) = TIME_RE.captures(cell_data) {
-        captures
-    } else {
-        return None;
-    };
+    let parse_res = TIME_RE.captures(cell_data)?;

     let start_match = parse_res.get(1).unwrap().as_str();
     let start_parts: Vec<&str> = start_match.split(".").collect();
@@ -579,7 +574,7 @@ fn parse_lesson_boundaries_cell(
     };

     Some(LessonBoundaries {
-        start: GET_TIME(date.clone(), &start_parts),
+        start: GET_TIME(date, &start_parts),
         end: GET_TIME(date, &end_parts),
     })
 }
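The `if let … else { return None }` ladder above collapses to the `?` operator, which short-circuits on `None` inside any function that itself returns `Option` (clippy's `question_mark` lint). A small sketch:

```rust
// `expr?` replaces `let x = if let Some(v) = expr { v } else { return None };`
fn first_digit(text: &str) -> Option<u32> {
    let c = text.chars().find(|c| c.is_ascii_digit())?;
    c.to_digit(10)
}

fn main() {
    assert_eq!(first_digit("room 12"), Some(1));
    assert_eq!(first_digit("none"), None);
}
```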
@@ -607,7 +602,7 @@ fn parse_day_boundaries(
             continue;
         };

-        let lesson_time = parse_lesson_boundaries_cell(&time_cell, date.clone()).ok_or(
+        let lesson_time = parse_lesson_boundaries_cell(&time_cell, date).ok_or(
             error::Error::LessonBoundaries(ErrorCell::new(row, column, time_cell.clone())),
         )?;
@@ -652,7 +647,7 @@ fn parse_day_boundaries(
 /// * `week_markup`: markup of the current week.
 fn parse_week_boundaries(
     worksheet: &WorkSheet,
-    week_markup: &Vec<DayCellInfo>,
+    week_markup: &[DayCellInfo],
 ) -> Result<Vec<Vec<BoundariesCellInfo>>, crate::parser::error::Error> {
     let mut result: Vec<Vec<BoundariesCellInfo>> = Vec::new();
@@ -671,8 +666,8 @@ fn parse_week_boundaries(
         };

         let day_boundaries = parse_day_boundaries(
-            &worksheet,
-            day_markup.date.clone(),
+            worksheet,
+            day_markup.date,
             (day_markup.row, end_row),
             lesson_time_column,
         )?;
@@ -698,7 +693,7 @@ fn convert_groups_to_teachers(
             .map(|day| Day {
                 name: day.name.clone(),
                 street: day.street.clone(),
-                date: day.date.clone(),
+                date: day.date,
                 lessons: vec![],
             })
             .collect();
@@ -774,19 +769,6 @@ fn convert_groups_to_teachers(
 /// * `buffer`: XLS data containing schedule.
 ///
 /// returns: Result<ParseResult, crate::parser::error::Error>
-///
-/// # Examples
-///
-/// ```
-/// use schedule_parser::parse_xls;
-///
-/// let result = parse_xls(&include_bytes!("../../schedule.xls").to_vec());
-///
-/// assert!(result.is_ok(), "{}", result.err().unwrap());
-///
-/// assert_ne!(result.as_ref().unwrap().groups.len(), 0);
-/// assert_ne!(result.as_ref().unwrap().teachers.len(), 0);
-/// ```
 pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule, crate::parser::error::Error> {
     let cursor = Cursor::new(&buffer);
     let mut workbook: Xls<_> =
@@ -800,7 +782,7 @@ pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule, crate::parser::erro
         .clone();

     let worksheet_merges = workbook
-        .worksheet_merge_cells(&*worksheet_name)
+        .worksheet_merge_cells(&worksheet_name)
         .ok_or(error::Error::NoWorkSheets)?;

     WorkSheet {
@@ -820,7 +802,7 @@ pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule, crate::parser::erro
         days: Vec::new(),
     };

-    for day_index in 0..(&week_markup).len() {
+    for day_index in 0..week_markup.len() {
         let day_markup = &week_markup[day_index];

         let mut day = Day {
@@ -836,8 +818,8 @@ pub fn parse_xls(buffer: &Vec<u8>) -> Result<ParsedSchedule, crate::parser::erro
             match &mut parse_lesson(
                 &worksheet,
                 &day,
-                &day_boundaries,
-                &lesson_boundaries,
+                day_boundaries,
+                lesson_boundaries,
                 group_markup.column,
             )? {
                 Lessons(lesson) => day.lessons.append(lesson),

View File

@@ -90,7 +90,7 @@ impl Updater {
             return Err(SnapshotCreationError::SameUrl);
         }

-        let head_result = downloader.set_url(&*url).await.map_err(|error| {
+        let head_result = downloader.set_url(&url).await.map_err(|error| {
             if let FetchError::Unknown(error) = &error {
                 sentry::capture_error(&error);
             }
@@ -152,10 +152,10 @@ impl Updater {
             .header("Authorization", format!("Api-Key {}", api_key))
             .send()
             .await
-            .map_err(|error| QueryUrlError::RequestFailed(error))?
+            .map_err(QueryUrlError::RequestFailed)?
             .text()
             .await
-            .map_err(|error| QueryUrlError::RequestFailed(error))?;
+            .map_err(QueryUrlError::RequestFailed)?;

         Ok(format!("https://politehnikum-eng.ru{}", uri.trim()))
     }
@@ -196,7 +196,7 @@ impl Updater {
                 log::info!("Obtaining a link using FaaS...");
                 Self::query_url(yandex_api_key, yandex_func_id)
                     .await
-                    .map_err(|error| Error::QueryUrlFailed(error))?
+                    .map_err(Error::QueryUrlFailed)?
             }
             _ => unreachable!(),
         };
@@ -205,7 +205,7 @@ impl Updater {
         let snapshot = Self::new_snapshot(&mut this.downloader, url)
             .await
-            .map_err(|error| Error::SnapshotCreationFailed(error))?;
+            .map_err(Error::SnapshotCreationFailed)?;

         log::info!("Schedule snapshot successfully created!");
@@ -243,7 +243,7 @@ impl Updater {
                 yandex_func_id,
             } => Self::query_url(yandex_api_key.as_str(), yandex_func_id.as_str())
                 .await
-                .map_err(|error| Error::QueryUrlFailed(error))?,
+                .map_err(Error::QueryUrlFailed)?,
             _ => unreachable!(),
         };
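All four `map_err` hunks above are `clippy::redundant_closure`: a tuple-style enum variant is already a function from its payload to the enum, so it can be passed by path instead of wrapped in a closure. Sketch with an illustrative error type:

```rust
use std::{fs, io};

#[derive(Debug)]
enum MyError {
    Io(io::Error),
}

fn read_config(path: &str) -> Result<String, MyError> {
    // `MyError::Io` is itself a `fn(io::Error) -> MyError`,
    // so `|e| MyError::Io(e)` is a redundant closure.
    fs::read_to_string(path).map_err(MyError::Io)
}

fn main() {
    println!("{:?}", read_config("/definitely/missing").err());
}
```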

View File

@@ -136,7 +136,7 @@ impl XlsDownloader {
             return Err(FetchError::bad_content_type(content_type.to_str().unwrap()));
         }

-        let last_modified = DateTime::parse_from_rfc2822(&last_modified.to_str().unwrap())
+        let last_modified = DateTime::parse_from_rfc2822(last_modified.to_str().unwrap())
             .unwrap()
             .with_timezone(&Utc);
@@ -151,14 +151,14 @@ impl XlsDownloader {
         if self.url.is_none() {
             Err(FetchError::NoUrlProvided)
         } else {
-            Self::fetch_specified(&*self.url.as_ref().unwrap(), head).await
+            Self::fetch_specified(self.url.as_ref().unwrap(), head).await
         }
     }

     pub async fn set_url(&mut self, url: &str) -> FetchResult {
         let result = Self::fetch_specified(url, true).await;

-        if let Ok(_) = result {
+        if result.is_ok() {
             self.url = Some(url.to_string());
         }
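`if let Ok(_) = result` binds nothing, so clippy's `redundant_pattern_matching` suggests the direct `result.is_ok()`, as the last hunk does. Sketch:

```rust
fn main() {
    let result: Result<i32, &str> = "42".parse().map_err(|_| "bad number");

    // Before: if let Ok(_) = result { ... }
    if result.is_ok() {
        println!("parsed successfully");
    }
}
```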

View File

@@ -11,7 +11,6 @@ use database::query::Query;
 use derive_more::Display;
 use serde::{Deserialize, Serialize};
 use std::fmt::Debug;
-use std::ops::Deref;

 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Display, MiddlewareError)]
 #[status_code = "actix_web::http::StatusCode::UNAUTHORIZED"]

View File

@@ -5,7 +5,6 @@ use std::future::{Ready, ready};
 use std::ops;

 /// # Async extractor.
 /// Asynchronous object extractor from a query.
-
 pub struct AsyncExtractor<T>(T);
@@ -80,7 +79,6 @@ impl<T: FromRequestAsync> FromRequest for AsyncExtractor<T> {
 }

 /// # Sync extractor.
 /// Synchronous object extractor from a query.
-
 pub struct SyncExtractor<T>(T);

View File

@@ -1,25 +1,20 @@
 use crate::extractors::authorized_user;
 use crate::extractors::base::FromRequestAsync;
 use actix_web::body::{BoxBody, EitherBody};
-use actix_web::dev::{Payload, Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
+use actix_web::dev::{forward_ready, Payload, Service, ServiceRequest, ServiceResponse, Transform};
 use actix_web::{Error, HttpRequest, ResponseError};
-use futures_util::future::LocalBoxFuture;
-use std::future::{Ready, ready};
-use std::rc::Rc;
 use database::entity::User;
+use futures_util::future::LocalBoxFuture;
+use std::future::{ready, Ready};
+use std::rc::Rc;

 /// Middleware guard working with JWT tokens.
+#[derive(Default)]
 pub struct JWTAuthorization {
     /// List of ignored endpoints.
     pub ignore: &'static [&'static str],
 }

-impl Default for JWTAuthorization {
-    fn default() -> Self {
-        Self { ignore: &[] }
-    }
-}
-
 impl<S, B> Transform<S, ServiceRequest> for JWTAuthorization
 where
     S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
@@ -70,8 +65,8 @@ where
             return false;
         }

-        if let Some(other) = path.as_bytes().iter().nth(ignore.len()) {
-            return ['?' as u8, '/' as u8].contains(other);
+        if let Some(other) = path.as_bytes().get(ignore.len()) {
+            return [b'?', b'/'].contains(other);
         }

         true
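Two lints are visible in this file: `clippy::derivable_impls` (a hand-written `Default` that only yields empty values becomes `#[derive(Default)]`; the default of a `&'static [T]` is the empty slice) and byte literals, where `b'?'` replaces the `'?' as u8` cast. Sketch with stand-in types:

```rust
// `#[derive(Default)]` produces `ignore: &[]`, same as the deleted manual impl.
#[derive(Default)]
struct Guard {
    ignore: &'static [&'static str],
}

// Byte literals avoid `as` casts on char constants.
fn is_separator(byte: u8) -> bool {
    [b'?', b'/'].contains(&byte)
}

fn main() {
    assert!(Guard::default().ignore.is_empty());
    assert!(is_separator(b'/'));
}
```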

View File

@@ -1,10 +1,10 @@
-use actix_web::Error;
 use actix_web::body::{BoxBody, EitherBody};
-use actix_web::dev::{Service, ServiceRequest, ServiceResponse, Transform, forward_ready};
+use actix_web::dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform};
 use actix_web::http::header;
 use actix_web::http::header::HeaderValue;
+use actix_web::Error;
 use futures_util::future::LocalBoxFuture;
-use std::future::{Ready, ready};
+use std::future::{ready, Ready};

 /// Middleware to specify the encoding in the Content-Type header.
 pub struct ContentTypeBootstrap;
@@ -30,7 +30,7 @@ pub struct ContentTypeMiddleware<S> {
     service: S,
 }

-impl<'a, S, B> Service<ServiceRequest> for ContentTypeMiddleware<S>
+impl<S, B> Service<ServiceRequest> for ContentTypeMiddleware<S>
 where
     S: Service<ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
     S::Future: 'static,
@@ -49,13 +49,14 @@ where
             let mut response = fut.await?;
             let headers = response.response_mut().headers_mut();

-            if let Some(content_type) = headers.get("Content-Type") {
-                if content_type == "application/json" {
-                    headers.insert(
-                        header::CONTENT_TYPE,
-                        HeaderValue::from_static("application/json; charset=utf8"),
-                    );
-                }
+            if let Some(content_type) = headers.get("Content-Type")
+                && content_type == "application/json"
+            {
+                headers.insert(
+                    header::CONTENT_TYPE,
+                    HeaderValue::from_static("application/json; charset=utf8"),
+                );
             }

             Ok(response.map_into_left_body())
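The nested `if` collapses into a let-chain: one condition combining the pattern match and the comparison (the `collapsible_if` family of lints). Note that let-chains only compile on a recent toolchain under the 2024 edition — an assumption about this project's setup. Illustrative sketch:

```rust
// One level of nesting removed: pattern match and guard share a condition.
fn json_charset_suffix(header: Option<&str>) -> Option<&'static str> {
    if let Some(value) = header
        && value == "application/json"
    {
        return Some("; charset=utf8");
    }
    None
}

fn main() {
    assert_eq!(json_charset_suffix(Some("application/json")), Some("; charset=utf8"));
    assert_eq!(json_charset_suffix(Some("text/html")), None);
}
```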

View File

@@ -22,7 +22,7 @@ struct Claims {

 #[derive(Debug, PartialEq)]
 pub enum Error {
-    JwtError(ErrorKind),
+    Jwt(ErrorKind),
     InvalidSignature,
     InvalidToken,
     Expired,
@@ -49,10 +49,10 @@ const VK_PUBLIC_KEY: &str = concat!(
     "-----END PUBLIC KEY-----"
 );

-pub fn parse_vk_id(token_str: &String, client_id: i32) -> Result<i32, Error> {
+pub fn parse_vk_id(token_str: &str, client_id: i32) -> Result<i32, Error> {
     let dkey = DecodingKey::from_rsa_pem(VK_PUBLIC_KEY.as_bytes()).unwrap();

-    match decode::<Claims>(&token_str, &dkey, &Validation::new(Algorithm::RS256)) {
+    match decode::<Claims>(token_str, &dkey, &Validation::new(Algorithm::RS256)) {
         Ok(token_data) => {
             let claims = token_data.claims;
@@ -77,7 +77,7 @@ pub fn parse_vk_id(token_str: &String, client_id: i32) -> Result<i32, Error> {
             ErrorKind::Base64(_) => Error::InvalidToken,
             ErrorKind::Json(_) => Error::InvalidToken,
             ErrorKind::Utf8(_) => Error::InvalidToken,
-            kind => Error::JwtError(kind),
+            kind => Error::Jwt(kind),
         }),
     }
 }

View File

@@ -28,7 +28,7 @@ async fn sign_in_combined(
         return Err(ErrorCode::IncorrectCredentials);
     }

-    match bcrypt::verify(&data.password, &user.password.as_ref().unwrap()) {
+    match bcrypt::verify(&data.password, user.password.as_ref().unwrap()) {
         Ok(result) => {
             if !result {
                 return Err(ErrorCode::IncorrectCredentials);
@@ -124,8 +124,6 @@ mod schema {
         InvalidVkAccessToken,
     }

-    /// Internal
-
     /// Type of authorization.
     pub enum SignInData {
         /// User and password name and password.

View File

@@ -8,7 +8,6 @@ use database::entity::sea_orm_active_enums::UserRole;
 use database::entity::ActiveUser;
 use database::query::Query;
 use database::sea_orm::ActiveModelTrait;
-use std::ops::Deref;
 use web::Json;

 async fn sign_up_combined(
@@ -42,13 +41,12 @@ async fn sign_up_combined(
     }

     // If a user with the specified VK ID already exists.
-    if let Some(id) = data.vk_id {
-        if Query::find_user_by_vk_id(db, id)
+    if let Some(id) = data.vk_id
+        && Query::is_user_exists_by_vk_id(db, id)
             .await
-            .is_ok_and(|user| user.is_some())
+            .expect("Failed to check user existence")
         {
             return Err(ErrorCode::VkAlreadyExists);
-        }
     }

     let active_user: ActiveUser = data.into();
@@ -202,8 +200,6 @@ mod schema {
         VkAlreadyExists,
     }

-    /// Internal
-
     /// Data for registration.
     pub struct SignUpData {
         // TODO: enforce minimum and maximum length on registration and change.
@@ -228,21 +224,21 @@ mod schema {
         pub version: String,
     }

-    impl Into<ActiveUser> for SignUpData {
-        fn into(self) -> ActiveUser {
-            assert_ne!(self.password.is_some(), self.vk_id.is_some());
+    impl From<SignUpData> for ActiveUser {
+        fn from(value: SignUpData) -> Self {
+            assert_ne!(value.password.is_some(), value.vk_id.is_some());

             ActiveUser {
                 id: Set(ObjectId::new().unwrap().to_string()),
-                username: Set(self.username),
-                password: Set(self
+                username: Set(value.username),
+                password: Set(value
                     .password
                     .map(|x| bcrypt::hash(x, bcrypt::DEFAULT_COST).unwrap())),
-                vk_id: Set(self.vk_id),
+                vk_id: Set(value.vk_id),
                 telegram_id: Set(None),
-                group: Set(Some(self.group)),
-                role: Set(self.role),
-                android_version: Set(Some(self.version)),
+                group: Set(Some(value.group)),
+                role: Set(value.role),
+                android_version: Set(Some(value.version)),
             }
         }
     }
@@ -262,7 +258,6 @@ mod tests {
     use database::entity::{UserColumn, UserEntity};
     use database::sea_orm::ColumnTrait;
     use database::sea_orm::{EntityTrait, QueryFilter};
-    use std::ops::Deref;

     struct SignUpPartial<'a> {
         username: &'a str,
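The `impl Into` → `impl From` swap is `clippy::from_over_into`: implementing `From` yields the matching `Into` for free via the standard library's blanket impl, while a bare `Into` impl grants nothing in the other direction. A sketch with toy types:

```rust
struct Meters(f64);
struct Feet(f64);

impl From<Meters> for Feet {
    fn from(value: Meters) -> Self {
        Feet(value.0 * 3.28084)
    }
}

fn main() {
    // Both call styles now work; an `Into` impl alone would not give `Feet::from`.
    let a = Feet::from(Meters(2.0));
    let b: Feet = Meters(2.0).into();
    assert_eq!(a.0, b.0);
}
```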

View File

@@ -9,7 +9,6 @@ use database::entity::ActiveUser;
 use database::query::Query;
 use database::sea_orm::{ActiveModelTrait, Set};
 use objectid::ObjectId;
-use std::ops::Deref;
 use std::sync::Arc;
 use web::Json;
@@ -122,7 +121,7 @@ mod schema {
             &mut self,
             request: &HttpRequest,
             response: &mut HttpResponse<EitherBody<String>>,
-        ) -> () {
+        ) {
             let access_token = &self.access_token;
             let app_state = request.app_data::<web::Data<AppState>>().unwrap();

View File

@@ -6,7 +6,6 @@ use actix_web::{post, web};
 use database::entity::User;
 use database::query::Query;
 use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
-use std::ops::Deref;
 use web::Json;

 #[utoipa::path(responses(
#[utoipa::path(responses( #[utoipa::path(responses(

View File

@@ -1,11 +1,17 @@
-use crate::AppState;
 use crate::routes::schedule::schema::CacheStatus;
+use crate::AppState;
 use actix_web::{get, web};
+use std::ops::Deref;

 #[utoipa::path(responses(
     (status = OK, body = CacheStatus),
 ))]
 #[get("/cache-status")]
 pub async fn cache_status(app_state: web::Data<AppState>) -> CacheStatus {
-    CacheStatus::from(&app_state).await.into()
+    app_state
+        .get_schedule_snapshot("eng_polytechnic")
+        .await
+        .unwrap()
+        .deref()
+        .into()
 }

View File

@@ -1,7 +1,7 @@
 mod cache_status;
 mod group;
 mod group_names;
-mod schedule;
+mod get;
 mod schema;
 mod teacher;
 mod teacher_names;
@@ -9,6 +9,6 @@ mod teacher_names;
 pub use cache_status::*;
 pub use group::*;
 pub use group_names::*;
-pub use schedule::*;
+pub use get::*;
 pub use teacher::*;
 pub use teacher_names::*;

View File

@@ -63,18 +63,6 @@ pub struct CacheStatus {
     pub updated_at: i64,
 }

-impl CacheStatus {
-    pub async fn from(value: &web::Data<AppState>) -> Self {
-        From::<&ScheduleSnapshot>::from(
-            value
-                .get_schedule_snapshot("eng_polytechnic")
-                .await
-                .unwrap()
-                .deref(),
-        )
-    }
-}
-
 impl From<&ScheduleSnapshot> for CacheStatus {
     fn from(value: &ScheduleSnapshot) -> Self {
         Self {

View File

@@ -13,13 +13,13 @@ where
     E: Serialize + PartialSchema + Display + PartialErrResponse;

 /// Transform Response<T, E> into Result<T, E>
-impl<T, E> Into<Result<T, E>> for Response<T, E>
+impl<T, E> From<Response<T, E>> for Result<T, E>
 where
     T: Serialize + PartialSchema + PartialOkResponse,
     E: Serialize + PartialSchema + Display + PartialErrResponse,
 {
-    fn into(self) -> Result<T, E> {
-        self.0
+    fn from(value: Response<T, E>) -> Self {
+        value.0
     }
 }
@@ -46,7 +46,7 @@ where
     {
         match &self.0 {
             Ok(ok) => serializer.serialize_some(&ok),
-            Err(err) => serializer.serialize_some(&ResponseError::<E>::from(err.clone().into())),
+            Err(err) => serializer.serialize_some(&err.clone().into()),
         }
     }
 }
@@ -95,7 +95,7 @@ pub trait PartialOkResponse {
         &mut self,
         _request: &HttpRequest,
         _response: &mut HttpResponse<EitherBody<String>>,
-    ) -> () {
+    ) {
     }
 }
@@ -173,8 +173,8 @@ pub mod user {
             username: user.username.clone(),
             group: user.group.clone(),
             role: user.role.clone(),
-            vk_id: user.vk_id.clone(),
-            telegram_id: user.telegram_id.clone(),
+            vk_id: user.vk_id,
+            telegram_id: user.telegram_id,
             access_token: Some(access_token),
         }
     }
@@ -188,8 +188,8 @@ pub mod user {
             username: user.username.clone(),
             group: user.group.clone(),
             role: user.role.clone(),
-            vk_id: user.vk_id.clone(),
-            telegram_id: user.telegram_id.clone(),
+            vk_id: user.vk_id,
+            telegram_id: user.telegram_id,
             access_token: None,
         }
     }

View File

@@ -4,7 +4,6 @@ use crate::state::AppState;
 use actix_web::{post, web};
 use database::entity::User;
 use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
-use std::ops::Deref;

 #[utoipa::path(responses((status = OK)))]
 #[post("/change-group")]

View File

@@ -5,7 +5,6 @@ use actix_web::{post, web};
 use database::entity::User;
 use database::query::Query;
 use database::sea_orm::{ActiveModelTrait, IntoActiveModel, Set};
-use std::ops::Deref;

 #[utoipa::path(responses((status = OK)))]
 #[post("/change-username")]

View File

@@ -23,7 +23,7 @@ impl AppState {
         let env = AppEnv::default();

         let providers: HashMap<String, Arc<dyn ScheduleProvider>> = HashMap::from([(
             "eng_polytechnic".to_string(),
-            providers::EngelsPolytechnicProvider::new({
+            providers::EngelsPolytechnicProvider::get({
                 #[cfg(test)]
                 {
                     providers::EngelsPolytechnicUpdateSource::Prepared(ScheduleSnapshot {
@@ -64,7 +64,7 @@ impl AppState {
         };

         if this.env.schedule.auto_update {
-            for (_, provider) in &this.providers {
+            for provider in this.providers.values() {
                 let provider = provider.clone();
                 let cancel_token = this.cancel_token.clone();
@@ -93,6 +93,8 @@ impl AppState {
     }

     /// Create a new object web::Data<AppState>.
-    pub async fn new_app_state(database: Option<DatabaseConnection>) -> Result<web::Data<AppState>, Box<dyn std::error::Error>> {
+    pub async fn new_app_state(
+        database: Option<DatabaseConnection>,
+    ) -> Result<web::Data<AppState>, Box<dyn std::error::Error>> {
         Ok(web::Data::new(AppState::new(database).await?))
     }
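The loop rewrite is `clippy::for_kv_map`: when the key is never used, iterate `values()` directly instead of destructuring `(_, v)` pairs. Sketch with placeholder map contents:

```rust
use std::collections::HashMap;

fn main() {
    let providers: HashMap<String, u32> =
        HashMap::from([("eng_polytechnic".to_string(), 1)]);

    // Before: for (_, provider) in &providers { ... }
    for provider in providers.values() {
        println!("provider id: {provider}");
    }
}
```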

View File

@@ -63,13 +63,13 @@ struct Claims {
 pub(crate) const DEFAULT_ALGORITHM: Algorithm = Algorithm::HS256;

 /// Checking the token and extracting the UUID of the user account from it.
-pub fn verify_and_decode(token: &String) -> Result<String, Error> {
+pub fn verify_and_decode(token: &str) -> Result<String, Error> {
     let mut validation = Validation::new(DEFAULT_ALGORITHM);
     validation.required_spec_claims.remove("exp");
     validation.validate_exp = false;

-    let result = decode::<Claims>(&token, &*DECODING_KEY, &validation);
+    let result = decode::<Claims>(token, &DECODING_KEY, &validation);

     match result {
         Ok(token_data) => {
@@ -88,7 +88,7 @@ pub fn verify_and_decode(token: &String) -> Result<String, Error> {
 }

 /// Creating a user token.
-pub fn encode(id: &String) -> String {
+pub fn encode(id: &str) -> String {
     let header = Header {
         typ: Some(String::from("JWT")),
         ..Default::default()
@@ -98,12 +98,12 @@ pub fn encode(id: &String) -> String {
     let exp = iat + Duration::days(365 * 4);

     let claims = Claims {
-        id: id.clone(),
+        id: id.to_string(),
         iat: iat.timestamp().unsigned_abs(),
         exp: exp.timestamp().unsigned_abs(),
     };

-    jsonwebtoken::encode(&header, &claims, &*ENCODING_KEY).unwrap()
+    jsonwebtoken::encode(&header, &claims, &ENCODING_KEY).unwrap()
 }

 #[cfg(test)]

View File

@@ -33,7 +33,7 @@ impl WebAppInitDataMap {
         };

         data.split('&')
-            .map(|kv| kv.split_once('=').unwrap_or_else(|| (kv, "")))
+            .map(|kv| kv.split_once('=').unwrap_or((kv, "")))
             .for_each(|(key, value)| {
                 this.data_map.insert(key.to_string(), value.to_string());
             });