5 Commits

Author SHA1 Message Date
dependabot[bot]
03a655dfa9 Bump diesel from 2.2.8 to 2.2.10
Bumps [diesel](https://github.com/diesel-rs/diesel) from 2.2.8 to 2.2.10.
- [Release notes](https://github.com/diesel-rs/diesel/releases)
- [Changelog](https://github.com/diesel-rs/diesel/blob/v2.2.10/CHANGELOG.md)
- [Commits](https://github.com/diesel-rs/diesel/compare/v2.2.8...v2.2.10)

---
updated-dependencies:
- dependency-name: diesel
  dependency-version: 2.2.10
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-04-28 20:23:49 +00:00
fceffb900d release/v1.0.3 2025-04-18 00:29:04 +04:00
49ce0005dc Fix connections to sites failing due to missing certificates. 2025-04-18 00:28:55 +04:00
4c738085f2 release/v1.0.2 2025-04-18 00:11:55 +04:00
20602eb863 Improve error reporting when updating the schedule link. 2025-04-18 00:11:05 +04:00
6 changed files with 54 additions and 47 deletions

Cargo.lock (generated)

@@ -926,9 +926,9 @@ dependencies = [
 [[package]]
 name = "diesel"
-version = "2.2.8"
+version = "2.2.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "470eb10efc8646313634c99bb1593f402a6434cbd86e266770c6e39219adb86a"
+checksum = "ff3e1edb1f37b4953dd5176916347289ed43d7119cc2e6c7c3f7849ff44ea506"
 dependencies = [
  "bitflags",
  "byteorder 1.5.0",
@@ -2876,7 +2876,7 @@ dependencies = [
 [[package]]
 name = "schedule-parser-rusted"
-version = "1.0.1"
+version = "1.0.3"
 dependencies = [
  "actix-macros 0.1.0",
  "actix-test",

Cargo.toml

@@ -3,7 +3,7 @@ members = ["actix-macros", "actix-test"]
 [package]
 name = "schedule-parser-rusted"
-version = "1.0.1"
+version = "1.0.3"
 edition = "2024"
 publish = false
@@ -17,7 +17,7 @@ bcrypt = "0.17.0"
 calamine = "0.26.1"
 chrono = { version = "0.4.40", features = ["serde"] }
 derive_more = "2.0.1"
-diesel = { version = "2.2.8", features = ["postgres"] }
+diesel = { version = "2.2.10", features = ["postgres"] }
 diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
 dotenvy = "0.15.7"
 env_logger = "0.11.7"

Dockerfile

@@ -6,7 +6,7 @@ ARG BINARY_NAME
 WORKDIR /app/
 RUN apt update && \
-    apt install -y libpq5
+    apt install -y libpq5 ca-certificates openssl
 COPY ./${BINARY_NAME} /bin/main
 RUN chmod +x /bin/main
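
The ca-certificates/openssl addition corresponds to the v1.0.3 commit above about connections failing due to missing certificates: without system CA roots in the runtime image, outbound HTTPS requests cannot verify server certificates. A throwaway smoke check that could be run inside the image to confirm the fix (not part of this change; it assumes reqwest with its default TLS backend and tokio with the macros feature):

// Hypothetical smoke test, not part of the repository: run inside the image to
// confirm that TLS certificate verification works once ca-certificates is installed.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // Fails with a certificate-verification error when CA roots are missing.
    let status = reqwest::get("https://example.com").await?.status();
    println!("TLS handshake ok, status: {status}");
    Ok(())
}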


@@ -60,20 +60,20 @@ pub async fn update_download_url(
                     }
                 },
                 Err(error) => {
-                    if let FetchError::Unknown(error) = error {
+                    if let FetchError::Unknown(error) = &error {
                         sentry::capture_error(&error);
                     }

-                    ErrorCode::DownloadFailed.into_response()
+                    ErrorCode::DownloadFailed(error).into_response()
                 }
             }
         }
         Err(error) => {
-            if let FetchError::Unknown(error) = error {
+            if let FetchError::Unknown(error) = &error {
                 sentry::capture_error(&error);
             }

-            ErrorCode::FetchFailed.into_response()
+            ErrorCode::FetchFailed(error).into_response()
         }
     }
 }
@@ -85,6 +85,7 @@ mod schema {
     use derive_more::Display;
     use serde::{Deserialize, Serialize, Serializer};
     use utoipa::ToSchema;
+    use crate::xls_downloader::interface::FetchError;

     pub type ServiceResponse = crate::routes::schema::Response<CacheStatus, ErrorCode>;
@@ -103,12 +104,12 @@ mod schema {
         NonWhitelistedHost,

         /// Failed to retrieve file metadata.
-        #[display("Unable to retrieve metadata from the specified URL.")]
-        FetchFailed,
+        #[display("Unable to retrieve metadata from the specified URL: {_0}")]
+        FetchFailed(FetchError),

         /// Failed to download the file.
-        #[display("Unable to retrieve data from the specified URL.")]
-        DownloadFailed,
+        #[display("Unable to retrieve data from the specified URL: {_0}")]
+        DownloadFailed(FetchError),

         /// The link leads to an outdated schedule.
         ///
@@ -129,8 +130,8 @@ mod schema {
         {
             match self {
                 ErrorCode::NonWhitelistedHost => serializer.serialize_str("NON_WHITELISTED_HOST"),
-                ErrorCode::FetchFailed => serializer.serialize_str("FETCH_FAILED"),
-                ErrorCode::DownloadFailed => serializer.serialize_str("DOWNLOAD_FAILED"),
+                ErrorCode::FetchFailed(_) => serializer.serialize_str("FETCH_FAILED"),
+                ErrorCode::DownloadFailed(_) => serializer.serialize_str("DOWNLOAD_FAILED"),
                 ErrorCode::OutdatedSchedule => serializer.serialize_str("OUTDATED_SCHEDULE"),
                 ErrorCode::InvalidSchedule(_) => serializer.serialize_str("INVALID_SCHEDULE"),
             }
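
The reshaped ErrorCode keeps a useful split: the derive_more Display string now interpolates the wrapped FetchError for human-readable messages, while the hand-written Serialize impl still emits a fixed machine-readable code, so API clients keying on the JSON value are unaffected. A minimal, self-contained sketch of that pattern with illustrative names rather than the project's actual types (assumes derive_more 2.x, serde, and serde_json):

use derive_more::Display;
use serde::{Serialize, Serializer};

// Illustrative error enum, not the project's actual ErrorCode.
#[derive(Debug, Display)]
enum ApiError {
    // The payload is interpolated into the human-readable message via {_0}.
    #[display("Unable to retrieve data from the specified URL: status {_0}")]
    DownloadFailed(u16),
    #[display("The requested host is not whitelisted.")]
    NonWhitelistedHost,
}

// Serialization deliberately ignores the payload and emits a stable code,
// mirroring the serializer in the hunk above.
impl Serialize for ApiError {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            ApiError::DownloadFailed(_) => serializer.serialize_str("DOWNLOAD_FAILED"),
            ApiError::NonWhitelistedHost => serializer.serialize_str("NON_WHITELISTED_HOST"),
        }
    }
}

fn main() {
    let err = ApiError::DownloadFailed(502);
    println!("{err}");                                    // detailed message for logs and clients
    println!("{}", serde_json::to_string(&err).unwrap()); // "DOWNLOAD_FAILED" on the wire
    println!("{}", serde_json::to_string(&ApiError::NonWhitelistedHost).unwrap());
}

Running it prints the detailed Display message first, then the stable code strings as the serialized forms.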

src/xls_downloader/basic_impl.rs

@@ -1,6 +1,7 @@
 use crate::xls_downloader::interface::{FetchError, FetchOk, FetchResult, XLSDownloader};
 use chrono::{DateTime, Utc};
 use std::env;
+use std::sync::Arc;

 pub struct BasicXlsDownloader {
     pub url: Option<String>,
@@ -22,7 +23,7 @@ async fn fetch_specified(url: &String, user_agent: &String, head: bool) -> Fetch
     match response {
         Ok(r) => {
             if r.status().as_u16() != 200 {
-                return Err(FetchError::BadStatusCode);
+                return Err(FetchError::BadStatusCode(r.status().as_u16()));
             }

             let headers = r.headers();
@@ -32,11 +33,18 @@ async fn fetch_specified(url: &String, user_agent: &String, head: bool) -> Fetch
             let last_modified = headers.get("last-modified");
             let date = headers.get("date");

-            if content_type.is_none() || etag.is_none() || last_modified.is_none() || date.is_none()
-            {
-                Err(FetchError::BadHeaders)
+            if content_type.is_none() {
+                Err(FetchError::BadHeaders("Content-Type".to_string()))
+            } else if etag.is_none() {
+                Err(FetchError::BadHeaders("ETag".to_string()))
+            } else if last_modified.is_none() {
+                Err(FetchError::BadHeaders("Last-Modified".to_string()))
+            } else if date.is_none() {
+                Err(FetchError::BadHeaders("Date".to_string()))
             } else if content_type.unwrap() != "application/vnd.ms-excel" {
-                Err(FetchError::BadContentType)
+                Err(FetchError::BadContentType(
+                    content_type.unwrap().to_str().unwrap().to_string(),
+                ))
             } else {
                 let etag = etag.unwrap().to_str().unwrap().to_string();
                 let last_modified =
@@ -51,7 +59,7 @@ async fn fetch_specified(url: &String, user_agent: &String, head: bool) -> Fetch
                 })
             }
         }
-        Err(e) => Err(FetchError::Unknown(e)),
+        Err(error) => Err(FetchError::Unknown(Arc::new(error))),
     }
 }
@@ -86,7 +94,7 @@ impl XLSDownloader for BasicXlsDownloader {
 #[cfg(test)]
 mod tests {
-    use crate::xls_downloader::basic_impl::{BasicXlsDownloader, fetch_specified};
+    use crate::xls_downloader::basic_impl::{fetch_specified, BasicXlsDownloader};
     use crate::xls_downloader::interface::{FetchError, XLSDownloader};

     #[tokio::test]
@@ -116,14 +124,10 @@ mod tests {
         assert!(results[0].is_err());
         assert!(results[1].is_err());

-        assert_eq!(
-            *results[0].as_ref().err().unwrap(),
-            FetchError::BadStatusCode
-        );
-        assert_eq!(
-            *results[1].as_ref().err().unwrap(),
-            FetchError::BadStatusCode
-        );
+        let expected_error = FetchError::BadStatusCode(404);
+
+        assert_eq!(*results[0].as_ref().err().unwrap(), expected_error);
+        assert_eq!(*results[1].as_ref().err().unwrap(), expected_error);
     }

     #[tokio::test]
@@ -139,8 +143,10 @@ mod tests {
         assert!(results[0].is_err());
         assert!(results[1].is_err());

-        assert_eq!(*results[0].as_ref().err().unwrap(), FetchError::BadHeaders);
-        assert_eq!(*results[1].as_ref().err().unwrap(), FetchError::BadHeaders);
+        let expected_error = FetchError::BadHeaders("ETag".to_string());
+
+        assert_eq!(*results[0].as_ref().err().unwrap(), expected_error);
+        assert_eq!(*results[1].as_ref().err().unwrap(), expected_error);
     }

     #[tokio::test]
@@ -155,15 +161,6 @@ mod tests {
         assert!(results[0].is_err());
         assert!(results[1].is_err());

-        assert_eq!(
-            *results[0].as_ref().err().unwrap(),
-            FetchError::BadContentType
-        );
-        assert_eq!(
-            *results[1].as_ref().err().unwrap(),
-            FetchError::BadContentType
-        );
     }

     #[tokio::test]
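
A detail behind the Arc::new(error) change: reqwest::Error does not implement Clone, so wrapping it in Arc is what lets the reworked FetchError (next file) derive Clone and travel inside a response payload, while error reporting still only needs a borrow. A small sketch of the wrapping pattern, using a stand-in type in place of reqwest::Error:

use std::fmt;
use std::sync::Arc;

// Stand-in for a third-party error type that is not Clone (like reqwest::Error).
#[derive(Debug)]
struct TransportError(String);

impl fmt::Display for TransportError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "transport failure: {}", self.0)
    }
}

impl std::error::Error for TransportError {}

// Wrapping the non-Clone error in Arc is what makes the enum cheaply cloneable.
#[derive(Clone, Debug)]
enum FetchError {
    Unknown(Arc<TransportError>),
}

fn main() {
    let err = FetchError::Unknown(Arc::new(TransportError("connection reset".into())));
    let for_response = err.clone(); // only possible because of the Arc wrapper

    if let FetchError::Unknown(inner) = &err {
        // A reporting hook (sentry::capture_error takes &E where E: Error)
        // only needs a borrow of the inner error.
        let report: &dyn std::error::Error = inner.as_ref();
        eprintln!("captured: {report}");
    }

    if let FetchError::Unknown(inner) = &for_response {
        println!("responding with: {inner}");
    }
}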

src/xls_downloader/interface.rs

@@ -1,23 +1,32 @@
 use chrono::{DateTime, Utc};
+use derive_more::Display;
 use std::mem::discriminant;
+use std::sync::Arc;
+use utoipa::ToSchema;

 /// XLS data retrieval errors.
-#[derive(Debug)]
+#[derive(Clone, Debug, ToSchema, Display)]
 pub enum FetchError {
     /// File url is not set.
+    #[display("The link to the timetable was not provided earlier.")]
     NoUrlProvided,

     /// Unknown error.
-    Unknown(reqwest::Error),
+    #[display("An unknown error occurred while downloading the file.")]
+    #[schema(value_type = String)]
+    Unknown(Arc<reqwest::Error>),

     /// Server returned a status code different from 200.
-    BadStatusCode,
+    #[display("Server returned a status code {_0}.")]
+    BadStatusCode(u16),

     /// The url leads to a file of a different type.
-    BadContentType,
+    #[display("The link leads to a file of type '{_0}'.")]
+    BadContentType(String),

     /// Server doesn't return expected headers.
-    BadHeaders,
+    #[display("Server doesn't return expected header(s) '{_0}'.")]
+    BadHeaders(String),
 }

 impl PartialEq for FetchError {
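
The retained PartialEq impl (its body lies outside this hunk) imports std::mem::discriminant, which points to variant-only equality: payloads are ignored when comparing. That would explain why the updated tests can use FetchError::BadHeaders("ETag".to_string()) as the expected value without matching the exact missing header. A minimal sketch of that comparison style, assuming the impl follows the usual discriminant pattern:

use std::mem::discriminant;
use std::sync::Arc;

// Cut-down stand-in for FetchError; Arc<std::io::Error> replaces Arc<reqwest::Error>.
#[derive(Clone, Debug)]
enum FetchError {
    BadStatusCode(u16),
    BadHeaders(String),
    Unknown(Arc<std::io::Error>),
}

// Variant-only equality: values compare equal when they are the same variant,
// no matter what payload they carry.
impl PartialEq for FetchError {
    fn eq(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}

fn main() {
    assert_eq!(
        FetchError::BadHeaders("ETag".to_string()),
        FetchError::BadHeaders("Date".to_string()) // different payloads, same variant
    );
    assert_ne!(
        FetchError::BadStatusCode(404),
        FetchError::BadHeaders("ETag".to_string())
    );
    let _ = FetchError::Unknown(Arc::new(std::io::Error::other("example")));
    println!("discriminant-based equality ignores payloads");
}

If payloads should ever participate in equality, deriving PartialEq instead would be the stricter alternative.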