11 Commits

14 changed files with 872 additions and 552 deletions

142
.github/workflows/build.yml vendored Normal file
View File

@@ -0,0 +1,142 @@
# CI pipeline for pushes to master (release tags are handled elsewhere):
#   test -> build (strip + upload debug symbols to Sentry) -> docker push.
name: build

on:
  push:
    branches: [ "master" ]
    # Release tags trigger a separate release workflow.
    tags-ignore: [ "release/v*" ]

permissions:
  contents: write

env:
  CARGO_TERM_COLOR: always
  BINARY_NAME: schedule-parser-rusted
  TEST_DB: ${{ secrets.TEST_DATABASE_URL }}
  SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
  SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
  SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
  DOCKER_IMAGE_NAME: ${{ github.repository }}
  DOCKER_REGISTRY_HOST: registry.n08i40k.ru
  DOCKER_REGISTRY_USERNAME: ${{ github.repository_owner }}
  DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}

jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Setup Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: stable
      - name: Test
        run: |
          cargo test --verbose
        env:
          DATABASE_URL: ${{ env.TEST_DB }}
          SCHEDULE_DISABLE_AUTO_UPDATE: 1
          JWT_SECRET: "test-secret-at-least-256-bits-used"
          VK_ID_CLIENT_ID: 0
          VK_ID_REDIRECT_URI: "vk0://vk.com/blank.html"
          TELEGRAM_BOT_ID: 0
          TELEGRAM_MINI_APP_HOST: example.com
          # Quoted: env values are strings; a bare `false` is a YAML boolean.
          TELEGRAM_TEST_DC: "false"
          YANDEX_CLOUD_API_KEY: ""
          YANDEX_CLOUD_FUNC_ID: ""

  build:
    name: Build
    runs-on: ubuntu-latest
    needs: test
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Setup Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: stable
      - name: Build
        run: cargo build --release --verbose
      - name: Extract debug symbols
        # Split the debug info into a side file, strip the shipped binary,
        # then embed a debuglink pointing at the side file.
        # `.debug` is used (instead of `.d`) so we do not clobber the
        # dep-info file cargo itself writes as target/release/<name>.d.
        run: |
          objcopy --only-keep-debug target/release/${{ env.BINARY_NAME }}{,.debug}
          objcopy --strip-debug --strip-unneeded target/release/${{ env.BINARY_NAME }}
          objcopy --add-gnu-debuglink target/release/${{ env.BINARY_NAME }}{.debug,}
      - name: Setup sentry-cli
        uses: matbour/setup-sentry-cli@v2.0.0
        with:
          version: latest
          token: ${{ env.SENTRY_AUTH_TOKEN }}
          organization: ${{ env.SENTRY_ORG }}
          project: ${{ env.SENTRY_PROJECT }}
      - name: Upload debug symbols to Sentry
        run: |
          sentry-cli debug-files upload --include-sources .
      - name: Upload build binary artifact
        uses: actions/upload-artifact@v4
        with:
          name: release-binary
          path: target/release/${{ env.BINARY_NAME }}
      - name: Upload build debug symbols artifact
        uses: actions/upload-artifact@v4
        with:
          name: release-symbols
          path: target/release/${{ env.BINARY_NAME }}.debug

  docker:
    name: Build & Push Docker Image
    runs-on: ubuntu-latest
    needs: build
    steps:
      - uses: actions/checkout@v4
      - name: Download build artifacts
        # NOTE(review): download-artifact does not preserve the executable
        # bit; the Dockerfile is assumed to chmod the binary — confirm.
        uses: actions/download-artifact@v4
        with:
          name: release-binary
      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3.10.0
      - name: Login to Registry
        uses: docker/login-action@v3.4.0
        with:
          registry: ${{ env.DOCKER_REGISTRY_HOST }}
          username: ${{ env.DOCKER_REGISTRY_USERNAME }}
          password: ${{ env.DOCKER_REGISTRY_PASSWORD }}
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v5.7.0
        with:
          images: ${{ env.DOCKER_REGISTRY_HOST }}/${{ env.DOCKER_IMAGE_NAME }}
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v6.15.0
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          build-args: |
            "BINARY_NAME=${{ env.BINARY_NAME }}"

View File

@@ -2,7 +2,7 @@ name: cargo test
on: on:
push: push:
branches: [ "master" ] branches: [ "development" ]
tags-ignore: [ "release/v*" ] tags-ignore: [ "release/v*" ]
permissions: permissions:

883
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -3,7 +3,7 @@ members = ["actix-macros", "actix-test", "providers"]
[package] [package]
name = "schedule-parser-rusted" name = "schedule-parser-rusted"
version = "1.2.2" version = "1.3.0"
edition = "2024" edition = "2024"
publish = false publish = false
@@ -21,7 +21,7 @@ actix-macros = { path = "actix-macros" }
actix-web = "4.11.0" actix-web = "4.11.0"
# basic # basic
chrono = { version = "0.4.41", features = ["serde"] } chrono = { version = "0.4.42", features = ["serde"] }
derive_more = { version = "2.0.1", features = ["full"] } derive_more = { version = "2.0.1", features = ["full"] }
dotenvy = "0.15.7" dotenvy = "0.15.7"
@@ -48,13 +48,13 @@ reqwest = { version = "0.12.23", features = ["json"] }
mime = "0.3.17" mime = "0.3.17"
# error handling # error handling
sentry = "0.42.0" sentry = "0.43.0"
sentry-actix = "0.42.0" sentry-actix = "0.43.0"
# [de]serializing # [de]serializing
serde = { version = "1.0.219", features = ["derive"] } serde = { version = "1", features = ["derive"] }
serde_json = "1.0.143" serde_json = "1"
serde_with = "3.14.0" serde_with = "3.14"
sha1 = "0.11.0-rc.2" sha1 = "0.11.0-rc.2"
@@ -65,12 +65,12 @@ utoipa-actix-web = "0.1.2"
uuid = { version = "1.18.1", features = ["v4"] } uuid = { version = "1.18.1", features = ["v4"] }
hex-literal = "1" hex-literal = "1"
log = "0.4.27" log = "0.4.28"
# telegram webdata deciding and verify # telegram webdata deciding and verify
base64 = "0.22.1" base64 = "0.22.1"
percent-encoding = "2.3.2" percent-encoding = "2.3.2"
ed25519-dalek = "3.0.0-pre.0" ed25519-dalek = "3.0.0-pre.1"
# development tracing # development tracing
console-subscriber = { version = "0.4.1", optional = true } console-subscriber = { version = "0.4.1", optional = true }

View File

@@ -100,6 +100,9 @@ pub enum LessonType {
/// Защита курсового проекта. /// Защита курсового проекта.
CourseProjectDefense, CourseProjectDefense,
/// Практическое занятие.
Practice
} }
#[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)] #[derive(Clone, Hash, Debug, Serialize, Deserialize, ToSchema)]

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "provider-engels-polytechnic" name = "provider-engels-polytechnic"
version = "0.1.0" version = "0.2.0"
edition = "2024" edition = "2024"
[features] [features]
@@ -20,7 +20,7 @@ derive_more = { version = "2.0.1", features = ["error", "display"] }
utoipa = { version = "5.4.0", features = ["macros", "chrono"] } utoipa = { version = "5.4.0", features = ["macros", "chrono"] }
calamine = "0.30.0" calamine = "0.31"
async-trait = "0.1.89" async-trait = "0.1.89"
reqwest = "0.12.23" reqwest = "0.12.23"
@@ -28,5 +28,6 @@ ua_generator = "0.5.22"
regex = "1.11.2" regex = "1.11.2"
strsim = "0.11.1" strsim = "0.11.1"
log = "0.4.27" log = "0.4.27"
sentry = "0.42.0" sentry = "0.43.0"
fancy-regex = "0.16.2"

View File

@@ -65,7 +65,11 @@ impl ScheduleProvider for Wrapper {
this.snapshot = Arc::new(snapshot); this.snapshot = Arc::new(snapshot);
}, },
Err(updater::error::Error::QueryUrlFailed(updater::error::QueryUrlError::UriFetchFailed)) => {},
Err(err) => { Err(err) => {
sentry::capture_error(&err);
cancellation_token.cancel(); cancellation_token.cancel();
return Err(err.into()); return Err(err.into());
} }

View File

@@ -233,6 +233,7 @@ enum LessonParseResult {
fn guess_lesson_type(text: &str) -> Option<LessonType> { fn guess_lesson_type(text: &str) -> Option<LessonType> {
static MAP: LazyLock<HashMap<&str, LessonType>> = LazyLock::new(|| { static MAP: LazyLock<HashMap<&str, LessonType>> = LazyLock::new(|| {
HashMap::from([ HashMap::from([
("о важном", LessonType::Additional),
("консультация", LessonType::Consultation), ("консультация", LessonType::Consultation),
("самостоятельная работа", LessonType::IndependentWork), ("самостоятельная работа", LessonType::IndependentWork),
("зачет", LessonType::Exam), ("зачет", LessonType::Exam),
@@ -240,6 +241,7 @@ fn guess_lesson_type(text: &str) -> Option<LessonType> {
("экзамен", LessonType::ExamDefault), ("экзамен", LessonType::ExamDefault),
("курсовой проект", LessonType::CourseProject), ("курсовой проект", LessonType::CourseProject),
("защита курсового проекта", LessonType::CourseProjectDefense), ("защита курсового проекта", LessonType::CourseProjectDefense),
("практическое занятие", LessonType::Practice),
]) ])
}); });
@@ -426,125 +428,128 @@ fn parse_name_and_subgroups(text: &str) -> Result<ParsedLessonName, Error> {
// 3. "Модификатор" (чаще всего). // 3. "Модификатор" (чаще всего).
// //
// Регулярное выражение для получения ФИО преподавателей и номеров подгрупп (aka. второй части). // Регулярное выражение для получения ФИО преподавателей и номеров подгрупп (aka. второй части).
// (?:[А-Я][а-я]+\s?(?:[А-Я][\s.]*){2}(?:\(\d\s?[а-я]+\))?(?:, )?)+[\s.]* static NAME_RE: LazyLock<fancy_regex::Regex> = LazyLock::new(|| {
// fancy_regex::Regex::new(
// Подробнее: r"([А-Я][а-я]+(?:[\s.]*[А-Я]){1,2})(?=[^а-я])[.\s]*(?:\(?(\d)[\sа-я]*\)?)?",
// (?:
// [А-Я][а-я]+ - Фамилия.
// \s? - Кто знает, будет ли там пробел.
// (?:[А-Я][\s.]*){2} - Имя и отчество с учётом случайных пробелов и точек.
// (?:
// \( - Открытие подгруппы.
// \s? - Кто знает, будет ли там пробел.
// \d - Номер подгруппы.
// \s? - Кто знает, будет ли там пробел.
// [а-я\s]+ - Слово "подгруппа" с учётов ошибок.
// \) - Закрытие подгруппы.
// )? - Явное указание подгруппы может отсутствовать по понятным причинам.
// (?:, )? - Разделители между отдельными частями.
// )+
// [\s.]* - Забираем с собой всякий мусор, что бы не передать его в третью часть.
static NAMES_REGEX: LazyLock<Regex> = LazyLock::new(|| {
Regex::new(
r"(?:[А-Я][а-я]+\s?(?:[А-Я][\s.]*){2}(?:\(\s*\d\s*[а-я\s]+\))?(?:[\s,]+)?){1,2}+[\s.,]*",
) )
.unwrap() .unwrap()
}); });
// Отчистка let text = text
static CLEAN_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"[\s\n\t]+").unwrap()); .chars()
.filter(|c: &char| {
c.is_whitespace()
|| c.is_ascii_digit()
|| (*c >= 'а' && *c <= 'я')
|| (*c >= 'А' && *c <= 'Я')
|| *c == '.'
|| *c == '-'
})
.collect::<String>()
.replace(r"\s+", " ");
let text = CLEAN_RE let mut lesson_name: Option<&str> = None;
.replace(&text.replace([' ', '\t', '\n'], " "), " ") let mut extra: Option<&str> = None;
.to_string();
let (lesson_name, subgroups, lesson_type) = match NAMES_REGEX.captures(&text) { let mut shared_subgroup = false;
Some(captures) => { let mut subgroups: [Option<LessonSubGroup>; 2] = [None, None];
let capture = captures.get(0).unwrap();
let subgroups: Vec<Option<LessonSubGroup>> = { for capture in NAME_RE.captures_iter(&text) {
let src = capture.as_str().replace([' ', '.'], ""); let capture = capture.unwrap();
let mut shared_subgroup = false; if lesson_name.is_none() {
let mut subgroups: [Option<LessonSubGroup>; 2] = [None, None]; lesson_name = Some(&text[..capture.get(0).unwrap().start()]);
for name in src.split(',') {
let open_bracket_index = name.find('(');
let number: u8 = open_bracket_index
.map_or(0, |index| name[(index + 1)..(index + 2)].parse().unwrap());
let teacher_name = {
let name_end = open_bracket_index.unwrap_or(name.len());
// Я ебал. Как же я долго до этого доходил.
format!(
"{} {}.{}.",
name.get(..name_end - 4).unwrap(),
name.get(name_end - 4..name_end - 2).unwrap(),
name.get(name_end - 2..name_end).unwrap(),
)
};
let lesson = Some(LessonSubGroup {
cabinet: None,
teacher: Some(teacher_name),
});
match number {
0 => {
subgroups[0] = lesson;
subgroups[1] = None;
shared_subgroup = true;
break;
}
num => {
// 1 - 1 = 0 | 2 - 1 = 1 | 3 - 1 = 2 (schedule index to array index)
// 0 % 2 = 0 | 1 % 2 = 1 | 2 % 2 = 0 (clamp)
let normalised = (num - 1) % 2;
subgroups[normalised as usize] = lesson;
}
}
}
if shared_subgroup {
Vec::from([subgroups[0].take()])
} else {
Vec::from(subgroups)
}
};
let name = text[..capture.start()].trim().to_string();
let extra = text[capture.end()..].trim().to_string();
let lesson_type = if extra.len() > 4 {
let result = guess_lesson_type(&extra);
if result.is_none() {
#[cfg(not(debug_assertions))]
sentry::capture_message(
&*format!("Не удалось угадать тип пары '{}'!", extra),
sentry::Level::Warning,
);
#[cfg(debug_assertions)]
log::warn!("Не удалось угадать тип пары '{}'!", extra);
}
result
} else {
None
};
(name, subgroups, lesson_type)
} }
None => (text, Vec::new(), None),
extra = Some(&text[capture.get(0).unwrap().end()..]);
let teacher_name = {
let clean = capture
.get(1)
.unwrap()
.as_str()
.chars()
.filter(|c| c.is_alphabetic())
.collect::<Vec<char>>();
if clean.get(clean.len() - 2).is_some_and(|c| c.is_uppercase()) {
let (name, remaining) = clean.split_at(clean.len() - 2);
format!(
"{} {}.{}.",
name.iter().collect::<String>(),
remaining[0],
remaining[1]
)
} else {
let (remaining, name) = clean.split_last().unwrap();
format!("{} {}.", name.iter().collect::<String>(), remaining)
}
};
let subgroup_index = capture
.get(2)
.and_then(|m| Some(m.as_str().parse::<u32>().unwrap()));
let subgroup = Some(LessonSubGroup {
cabinet: None,
teacher: Some(teacher_name),
});
match subgroup_index {
None => {
subgroups[0] = subgroup;
subgroups[1] = None;
shared_subgroup = true;
break;
}
Some(num) => {
// 1 - 1 = 0 | 2 - 1 = 1 | 3 - 1 = 2 (schedule index to array index)
// 0 % 2 = 0 | 1 % 2 = 1 | 2 % 2 = 0 (clamp)
let normalised = (num - 1) % 2;
subgroups[normalised as usize] = subgroup;
}
}
}
let subgroups = if lesson_name.is_none() {
Vec::new()
} else if shared_subgroup {
Vec::from([subgroups[0].take()])
} else {
Vec::from(subgroups)
};
if extra.is_none() {
extra = text
.rfind(" ")
.and_then(|i| text[..i].rfind(" "))
.map(|i| &text[i + 1..]);
}
let lesson_type = if let Some(extra) = extra
&& extra.len() > 4
{
let result = guess_lesson_type(&extra);
if result.is_none() {
#[cfg(not(debug_assertions))]
sentry::capture_message(
&format!("Не удалось угадать тип пары '{}'!", extra),
sentry::Level::Warning,
);
#[cfg(debug_assertions)]
log::warn!("Не удалось угадать тип пары '{}'!", extra);
}
result
} else {
None
}; };
Ok(ParsedLessonName { Ok(ParsedLessonName {
name: lesson_name, name: lesson_name.unwrap_or(&text).to_string(),
subgroups, subgroups,
r#type: lesson_type, r#type: lesson_type,
}) })

View File

@@ -46,14 +46,17 @@ pub mod error {
/// problems with the Yandex Cloud Function invocation. /// problems with the Yandex Cloud Function invocation.
#[display("An error occurred during the request to the Yandex Cloud API: {_0}")] #[display("An error occurred during the request to the Yandex Cloud API: {_0}")]
RequestFailed(reqwest::Error), RequestFailed(reqwest::Error),
#[display("Unable to fetch Uri in 3 retries")]
UriFetchFailed,
} }
/// Errors that may occur during the creation of a schedule snapshot. /// Errors that may occur during the creation of a schedule snapshot.
#[derive(Debug, Display, Error)] #[derive(Debug, Display, Error)]
pub enum SnapshotCreationError { pub enum SnapshotCreationError {
/// The URL is the same as the one already being used (no update needed). /// The ETag is the same (no update needed).
#[display("The URL is the same as the one already being used.")] #[display("The ETag is the same.")]
SameUrl, Same,
/// The URL query for the XLS file failed to execute, either due to network issues or invalid API parameters. /// The URL query for the XLS file failed to execute, either due to network issues or invalid API parameters.
#[display("Failed to fetch URL: {_0}")] #[display("Failed to fetch URL: {_0}")]
@@ -86,10 +89,6 @@ impl Updater {
downloader: &mut XlsDownloader, downloader: &mut XlsDownloader,
url: String, url: String,
) -> Result<ScheduleSnapshot, SnapshotCreationError> { ) -> Result<ScheduleSnapshot, SnapshotCreationError> {
if downloader.url.as_ref().is_some_and(|_url| _url.eq(&url)) {
return Err(SnapshotCreationError::SameUrl);
}
let head_result = downloader.set_url(&url).await.map_err(|error| { let head_result = downloader.set_url(&url).await.map_err(|error| {
if let FetchError::Unknown(error) = &error { if let FetchError::Unknown(error) = &error {
sentry::capture_error(&error); sentry::capture_error(&error);
@@ -98,6 +97,10 @@ impl Updater {
SnapshotCreationError::FetchFailed(error) SnapshotCreationError::FetchFailed(error)
})?; })?;
if downloader.etag == Some(head_result.etag) {
return Err(SnapshotCreationError::Same);
}
let xls_data = downloader let xls_data = downloader
.fetch(false) .fetch(false)
.await .await
@@ -144,18 +147,43 @@ impl Updater {
async fn query_url(api_key: &str, func_id: &str) -> Result<String, QueryUrlError> { async fn query_url(api_key: &str, func_id: &str) -> Result<String, QueryUrlError> {
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let uri = client let uri = {
.post(format!( // вот бы добавили named-scopes как в котлине,
"https://functions.yandexcloud.net/{}?integration=raw", // чтоб мне не пришлось такой хуйнёй страдать.
func_id #[allow(unused_assignments)]
)) let mut uri = String::new();
.header("Authorization", format!("Api-Key {}", api_key)) let mut counter = 0;
.send()
.await loop {
.map_err(QueryUrlError::RequestFailed)? if counter == 3 {
.text() return Err(QueryUrlError::UriFetchFailed);
.await }
.map_err(QueryUrlError::RequestFailed)?;
counter += 1;
uri = client
.post(format!(
"https://functions.yandexcloud.net/{}?integration=raw",
func_id
))
.header("Authorization", format!("Api-Key {}", api_key))
.send()
.await
.map_err(QueryUrlError::RequestFailed)?
.text()
.await
.map_err(QueryUrlError::RequestFailed)?;
if uri.is_empty() {
log::warn!("[{}] Unable to get uri! Retrying in 5 seconds...", counter);
continue;
}
break;
}
uri
};
Ok(format!("https://politehnikum-eng.ru{}", uri.trim())) Ok(format!("https://politehnikum-eng.ru{}", uri.trim()))
} }
@@ -249,7 +277,7 @@ impl Updater {
let snapshot = match Self::new_snapshot(&mut self.downloader, url).await { let snapshot = match Self::new_snapshot(&mut self.downloader, url).await {
Ok(snapshot) => snapshot, Ok(snapshot) => snapshot,
Err(SnapshotCreationError::SameUrl) => { Err(SnapshotCreationError::Same) => {
let mut clone = current_snapshot.clone(); let mut clone = current_snapshot.clone();
clone.update(); clone.update();

View File

@@ -66,25 +66,30 @@ pub struct FetchOk {
/// Date data received. /// Date data received.
pub requested_at: DateTime<Utc>, pub requested_at: DateTime<Utc>,
/// Etag.
pub etag: String,
/// File data. /// File data.
pub data: Option<Vec<u8>>, pub data: Option<Vec<u8>>,
} }
impl FetchOk { impl FetchOk {
/// Result without file content. /// Result without file content.
pub fn head(uploaded_at: DateTime<Utc>) -> Self { pub fn head(uploaded_at: DateTime<Utc>, etag: String) -> Self {
FetchOk { FetchOk {
uploaded_at, uploaded_at,
requested_at: Utc::now(), requested_at: Utc::now(),
etag,
data: None, data: None,
} }
} }
/// Full result. /// Full result.
pub fn get(uploaded_at: DateTime<Utc>, data: Vec<u8>) -> Self { pub fn get(uploaded_at: DateTime<Utc>, etag: String, data: Vec<u8>) -> Self {
FetchOk { FetchOk {
uploaded_at, uploaded_at,
requested_at: Utc::now(), requested_at: Utc::now(),
etag,
data: Some(data), data: Some(data),
} }
} }
@@ -94,11 +99,15 @@ pub type FetchResult = Result<FetchOk, FetchError>;
pub struct XlsDownloader { pub struct XlsDownloader {
pub url: Option<String>, pub url: Option<String>,
pub etag: Option<String>,
} }
impl XlsDownloader { impl XlsDownloader {
pub fn new() -> Self { pub fn new() -> Self {
XlsDownloader { url: None } XlsDownloader {
url: None,
etag: None,
}
} }
async fn fetch_specified(url: &str, head: bool) -> FetchResult { async fn fetch_specified(url: &str, head: bool) -> FetchResult {
@@ -124,9 +133,12 @@ impl XlsDownloader {
.get("Content-Type") .get("Content-Type")
.ok_or(FetchError::bad_headers("Content-Type"))?; .ok_or(FetchError::bad_headers("Content-Type"))?;
if !headers.contains_key("etag") { let etag = headers
return Err(FetchError::bad_headers("etag")); .get("etag")
} .ok_or(FetchError::bad_headers("etag"))?
.to_str()
.or(Err(FetchError::bad_headers("etag")))?
.to_string();
let last_modified = headers let last_modified = headers
.get("last-modified") .get("last-modified")
@@ -141,9 +153,13 @@ impl XlsDownloader {
.with_timezone(&Utc); .with_timezone(&Utc);
Ok(if head { Ok(if head {
FetchOk::head(last_modified) FetchOk::head(last_modified, etag)
} else { } else {
FetchOk::get(last_modified, response.bytes().await.unwrap().to_vec()) FetchOk::get(
last_modified,
etag,
response.bytes().await.unwrap().to_vec(),
)
}) })
} }

View File

@@ -2,16 +2,6 @@ use jsonwebtoken::errors::ErrorKind;
use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode}; use jsonwebtoken::{Algorithm, DecodingKey, Validation, decode};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(Deserialize, Serialize)]
struct TokenData {
iis: String,
sub: i32,
app: i32,
exp: i32,
iat: i32,
jti: i32,
}
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
struct Claims { struct Claims {
sub: i32, sub: i32,

View File

@@ -185,7 +185,7 @@ mod tests {
id: Set(id.clone()), id: Set(id.clone()),
username: Set(username), username: Set(username),
password: Set(Some( password: Set(Some(
bcrypt::hash("example".to_string(), bcrypt::DEFAULT_COST).unwrap(), bcrypt::hash("example", bcrypt::DEFAULT_COST).unwrap(),
)), )),
vk_id: Set(None), vk_id: Set(None),
telegram_id: Set(None), telegram_id: Set(None),

View File

@@ -2,10 +2,12 @@ mod env;
pub use crate::state::env::AppEnv; pub use crate::state::env::AppEnv;
use actix_web::web; use actix_web::web;
use database::sea_orm::{Database, DatabaseConnection}; use database::migration::{Migrator, MigratorTrait};
use database::sea_orm::{ConnectOptions, Database, DatabaseConnection};
use providers::base::{ScheduleProvider, ScheduleSnapshot}; use providers::base::{ScheduleProvider, ScheduleSnapshot};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration;
use tokio_util::sync::CancellationToken; use tokio_util::sync::CancellationToken;
/// Common data provided to endpoints. /// Common data provided to endpoints.
@@ -55,9 +57,24 @@ impl AppState {
database database
} else { } else {
let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set"); let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
Database::connect(&database_url)
let mut opt = ConnectOptions::new(database_url.clone());
opt.max_connections(4)
.min_connections(2)
.connect_timeout(Duration::from_secs(10))
.idle_timeout(Duration::from_secs(8))
.sqlx_logging(true);
let database = Database::connect(opt)
.await .await
.unwrap_or_else(|_| panic!("Error connecting to {}", database_url)) .unwrap_or_else(|_| panic!("Error connecting to {}", database_url));
Migrator::up(&database, None)
.await
.expect("Failed to run database migrations");
database
}, },
env, env,
providers, providers,

View File

@@ -24,14 +24,13 @@ static ENCODING_KEY: LazyLock<EncodingKey> = LazyLock::new(|| {
}); });
/// Token verification errors. /// Token verification errors.
#[allow(dead_code)]
#[derive(Debug)] #[derive(Debug)]
pub enum Error { pub enum Error {
/// The token has a different signature. /// The token has a different signature.
InvalidSignature, InvalidSignature,
/// Token reading error. /// Token reading error.
InvalidToken(ErrorKind), InvalidToken,
/// Token expired. /// Token expired.
Expired, Expired,
@@ -82,7 +81,7 @@ pub fn verify_and_decode(token: &str) -> Result<String, Error> {
Err(err) => Err(match err.into_kind() { Err(err) => Err(match err.into_kind() {
ErrorKind::InvalidSignature => Error::InvalidSignature, ErrorKind::InvalidSignature => Error::InvalidSignature,
ErrorKind::ExpiredSignature => Error::Expired, ErrorKind::ExpiredSignature => Error::Expired,
kind => Error::InvalidToken(kind), _ => Error::InvalidToken,
}), }),
} }
} }
@@ -115,7 +114,7 @@ mod tests {
fn test_encode() { fn test_encode() {
test_env(); test_env();
assert_eq!(encode(&"test".to_string()).is_empty(), false); assert!(!encode("test").is_empty());
} }
#[test] #[test]
@@ -128,7 +127,7 @@ mod tests {
assert!(result.is_err()); assert!(result.is_err());
assert_eq!( assert_eq!(
result.err().unwrap(), result.err().unwrap(),
Error::InvalidToken(ErrorKind::InvalidToken) Error::InvalidToken
); );
} }