Mirror of https://github.com/n08i40k/schedule-parser-rusted.git (synced 2025-12-06 17:57:47 +03:00)
Compare commits: 1 commit on release/v1 (94ed51c71c)
.github/workflows/release.yml (vendored, 169 lines)
@@ -1,169 +0,0 @@ (file deleted)
name: release

on:
  push:
    tags: [ "release/v*" ]

permissions:
  contents: write

env:
  CARGO_TERM_COLOR: always

  BINARY_NAME: schedule-parser-rusted

  TEST_DB: ${{ secrets.TEST_DATABASE_URL }}

  SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
  SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
  SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}

  DOCKER_IMAGE_NAME: ${{ github.repository }}

  DOCKER_REGISTRY_HOST: registry.n08i40k.ru
  DOCKER_REGISTRY_USERNAME: ${{ github.repository_owner }}
  DOCKER_REGISTRY_PASSWORD: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}

jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: stable

      - name: Test
        run: |
          touch .env.test
          cargo test --verbose
        env:
          DATABASE_URL: ${{ env.TEST_DB }}
          JWT_SECRET: "test-secret-at-least-256-bits-used"
          VKID_CLIENT_ID: 0
          VKID_REDIRECT_URI: "vk0://vk.com/blank.html"
          REQWEST_USER_AGENT: "Dalvik/2.1.0 (Linux; U; Android 6.0.1; OPPO R9s Build/MMB29M)"

  build:
    name: Build
    runs-on: ubuntu-latest
    needs: test
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup Rust
        uses: actions-rust-lang/setup-rust-toolchain@v1.11.0
        with:
          toolchain: stable

      - name: Build
        run: cargo build --release --verbose

      - name: Extract debug symbols
        run: |
          objcopy --only-keep-debug target/release/${{ env.BINARY_NAME }}{,.d}
          objcopy --strip-debug --strip-unneeded target/release/${{ env.BINARY_NAME }}
          objcopy --add-gnu-debuglink target/release/${{ env.BINARY_NAME }}{.d,}

      - name: Setup sentry-cli
        uses: matbour/setup-sentry-cli@v2.0.0
        with:
          version: latest
          token: ${{ env.SENTRY_AUTH_TOKEN }}
          organization: ${{ env.SENTRY_ORG }}
          project: ${{ env.SENTRY_PROJECT }}

      - name: Upload debug symbols to Sentry
        run: |
          sentry-cli debug-files upload --include-sources .

      - name: Upload build binary artifact
        uses: actions/upload-artifact@v4
        with:
          name: release-binary
          path: target/release/${{ env.BINARY_NAME }}

      - name: Upload build debug symbols artifact
        uses: actions/upload-artifact@v4
        with:
          name: release-symbols
          path: target/release/${{ env.BINARY_NAME }}.d

  docker:
    name: Build & Push Docker Image
    runs-on: ubuntu-latest
    needs: build
    steps:
      - uses: actions/checkout@v4

      - name: Download build artifacts
        uses: actions/download-artifact@v4
        with:
          name: release-binary

      - name: Setup Docker Buildx
        uses: docker/setup-buildx-action@v3.10.0

      - name: Login to Registry
        uses: docker/login-action@v3.4.0
        with:
          registry: ${{ env.DOCKER_REGISTRY_HOST }}
          username: ${{ env.DOCKER_REGISTRY_USERNAME }}
          password: ${{ env.DOCKER_REGISTRY_PASSWORD }}

      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@v5.7.0
        with:
          images: ${{ env.DOCKER_REGISTRY_HOST }}/${{ env.DOCKER_IMAGE_NAME }}

      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v6.15.0
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
          build-args: |
            "BINARY_NAME=${{ env.BINARY_NAME }}"

  release:
    name: Create GitHub Release
    runs-on: ubuntu-latest
    needs:
      - build
      - docker
    # noinspection GrazieInspection,SpellCheckingInspection
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Generate changelog
        run: |
          LAST_TAG=$(git describe --tags --abbrev=0 HEAD^)
          echo "## Commits since the last release $LAST_TAG" > CHANGELOG.md
          git log $LAST_TAG..HEAD --oneline >> CHANGELOG.md

      - name: Download build artifacts
        uses: actions/download-artifact@v4
        with:
          pattern: release-*
          merge-multiple: true

      - name: Create Release
        id: create_release
        uses: ncipollo/release-action@v1.16.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          artifacts: "${{ env.BINARY_NAME }},${{ env.BINARY_NAME }}.d"
          bodyFile: CHANGELOG.md

.github/workflows/test.yml (vendored, 6 lines changed)
@@ -1,9 +1,10 @@
-name: cargo test
+name: Tests

 on:
   push:
     branches: [ "master" ]
+    tags-ignore: [ "release/v*" ]
   pull_request:
     branches: [ "master" ]

 permissions:
   contents: read

@@ -29,4 +30,3 @@ jobs:
       JWT_SECRET: "test-secret-at-least-256-bits-used"
       VKID_CLIENT_ID: 0
       VKID_REDIRECT_URI: "vk0://vk.com/blank.html"
-      REQWEST_USER_AGENT: "Dalvik/2.1.0 (Linux; U; Android 6.0.1; OPPO R9s Build/MMB29M)"

.idea/sqldialects.xml (generated, new file, 9 lines)
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="SqlDialectMappings">
    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-211822_create_user_role/down.sql" dialect="PostgreSQL" />
    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-212111_create_users/up.sql" dialect="PostgreSQL" />
    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/down.sql" dialect="PostgreSQL" />
    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/up.sql" dialect="PostgreSQL" />
  </component>
</project>

Cargo.lock (generated, 6 lines changed)
@@ -926,9 +926,9 @@ dependencies = [

 [[package]]
 name = "diesel"
-version = "2.2.8"
+version = "2.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "470eb10efc8646313634c99bb1593f402a6434cbd86e266770c6e39219adb86a"
+checksum = "34d3950690ba3a6910126162b47e775e203006d4242a15de912bec6c0a695153"
 dependencies = [
  "bitflags",
  "byteorder 1.5.0",

@@ -2876,7 +2876,7 @@ dependencies = [

 [[package]]
 name = "schedule-parser-rusted"
-version = "1.0.1"
+version = "0.8.0"
 dependencies = [
  "actix-macros 0.1.0",
  "actix-test",

Cargo.toml
@@ -3,13 +3,10 @@ members = ["actix-macros", "actix-test"]

 [package]
 name = "schedule-parser-rusted"
-version = "1.0.1"
+version = "0.8.0"
 edition = "2024"
 publish = false

-[profile.release]
-debug = true
-
 [dependencies]
 actix-web = "4.10.2"
 actix-macros = { path = "actix-macros" }

@@ -17,7 +14,7 @@ bcrypt = "0.17.0"
 calamine = "0.26.1"
 chrono = { version = "0.4.40", features = ["serde"] }
 derive_more = "2.0.1"
-diesel = { version = "2.2.8", features = ["postgres"] }
+diesel = { version = "2.2.9", features = ["postgres"] }
 diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
 dotenvy = "0.15.7"
 env_logger = "0.11.7"

Dockerfile (14 lines)
@@ -1,14 +0,0 @@ (file deleted)
FROM debian:stable-slim
LABEL authors="n08i40k"

ARG BINARY_NAME

WORKDIR /app/

RUN apt update && \
    apt install -y libpq5

COPY ./${BINARY_NAME} /bin/main
RUN chmod +x /bin/main

ENTRYPOINT ["main"]

@@ -1,6 +1,9 @@
+use crate::utility::jwt::DEFAULT_ALGORITHM;
 use jsonwebtoken::errors::ErrorKind;
-use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
+use jsonwebtoken::{decode, DecodingKey, Validation};
 use serde::{Deserialize, Serialize};
+use std::env;
+use std::sync::LazyLock;

 #[derive(Deserialize, Serialize)]
 struct TokenData {

@@ -14,7 +17,7 @@ struct TokenData {
 #[derive(Debug, Serialize, Deserialize)]
 struct Claims {
-    sub: i32,
+    sub: String,
     iis: String,
     jti: i32,
     app: i32,

@@ -49,10 +52,17 @@ const VK_PUBLIC_KEY: &str = concat!(
     "-----END PUBLIC KEY-----"
 );

-pub fn parse_vk_id(token_str: &String, client_id: i32) -> Result<i32, Error> {
+static VK_ID_CLIENT_ID: LazyLock<i32> = LazyLock::new(|| {
+    env::var("VK_ID_CLIENT_ID")
+        .expect("VK_ID_CLIENT_ID must be set")
+        .parse::<i32>()
+        .expect("VK_ID_CLIENT_ID must be i32")
+});
+
+pub fn parse_vk_id(token_str: &String) -> Result<i32, Error> {
     let dkey = DecodingKey::from_rsa_pem(VK_PUBLIC_KEY.as_bytes()).unwrap();

-    match decode::<Claims>(&token_str, &dkey, &Validation::new(Algorithm::RS256)) {
+    match decode::<Claims>(&token_str, &dkey, &Validation::new(DEFAULT_ALGORITHM)) {
         Ok(token_data) => {
             let claims = token_data.claims;

@@ -60,10 +70,13 @@ pub fn parse_vk_id(token_str: &String, client_id: i32) -> Result<i32, Error> {
             Err(Error::UnknownIssuer(claims.iis))
         } else if claims.jti != 21 {
             Err(Error::UnknownType(claims.jti))
-        } else if claims.app != client_id {
+        } else if claims.app != *VK_ID_CLIENT_ID {
             Err(Error::UnknownClientId(claims.app))
         } else {
-            Ok(claims.sub)
+            match claims.sub.parse::<i32>() {
+                Ok(sub) => Ok(sub),
+                Err(_) => Err(Error::InvalidToken),
+            }
         }
     }
     Err(err) => Err(match err.into_kind() {

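The new VK_ID_CLIENT_ID static above uses std::sync::LazyLock to read and parse an environment variable once, on first access. A minimal self-contained sketch of that pattern, assuming only the standard library (the constant name and the printed output are illustrative, not taken from the repository):

use std::env;
use std::sync::LazyLock;

// Parsed once on first access; later reads reuse the cached value.
static CLIENT_ID: LazyLock<i32> = LazyLock::new(|| {
    env::var("VK_ID_CLIENT_ID")
        .expect("VK_ID_CLIENT_ID must be set")
        .parse::<i32>()
        .expect("VK_ID_CLIENT_ID must be i32")
});

fn main() {
    // Dereferencing the LazyLock triggers the initializer the first time.
    println!("client id: {}", *CLIENT_ID);
}
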
@@ -71,7 +71,7 @@ pub async fn sign_in_vk(
 ) -> ServiceResponse {
     let data = data_json.into_inner();

-    match parse_vk_id(&data.access_token, app_state.vk_id.client_id) {
+    match parse_vk_id(&data.access_token) {
         Ok(id) => sign_in_combined(Vk(id), &app_state).await.into(),
         Err(_) => ErrorCode::InvalidVkAccessToken.into_response(),
     }

@@ -79,7 +79,7 @@ pub async fn sign_up_vk(
 ) -> ServiceResponse {
     let data = data_json.into_inner();

-    match parse_vk_id(&data.access_token, app_state.vk_id.client_id) {
+    match parse_vk_id(&data.access_token) {
         Ok(id) => sign_up_combined(
             SignUpData {
                 username: data.username,

@@ -4,7 +4,7 @@ use crate::app_state::Schedule;
 use crate::parser::parse_xls;
 use crate::routes::schedule::schema::CacheStatus;
 use crate::routes::schema::{IntoResponseAsError, ResponseError};
-use crate::xls_downloader::interface::{FetchError, XLSDownloader};
+use crate::xls_downloader::interface::XLSDownloader;
 use actix_web::web::Json;
 use actix_web::{patch, web};
 use chrono::Utc;

@@ -60,18 +60,16 @@ pub async fn update_download_url(
                 }
             },
             Err(error) => {
-                if let FetchError::Unknown(error) = error {
-                    sentry::capture_error(&error);
-                }
                 eprintln!("Unknown url provided {}", data.url);
+                eprintln!("{:?}", error);
+
                 ErrorCode::DownloadFailed.into_response()
             }
         }
     }
 }
 Err(error) => {
-    if let FetchError::Unknown(error) = error {
-        sentry::capture_error(&error);
-    }
     eprintln!("Unknown url provided {}", data.url);
+    eprintln!("{:?}", error);
+
     ErrorCode::FetchFailed.into_response()
 }

@@ -59,16 +59,13 @@ async fn oauth(data: web::Json<Request>, app_state: web::Data<AppState>) -> Serv
         return ErrorCode::VkIdError.into_response();
     }

-    match res.json::<VkIdAuthResponse>().await {
-        Ok(auth_data) =>
-            Ok(Response {
-                access_token: auth_data.id_token,
-            }).into(),
-        Err(error) => {
-            sentry::capture_error(&error);
-
-            ErrorCode::VkIdError.into_response()
-        }
+    if let Ok(auth_data) = res.json::<VkIdAuthResponse>().await {
+        Ok(Response {
+            access_token: auth_data.id_token,
+        })
+        .into()
+    } else {
+        ErrorCode::VkIdError.into_response()
+    }
     }
     Err(_) => ErrorCode::VkIdError.into_response(),

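With the sentry::capture_error call gone, the decoding error is no longer inspected, so an if let reads more directly than a match here. A generic sketch of that refactor on a plain Result; the types and messages are illustrative:

fn describe(res: Result<u32, String>) -> String {
    // When the error value itself is not needed, `if let ... else` avoids
    // naming and then discarding it in a match arm.
    if let Ok(value) = res {
        format!("ok: {value}")
    } else {
        "error".to_string()
    }
}

fn main() {
    assert_eq!(describe(Ok(7)), "ok: 7");
    assert_eq!(describe(Err("boom".to_string())), "error");
}
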
@@ -1,13 +1,11 @@
 use crate::xls_downloader::interface::{FetchError, FetchOk, FetchResult, XLSDownloader};
 use chrono::{DateTime, Utc};
-use std::env;

 pub struct BasicXlsDownloader {
     pub url: Option<String>,
-    user_agent: String,
 }

-async fn fetch_specified(url: &String, user_agent: &String, head: bool) -> FetchResult {
+async fn fetch_specified(url: &String, user_agent: String, head: bool) -> FetchResult {
     let client = reqwest::Client::new();

     let response = if head {

@@ -15,7 +13,7 @@ async fn fetch_specified(url: &String, user_agent: &String, head: bool) -> Fetch
     } else {
         client.get(url)
     }
-    .header("User-Agent", user_agent.clone())
+    .header("User-Agent", user_agent)
     .send()
     .await;

@@ -51,16 +49,13 @@ async fn fetch_specified(url: &String, user_agent: &String, head: bool) -> Fetch
                 })
             }
         }
-        Err(e) => Err(FetchError::Unknown(e)),
+        Err(_) => Err(FetchError::Unknown),
     }
 }

 impl BasicXlsDownloader {
     pub fn new() -> Self {
-        BasicXlsDownloader {
-            url: None,
-            user_agent: env::var("REQWEST_USER_AGENT").expect("USER_AGENT must be set"),
-        }
+        BasicXlsDownloader { url: None }
     }
 }

@@ -69,12 +64,17 @@ impl XLSDownloader for BasicXlsDownloader {
         if self.url.is_none() {
             Err(FetchError::NoUrlProvided)
         } else {
-            fetch_specified(self.url.as_ref().unwrap(), &self.user_agent, head).await
+            fetch_specified(
+                self.url.as_ref().unwrap(),
+                "t.me/polytechnic_next".to_string(),
+                head,
+            )
+            .await
         }
     }

     async fn set_url(&mut self, url: String) -> FetchResult {
-        let result = fetch_specified(&url, &self.user_agent, true).await;
+        let result = fetch_specified(&url, "t.me/polytechnic_next".to_string(), true).await;

         if let Ok(_) = result {
             self.url = Some(url);

@@ -95,8 +95,8 @@ mod tests {
         let user_agent = String::new();

         let results = [
-            fetch_specified(&url, &user_agent, true).await,
-            fetch_specified(&url, &user_agent, false).await,
+            fetch_specified(&url, user_agent.clone(), true).await,
+            fetch_specified(&url, user_agent.clone(), false).await,
         ];

         assert!(results[0].is_err());

@@ -109,8 +109,8 @@ mod tests {
         let user_agent = String::new();

         let results = [
-            fetch_specified(&url, &user_agent, true).await,
-            fetch_specified(&url, &user_agent, false).await,
+            fetch_specified(&url, user_agent.clone(), true).await,
+            fetch_specified(&url, user_agent.clone(), false).await,
         ];

         assert!(results[0].is_err());

@@ -132,8 +132,8 @@ mod tests {
         let user_agent = String::new();

         let results = [
-            fetch_specified(&url, &user_agent, true).await,
-            fetch_specified(&url, &user_agent, false).await,
+            fetch_specified(&url, user_agent.clone(), true).await,
+            fetch_specified(&url, user_agent.clone(), false).await,
         ];

         assert!(results[0].is_err());

@@ -149,8 +149,8 @@ mod tests {
         let user_agent = String::new();

         let results = [
-            fetch_specified(&url, &user_agent, true).await,
-            fetch_specified(&url, &user_agent, false).await,
+            fetch_specified(&url, user_agent.clone(), true).await,
+            fetch_specified(&url, user_agent.clone(), false).await,
         ];

         assert!(results[0].is_err());

@@ -172,8 +172,8 @@ mod tests {
         let user_agent = String::new();

         let results = [
-            fetch_specified(&url, &user_agent, true).await,
-            fetch_specified(&url, &user_agent, false).await,
+            fetch_specified(&url, user_agent.clone(), true).await,
+            fetch_specified(&url, user_agent.clone(), false).await,
         ];

         assert!(results[0].is_ok());

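These hunks drop the REQWEST_USER_AGENT environment variable and instead pass a hard-coded User-Agent ("t.me/polytechnic_next") by value into fetch_specified. A standalone sketch of the same request shape, assuming the reqwest and tokio crates; the function name and example URL are illustrative only:

use reqwest::StatusCode;

// Issues a HEAD request with an owned User-Agent string, mirroring the
// `head == true` branch of fetch_specified.
async fn head_with_user_agent(url: &str, user_agent: String) -> Result<StatusCode, reqwest::Error> {
    let client = reqwest::Client::new();
    let response = client
        .head(url)
        .header("User-Agent", user_agent) // the owned String is moved into the header
        .send()
        .await?;
    Ok(response.status())
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let status = head_with_user_agent("https://example.com", "t.me/polytechnic_next".to_string()).await?;
    println!("status: {status}");
    Ok(())
}
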
@@ -1,14 +1,13 @@
 use chrono::{DateTime, Utc};
-use std::mem::discriminant;

 /// XLS data retrieval errors.
-#[derive(Debug)]
+#[derive(PartialEq, Debug)]
 pub enum FetchError {
     /// File url is not set.
     NoUrlProvided,

     /// Unknown error.
-    Unknown(reqwest::Error),
+    Unknown,

     /// Server returned a status code different from 200.
     BadStatusCode,

@@ -20,12 +19,6 @@ pub enum FetchError {
     BadHeaders,
 }

-impl PartialEq for FetchError {
-    fn eq(&self, other: &Self) -> bool {
-        discriminant(self) == discriminant(other)
-    }
-}
-
 /// Result of XLS data retrieval.
 pub struct FetchOk {
     /// ETag object.

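Because FetchError::Unknown no longer carries a reqwest::Error (a type without PartialEq), the manual discriminant-based PartialEq impl can give way to a derive. A small self-contained illustration of both approaches; the enums below are stand-ins, not the repository's types:

use std::mem::discriminant;

// With only unit variants, deriving PartialEq behaves like comparing discriminants.
#[derive(PartialEq, Debug)]
enum DerivedError {
    NoUrlProvided,
    Unknown,
}

// The old pattern: a variant may hold a payload that is not PartialEq, so
// equality is defined as "same variant", ignoring the payload.
#[derive(Debug)]
enum ManualError {
    Unknown(String), // stand-in for a payload such as reqwest::Error
    BadStatusCode,
}

impl PartialEq for ManualError {
    fn eq(&self, other: &Self) -> bool {
        discriminant(self) == discriminant(other)
    }
}

fn main() {
    assert_eq!(DerivedError::Unknown, DerivedError::Unknown);
    assert!(ManualError::Unknown("a".into()) == ManualError::Unknown("b".into()));
    assert!(ManualError::Unknown("a".into()) != ManualError::BadStatusCode);
    println!("equality checks passed");
}
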