Mirror of https://github.com/n08i40k/schedule-parser-rusted.git
Synced 2025-12-06 09:47:50 +03:00
Commit: Connecting to Postgres and a test authorization endpoint
.gitignore (vendored, 4 changed lines)
@@ -1,4 +1,6 @@
 /target
 .~*.xls
 schedule.json
 teachers.json
+
+.env*
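Since .env* is now ignored, local configuration can stay out of version control. A minimal .env sketch for this setup (the user and password below are placeholders; host, port, and database name follow the JDBC URL in .idea/dataSources.xml, and main.rs loads the file via dotenvy and reads DATABASE_URL):

DATABASE_URL=postgres://postgres:postgres@localhost:5432/sp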
.idea/dataSources.xml (generated, new file, 12 lines)
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="DataSourceManagerImpl" format="xml" multifile-model="true">
+    <data-source source="LOCAL" name="sp@localhost" uuid="28502a90-08bf-4cc0-8494-10dc74e37189">
+      <driver-ref>postgresql</driver-ref>
+      <synchronize>true</synchronize>
+      <jdbc-driver>org.postgresql.Driver</jdbc-driver>
+      <jdbc-url>jdbc:postgresql://localhost:5432/sp</jdbc-url>
+      <working-dir>$ProjectFileDir$</working-dir>
+    </data-source>
+  </component>
+</project>
.idea/sqldialects.xml (generated, new file, 9 lines)
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="SqlDialectMappings">
+    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-211822_create_user_role/down.sql" dialect="PostgreSQL" />
+    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-212111_create_users/up.sql" dialect="PostgreSQL" />
+    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/down.sql" dialect="PostgreSQL" />
+    <file url="file://$PROJECT_DIR$/migrations/2025-03-21-212723_create_fcm/up.sql" dialect="PostgreSQL" />
+  </component>
+</project>
Cargo.lock (generated, 789 changed lines)
File diff suppressed because it is too large.
Cargo.toml (changed)
@@ -8,9 +8,12 @@ edition = "2024"
 publish = false
 
 [dependencies]
+diesel = { version = "2.2.8", features = ["postgres"] }
+diesel-derive-enum = { git = "https://github.com/Havunen/diesel-derive-enum.git", features = ["postgres"] }
+dotenvy = "0.15.7"
 serde = { version = "1.0.219", features = ["derive"] }
-serde_json = "1.0.140"
 schedule_parser = { path = "./lib/schedule_parser" }
 chrono = "0.4.40"
 reqwest = "0.12.15"
 tokio = { version = "1.44.1", features = ["macros", "rt-multi-thread"] }
+actix-web = "4.10.2"
diesel.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
+# For documentation on how to configure this file,
+# see https://diesel.rs/guides/configuring-diesel-cli
+
+[print_schema]
+file = "src/database/schema.rs"
+custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
+
+[migrations_directory]
+dir = "./migrations"
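The [print_schema] section tells the Diesel CLI to write the generated schema into src/database/schema.rs and to attach the QueryId and Clone derives to custom SQL types such as user_role, while [migrations_directory] points it at ./migrations. With this file in place, running the diesel migration run or diesel print-schema commands against the database is what produces the src/database/schema.rs shown later in this commit.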
[schedule_parser library source, file path not shown in this extract] (2 changed lines)
@@ -10,7 +10,7 @@ use std::collections::HashMap;
 use std::io::Cursor;
 use std::sync::LazyLock;
 
-mod schema;
+pub mod schema;
 
 struct InternalId {
     /**
migrations/.keep (new file, 0 lines)
migrations/00000000000000_diesel_initial_setup/down.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
+-- This file was automatically created by Diesel to setup helper functions
+-- and other internal bookkeeping. This file is safe to edit, any future
+-- changes will be added to existing projects as new migrations.
+
+DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
+DROP FUNCTION IF EXISTS diesel_set_updated_at();
migrations/00000000000000_diesel_initial_setup/up.sql (new file, 36 lines)
@@ -0,0 +1,36 @@
+-- This file was automatically created by Diesel to setup helper functions
+-- and other internal bookkeeping. This file is safe to edit, any future
+-- changes will be added to existing projects as new migrations.
+
+
+
+
+-- Sets up a trigger for the given table to automatically set a column called
+-- `updated_at` whenever the row is modified (unless `updated_at` was included
+-- in the modified columns)
+--
+-- # Example
+--
+-- ```sql
+-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
+--
+-- SELECT diesel_manage_updated_at('users');
+-- ```
+CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
+BEGIN
+    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
+                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
+END;
+$$ LANGUAGE plpgsql;
+
+CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
+BEGIN
+    IF (
+        NEW IS DISTINCT FROM OLD AND
+        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
+    ) THEN
+        NEW.updated_at := current_timestamp;
+    END IF;
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
migrations/2025-03-21-211822_create_user_role/down.sql (new file, 1 line)
@@ -0,0 +1 @@
+DROP TYPE user_role;
migrations/2025-03-21-211822_create_user_role/up.sql (new file, 4 lines)
@@ -0,0 +1,4 @@
+CREATE TYPE user_role AS ENUM (
+    'STUDENT',
+    'TEACHER',
+    'ADMIN');
migrations/2025-03-21-212111_create_users/down.sql (new file, 1 line)
@@ -0,0 +1 @@
+DROP TABLE users;
migrations/2025-03-21-212111_create_users/up.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
+CREATE TABLE users
+(
+    id           text PRIMARY KEY NOT NULL,
+    username     text UNIQUE NOT NULL,
+    "password"   text NOT NULL,
+    vk_id        int4 NULL,
+    access_token text UNIQUE NOT NULL,
+    "group"      text NOT NULL,
+    role         user_role NOT NULL,
+    version      text NOT NULL
+);
migrations/2025-03-21-212723_create_fcm/down.sql (new file, 1 line)
@@ -0,0 +1 @@
+DROP TABLE fcm;
migrations/2025-03-21-212723_create_fcm/up.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
+CREATE TABLE fcm
+(
+    user_id text PRIMARY KEY NOT NULL,
+    token   text NOT NULL,
+    topics  text[] NULL
+);
+
+CREATE UNIQUE INDEX fcm_user_id_key ON fcm USING btree (user_id);
+
+ALTER TABLE fcm
+    ADD CONSTRAINT fcm_user_id_fkey FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE RESTRICT ON UPDATE CASCADE;
src/database/mod.rs (new file, 2 lines)
@@ -0,0 +1,2 @@
+pub mod schema;
+pub mod models;
src/database/models.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
+use diesel::prelude::*;
+use serde::Serialize;
+
+#[derive(diesel_derive_enum::DbEnum, Serialize, Debug)]
+#[ExistingTypePath = "crate::database::schema::sql_types::UserRole"]
+#[DbValueStyle = "UPPERCASE"]
+#[serde(rename_all = "UPPERCASE")]
+pub enum UserRole {
+    Student,
+    Teacher,
+    Admin,
+}
+
+#[derive(Queryable, Selectable, Serialize)]
+#[diesel(table_name = crate::database::schema::users)]
+#[diesel(check_for_backend(diesel::pg::Pg))]
+pub struct User {
+    pub id: String,
+    pub username: String,
+    pub password: String,
+    pub vk_id: Option<i32>,
+    pub access_token: String,
+    pub group: String,
+    pub role: UserRole,
+    pub version: String,
+}
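The commit only defines a Queryable/Selectable read model; no Insertable counterpart is included. A minimal sketch of what one could look like with Diesel's derive, assuming a hypothetical NewUser name and borrowed string fields (not code from this repository):

use diesel::prelude::*;

use crate::database::models::UserRole;

// Hypothetical insert model against the same `users` table; not part of this commit.
#[derive(Insertable)]
#[diesel(table_name = crate::database::schema::users)]
pub struct NewUser<'a> {
    pub id: &'a str,
    pub username: &'a str,
    pub password: &'a str,
    pub vk_id: Option<i32>,
    pub access_token: &'a str,
    pub group: &'a str,
    pub role: UserRole,
    pub version: &'a str,
}

A row would then be created with diesel::insert_into(users::table).values(&new_user).execute(connection).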
src/database/schema.rs (new file, 38 lines)
@@ -0,0 +1,38 @@
+// @generated automatically by Diesel CLI.
+
+pub mod sql_types {
+    #[derive(diesel::query_builder::QueryId, Clone, diesel::sql_types::SqlType)]
+    #[diesel(postgres_type(name = "user_role"))]
+    pub struct UserRole;
+}
+
+diesel::table! {
+    fcm (user_id) {
+        user_id -> Text,
+        token -> Text,
+        topics -> Nullable<Array<Nullable<Text>>>,
+    }
+}
+
+diesel::table! {
+    use diesel::sql_types::*;
+    use super::sql_types::UserRole;
+
+    users (id) {
+        id -> Text,
+        username -> Text,
+        password -> Text,
+        vk_id -> Nullable<Int4>,
+        access_token -> Text,
+        group -> Text,
+        role -> UserRole,
+        version -> Text,
+    }
+}
+
+diesel::joinable!(fcm -> users (user_id));
+
+diesel::allow_tables_to_appear_in_same_query!(
+    fcm,
+    users,
+);
src/main.rs (86 changed lines)
@@ -1,39 +1,57 @@
+use crate::routes::auth::sign_in::sign_in;
 use crate::xls_downloader::basic_impl::BasicXlsDownloader;
-use crate::xls_downloader::interface::XLSDownloader;
-use schedule_parser::parse_xls;
-use std::{env, fs};
+use actix_web::{web, App, HttpServer};
+use chrono::{DateTime, Utc};
+use diesel::{Connection, PgConnection};
+use dotenvy::dotenv;
+use schedule_parser::schema::ScheduleEntity;
+use std::collections::HashMap;
+use std::sync::Mutex;
+use std::env;
 
+mod database;
+mod routes;
 mod xls_downloader;
 
-#[tokio::main]
-async fn main() {
-    let args: Vec<String> = env::args().collect();
-    assert_ne!(args.len(), 1);
-
-    let mut downloader = BasicXlsDownloader::new();
-
-    downloader
-        .set_url(args[1].to_string())
-        .await
-        .expect("Failed to set url");
-
-    let fetch_res = downloader.fetch(false).await.expect("Failed to fetch xls");
-
-    let (teachers, groups) = parse_xls(fetch_res.data.as_ref().unwrap());
-
-    fs::write(
-        "./schedule.json",
-        serde_json::to_string_pretty(&groups)
-            .expect("Failed to serialize schedule")
-            .as_bytes(),
-    )
-    .expect("Failed to write schedule");
-
-    fs::write(
-        "./teachers.json",
-        serde_json::to_string_pretty(&teachers)
-            .expect("Failed to serialize teachers schedule")
-            .as_bytes(),
-    )
-    .expect("Failed to write teachers schedule");
+pub struct AppState {
+    downloader: Mutex<BasicXlsDownloader>,
+    schedule: Mutex<
+        Option<(
+            String,
+            DateTime<Utc>,
+            (
+                HashMap<String, ScheduleEntity>,
+                HashMap<String, ScheduleEntity>,
+            ),
+        )>,
+    >,
+    database: Mutex<PgConnection>,
+}
+
+#[actix_web::main]
+async fn main() {
+    dotenv().ok();
+
+    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
+
+    let data = web::Data::new(AppState {
+        downloader: Mutex::new(BasicXlsDownloader::new()),
+        schedule: Mutex::new(None),
+        database: Mutex::new(
+            PgConnection::establish(&database_url)
+                .unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
+        ),
+    });
+
+    HttpServer::new(move || {
+        let schedule_scope = web::scope("/auth").service(sign_in);
+        let api_scope = web::scope("/api/v1").service(schedule_scope);
+
+        App::new().app_data(data.clone()).service(api_scope)
+    })
+    .bind(("127.0.0.1", 8080))
+    .unwrap()
+    .run()
+    .await
+    .unwrap();
 }
src/routes/auth/mod.rs (new file, 2 lines)
@@ -0,0 +1,2 @@
+pub mod sign_in;
+mod schema;
src/routes/auth/schema.rs (new file, 56 lines)
@@ -0,0 +1,56 @@
+use crate::database::models::User;
+use serde::{Deserialize, Serialize, Serializer};
+
+#[derive(Deserialize)]
+pub struct SignInDto {
+    pub username: String,
+    pub password: String,
+}
+
+pub struct SignInResult(Result<SignInOk, SignInErr>);
+
+#[derive(Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SignInOk {
+    id: String,
+    access_token: String,
+    group: String,
+}
+
+#[derive(Serialize)]
+pub struct SignInErr {
+    code: SignInErrCode,
+}
+
+#[derive(Serialize)]
+#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
+pub enum SignInErrCode {
+    IncorrectCredentials,
+    InvalidVkAccessToken,
+}
+
+impl SignInResult {
+    pub fn ok(user: &User) -> Self {
+        Self(Ok(SignInOk {
+            id: user.id.clone(),
+            access_token: user.access_token.clone(),
+            group: user.group.clone(),
+        }))
+    }
+
+    pub fn err(code: SignInErrCode) -> SignInResult {
+        Self(Err(SignInErr { code }))
+    }
+}
+
+impl Serialize for SignInResult {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        match &self.0 {
+            Ok(ok) => serializer.serialize_some(&ok),
+            Err(err) => serializer.serialize_some(&err),
+        }
+    }
+}
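Because SignInResult serializes whichever variant it holds, the response body is a flat JSON object in both cases: on success roughly {"id": "...", "accessToken": "...", "group": "..."} (camelCase comes from the serde rename), and on failure {"code": "INCORRECT_CREDENTIALS"} (SCREAMING_SNAKE_CASE), always with HTTP 200 since the handler below returns Json in both arms.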
src/routes/auth/sign_in.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
+use crate::database::models::User;
+use crate::routes::auth::schema::SignInErrCode::IncorrectCredentials;
+use crate::routes::auth::schema::{SignInDto, SignInResult};
+use crate::AppState;
+use actix_web::{post, web};
+use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl, SelectableHelper};
+use std::ops::DerefMut;
+use web::Json;
+
+#[post("/sign-in")]
+pub async fn sign_in(data: Json<SignInDto>, app_state: web::Data<AppState>) -> Json<SignInResult> {
+    use crate::database::schema::users::dsl::*;
+
+    match {
+        let mut lock = app_state.database.lock().unwrap();
+        let connection = lock.deref_mut();
+
+        users
+            .filter(username.eq(data.username.clone()))
+            .select(User::as_select())
+            .first(connection)
+    } {
+        Ok(user) => Json(SignInResult::ok(&user)),
+        Err(_) => Json(SignInResult::err(IncorrectCredentials)),
+    }
+}
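With the scopes registered in main.rs, this handler is reachable at POST /api/v1/auth/sign-in. A minimal client sketch using the reqwest dependency already present in Cargo.toml (the username and password values are placeholders, and the raw JSON body avoids assuming reqwest's optional json feature):

// Hypothetical smoke test for the sign-in endpoint; run against a locally started server.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let response = reqwest::Client::new()
        .post("http://127.0.0.1:8080/api/v1/auth/sign-in")
        .header("Content-Type", "application/json")
        .body(r#"{"username": "some-user", "password": "some-password"}"#)
        .send()
        .await?;

    // Prints either the SignInOk or the SignInErr JSON body.
    println!("{}", response.text().await?);
    Ok(())
}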
src/routes/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
+pub mod auth;